diff --git a/.rubocop.yml b/.rubocop.yml index 9055a997..28d2dcd7 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,7 +1,7 @@ AllCops: DisplayCopNames: true DisplayStyleGuide: true - TargetRubyVersion: 2.1 + TargetRubyVersion: 2.2 Exclude: - 'vendor/**/*' diff --git a/Dockerfile.dev b/Dockerfile.dev new file mode 100644 index 00000000..3f602747 --- /dev/null +++ b/Dockerfile.dev @@ -0,0 +1,12 @@ +FROM ruby:2.4.0 +MAINTAINER data@localytics.com + +ENV DEBIAN_FRONTEND noninteractive +RUN echo "deb http://deb.debian.org/debian/ jessie main" > /etc/apt/sources.list +RUN echo "deb-src http://deb.debian.org/debian/ jessie main" >> /etc/apt/sources.list +RUN echo "deb http://security.debian.org/ jessie/updates main" >> /etc/apt/sources.list +RUN echo "deb-src http://security.debian.org/ jessie/updates main" >> /etc/apt/sources.list +RUN apt-get update && apt-get -y install libnss3-tools unixodbc-dev libmyodbc mysql-client odbc-postgresql postgresql + +WORKDIR /workspace +CMD docker/docker-entrypoint.sh diff --git a/Gemfile b/Gemfile index c0cf8f42..fa75df15 100644 --- a/Gemfile +++ b/Gemfile @@ -1,6 +1,3 @@ source 'https://rubygems.org' gemspec - -gem 'activerecord', '5.0.1' -gem 'pry', '~> 0.11.1' diff --git a/README.md b/README.md index 27f46dcc..0f5b5519 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,25 @@ ActiveRecord models that use this connection will now be connecting to the confi To run the tests, you'll need the ODBC driver as well as the connection adapter for each database against which you're trying to test. Then run `DSN=MyDatabaseDSN bundle exec rake test` and the test suite will be run by connecting to your database. +## Testing Using a Docker Container (Because ODBC on macOS is Hard) + +Tested on macOS Sierra. + +Run from the project root: + +``` +bundle package +docker build -f Dockerfile.dev -t odbc-dev . + +# Mount a local mysql directory to avoid permissions problems +mkdir -p /tmp/mysql +docker run -it --rm -v $(pwd):/workspace -v /tmp/mysql:/var/lib/mysql odbc-dev:latest + +# In container +docker/test.sh +``` + ## Contributing Bug reports and pull requests are welcome on GitHub at https://github.com/localytics/odbc_adapter. 
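As an orientation for reviewers, here is a minimal sketch of how an application might point ActiveRecord at this adapter once the gem is installed. The `adapter: 'odbc'`, `:dsn`, `:conn_str`, and `:encoding` / `ENCODING=utf8` keys follow the connection code later in this diff; the DSN name and credentials are placeholders.

```ruby
require 'odbc_adapter'

# DSN-based connection; the new :encoding => 'utf8' option makes the adapter
# use the ODBC_UTF8 module instead of ODBC.
ActiveRecord::Base.establish_connection(
  adapter:  'odbc',
  dsn:      'MyDatabaseDSN',  # placeholder DSN name
  username: 'rails',          # placeholder credentials
  password: 'rails',
  encoding: 'utf8'
)

# Alternatively, a DSN-less connection string (this one is taken from
# docker/test.sh); ENCODING=utf8 is parsed out of the string itself:
#
#   ActiveRecord::Base.establish_connection(
#     adapter:  'odbc',
#     conn_str: 'DRIVER={PostgreSQL UNICODE};SERVER=localhost;PORT=5432;DATABASE=odbc_test;UID=postgres;ENCODING=utf8'
#   )
```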
diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh new file mode 100755 index 00000000..be611261 --- /dev/null +++ b/docker/docker-entrypoint.sh @@ -0,0 +1,21 @@ +#!/bin/bash +set -e -x + +# Installing mysql at startup due to file permissions: https://github.com/geerlingguy/drupal-vm/issues/1497 +apt-get install -y mysql-server +bundle install --local +service mysql start + +# Allows passwordless auth from command line and odbc +sed -i "s/local all postgres peer/local all postgres trust/" /etc/postgresql/9.4/main/pg_hba.conf +sed -i "s/host all all 127.0.0.1\/32 md5/host all all 127.0.0.1\/32 trust/" /etc/postgresql/9.4/main/pg_hba.conf +service postgresql start + +odbcinst -i -d -f /usr/share/libmyodbc/odbcinst.ini +mysql -e "DROP DATABASE IF EXISTS odbc_test; CREATE DATABASE IF NOT EXISTS odbc_test;" -uroot +mysql -e "GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost';" -uroot + +odbcinst -i -d -f /usr/share/psqlodbc/odbcinst.ini.template +psql -c "CREATE DATABASE odbc_test;" -U postgres + +/bin/bash diff --git a/docker/test.sh b/docker/test.sh new file mode 100755 index 00000000..067de0c3 --- /dev/null +++ b/docker/test.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +echo "Testing mysql" && CONN_STR='DRIVER=MySQL;SERVER=localhost;DATABASE=odbc_test;USER=root;PASSWORD=;' bundle exec rake && \ + echo "Testing postgres" && CONN_STR='DRIVER={PostgreSQL ANSI};SERVER=localhost;PORT=5432;DATABASE=odbc_test;UID=postgres;' bundle exec rake && \ + echo "Testing postgres utf8" && CONN_STR='DRIVER={PostgreSQL UNICODE};SERVER=localhost;PORT=5432;DATABASE=odbc_test;UID=postgres;ENCODING=utf8' bundle exec rake \ No newline at end of file diff --git a/lib/active_record/connection_adapters/odbc_adapter.rb b/lib/active_record/connection_adapters/odbc_adapter.rb index 672c5db1..f869f46c 100644 --- a/lib/active_record/connection_adapters/odbc_adapter.rb +++ b/lib/active_record/connection_adapters/odbc_adapter.rb @@ -1,6 +1,6 @@ require 'active_record' -require 'arel/visitors/bind_visitor' require 'odbc' +require 'odbc_utf8' require 'odbc_adapter/database_limits' require 'odbc_adapter/database_statements' @@ -14,6 +14,9 @@ require 'odbc_adapter/registry' require 'odbc_adapter/version' +require 'odbc_adapter/type/type' +require 'odbc_adapter/concerns/concern' + module ActiveRecord class Base class << self @@ -30,7 +33,7 @@ def odbc_connection(config) raise ArgumentError, 'No data source name (:dsn) or connection string (:conn_str) specified.' end - database_metadata = ::ODBCAdapter::DatabaseMetadata.new(connection) + database_metadata = ::ODBCAdapter::DatabaseMetadata.new(connection, config[:encoding_bug]) database_metadata.adapter_class.new(connection, logger, config, database_metadata) end @@ -40,8 +43,11 @@ def odbc_connection(config) def odbc_dsn_connection(config) username = config[:username] ? config[:username].to_s : nil password = config[:password] ? config[:password].to_s : nil - connection = ODBC.connect(config[:dsn], username, password) - [connection, config.merge(username: username, password: password)] + odbc_module = config[:encoding] == 'utf8' ? ODBC_UTF8 : ODBC + connection = odbc_module.connect(config[:dsn], username, password) + + # encoding_bug indicates that the driver is using non ASCII and has the issue referenced here https://github.com/larskanis/ruby-odbc/issues/2 + [connection, config.merge(username: username, password: password, encoding_bug: config[:encoding] == 'utf8')] end # Connect using ODBC connection string @@ -49,12 +55,15 @@ def odbc_dsn_connection(config) # e.g. 
"DSN=virt5;UID=rails;PWD=rails" # "DRIVER={OpenLink Virtuoso};HOST=carlmbp;UID=rails;PWD=rails" def odbc_conn_str_connection(config) - driver = ODBC::Driver.new + attrs = config[:conn_str].split(';').map { |option| option.split('=', 2) }.to_h + odbc_module = attrs['ENCODING'] == 'utf8' ? ODBC_UTF8 : ODBC + driver = odbc_module::Driver.new driver.name = 'odbc' - driver.attrs = config[:conn_str].split(';').map { |option| option.split('=', 2) }.to_h + driver.attrs = attrs - connection = ODBC::Database.new.drvconnect(driver) - [connection, config.merge(driver: driver)] + connection = odbc_module::Database.new.drvconnect(driver) + # encoding_bug indicates that the driver is using non ASCII and has the issue referenced here https://github.com/larskanis/ruby-odbc/issues/2 + [connection, config.merge(driver: driver, encoding: attrs['ENCODING'], encoding_bug: attrs['ENCODING'] == 'utf8')] end end end @@ -68,10 +77,15 @@ class ODBCAdapter < AbstractAdapter ADAPTER_NAME = 'ODBC'.freeze BOOLEAN_TYPE = 'BOOLEAN'.freeze + VARIANT_TYPE = 'VARIANT'.freeze + DATE_TYPE = 'DATE'.freeze + JSON_TYPE = 'JSON'.freeze - ERR_DUPLICATE_KEY_VALUE = 23_505 - ERR_QUERY_TIMED_OUT = 57_014 - ERR_QUERY_TIMED_OUT_MESSAGE = /Query has timed out/ + ERR_DUPLICATE_KEY_VALUE = 23_505 + ERR_QUERY_TIMED_OUT = 57_014 + ERR_QUERY_TIMED_OUT_MESSAGE = /Query has timed out/ + ERR_CONNECTION_FAILED_REGEX = '^08[0S]0[12347]'.freeze + ERR_CONNECTION_FAILED_MESSAGE = /Client connection failed/ # The object that stores the information that is fetched from the DBMS # when a connection is first established. @@ -107,11 +121,12 @@ def active? # new connection with the database. def reconnect! disconnect! + odbc_module = @config[:encoding] == 'utf8' ? ODBC_UTF8 : ODBC @connection = if @config.key?(:dsn) - ODBC.connect(@config[:dsn], @config[:username], @config[:password]) + odbc_module.connect(@config[:dsn], @config[:username], @config[:password]) else - ODBC::Database.new.drvconnect(@config[:driver]) + odbc_module::Database.new.drvconnect(@config[:driver]) end configure_time_options(@connection) super @@ -127,54 +142,72 @@ def disconnect! # Build a new column object from the given options. Effectively the same # as super except that it also passes in the native type. 
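The SQLSTATE pattern added above (`ERR_CONNECTION_FAILED_REGEX = '^08[0S]0[12347]'`) is easy to misread, so here is a throwaway check (an editor's sketch, not part of the adapter) showing which states it is meant to catch:

```ruby
# '^08...' SQLSTATEs signal lost client connections; 57014 (query cancelled) should not match.
%w[08001 08002 08003 08004 08007 08S01 57014].each do |state|
  puts "#{state} -> #{state.match('^08[0S]0[12347]') ? 'connection failure' : 'no match'}"
end
# 08001, 08002, 08003, 08004, 08007 and 08S01 match; 57014 is handled by the
# query-timeout branch of translate_exception instead.
```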
# rubocop:disable Metrics/ParameterLists - def new_column(name, default, sql_type_metadata, null, table_name, default_function = nil, collation = nil, native_type = nil) - ::ODBCAdapter::Column.new(name, default, sql_type_metadata, null, table_name, default_function, collation, native_type) + def new_column(name, default, sql_type_metadata, null, native_type = nil) + ::ODBCAdapter::Column.new(name, default, sql_type_metadata, null, native_type) + end + + # Snowflake doesn't have a mechanism to return the primary key on inserts, so it needs to be prefetched + def prefetch_primary_key?(table_name = nil) + true + end + + def next_sequence_value(table_name = nil) + exec_query("SELECT #{table_name}.NEXTVAL as new_id").first["new_id"] + end + + def build_merge_sql(merge) # :nodoc: + <<~SQL + MERGE #{merge.into} AS TARGET USING (#{merge.values_list}) AS SOURCE ON #{merge.match} + #{merge.merge_delete} + #{merge.merge_update} + #{merge.merge_insert} + SQL + end + + def exec_merge_all(sql, name) # :nodoc: + exec_query(sql, name) end protected - # Build the type map for ActiveRecord + # Snowflake ODBC Adapter specific def initialize_type_map(map) - map.register_type 'boolean', Type::Boolean.new - map.register_type ODBC::SQL_CHAR, Type::String.new - map.register_type ODBC::SQL_LONGVARCHAR, Type::Text.new - map.register_type ODBC::SQL_TINYINT, Type::Integer.new(limit: 4) - map.register_type ODBC::SQL_SMALLINT, Type::Integer.new(limit: 8) - map.register_type ODBC::SQL_INTEGER, Type::Integer.new(limit: 16) - map.register_type ODBC::SQL_BIGINT, Type::BigInteger.new(limit: 32) - map.register_type ODBC::SQL_REAL, Type::Float.new(limit: 24) - map.register_type ODBC::SQL_FLOAT, Type::Float.new - map.register_type ODBC::SQL_DOUBLE, Type::Float.new(limit: 53) - map.register_type ODBC::SQL_DECIMAL, Type::Float.new - map.register_type ODBC::SQL_NUMERIC, Type::Integer.new - map.register_type ODBC::SQL_BINARY, Type::Binary.new - map.register_type ODBC::SQL_DATE, Type::Date.new - map.register_type ODBC::SQL_DATETIME, Type::DateTime.new - map.register_type ODBC::SQL_TIME, Type::Time.new - map.register_type ODBC::SQL_TIMESTAMP, Type::DateTime.new - map.register_type ODBC::SQL_GUID, Type::String.new - - alias_type map, ODBC::SQL_BIT, 'boolean' - alias_type map, ODBC::SQL_VARCHAR, ODBC::SQL_CHAR - alias_type map, ODBC::SQL_WCHAR, ODBC::SQL_CHAR - alias_type map, ODBC::SQL_WVARCHAR, ODBC::SQL_CHAR - alias_type map, ODBC::SQL_WLONGVARCHAR, ODBC::SQL_LONGVARCHAR - alias_type map, ODBC::SQL_VARBINARY, ODBC::SQL_BINARY - alias_type map, ODBC::SQL_LONGVARBINARY, ODBC::SQL_BINARY - alias_type map, ODBC::SQL_TYPE_DATE, ODBC::SQL_DATE - alias_type map, ODBC::SQL_TYPE_TIME, ODBC::SQL_TIME - alias_type map, ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP + map.register_type %r(boolean)i, Type::Boolean.new + map.register_type %r(date)i, Type::Date.new + map.register_type %r(varchar)i, Type::String.new + map.register_type %r(time)i, Type::Time.new + map.register_type %r(timestamp)i, Type::DateTime.new + map.register_type %r(binary)i, Type::Binary.new + map.register_type %r(double)i, Type::Float.new + map.register_type(%r(decimal)i) do |sql_type| + scale = extract_scale(sql_type) + if scale == 0 + ::ODBCAdapter::Type::SnowflakeInteger.new + else + Type::Decimal.new(precision: extract_precision(sql_type), scale: scale) + end + end + map.register_type %r(struct)i, ::ODBCAdapter::Type::SnowflakeObject.new + map.register_type %r(array)i, ::ODBCAdapter::Type::ArrayOfValues.new + map.register_type %r(variant)i, ::ODBCAdapter::Type::Variant.new end # 
Translate an exception from the native DBMS to something usable by # ActiveRecord. - def translate_exception(exception, message) + def translate_exception(exception, message:, sql:, binds:) error_number = exception.message[/^\d+/].to_i if error_number == ERR_DUPLICATE_KEY_VALUE - ActiveRecord::RecordNotUnique.new(message, exception) + ActiveRecord::RecordNotUnique.new(message, sql: sql, binds: binds) elsif error_number == ERR_QUERY_TIMED_OUT || exception.message =~ ERR_QUERY_TIMED_OUT_MESSAGE - ::ODBCAdapter::QueryTimeoutError.new(message, exception) + ::ODBCAdapter::QueryTimeoutError.new(message, sql: sql, binds: binds) + elsif exception.message.match(ERR_CONNECTION_FAILED_REGEX) || exception.message =~ ERR_CONNECTION_FAILED_MESSAGE + begin + reconnect! + ::ODBCAdapter::ConnectionFailedError.new(message, sql: sql, binds: binds) + rescue => e + puts "unable to reconnect #{e}" + end else super end @@ -186,8 +219,8 @@ def translate_exception(exception, message) # work with non-string keys, and in our case the keys are (almost) all # numeric def alias_type(map, new_type, old_type) - map.register_type(new_type) do |_, *args| - map.lookup(old_type, *args) + map.register_type(new_type) do |_| + map.lookup(old_type) end end diff --git a/lib/active_record/merge_all.rb b/lib/active_record/merge_all.rb new file mode 100644 index 00000000..352b9cb3 --- /dev/null +++ b/lib/active_record/merge_all.rb @@ -0,0 +1,195 @@ +# frozen_string_literal: true + +require "active_support/core_ext/enumerable" + +module ActiveRecord + class MergeAll # :nodoc: + attr_reader :model, :connection, :merges, :keys + attr_reader :perform_inserts, :perform_updates, :delete_key + + def initialize(model, merges, perform_inserts: true, perform_updates: true, prune_duplicates: false) + raise ArgumentError, "Empty list of attributes passed" if merges.blank? + + # TODO: Implement perform_deletes. Most of the code is here, but all completely untested. + @model, @connection, @merges, @keys = model, model.connection, merges, merges.first.keys.map(&:to_s) + @perform_inserts, @perform_updates, @delete_key = perform_inserts, perform_updates, nil + + if model.scope_attributes? + @scope_attributes = model.scope_attributes + @keys |= @scope_attributes.keys + end + @keys = @keys.to_set + + ensure_valid_options_for_connection! + + if prune_duplicates + do_prune_duplicates + end + end + + def execute + message = +"#{model} " + message << "Bulk " if merges.many? + message << "Merge" + connection.exec_merge_all to_sql, message + end + + def updatable_columns + keys - readonly_columns - [delete_key] + end + + def insertable_columns + keys - [delete_key] + end + + def insertable_non_primary_columns + insertable_columns - primary_keys + end + + def primary_keys + Array(connection.schema_cache.primary_keys(model.table_name)) + end + + def map_key_with_value + merges.map do |attributes| + attributes = attributes.stringify_keys + attributes.merge!(scope_attributes) if scope_attributes + + verify_attributes(attributes) + + keys.map do |key| + yield key, attributes[key] + end + end + end + + def perform_deletes + !delete_key.nil? + end + + private + attr_reader :scope_attributes + + def ensure_valid_options_for_connection! + + end + + def do_prune_duplicates + unless primary_keys.to_set.subset?(keys) + raise ArgumentError, "Pruning duplicates requires presence of all primary keys in the merges" + end + @merges = merges.reverse + merges.uniq! do |merge| + # Map the primary keys to determine uniqueness. 
If a primary key is nil, return a new empty object to + # guarantee a unique value. We don't ever want to throw out records that have a nil primary key as these are + # new records. + primary_keys.map { |key| merge[key].nil? ? Object.new : merge[key] } + end + merges.reverse! + end + + def to_sql + connection.build_merge_sql(ActiveRecord::MergeAll::Builder.new(self)) + end + + def readonly_columns + primary_keys + model.readonly_attributes.to_a + end + + def verify_attributes(attributes) + if keys != attributes.keys.to_set + raise ArgumentError, "All objects being merged must have the same keys" + end + end + + class Builder # :nodoc: + attr_reader :model + + delegate :keys, to: :merge_all + + def initialize(merge_all) + @merge_all, @model, @connection = merge_all, merge_all.model, merge_all.connection + end + + def into + # "INTO #{model.quoted_table_name} (#{columns_list})" + "INTO #{model.quoted_table_name}" + end + + def values_list + types = extract_types_from_columns_on(model.table_name, keys: keys) + + values_list = merge_all.map_key_with_value do |key, value| + connection.with_yaml_fallback(types[key].serialize(value)) + end + + values = connection.visitor.compile(Arel::Nodes::ValuesList.new(values_list)) + + "SELECT * FROM (#{values}) AS v1 (#{columns_list})" + end + + def match + quote_columns(merge_all.primary_keys).map { |column| "SOURCE.#{column}=TARGET.#{column}" }.join(" AND ") + end + + def merge_delete + merge_all.perform_deletes ? "WHEN MATCHED AND SOURCE.#{quote_column(merge_all.delete_key)} = TRUE THEN DELETE" : "" + end + + def merge_update + merge_all.perform_updates ? "WHEN MATCHED THEN UPDATE SET #{updatable_columns.map { |column| "TARGET.#{column}=SOURCE.#{column}" }.join(",")}" : "" + end + + def merge_insert + if merge_all.perform_inserts + <<~SQL + WHEN NOT MATCHED AND #{quote_columns(merge_all.primary_keys).map { |column| "SOURCE.#{column} IS NOT NULL" }.join(" AND ")} THEN INSERT (#{insertable_columns_list}) VALUES (#{quote_columns(merge_all.insertable_columns).map { |column| "SOURCE.#{column}"}.join(",")}) + WHEN NOT MATCHED AND #{quote_columns(merge_all.primary_keys).map { |column| "SOURCE.#{column} IS NULL" }.join(" OR ")} THEN INSERT (#{insertable_non_primary_columns_list}) VALUES (#{quote_columns(merge_all.insertable_non_primary_columns).map { |column| "SOURCE.#{column}"}.join(",")}) + SQL + else + "" + end + end + + private + attr_reader :connection, :merge_all + + def columns_list + format_columns(merge_all.keys) + end + + def insertable_columns_list + format_columns(merge_all.insertable_columns) + end + + def insertable_non_primary_columns_list + format_columns(merge_all.insertable_non_primary_columns) + end + + def updatable_columns + quote_columns(merge_all.updatable_columns) + end + + def extract_types_from_columns_on(table_name, keys:) + columns = connection.schema_cache.columns_hash(table_name) + + unknown_column = (keys - columns.keys).first + raise UnknownAttributeError.new(model.new, unknown_column) if unknown_column + + keys.index_with { |key| model.type_for_attribute(key) } + end + + def format_columns(columns) + columns.respond_to?(:map) ? 
quote_columns(columns).join(",") : columns + end + + def quote_columns(columns) + columns.map(&method(:quote_column)) + end + + def quote_column(column) + connection.quote_column_name(column) + end + end + end +end diff --git a/lib/active_record/merge_all_persistence.rb b/lib/active_record/merge_all_persistence.rb new file mode 100644 index 00000000..72f671de --- /dev/null +++ b/lib/active_record/merge_all_persistence.rb @@ -0,0 +1,14 @@ +require 'active_record/merge_all' + +module ActiveRecord + # = Active Record \Persistence + module MergeAllPersistence + extend ActiveSupport::Concern + + module ClassMethods + def merge_all!(attributes, perform_inserts: true, perform_updates: true, prune_duplicates: false) + MergeAll.new(self, attributes, perform_inserts: perform_inserts, perform_updates: perform_updates, prune_duplicates: prune_duplicates).execute + end + end + end +end diff --git a/lib/odbc_adapter.rb b/lib/odbc_adapter.rb index 194fb562..838d80ed 100644 --- a/lib/odbc_adapter.rb +++ b/lib/odbc_adapter.rb @@ -1,2 +1,3 @@ # Requiring with this pattern to mirror ActiveRecord require 'active_record/connection_adapters/odbc_adapter' +require 'active_record/merge_all_persistence' \ No newline at end of file diff --git a/lib/odbc_adapter/adapters/mysql_odbc_adapter.rb b/lib/odbc_adapter/adapters/mysql_odbc_adapter.rb index eaa690ef..0d439462 100644 --- a/lib/odbc_adapter/adapters/mysql_odbc_adapter.rb +++ b/lib/odbc_adapter/adapters/mysql_odbc_adapter.rb @@ -5,12 +5,8 @@ module Adapters class MySQLODBCAdapter < ActiveRecord::ConnectionAdapters::ODBCAdapter PRIMARY_KEY = 'INT(11) NOT NULL AUTO_INCREMENT PRIMARY KEY'.freeze - class BindSubstitution < Arel::Visitors::MySQL - include Arel::Visitors::BindVisitor - end - def arel_visitor - BindSubstitution.new(self) + Arel::Visitors::MySQL.new(self) end # Explicitly turning off prepared statements in the MySQL adapter because diff --git a/lib/odbc_adapter/adapters/null_odbc_adapter.rb b/lib/odbc_adapter/adapters/null_odbc_adapter.rb index 1a179905..c78e991f 100644 --- a/lib/odbc_adapter/adapters/null_odbc_adapter.rb +++ b/lib/odbc_adapter/adapters/null_odbc_adapter.rb @@ -4,15 +4,14 @@ module Adapters # registry. This allows for minimal support for DBMSs for which we don't # have an explicit adapter. class NullODBCAdapter < ActiveRecord::ConnectionAdapters::ODBCAdapter - class BindSubstitution < Arel::Visitors::ToSql - include Arel::Visitors::BindVisitor - end - + VARIANT_TYPE = 'VARIANT'.freeze + DATE_TYPE = 'DATE'.freeze + JSON_TYPE = 'JSON'.freeze # Using a BindVisitor so that the SQL string gets substituted before it is # sent to the DBMS (to attempt to get as much coverage as possible for # DBMSs we don't support). def arel_visitor - BindSubstitution.new(self) + Arel::Visitors::PostgreSQL.new(self) end # Explicitly turning off prepared_statements in the null adapter because diff --git a/lib/odbc_adapter/adapters/postgresql_odbc_adapter.rb b/lib/odbc_adapter/adapters/postgresql_odbc_adapter.rb index 28a28f7c..2cb76617 100644 --- a/lib/odbc_adapter/adapters/postgresql_odbc_adapter.rb +++ b/lib/odbc_adapter/adapters/postgresql_odbc_adapter.rb @@ -3,14 +3,17 @@ module Adapters # Overrides specific to PostgreSQL. 
Mostly taken from # ActiveRecord::ConnectionAdapters::PostgreSQLAdapter class PostgreSQLODBCAdapter < ActiveRecord::ConnectionAdapters::ODBCAdapter - BOOLEAN_TYPE = 'bool'.freeze + BOOLEAN_TYPE = 'boolean'.freeze PRIMARY_KEY = 'SERIAL PRIMARY KEY'.freeze + VARIANT_TYPE = 'VARIANT'.freeze + DATE_TYPE = 'DATE'.freeze + JSON_TYPE = 'JSON'.freeze alias create insert # Override to handle booleans appropriately def native_database_types - @native_database_types ||= super.merge(boolean: { name: 'bool' }) + @native_database_types ||= super.merge(boolean: { name: 'boolean' }) end def arel_visitor @@ -35,7 +38,7 @@ def default_sequence_name(table_name, pk = nil) "#{table_name}_#{pk || 'id'}_seq" end - def sql_for_insert(sql, pk, _id_value, _sequence_name, binds) + def sql_for_insert(sql, pk, binds) unless pk table_ref = extract_table_ref_from_insert_sql(sql) pk = primary_key(table_ref) if table_ref diff --git a/lib/odbc_adapter/column.rb b/lib/odbc_adapter/column.rb index 36492a82..4f2901ac 100644 --- a/lib/odbc_adapter/column.rb +++ b/lib/odbc_adapter/column.rb @@ -5,8 +5,8 @@ class Column < ActiveRecord::ConnectionAdapters::Column # Add the native_type accessor to allow the native DBMS to report back what # it uses to represent the column internally. # rubocop:disable Metrics/ParameterLists - def initialize(name, default, sql_type_metadata = nil, null = true, table_name = nil, native_type = nil, default_function = nil, collation = nil) - super(name, default, sql_type_metadata, null, table_name, default_function, collation) + def initialize(name, default, sql_type_metadata = nil, null = true, native_type = nil, **kwargs) + super(name, default, sql_type_metadata, null, **kwargs) @native_type = native_type end end diff --git a/lib/odbc_adapter/concerns/concern.rb b/lib/odbc_adapter/concerns/concern.rb new file mode 100644 index 00000000..54a24986 --- /dev/null +++ b/lib/odbc_adapter/concerns/concern.rb @@ -0,0 +1,3 @@ + +require 'odbc_adapter/concerns/easy_identified' +require 'odbc_adapter/concerns/insert_attribute_stripper' diff --git a/lib/odbc_adapter/concerns/easy_identified.rb b/lib/odbc_adapter/concerns/easy_identified.rb new file mode 100644 index 00000000..0a214e1f --- /dev/null +++ b/lib/odbc_adapter/concerns/easy_identified.rb @@ -0,0 +1,32 @@ + +module ODBCAdapter + module EasyIdentified + extend ActiveSupport::Concern + + included do + alias_method :pre_easy_identified_save, :save + alias_method :pre_easy_identified_save!, :save! 
+ + def save(**options, &block) + if self[:id] == :auto_generate then generate_id(true) end + pre_easy_identified_save(**options, &block) + end + + def save!(**options, &block) + if self[:id] == :auto_generate then generate_id(true) end + pre_easy_identified_save!(**options, &block) + end + + def generate_id(force_new = false) + if self[:id] == nil || force_new then self[:id] = retrieve_id end + end + + private + + def retrieve_id + sequence_name = self.class.table_name + "_ID_SEQ" + self.class.connection.exec_query("Select #{sequence_name}.nextval as new_id")[0]["new_id"] + end + end + end +end diff --git a/lib/odbc_adapter/concerns/insert_attribute_stripper.rb b/lib/odbc_adapter/concerns/insert_attribute_stripper.rb new file mode 100644 index 00000000..4af38cb8 --- /dev/null +++ b/lib/odbc_adapter/concerns/insert_attribute_stripper.rb @@ -0,0 +1,59 @@ +module ODBCAdapter + module InsertAttributeStripper + extend ActiveSupport::Concern + + included do + alias_method :pre_insert_attribute_stripper_save, :save + alias_method :pre_insert_attribute_stripper_save!, :save! + + def save(**options, &block) + save_internal(method(:pre_insert_attribute_stripper_save), **options, &block) + end + + def save!(**options, &block) + save_internal(method(:pre_insert_attribute_stripper_save!), **options, &block) + end + + private + + UNSAFE_INSERT_TYPES ||= %i(variant object array) + + def save_internal(base_function, **options, &block) + # Unless the validations are turned off or the hash is valid just run the save. This will trigger validation + # errors normally for an invalid record. We then disable validations during the initial save, because we'll + # often be saving a technically invalid record as we've stripped off required elements. + unless options[:validate] == false || valid? + return base_function.call(**options, &block) + end + self.class.transaction do + if new_record? + stripped_attributes = {} + self.class.columns.each do |column| + if UNSAFE_INSERT_TYPES.include?(column.type) && attributes[column.name] != nil + stripped_attributes[column.name] = attributes[column.name] + self[column.name] = nil + end + end + else + stripped_attributes = {} + end + temp_options = options.merge(validate: false) + first_call_result = base_function.call(**temp_options, &block) + return false if first_call_result == false + if stripped_attributes.any? 
+ restore_stripped_attributes(stripped_attributes) + return base_function.call(**options, &block) + else + return first_call_result + end + end + end + + def restore_stripped_attributes(stripped_attributes) + stripped_attributes.each do |key, value| + self[key] = value + end + end + end + end +end diff --git a/lib/odbc_adapter/database_metadata.rb b/lib/odbc_adapter/database_metadata.rb index f3572e9c..11fa9255 100644 --- a/lib/odbc_adapter/database_metadata.rb +++ b/lib/odbc_adapter/database_metadata.rb @@ -15,8 +15,15 @@ class DatabaseMetadata attr_reader :values - def initialize(connection) - @values = Hash[FIELDS.map { |field| [field, connection.get_info(ODBC.const_get(field))] }] + # has_encoding_bug refers to https://github.com/larskanis/ruby-odbc/issues/2 where ruby-odbc in UTF8 mode + # returns incorrectly encoded responses to getInfo + def initialize(connection, has_encoding_bug = false) + @values = Hash[FIELDS.map do |field| + info = connection.get_info(ODBC.const_get(field)) + info = info.encode(Encoding.default_external, 'UTF-16LE') if info.is_a?(String) && has_encoding_bug + + [field, info] + end] end def adapter_class diff --git a/lib/odbc_adapter/database_statements.rb b/lib/odbc_adapter/database_statements.rb index cac31682..cb7ad683 100644 --- a/lib/odbc_adapter/database_statements.rb +++ b/lib/odbc_adapter/database_statements.rb @@ -9,11 +9,8 @@ module DatabaseStatements # Returns the number of rows affected. def execute(sql, name = nil, binds = []) log(sql, name) do - if prepared_statements - @connection.do(sql, *prepared_binds(binds)) - else - @connection.do(sql) - end + sql = bind_params(binds, sql) if prepared_statements + @connection.do(sql) end end @@ -22,12 +19,8 @@ def execute(sql, name = nil, binds = []) # the executed +sql+ statement. def exec_query(sql, name = 'SQL', binds = [], prepare: false) # rubocop:disable Lint/UnusedMethodArgument log(sql, name) do - stmt = - if prepared_statements - @connection.run(sql, *prepared_binds(binds)) - else - @connection.run(sql) - end + sql = bind_params(binds, sql) if prepared_statements + stmt = @connection.run(sql) columns = stmt.columns values = stmt.to_a @@ -72,13 +65,74 @@ def default_sequence_name(table, _column) "#{table}_seq" end + def empty_insert_statement_value(primary_key = nil) + "(#{primary_key}) VALUES (DEFAULT)" + end + private # A custom hook to allow end users to overwrite the type casting before it # is returned to ActiveRecord. Useful before a full adapter has made its way # back into this repository. - def dbms_type_cast(_columns, values) - values + def dbms_type_cast(columns, rows) + # Cast the values to the correct type + columns.each_with_index do |column, col_index| + #puts " #{column.name} type #{column.type} length #{column.length} nullable #{column.nullable} scale #{column.scale} precision #{column.precision} searchable #{column.searchable} unsigned #{column.unsigned}" + rows.each do |row| + value = row[col_index] + + new_value = case + when value.nil? + nil + when [ODBC::SQL_CHAR, ODBC::SQL_VARCHAR, ODBC::SQL_LONGVARCHAR].include?(column.type) + # Do nothing, because the data defaults to strings + # This also covers null values, as they are VARCHARs of length 0 + value.is_a?(String) ? value.force_encoding("UTF-8") : value + when [ODBC::SQL_DECIMAL, ODBC::SQL_NUMERIC].include?(column.type) + column.scale == 0 ? 
value.to_i : value.to_f + when [ODBC::SQL_REAL, ODBC::SQL_FLOAT, ODBC::SQL_DOUBLE].include?(column.type) + value.to_f + when [ODBC::SQL_INTEGER, ODBC::SQL_SMALLINT, ODBC::SQL_TINYINT, ODBC::SQL_BIGINT].include?(column.type) + value.to_i + when [ODBC::SQL_BIT].include?(column.type) + value == 1 + when [ODBC::SQL_DATE, ODBC::SQL_TYPE_DATE].include?(column.type) + value.to_date + when [ODBC::SQL_TIME, ODBC::SQL_TYPE_TIME].include?(column.type) + value.to_time + when [ODBC::SQL_DATETIME, ODBC::SQL_TIMESTAMP, ODBC::SQL_TYPE_TIMESTAMP].include?(column.type) + value.to_datetime + # when ["ARRAY"?, "OBJECT"?, "VARIANT"?].include?(column.type) + # TODO: "ARRAY", "OBJECT", "VARIANT" all return as VARCHAR + # so we'd need to parse them to make them the correct type + + # As of now, we are just going to return the value as a string + # and let the consumer handle it. In the future, we could handle + # it here, but there's not a good way to tell what the type is + # without trying to parse the value as JSON and see if it works + # JSON.parse(value) + when [ODBC::SQL_BINARY].include?(column.type) + # These don't actually ever seem to return, even though they are + # defined in the ODBC driver, but I left them in here just in case + # so that future us can see what they should be + value + else + # the use of @connection.types() results in a "was not dropped before garbage collection" warning. + raise "Unknown column type: #{column.type} #{@connection.types(column.type).first[0]}" + end + + row[col_index] = new_value + end + end + rows + end + + def bind_params(binds, sql) + prepared_binds = *prepared_binds(binds) + prepared_binds.each.with_index(1) do |val, ind| + sql = sql.gsub("$#{ind}", "'#{val}'") + end + sql end # Assume received identifier is in DBMS's data dictionary case. @@ -127,8 +181,13 @@ def nullability(col_name, is_nullable, nullable) col_name == 'id' ? false : result end + # Adapt to Rails 5.2 + def prepare_statement_sub(sql) + sql.gsub(/\$\d+/, '?') + end + def prepared_binds(binds) - prepare_binds_for_database(binds).map { |bind| _type_cast(bind) } + binds.map(&:value_for_database).map { |bind| _type_cast(bind) } end end end diff --git a/lib/odbc_adapter/error.rb b/lib/odbc_adapter/error.rb index 1f8acfc9..d0e0172b 100644 --- a/lib/odbc_adapter/error.rb +++ b/lib/odbc_adapter/error.rb @@ -1,4 +1,6 @@ module ODBCAdapter class QueryTimeoutError < ActiveRecord::StatementInvalid end + class ConnectionFailedError < ActiveRecord::StatementInvalid + end end diff --git a/lib/odbc_adapter/quoting.rb b/lib/odbc_adapter/quoting.rb index a499612e..789cb693 100644 --- a/lib/odbc_adapter/quoting.rb +++ b/lib/odbc_adapter/quoting.rb @@ -38,5 +38,24 @@ def quoted_date(value) value.strftime('%Y-%m-%d') # Date end end + + def lookup_cast_type_from_column(column) # :nodoc: + type_map.lookup(column.sql_type) + end + + def quote_hash(hash:) + "OBJECT_CONSTRUCT(" + hash.map {|key, value| quote(key) + "," + quote(value)}.join(",") + ")" + end + + def quote_array(array:) + "ARRAY_CONSTRUCT(" + array.map { |element| quote(element) }.join(",") + ")" + end + + def quote(value) + if value.is_a? Hash then return quote_hash hash: value end + if value.is_a? Array then return quote_array array: value end + if value.is_a? 
Type::SnowflakeVariant then return value.quote self end + super + end end end diff --git a/lib/odbc_adapter/schema_statements.rb b/lib/odbc_adapter/schema_statements.rb index df149765..50238ca4 100644 --- a/lib/odbc_adapter/schema_statements.rb +++ b/lib/odbc_adapter/schema_statements.rb @@ -14,7 +14,10 @@ def tables(_name = nil) result = stmt.fetch_all || [] stmt.drop + db_regex = name_regex(current_database) + schema_regex = name_regex(current_schema) result.each_with_object([]) do |row, table_names| + next unless row[0] =~ db_regex && row[1] =~ schema_regex schema_name, table_name, table_type = row[1..3] next if respond_to?(:table_filtered?) && table_filtered?(schema_name, table_type) table_names << format_case(table_name) @@ -36,7 +39,10 @@ def indexes(table_name, _name = nil) index_name = nil unique = nil + db_regex = name_regex(current_database) + schema_regex = name_regex(current_schema) result.each_with_object([]).with_index do |(row, indices), row_idx| + next unless row[0] =~ db_regex && row[1] =~ schema_regex # Skip table statistics next if row[6].zero? # SQLStatistics: TYPE @@ -56,34 +62,71 @@ def indexes(table_name, _name = nil) end end + def retrieve_column_data(table_name) + column_query = "SHOW COLUMNS IN TABLE #{table_name}" + + # Temporarily disable debug logging so we don't spam the log with table column queries + query_results = ActiveRecord::Base.logger.silence do + exec_query(column_query) + end + + column_data = query_results.map do |query_result| + data_type_parsed = JSON.parse(query_result["data_type"]) + { + column_name: query_result["column_name"], + col_default: extract_default_from_snowflake(query_result["default"]), + col_native_type: extract_data_type_from_snowflake(data_type_parsed["type"]), + column_size: extract_column_size_from_snowflake(data_type_parsed), + numeric_scale: extract_scale_from_snowflake(data_type_parsed), + is_nullable: data_type_parsed["nullable"] + } + end + + column_data + end + + # Returns an array of Column objects for the table specified by # +table_name+. + # This entire function has been customized for Snowflake and will not work in general. 
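To make the quoting overrides in lib/odbc_adapter/quoting.rb above easier to review, here is a rough sketch (an editor's illustration, not code from this change) of what `quote` now produces for Ruby hashes, arrays, and the `SnowflakeVariant` wrapper defined later in this diff, assuming standard ActiveRecord quoting of the individual elements:

```ruby
conn = ActiveRecord::Base.connection

conn.quote({ 'city' => 'Boston', 'zip' => 2110 })
# => "OBJECT_CONSTRUCT('city','Boston','zip',2110)"

conn.quote([1, 'two', nil])
# => "ARRAY_CONSTRUCT(1,'two',NULL)"

conn.quote(ODBCAdapter::Type::SnowflakeVariant.new({ 'k' => 'v' }))
# => "OBJECT_CONSTRUCT('k','v')::VARIANT"
```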
def columns(table_name, _name = nil) - stmt = @connection.columns(native_case(table_name.to_s)) - result = stmt.fetch_all || [] - stmt.drop + result = retrieve_column_data(table_name) result.each_with_object([]) do |col, cols| - col_name = col[3] # SQLColumns: COLUMN_NAME - col_default = col[12] # SQLColumns: COLUMN_DEF - col_sql_type = col[4] # SQLColumns: DATA_TYPE - col_native_type = col[5] # SQLColumns: TYPE_NAME - col_limit = col[6] # SQLColumns: COLUMN_SIZE - col_scale = col[8] # SQLColumns: DECIMAL_DIGITS - - # SQLColumns: IS_NULLABLE, SQLColumns: NULLABLE - col_nullable = nullability(col_name, col[17], col[10]) - - args = { sql_type: col_sql_type, type: col_sql_type, limit: col_limit } - args[:sql_type] = 'boolean' if col_native_type == self.class::BOOLEAN_TYPE - - if [ODBC::SQL_DECIMAL, ODBC::SQL_NUMERIC].include?(col_sql_type) - args[:scale] = col_scale || 0 - args[:precision] = col_limit - end + col_name = col[:column_name] + col_default = col[:col_default] + col_native_type = col[:col_native_type] + col_limit = col[:column_size] + col_scale = col[:numeric_scale] + col_nullable = col[:is_nullable] + + args = { sql_type: construct_sql_type(col_native_type, col_limit, col_scale), type: col_native_type, limit: col_limit } + args[:type] = case col_native_type + when "BOOLEAN" then :boolean + when "VARIANT" then :variant + when "ARRAY" then :array + when "STRUCT" then :object + when "DATE" then :date + when "VARCHAR" then :string + when "TIMESTAMP" then :datetime + when "TIME" then :time + when "BINARY" then :binary + when "DOUBLE" then :float + when "DECIMAL" + if col_scale == 0 + :integer + else + args[:scale] = col_scale + args[:precision] = col_limit + :decimal + end + else + nil + end + sql_type_metadata = ActiveRecord::ConnectionAdapters::SqlTypeMetadata.new(**args) - cols << new_column(format_case(col_name), col_default, sql_type_metadata, col_nullable, table_name, col_native_type) + cols << new_column(format_case(col_name), col_default, sql_type_metadata, col_nullable, col_native_type) end end @@ -92,7 +135,10 @@ def primary_key(table_name) stmt = @connection.primary_keys(native_case(table_name.to_s)) result = stmt.fetch_all || [] stmt.drop unless stmt.nil? - result[0] && result[0][3] + + db_regex = name_regex(current_database) + schema_regex = name_regex(current_schema) + result.reduce(nil) { |pkey, key| (key[0] =~ db_regex && key[1] =~ schema_regex) ? format_case(key[3]) : pkey } end def foreign_keys(table_name) @@ -100,7 +146,10 @@ def foreign_keys(table_name) result = stmt.fetch_all || [] stmt.drop unless stmt.nil? 
+ db_regex = name_regex(current_database) + schema_regex = name_regex(current_schema) result.map do |key| + next unless key[0] =~ db_regex && key[1] =~ schema_regex fk_from_table = key[2] # PKTABLE_NAME fk_to_table = key[6] # FKTABLE_NAME @@ -126,5 +175,93 @@ def index_name(table_name, options) def current_database database_metadata.database_name.strip end + + def current_schema + @config[:driver].attrs['schema'] + end + + def name_regex(name) + if name =~ /^".*"$/ + /^#{name.delete_prefix('"').delete_suffix('"')}$/ + else + /^#{name}$/i + end + end + + # Changes in rails 7 mean that we need all of the type information in the sql_type column + # This reconstructs sql types using limit (which is precision) and scale + def construct_sql_type(native_type, limit, scale) + if scale > 0 + "#{native_type}(#{limit},#{scale})" + elsif limit > 0 + "#{native_type}(#{limit})" + else + native_type + end + end + + private + + # Extracts the value from a Snowflake column default definition. + def extract_default_from_snowflake(default) + case default + # null + when nil + nil + # Quoted strings + when /\A[(B]?'(.*)'\z/m + $1.gsub("''", "'").gsub("\\\\","\\") + # Boolean types + when "TRUE" + "true" + when "FALSE" + "false" + # Numeric types + when /\A(-?\d+(\.\d*)?)\z/ + $1 + else + nil + end + end + + def extract_data_type_from_snowflake(snowflake_data_type) + case snowflake_data_type + when "NUMBER" + "DECIMAL" + when /\ATIMESTAMP_.*/ + "TIMESTAMP" + when "TEXT" + "VARCHAR" + when "FLOAT" + "DOUBLE" + when "FIXED" + "DECIMAL" + when "REAL" + "DOUBLE" + else + snowflake_data_type + end + end + + def extract_column_size_from_snowflake(type_information) + case type_information["type"] + when /\ATIMESTAMP_.*/ + 35 + when "DATE" + 10 + when "FLOAT" + 38 + when "REAL" + 38 + when "BOOLEAN" + 1 + else + type_information["length"] || type_information["precision"] || 0 + end + end + + def extract_scale_from_snowflake(type_information) + type_information["scale"] || 0 + end end end diff --git a/lib/odbc_adapter/type/array_of.rb b/lib/odbc_adapter/type/array_of.rb new file mode 100644 index 00000000..01b3ac46 --- /dev/null +++ b/lib/odbc_adapter/type/array_of.rb @@ -0,0 +1,23 @@ +module ODBCAdapter + module Type + def Type.array_of(type) + newArrayClass = Class.new(ActiveRecord::Type::Value) + + newArrayClass.define_method :cast_value do |value| + return value unless value.is_a? String + base_array = ActiveSupport::JSON.decode(value) rescue nil + base_array.map { |element| type.cast(element) } + end + + newArrayClass.define_method :serialize do |value| + value.to_a.map { |element| type.serialize(element)} unless value.nil? + end + + newArrayClass.define_method :changed_in_place? 
do |raw_old_value, new_value| + deserialize(raw_old_value) != new_value + end + + newArrayClass + end + end +end diff --git a/lib/odbc_adapter/type/internal/snowflake_variant.rb b/lib/odbc_adapter/type/internal/snowflake_variant.rb new file mode 100644 index 00000000..7875f332 --- /dev/null +++ b/lib/odbc_adapter/type/internal/snowflake_variant.rb @@ -0,0 +1,18 @@ +module ODBCAdapter + module Type + class SnowflakeVariant + # Acts as a wrapper around other data types to make sure that they get typecasted into variants during quoting + def initialize(internal_data) + @internal_data = internal_data + end + + def quote(adapter) + adapter.quote(@internal_data) + "::VARIANT" + end + + def internal_data + @internal_data + end + end + end +end diff --git a/lib/odbc_adapter/type/object.rb b/lib/odbc_adapter/type/object.rb new file mode 100644 index 00000000..853cf4d1 --- /dev/null +++ b/lib/odbc_adapter/type/object.rb @@ -0,0 +1,24 @@ +module ODBCAdapter + module Type + class SnowflakeObject < ActiveRecord::Type::Value + + def cast_value(value) + # deserialize can contain the results of the previous serialize, rather than the database returned value + if value.is_a? Hash then return value end + ActiveSupport::JSON.decode(value) rescue nil + end + + def serialize(value) + value.to_h unless value.nil? + end + + def changed_in_place?(raw_old_value, new_value) + deserialize(raw_old_value) != new_value + end + + def accessor + ActiveRecord::Store::StringKeyedHashAccessor + end + end + end +end diff --git a/lib/odbc_adapter/type/snowflake_integer.rb b/lib/odbc_adapter/type/snowflake_integer.rb new file mode 100644 index 00000000..516c5c73 --- /dev/null +++ b/lib/odbc_adapter/type/snowflake_integer.rb @@ -0,0 +1,15 @@ + +module ODBCAdapter + module Type + class SnowflakeInteger < ActiveRecord::Type::BigInteger + # Let the :auto_generate placeholder pass through casting unchanged so it can be replaced with a sequence value at save time (see EasyIdentified) + def cast(value) + if value == :auto_generate + return value + else + super + end + end + end + end +end diff --git a/lib/odbc_adapter/type/type.rb b/lib/odbc_adapter/type/type.rb new file mode 100644 index 00000000..c199e716 --- /dev/null +++ b/lib/odbc_adapter/type/type.rb @@ -0,0 +1,42 @@ +require 'odbc_adapter/type/array_of' +require 'odbc_adapter/type/object' +require 'odbc_adapter/type/variant' +require 'odbc_adapter/type/snowflake_integer' + +require 'odbc_adapter/type/internal/snowflake_variant' + +module ODBCAdapter + module Type + ArrayOfBigIntegers = array_of(ActiveRecord::Type::BigInteger.new) + ArrayOfBinaries = array_of(ActiveRecord::Type::Binary.new) + ArrayOfBooleans = array_of(ActiveRecord::Type::Boolean.new) + ArrayOfDates = array_of(ActiveRecord::Type::Date.new) + ArrayOfDateTimes = array_of(ActiveRecord::Type::DateTime.new) + ArrayOfDecimals = array_of(ActiveRecord::Type::Decimal.new) + ArrayOfFloats = array_of(ActiveRecord::Type::Float.new) + ArrayOfImmutableStrings = array_of(ActiveRecord::Type::ImmutableString.new) + ArrayOfIntegers = array_of(ActiveRecord::Type::Integer.new) + ArrayOfStrings = array_of(ActiveRecord::Type::String.new) + ArrayOfTimes = array_of(ActiveRecord::Type::Time.new) + ArrayOfValues = array_of(ActiveRecord::Type::Value.new) + + ActiveRecord::Type.register(:array_of_big_integers, ArrayOfBigIntegers, adapter: :odbc) + ActiveRecord::Type.register(:array_of_binaries, ArrayOfBinaries, adapter: :odbc) + ActiveRecord::Type.register(:array_of_booleans, ArrayOfBooleans, adapter: :odbc) + ActiveRecord::Type.register(:array_of_dates, ArrayOfDates, adapter: :odbc) + ActiveRecord::Type.register(:array_of_date_times, 
ArrayOfDateTimes, adapter: :odbc) + ActiveRecord::Type.register(:array_of_decimals, ArrayOfDecimals, adapter: :odbc) + ActiveRecord::Type.register(:array_of_floats, ArrayOfFloats, adapter: :odbc) + ActiveRecord::Type.register(:array_of_immutable_strings, ArrayOfImmutableStrings, adapter: :odbc) + ActiveRecord::Type.register(:array_of_integers, ArrayOfIntegers, adapter: :odbc) + ActiveRecord::Type.register(:array_of_strings, ArrayOfStrings, adapter: :odbc) + ActiveRecord::Type.register(:array_of_times, ArrayOfTimes, adapter: :odbc) + ActiveRecord::Type.register(:array_of_values, ArrayOfValues, adapter: :odbc) + + ActiveRecord::Type.register(:object, Object, adapter: :odbc) + + ActiveRecord::Type.register(:variant, Variant, adapter: :odbc) + + ActiveRecord::Type.register(:integer, SnowflakeInteger, adapter: :odbc) + end +end diff --git a/lib/odbc_adapter/type/variant.rb b/lib/odbc_adapter/type/variant.rb new file mode 100644 index 00000000..fda5b67e --- /dev/null +++ b/lib/odbc_adapter/type/variant.rb @@ -0,0 +1,28 @@ +module ODBCAdapter + module Type + class Variant < ActiveRecord::Type::Value + + def deserialize(value) + # deserialize can contain the results of the previous serialize, rather than the database returned value + if value.is_a? SnowflakeVariant then return value.internal_data end + ActiveSupport::JSON.decode(value) rescue nil + end + + def cast(value) + value + end + + def serialize(value) + SnowflakeVariant.new(value) unless value.nil? + end + + def changed_in_place?(raw_old_value, new_value) + deserialize(raw_old_value) != new_value + end + + def accessor + ActiveRecord::Store::StringKeyedHashAccessor + end + end + end +end diff --git a/lib/odbc_adapter/version.rb b/lib/odbc_adapter/version.rb index 693cb713..33905ee1 100644 --- a/lib/odbc_adapter/version.rb +++ b/lib/odbc_adapter/version.rb @@ -1,3 +1,3 @@ module ODBCAdapter - VERSION = '5.0.3'.freeze + VERSION = '5.0.8'.freeze end diff --git a/odbc_adapter.gemspec b/odbc_adapter.gemspec index ae02a406..1dd9546a 100644 --- a/odbc_adapter.gemspec +++ b/odbc_adapter.gemspec @@ -1,5 +1,3 @@ -# coding: utf-8 - lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'odbc_adapter/version' @@ -21,11 +19,13 @@ Gem::Specification.new do |spec| spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } spec.require_paths = ['lib'] - spec.add_dependency 'ruby-odbc', '~> 0.9' + spec.add_dependency 'activerecord', '>= 5.0.1' + spec.add_dependency 'ruby-odbc', '~> 0.99998' - spec.add_development_dependency 'bundler', '~> 1.14' + spec.add_development_dependency 'bundler', '>= 1.14' spec.add_development_dependency 'minitest', '~> 5.10' spec.add_development_dependency 'rake', '~> 12.0' - spec.add_development_dependency 'rubocop', '~> 0.48' + spec.add_development_dependency 'rubocop', '0.48.1' spec.add_development_dependency 'simplecov', '~> 0.14' + spec.add_development_dependency 'pry', '~> 0.11.1' end diff --git a/test/connection_fail_test.rb b/test/connection_fail_test.rb new file mode 100644 index 00000000..8061af5e --- /dev/null +++ b/test/connection_fail_test.rb @@ -0,0 +1,19 @@ +require 'test_helper' + +class ConnectionFailTest < Minitest::Test + def test_connection_fail + # We're only interested in testing a MySQL connection failure for now. + # Postgres disconnects generate a different class of errors + skip 'Only executed for MySQL' unless ActiveRecord::Base.connection.instance_values['config'][:conn_str].include? 
'MySQL' + begin + conn.execute('KILL CONNECTION_ID();') + rescue => e + puts "caught exception #{e}" + end + assert_raises(ODBCAdapter::ConnectionFailedError) { User.average(:letters).round(2) } + end + + def conn + ActiveRecord::Base.connection + end +end diff --git a/test/test_helper.rb b/test/test_helper.rb index 65cc6d52..623b1960 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -42,7 +42,7 @@ class User < ActiveRecord::Base { first_name: 'Jason', last_name: 'Dsouza', letters: 11 }, { first_name: 'Ash', last_name: 'Hepburn', letters: 10 }, { first_name: 'Sharif', last_name: 'Younes', letters: 12 }, - { first_name: 'Ryan', last_name: 'Brown', letters: 9 } + { first_name: 'Ryan', last_name: 'Brüwn', letters: 9 } ] ) end
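Finally, a sketch of how the bulk-merge pieces added in this change fit together from an application's point of view. Nothing in this diff appears to mix `ActiveRecord::MergeAllPersistence` into `ActiveRecord::Base`, so the sketch includes it explicitly; the `User` model and column values come from `test/test_helper.rb` and are otherwise illustrative.

```ruby
require 'odbc_adapter'

# merge_all! is provided by the MergeAllPersistence concern added in
# lib/active_record/merge_all_persistence.rb.
ActiveRecord::Base.include(ActiveRecord::MergeAllPersistence)

# Upserts rows by primary key with a single MERGE statement built by
# MergeAll::Builder and executed through the adapter's exec_merge_all.
User.merge_all!(
  [
    { id: 1, first_name: 'Ryan',   last_name: 'Brüwn',  letters: 9 },
    { id: 2, first_name: 'Sharif', last_name: 'Younes', letters: 12 }
  ],
  prune_duplicates: true
)
```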