Active Record extensions for Rails 3

Change-Id: I6740be8a35ec7a955d1714b4049527e113ce7a21
Reviewed-on: https://gerrit.instructure.com/27020
Tested-by: Jenkins <jenkins@instructure.com>
Reviewed-by: Cody Cutrer <cody@instructure.com>
Product-Review: James Williams  <jamesw@instructure.com>
QA-Review: James Williams  <jamesw@instructure.com>
This commit is contained in:
James Williams 2013-12-05 09:23:59 -07:00
parent 351cb7197c
commit 271d9a2eb0
1 changed file with 258 additions and 14 deletions

View File

@ -3,6 +3,19 @@ class ActiveRecord::Base
extend ActiveSupport::Memoizable # used for a lot of the reporting queries
unless CANVAS_RAILS2
class << self
  # Rails 2-compatible shim: expose the old class-level
  # preload_associations API on top of the Rails 3 Preloader.
  def preload_associations(records, associations, preload_options={})
    preloader = ActiveRecord::Associations::Preloader.new(records, associations, preload_options)
    preloader.run
  end
end
# Rails 3.2's write_attribute can return a lazy Serialization::Attribute
# wrapper; unwrap it so callers always receive the raw value.
def write_attribute(*args)
  result = super
  if result.is_a?(ActiveRecord::AttributeMethods::Serialization::Attribute)
    result.value
  else
    result
  end
end
end
if CANVAS_RAILS2
# this functionality is built into rails 3
class ProtectedAttributeAssigned < Exception; end
@ -475,6 +488,7 @@ class ActiveRecord::Base
end
end
if CANVAS_RAILS2
def self.find_in_batches_with_usefulness(options = {}, &block)
# already in a transaction (or transactions don't matter); cursor is fine
if (connection.adapter_name == 'PostgreSQL' && (Shackles.environment == :slave || connection.open_transactions > (Rails.env.test? ? 1 : 0))) && !options[:start]
@ -582,6 +596,7 @@ class ActiveRecord::Base
connection.execute "DROP #{temporary}TABLE #{table}"
end
end
end
# set up class-specific getters/setters for a polymorphic association, e.g.
# belongs_to :context, :polymorphic => true, :types => [:course, :account]
@ -663,19 +678,22 @@ class ActiveRecord::Base
# the primary key from smallest to largest.
def self.find_ids_in_ranges(options = {})
batch_size = options[:batch_size].try(:to_i) || 1000
ids = connection.select_rows("select min(id), max(id) from (#{self.send(:construct_finder_sql, :select => "#{quoted_table_name}.#{primary_key} as id", :order => primary_key, :limit => batch_size)}) as subquery").first
subquery_scope = self.scoped.except(:select).select("#{quoted_table_name}.#{primary_key} as id").reorder(primary_key).limit(batch_size)
ids = connection.select_rows("select min(id), max(id) from (#{subquery_scope.to_sql}) as subquery").first
while ids.first.present?
yield(*ids)
last_value = ids.last
ids = connection.select_rows("select min(id), max(id) from (#{self.send(:construct_finder_sql, :select => "#{quoted_table_name}.#{primary_key} as id", :conditions => ["#{quoted_table_name}.#{primary_key}>?", last_value], :order => primary_key, :limit => batch_size)}) as subquery").first
next_subquery_scope = subquery_scope.where(["#{quoted_table_name}.#{primary_key}>?", last_value])
ids = connection.select_rows("select min(id), max(id) from (#{next_subquery_scope.to_sql}) as subquery").first
end
end
class << self
def deconstruct_joins
joins_sql = ''
add_joins!(joins_sql, nil)
def deconstruct_joins(joins_sql=nil)
unless joins_sql
joins_sql = ''
add_joins!(joins_sql, nil)
end
tables = []
join_conditions = []
joins_sql.strip.split('INNER JOIN')[1..-1].each do |join|
@ -687,6 +705,7 @@ class ActiveRecord::Base
[tables, join_conditions]
end
if CANVAS_RAILS2
def update_all_with_joins(updates, conditions = nil, options = {})
scope = scope(:find)
if scope && scope[:joins]
@ -800,6 +819,7 @@ class ActiveRecord::Base
end
alias_method_chain :delete_all, :limit
end
end
def self.bulk_insert(records)
return if records.empty?
@ -812,11 +832,152 @@ class ActiveRecord::Base
if CANVAS_RAILS2
named_scope :none, lambda { where("?", false) }
else
scope :none, lambda { where("?", false) }
scope :none, lambda { {:conditions => ["?", false]} }
end
end
end
unless CANVAS_RAILS2
ActiveRecord::Relation.class_eval do
# Route find_in_batches through a more capable strategy when possible:
#  - on PostgreSQL, when already inside a transaction (or on a slave, where
#    transaction state doesn't matter), use a server-side cursor
#  - when the relation has GROUP/ORDER/DISTINCT (which break primary-key
#    batching), materialize into a temp table and batch over that
#  - otherwise fall back to the stock Rails implementation
def find_in_batches_with_usefulness(options = {}, &block)
  # already in a transaction (or transactions don't matter); cursor is fine
  # (in the test env one open transaction is expected: the test wrapper)
  if (connection.adapter_name == 'PostgreSQL' && (Shackles.environment == :slave || connection.open_transactions > (Rails.env.test? ? 1 : 0))) && !options[:start]
    self.activate { find_in_batches_with_cursor(options, &block) }
  elsif order_values.any? || group_values.any? || select_values.to_s =~ /DISTINCT/i
    raise ArgumentError.new("GROUP and ORDER are incompatible with :start") if options[:start]
    self.activate { find_in_batches_with_temp_table(options, &block) }
  else
    find_in_batches_without_usefulness(options) do |batch|
      # keep default scopes from being re-applied to queries issued
      # inside the caller's block
      klass.send(:with_exclusive_scope) { yield batch }
    end
  end
end
alias_method_chain :find_in_batches, :usefulness
# Stream the relation in batches via a PostgreSQL server-side cursor
# (DECLARE/FETCH), avoiding OFFSET pagination. PostgreSQL-only: the
# cursor lives inside the transaction opened here.
def find_in_batches_with_cursor(options = {}, &block)
  batch_size = options[:batch_size] || 1000
  klass.transaction do
    begin
      cursor = "#{table_name}_in_batches_cursor"
      connection.execute("DECLARE #{cursor} CURSOR FOR #{to_sql}")
      includes = includes_values
      klass.send(:with_exclusive_scope) do
        # uncached so identical repeated FETCH statements aren't served
        # from the query cache
        batch = connection.uncached { klass.find_by_sql("FETCH FORWARD #{batch_size} FROM #{cursor}") }
        while !batch.empty?
          # NOTE(review): includes_values is an Array, so this runs (as a
          # no-op) even when empty; only nil would skip it
          ActiveRecord::Associations::Preloader.new(batch, includes).run if includes
          yield batch
          # a short batch means the cursor is exhausted
          break if batch.size < batch_size
          batch = connection.uncached { klass.find_by_sql("FETCH FORWARD #{batch_size} FROM #{cursor}") }
        end
      end
      # not ensure; if the transaction rolls back due to another exception, it will
      # automatically close
      connection.execute("CLOSE #{cursor}")
    end
  end
end
# Batch over a relation whose ORDER/GROUP/DISTINCT make primary-key
# batching impossible: materialize the query into a temporary table,
# bolt on an auto-increment key, then page through that key.
def find_in_batches_with_temp_table(options = {})
  batch_size = options[:batch_size] || 1000
  table = "#{table_name}_find_in_batches_temporary_table"
  connection.execute "CREATE TEMPORARY TABLE #{table} AS #{to_sql}"
  begin
    index = "temp_primary_key"
    case connection.adapter_name
    when 'PostgreSQL'
      begin
        # silence the NOTICE postgres emits while adding the serial column
        old_proc = connection.raw_connection.set_notice_processor {}
        connection.execute "ALTER TABLE #{table}
ADD temp_primary_key SERIAL PRIMARY KEY"
      ensure
        connection.raw_connection.set_notice_processor(&old_proc) if old_proc
      end
    when 'MySQL', 'Mysql2'
      connection.execute "ALTER TABLE #{table}
ADD temp_primary_key MEDIUMINT NOT NULL PRIMARY KEY AUTO_INCREMENT"
    when 'SQLite'
      # Sqlite always has an implicit primary key
      # NOTE(review): SELECT * below won't project rowid, so
      # batch.last[index] presumably returns nil here -- confirm on sqlite
      index = 'rowid'
    else
      raise "Temp tables not supported!"
    end
    includes = includes_values
    sql = "SELECT *
FROM #{table}
ORDER BY #{index} ASC
LIMIT #{batch_size}"
    klass.send(:with_exclusive_scope) do
      batch = klass.find_by_sql(sql)
      while !batch.empty?
        ActiveRecord::Associations::Preloader.new(batch, includes).run if includes
        yield batch
        break if batch.size < batch_size
        # keyset pagination on the synthetic key
        last_value = batch.last[index]
        sql = "SELECT *
FROM #{table}
WHERE #{index} > #{last_value}
ORDER BY #{index} ASC
LIMIT #{batch_size}"
        batch = klass.find_by_sql(sql)
      end
    end
  ensure
    # NOTE(review): only 'Mysql2' gets the TEMPORARY keyword even though
    # 'MySQL' is also handled above -- verify that's intentional
    temporary = "TEMPORARY " if connection.adapter_name == 'Mysql2'
    connection.execute "DROP #{temporary}TABLE #{table}"
  end
end
# Stock delete_all ignores JOINs; rewrite the DELETE by hand so joined
# scopes (scope.joins(...).delete_all) actually work.
def delete_all_with_joins(conditions = nil)
  if joins_values.any?
    if conditions
      # fold the extra conditions into the scope, then re-enter; the
      # no-conditions branch below builds the final SQL
      where(conditions).delete_all
    else
      case connection.adapter_name
      when 'PostgreSQL'
        # postgres has no DELETE ... JOIN; translate the joins into
        # DELETE ... USING with the join conditions in the WHERE clause
        sql = "DELETE FROM #{quoted_table_name} "
        tables, join_conditions = deconstruct_joins(arel.join_sql.to_s)
        sql.concat('USING ')
        sql.concat(tables.join(', '))
        sql.concat(' ')
        sql.concat(where(join_conditions).arel.where_sql.to_s)
      when 'MySQL', 'Mysql2'
        # mysql supports multi-table DELETE natively
        sql = "DELETE #{quoted_table_name} FROM #{quoted_table_name} #{arel.join_sql.to_s} #{arel.where_sql.to_s}"
      else
        raise "Joins in delete not supported!"
      end
      connection.delete(sql, "#{name} Delete all")
    end
  else
    delete_all_without_joins(conditions)
  end
end
alias_method_chain :delete_all, :joins
# Stock delete_all ignores LIMIT; emulate it, natively on MySQL
# (DELETE ... ORDER BY ... LIMIT) or via a primary-key subquery elsewhere.
def delete_all_with_limit(conditions = nil)
  if limit_value
    case connection.adapter_name
    when 'MySQL', 'Mysql2'
      # use arel's visitor to render the ORDER BY / LIMIT nodes as SQL
      v = arel.visitor
      sql = "DELETE #{quoted_table_name} FROM #{quoted_table_name} #{arel.where_sql.to_s}
ORDER BY #{arel.orders.map { |x| v.send(:visit, x) }.join(', ')} LIMIT #{v.send(:visit, arel.limit)}"
      return connection.delete(sql, "#{name} Delete all")
    else
      # DELETE WHERE pk IN (<limited select of pks from this scope>)
      scope = scoped.select(primary_key)
      return unscoped.where(primary_key => scope).delete_all
    end
  end
  delete_all_without_limit(conditions)
end
alias_method_chain :delete_all, :limit
end
end
ActiveRecord::ConnectionAdapters::AbstractAdapter.class_eval do
def bulk_insert(table_name, records)
keys = records.first.keys
@ -919,7 +1080,7 @@ end
module MySQLAdapterExtensions
def self.included(klass)
klass::NATIVE_DATABASE_TYPES[:primary_key] = "bigint DEFAULT NULL auto_increment PRIMARY KEY".freeze
klass.alias_method_chain :add_column, :foreign_key_check
klass.alias_method_chain :add_column, :foreign_key_check if CANVAS_RAILS2
klass.alias_method_chain :configure_connection, :pg_compat
klass.alias_method_chain :commit_db_transaction, :callbacks
klass.alias_method_chain :rollback_db_transaction, :callbacks
@ -1035,7 +1196,12 @@ if defined?(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter)
concurrently = "CONCURRENTLY " if options[:algorithm] == :concurrently && self.open_transactions == 0
conditions = options[:where]
if conditions
conditions = " WHERE #{ActiveRecord::Base.send(:sanitize_sql, conditions, table_name.to_s.dup)}"
if CANVAS_RAILS2
conditions = " WHERE #{ActiveRecord::Base.send(:sanitize_sql, conditions, table_name.to_s.dup)}"
else
model_class = ActiveRecord::Base.all_models.detect{|m| m.table_name.to_s == table_name.to_s} || ActiveRecord::Base
conditions = " WHERE #{model_class.send(:sanitize_sql, conditions, table_name.to_s.dup)}"
end
end
else
index_type = options
@ -1121,6 +1287,7 @@ class ActiveRecord::Error
end
end
if CANVAS_RAILS2
# We need to have 64-bit ids and foreign keys.
if defined?(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter)
ActiveRecord::ConnectionAdapters::PostgreSQLAdapter::NATIVE_DATABASE_TYPES[:primary_key] = "bigserial primary key".freeze
@ -1150,6 +1317,7 @@ ActiveRecord::ConnectionAdapters::TableDefinition.class_eval do
end
alias_method_chain :column, :foreign_key_check
end
end
# See https://rails.lighthouseapp.com/projects/8994-ruby-on-rails/tickets/66-true-false-conditions-broken-for-sqlite#ticket-66-9
# The default 't' and 'f' are no good, since sqlite treats them both as 0 in boolean logic.
@ -1214,6 +1382,48 @@ if defined?(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter)
infer_group_by_columns(columns).flatten.join(', ')
end
unless CANVAS_RAILS2
# ActiveRecord 3.2 ignores indexes if it cannot parse the column names
# (for instance when using functions like LOWER)
# this will lead to problems if we try to remove the index (index_exists? will return false)
def indexes(table_name)
  # one row per non-primary index on the table, within the current search path
  result = query(<<-SQL, 'SCHEMA')
SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid
FROM pg_class t
INNER JOIN pg_index d ON t.oid = d.indrelid
INNER JOIN pg_class i ON d.indexrelid = i.oid
WHERE i.relkind = 'i'
AND d.indisprimary = 'f'
AND t.relname = '#{table_name}'
AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )
ORDER BY i.relname
SQL
  result.map do |row|
    index_name = row[0]
    unique = row[1] == 't'
    # indkey: space-separated attnums of the indexed columns
    indkey = row[2].split(" ")
    # inddef: the full CREATE INDEX statement text
    inddef = row[3]
    oid = row[4]
    # map attnum -> column name for the indexed attributes
    columns = Hash[query(<<-SQL, "SCHEMA")]
SELECT a.attnum, a.attname
FROM pg_attribute a
WHERE a.attrelid = #{oid}
AND a.attnum IN (#{indkey.join(",")})
SQL
    # attnums with no matching plain column (e.g. expression entries) drop out
    column_names = columns.values_at(*indkey).compact
    # add info on sort order for columns (only desc order is explicitly specified, asc is the default)
    desc_order_columns = inddef.scan(/(\w+) DESC/).flatten
    orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}
    ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, index_name, unique, column_names, [], orders)
  end
end
end
alias_method_chain :commit_db_transaction, :callbacks
alias_method_chain :rollback_db_transaction, :callbacks
alias_method_chain :release_savepoint, :callbacks if Rails.env.test?
@ -1278,17 +1488,20 @@ class ActiveRecord::MigrationProxy
end
class ActiveRecord::Migrator
cattr_accessor :migrated_versions
# Lazily-initialized list of additional directories to scan for migration
# files (combined with the instance's @migrations_path in #migrations).
# NOTE(review): backing class variable is @@migration_paths (singular
# "migration") while the method is migrations_paths -- confirm intentional.
def self.migrations_paths
  @@migration_paths ||= []
end
def migrations
@@migrations ||= begin
@migrations_path ||= File.join(Rails.root, 'db/migrate')
files = ([@migrations_path].compact + self.class.migrations_paths).uniq.
map { |p| Dir["#{p}/[0-9]*_*.rb"] }.flatten
migrations = files.inject([]) do |klasses, file|
version, name = file.scan(/([0-9]+)_([_a-z0-9]*).rb/).first
version, name, scope = file.scan(/([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?\.rb\z/).first
raise ActiveRecord::IllegalMigrationNameError.new(file) unless version
version = version.to_i
@ -1301,11 +1514,16 @@ class ActiveRecord::Migrator
raise ActiveRecord::DuplicateMigrationNameError.new(name.camelize)
end
klasses << (ActiveRecord::MigrationProxy.new).tap do |migration|
migration.name = name.camelize
migration.version = version
migration.filename = file
if CANVAS_RAILS2
klasses << (ActiveRecord::MigrationProxy.new).tap do |migration|
migration.name = name.camelize
migration.version = version
migration.filename = file
end
else
klasses << ActiveRecord::MigrationProxy.new(name.camelize, version, file, scope)
end
klasses
end
migrations = migrations.sort_by(&:version)
@ -1358,7 +1576,9 @@ class ActiveRecord::Migrator
end
ddl_transaction(migration) do
self.class.migrated_versions = @migrated_versions
migration.migrate(@direction)
@migrated_versions = self.class.migrated_versions
record_version_state_after_migrating(migration.version) unless tag == :predeploy && migration.tags.include?(:postdeploy)
end
rescue => e
@ -1432,4 +1652,28 @@ ActiveRecord::ConnectionAdapters::SchemaStatements.class_eval do
execute("SELECT COUNT(*) FROM #{table} WHERE #{column} IS NULL") if open_transactions == 0
change_column_null table, column, false
end
unless CANVAS_RAILS2
# Compatibility shim so index_exists? accepts both the Rails 3 signature
# (table, column, options_hash) and the Rails 2.3 signature
# (table, index_name).
def index_exists_with_options?(table_name, column_name, options = {})
  return index_exists_without_options?(table_name, column_name, options) if options.is_a?(Hash)
  # in ActiveRecord 2.3, the second argument is index_name
  index_exists_without_options?(table_name, nil, { :name => column_name.to_s })
end
alias_method_chain :index_exists?, :options
# in ActiveRecord 3.2, it will raise an ArgumentError if the index doesn't exist;
# restore the older log-and-skip behavior instead.
def remove_index(table_name, options)
  name = index_name(table_name, options)
  if index_exists?(table_name, nil, {:name => name})
    remove_index!(table_name, name)
  else
    @logger.warn("Index name '#{name}' on table '#{table_name}' does not exist. Skipping.")
  end
end
end
end