#
# Copyright (C) 2016 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
|
|
|
# byebug is an optional debugging aid; don't fail when the gem isn't
# installed (e.g. on CI where debug tooling is omitted).
begin
  require 'byebug'
rescue LoadError
end
|
|
|
|
|
require 'securerandom'

RSpec.configure do |c|
  c.raise_errors_for_deprecations!
  c.color = true

  # Hard-limit every example to 3 minutes so a single hung spec can't
  # stall the entire build; log the description so hangs are attributable.
  c.around(:each) do |example|
    Timeout::timeout(180) do
      Rails.logger.info "STARTING SPEC #{example.full_description}"
      example.run
    end
  end
end
|
|
|
|
|
2013-12-19 00:34:57 +08:00
|
|
|
ENV["RAILS_ENV"] = 'test'

# Opt-in code coverage; must start before the app environment loads so
# that application code is instrumented.
if ENV['COVERAGE'] == "1"
  puts "Code Coverage enabled"
  require 'coverage_tool'
  CoverageTool.start("RSpec:#{Process.pid}#{ENV['TEST_ENV_NUMBER']}")
end
|
|
|
|
|
2013-03-21 04:30:20 +08:00
|
|
|
require File.expand_path('../../config/environment', __FILE__) unless defined?(Rails)
require 'rspec/rails'

# ensure people aren't creating records outside the rspec lifecycle, e.g.
# inside a describe/context block rather than a let/before/example
require_relative 'support/blank_slate_protection'
BlankSlateProtection.enable!

Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
|
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
ActionView::TestCase::TestController.view_paths = ApplicationController.view_paths

# this makes sure that a broken transaction becomes functional again
# by the time we hit rescue_action_in_public, so that the error report
# can be recorded
ActionController::Base.set_callback(:process_action, :around, ->(_r, block) do
  exception = nil
  ActiveRecord::Base.transaction(joinable: false, requires_new: true) do
    begin
      if Rails.version < '5'
        # that transaction didn't count as a "real" transaction within the test
        test_open_transactions = ActiveRecord::Base.connection.instance_variable_get(:@test_open_transactions)
        ActiveRecord::Base.connection.instance_variable_set(:@test_open_transactions, test_open_transactions.to_i - 1)
        begin
          block.call
        ensure
          ActiveRecord::Base.connection.instance_variable_set(:@test_open_transactions, test_open_transactions)
        end
      else
        block.call
      end
    rescue ActiveRecord::StatementInvalid
      # these need to properly roll back the transaction
      raise
    rescue
      # anything else, the transaction needs to commit, but we need to re-raise outside the transaction
      exception = $!
    end
  end
  raise exception if exception
end)
|
|
|
|
|
2015-10-30 04:40:30 +08:00
|
|
|
# Patch rspec so an exception raised in an after(:context) hook is
# reported instead of silently aborting the run.
module RSpec::Core::Hooks
  class AfterContextHook < Hook
    def run(example)
      # newer rspec-support exposes a safer exception whitelist; fall
      # back to Exception on older versions
      exception_class = if defined?(RSpec::Support::AllExceptionsExceptOnesWeMustNotRescue)
                          RSpec::Support::AllExceptionsExceptOnesWeMustNotRescue
                        else
                          Exception
                        end
      example.instance_exec(example, &block)
    rescue exception_class => e
      # TODO: Come up with a better solution for this.
      RSpec.configuration.reporter.message <<-EOS
An error occurred in an `after(:context)` hook.
  #{e.class}: #{e.message}
  occurred at #{e.backtrace.join("\n")}
      EOS
    end
  end
end
|
|
|
|
|
2015-12-18 01:14:52 +08:00
|
|
|
# Compare Times at microsecond granularity so values that round-trip
# through the database (which truncates sub-microsecond precision)
# still compare equal in specs.
Time.class_eval do
  def compare_with_round(other)
    other = Time.at(other.to_i, other.usec) if other.respond_to?(:usec)
    Time.at(self.to_i, self.usec).compare_without_round(other)
  end
  alias_method :compare_without_round, :<=>
  alias_method :<=>, :compare_with_round
end
|
|
|
|
|
2015-12-18 01:14:52 +08:00
|
|
|
# temporary patch to keep things sane
# TODO: actually fix the deprecation messages once we're on Rails 4 permanently and remove this
ActiveSupport::Deprecation.silenced = true
|
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
module RSpec::Rails
  module ViewExampleGroup
    module ExampleMethods
      # normally in rspec 2, assigns returns a newly constructed hash
      # which means that 'assigns[:key] = value' in view specs does nothing;
      # memoize it so mutations stick
      def assigns
        @assigns ||= super
      end

      alias :view_assigns :assigns

      delegate :content_for, :to => :view

      # Render with the real controller's helper methods available to the
      # view, mirroring production behavior more closely than the stock
      # test controller does.
      def render_with_helpers(*args)
        controller_class = ("#{@controller.controller_path.camelize}Controller".constantize rescue nil) || ApplicationController

        controller_class.instance_variable_set(:@js_env, nil)
        # this extends the controller's helper methods to the view
        # however, these methods are delegated to the test controller
        view.singleton_class.class_eval do
          include controller_class._helpers unless included_modules.include?(controller_class._helpers)
        end

        # so create a "real_controller"
        # and delegate the helper methods to it
        @controller.singleton_class.class_eval do
          attr_accessor :real_controller

          controller_class._helper_methods.each do |helper|
            class_eval <<-RUBY, __FILE__, __LINE__ + 1
              def #{helper}(*args, &block)
                real_controller.send(:#{helper}, *args, &block)
              end
            RUBY
          end
        end

        real_controller = controller_class.new
        real_controller.instance_variable_set(:@_request, @controller.request)
        real_controller.instance_variable_set(:@context, @controller.instance_variable_get(:@context))
        @controller.real_controller = real_controller

        # just calling "render 'path/to/view'" by default looks for a partial
        if args.first && args.first.is_a?(String)
          file = args.shift
          args = [{:template => file}] + args
        end
        render_without_helpers(*args)
      end

      alias_method_chain :render, :helpers
    end
  end

  # matches when the rendered HTML contains an element for the given CSS selector
  RSpec::Matchers.define :have_tag do |expected|
    match do |actual|
      !!Nokogiri::HTML(actual).at_css(expected)
    end
  end
end
|
2014-07-24 01:14:22 +08:00
|
|
|
|
2014-01-09 04:47:34 +08:00
|
|
|
require 'action_controller_test_process'
require File.expand_path(File.dirname(__FILE__) + '/mocha_rspec_adapter')
require File.expand_path(File.dirname(__FILE__) + '/mocha_extensions')
require File.expand_path(File.dirname(__FILE__) + '/ams_spec_helper')

require 'i18n_tasks'
require 'handlebars_tasks'
|
2014-03-07 00:40:43 +08:00
|
|
|
|
2014-03-15 05:19:02 +08:00
|
|
|
# if mocha was initialized before rails (say by another spec), CollectionProxy would have
# undef_method'd them; we need to restore them
Mocha::ObjectMethods.instance_methods.each do |m|
  ActiveRecord::Associations::CollectionProxy.class_eval <<-RUBY
    def #{m}; end
    remove_method #{m.inspect}
  RUBY
end
|
|
|
|
|
2015-05-09 07:38:39 +08:00
|
|
|
# Load every factory and shared-example file (backslashes normalized for
# Windows paths in File.dirname).
factories = "#{File.dirname(__FILE__).gsub(/\\/, "/")}/factories/*.rb"
Dir.glob(factories).each { |file| require file }

examples = "#{File.dirname(__FILE__).gsub(/\\/, "/")}/shared_examples/*.rb"
Dir.glob(examples).each { |file| require file }
|
2011-02-01 09:57:29 +08:00
|
|
|
|
2014-01-11 01:38:35 +08:00
|
|
|
# Skip the current example when Bullet (N+1 query detection) is enabled,
# for specs known to trip it.
def pend_with_bullet
  if defined?(Bullet) && Bullet.enable?
    skip ('PENDING: Bullet')
  end
end
|
|
|
|
|
2011-02-10 01:16:29 +08:00
|
|
|
# rspec aliases :describe to :context in a way that it's pretty much defined
# globally on every object. :context is already heavily used in our application,
# so we remove rspec's definition. This does not prevent 'context' from being
# used within a 'describe' block.
if defined?(Spec::DSL::Main)
  module Spec::DSL::Main
    remove_method :context if respond_to? :context
  end
end
|
|
|
|
|
2011-06-06 23:32:11 +08:00
|
|
|
# Truncate a single model's table, handling per-adapter quirks:
# SQLite needs its autoincrement sequence reset, PostgreSQL needs
# CASCADE (with notices muted), MySQL needs FK checks toggled off.
def truncate_table(model)
  case model.connection.adapter_name
  when "SQLite"
    model.delete_all
    begin
      model.connection.execute("delete from sqlite_sequence where name='#{model.connection.quote_table_name(model.table_name)}';")
      # seed the sequence with a random offset so id collisions across tables surface in specs
      model.connection.execute("insert into sqlite_sequence (name, seq) values ('#{model.connection.quote_table_name(model.table_name)}', #{rand(100)});")
    rescue
    end
  when "PostgreSQL"
    begin
      # silence "NOTICE: truncate cascades..." chatter during the truncate
      old_proc = model.connection.raw_connection.set_notice_processor {}
      model.connection.execute("TRUNCATE TABLE #{model.connection.quote_table_name(model.table_name)} CASCADE")
    ensure
      model.connection.raw_connection.set_notice_processor(&old_proc)
    end
  else
    model.connection.execute("SET FOREIGN_KEY_CHECKS=0")
    model.connection.execute("TRUNCATE TABLE #{model.connection.quote_table_name(model.table_name)}")
    model.connection.execute("SET FOREIGN_KEY_CHECKS=1")
  end
end
|
|
|
|
|
2012-05-08 04:18:47 +08:00
|
|
|
# Wipe every table on every shard (except schema_migrations), then
# recreate the built-in roles that specs depend on.
def truncate_all_tables
  raise "don't use truncate_all_tables with transactional fixtures. this kills the postgres" if ActiveRecord::Base.connection.open_transactions > 0

  Shard.with_each_shard do
    model_connections = ActiveRecord::Base.descendants.map(&:connection).uniq
    model_connections.each do |connection|
      if connection.adapter_name == "PostgreSQL"
        # use custom SQL to exclude tables from extensions
        schema = connection.shard.name if connection.instance_variable_get(:@config)[:use_qualified_names]
        table_names = connection.query(<<-SQL, 'SCHEMA').map(&:first)
          SELECT relname
          FROM pg_class INNER JOIN pg_namespace ON relnamespace=pg_namespace.oid
          WHERE nspname = #{schema ? "'#{schema}'" : 'ANY (current_schemas(false))'}
            AND relkind='r'
            AND NOT EXISTS (
              SELECT 1 FROM pg_depend WHERE deptype='e' AND objid=pg_class.oid
            )
        SQL
        table_names.delete('schema_migrations')
        connection.execute("TRUNCATE TABLE #{table_names.map { |t| connection.quote_table_name(t) }.join(',')}")
      else
        connection.tables.each { |model| truncate_table(model) }
      end
    end

    Role.ensure_built_in_roles!
  end
end
|
|
|
|
|
2011-10-01 04:22:22 +08:00
|
|
|
# Make AR not puke if MySQL auto-commits the transaction
module MysqlOutsideTransaction
  def outside_transaction?
    # MySQL ignores creation of savepoints outside of a transaction; so if we can create one
    # and then can't release it because it doesn't exist, we're not in a transaction
    execute('SAVEPOINT outside_transaction')
    !!execute('RELEASE SAVEPOINT outside_transaction') rescue true
  end
end
|
|
|
|
|
2014-01-15 08:03:00 +08:00
|
|
|
# Mix the savepoint probe into whichever MySQL adapter(s) are loaded.
module ActiveRecord::ConnectionAdapters
  if defined?(MysqlAdapter)
    MysqlAdapter.send(:include, MysqlOutsideTransaction)
  end
  if defined?(Mysql2Adapter)
    Mysql2Adapter.send(:include, MysqlOutsideTransaction)
  end
end
|
2013-10-12 00:37:18 +08:00
|
|
|
|
|
|
|
# Be sure to actually test serializing things to non-existent caches,
# but give Mocks a pass, since they won't exist in dev/prod
Mocha::Mock.class_eval do
  # Marshal a mock as nothing...
  def marshal_dump
    nil
  end

  # ...but blow up loudly if anyone tries to load one back.
  def marshal_load(data)
    raise "Mocks aren't really serializeable!"
  end

  # YAML-serialize mocks as a plain null scalar.
  def to_yaml(opts = {})
    YAML.quick_emit(self.object_id, opts) do |out|
      out.scalar(nil, 'null')
    end
  end

  # Mocks normally only respond_to? what they've been stubbed with; claim
  # the marshal hooks so Marshal picks them up.
  def respond_to_with_marshalling?(symbol, include_private = false)
    return true if [:marshal_dump, :marshal_load].include?(symbol)
    respond_to_without_marshalling?(symbol, include_private)
  end

  alias_method_chain :respond_to?, :marshalling
end
|
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
# Matches when `actual` is a superset of `expected`: hash-subset semantics
# for hashes, and element-wise hash-subset (with equal lengths) for arrays.
RSpec::Matchers.define :encompass do |expected|
  match do |actual|
    if expected.is_a?(Array) && actual.is_a?(Array)
      expected.size == actual.size && expected.zip(actual).all? { |e, a| a.slice(*e.keys) == e }
    elsif expected.is_a?(Hash) && actual.is_a?(Hash)
      actual.slice(*expected.keys) == expected
    else
      false
    end
  end
end
|
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
# Matches two strings after stripping ALL whitespace from both sides.
RSpec::Matchers.define :match_ignoring_whitespace do |expected|
  def whitespaceless(str)
    str.gsub(/\s+/, '')
  end

  match do |actual|
    whitespaceless(actual) == whitespaceless(expected)
  end
end
|
|
|
|
|
2014-01-25 02:47:43 +08:00
|
|
|
module Helpers
  # Build and persist a Message with sensible defaults; any field can be
  # overridden via opts (e.g. :to, :subject, :workflow_state, :user).
  def message(opts={})
    m = Message.new
    m.to = opts[:to] || 'some_user'
    m.from = opts[:from] || 'some_other_user'
    m.subject = opts[:subject] || 'a message for you'
    m.body = opts[:body] || 'nice body'
    m.sent_at = opts[:sent_at] || 5.days.ago
    m.workflow_state = opts[:workflow_state] || 'sent'
    m.user_id = opts[:user_id] || opts[:user].try(:id)
    m.path_type = opts[:path_type] || 'email'
    m.root_account_id = opts[:account_id] || Account.default.id
    m.save!
    m
  end
end
|
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
RSpec.configure do |config|
|
2011-02-01 09:57:29 +08:00
|
|
|
# If you're not using ActiveRecord you should remove these
|
|
|
|
# lines, delete config/database.yml and disable :active_record
|
|
|
|
# in your config/boot.rb
|
|
|
|
config.use_transactional_fixtures = true
|
2013-05-24 03:18:11 +08:00
|
|
|
config.use_instantiated_fixtures = false
|
2013-03-08 08:08:47 +08:00
|
|
|
config.fixture_path = Rails.root+'spec/fixtures/'
|
2014-08-14 22:55:40 +08:00
|
|
|
config.infer_spec_type_from_file_location!
|
2011-02-01 09:57:29 +08:00
|
|
|
|
2016-01-23 07:00:29 +08:00
|
|
|
config.order = :random
|
|
|
|
|
2014-01-25 02:47:43 +08:00
|
|
|
config.include Helpers
|
2011-02-01 09:57:29 +08:00
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
config.include Onceler::BasicHelpers
|
2014-07-23 23:15:39 +08:00
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
# rspec 2+ only runs global before(:all)'s before the top-level
# groups, not before each nested one. so we need to reset some
# things to play nicely with its caching
Onceler.configure do |c|
  c.before :record do
    Account.clear_special_account_cache!(true)
    AdheresToPolicy::Cache.clear
    Folder.reset_path_lookups!
  end
end
|
2014-06-28 16:57:40 +08:00
|
|
|
|
2014-07-24 01:14:22 +08:00
|
|
|
Onceler.instance_eval do
  # since once-ler creates potentially multiple levels of transaction
  # nesting, we need a way to know the base level so we can compare it
  # to AR::Conn#open_transactions. that will tell us if something is
  # "committed" or not (from the perspective of the spec)
  def base_transactions
    # if not recording, it's presumed we're in a spec, in which case
    # transactional fixtures add one more level
    open_transactions + (recording? ? 0 : 1)
  end
end
|
|
|
|
|
2014-09-11 03:28:00 +08:00
|
|
|
# Keep notification caches coherent when specs create Notifications.
Notification.after_create do
  Notification.reset_cache!
  BroadcastPolicy.notification_finder.refresh_cache
end
|
|
|
|
|
2012-02-07 02:36:46 +08:00
|
|
|
config.before :all do
  # so before(:all)'s don't get confused
  Account.clear_special_account_cache!(true)
  AdheresToPolicy::Cache.clear
  # silence migration specs
  ActiveRecord::Migration.verbose = false

  # allow tests to still run in non-DA state even though it's hard-coded on
  Feature.definitions["differentiated_assignments"].send(:instance_variable_set, '@state', 'allowed')
end
|
|
|
|
|
2013-11-07 07:38:56 +08:00
|
|
|
def delete_fixtures!
  # noop for now, needed for plugin spec tweaks. implementation coming
  # in g/24755
end

# UTC for tests, cuz it's easier :P
Account.time_zone_attribute_defaults[:default_time_zone] = 'UTC'
|
|
|
|
|
2011-02-09 04:34:00 +08:00
|
|
|
# Reset every piece of process-global state before each example so specs
# can't leak caches, locales, or time zones into each other.
config.before :each do
  I18n.locale = :en
  Time.zone = 'UTC'
  LoadAccount.force_special_account_reload = true
  Account.clear_special_account_cache!(true)
  PluginSetting.current_account = nil
  AdheresToPolicy::Cache.clear
  Setting.reset_cache!
  ConfigFile.unstub
  HostUrl.reset_cache!
  Notification.reset_cache!
  ActiveRecord::Base.reset_any_instantiation!
  Attachment.clear_cached_mime_ids
  Folder.reset_path_lookups!
  Role.ensure_built_in_roles!
  RoleOverride.clear_cached_contexts
  Delayed::Job.redis.flushdb if Delayed::Job == Delayed::Backend::Redis::Job
  Rails::logger.try(:info, "Running #{self.class.description} #{@method_name}")
  Attachment.domain_namespace = nil
  Canvas::DynamicSettings.reset_cache!
  $spec_api_tokens = {}
end
|
|
|
|
|
2015-12-11 01:12:27 +08:00
|
|
|
config.before :suite do
  BlankSlateProtection.disable!

  # give each parallel test worker its own log file
  if ENV['TEST_ENV_NUMBER'].present?
    Rails.logger.reopen("log/test#{ENV['TEST_ENV_NUMBER']}.log")
  end

  if ENV['COVERAGE'] == "1"
    # do this in a hook so that results aren't clobbered under test-queue
    # (it forks and changes the TEST_ENV_NUMBER)
    SimpleCov.command_name("rspec:#{Process.pid}:#{ENV['TEST_ENV_NUMBER']}")
  end

  # wipe out the test db, in case some non-transactional tests crapped out before
  # cleaning up after themselves
  truncate_all_tables
end
|
|
|
|
|
# this runs on post-merge builds to capture dependencies of each spec;
# we then use that data to run just the bare minimum subset of selenium
# specs on the patchset builds
if ENV["SELINIMUM_CAPTURE"]
  require "selinimum"

  config.before :suite do
    Selinimum::Capture.install!
  end

  config.before do |example|
    Selinimum::Capture.current_example = example
  end

  config.after :suite do
    Selinimum::Capture.report!(ENV["SELINIMUM_BATCH_NAME"])
  end
end
|
|
|
|
|
2011-11-04 05:51:51 +08:00
|
|
|
# flush redis before the first spec, and before each spec that comes after
# one that used redis
class << Canvas
  attr_accessor :redis_used

  def redis_with_track_usage(*a, &b)
    self.redis_used = true
    redis_without_track_usage(*a, &b)
  end

  alias_method_chain :redis, :track_usage

  # start "dirty" so the very first spec gets a clean flush
  Canvas.redis_used = true
end
|
|
|
|
config.before :each do
  if Canvas.redis_enabled? && Canvas.redis_used
    Canvas.redis.flushdb
  end
  Canvas.redis_used = false
end
|
|
|
|
|
2015-07-08 03:04:12 +08:00
|
|
|
#****************************************************************
# There used to be a lot of factory methods here!
# In an effort to move us toward a nicer test factory solution,
# all factories should now live in a separate file named to
# correspond with the model that should be built by the factory.
# Please see spec/factories for examples!
#****************************************************************

# Masquerade as the course's student-view ("test student") user.
def enter_student_view(opts={})
  course = opts[:course] || @course || course(opts)
  @fake_student = course.student_view_student
  post "/users/#{@fake_student.id}/masquerade"
  expect(session[:become_user_id]).to eq @fake_student.id.to_s
end
|
|
|
|
|
2011-08-25 03:40:15 +08:00
|
|
|
# Log in through the real login form and assert the redirect landed on
# the success page.
def login_as(username = "nobody@example.com", password = "asdfasdf")
  post_via_redirect "/login",
                    "pseudonym_session[unique_id]" => username,
                    "pseudonym_session[password]" => password
  assert_response :success
  expect(request.fullpath).to eq "/?login_success=1"
end
|
|
|
|
|
2011-02-01 09:57:29 +08:00
|
|
|
# Assert the last response had the given HTTP status (defaults to 500).
def assert_status(status=500)
  expect(response.status.to_i).to eq status
end
|
|
|
|
|
|
|
|
def assert_unauthorized
  # we allow either a raw unauthorized or a redirect to login
  unless response.status == 401
    expect(response).to redirect_to(login_url)
  end
end
|
|
|
|
|
refactor PseudonymSessionsController
fixes CNVS-20394
split it into appropriate concerns. main points are:
* /login never renders a login form - it redirects forward to the
default auth controller based on the first account
authorization config (or discovery url on the account)
* /login/canvas is the new home of the old login form. this form is
never rendered in-situ anymore - other places that used to render
it now redirect to /login (and then forward to here), reducing
their knowledge of SSO
* /login/ldap ends up at the same place (cause LDAP auth is handled
transparently)
* /login/cas and /login/saml redirect forward to the first SSO
configuration of the appropriate type. /login/:auth_type/:id can
be used to select a specific one
* if an SSO fails, it redirects back to /login with flash[:error]
set. this can forward to the discovery url appropriately, or
render an error page appropriately (the old no_auto=1, but now
it's not layered on top of the login partial that didn't show a
login form)
* ?canvas_login=1 is deprecated. just go directly to /login/canvas
* /saml_consume, /saml_logout are deprecated. they are processed
directly by /login/saml and /login/saml/logout
* /login/:id is deprecated - it forwards to /login/:auth_type/:id
as appropriate (presumably only saml, since that was the only
one that previously should have been using these links)
* OTP has been split into its own controller, and separated into
multiple actions instead of one all-in-one action
* /logout has been vastly simplified. the login controller should
set session[:login_aac], and on logout it will check with that
AAC for a url to redirect to after logout, instead of /login.
SSO logout is handled by each controller if they support it
test plan:
* regression test the following functionality -
* login with canvas auth
* login with LDAP auth
* login with SAML auth - and multiple SAMLs
* login with CAS auth
* MFA (configure, using, auto-setup)
* Canvas as OAuth Provider flow
* redirects to the login page when you're not
logged in
* failure of SAML/CAS (i.e. can't find user)
show a decent error page and allows retry
* "sticky" site admin auth (site admin is CAS/SAML,
going directly to another domain logs you in with
site admin)
Change-Id: I1bb9d81a101939f812cbd5020e20749e883fdc0f
Reviewed-on: https://gerrit.instructure.com/53220
QA-Review: August Thornton <august@instructure.com>
Tested-by: Jenkins
Reviewed-by: Ethan Vizitei <evizitei@instructure.com>
Product-Review: Cody Cutrer <cody@instructure.com>
2015-05-01 03:58:57 +08:00
|
|
|
# Runs the given request block and asserts the response was a 404.
def assert_page_not_found
  yield
  assert_status(404)
end
|
|
|
|
|
2011-02-01 09:57:29 +08:00
|
|
|
# Asserts the response redirected (to login) with the standard
# "must be logged in" warning flash.
def assert_require_login
  expect(response).to be_redirect
  expect(flash[:warning]).to eq "You must be logged in to access this page"
end
|
2011-11-24 03:52:38 +08:00
|
|
|
|
2015-09-19 05:02:44 +08:00
|
|
|
# Instead of directly comparing urls
|
|
|
|
# this will make sure urls match
|
|
|
|
# by parsing them, and comparing the results
|
|
|
|
# meaning these would match
|
|
|
|
# http://test.dev/?foo=bar&other=1
|
|
|
|
# http://test.dev/?other=1&foo=bar
|
|
|
|
# Instead of directly comparing urls, this makes sure urls match by
# parsing them and comparing the pieces, meaning these would match:
#   http://test.dev/?foo=bar&other=1
#   http://test.dev/?other=1&foo=bar
def assert_url_parse_match(test_url, expected_url)
  actual, wanted = URI.parse(test_url), URI.parse(expected_url)

  actual_query = Rack::Utils.parse_nested_query(actual.query)
  wanted_query = Rack::Utils.parse_nested_query(wanted.query)

  expect(actual.scheme).to eq wanted.scheme
  expect(actual.host).to eq wanted.host
  expect(actual_query).to eq wanted_query
end
|
|
|
|
|
2015-10-28 06:05:48 +08:00
|
|
|
# Asserts that test_hash contains every key/value pair of expected_hash
# (test_hash is allowed to have extra keys).
def assert_hash_contains(test_hash, expected_hash)
  expected_hash.each_pair do |key, value|
    expect(test_hash[key]).to eq value
  end
end
|
|
|
|
|
2014-01-09 04:47:34 +08:00
|
|
|
# Builds a Rack::Test::UploadedFile for the named fixture, suitable for
# posting as a file parameter in controller/integration specs.
def fixture_file_upload(path, mime_type=nil, binary=false)
  full_path = File.join(ActionController::TestCase.fixture_path, path)
  Rack::Test::UploadedFile.new(full_path, mime_type, binary)
end
|
|
|
|
|
2011-02-01 09:57:29 +08:00
|
|
|
# A canned .doc fixture upload for specs that just need "some file data".
def default_uploaded_data
  fixture_file_upload('scribd_docs/doc.doc', 'application/msword', true)
end
|
2011-11-24 03:52:38 +08:00
|
|
|
|
2011-02-01 09:57:29 +08:00
|
|
|
# Builds (and by default saves) a model instance, assigning each attribute
# through its setter so attr_protected/mass-assignment rules are bypassed.
def factory_with_protected_attributes(ar_klass, attrs, do_save = true)
  instance = ar_klass.respond_to?(:new) ? ar_klass.new : ar_klass.build
  attrs.each_pair { |name, value| instance.send("#{name}=", value) }
  instance.save! if do_save
  instance
end
|
|
|
|
|
|
|
|
# Assigns each attribute via its setter (bypassing attr_protected) and
# then saves with save! (raising on validation failure).
def update_with_protected_attributes!(ar_instance, attrs)
  attrs.each_pair { |name, value| ar_instance.send("#{name}=", value) }
  ar_instance.save!
end
|
|
|
|
|
|
|
|
# Non-bang variant: returns false instead of raising when the save fails.
def update_with_protected_attributes(ar_instance, attrs)
  update_with_protected_attributes!(ar_instance, attrs)
rescue
  false
end
|
|
|
|
|
2014-03-19 23:57:19 +08:00
|
|
|
# Writes the given CSV lines to a temp file and runs them through the SIS
# CSV importer against opts[:account] / @account / a new account.
# Returns the importer so callers can inspect errors/warnings.
def process_csv_data(*lines)
  opts = lines.extract_options!
  opts.reverse_merge!(allow_printing: false)
  account = opts[:account] || @account || account_model

  # reserve a unique path, then write the csv content to "<path>.csv"
  tmp = Tempfile.new("sis_rspec")
  path = "#{tmp.path}.csv"
  tmp.close!
  File.open(path, "w+") { |f| f.puts lines.flatten.join("\n") }
  opts[:files] = [path]

  importer = SIS::CSV::Import.process(account, opts)

  File.unlink path
  importer
end
|
2011-11-24 03:52:38 +08:00
|
|
|
|
2011-09-22 01:36:45 +08:00
|
|
|
# Like process_csv_data, but additionally asserts that the import
# produced no errors and no warnings.
def process_csv_data_cleanly(*lines_or_opts)
  importer = process_csv_data(*lines_or_opts)
  expect(importer.errors).to eq []
  expect(importer.warnings).to eq []
end
|
|
|
|
|
2014-01-24 05:37:03 +08:00
|
|
|
# Swaps Rails.cache (and the controller cache store) for a real cache
# store during a spec, since the test default is effectively a null
# store. When given a block, the previous store and perform_caching
# setting are restored afterwards; without a block the stubs persist for
# the rest of the example.
def enable_cache(new_cache=:memory_store)
  new_cache ||= :null_store
  new_cache = ActiveSupport::Cache.lookup_store(new_cache)
  previous_cache = Rails.cache
  Rails.stubs(:cache).returns(new_cache)
  ActionController::Base.stubs(:cache_store).returns(new_cache)
  ActionController::Base.any_instance.stubs(:cache_store).returns(new_cache)
  previous_perform_caching = ActionController::Base.perform_caching
  ActionController::Base.stubs(:perform_caching).returns(true)
  ActionController::Base.any_instance.stubs(:perform_caching).returns(true)
  if block_given?
    begin
      yield
    ensure
      # restore by re-stubbing with the previous values (mocha stubs are
      # cleaned up at example end regardless)
      Rails.stubs(:cache).returns(previous_cache)
      ActionController::Base.stubs(:cache_store).returns(previous_cache)
      ActionController::Base.any_instance.stubs(:cache_store).returns(previous_cache)
      ActionController::Base.stubs(:perform_caching).returns(previous_perform_caching)
      ActionController::Base.any_instance.stubs(:perform_caching).returns(previous_perform_caching)
    end
  end
end
|
|
|
|
|
2011-07-20 01:47:59 +08:00
|
|
|
# enforce forgery protection, so we can verify usage of the authenticity token
|
2013-03-12 04:17:20 +08:00
|
|
|
# enforce forgery protection, so we can verify usage of the authenticity
# token. With a block, the previous setting is restored afterwards.
def enable_forgery_protection(enable = true)
  previous = ActionController::Base.allow_forgery_protection
  ActionController::Base.stubs(:allow_forgery_protection).returns(enable)
  ActionController::Base.any_instance.stubs(:allow_forgery_protection).returns(enable)

  yield if block_given?
ensure
  if block_given?
    ActionController::Base.stubs(:allow_forgery_protection).returns(previous)
    ActionController::Base.any_instance.stubs(:allow_forgery_protection).returns(previous)
  end
end
|
|
|
|
|
2011-09-28 02:15:28 +08:00
|
|
|
# Stubs the Kaltura media plugin config so kaltura-dependent code paths
# believe the service is configured and enabled.
def stub_kaltura
  # trick kaltura into being activated
  CanvasKaltura::ClientV3.stubs(:config).returns({
    'domain' => 'kaltura.example.com',
    'resource_domain' => 'kaltura.example.com',
    'partner_id' => '100',
    'subpartner_id' => '10000',
    'secret_key' => 'fenwl1n23k4123lk4hl321jh4kl321j4kl32j14kl321',
    'user_secret_key' => '1234821hrj3k21hjk4j3kl21j4kl321j4kl3j21kl4j3k2l1',
    'player_ui_conf' => '1',
    'kcw_ui_conf' => '1',
    'upload_ui_conf' => '1'
  })
end
|
2011-08-12 00:51:57 +08:00
|
|
|
|
2011-11-24 04:49:27 +08:00
|
|
|
# Parses a JSON response body, stripping the "while(1);" XSSI-protection
# prefix that Canvas prepends to JSON responses.
def json_parse(json_string = response.body)
  cleaned = json_string.sub(%r{^while\(1\);}, '')
  JSON.parse(cleaned)
end
|
2012-02-22 04:35:39 +08:00
|
|
|
|
2013-03-07 03:44:57 +08:00
|
|
|
# inspired by http://blog.jayfields.com/2007/08/ruby-calling-methods-of-specific.html
|
|
|
|
# inspired by http://blog.jayfields.com/2007/08/ruby-calling-methods-of-specific.html
#
# Lets Attachment/Thumbnail specs flip between the FileSystem and S3
# attachment_fu backends at runtime: when included, every backend
# instance method is rewritten to delegate to whichever backend is
# currently selected via the `current_backend` class accessor.
module AttachmentStorageSwitcher
  BACKENDS = %w{FileSystem S3}.map { |backend| AttachmentFu::Backends.const_get(:"#{backend}Backend") }.freeze

  # Proxy that invokes a method using a specific ancestor's
  # implementation, regardless of what the subject's class overrides.
  class As #:nodoc:
    # hide (almost) everything so calls fall through to method_missing
    private *instance_methods.select { |m| m !~ /(^__|^\W|^binding$)/ }

    def initialize(subject, ancestor)
      @subject = subject
      @ancestor = ancestor
    end

    # bind the ancestor's implementation to the subject and call it
    def method_missing(sym, *args, &blk)
      @ancestor.instance_method(sym).bind(@subject).call(*args, &blk)
    end
  end

  def self.included(base)
    base.cattr_accessor :current_backend
    base.current_backend = (base.ancestors & BACKENDS).first

    # make sure we have all the backends
    BACKENDS.each do |backend|
      base.send(:include, backend) unless base.ancestors.include?(backend)
    end
    # remove the duplicate callbacks added by multiple backends
    base.before_update.uniq!

    BACKENDS.map(&:instance_methods).flatten.uniq.each do |method|
      # overridden by Attachment anyway; don't re-overwrite it
      next if base.instance_method(method).owner == base
      if method.to_s[-1..-1] == '='
        base.class_eval <<-CODE
          def #{method}(arg)
            self.as(self.class.current_backend).#{method} arg
          end
        CODE
      else
        base.class_eval <<-CODE
          def #{method}(*args, &block)
            self.as(self.class.current_backend).#{method}(*args, &block)
          end
        CODE
      end
    end
  end

  # memoized As proxy for the given ancestor
  def as(ancestor)
    @__as ||= {}
    unless r = @__as[ancestor]
      r = (@__as[ancestor] = As.new(self, ancestor))
    end
    r
  end
end
|
|
|
|
|
|
|
|
# Forces Attachment/Thumbnail onto the S3 backend for a spec. With
# opts[:stubs] (the default) all AWS S3 calls are mocked out; otherwise
# real credentials from config/amazon_s3.yml are required and the spec
# is skipped when they're missing/placeholder.
def s3_storage!(opts = {:stubs => true})
  [Attachment, Thumbnail].each do |model|
    model.send(:include, AttachmentStorageSwitcher) unless model.ancestors.include?(AttachmentStorageSwitcher)
    model.stubs(:current_backend).returns(AttachmentFu::Backends::S3Backend)

    model.stubs(:s3_storage?).returns(true)
    model.stubs(:local_storage?).returns(false)
  end

  if opts[:stubs]
    conn = mock('AWS::S3::Client')

    AWS::S3::S3Object.any_instance.stubs(:read).returns("i am stub data from spec helper. nom nom nom")
    AWS::S3::S3Object.any_instance.stubs(:write).returns(true)
    AWS::S3::S3Object.any_instance.stubs(:create_temp_file).returns(true)
    AWS::S3::S3Object.any_instance.stubs(:client).returns(conn)
    AWS::Core::Configuration.any_instance.stubs(:access_key_id).returns('stub_id')
    AWS::Core::Configuration.any_instance.stubs(:secret_access_key).returns('stub_key')
    AWS::S3::Bucket.any_instance.stubs(:name).returns('no-bucket')
  else
    if Attachment.s3_config.blank? || Attachment.s3_config[:access_key_id] == 'access_key'
      skip "Please put valid S3 credentials in config/amazon_s3.yml"
    end
  end
  # sanity-check that the switch took effect
  expect(Attachment.s3_storage?).to be true
  expect(Attachment.local_storage?).to be false
end
|
|
|
|
|
|
|
|
# Forces Attachment/Thumbnail onto the local FileSystem backend for a
# spec, mirroring s3_storage!.
def local_storage!
  [Attachment, Thumbnail].each do |model|
    model.send(:include, AttachmentStorageSwitcher) unless model.ancestors.include?(AttachmentStorageSwitcher)
    model.stubs(:current_backend).returns(AttachmentFu::Backends::FileSystemBackend)

    model.stubs(:s3_storage?).returns(false)
    model.stubs(:local_storage?).returns(true)
  end

  # sanity-check that the switch took effect
  expect(Attachment.local_storage?).to be true
  expect(Attachment.s3_storage?).to be false
end
|
2011-12-10 06:37:12 +08:00
|
|
|
|
refactor jobs admin functionality to not use AR queries
A set of class functions were added to Delayed::Backend::ActiveRecord
for all the querying a updating functionality that the jobs admin needs,
so that no direct ActiveRecord queries are needed. The /jobs UI is
refactored to use these new functions.
There are a few differences in behavior: The search isn't a combined
wildcard search anymore. Instead, new "flavors" were added to the
drop-down for strand, tag, and ID. The search box searches only the
selected attribute, and it's exact match now.
Specs are being updated to use these new functions as well. Eventually,
no direct AR queries will be done against Jobs anywhere, so that non-AR
jobs backends are possible.
Also as part of this, all jobs require a queue now. Passing nil for the
queue will use the default of Delayed::Worker.queue.
test plan: Load /jobs, and verify that it works as before except where
there are differences as described above.
* Selecting flavors of jobs lists only those jobs.
* Searching by ID, strand or tag works.
* The hold/unhold/delete actions work in the various combinations of
filtering/searching.
* Linking to an individual job still works (though the query string
has changed so old links don't work)
* Running jobs and list of popular tags still works as expected.
Change-Id: Iffd5b8c7b3d6e4b128792a9dee7b97c6dfb251dc
Reviewed-on: https://gerrit.instructure.com/12632
Tested-by: Jenkins <jenkins@instructure.com>
Reviewed-by: Bracken Mosbacker <bracken@instructure.com>
Reviewed-on: https://gerrit.instructure.com/13089
Reviewed-by: Jacob Fugal <jacob@instructure.com>
Tested-by: Jacob Fugal <jacob@instructure.com>
2012-08-01 04:22:52 +08:00
|
|
|
# Executes a single delayed job inline with a fresh worker.
def run_job(job)
  Delayed::Worker.new.perform(job)
end
|
2012-03-21 06:08:20 +08:00
|
|
|
|
2012-06-07 04:16:55 +08:00
|
|
|
# Drains the delayed-jobs queue, locking and running each available job
# inline until none remain.
def run_jobs
  loop do
    job = Delayed::Job.get_and_lock_next_available(
      'spec run_jobs',
      Delayed::Settings.queue,
      0,
      Delayed::MAX_PRIORITY)
    break unless job
    run_job(job)
  end
end
|
|
|
|
|
refactor jobs admin functionality to not use AR queries
A set of class functions were added to Delayed::Backend::ActiveRecord
for all the querying a updating functionality that the jobs admin needs,
so that no direct ActiveRecord queries are needed. The /jobs UI is
refactored to use these new functions.
There are a few differences in behavior: The search isn't a combined
wildcard search anymore. Instead, new "flavors" were added to the
drop-down for strand, tag, and ID. The search box searches only the
selected attribute, and it's exact match now.
Specs are being updated to use these new functions as well. Eventually,
no direct AR queries will be done against Jobs anywhere, so that non-AR
jobs backends are possible.
Also as part of this, all jobs require a queue now. Passing nil for the
queue will use the default of Delayed::Worker.queue.
test plan: Load /jobs, and verify that it works as before except where
there are differences as described above.
* Selecting flavors of jobs lists only those jobs.
* Searching by ID, strand or tag works.
* The hold/unhold/delete actions work in the various combinations of
filtering/searching.
* Linking to an individual job still works (though the query string
has changed so old links don't work)
* Running jobs and list of popular tags still works as expected.
Change-Id: Iffd5b8c7b3d6e4b128792a9dee7b97c6dfb251dc
Reviewed-on: https://gerrit.instructure.com/12632
Tested-by: Jenkins <jenkins@instructure.com>
Reviewed-by: Bracken Mosbacker <bracken@instructure.com>
Reviewed-on: https://gerrit.instructure.com/13089
Reviewed-by: Jacob Fugal <jacob@instructure.com>
Tested-by: Jacob Fugal <jacob@instructure.com>
2012-08-01 04:22:52 +08:00
|
|
|
# Records any delayed jobs created while the block runs; inspect them
# afterwards via created_jobs.
def track_jobs
  @jobs_tracking = Delayed::JobTracking.track { yield }
end
|
|
|
|
|
|
|
|
# Jobs created during the most recent track_jobs block.
def created_jobs
  @jobs_tracking.created
end
|
|
|
|
|
|
|
|
# Asserts that running the block enqueues exactly `count` delayed jobs
# with the given tag.
def expects_job_with_tag(tag, count = 1)
  track_jobs { yield }
  matching = created_jobs.count { |job| job.tag == tag }
  expect(matching).to eq count
end
|
|
|
|
|
2012-03-21 06:08:20 +08:00
|
|
|
# send a multipart post request in an integration spec post_params is
|
|
|
|
# an array of [k,v] params so that the order of the params can be
|
|
|
|
# defined
|
|
|
|
# send a multipart post request in an integration spec. post_params is
# an array of [k,v] params so that the order of the params can be
# defined.
def send_multipart(url, post_params = {}, http_headers = {}, method = :post)
  multipart = Multipart::Post.new
  query, headers = multipart.prepare_query(post_params)

  # A bug in the testing adapter in Rails 3-2-stable doesn't correctly handle
  # translating this header to the Rack/CGI compatible version:
  # (https://github.com/rails/rails/blob/3-2-stable/actionpack/lib/action_dispatch/testing/integration.rb#L289)
  #
  # This issue is fixed in Rails 4-0 stable, by using a newer version of
  # ActionDispatch Http::Headers which correctly handles the merge
  headers = headers.dup.tap { |h| h['CONTENT_TYPE'] ||= h.delete('Content-type') }

  send(method, url, query, headers.merge(http_headers))
end
|
2012-03-30 07:52:51 +08:00
|
|
|
|
2015-07-17 04:36:55 +08:00
|
|
|
# The response-header key used for content type in these specs.
def content_type_key
  'Content-Type'
end
|
|
|
|
|
2012-11-01 23:03:48 +08:00
|
|
|
# Forcibly tags a string with the given encoding (default UTF-8) on
# rubies that support force_encoding; returns the string either way.
def force_string_encoding(str, encoding = "UTF-8")
  str.force_encoding(encoding) if str.respond_to?(:force_encoding)
  str
end
|
|
|
|
|
2013-02-05 05:58:38 +08:00
|
|
|
# from minitest, MIT licensed
|
|
|
|
# from minitest, MIT licensed
# Captures everything the block writes to $stdout/$stderr and returns
# it as [stdout_string, stderr_string]; the real streams are always
# restored.
def capture_io
  saved_out, saved_err = $stdout, $stderr
  $stdout, $stderr = StringIO.new, StringIO.new
  yield
  [$stdout.string, $stderr.string]
ensure
  $stdout, $stderr = saved_out, saved_err
end
|
|
|
|
|
2012-05-18 10:51:27 +08:00
|
|
|
# Compares a raw HTTP POST (as an array of lines) against an expected
# POST: the request line must match exactly, the headers must match as a
# set (a default "User-Agent: Ruby" header is tolerated), and the
# payloads after the blank separator line must be equal.
def verify_post_matches(post_lines, expected_post_lines)
  # first lines should match
  expect(post_lines[0]).to eq expected_post_lines[0]

  # now extract the headers
  post_headers = post_lines[1..post_lines.index("")]
  expected_post_headers = expected_post_lines[1..expected_post_lines.index("")]
  expected_post_headers << "User-Agent: Ruby"
  expect(post_headers.sort).to eq expected_post_headers.sort

  # now check payload. NOTE: the previous version used `ary[index, -1]`
  # (which always returns nil, since -1 is a length, not an endpoint) and
  # split `expect(...).to eq` from its argument onto the next statement,
  # so the payload was never actually compared.
  expect(post_lines[post_lines.index("")..-1]).to eq(
    expected_post_lines[expected_post_lines.index("")..-1])
end
|
2012-08-14 05:13:20 +08:00
|
|
|
|
2013-01-01 06:25:44 +08:00
|
|
|
# Recursively compares parsed-JSON structures. Numbers are compared
# directly; all other leaves are compared via their JSON serialization.
# Extra keys in `expected` hashes are ignored (only keys present in
# `actual` are checked).
def compare_json(actual, expected)
  case actual
  when Hash
    actual.each do |key, value|
      compare_json(value, expected[key])
    end
  when Array
    actual.zip(expected).each do |actual_item, expected_item|
      compare_json(actual_item, expected_item)
    end
  else
    if actual.is_a?(Fixnum) || actual.is_a?(Float)
      expect(actual).to eq expected
    else
      expect(actual.to_json).to eq expected.to_json
    end
  end
end
|
|
|
|
|
2012-08-14 05:13:20 +08:00
|
|
|
# A minimal stand-in for Net::HTTPResponse, for stubbing HTTP calls in
# specs without real network traffic.
class FakeHttpResponse
  def initialize(code, body = nil, headers={})
    @code = code
    @body = body
    @headers = headers
  end

  # Streams the canned body into the given IO-like object (<<).
  def read_body(io)
    io << @body
  end

  # Net::HTTP exposes the status code as a string.
  def code
    @code.to_s
  end

  # Case-sensitive header lookup.
  def [](arg)
    @headers[arg]
  end

  def content_type
    self['content-type']
  end
end
|
2013-03-14 23:27:50 +08:00
|
|
|
|
|
|
|
# Recursively converts every Time in a nested structure (including hash
# keys) to its integer epoch value, leaving all other values untouched.
# Useful for comparing structures without sub-second timestamp noise.
def intify_timestamps(object)
  case object
  when Time
    object.to_i
  when Hash
    object.each_with_object({}) do |(key, value), result|
      result[intify_timestamps(key)] = intify_timestamps(value)
    end
  when Array
    object.map { |item| intify_timestamps(item) }
  else
    object
  end
end
|
2013-03-16 22:50:49 +08:00
|
|
|
|
2014-07-04 12:38:20 +08:00
|
|
|
# frd class, not a mock, so we can once-ler WebConferences (need to Marshal.dump)
|
|
|
|
# frd class, not a mock, so we can once-ler WebConferences (need to
# Marshal.dump). Always reports itself as valid and enabled.
class WebConferencePluginMock
  attr_reader :id, :settings

  def initialize(id, settings)
    @id = id
    @settings = settings
  end

  def valid_settings?
    true
  end

  def enabled?
    true
  end

  def base
  end
end
|
2013-03-16 22:50:49 +08:00
|
|
|
# Convenience constructor for WebConferencePluginMock.
def web_conference_plugin_mock(id, settings)
  WebConferencePluginMock.new(id, settings)
end
|
2013-05-07 03:16:06 +08:00
|
|
|
|
|
|
|
# An arbitrary uploaded-file IO for specs that just need file data
# (same fixture as default_uploaded_data).
def dummy_io
  fixture_file_upload('scribd_docs/doc.doc', 'application/msword', true)
end
|
|
|
|
|
2013-10-29 05:31:58 +08:00
|
|
|
# Toggles Rails' detailed exception pages (vs. public error pages) for
# the current spec.
def consider_all_requests_local(value)
  Rails.application.config.consider_all_requests_local = value
end
|
2013-11-05 02:56:25 +08:00
|
|
|
|
2014-05-16 00:23:58 +08:00
|
|
|
# a fast way to create a record, especially if you don't need the actual
|
|
|
|
# ruby object. since it just does a straight up insert, you need to
|
|
|
|
# provide any non-null attributes or things that would normally be
|
|
|
|
# inferred/defaulted prior to saving
|
|
|
|
# a fast way to create a record, especially if you don't need the actual
# ruby object. since it just does a straight up insert, you need to
# provide any non-null attributes or things that would normally be
# inferred/defaulted prior to saving
def create_record(klass, attributes, return_type = :id)
  create_records(klass, [attributes], return_type)[0]
end
|
|
|
|
|
|
|
|
# a little wrapper around bulk_insert that gives you back records or ids
|
|
|
|
# in order
|
|
|
|
# NOTE: if you decide you want to go add something like this to canvas
|
|
|
|
# proper, make sure you have it handle concurrent inserts (this does
|
|
|
|
# not, because READ COMMITTED is the default transaction isolation
|
|
|
|
# level)
|
|
|
|
# a little wrapper around bulk_insert that gives you back records or ids
# in insertion order.
# NOTE: if you decide you want to go add something like this to canvas
# proper, make sure you have it handle concurrent inserts (this does
# not, because READ COMMITTED is the default transaction isolation
# level)
def create_records(klass, records, return_type = :id)
  return [] if records.empty?
  klass.transaction do
    klass.connection.bulk_insert klass.table_name, records
    inserted = klass.order("id DESC").limit(records.size)
    if return_type == :record
      inserted.to_a.reverse
    else
      inserted.pluck(:id).reverse
    end
  end
end
|
2011-02-01 09:57:29 +08:00
|
|
|
end
|
2012-06-22 06:34:27 +08:00
|
|
|
|
2014-08-01 14:35:17 +08:00
|
|
|
# Reopen I18nema's backend so specs can temporarily overlay translations.
class I18nema::Backend
  # Overlays the given translations for the duration of the block by
  # alias-swapping lookup/available_locales to the stub-aware variants,
  # then restores normal lookup (and clears the overlay) afterwards.
  def stub(translations)
    @stubs = translations.with_indifferent_access
    singleton_class.instance_eval do
      alias_method :lookup, :lookup_with_stubs
      alias_method :available_locales, :available_locales_with_stubs
    end
    yield
  ensure
    singleton_class.instance_eval do
      alias_method :lookup, :lookup_without_stubs
      alias_method :available_locales, :available_locales_without_stubs
    end
    @stubs = nil
  end

  # Checks the stub overlay first, falling back to the real translations.
  def lookup_with_stubs(locale, key, scope = [], options = {})
    init_translations unless initialized?
    keys = normalize_keys(locale, key, scope, options[:separator])
    # NOTE(review): respond_to?(:key) looks like it may have been meant
    # as :key? (or :[]); confirm before "fixing" — current behavior is
    # what specs rely on.
    keys.inject(@stubs){ |h,k| h[k] if h.respond_to?(:key) } || direct_lookup(*keys)
  end
  alias_method :lookup_without_stubs, :lookup

  # Includes any stubbed locales alongside the real ones.
  def available_locales_with_stubs
    available_locales_without_stubs | @stubs.keys.map(&:to_sym)
  end
  alias_method :available_locales_without_stubs, :available_locales
end
|
|
|
|
|
2014-03-28 07:29:27 +08:00
|
|
|
# ANSI color helpers for spec/console output.
class String
  def red; colorize(self, "\e[1m\e[31m"); end

  def green; colorize(self, "\e[1m\e[32m"); end

  def dark_green; colorize(self, "\e[32m"); end

  def yellow; colorize(self, "\e[1m\e[33m"); end

  def blue; colorize(self, "\e[1m\e[34m"); end

  def dark_blue; colorize(self, "\e[34m"); end

  def pur; colorize(self, "\e[1m\e[35m"); end

  # Wraps text in the given ANSI escape and a trailing reset.
  def colorize(text, color_code) "#{color_code}#{text}\e[0m" end
end
|
|
|
|
|
2014-07-30 05:33:55 +08:00
|
|
|
# Pull in any plugin-provided spec helpers so their setup runs too.
Dir[Rails.root+'{gems,vendor}/plugins/*/spec_canvas/spec_helper.rb'].each do |f|
  require f
end
|