#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#

ENV["RAILS_ENV"] = 'test'

require File.expand_path('../../config/environment', __FILE__) unless defined?(Rails)
if CANVAS_RAILS3
  require 'rspec/rails'
else
  require 'spec'
  # require 'spec/autorun'
  require 'spec/rails'
end
require 'webrat'
require 'mocha/api'
require File.expand_path(File.dirname(__FILE__) + '/mocha_rspec_adapter')
require File.expand_path(File.dirname(__FILE__) + '/mocha_extensions')

Dir.glob("#{File.dirname(__FILE__).gsub(/\\/, "/")}/factories/*.rb").each { |file| require file }

# rspec aliases :describe to :context in a way that it's pretty much defined
# globally on every object. :context is already heavily used in our application,
# so we remove rspec's definition. This does not prevent 'context' from being
# used within a 'describe' block.
if defined?(Spec::DSL::Main)
  module Spec::DSL::Main
    remove_method :context if respond_to? :context
  end
end

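# Truncates a single model's table, branching on the connection adapter:
# SQLite deletes all rows and resets the sequence, PostgreSQL uses
# TRUNCATE ... CASCADE (silencing notices), and anything else (MySQL)
# toggles FOREIGN_KEY_CHECKS around the TRUNCATE.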
def truncate_table(model)
  case model.connection.adapter_name
  when "SQLite"
    model.delete_all
    begin
      model.connection.execute("delete from sqlite_sequence where name='#{model.connection.quote_table_name(model.table_name)}';")
      model.connection.execute("insert into sqlite_sequence (name, seq) values ('#{model.connection.quote_table_name(model.table_name)}', #{rand(100)});")
    rescue
    end
  when "PostgreSQL"
    begin
      old_proc = model.connection.raw_connection.set_notice_processor {}
      model.connection.execute("TRUNCATE TABLE #{model.connection.quote_table_name(model.table_name)} CASCADE")
    ensure
      model.connection.raw_connection.set_notice_processor(&old_proc)
    end
  else
    model.connection.execute("SET FOREIGN_KEY_CHECKS=0")
    model.connection.execute("TRUNCATE TABLE #{model.connection.quote_table_name(model.table_name)}")
    model.connection.execute("SET FOREIGN_KEY_CHECKS=1")
  end
end

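# Truncates every known model's table. Models sharing a PostgreSQL connection
# are truncated in a single multi-table statement; other adapters fall back to
# truncate_table one model at a time.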
def truncate_all_tables
  models_by_connection = ActiveRecord::Base.all_models.group_by { |m| m.connection }
  models_by_connection.each do |connection, models|
    if connection.adapter_name == "PostgreSQL"
      connection.execute("TRUNCATE TABLE #{models.map(&:table_name).map { |t| connection.quote_table_name(t) }.join(',')}")
    else
      models.each { |model| truncate_table(model) }
    end
  end
end

def truncate_all_cassandra_tables
  Canvas::Cassandra::Database.config_names.each do |cass_config|
    db = Canvas::Cassandra::Database.from_config(cass_config)
    db.keyspace_information.tables.each do |table|
      db.execute("TRUNCATE #{table}")
    end
  end
end

# wipe out the test db, in case some non-transactional tests crapped out before
# cleaning up after themselves
truncate_all_tables

# Make AR not puke if MySQL auto-commits the transaction
class ActiveRecord::ConnectionAdapters::MysqlAdapter < ActiveRecord::ConnectionAdapters::AbstractAdapter
  def outside_transaction?
    # MySQL ignores creation of savepoints outside of a transaction; so if we can create one
    # and then can't release it because it doesn't exist, we're not in a transaction
    execute('SAVEPOINT outside_transaction')
    !!execute('RELEASE SAVEPOINT outside_transaction') rescue true
  end
end

Spec::Matchers.define :encompass do |expected|
  match do |actual|
    if expected.is_a?(Array) && actual.is_a?(Array)
      expected.size == actual.size && expected.zip(actual).all?{|e,a| a.slice(*e.keys) == e}
    elsif expected.is_a?(Hash) && actual.is_a?(Hash)
      actual.slice(*expected.keys) == expected
    else
      false
    end
  end
end

Spec::Matchers.define :match_ignoring_whitespace do |expected|
  def whitespaceless(str)
    str.gsub(/\s+/, '')
  end

  match do |actual|
    whitespaceless(actual) == whitespaceless(expected)
  end
end

Spec::Runner.configure do |config|
  # If you're not using ActiveRecord you should remove these
  # lines, delete config/database.yml and disable :active_record
  # in your config/boot.rb
  config.use_transactional_fixtures = true
  config.use_instantiated_fixtures = false
  config.fixture_path = Rails.root+'spec/fixtures/'

  config.include Webrat::Matchers, :type => :views

  config.before :all do
    # so before(:all)'s don't get confused
    Account.clear_special_account_cache!
    Notification.after_create { Notification.reset_cache! }
  end

  config.before :each do
    I18n.locale = :en
    Time.zone = 'UTC'
    Account.clear_special_account_cache!
    Account.default.update_attribute(:default_time_zone, 'UTC')
    Setting.reset_cache!
    HostUrl.reset_cache!
    Notification.reset_cache!
    ActiveRecord::Base.reset_any_instantiation!
    Attachment.clear_cached_mime_ids
    RoleOverride.clear_cached_contexts
    Delayed::Job.redis.flushdb if Delayed::Job == Delayed::Backend::Redis::Job
    truncate_all_cassandra_tables
    Rails::logger.try(:info, "Running #{self.class.description} #{@method_name}")
    Attachment.domain_namespace = nil
  end

  # flush redis before the first spec, and before each spec that comes after
  # one that used redis
  class << Canvas
    attr_accessor :redis_used

    def redis_with_track_usage(*a, &b)
      self.redis_used = true
      redis_without_track_usage(*a, &b)
    end
    alias_method_chain :redis, :track_usage
    Canvas.redis_used = true
  end

  config.before :each do
    if Canvas.redis_enabled? && Canvas.redis_used
      Canvas.redis.flushdb rescue nil
    end
    Canvas.redis_used = false
  end

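  # Builds (or reuses opts[:account]) an account with a CAS authorization config.
  # Illustrative usage (option values are examples, not defaults):
  #   account_with_cas(:cas_url => "https://cas.example.com", :cas_log_in_url => "https://cas.example.com/login")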
  def account_with_cas(opts={})
    @account = opts[:account]
    @account ||= Account.create!
    config = AccountAuthorizationConfig.new
    cas_url = opts[:cas_url] || "https://localhost/cas"
    config.auth_type = "cas"
    config.auth_base = cas_url
    config.log_in_url = opts[:cas_log_in_url] if opts[:cas_log_in_url]
    @account.account_authorization_configs << config
    @account
  end

  def account_with_saml(opts={})
    @account = opts[:account]
    @account ||= Account.create!
    config = AccountAuthorizationConfig.new
    config.auth_type = "saml"
    config.log_in_url = opts[:saml_log_in_url] if opts[:saml_log_in_url]
    @account.account_authorization_configs << config
    @account
  end

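  # Creates @course in the given (or default) account, activating that account's
  # shard. Illustrative usage: course(:active_all => true) also offers the course
  # and enrolls an active teacher, stored in @teacher.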
  def course(opts={})
    account = opts[:account] || Account.default
    account.shard.activate do
      @course = Course.create!(:name => opts[:course_name], :account => account)
      @course.offer! if opts[:active_course] || opts[:active_all]
      if opts[:active_all]
        u = User.create!
        u.register!
        e = @course.enroll_teacher(u)
        e.workflow_state = 'active'
        e.save!
        @teacher = u
      end
    end
    @course
  end

  def account_admin_user_with_role_changes(opts={})
    account = opts[:account] || Account.default
    if opts[:role_changes]
      opts[:role_changes].each_pair do |permission, enabled|
        account.role_overrides.create(:permission => permission.to_s, :enrollment_type => opts[:membership_type] || 'AccountAdmin', :enabled => enabled)
      end
    end
    RoleOverride.clear_cached_contexts
    account_admin_user(opts)
  end

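  # Creates (or reuses opts[:user]) an admin AccountUser on the account and sets
  # @user/@admin. Note that the default opts hash is replaced, not merged, when
  # any opts are passed, so include :active_user => true explicitly if needed.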
  def account_admin_user(opts={:active_user => true})
    account = opts[:account] || Account.default
    @user = opts[:user] || account.shard.activate{ user(opts) }
    @admin = @user
    account_user = @user.account_users.build(:account => account, :membership_type => opts[:membership_type] || 'AccountAdmin')
    account_user.shard = account.shard
    account_user.save!
    @user
  end

  def site_admin_user(opts={})
    @user = opts[:user] || user(opts)
    @admin = @user
    Account.site_admin.add_user(@user, opts[:membership_type] || 'AccountAdmin')
    @user
  end

  def user(opts={})
    @user = User.create!(opts.slice(:name, :short_name))
    @user.register! if opts[:active_user] || opts[:active_all]
    @user.update_attribute :workflow_state, opts[:user_state] if opts[:user_state]
    @user
  end

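  # Creates a user (unless opts[:user] is given) along with a pseudonym and
  # communication channel; @user, @pseudonym and @cc are set as side effects.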
  def user_with_pseudonym(opts={})
    user(opts) unless opts[:user]
    user = opts[:user] || @user
    @pseudonym = pseudonym(user, opts)
    user
  end

  def communication_channel(user, opts={})
    username = opts[:username] || "nobody@example.com"
    @cc = user.communication_channels.create!(:path_type => 'email', :path => username) do |cc|
      cc.workflow_state = 'active' if opts[:active_cc] || opts[:active_all]
      cc.workflow_state = opts[:cc_state] if opts[:cc_state]
    end
    @cc.should_not be_nil
    @cc.should_not be_new_record
    @cc
  end

  def user_with_communication_channel(opts={})
    user(opts) unless opts[:user]
    user = opts[:user] || @user
    @cc = communication_channel(user, opts)
    user
  end

  def pseudonym(user, opts={})
    @spec_pseudonym_count ||= 0
    username = opts[:username] || (@spec_pseudonym_count > 0 ? "nobody+#{@spec_pseudonym_count}@example.com" : "nobody@example.com")
    opts[:username] ||= username
    @spec_pseudonym_count += 1 if username =~ /nobody(\+\d+)?@example.com/
    password = opts[:password] || "asdfasdf"
    password = nil if password == :autogenerate
    @pseudonym = user.pseudonyms.create!(:account => opts[:account] || Account.default, :unique_id => username, :password => password, :password_confirmation => password)
    @pseudonym.communication_channel = communication_channel(user, opts)
    @pseudonym
  end

  def managed_pseudonym(user, opts={})
    other_account = opts[:account] || account_with_saml
    if other_account.password_authentication?
      config = AccountAuthorizationConfig.new
      config.auth_type = "saml"
      config.log_in_url = opts[:saml_log_in_url] if opts[:saml_log_in_url]
      other_account.account_authorization_configs << config
    end
    opts[:account] = other_account
    pseudonym(user, opts)
    @pseudonym.sis_user_id = opts[:sis_user_id] || "U001"
    @pseudonym.save!
    @pseudonym.should be_managed_password
    @pseudonym
  end

  def user_with_managed_pseudonym(opts={})
    user(opts) unless opts[:user]
    user = opts[:user] || @user
    managed_pseudonym(user, opts)
    user
  end

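  # Generic enrollment helper used by the course_with_* methods below: creates
  # (or reuses) a course and user, enrolls the user with the given enrollment
  # type, and activates the enrollment when :active_enrollment or :active_all is set.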
  def course_with_user(enrollment_type, opts={})
    @course = opts[:course] || course(opts)
    @user = opts[:user] || @course.shard.activate{ user(opts) }
    @enrollment = @course.enroll_user(@user, enrollment_type, opts)
    @enrollment.course = @course # set the reverse association
    if opts[:active_enrollment] || opts[:active_all]
      @enrollment.workflow_state = 'active'
      @enrollment.save!
    end
    @course.reload
    @enrollment
  end

  def course_with_student(opts={})
    course_with_user('StudentEnrollment', opts)
    @student = @user
    @enrollment
  end

  def course_with_ta(opts={})
    course_with_user("TaEnrollment", opts)
    @ta = @user
    @enrollment
  end

  def course_with_student_logged_in(opts={})
    course_with_student(opts)
    user_session(@user)
  end

  def student_in_course(opts={})
    opts[:course] = @course if @course && !opts[:course]
    course_with_student(opts)
  end

  def student_in_section(section, opts={})
    user
    enrollment = section.course.enroll_user(@user, 'StudentEnrollment', :section => section)
    enrollment.workflow_state = 'active'
    enrollment.save!
    @user
  end

  def teacher_in_course(opts={})
    opts[:course] = @course if @course && !opts[:course]
    course_with_teacher(opts)
  end

  def course_with_teacher(opts={})
    course_with_user('TeacherEnrollment', opts)
    @teacher = @user
    @enrollment
  end

  def course_with_designer(opts={})
    course_with_user('DesignerEnrollment', opts)
    @designer = @user
    @enrollment
  end

  def course_with_teacher_logged_in(opts={})
    course_with_teacher(opts)
    user_session(@user)
  end

  def course_with_observer(opts={})
    course_with_user('ObserverEnrollment', opts)
    @observer = @user
    @enrollment
  end

  def course_with_observer_logged_in(opts={})
    course_with_observer(opts)
    user_session(@user)
  end

  def add_section(section_name)
    @course_section = @course.course_sections.create!(:name => section_name)
    @course.reload
  end

  def multiple_student_enrollment(user, section)
    @enrollment = @course.enroll_student(user,
                                         :enrollment_state => "active",
                                         :section => section,
                                         :allow_multiple_enrollments => true)
  end

  def enter_student_view(opts={})
    course = opts[:course] || @course || course(opts)
    @fake_student = course.student_view_student
    post "/users/#{@fake_student.id}/masquerade"
    session[:become_user_id].should == @fake_student.id.to_s
  end

  VALID_GROUP_ATTRIBUTES = [:name, :context, :max_membership, :group_category, :join_level, :description, :is_public, :avatar_attachment]
  def group(opts={})
    @group = (opts[:group_context].try(:groups) || Group).create! opts.slice(*VALID_GROUP_ATTRIBUTES)
  end

  def group_with_user(opts={})
    group(opts)
    u = opts[:user] || user(opts)
    workflow_state = opts[:active_all] ? 'accepted' : nil
    @group.add_user(u, workflow_state, opts[:moderator])
  end

  def group_with_user_logged_in(opts={})
    group_with_user(opts)
    user_session(@user)
  end

  def custom_role(base, name, opts={})
    account = opts[:account] || @account
    role = account.roles.find_by_name(name)
    role ||= account.roles.create :name => name
    role.base_role_type = base
    role.save!
    role
  end
  def custom_student_role(name, opts={})
    custom_role('StudentEnrollment', name, opts)
  end
  def custom_teacher_role(name, opts={})
    custom_role('TeacherEnrollment', name, opts)
  end
  def custom_ta_role(name, opts={})
    custom_role('TaEnrollment', name, opts)
  end
  def custom_designer_role(name, opts={})
    custom_role('DesignerEnrollment', name, opts)
  end
  def custom_observer_role(name, opts={})
    custom_role('ObserverEnrollment', name, opts)
  end
  def custom_account_role(name, opts={})
    custom_role(AccountUser::BASE_ROLE_NAME, name, opts)
  end

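  # Stubs out PseudonymSession so the given user appears logged in for
  # controller/integration specs, without going through the real login flow.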
  def user_session(user, pseudonym=nil)
    unless pseudonym
      pseudonym = stub(:record => user, :user_id => user.id, :user => user, :login_count => 1)
      # at least one thing cares about the id of the pseudonym... using the
      # object_id should make it unique (but obviously things will fail if
      # it tries to load it from the db.)
      pseudonym.stubs(:id).returns(pseudonym.object_id)
    end

    session = stub(:record => pseudonym, :session_credentials => nil, :used_basic_auth? => false)

    PseudonymSession.stubs(:find).returns(session)
  end

  def login_as(username = "nobody@example.com", password = "asdfasdf")
    post_via_redirect "/login",
      "pseudonym_session[unique_id]" => username,
      "pseudonym_session[password]" => password
    assert_response :success
    path.should eql("/?login_success=1")
  end

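  # Builds an available assignment backed by an online quiz, creating a quiz
  # question from each entry in `questions` and enrolling opts[:user] in the
  # course if needed. Sets @assignment, @quiz and @questions.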
  def assignment_quiz(questions, opts={})
    course = opts[:course] || course(:active_course => true)
    user = opts[:user] || user(:active_user => true)
    course.enroll_student(user, :enrollment_state => 'active') unless user.enrollments.any?{|e| e.course_id == course.id}
    @assignment = course.assignments.create(:title => "Test Assignment")
    @assignment.workflow_state = "available"
    @assignment.submission_types = "online_quiz"
    @assignment.save
    @quiz = Quiz.find_by_assignment_id(@assignment.id)
    @questions = questions.map { |q| @quiz.quiz_questions.create!(q) }
    @quiz.generate_quiz_data
    @quiz.published_at = Time.now
    @quiz.workflow_state = "available"
    @quiz.save!
  end

  # The block should return the submission_data. A block is used so
  # that we have access to the @questions variable that is created
  # in this method
  def quiz_with_graded_submission(questions, opts={}, &block)
    assignment_quiz(questions, opts)
    @quiz_submission = @quiz.generate_submission(@user)
    @quiz_submission.mark_completed
    @quiz_submission.submission_data = yield if block_given?
    @quiz_submission.grade_submission
  end

  def survey_with_submission(questions, &block)
    course_with_student(:active_all => true)
    @assignment = @course.assignments.create(:title => "Test Assignment")
    @assignment.workflow_state = "available"
    @assignment.submission_types = "online_quiz"
    @assignment.save
    @quiz = Quiz.find_by_assignment_id(@assignment.id)
    @quiz.anonymous_submissions = true
    @quiz.quiz_type = "graded_survey"
    @questions = questions.map { |q| @quiz.quiz_questions.create!(q) }
    @quiz.generate_quiz_data
    @quiz.save!
    @quiz_submission = @quiz.generate_submission(@user)
    @quiz_submission.mark_completed
    @quiz_submission.submission_data = yield if block_given?
  end

  def group_discussion_assignment
    course = @course || course(:active_all => true)
    group_category = course.group_categories.create!(:name => "category")
    @group1 = course.groups.create!(:name => "group 1", :group_category => group_category)
    @group2 = course.groups.create!(:name => "group 2", :group_category => group_category)

    @topic = course.discussion_topics.build(:title => "topic")
    @assignment = course.assignments.build(:submission_types => 'discussion_topic', :title => @topic.title, :group_category => @group1.group_category)
    @assignment.infer_times
    @assignment.saved_by = :discussion_topic
    @topic.assignment = @assignment
    @topic.save!
  end

  def rubric_for_course
    @rubric = Rubric.new(:title => 'My Rubric', :context => @course)
    @rubric.data = [
      {
        :points => 3,
        :description => "First row",
        :long_description => "The first row in the rubric",
        :id => 1,
        :ratings => [
          {
            :points => 3,
            :description => "Rockin'",
            :criterion_id => 1,
            :id => 2
          },
          {
            :points => 2,
            :description => "Rockin'",
            :criterion_id => 1,
            :id => 3
          },
          {
            :points => 0,
            :description => "Lame",
            :criterion_id => 1,
            :id => 4
          }
        ]
      }
    ]
    @rubric.save!
  end

  def outcome_with_rubric(opts={})
    @outcome_group ||= @course.root_outcome_group
    @outcome = @course.created_learning_outcomes.create!(:description => '<p>This is <b>awesome</b>.</p>', :short_description => 'new outcome')
    @outcome_group.add_outcome(@outcome)
    @outcome_group.save!

    @rubric = Rubric.generate(:context => @course,
                              :data => {
                                :title => 'My Rubric',
                                :hide_score_total => false,
                                :criteria => {
                                  "0" => {
                                    :points => 3,
                                    :mastery_points => 0,
                                    :description => "Outcome row",
                                    :long_description => @outcome.description,
                                    :ratings => {
                                      "0" => {
                                        :points => 3,
                                        :description => "Rockin'",
                                      },
                                      "1" => {
                                        :points => 0,
                                        :description => "Lame",
                                      }
                                    },
                                    :learning_outcome_id => @outcome.id
                                  },
                                  "1" => {
                                    :points => 5,
                                    :description => "no outcome row",
                                    :long_description => 'non outcome criterion',
                                    :ratings => {
                                      "0" => {
                                        :points => 5,
                                        :description => "Amazing",
                                      },
                                      "1" => {
                                        :points => 3,
                                        :description => "not too bad",
                                      },
                                      "2" => {
                                        :points => 0,
                                        :description => "no bueno",
                                      }
                                    }
                                  }
                                }
                              })
    @rubric.instance_variable_set('@alignments_changed', true)
    @rubric.save!
    @rubric.update_alignments
  end

  def grading_standard_for(context, opts={})
    @standard = context.grading_standards.create!(
      :title => opts[:title] || "My Grading Standard",
      :standard_data => {
        "scheme_0" => {:name => "A", :value => "0.9"},
        "scheme_1" => {:name => "B", :value => "0.8"},
        "scheme_2" => {:name => "C", :value => "0.7"}
      })
  end

  def eportfolio(opts={})
    user(opts)
    @portfolio = @user.eportfolios.create!
  end
  def eportfolio_with_user(opts={})
    eportfolio(opts)
  end
  def eportfolio_with_user_logged_in(opts={})
    eportfolio_with_user(opts)
    user_session(@user)
  end

  def conversation(*users)
    options = users.last.is_a?(Hash) ? users.pop : {}
    @conversation = (options.delete(:sender) || @me || users.shift).initiate_conversation(users)
    @message = @conversation.add_message('test')
    @conversation.update_attributes(options)
    @conversation.reload
  end

  def media_object(opts={})
    mo = MediaObject.new
    mo.media_id = opts[:media_id] || "1234"
    mo.media_type = opts[:media_type] || "video"
    mo.context = opts[:context] || @user || @course
    mo.user = opts[:user] || @user
    mo.save!
  end

  def message(opts={})
    m = Message.new
    m.to = opts[:to] || 'some_user'
    m.from = opts[:from] || 'some_other_user'
    m.subject = opts[:subject] || 'a message for you'
    m.body = opts[:body] || 'nice body'
    m.sent_at = opts[:sent_at] || 5.days.ago
    m.workflow_state = opts[:workflow_state] || 'sent'
    m.user_id = opts[:user_id] || opts[:user].try(:id)
    m.path_type = opts[:path_type] || 'email'
    m.root_account_id = opts[:account_id] || Account.default.id
    m.save!
    m
  end

  def assert_status(status=500)
    response.status.to_i.should eql(status)
  end

  def assert_unauthorized
    assert_status(401) #unauthorized
    # response.headers['Status'].should eql('401 Unauthorized')
    response.should render_template("shared/unauthorized")
  end

  def assert_require_login
    response.should be_redirect
    flash[:warning].should eql("You must be logged in to access this page")
  end

  def default_uploaded_data
    require 'action_controller'
    require 'action_controller/test_process.rb'
    ActionController::TestUploadedFile.new(File.expand_path(File.dirname(__FILE__) + '/fixtures/scribd_docs/doc.doc'), 'application/msword', true)
  end

  def valid_gradebook_csv_content
    File.read(File.expand_path(File.join(File.dirname(__FILE__), %w(fixtures default_gradebook.csv))))
  end

  def factory_with_protected_attributes(ar_klass, attrs, do_save = true)
    obj = ar_klass.respond_to?(:new) ? ar_klass.new : ar_klass.build
    attrs.each { |k,v| obj.send("#{k}=", attrs[k]) }
    obj.save! if do_save
    obj
  end

  def update_with_protected_attributes!(ar_instance, attrs)
    attrs.each { |k,v| ar_instance.send("#{k}=", attrs[k]) }
    ar_instance.save!
  end

  def update_with_protected_attributes(ar_instance, attrs)
    update_with_protected_attributes!(ar_instance, attrs) rescue false
  end

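  # Writes the given CSV lines to a temp file and runs them through
  # SIS::CSV::Import against @account (creating one via account_model if needed).
  # Any hash arguments are merged into the importer options.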
  def process_csv_data(*lines_or_opts)
    account_model unless @account

    lines = lines_or_opts.reject{|thing| thing.is_a? Hash}
    opts = lines_or_opts.select{|thing| thing.is_a? Hash}.inject({:allow_printing => false}, :merge)

    tmp = Tempfile.new("sis_rspec")
    path = "#{tmp.path}.csv"
    tmp.close!
    File.open(path, "w+") { |f| f.puts lines.flatten.join "\n" }
    opts[:files] = [path]

    importer = SIS::CSV::Import.process(@account, opts)

    File.unlink path

    importer
  end

  def process_csv_data_cleanly(*lines_or_opts)
    importer = process_csv_data(*lines_or_opts)
    importer.errors.should == []
    importer.warnings.should == []
  end

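  # Swaps RAILS_CACHE and the controller cache store for the given store (an
  # in-memory cache by default) and turns on perform_caching for the duration
  # of the block, restoring the originals afterwards.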
  def enable_cache(new_cache = ActiveSupport::Cache::MemoryStore.new)
    old_cache = RAILS_CACHE
    ActionController::Base.cache_store = new_cache
    silence_warnings { Object.const_set(:RAILS_CACHE, new_cache) }
    old_perform_caching = ActionController::Base.perform_caching
    ActionController::Base.perform_caching = true
    yield
  ensure
    silence_warnings { Object.const_set(:RAILS_CACHE, old_cache) }
    ActionController::Base.cache_store = old_cache
    ActionController::Base.perform_caching = old_perform_caching
  end

  # enforce forgery protection, so we can verify usage of the authenticity token
  def enable_forgery_protection(enable = true)
    old_value = ActionController::Base.allow_forgery_protection
    ActionController::Base.stubs(:allow_forgery_protection).including_subclasses.returns(enable)

    yield if block_given?

  ensure
    ActionController::Base.stubs(:allow_forgery_protection).including_subclasses.returns(old_value) if block_given?
  end

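  # Spins up a bare TCP server on a random port that accepts `requests` HTTP
  # posts, records the request lines and payloads into post_lines, and replies
  # with an empty 200. Returns [server, server_thread, post_lines]; see
  # verify_post_matches further down for checking the captured request.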
  def start_test_http_server(requests=1)
    post_lines = []
    server = TCPServer.open(0)
    port = server.addr[1]
    post_lines = []
    server_thread = Thread.new(server, post_lines) do |server, post_lines|
      requests.times do
        client = server.accept
        content_length = 0
        loop do
          line = client.readline
          post_lines << line.strip unless line =~ /\AHost: localhost:|\AContent-Length: /
          content_length = line.split(":")[1].to_i if line.strip =~ /\AContent-Length: [0-9]+\z/
          if line.strip.blank?
            post_lines << client.read(content_length)
            break
          end
        end
        client.puts("HTTP/1.1 200 OK\nContent-Length: 0\n\n")
        client.close
      end
      server.close
    end
    return server, server_thread, post_lines
  end

  def stub_kaltura
    # trick kaltura into being activated
    Kaltura::ClientV3.stubs(:config).returns({
      'domain' => 'kaltura.example.com',
      'resource_domain' => 'kaltura.example.com',
      'partner_id' => '100',
      'subpartner_id' => '10000',
      'secret_key' => 'fenwl1n23k4123lk4hl321jh4kl321j4kl32j14kl321',
      'user_secret_key' => '1234821hrj3k21hjk4j3kl21j4kl321j4kl3j21kl4j3k2l1',
      'player_ui_conf' => '1',
      'kcw_ui_conf' => '1',
      'upload_ui_conf' => '1'
    })
  end

  def attachment_obj_with_context(obj, opts={})
    @attachment = factory_with_protected_attributes(Attachment, valid_attachment_attributes.merge(opts))
    @attachment.context = obj
    @attachment
  end

  def attachment_with_context(obj, opts={})
    attachment_obj_with_context(obj, opts)
    @attachment.save!
    @attachment
  end

  def json_parse(json_string = response.body)
    JSON.parse(json_string.sub(%r{^while\(1\);}, ''))
  end

  # inspired by http://blog.jayfields.com/2007/08/ruby-calling-methods-of-specific.html
  module AttachmentStorageSwitcher
    BACKENDS = %w{FileSystem S3}.map { |backend| Technoweenie::AttachmentFu::Backends.const_get(:"#{backend}Backend") }.freeze

    class As #:nodoc:
      private *instance_methods.select { |m| m !~ /(^__|^\W|^binding$)/ }

      def initialize(subject, ancestor)
        @subject = subject
        @ancestor = ancestor
      end

      def method_missing(sym, *args, &blk)
        @ancestor.instance_method(sym).bind(@subject).call(*args,&blk)
      end
    end

    def self.included(base)
      base.cattr_accessor :current_backend
      base.current_backend = (base.ancestors & BACKENDS).first

      # make sure we have all the backends
      BACKENDS.each do |backend|
        base.send(:include, backend) unless base.ancestors.include?(backend)
      end
      # remove the duplicate callbacks added by multiple backends
      base.before_update.uniq!

      BACKENDS.map(&:instance_methods).flatten.uniq.each do |method|
        # overridden by Attachment anyway; don't re-overwrite it
        next if Attachment.instance_method(method).owner == Attachment
        if method.to_s[-1..-1] == '='
          base.class_eval <<-CODE
            def #{method}(arg)
              self.as(self.class.current_backend).#{method} arg
            end
          CODE
        else
          base.class_eval <<-CODE
            def #{method}(*args, &block)
              self.as(self.class.current_backend).#{method}(*args, &block)
            end
          CODE
        end
      end
    end

    def as(ancestor)
      @__as ||= {}
      unless r = @__as[ancestor]
        r = (@__as[ancestor] = As.new(self, ancestor))
      end
      r
    end
  end

  def s3_storage!(opts = {:stubs => true})
    Attachment.send(:include, AttachmentStorageSwitcher) unless Attachment.ancestors.include?(AttachmentStorageSwitcher)
    Attachment.stubs(:current_backend).returns(Technoweenie::AttachmentFu::Backends::S3Backend)

    Attachment.stubs(:s3_storage?).returns(true)
    Attachment.stubs(:local_storage?).returns(false)
    if opts[:stubs]
      conn = mock('AWS::S3::Client')
      AWS::S3::S3Object.any_instance.stubs(:client).returns(conn)
      AWS::Core::Configuration.any_instance.stubs(:access_key_id).returns('stub_id')
      AWS::Core::Configuration.any_instance.stubs(:secret_access_key).returns('stub_key')
      AWS::S3::Bucket.any_instance.stubs(:name).returns('no-bucket')
    else
      if Attachment.s3_config.blank? || Attachment.s3_config[:access_key_id] == 'access_key'
        pending "Please put valid S3 credentials in config/amazon_s3.yml"
      end
    end
    Attachment.s3_storage?.should eql(true)
    Attachment.local_storage?.should eql(false)
  end

  def local_storage!
    Attachment.send(:include, AttachmentStorageSwitcher) unless Attachment.ancestors.include?(AttachmentStorageSwitcher)
    Attachment.stubs(:current_backend).returns(Technoweenie::AttachmentFu::Backends::FileSystemBackend)

    Attachment.stubs(:s3_storage?).returns(false)
    Attachment.stubs(:local_storage?).returns(true)
    Attachment.local_storage?.should eql(true)
    Attachment.s3_storage?.should eql(false)
    Attachment.local_storage?.should eql(true)
  end

  def run_job(job)
    Delayed::Worker.new.perform(job)
  end

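  # Works the Delayed::Job queue inline until it is empty, running each locked
  # job with run_job.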
  def run_jobs
    while job = Delayed::Job.get_and_lock_next_available(
      'spec run_jobs',
      Delayed::Worker.queue,
      0,
      Delayed::MAX_PRIORITY)
      run_job(job)
    end
  end

  def track_jobs
    @jobs_tracking = Delayed::JobTracking.track { yield }
  end

  def created_jobs
    @jobs_tracking.created
  end

  def expects_job_with_tag(tag, count = 1)
    track_jobs do
      yield
    end
    created_jobs.count { |j| j.tag == tag }.should == count
  end

  # send a multipart post request in an integration spec. post_params is
  # an array of [k,v] params so that the order of the params can be
  # defined
  def send_multipart(url, post_params = {}, http_headers = {}, method = :post)
    mp = Multipart::MultipartPost.new
    query, headers = mp.prepare_query(post_params)
    send(method, url, query, headers.merge(http_headers))
  end

  def run_transaction_commit_callbacks(conn = ActiveRecord::Base.connection)
    conn.after_transaction_commit_callbacks.each { |cb| cb.call }
    conn.after_transaction_commit_callbacks.clear
  end

  def force_string_encoding(str, encoding = "UTF-8")
    if str.respond_to?(:force_encoding)
      str.force_encoding(encoding)
    end
    str
  end

  # from minitest, MIT licensed
  def capture_io
    orig_stdout, orig_stderr = $stdout, $stderr
    $stdout, $stderr = StringIO.new, StringIO.new
    yield
    return $stdout.string, $stderr.string
  ensure
    $stdout, $stderr = orig_stdout, orig_stderr
  end

  def verify_post_matches(post_lines, expected_post_lines)
    # first lines should match
    post_lines[0].should == expected_post_lines[0]

    # now extract the headers
    post_headers = post_lines[1..post_lines.index("")]
    expected_post_headers = expected_post_lines[1..expected_post_lines.index("")]
    expected_post_headers << "User-Agent: Ruby"
    post_headers.sort.should == expected_post_headers.sort

    # now check the payload (everything from the blank line to the end);
    # a range is used because Array#[](start, -1) would just return nil
    post_lines[post_lines.index("")..-1].should ==
      expected_post_lines[expected_post_lines.index("")..-1]
  end

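  # Recursively compares two JSON-ish structures, descending into hashes and
  # arrays and comparing leaves by their JSON representation.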
  def compare_json(actual, expected)
    if actual.is_a?(Hash)
      actual.each do |k,v|
        expected_v = expected[k]
        compare_json(v, expected_v)
      end
    elsif actual.is_a?(Array)
      actual.zip(expected).each do |a,e|
        compare_json(a,e)
      end
    else
      actual.to_json.should == expected.to_json
    end
  end

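  # Minimal stand-in for an HTTP response object (Net::HTTP-style), exposing
  # just the pieces the specs poke at: status code, body streaming via
  # read_body, and header lookup.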
  class FakeHttpResponse
    def initialize(code, body = nil, headers={})
      @code = code
      @body = body
      @headers = headers
    end

    def read_body(io)
      io << @body
    end

    def code
      @code.to_s
    end

    def [](arg)
      @headers[arg]
    end

    def content_type
      self['content-type']
    end
  end

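  # Normalizes Time objects to integer epoch seconds anywhere in a nested
  # hash/array structure, so comparisons don't depend on Time formatting or
  # sub-second precision.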
  def intify_timestamps(object)
    case object
    when Time
      object.to_i
    when Hash
      object.inject({}) { |memo, (k, v)| memo[intify_timestamps(k)] = intify_timestamps(v); memo }
    when Array
      object.map { |v| intify_timestamps(v) }
    else
      object
    end
  end

  def web_conference_plugin_mock(id, settings)
    mock = mock("WebConferencePlugin")
    mock.stubs(:id).returns(id)
    mock.stubs(:settings).returns(settings)
    mock.stubs(:valid_settings?).returns(true)
    mock.stubs(:enabled?).returns(true)
    mock.stubs(:base).returns(nil)
    mock
  end
end

Dir[Rails.root+'vendor/plugins/*/spec_canvas/spec_helper.rb'].each do |f|
  require f
end