#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#

class ContentMigration < ActiveRecord::Base
  include Workflow
  include TextHelper

  belongs_to :context, :polymorphic => true
  validates_inclusion_of :context_type, :allow_nil => true, :in => ['Course', 'Account', 'Group', 'User']
  validate :valid_date_shift_options
  belongs_to :user
  belongs_to :attachment
  belongs_to :overview_attachment, :class_name => 'Attachment'
  belongs_to :exported_attachment, :class_name => 'Attachment'
  belongs_to :source_course, :class_name => 'Course'
  has_one :content_export
  has_many :migration_issues
  has_one :job_progress, :class_name => 'Progress', :as => :context
  serialize :migration_settings
  cattr_accessor :export_file_path
  after_save :handle_import_in_progress_notice

  DATE_FORMAT = "%m/%d/%Y"

  attr_accessible :context, :migration_settings, :user, :source_course, :copy_options, :migration_type, :initiated_source
  attr_accessor :imported_migration_items, :outcome_to_id_map

  workflow do
    state :created
    # The pre_process states can be used by individual plugins as needed
    state :pre_processing
    state :pre_processed
    state :pre_process_error
    state :exporting
    state :exported
    state :importing
    state :imported
    state :failed
  end

  def self.migration_plugins(exclude_hidden=false)
    plugins = Canvas::Plugin.all_for_tag(:export_system)
    exclude_hidden ? plugins.select{|p|!p.meta[:hide_from_users]} : plugins
  end

  set_policy do
    given { |user, session| self.context.grants_right?(user, session, :manage_files) }
    can :manage_files and can :read
  end

  # The stream item context is decided by calling asset.context(user), to
  # differentiate from the normal asset.context() call that may not give us
  # the context we want. In this case, they're one and the same.
  alias_method :original_context, :context
  def context(user = nil)
    self.original_context
  end

  def quota_context
    self.context
  end

  def migration_settings
    read_attribute(:migration_settings) || write_attribute(:migration_settings, {}.with_indifferent_access)
  end

  def update_migration_settings(new_settings)
    new_settings.each do |key, val|
      migration_settings[key] = val
    end
  end

  def import_immediately?
    !!migration_settings[:import_immediately]
  end

  def converter_class=(c_class)
    migration_settings[:converter_class] = c_class
  end

  def converter_class
    migration_settings[:converter_class]
  end

  def strand=(s)
    migration_settings[:strand] = s
  end

  def strand
    migration_settings[:strand]
  end

  def initiated_source
    migration_settings[:initiated_source] || :manual
  end

  def initiated_source=(value)
    migration_settings[:initiated_source] = value
  end

  def n_strand
    ["migrations:import_content", self.root_account.try(:global_id) || "global"]
  end

  def migration_ids_to_import=(val)
    migration_settings[:migration_ids_to_import] = val
    set_date_shift_options val[:copy]
  end

  def zip_path=(val)
    migration_settings[:export_archive_path] = val
  end

  def zip_path
    (migration_settings || {})[:export_archive_path]
  end

  def question_bank_name=(name)
    if name && name.strip != ''
      migration_settings[:question_bank_name] = name.strip
    end
  end

  def question_bank_name
    migration_settings[:question_bank_name]
  end

  def question_bank_id=(bank_id)
    migration_settings[:question_bank_id] = bank_id
  end

  def question_bank_id
    migration_settings[:question_bank_id]
  end

  def course_archive_download_url=(url)
    migration_settings[:course_archive_download_url] = url
  end

  def skip_job_progress=(val)
    if val
      migration_settings[:skip_job_progress] = true
    else
      migration_settings.delete(:skip_job_progress)
    end
  end

  def skip_job_progress
    !!migration_settings[:skip_job_progress]
  end

  def root_account
    self.context.root_account rescue nil
  end

  def migration_type
    read_attribute(:migration_type) || migration_settings['migration_type']
  end

  def plugin_type
    if plugin = Canvas::Plugin.find(migration_type)
      plugin.metadata(:select_text) || plugin.name
    else
      t(:unknown, 'Unknown')
    end
  end

  def canvas_import?
    migration_settings[:worker_class] == CC::Importer::Canvas::Converter.name
  end

  # Adds a todo/error/warning issue to the import. user_message is what will
  # be displayed to the end user.
  # type must be one of: :todo, :warning, :error
  #
  # The possible opts keys are:
  #
  #   error_message      - an admin-only error message
  #   exception          - an exception object
  #   error_report_id    - the id of an error report
  #   fix_issue_html_url - the url to send the user to in order to fix the problem
  #
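  # A minimal usage sketch (hypothetical values, not taken from a real import):
  #
  #   migration.add_issue('Quiz "Week 1" could not be imported.', :warning,
  #     :error_message => 'missing question bank reference',
  #     :fix_issue_html_url => 'http://example.com/courses/1/quizzes/2')
  #
  # add_todo, add_error, and add_warning below are thin wrappers that fill in
  # the type argument.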
  def add_issue(user_message, type, opts={})
    mi = self.migration_issues.build(:issue_type => type.to_s, :description => user_message)
    if opts[:error_report_id]
      mi.error_report_id = opts[:error_report_id]
    elsif opts[:exception]
      er = ErrorReport.log_exception(:content_migration, opts[:exception])
      mi.error_report_id = er.id
    end
    mi.error_message = opts[:error_message]
    mi.fix_issue_html_url = opts[:fix_issue_html_url]

    # prevent duplicates
    if self.migration_issues.where(mi.attributes.slice(
        "issue_type", "description", "error_message", "fix_issue_html_url")).any?
      mi.delete
    else
      mi.save!
    end

    mi
  end

  def add_todo(user_message, opts={})
    add_issue(user_message, :todo, opts)
  end

  def add_error(user_message, opts={})
    add_issue(user_message, :error, opts)
  end

  def add_warning(user_message, opts={})
    if !opts.is_a? Hash
      # convert deprecated behavior to new
      exception_or_info = opts
      opts = {}
      if exception_or_info.is_a?(Exception)
        opts[:exception] = exception_or_info
      else
        opts[:error_message] = exception_or_info
      end
    end
    add_issue(user_message, :warning, opts)
  end

  def add_import_warning(item_type, item_name, warning)
    item_name = CanvasTextHelper.truncate_text(item_name || "", :max_length => 150)
    add_warning(t('errors.import_error', "Import Error: ") + "#{item_type} - \"#{item_name}\"", warning)
  end

  def fail_with_error!(exception_or_info)
    opts = {}
    if exception_or_info.is_a?(Exception)
      opts[:exception] = exception_or_info
    else
      opts[:error_message] = exception_or_info
    end
    add_error(t(:unexpected_error, "There was an unexpected error, please contact support."), opts)
    self.workflow_state = :failed
    job_progress.fail if job_progress && !skip_job_progress
    save
  end

  # deprecated warning format
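  # Returns an array of [description, message] pairs, e.g. (illustrative only):
  #   [["Couldn't import the quiz", "ErrorReport:1234"]]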
  def old_warnings_format
    self.migration_issues.map do |mi|
      message = mi.error_report_id ? "ErrorReport:#{mi.error_report_id}" : mi.error_message
      [mi.description, message]
    end
  end

  def warnings
    old_warnings_format.map(&:first)
  end

  # This will be called by the files api after the attachment finishes uploading
  def file_upload_success_callback(att)
    if att.file_state == "available"
      self.attachment = att
      self.migration_issues.delete_all if self.migration_issues.any?
      self.workflow_state = :pre_processed
      self.save
      self.queue_migration
    else
      self.workflow_state = :pre_process_error
      self.add_warning(t('bad_attachment', "The file was not successfully uploaded."))
    end
  end

  def reset_job_progress(wf_state=:queued)
    return if skip_job_progress
    self.progress = 0
    if self.job_progress
      p = self.job_progress
    else
      p = Progress.new(:context => self, :tag => "content_migration")
      self.job_progress = p
    end
    p.workflow_state = wf_state
    p.completion = 0
    p.user = self.user
    p.save!
    p
  end

  def queue_migration(plugin=nil)
    reset_job_progress

    set_default_settings
    plugin ||= Canvas::Plugin.find(migration_type)
    if plugin
      queue_opts = {:priority => Delayed::LOW_PRIORITY, :max_attempts => 1}
      if self.strand
        queue_opts[:strand] = self.strand
      else
        queue_opts[:n_strand] = self.n_strand
      end

      if self.workflow_state == 'exported' && !plugin.settings[:skip_conversion_step]
        # it's ready to be imported
        self.workflow_state = :importing
        self.save
        self.send_later_enqueue_args(:import_content, queue_opts)
      else
        # find worker and queue for conversion
        begin
          if Canvas::Migration::Worker.const_defined?(plugin.settings['worker'])
            self.workflow_state = :exporting
            worker_class = Canvas::Migration::Worker.const_get(plugin.settings['worker'])
            job = Delayed::Job.enqueue(worker_class.new(self.id), queue_opts)
            self.save
            job
          else
            raise NameError
          end
        rescue NameError
          self.workflow_state = 'failed'
          message = "The migration plugin #{migration_type} doesn't have a worker."
          migration_settings[:last_error] = message
          ErrorReport.log_exception(:content_migration, $!)
          logger.error message
          self.save
        end
      end
    else
      self.workflow_state = 'failed'
      message = "No migration plugin of type #{migration_type} found."
      migration_settings[:last_error] = message
      logger.error message
      self.save
    end
  end
  alias_method :export_content, :queue_migration

  def set_default_settings
    if self.context && self.context.respond_to?(:root_account) && account = self.context.root_account
      if default_ms = account.settings[:default_migration_settings]
        self.migration_settings = default_ms.merge(self.migration_settings).with_indifferent_access
      end
    end

    if !self.migration_settings.has_key?(:overwrite_quizzes)
      self.migration_settings[:overwrite_quizzes] = for_course_copy? || (self.migration_type && self.migration_type == 'canvas_cartridge_importer')
    end

    check_quiz_id_prepender
  end

  def process_domain_substitutions(url)
    unless @domain_substitution_map
      @domain_substitution_map = {}
      (self.migration_settings[:domain_substitution_map] || {}).each do |k, v|
        @domain_substitution_map[k.to_s] = v.to_s # ensure strings
      end
    end

    @domain_substitution_map.each do |from_domain, to_domain|
      if url.start_with?(from_domain)
        return url.sub(from_domain, to_domain)
      end
    end

    url
  end
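
  # A minimal sketch of how the substitution map is expected to look
  # (the domains below are made up):
  #
  #   migration.migration_settings[:domain_substitution_map] = {
  #     "http://old.example.com" => "http://new.example.com"
  #   }
  #   migration.process_domain_substitutions("http://old.example.com/files/1")
  #   # => "http://new.example.com/files/1"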

  def check_quiz_id_prepender
    return unless self.context.respond_to?(:assessment_questions)
    if !migration_settings[:id_prepender] && (!migration_settings[:overwrite_questions] || !migration_settings[:overwrite_quizzes])
      migration_settings[:id_prepender] = self.id
    end
  end

  def to_import(val)
    migration_settings[:migration_ids_to_import] && migration_settings[:migration_ids_to_import][:copy] && migration_settings[:migration_ids_to_import][:copy][val]
  end

  def import_everything?
    return true unless migration_settings[:migration_ids_to_import] && migration_settings[:migration_ids_to_import][:copy] && migration_settings[:migration_ids_to_import][:copy].length > 0
    return true if is_set?(to_import(:everything))
    return true if copy_options && copy_options[:everything]
    false
  end

  def import_object?(asset_type, mig_id)
    return false unless mig_id
    return true if import_everything?

    return true if is_set?(to_import("all_#{asset_type}"))

    return false unless to_import(asset_type).present?

    is_set?(to_import(asset_type)[mig_id])
  end

  def import_object!(asset_type, mig_id)
    return if import_everything?
    migration_settings[:migration_ids_to_import][:copy][asset_type] ||= {}
    migration_settings[:migration_ids_to_import][:copy][asset_type][mig_id] = '1'
  end
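
  # For reference, a selective-import settings hash roughly takes this shape
  # (the migration id below is made up):
  #
  #   migration_settings[:migration_ids_to_import] = {
  #     :copy => {
  #       :everything => '0',
  #       'assignments' => { 'i5873e37910744c0fd92cf2f46557f64e' => '1' }
  #     }
  #   }
  #
  #   import_object?('assignments', 'i5873e37910744c0fd92cf2f46557f64e') # => true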

  def is_set?(option)
    Canvas::Plugin::value_to_boolean option
  end

  def import_content
    reset_job_progress(:running) if !import_immediately?
    self.workflow_state = :importing
    self.save

    begin
      @exported_data_zip = download_exported_data
      @zip_file = Zip::File.open(@exported_data_zip.path)
      @exported_data_zip.close
      data = JSON.parse(@zip_file.read('course_export.json'), :max_nesting => 50)
      data = prepare_data(data)

      if @zip_file.find_entry('all_files.zip')
        # the file importer needs an actual file to process
        all_files_path = create_all_files_path(@exported_data_zip.path)
        @zip_file.extract('all_files.zip', all_files_path)
        data['all_files_export']['file_path'] = all_files_path
      else
        data['all_files_export']['file_path'] = nil
      end

      @zip_file.close

      migration_settings[:migration_ids_to_import] ||= {:copy => {}}

      Importers.content_importer_for(self.context_type).import_content(self.context, data, migration_settings[:migration_ids_to_import], self)

      if !self.import_immediately?
        update_import_progress(100)
      end
    rescue => e
      self.workflow_state = :failed
      er = ErrorReport.log_exception(:content_migration, e)
      migration_settings[:last_error] = "ErrorReport:#{er.id}"
      logger.error e
      self.save
      raise e
    ensure
      clear_migration_data
    end
  end
  alias_method :import_content_without_send_later, :import_content

  def prepare_data(data)
    data = data.with_indifferent_access if data.is_a? Hash
    Utf8Cleaner.recursively_strip_invalid_utf8!(data, true)
    data['all_files_export'] ||= {}
    data
  end

  def copy_options
    self.migration_settings[:copy_options]
  end

  def copy_options=(options)
    self.migration_settings[:copy_options] = options
    set_date_shift_options options
  end

  def for_course_copy?
    self.migration_type && self.migration_type == 'course_copy_importer'
  end

  def check_cross_institution
    return unless self.context.is_a?(Course)
    data = self.context.full_migration_hash
    return unless data
    source_root_account_uuid = data[:course] && data[:course][:root_account_uuid]
    @cross_institution = source_root_account_uuid && source_root_account_uuid != self.context.root_account.uuid
  end

  def cross_institution?
    @cross_institution
  end

  def set_date_shift_options(opts)
    if opts && (Canvas::Plugin.value_to_boolean(opts[:shift_dates]) || Canvas::Plugin.value_to_boolean(opts[:remove_dates]))
      self.migration_settings[:date_shift_options] = opts.slice(:shift_dates, :remove_dates, :old_start_date, :old_end_date, :new_start_date, :new_end_date, :day_substitutions, :time_zone)
    end
  end

  def date_shift_options
    self.migration_settings[:date_shift_options]
  end

  def valid_date_shift_options
    if date_shift_options && Canvas::Plugin.value_to_boolean(date_shift_options[:shift_dates]) && Canvas::Plugin.value_to_boolean(date_shift_options[:remove_dates])
      errors.add(:date_shift_options, t('errors.cannot_shift_and_remove', "cannot specify shift_dates and remove_dates simultaneously"))
    end
  end
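
  # Example of the date shift options this expects (the values here are
  # illustrative, not from a real course):
  #
  #   migration.copy_options = {
  #     :shift_dates => '1',
  #     :old_start_date => '2014-01-01', :old_end_date => '2014-05-01',
  #     :new_start_date => '2014-08-01', :new_end_date => '2014-12-01',
  #     :day_substitutions => { '1' => '2' }
  #   }
  #
  # Setting both :shift_dates and :remove_dates fails the validation above.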

  scope :for_context, lambda { |context| where(:context_id => context, :context_type => context.class.to_s) }

  scope :successful, -> { where(:workflow_state => 'imported') }
  scope :running, -> { where(:workflow_state => ['exporting', 'importing']) }
  scope :waiting, -> { where(:workflow_state => 'exported') }
  scope :failed, -> { where(:workflow_state => ['failed', 'pre_process_error']) }

  def complete?
    %w[imported failed pre_process_error].include?(workflow_state)
  end

  def download_exported_data
    raise "No exported data to import" unless self.exported_attachment
    config = ConfigFile.load('external_migration') || {}
    @exported_data_zip = self.exported_attachment.open(
      :need_local_file => true,
      :temp_folder => config[:data_folder])
    @exported_data_zip
  end

  def create_all_files_path(temp_path)
    "#{temp_path}_all_files.zip"
  end

  def clear_migration_data
    @zip_file.close if @zip_file
    @zip_file = nil
  end

  def finished_converting
    # TODO: finish progress if selective
  end

  # expects values between 0 and 100 for the conversion process
  def update_conversion_progress(prog)
    if import_immediately?
      fast_update_progress(prog * 0.5)
    else
      fast_update_progress(prog)
    end
  end

  # expects values between 0 and 100 for the import process
  def update_import_progress(prog)
    if import_immediately?
      fast_update_progress(50 + (prog * 0.5))
    else
      fast_update_progress(prog)
    end
  end

  def progress
    return nil if self.workflow_state == 'created'
    mig_prog = read_attribute(:progress) || 0
    if self.for_course_copy?
      # A course copy combines the progress of the export and the import:
      # the export counts for 40% of the total, and the import step
      # (the value of progress on this object) counts for the remaining 60%.
      mig_prog = mig_prog * 0.6

      if self.content_export
        export_prog = self.content_export.progress || 0
        mig_prog += export_prog * 0.4
      end
    end

    mig_prog
  end
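
  # Worked example of the weighting above (numbers are illustrative): if the
  # content export is 100% done and this record's progress attribute is 50,
  # #progress returns 0.6 * 50 + 0.4 * 100 = 70.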

  def fast_update_progress(val)
    reset_job_progress unless job_progress
    unless skip_job_progress
      if val == 100
        job_progress.completion = 100
        job_progress.workflow_state = 'completed'
        job_progress.save!
      else
        job_progress.update_completion!(val)
      end
    end
    # Until this progress is phased out
    self.progress = val
    ContentMigration.where(:id => self).update_all(:progress => val)
  end

  def add_missing_content_links(item)
    @missing_content_links ||= {}
    item[:field] ||= :text
    key = "#{item[:class]}_#{item[:id]}_#{item[:field]}"
    if item[:missing_links].present?
      @missing_content_links[key] = item
    else
      @missing_content_links.delete(key)
    end
  end

  def add_warnings_for_missing_content_links
    return unless @missing_content_links
    @missing_content_links.each_value do |item|
      if item[:missing_links].any?
        add_warning(t(:missing_content_links_title, "Missing links found in imported content") + " - #{item[:class]} #{item[:field]}",
          {:error_message => "#{item[:class]} #{item[:field]} - " + t(:missing_content_links_message,
            "The following references could not be resolved: ") + " " + item[:missing_links].join(', '),
           :fix_issue_html_url => item[:url]})
      end
    end
  end

  UPLOAD_TIMEOUT = 1.hour
  def check_for_pre_processing_timeout
    if self.pre_processing? && (self.updated_at.utc + UPLOAD_TIMEOUT) < Time.now.utc
      add_error(t(:upload_timeout_error, "The file upload process timed out."))
      self.workflow_state = :failed
      job_progress.fail if job_progress && !skip_job_progress
      self.save
    end
  end

  # maps the key in the copy parameters hash to the asset string prefix
  # (usually it's just .singularize; weird names needing special casing go here :P)
  def self.asset_string_prefix(key)
    case key
    when 'quizzes'
      'quizzes:quiz'
    else
      key.singularize
    end
  end

  def self.collection_name(key)
    key = key.to_s
    case key
    when 'modules'
      'context_modules'
    when 'module_items'
      'content_tags'
    when 'pages'
      'wiki_pages'
    when 'files'
      'attachments'
    else
      key
    end
  end
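
  # For instance, asset_string_prefix('quizzes') returns 'quizzes:quiz' while
  # asset_string_prefix('assignments') falls through to 'assignment', and
  # collection_name('pages') maps the API-facing name to 'wiki_pages'.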

  # strips out the "id_" prepended to the migration ids in the form
  # also converts arrays of migration ids (or real ids for course exports) into the old hash format
  def self.process_copy_params(hash, for_content_export=false, return_asset_strings=false)
    return {} if hash.blank?

    process_key = if return_asset_strings
      ->(asset_string) { asset_string }
    else
      ->(asset_string) { CC::CCHelper.create_key(asset_string) }
    end
    new_hash = {}

    hash.each do |key, value|
      key = collection_name(key)
      case value
      when Hash # e.g. second level in :copy => {:context_modules => {:id_100 => true, etc}}
        new_sub_hash = {}

        value.each do |sub_key, sub_value|
          if for_content_export
            new_sub_hash[process_key.call(sub_key)] = sub_value
          elsif sub_key.is_a?(String) && sub_key.start_with?("id_")
            new_sub_hash[sub_key.sub("id_", "")] = sub_value
          else
            new_sub_hash[sub_key] = sub_value
          end
        end

        new_hash[key] = new_sub_hash
      when Array
        # e.g. :select => {:context_modules => [100, 101]} for content exports
        # or :select => {:context_modules => [blahblahblah, blahblahblah2]} for normal migration ids
        sub_hash = {}
        if for_content_export
          asset_type = asset_string_prefix(key.to_s)
          value.each do |id|
            sub_hash[process_key.call("#{asset_type}_#{id}")] = '1'
          end
        else
          value.each do |id|
            sub_hash[id] = '1'
          end
        end
        new_hash[key] = sub_hash
      else
        new_hash[key] = value
      end
    end
    new_hash
  end
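
  # A rough sketch of the conversion this performs (the ids are made up):
  #
  #   process_copy_params({ 'pages' => { 'id_abc123' => true } })
  #   # => { 'wiki_pages' => { 'abc123' => true } }
  #
  #   process_copy_params({ 'quizzes' => [42] }, true)
  #   # => { 'quizzes' => { CC::CCHelper.create_key('quizzes:quiz_42') => '1' } }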

  def imported_migration_items
    @imported_migration_items_hash ||= {}
    @imported_migration_items_hash.values.flatten
  end

  def imported_migration_items_by_class(klass)
    @imported_migration_items_hash ||= {}
    @imported_migration_items_hash[klass.name] ||= []
  end

  def add_imported_item(item)
    arr = imported_migration_items_by_class(item.class)
    arr << item unless arr.include?(item)
  end

  def add_external_tool_translation(migration_id, target_tool, custom_fields)
    @external_tool_translation_map ||= {}
    @external_tool_translation_map[migration_id] = [target_tool.id, custom_fields]
  end

  def find_external_tool_translation(migration_id)
    @external_tool_translation_map && migration_id && @external_tool_translation_map[migration_id]
  end

  def handle_import_in_progress_notice
    return unless context.is_a?(Course) && is_set?(migration_settings[:import_in_progress_notice])
    if (new_record? || (workflow_state_changed? && workflow_state_was == 'created')) &&
        %w(pre_processing pre_processed exporting importing).include?(workflow_state)
      context.add_content_notice(:import_in_progress, 4.hours)
    elsif workflow_state_changed? && %w(pre_process_error exported imported failed).include?(workflow_state)
      context.remove_content_notice(:import_in_progress)
    end
  end
end