new generic-format importer classes

notable changes:
 * nothing is treated as a blocking sis-import error anymore; bad rows now
   produce warnings and are simply skipped (see the sketch below)
 * we no longer check for duplicates before going to the database
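
A minimal sketch of the resulting top-level behavior, assuming a root account
and a file path (both placeholders, not from this commit):

  importer = SIS::CSV::Import.process(account, :files => [ '/path/to/batch.zip' ])
  importer.errors    # []  -- bad rows no longer produce blocking errors
  importer.warnings  # [ [file, message], ... ] for each skipped row
  importer.counts    # rows successfully imported, keyed by importer type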

Change-Id: Iedc96b29d92caccdc6a71ae1de8100a1c82dd137
Reviewed-on: https://gerrit.instructure.com/5724
Tested-by: Hudson <hudson@instructure.com>
Reviewed-by: Cody Cutrer <cody@instructure.com>
JT Olds 2011-09-06 12:07:54 -06:00
parent ba7e01e60e
commit 006cde7157
32 changed files with 2189 additions and 1728 deletions


@@ -111,7 +111,7 @@ class SisBatch < ActiveRecord::Base
   def process_instructure_csv_zip
     require 'sis'
     download_zip
-    importer = SIS::SisCsv.process(self.account, :files => [ @data_file.path ], :batch => self)
+    importer = SIS::CSV::Import.process(self.account, :files => [ @data_file.path ], :batch => self)
     finish importer.finished
   end


@@ -16,12 +16,12 @@
 # with this program. If not, see <http://www.gnu.org/licenses/>.
 #
-require 'sis/sis_importer'
-require 'sis/course_importer'
-require 'sis/account_importer'
-require 'sis/enrollment_importer'
-require 'sis/user_importer'
-require 'sis/term_importer'
-require 'sis/section_importer'
-require 'sis/xlist_importer'
-require 'sis/sis_csv'
+require 'sis/csv/base_importer'
+require 'sis/csv/course_importer'
+require 'sis/csv/account_importer'
+require 'sis/csv/enrollment_importer'
+require 'sis/csv/user_importer'
+require 'sis/csv/term_importer'
+require 'sis/csv/section_importer'
+require 'sis/csv/xlist_importer'
+require 'sis/csv/import'


@@ -16,74 +16,81 @@
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require "skip_callback"
module SIS
class AbstractCourseImporter < SisImporter
def self.is_abstract_course_csv?(row)
row.header?('abstract_course_id') && !row.header?('course_id') && row.header?('short_name')
end
def verify(csv, verify)
abstract_course_ids = (verify[:abstract_course_ids] ||= {})
csv_rows(csv) do |row|
abstract_course_id = row['abstract_course_id']
add_error(csv, "Duplicate abstract course id #{abstract_course_id}") if abstract_course_ids[abstract_course_id]
abstract_course_ids[abstract_course_id] = true
add_error(csv, "No abstract_course_id given for an abstract course") if row['abstract_course_id'].blank?
add_error(csv, "No short_name given for abstract course #{abstract_course_id}") if row['short_name'].blank?
add_error(csv, "No long_name given for abstract course #{abstract_course_id}") if row['long_name'].blank?
add_error(csv, "Improper status \"#{row['status']}\" for abstract course #{abstract_course_id}") unless row['status'] =~ /\Aactive|\Adeleted/i
end
end
# expected columns
# abstract_course_id,short_name,long_name,account_id,term_id,status
def process(csv)
start = Time.now
abstract_courses_to_update_sis_batch_id = []
csv_rows(csv) do |row|
update_progress
class AbstractCourseImporter
logger.debug("Processing AbstractCourse #{row.inspect}")
term = @root_account.enrollment_terms.find_by_sis_source_id(row['term_id'])
course = AbstractCourse.find_by_root_account_id_and_sis_source_id(@root_account.id, row['abstract_course_id'])
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def process
start = Time.now
importer = Work.new(@batch_id, @root_account, @logger)
yield importer
AbstractCourse.update_all({:sis_batch_id => @batch_id}, {:id => importer.abstract_courses_to_update_sis_batch_id}) if @batch_id && !importer.abstract_courses_to_update_sis_batch_id.empty?
@logger.debug("AbstractCourses took #{Time.now - start} seconds")
return importer.success_count
end
private
class Work
attr_accessor :success_count, :abstract_courses_to_update_sis_batch_id
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@abstract_courses_to_update_sis_batch_id = []
@logger = logger
@success_count = 0
end
def add_abstract_course(abstract_course_id, short_name, long_name, status, term_id=nil, account_id=nil, fallback_account_id=nil)
@logger.debug("Processing AbstractCourse #{[abstract_course_id, short_name, long_name, status, term_id, account_id, fallback_account_id].inspect}")
raise ImportError, "No abstract_course_id given for an abstract course" if abstract_course_id.blank?
raise ImportError, "No short_name given for abstract course #{abstract_course_id}" if short_name.blank?
raise ImportError, "No long_name given for abstract course #{abstract_course_id}" if long_name.blank?
raise ImportError, "Improper status \"#{status}\" for abstract course #{abstract_course_id}" unless status =~ /\Aactive|\Adeleted/i
term = @root_account.enrollment_terms.find_by_sis_source_id(term_id)
course = AbstractCourse.find_by_root_account_id_and_sis_source_id(@root_account.id, abstract_course_id)
course ||= AbstractCourse.new
course.enrollment_term = term if term
course.root_account = @root_account
account = nil
account = Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['account_id']) if row['account_id'].present?
account ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['fallback_account_id']) if row['fallback_account_id'].present?
account = Account.find_by_root_account_id_and_sis_source_id(@root_account.id, account_id) if account_id.present?
account ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, fallback_account_id) if fallback_account_id.present?
course.account = account if account
course.account ||= @root_account
# only update the name/short_name on new records, and ones that haven't been changed
# since the last sis import
if course.new_record? || (course.sis_course_code && course.sis_course_code == course.short_name)
course.short_name = course.sis_course_code = row['short_name']
course.short_name = course.sis_course_code = short_name
end
if course.new_record? || (course.sis_name && course.sis_name == course.name)
course.name = course.sis_name = row['long_name']
course.name = course.sis_name = long_name
end
course.sis_source_id = row['abstract_course_id']
if row['status'] =~ /active/i
course.sis_source_id = abstract_course_id
if status =~ /active/i
course.workflow_state = 'active'
elsif row['status'] =~ /deleted/i
elsif status =~ /deleted/i
course.workflow_state = 'deleted'
end
if course.changed?
course.sis_batch_id = @batch.id if @batch
course.sis_batch_id = @batch_id if @batch_id
course.save!
elsif @batch
abstract_courses_to_update_sis_batch_id << course.id
elsif @batch_id
@abstract_courses_to_update_sis_batch_id << course.id
end
@sis.counts[:abstract_courses] += 1
@success_count += 1
end
AbstractCourse.update_all({:sis_batch_id => @batch.id}, {:id => abstract_courses_to_update_sis_batch_id}) if @batch && !abstract_courses_to_update_sis_batch_id.empty?
logger.debug("AbstractCourses took #{Time.now - start} seconds")
end
end
end
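
The new generic importers can be driven directly, without any CSV layer; a
minimal sketch of the block-based API above, using placeholder values for the
batch id, account, and row data:

  count = SIS::AbstractCourseImporter.new(nil, account, Rails.logger).process do |importer|
    begin
      importer.add_abstract_course('AC001', 'ENG101', 'English 101', 'active')
    rescue SIS::ImportError => e
      # a bad row raises ImportError; the caller records a warning and moves on
    end
  end
  # process returns the number of rows successfully imported (Work#success_count)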


@@ -19,85 +19,84 @@
require "skip_callback"
module SIS
class AccountImporter < SisImporter
def self.is_account_csv?(row)
row.header?('account_id') && row.header?('parent_account_id')
class AccountImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
account_ids = (verify[:account_ids] ||= {})
csv_rows(csv) do |row|
account_id = row['account_id']
add_error(csv, "Duplicate account id #{account_id}") if account_ids[account_id]
account_ids[account_id] = true
add_error(csv, "No account_id given for an account") if row['account_id'].blank?
end
end
# expected columns
# account_id,parent_account_id
def process(csv)
def process
start = Time.now
accounts_cache = {}
importer = Work.new(@batch_id, @root_account, @logger)
Account.skip_callback(:update_account_associations_if_changed) do
csv_rows(csv) do |row|
update_progress
logger.debug("Processing Account #{row.inspect}")
parent = nil
if !row['parent_account_id'].blank?
parent = accounts_cache[row['parent_account_id']]
parent ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['parent_account_id'])
unless parent
add_warning(csv, "Parent account didn't exist for #{row['account_id']}")
next
end
end
account = Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['account_id'])
if account.nil?
abort = false
if row['name'].blank?
add_warning(csv, "No name given for account #{row['account_id']}, skipping")
abort = true
end
unless row['status'] =~ /\A(active|deleted)/i
add_warning(csv, "Improper status \"#{row['status']}\" for account #{row['account_id']}, skipping")
abort = true
end
next if abort
end
account ||= @root_account.sub_accounts.new
account.root_account_id = @root_account.id
account.parent_account_id = parent ? parent.id : @root_account.id
# only update the name on new records, and ones that haven't been changed since the last sis import
if row['name'].present? && (account.new_record? || (account.sis_name && account.sis_name == account.name))
account.name = account.sis_name = row['name']
end
account.sis_source_id = row['account_id']
account.sis_batch_id = @batch.id if @batch
if row['status'].present?
if row['status'] =~ /active/i
account.workflow_state = 'active'
elsif row['status'] =~ /deleted/i
account.workflow_state = 'deleted'
end
end
update_account_associations = account.root_account_id_changed? || account.parent_account_id_changed?
account.save
account.update_account_associations if update_account_associations
@sis.counts[:accounts] += 1
accounts_cache[account.sis_source_id] = account
end
yield importer
end
@logger.debug("Accounts took #{Time.now - start} seconds")
return importer.success_count
end
private
class Work
attr_accessor :success_count
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@accounts_cache = {}
@logger = logger
@success_count = 0
end
def add_account(account_id, parent_account_id, status, name)
@logger.debug("Processing Account #{[account_id, parent_account_id, status, name].inspect}")
raise ImportError, "No account_id given for an account" if account_id.blank?
parent = nil
if !parent_account_id.blank?
parent = @accounts_cache[parent_account_id]
parent ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, parent_account_id)
raise ImportError, "Parent account didn't exist for #{account_id}" unless parent
@accounts_cache[parent.sis_source_id] = parent
end
account = Account.find_by_root_account_id_and_sis_source_id(@root_account.id, account_id)
if account.nil?
raise ImportError, "No name given for account #{account_id}, skipping" if name.blank?
raise ImportError, "Improper status \"#{status}\" for account #{account_id}, skipping" unless status =~ /\A(active|deleted)/i
end
account ||= @root_account.sub_accounts.new
account.root_account_id = @root_account.id
account.parent_account_id = parent ? parent.id : @root_account.id
# only update the name on new records, and ones that haven't been changed since the last sis import
if name.present? && (account.new_record? || (account.sis_name && account.sis_name == account.name))
account.name = account.sis_name = name
end
account.sis_source_id = account_id
account.sis_batch_id = @batch_id if @batch_id
if status.present?
if status =~ /active/i
account.workflow_state = 'active'
elsif status =~ /deleted/i
account.workflow_state = 'deleted'
end
end
update_account_associations = account.root_account_id_changed? || account.parent_account_id_changed?
account.save
account.update_account_associations if update_account_associations
@accounts_cache[account.sis_source_id] = account
@success_count += 1
end
logger.debug("Accounts took #{Time.now - start} seconds")
end
end
end

lib/sis/common.rb (new file)

@@ -0,0 +1,23 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
class ImportError < StandardError; end
end


@@ -19,142 +19,150 @@
require "skip_callback"
module SIS
class CourseImporter < SisImporter
def self.is_course_csv?(row)
row.header?('course_id') && row.header?('short_name')
class CourseImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
course_ids = (verify[:course_ids] ||= {})
csv_rows(csv) do |row|
course_id = row['course_id']
add_error(csv, "Duplicate course id #{course_id}") if course_ids[course_id]
course_ids[course_id] = true
add_error(csv, "No course_id given for a course") if row['course_id'].blank?
add_error(csv, "No short_name given for course #{course_id}") if row['short_name'].blank? && row['abstract_course_id'].blank?
add_error(csv, "No long_name given for course #{course_id}") if row['long_name'].blank? && row['abstract_course_id'].blank?
add_error(csv, "Improper status \"#{row['status']}\" for course #{course_id}") unless row['status'] =~ /\Aactive|\Adeleted|\Acompleted/i
end
end
# expected columns
# course_id,short_name,long_name,account_id,term_id,status
def process(csv)
def process(messages)
start = Time.now
courses_to_update_sis_batch_id = []
course_ids_to_update_associations = [].to_set
importer = Work.new(@batch_id, @root_account, @logger, courses_to_update_sis_batch_id, course_ids_to_update_associations, messages)
Course.skip_callback(:update_enrollments_later) do
csv_rows(csv) do |row|
update_progress
Course.skip_updating_account_associations do
yield importer
end
end
Course.skip_updating_account_associations do
Course.update_account_associations(course_ids_to_update_associations.to_a) unless course_ids_to_update_associations.empty?
Course.update_all({:sis_batch_id => @batch_id}, {:id => courses_to_update_sis_batch_id}) if @batch_id && !courses_to_update_sis_batch_id.empty?
@logger.debug("Courses took #{Time.now - start} seconds")
return importer.success_count
end
logger.debug("Processing Course #{row.inspect}")
term = @root_account.enrollment_terms.find_by_sis_source_id(row['term_id'])
course = Course.find_by_root_account_id_and_sis_source_id(@root_account.id, row['course_id'])
course ||= Course.new
course.enrollment_term = term if term
course.root_account = @root_account
private
account = nil
account = Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['account_id']) if row['account_id'].present?
account ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['fallback_account_id']) if row['fallback_account_id'].present?
course.account = account if account
course.account ||= @root_account
class Work
attr_accessor :success_count
update_account_associations = course.account_id_changed? || course.root_account_id_changed?
def initialize(batch_id, root_account, logger, a1, a2, m)
@batch_id = batch_id
@root_account = root_account
@courses_to_update_sis_batch_id = a1
@course_ids_to_update_associations = a2
@messages = m
@logger = logger
@success_count = 0
end
course.sis_source_id = row['course_id']
if row['status'] =~ /active/i
if course.workflow_state == 'completed'
course.workflow_state = 'available'
elsif course.workflow_state != 'available'
course.workflow_state = 'claimed'
end
elsif row['status'] =~ /deleted/i
course.workflow_state = 'deleted'
elsif row['status'] =~ /completed/i
course.workflow_state = 'completed'
end
def add_course(course_id, term_id, account_id, fallback_account_id, status, start_date, end_date, abstract_course_id, short_name, long_name)
begin
course.start_at = row['start_date'].blank? ? nil : DateTime.parse(row['start_date'])
course.conclude_at = row['end_date'].blank? ? nil : DateTime.parse(row['end_date'])
rescue
add_warning(csv, "Bad date format for course #{row['course_id']}")
end
course.restrict_enrollments_to_course_dates = (course.start_at.present? || course.conclude_at.present?)
@logger.debug("Processing Course #{[course_id, term_id, account_id, fallback_account_id, status, start_date, end_date, abstract_course_id, short_name, long_name].inspect}")
abstract_course = nil
if row['abstract_course_id'].present?
abstract_course = AbstractCourse.find_by_root_account_id_and_sis_source_id(@root_account.id, row['abstract_course_id'])
add_warning(csv, "unknown abstract course id #{row['abstract_course_id']}, ignoring abstract course reference") unless abstract_course
end
raise ImportError, "No course_id given for a course" if course_id.blank?
raise ImportError, "No short_name given for course #{course_id}" if short_name.blank? && abstract_course_id.blank?
raise ImportError, "No long_name given for course #{course_id}" if long_name.blank? && abstract_course_id.blank?
raise ImportError, "Improper status \"#{status}\" for course #{course_id}" unless status =~ /\A(active|deleted|completed)/i
if abstract_course
if row['term_id'].blank? && course.enrollment_term_id != abstract_course.enrollment_term
course.send(:association_instance_set, :enrollment_term, nil)
course.enrollment_term_id = abstract_course.enrollment_term_id
end
if row['account_id'].blank? && course.account_id != abstract_course.account_id
course.send(:association_instance_set, :account, nil)
course.account_id = abstract_course.account_id
end
end
course.abstract_course = abstract_course
# only update the name/short_name on new records, and ones that haven't been changed
# since the last sis import
if course.short_name.blank? || course.sis_course_code == course.short_name
if row['short_name'].present?
course.short_name = course.sis_course_code = row['short_name']
elsif abstract_course && course.short_name.blank?
course.short_name = course.sis_course_code = abstract_course.short_name
end
end
if course.name.blank? || course.sis_name == course.name
if row['long_name'].present?
course.name = course.sis_name = row['long_name']
elsif abstract_course && course.name.blank?
course.name = course.sis_name = abstract_course.name
end
end
term = @root_account.enrollment_terms.find_by_sis_source_id(term_id)
course = Course.find_by_root_account_id_and_sis_source_id(@root_account.id, course_id)
course ||= Course.new
course.enrollment_term = term if term
course.root_account = @root_account
update_enrollments = !course.new_record? && !(course.changes.keys & ['workflow_state', 'name', 'course_code']).empty?
if course.changed?
course.templated_courses.each do |templated_course|
templated_course.root_account = @root_account
templated_course.account = course.account
if templated_course.sis_name && templated_course.sis_name == templated_course.name && course.sis_name && course.sis_name == course.name
templated_course.name = course.name
templated_course.sis_name = course.sis_name
end
if templated_course.sis_course_code && templated_course.sis_course_code == templated_course.short_name && course.sis_course_code && course.sis_course_code == course.short_name
templated_course.sis_course_code = course.sis_course_code
templated_course.short_name = course.short_name
end
templated_course.enrollment_term = course.enrollment_term
templated_course.sis_batch_id = @batch.id if @batch
course_ids_to_update_associations.add(templated_course.id) if templated_course.account_id_changed? || templated_course.root_account_id_changed?
templated_course.save_without_broadcasting!
end
course.sis_batch_id = @batch.id if @batch
course.save_without_broadcasting!
course_ids_to_update_associations.add(course.id) if update_account_associations
elsif @batch
courses_to_update_sis_batch_id << course.id
end
@sis.counts[:courses] += 1
account = nil
account = Account.find_by_root_account_id_and_sis_source_id(@root_account.id, account_id) if account_id.present?
account ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, fallback_account_id) if fallback_account_id.present?
course.account = account if account
course.account ||= @root_account
course.update_enrolled_users if update_enrollments
update_account_associations = course.account_id_changed? || course.root_account_id_changed?
course.sis_source_id = course_id
if status =~ /active/i
if course.workflow_state == 'completed'
course.workflow_state = 'available'
elsif course.workflow_state != 'available'
course.workflow_state = 'claimed'
end
elsif status =~ /deleted/i
course.workflow_state = 'deleted'
elsif status =~ /completed/i
course.workflow_state = 'completed'
end
course.start_at = start_date
course.conclude_at = end_date
course.restrict_enrollments_to_course_dates = (course.start_at.present? || course.conclude_at.present?)
abstract_course = nil
if abstract_course_id.present?
abstract_course = AbstractCourse.find_by_root_account_id_and_sis_source_id(@root_account.id, abstract_course_id)
@messages << "unknown abstract course id #{abstract_course_id}, ignoring abstract course reference" unless abstract_course
end
if abstract_course
if term_id.blank? && course.enrollment_term_id != abstract_course.enrollment_term
course.send(:association_instance_set, :enrollment_term, nil)
course.enrollment_term_id = abstract_course.enrollment_term_id
end
if account_id.blank? && course.account_id != abstract_course.account_id
course.send(:association_instance_set, :account, nil)
course.account_id = abstract_course.account_id
end
end
Course.update_account_associations(course_ids_to_update_associations.to_a) unless course_ids_to_update_associations.empty?
course.abstract_course = abstract_course
Course.update_all({:sis_batch_id => @batch.id}, {:id => courses_to_update_sis_batch_id}) if @batch && !courses_to_update_sis_batch_id.empty?
logger.debug("Courses took #{Time.now - start} seconds")
# only update the name/short_name on new records, and ones that haven't been changed
# since the last sis import
if course.short_name.blank? || course.sis_course_code == course.short_name
if short_name.present?
course.short_name = course.sis_course_code = short_name
elsif abstract_course && course.short_name.blank?
course.short_name = course.sis_course_code = abstract_course.short_name
end
end
if course.name.blank? || course.sis_name == course.name
if long_name.present?
course.name = course.sis_name = long_name
elsif abstract_course && course.name.blank?
course.name = course.sis_name = abstract_course.name
end
end
update_enrollments = !course.new_record? && !(course.changes.keys & ['workflow_state', 'name', 'course_code']).empty?
if course.changed?
course.templated_courses.each do |templated_course|
templated_course.root_account = @root_account
templated_course.account = course.account
if templated_course.sis_name && templated_course.sis_name == templated_course.name && course.sis_name && course.sis_name == course.name
templated_course.name = course.name
templated_course.sis_name = course.sis_name
end
if templated_course.sis_course_code && templated_course.sis_course_code == templated_course.short_name && course.sis_course_code && course.sis_course_code == course.short_name
templated_course.sis_course_code = course.sis_course_code
templated_course.short_name = course.short_name
end
templated_course.enrollment_term = course.enrollment_term
templated_course.sis_batch_id = @batch_id if @batch_id
@course_ids_to_update_associations.add(templated_course.id) if templated_course.account_id_changed? || templated_course.root_account_id_changed?
templated_course.save_without_broadcasting!
end
course.sis_batch_id = @batch_id if @batch_id
course.save_without_broadcasting!
@course_ids_to_update_associations.add(course.id) if update_account_associations
elsif @batch_id
@courses_to_update_sis_batch_id << course.id
end
course.update_enrolled_users if update_enrollments
@success_count += 1
end
end
end
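
CourseImporter#process additionally threads a messages array through, so
non-fatal problems (for example an unknown abstract course reference) surface
as warnings once the block finishes; a sketch with placeholder arguments:

  messages = []
  count = SIS::CourseImporter.new(nil, account, Rails.logger).process(messages) do |importer|
    begin
      importer.add_course('C001', nil, nil, nil, 'active', nil, nil, nil,
                          'BIO101', 'Biology 101')
    rescue SIS::ImportError => e
      messages << e.to_s
    end
  end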


@@ -0,0 +1,44 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class AbstractCourseImporter < BaseImporter
def self.is_abstract_course_csv?(row)
row.header?('abstract_course_id') && !row.header?('course_id') && row.header?('short_name')
end
# expected columns
# abstract_course_id,short_name,long_name,account_id,term_id,status
def process(csv)
@sis.counts[:abstract_courses] += SIS::AbstractCourseImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
begin
importer.add_abstract_course(row['abstract_course_id'], row['short_name'], row['long_name'], row['status'], row['term_id'], row['account_id'], row['fallback_account_id'])
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end


@@ -0,0 +1,45 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class AccountImporter < BaseImporter
def self.is_account_csv?(row)
row.header?('account_id') && row.header?('parent_account_id')
end
# expected columns
# account_id,parent_account_id
def process(csv)
@sis.counts[:accounts] += SIS::AccountImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
begin
importer.add_account(row['account_id'], row['parent_account_id'],
row['status'], row['name'])
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end


@@ -0,0 +1,62 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require 'lib/sis/common'
module SIS
module CSV
class BaseImporter
PARSE_ARGS = {:headers => :first_row,
:skip_blanks => true,
:header_converters => :downcase,
:converters => lambda{|field|field ? field.strip : field}
}
def initialize(sis_csv)
@sis = sis_csv
@root_account = @sis.root_account
@batch = @sis.batch
end
def process(csv)
raise NotImplementedError
end
def logger
@sis.logger
end
def add_error(csv, message)
@sis.add_error(csv, message)
end
def add_warning(csv, message)
@sis.add_warning(csv, message)
end
def update_progress(count = 1)
@sis.update_progress(count)
end
def csv_rows(csv)
FasterCSV.foreach(csv[:fullpath], PARSE_ARGS) do |row|
yield row
end
end
end
end
end
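
Every CSV-facing importer in this commit follows the same shape on top of
BaseImporter: a header-sniffing predicate plus a process method that delegates
to the matching generic importer and downgrades ImportError to a warning. A
hypothetical FooImporter illustrating the pattern (SIS::FooImporter and its
add_foo method are not part of this commit):

  module SIS
    module CSV
      class FooImporter < BaseImporter
        def self.is_foo_csv?(row)
          row.header?('foo_id') && row.header?('name')
        end
        def process(csv)
          @sis.counts[:foos] += SIS::FooImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
            csv_rows(csv) do |row|
              update_progress
              begin
                importer.add_foo(row['foo_id'], row['name'], row['status'])
              rescue ImportError => e
                add_warning(csv, "#{e}")
              end
            end
          end
        end
      end
    end
  end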


@@ -0,0 +1,56 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class CourseImporter < BaseImporter
def self.is_course_csv?(row)
row.header?('course_id') && row.header?('short_name')
end
# expected columns
# course_id,short_name,long_name,account_id,term_id,status
def process(csv)
course_ids = {}
messages = []
@sis.counts[:courses] += SIS::CourseImporter.new(@batch.try(:id), @root_account, logger).process(messages) do |importer|
csv_rows(csv) do |row|
update_progress
start_date = nil
end_date = nil
begin
start_date = DateTime.parse(row['start_date']) unless row['start_date'].blank?
end_date = DateTime.parse(row['end_date']) unless row['end_date'].blank?
rescue
messages << "Bad date format for course #{row['course_id']}"
end
begin
importer.add_course(row['course_id'], row['term_id'], row['account_id'], row['fallback_account_id'], row['status'], start_date, end_date, row['abstract_course_id'], row['short_name'], row['long_name'])
rescue ImportError => e
messages << "#{e}"
end
end
end
messages.each { |message| add_warning(csv, message) }
end
end
end
end


@@ -0,0 +1,56 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class EnrollmentImporter < BaseImporter
def self.is_enrollment_csv?(row)
(row.header?('section_id') || row.header?('course_id')) && row.header?('user_id')
end
# expected columns
# course_id,user_id,role,section_id,status
def process(csv)
messages = []
@sis.counts[:enrollments] += SIS::EnrollmentImporter.new(@batch.try(:id), @root_account, logger).process(messages, @sis.updates_every) do |importer|
csv_rows(csv) do |row|
update_progress
start_date = nil
end_date = nil
begin
start_date = DateTime.parse(row['start_date']) unless row['start_date'].blank?
end_date = DateTime.parse(row['end_date']) unless row['end_date'].blank?
rescue
messages << "Bad date format for user #{row['user_id']} in #{row['course_id'].blank? ? 'section' : 'course'} #{row['course_id'].blank? ? row['section_id'] : row['course_id']}"
end
begin
importer.add_enrollment(row['course_id'], row['section_id'], row['user_id'], row['role'], row['status'], start_date, end_date, row['associated_user_id'])
rescue ImportError => e
messages << "#{e}"
next
end
end
end
messages.each { |message| add_warning(csv, message) }
end
end
end
end


@@ -0,0 +1,44 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class GradePublishingResultsImporter < BaseImporter
def self.is_grade_publishing_results_csv?(row)
row.header?('enrollment_id') && row.header?('grade_publishing_status')
end
# expected columns
# enrollment_id,grade_publishing_status
def process(csv)
@sis.counts[:grade_publishing_results] += SIS::GradePublishingResultsImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
begin
importer.add_grade_publishing_result(row['enrollment_id'], row['grade_publishing_status'])
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end


@@ -0,0 +1,44 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
# note these are account-level groups, not course groups
class GroupImporter < BaseImporter
def self.is_group_csv?(row)
row.header?('group_id') && row.header?('account_id')
end
# expected columns
# group_id,account_id,name,status
def process(csv)
@sis.counts[:groups] += SIS::GroupImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
begin
importer.add_group(row['group_id'], row['account_id'], row['name'], row['status'])
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end


@@ -0,0 +1,44 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class GroupMembershipImporter < BaseImporter
def self.is_group_membership_csv?(row)
row.header?('group_id') && row.header?('user_id')
end
# expected columns
# group_id,user_id,status
def process(csv)
@sis.counts[:group_memberships] += SIS::GroupMembershipImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
begin
importer.add_group_membership(row['user_id'], row['group_id'], row['status'])
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end

lib/sis/csv/import.rb (new file)

@@ -0,0 +1,380 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require 'faster_csv'
require 'zip/zip'
module SIS
module CSV
class Import
attr_accessor :root_account, :batch, :errors, :warnings, :finished, :counts, :updates_every
IGNORE_FILES = /__macosx|desktop d[bf]|\A\..*/i
# The order of this array is important:
# * Account must be imported before Term and Course
# * Course must be imported before Section
# * Course and Section must be imported before Xlist
# * Course, Section, and User must be imported before Enrollment
IMPORTERS = [:account, :term, :abstract_course, :course, :section, :xlist, :user, :enrollment, :group, :group_membership, :grade_publishing_results]
def initialize(root_account, opts = {})
opts = opts.with_indifferent_access
@root_account = root_account
@csvs = {}
IMPORTERS.each { |importer| @csvs[importer] = [] }
@rows = {}
IMPORTERS.each { |importer| @rows[importer] = 0 }
@headers = {}
IMPORTERS.each { |importer| @headers[importer] = Set.new }
@files = opts[:files] || []
@batch = opts[:batch]
@logger = opts[:logger]
@counts = {}
IMPORTERS.each { |importer| @counts[importer.to_s.pluralize.to_sym] = 0 }
@total_rows = 1
@current_row = 0
@rows_since_progress_update = 0
@progress_multiplier = opts[:progress_multiplier] || 1
@progress_offset = opts[:progress_offset] || 0
@errors = []
@warnings = []
@pending = false
@finished = false
settings = PluginSetting.settings_for_plugin('sis_import')
@allow_printing = opts[:allow_printing].nil? ? true : opts[:allow_printing]
@parallelism = opts[:parallelism]
@parallelism ||= settings[:parallelism].to_i
@parallelism = 1 if @parallelism < 1
@parallelism = 1 unless @batch
@minimum_rows_for_parallel = settings[:minimum_rows_for_parallel].to_i
@minimum_rows_for_parallel = 1000 if @minimum_rows_for_parallel < 1
@parallel_queue = settings[:queue_for_parallel_jobs]
@parallel_queue = nil if @parallel_queue.blank?
update_pause_vars
end
def self.process(root_account, opts = {})
importer = Import.new(root_account, opts)
importer.process
importer
end
def process
@tmp_dirs = []
@files.each do |file|
if File.file?(file)
if File.extname(file).downcase == '.zip'
tmp_dir = Dir.mktmpdir
@tmp_dirs << tmp_dir
unzip_file(file, tmp_dir)
Dir[File.join(tmp_dir, "**/**")].each do |fn|
process_file(tmp_dir, fn[tmp_dir.size+1 .. -1])
end
elsif File.extname(file).downcase == '.csv'
process_file(File.dirname(file), File.basename(file))
end
end
end
@files = nil
IMPORTERS.each do |importer|
@csvs[importer].each do |csv|
rows = (%x{wc -l '#{csv[:fullpath]}'}.split.first.to_i rescue 0)
@rows[importer] += rows
@total_rows += rows
end
end
@parallelism = 1 if @total_rows <= @minimum_rows_for_parallel
# calculate how often we should update progress to get 1% resolution
# but don't leave us hanging for more than 500 rows at a time
# and don't do it more often than we have work to do
@updates_every = [ [ @total_rows / @parallelism / 100, 500 ].min, 10 ].max
if (@parallelism > 1)
# re-balance the CSVs
@batch.data[:importers] = {}
IMPORTERS.each do |importer|
if (importer != :account)
rebalance_csvs(importer)
end
@batch.data[:importers][importer] = @csvs[importer].length
end
@batch.save!
@rows = nil
@headers = nil
run_next_importer(IMPORTERS.first)
@batch.reload
while @batch.workflow_state.to_sym == :importing
sleep(0.5)
@batch.reload
end
@finished = [:imported, :imported_with_messages].include?(@batch.workflow_state.to_sym)
else
IMPORTERS.each do |importer|
importerObject = SIS::CSV.const_get(importer.to_s.camelcase + 'Importer').new(self)
@csvs[importer].each { |csv| importerObject.process(csv) }
end
@finished = true
end
rescue => e
if @batch
error_report = ErrorReport.log_exception(:sis_import, e,
:message => "Importing CSV for account: #{@root_account.id} (#{@root_account.name}) sis_batch_id: #{@batch.id}: #{e.to_s}",
:during_tests => false
)
add_error(nil, "Error while importing CSV. Please contact support. (Error report #{error_report.id})")
else
add_error(nil, "#{e.message}\n#{e.backtrace.join "\n"}")
raise e
end
ensure
@tmp_dirs.each do |tmp_dir|
FileUtils.rm_rf(tmp_dir, :secure => true) if File.directory?(tmp_dir)
end
if @batch && @parallelism == 1
@batch.data[:counts] = @counts
@batch.processing_errors = @errors
@batch.processing_warnings = @warnings
@batch.save
end
if @allow_printing and !@errors.empty? and !@batch
# If there's no batch, then we must be working via the console and we should just error out
@errors.each { |w| puts w.join ": " }
end
end
def logger
@logger ||= Rails.logger
end
def add_error(csv, message)
@errors << [ csv ? csv[:file] : "", message ]
end
def add_warning(csv, message)
@warnings << [ csv ? csv[:file] : "", message ]
end
def update_progress(count = 1)
@current_row += count
return unless @batch
@rows_since_progress_update += count
if @rows_since_progress_update >= @updates_every
if @parallelism > 1
SisBatch.transaction do
@batch.reload(:select => 'data, progress', :lock => true)
@current_row += @batch.data[:current_row] if @batch.data[:current_row]
@batch.data[:current_row] = @current_row
@batch.progress = (((@current_row.to_f/@total_rows) * @progress_multiplier) + @progress_offset) * 100
@batch.save
@current_row = 0
@rows_since_progress_update = 0
end
else
@batch.fast_update_progress( (((@current_row.to_f/@total_rows) * @progress_multiplier) + @progress_offset) * 100)
end
end
if @current_row.to_i % @pause_every == 0
sleep(@pause_duration)
update_pause_vars
end
end
def run_single_importer(importer, csv)
begin
importerObject = SIS::CSV.const_get(importer.to_s.camelcase + 'Importer').new(self)
if csv[:attachment]
file = csv[:attachment].open
csv[:fullpath] = file.path
end
importerObject.process(csv)
run_next_importer(IMPORTERS[IMPORTERS.index(importer) + 1]) if complete_importer(importer)
rescue => e
error_report = ErrorReport.log_exception(:sis_import, e,
:message => "Importing CSV for account: #{@root_account.id} (#{@root_account.name}) sis_batch_id: #{@batch.id}: #{e.to_s}",
:during_tests => false
)
add_error(nil, "Error while importing CSV. Please contact support. (Error report #{error_report.id})")
@batch.processing_errors ||= []
@batch.processing_warnings ||= []
@batch.processing_errors.concat(@errors)
@batch.processing_warnings.concat(@warnings)
@batch.workflow_state = :failed_with_messages
@batch.save!
ensure
file.close if file
end
end
private
def run_next_importer(importer)
return finish if importer.nil?
return run_next_importer(IMPORTERS[IMPORTERS.index(importer) + 1]) if @csvs[importer].empty?
if (importer == :account)
@csvs[importer].each { |csv| run_single_importer(importer, csv) }
return
end
# logger doesn't serialize well
@logger = nil
@csvs[importer].each { |csv| self.send_later_enqueue_args(:run_single_importer, { :queue => @queue, :priority => Delayed::LOW_PRIORITY }, importer, csv) }
end
def complete_importer(importer)
return unless @batch
SisBatch.transaction do
@batch.reload(:lock => true)
@batch.data[:importers][importer] -= 1
@batch.data[:counts] ||= {}
@counts.each do |k, v|
@batch.data[:counts][k] ||= 0
@batch.data[:counts][k] += v
@counts[k] = 0
end
@current_row += @batch.data[:current_row] if @batch.data[:current_row]
@batch.data[:current_row] = @current_row
@batch.progress = (((@current_row.to_f/@total_rows) * @progress_multiplier) + @progress_offset) * 100
@batch.processing_errors ||= []
@batch.processing_warnings ||= []
@batch.processing_errors.concat(@errors)
@batch.processing_warnings.concat(@warnings)
@current_row = 0
@batch.save
return @batch.data[:importers][importer] == 0
end
end
def finish
@batch.finish(true)
@finished = true
end
def update_pause_vars
return unless @batch
# throttling can be set on individual SisBatch instances, and also
# site-wide in the Setting table.
@batch.reload(:select => 'data') # update to catch changes to pause vars
@pause_every = (@batch.data[:pause_every] || Setting.get('sis_batch_pause_every', 100)).to_i
@pause_duration = (@batch.data[:pause_duration] || Setting.get('sis_batch_pause_duration', 0)).to_f
end
def unzip_file(file, dest)
Zip::ZipFile.open(file) do |zip_file|
zip_file.each do |f|
f_path = File.join(dest, f.name)
FileUtils.mkdir_p(File.dirname(f_path))
zip_file.extract(f, f_path) unless File.exist?(f_path)
end
end
end
def rebalance_csvs(importer)
rows_per_batch = (@rows[importer].to_f / @parallelism).ceil.to_i
new_csvs = []
out_csv = nil
tmp_dir = Dir.mktmpdir
@tmp_dirs << tmp_dir
temp_file = 0
headers = @headers[importer].to_a
path = nil
begin
Attachment.skip_scribd_submits
@csvs[importer].each do |csv|
remaining_in_batch = 0
FasterCSV.foreach(csv[:fullpath], BaseImporter::PARSE_ARGS) do |row|
if remaining_in_batch == 0
temp_file += 1
if out_csv
out_csv.close
out_csv = nil
att = Attachment.new
att.context = @batch
att.uploaded_data = ActionController::TestUploadedFile.new(path, Attachment.mimetype(path))
att.display_name = new_csvs.last[:file]
att.save!
new_csvs.last.delete(:fullpath)
new_csvs.last[:attachment] = att
end
path = File.join(tmp_dir, "#{importer}#{temp_file}.csv")
out_csv = FasterCSV.open(path, "wb", {:headers => headers, :write_headers => true})
new_csvs << {:file => csv[:file]}
remaining_in_batch = rows_per_batch
end
out_row = FasterCSV::Row.new(headers, []);
headers.each { |header| out_row[header] = row[header] }
out_csv << out_row
remaining_in_batch -= 1
end
end
if out_csv
out_csv.close
out_csv = nil
att = Attachment.new
att.context = @batch
att.uploaded_data = ActionController::TestUploadedFile.new(path, Attachment.mimetype(path))
att.display_name = new_csvs.last[:file]
att.save!
new_csvs.last.delete(:fullpath)
new_csvs.last[:attachment] = att
end
ensure
out_csv.close if out_csv
Attachment.skip_scribd_submits(false)
end
@csvs[importer] = new_csvs
end
def process_file(base, file)
csv = { :base => base, :file => file, :fullpath => File.join(base, file) }
if File.file?(csv[:fullpath]) && File.extname(csv[:fullpath]).downcase == '.csv'
FasterCSV.foreach(csv[:fullpath], BaseImporter::PARSE_ARGS) do |row|
importer = IMPORTERS.index do |importer|
if SIS::CSV.const_get(importer.to_s.camelcase + 'Importer').send('is_' + importer.to_s + '_csv?', row)
@csvs[importer] << csv
@headers[importer].merge(row.headers)
true
else
false
end
end
add_error(csv, "Couldn't find Canvas CSV import headers") if importer.nil?
break
end
elsif !File.directory?(csv[:fullpath]) && !(csv[:fullpath] =~ IGNORE_FILES)
add_warning(csv, "Skipping unknown file type")
end
end
end
end
end
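
File types are detected purely from CSV headers via the is_*_csv? predicates
above; a rough sketch of that routing, assuming FasterCSV header parsing
equivalent (for this purpose) to BaseImporter::PARSE_ARGS:

  row = FasterCSV.parse("course_id,short_name,long_name,status\nC1,BIO101,Biology 101,active\n",
                        :headers => :first_row).first
  SIS::CSV::CourseImporter.is_course_csv?(row)    # => true
  SIS::CSV::AccountImporter.is_account_csv?(row)  # => false (no parent_account_id column)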


@@ -0,0 +1,54 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class SectionImporter < BaseImporter
def self.is_section_csv?(row)
#This matcher works because an enrollment doesn't have name
row.header?('section_id') && row.header?('name')
end
# expected columns
# section_id,course_id,name,status,start_date,end_date
def process(csv)
@sis.counts[:sections] += SIS::SectionImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
start_date = nil
end_date = nil
begin
start_date = DateTime.parse(row['start_date']) unless row['start_date'].blank?
end_date = DateTime.parse(row['end_date']) unless row['end_date'].blank?
rescue
add_warning(csv, "Bad date format for section #{row['section_id']}")
end
begin
importer.add_section(row['section_id'], row['course_id'], row['name'], row['status'], start_date, end_date, row['account_id'])
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end


@@ -0,0 +1,54 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class TermImporter < BaseImporter
def self.is_term_csv?(row)
#This matcher works because a course has long_name/short_name
row.header?('term_id') && row.header?('name')
end
# expected columns
# account_id,parent_account_id,name,status
def process(csv)
@sis.counts[:terms] += SIS::TermImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
start_date = nil
end_date = nil
begin
start_date = DateTime.parse(row['start_date']) unless row['start_date'].blank?
end_date = DateTime.parse(row['end_date']) unless row['end_date'].blank?
rescue
add_warning(csv, "Bad date format for term #{row['term_id']}")
end
begin
importer.add_term(row['term_id'], row['name'], row['status'], start_date, end_date)
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end


@@ -0,0 +1,46 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
module CSV
class UserImporter < BaseImporter
def self.is_user_csv?(row)
row.header?('user_id') && row.header?('login_id')
end
# expected columns:
# user_id,login_id,first_name,last_name,email,status
def process(csv)
messages = []
@sis.counts[:users] += SIS::UserImporter.new(@batch.try(:id), @root_account, logger).process(@sis.updates_every, messages) do |importer|
csv_rows(csv) do |row|
update_progress
begin
importer.add_user(row['user_id'], row['login_id'], row['status'], row['first_name'], row['last_name'], row['email'], row['password'], row['ssha_password'])
rescue ImportError => e
messages << "#{e}"
end
end
end
messages.each { |message| add_warning(csv, message) }
end
end
end
end


@@ -0,0 +1,46 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require 'skip_callback'
module SIS
module CSV
class XlistImporter < BaseImporter
def self.is_xlist_csv?(row)
row.header?('xlist_course_id') && row.header?('section_id')
end
# possible columns:
# xlist_course_id, section_id, status
def process(csv)
@sis.counts[:xlists] += SIS::XlistImporter.new(@batch.try(:id), @root_account, logger).process do |importer|
csv_rows(csv) do |row|
update_progress
begin
importer.add_crosslist(row['xlist_course_id'], row['section_id'], row['status'])
rescue ImportError => e
add_warning(csv, "#{e}")
end
end
end
end
end
end
end


@@ -20,206 +20,217 @@ require "set"
require "skip_callback"
module SIS
class EnrollmentImporter < SisImporter
class EnrollmentImporter
def self.is_enrollment_csv?(row)
(row.header?('section_id') || row.header?('course_id')) && row.header?('user_id')
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
csv_rows(csv) do |row|
add_error(csv, "No course_id or section_id given for an enrollment") if row['course_id'].blank? && row['section_id'].blank?
add_error(csv, "No user_id given for an enrollment") if row['user_id'].blank?
add_error(csv, "Improper role \"#{row['role']}\" for an enrollment") unless row['role'] =~ /\Astudent|\Ateacher|\Ata|\Aobserver|\Adesigner/i
add_error(csv, "Improper status \"#{row['status']}\" for an enrollment") unless row['status'] =~ /\Aactive|\Adeleted|\Acompleted|\Ainactive/i
end
end
# expected columns
# course_id,user_id,role,section_id,status
def process(csv)
def process(messages, updates_every)
start = Time.now
update_account_association_user_ids = Set.new
incrementally_update_account_associations_user_ids = Set.new
users_to_touch_ids = Set.new
courses_to_touch_ids = Set.new
enrollments_to_update_sis_batch_ids = []
account_chain_cache = {}
course = section = nil
i = Work.new(@batch_id, @root_account, @logger, updates_every, messages)
Enrollment.skip_callback(:belongs_to_touch_after_save_or_destroy_for_course) do
User.skip_updating_account_associations do
FasterCSV.open(csv[:fullpath], "rb", PARSE_ARGS) do |csv_object|
row = csv_object.shift
count = 0
until row.nil?
update_progress(count)
count = 0
# this transaction assumes that all these pseudonyms, courses, enrollments and
# course_sections are all in the same database
Enrollment.transaction do
remaining_in_transaction = @sis.updates_every
tx_end_time = Time.now + Setting.get('sis_transaction_seconds', '1').to_i.seconds
begin
logger.debug("Processing Enrollment #{row.inspect}")
count += 1
remaining_in_transaction -= 1
last_section = section
# reset the cached course/section if they don't match this row
if course && row['course_id'].present? && course.sis_source_id != row['course_id']
course = nil
section = nil
end
if section && row['section_id'].present? && section.sis_source_id != row['section_id']
section = nil
end
pseudo = Pseudonym.find_by_account_id_and_sis_user_id(@root_account.id, row['user_id'])
user = pseudo.user rescue nil
course ||= Course.find_by_root_account_id_and_sis_source_id(@root_account.id, row['course_id']) unless row['course_id'].blank?
section ||= CourseSection.find_by_root_account_id_and_sis_source_id(@root_account.id, row['section_id']) unless row['section_id'].blank?
unless user && (course || section)
add_warning csv, "Neither course #{row['course_id']} nor section #{row['section_id']} existed for user enrollment" unless (course || section)
add_warning csv, "User #{row['user_id']} didn't exist for user enrollment" unless user
next
end
if row['section_id'] && !section
add_warning csv, "An enrollment referenced a non-existent section #{row['section_id']}"
next
end
if row['course_id'] && !course
add_warning csv, "An enrollment referenced a non-existent course #{row['course_id']}"
next
end
# reset cached/inferred course and section if they don't match with the opposite piece that was
# explicitly provided
section = course.default_section if section.nil? || row['section_id'].blank? && !section.default_section
course = section.course if course.nil? || (row['course_id'].blank? && course.id != section.course_id) ||
(course.id != section.course_id && section.nonxlist_course_id == course.id)
if course.id != section.course_id
add_warning csv, "An enrollment listed a section and a course that are unrelated"
next
end
# preload the course object to avoid later queries for it
section.course = course
# commit pending incremental account associations
if section != last_section and !incrementally_update_account_associations_user_ids.empty?
if incrementally_update_account_associations_user_ids.length < 10
update_account_association_user_ids.merge(incrementally_update_account_associations_user_ids)
else
User.update_account_associations(incrementally_update_account_associations_user_ids.to_a,
:incremental => true,
:precalculated_associations => User.calculate_account_associations_from_accounts(
[course.account_id, section.nonxlist_course.try(:account_id)].compact.uniq,
account_chain_cache
))
end
incrementally_update_account_associations_user_ids = Set.new
end
enrollment = section.enrollments.find_by_user_id(user.id)
unless enrollment
enrollment = Enrollment.new
enrollment.root_account = @root_account
end
enrollment.user = user
enrollment.sis_source_id = [row['course_id'], row['user_id'], row['role'], section.name].compact.join(":")
enrollment.course = course
enrollment.course_section = section
if row['role'] =~ /\Ateacher\z/i
enrollment.type = 'TeacherEnrollment'
elsif row['role'] =~ /student/i
enrollment.type = 'StudentEnrollment'
elsif row['role'] =~ /\Ata\z|assistant/i
enrollment.type = 'TaEnrollment'
elsif row['role'] =~ /\Aobserver\z/i
enrollment.type = 'ObserverEnrollment'
if row['associated_user_id']
pseudo = Pseudonym.find_by_account_id_and_sis_user_id(@root_account.id, row['associated_user_id'])
associated_enrollment = pseudo && course.student_enrollments.find_by_user_id(pseudo.user_id)
enrollment.associated_user_id = associated_enrollment && associated_enrollment.user_id
end
elsif row['role'] =~ /\Adesigner\z/i
enrollment.type = 'DesignerEnrollment'
end
if row['status']=~ /\Aactive/i
if user.workflow_state != 'deleted'
enrollment.workflow_state = 'active'
else
enrollment.workflow_state = 'deleted'
add_warning csv, "Attempted enrolling of deleted user #{row['user_id']} in course #{row['course_id']}"
end
elsif row['status']=~ /\Adeleted/i
enrollment.workflow_state = 'deleted'
elsif row['status']=~ /\Acompleted/i
enrollment.workflow_state = 'completed'
elsif row['status']=~ /\Ainactive/i
enrollment.workflow_state = 'inactive'
end
begin
enrollment.start_at = row['start_date'].blank? ? nil : DateTime.parse(row['start_date'])
enrollment.end_at = row['end_date'].blank? ? nil : DateTime.parse(row['end_date'])
rescue
add_warning(csv, "Bad date format for user #{row['user_id']} in #{row['course_id'].blank? ? 'section' : 'course'} #{row['course_id'].blank? ? row['section_id'] : row['course_id']}")
end
courses_to_touch_ids.add(enrollment.course)
if enrollment.should_update_user_account_association?
if enrollment.new_record? && !update_account_association_user_ids.include?(user.id)
incrementally_update_account_associations_user_ids.add(user.id)
else
update_account_association_user_ids.add(user.id)
end
end
if enrollment.changed?
users_to_touch_ids.add(user.id)
enrollment.sis_batch_id = @batch.id if @batch
enrollment.save_without_broadcasting
elsif @batch
enrollments_to_update_sis_batch_ids << enrollment.id
end
@sis.counts[:enrollments] += 1
end while !(row = csv_object.shift).nil? && remaining_in_transaction > 0 && tx_end_time > Time.now
end
end
yield i
while i.any_left_to_process?
i.process_batch
end
end
end
logger.debug("Raw enrollments took #{Time.now - start} seconds")
Enrollment.update_all({:sis_batch_id => @batch.id}, {:id => enrollments_to_update_sis_batch_ids}) if @batch && !enrollments_to_update_sis_batch_ids.empty?
@logger.debug("Raw enrollments took #{Time.now - start} seconds")
Enrollment.update_all({:sis_batch_id => @batch_id}, {:id => i.enrollments_to_update_sis_batch_ids}) if @batch_id && !i.enrollments_to_update_sis_batch_ids.empty?
# We batch these up at the end because we don't want to keep touching the same course over and over,
# and to avoid hitting other callbacks for the course (especially broadcast_policy)
Course.update_all({:updated_at => Time.now.utc}, {:id => courses_to_touch_ids.to_a}) unless courses_to_touch_ids.empty?
Course.update_all({:updated_at => Time.now.utc}, {:id => i.courses_to_touch_ids.to_a}) unless i.courses_to_touch_ids.empty?
# We batch these up at the end because normally a user would get several enrollments, and there's no reason
# to update their account associations on each one.
if incrementally_update_account_associations_user_ids.length < 10
update_account_association_user_ids.merge(incrementally_update_account_associations_user_ids)
else
User.update_account_associations(incrementally_update_account_associations_user_ids.to_a,
:incremental => true,
:precalculated_associations => User.calculate_account_associations_from_accounts(
[course.account_id, section.nonxlist_course.try(:account_id)].compact.uniq,
account_chain_cache
))
end
User.update_account_associations(update_account_association_user_ids.to_a,
:account_chain_cache => account_chain_cache)
User.update_all({:updated_at => Time.now.utc}, {:id => users_to_touch_ids.to_a}) unless users_to_touch_ids.empty?
logger.debug("Enrollments with batch operations took #{Time.now - start} seconds")
i.incrementally_update_account_associations
User.update_account_associations(i.update_account_association_user_ids.to_a, :account_chain_cache => i.account_chain_cache)
User.update_all({:updated_at => Time.now.utc}, {:id => i.users_to_touch_ids.to_a}) unless i.users_to_touch_ids.empty?
@logger.debug("Enrollments with batch operations took #{Time.now - start} seconds")
return i.success_count
end
private
class Work
attr_accessor :enrollments_to_update_sis_batch_ids, :courses_to_touch_ids,
:incrementally_update_account_associations_user_ids, :update_account_association_user_ids,
:account_chain_cache, :users_to_touch_ids, :success_count
def initialize(batch_id, root_account, logger, updates_every, messages)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@updates_every = updates_every
@messages = messages
@update_account_association_user_ids = Set.new
@incrementally_update_account_associations_user_ids = Set.new
@users_to_touch_ids = Set.new
@courses_to_touch_ids = Set.new
@enrollments_to_update_sis_batch_ids = []
@account_chain_cache = {}
@course = @section = nil
@enrollment_batch = []
@success_count = 0
end
def add_enrollment(course_id, section_id, user_id, role, status, start_date, end_date, associated_user_id=nil)
raise ImportError, "No course_id or section_id given for an enrollment" if course_id.blank? && section_id.blank?
raise ImportError, "No user_id given for an enrollment" if user_id.blank?
raise ImportError, "Improper role \"#{role}\" for an enrollment" unless role =~ /\Astudent|\Ateacher|\Ata|\Aobserver|\Adesigner/i
raise ImportError, "Improper status \"#{status}\" for an enrollment" unless status =~ /\Aactive|\Adeleted|\Acompleted|\Ainactive/i
@enrollment_batch << [course_id, section_id, user_id, role, status, start_date, end_date, associated_user_id]
process_batch if @enrollment_batch.size >= @updates_every
end
def any_left_to_process?
return @enrollment_batch.size > 0
end
def process_batch
return unless any_left_to_process?
transaction_timeout = Setting.get('sis_transaction_seconds', '1').to_i.seconds
Enrollment.transaction do
tx_end_time = Time.now + transaction_timeout
enrollment = nil
while !(enrollment = @enrollment_batch.shift).nil? && tx_end_time > Time.now
@logger.debug("Processing Enrollment #{enrollment.inspect}")
course_id, section_id, user_id, role, status, start_date, end_date, associated_user_id = enrollment
last_section = @section
# reset the cached course/section if they don't match this row
if @course && course_id.present? && @course.sis_source_id != course_id
@course = nil
@section = nil
end
if @section && section_id.present? && @section.sis_source_id != section_id
@section = nil
end
pseudo = Pseudonym.find_by_account_id_and_sis_user_id(@root_account.id, user_id)
user = pseudo.user rescue nil
@course ||= Course.find_by_root_account_id_and_sis_source_id(@root_account.id, course_id) unless course_id.blank?
@section ||= CourseSection.find_by_root_account_id_and_sis_source_id(@root_account.id, section_id) unless section_id.blank?
unless (@course || @section)
@messages << "Neither course #{course_id} nor section #{section_id} existed for user enrollment"
next
end
unless user
@messages << "User #{user_id} didn't exist for user enrollment"
next
end
if section_id && !@section
@messages << "An enrollment referenced a non-existent section #{section_id}"
next
end
if course_id && !@course
@messages << "An enrollment referenced a non-existent course #{course_id}"
next
end
# reset cached/inferred course and section if they don't match with the opposite piece that was
# explicitly provided
@section = @course.default_section if @section.nil? || section_id.blank? && !@section.default_section
@course = @section.course if @course.nil? || (course_id.blank? && @course.id != @section.course_id) || (@course.id != @section.course_id && @section.nonxlist_course_id == @course.id)
if @course.id != @section.course_id
@messages << "An enrollment listed a section and a course that are unrelated"
next
end
# preload the course object to avoid later queries for it
@section.course = @course
# commit pending incremental account associations
incrementally_update_account_associations if @section != last_section and !@incrementally_update_account_associations_user_ids.empty?
enrollment = @section.enrollments.find_by_user_id(user.id)
unless enrollment
enrollment = Enrollment.new
enrollment.root_account = @root_account
end
enrollment.user = user
enrollment.sis_source_id = [course_id, user_id, role, @section.name].compact.join(":")
enrollment.course = @course
enrollment.course_section = @section
if role =~ /\Ateacher\z/i
enrollment.type = 'TeacherEnrollment'
elsif role =~ /student/i
enrollment.type = 'StudentEnrollment'
elsif role =~ /\Ata\z|assistant/i
enrollment.type = 'TaEnrollment'
elsif role =~ /\Aobserver\z/i
enrollment.type = 'ObserverEnrollment'
if associated_user_id
pseudo = Pseudonym.find_by_account_id_and_sis_user_id(@root_account.id, associated_user_id)
associated_enrollment = pseudo && @course.student_enrollments.find_by_user_id(pseudo.user_id)
enrollment.associated_user_id = associated_enrollment && associated_enrollment.user_id
end
elsif role =~ /\Adesigner\z/i
enrollment.type = 'DesignerEnrollment'
end
if status =~ /\Aactive/i
if user.workflow_state != 'deleted'
enrollment.workflow_state = 'active'
else
enrollment.workflow_state = 'deleted'
@messages << "Attempted enrolling of deleted user #{user_id} in course #{course_id}"
end
elsif status =~ /\Adeleted/i
enrollment.workflow_state = 'deleted'
elsif status =~ /\Acompleted/i
enrollment.workflow_state = 'completed'
elsif status =~ /\Ainactive/i
enrollment.workflow_state = 'inactive'
end
enrollment.start_at = start_date
enrollment.end_at = end_date
@courses_to_touch_ids.add(enrollment.course)
if enrollment.should_update_user_account_association?
if enrollment.new_record? && !@update_account_association_user_ids.include?(user.id)
@incrementally_update_account_associations_user_ids.add(user.id)
else
@update_account_association_user_ids.add(user.id)
end
end
if enrollment.changed?
@users_to_touch_ids.add(user.id)
enrollment.sis_batch_id = @batch_id if @batch_id
enrollment.save_without_broadcasting
elsif @batch_id
@enrollments_to_update_sis_batch_ids << enrollment.id
end
@success_count += 1
end
end
end
def incrementally_update_account_associations
if @incrementally_update_account_associations_user_ids.length < 10
@update_account_association_user_ids.merge(@incrementally_update_account_associations_user_ids)
else
User.update_account_associations(@incrementally_update_account_associations_user_ids.to_a,
:incremental => true,
:precalculated_associations => User.calculate_account_associations_from_accounts(
[@course.account_id, @section.nonxlist_course.try(:account_id)].compact.uniq,
@account_chain_cache
))
end
@incrementally_update_account_associations_user_ids = Set.new
end
end
end
end
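
A note on the new calling convention above: process hands the block a Work object, add_enrollment buffers one row at a time, and anything still buffered when the block returns is flushed in timed transactions. A minimal caller sketch, assuming the surrounding class (declared above this hunk) is SIS::EnrollmentImporter and that batch_id, root_account, and the sample ids are supplied by the caller:

messages = []
importer = SIS::EnrollmentImporter.new(batch_id, root_account, Rails.logger)
count = importer.process(messages, 50) do |i|
  # buffers one enrollment; a transaction is run once 50 rows accumulate
  i.add_enrollment("C001", "S001", "U001", "student", "active", nil, nil)
end
# count is the number of enrollments written; malformed rows raise ImportError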

View File

@ -17,46 +17,50 @@
#
module SIS
class GradePublishingResultsImporter < SisImporter
def self.is_grade_publishing_results_csv?(row)
row.header?('enrollment_id') && row.header?('grade_publishing_status')
class GradePublishingResultsImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
enrollment_ids = (verify[:enrollment_ids] ||= {})
csv_rows(csv) do |row|
enrollment_id = row['enrollment_id']
add_error(csv, "Duplicate enrollment id #{enrollment_id}") if enrollment_ids[enrollment_id]
enrollment_ids[enrollment_id] = true
add_error(csv, "No enrollment_id given") if row['enrollment_id'].blank?
add_error(csv, "No grade_publishing_status given for enrollment #{enrollment_id}") if row['grade_publishing_status'].blank?
add_error(csv, "Improper grade_publishing_status \"#{row['grade_publishing_status']}\" for enrollment #{enrollment_id}") unless %w{ published error }.include?(row['grade_publishing_status'].downcase)
end
end
# expected columns
# enrollment_id,grade_publishing_status
def process(csv)
def process
start = Time.now
csv_rows(csv) do |row|
update_progress
logger.debug("Processing Enrollment #{row.inspect}")
importer = Work.new(@batch_id, @root_account, @logger)
yield importer
@logger.debug("Grade publishing results took #{Time.now - start} seconds")
return importer.success_count
end
enrollment = Enrollment.find_by_id(row['enrollment_id'])
enrollment = nil unless enrollment.course.root_account_id == @root_account.id || enrollment.course_section.root_account_id == @root_account.id
unless enrollment
add_warning(csv,"Enrollment #{row['enrollment_id']} doesn't exist")
next
end
private
class Work
attr_accessor :success_count
enrollment.grade_publishing_status = row['grade_publishing_status'].downcase
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@success_count = 0
end
def add_grade_publishing_result(enrollment_id, grade_publishing_status)
@logger.debug("Processing grade publishing result #{[enrollment_id, grade_publishing_status].inspect}")
raise ImportError, "No enrollment_id given" if enrollment_id.blank?
raise ImportError, "No grade_publishing_status given for enrollment #{enrollment_id}" if grade_publishing_status.blank?
raise ImportError, "Improper grade_publishing_status \"#{grade_publishing_status}\" for enrollment #{enrollment_id}" unless %w{ published error }.include?(grade_publishing_status.downcase)
enrollment = Enrollment.find_by_id(enrollment_id)
enrollment = nil unless enrollment && (enrollment.course.root_account_id == @root_account.id || enrollment.course_section.root_account_id == @root_account.id)
raise ImportError, "Enrollment #{enrollment_id} doesn't exist" unless enrollment
enrollment.grade_publishing_status = grade_publishing_status.downcase
enrollment.save
@sis.counts[:grade_publishing_results] += 1
@success_count += 1
end
logger.debug("Grade publishing results took #{Time.now - start} seconds")
end
end
end
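
The grade-publishing importer follows the same yield pattern but applies each result immediately inside add_grade_publishing_result rather than batching. A sketch of the calling side, assuming batch_id and root_account come from the surrounding import and the enrollment id is illustrative:

importer = SIS::GradePublishingResultsImporter.new(batch_id, root_account, Rails.logger)
published = importer.process do |i|
  # looks up the enrollment, checks it belongs to this root account, saves the status
  i.add_grade_publishing_result("12345", "published")
end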

View File

@ -17,73 +17,65 @@
#
module SIS
# note these are account-level groups, not course groups
class GroupImporter < SisImporter
def self.is_group_csv?(row)
row.header?('group_id') && row.header?('account_id')
class GroupImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
group_ids = (verify[:group_ids] ||= {})
csv_rows(csv) do |row|
group_id = row['group_id']
add_error(csv, "Duplicate group id #{group_id}") if group_ids[group_id]
group_ids[group_id] = true
add_error(csv, "No group_id given for a group") if row['group_id'].blank?
end
end
# expected columns
# group_id,account_id,name,status
def process(csv)
def process
start = Time.now
accounts_cache = {}
importer = Work.new(@batch_id, @root_account, @logger)
yield importer
@logger.debug("Groups took #{Time.now - start} seconds")
return importer.success_count
end
csv_rows(csv) do |row|
update_progress
logger.debug("Processing Group #{row.inspect}")
private
class Work
attr_accessor :success_count
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@success_count = 0
@accounts_cache = {}
end
def add_group(group_id, account_id, name, status)
raise ImportError, "No group_id given for a group" unless group_id.present?
@logger.debug("Processing Group #{[group_id, account_id, name, status].inspect}")
account = nil
if row['account_id'].present?
account = accounts_cache[row['account_id']]
account ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['account_id'])
unless account
add_warning(csv, "Parent account didn't exist for #{row['account_id']}")
next
end
accounts_cache[account.sis_source_id] = account
if account_id.present?
account = @accounts_cache[account_id]
account ||= Account.find_by_root_account_id_and_sis_source_id(@root_account.id, account_id)
raise ImportError, "Parent account didn't exist for #{account_id}" unless account
@accounts_cache[account.sis_source_id] = account
end
account ||= @root_account
group = Group.first(:conditions => {
:root_account_id => @root_account,
:sis_source_id => row['group_id'] })
if group.nil?
abort = false
if row['name'].blank?
add_warning(csv, "No name given for group #{row['group_id']}, skipping")
abort = true
end
unless row['status'] =~ /\A(available|closed|completed|deleted)/i
add_warning(csv, "Improper status \"#{row['status']}\" for group #{row['group_id']}, skipping")
abort = true
end
next if abort
group = Group.find_by_root_account_id_and_sis_source_id(@root_account.id, group_id)
unless group
raise ImportError, "No name given for group #{group_id}, skipping" if name.blank?
raise ImportError, "Improper status \"#{status}\" for group #{group_id}, skipping" unless status =~ /\A(available|closed|completed|deleted)/i
end
group ||= account.groups.new
# only update the name on new records, and ones that haven't had their name changed since the last sis import
if row['name'].present? && (group.new_record? || (group.sis_name && group.sis_name == group.name))
group.name = group.sis_name = row['name']
if name.present? && (group.new_record? || (group.sis_name && group.sis_name == group.name))
group.name = group.sis_name = name
end
# must set .context, not just .account, since these are account-level groups
group.context = account
group.sis_source_id = row['group_id']
group.sis_batch_id = @batch.try(:id)
group.sis_source_id = group_id
group.sis_batch_id = @batch_id
case row['status']
case status
when /available/i
group.workflow_state = 'available'
when /closed/i
@ -95,10 +87,10 @@ module SIS
end
group.save
@sis.counts[:groups] += 1
@success_count += 1
end
logger.debug("Groups took #{Time.now - start} seconds")
end
end
end
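
The group importer caches each account it resolves, so repeated rows for the same account_id only hit the database once. A usage sketch under the same assumptions (ids and names are illustrative):

SIS::GroupImporter.new(batch_id, root_account, Rails.logger).process do |i|
  # a blank account_id attaches the group to the root account
  i.add_group("G001", nil, "First Year Students", "available")
end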

View File

@ -17,56 +17,56 @@
#
module SIS
class GroupMembershipImporter < SisImporter
def self.is_group_membership_csv?(row)
row.header?('group_id') && row.header?('user_id')
class GroupMembershipImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
csv_rows(csv) do |row|
add_error(csv, "No group_id given for a group user") if row['group_id'].blank?
add_error(csv, "No user_id given for a group user") if row['user_id'].blank?
add_error(csv, "Improper status \"#{row['status']}\" for a group user") unless row['status'] =~ /\A(accepted|deleted)/i
end
end
# expected columns
# group_id,user_id,status
def process(csv)
def process
start = Time.now
groups_cache = {}
importer = Work.new(@batch_id, @root_account, @logger)
yield importer
@logger.debug("Group Users took #{Time.now - start} seconds")
return importer.success_count
end
csv_rows(csv) do |row|
update_progress
logger.debug("Processing Group User #{row.inspect}")
private
class Work
attr_accessor :success_count
pseudo = Pseudonym.first(:conditions => {
:account_id => @root_account,
:sis_user_id => row['user_id'] })
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@success_count = 0
@groups_cache = {}
end
def add_group_membership(user_id, group_id, status)
@logger.debug("Processing Group User #{[user_id, group_id, status].inspect}")
raise ImportError, "No group_id given for a group user" if group_id.blank?
raise ImportError, "No user_id given for a group user" if user_id.blank?
raise ImportError, "Improper status \"#{status}\" for a group user" unless status =~ /\A(accepted|deleted)/i
pseudo = Pseudonym.find_by_account_id_and_sis_user_id(@root_account.id, user_id)
user = pseudo.try(:user)
group = groups_cache[row['group_id']]
group ||= Group.first(:conditions => {
:root_account_id => @root_account,
:sis_source_id => row['group_id'] })
group = @groups_cache[group_id]
group ||= Group.find_by_root_account_id_and_sis_source_id(@root_account.id, group_id)
@groups_cache[group.sis_source_id] = group if group
groups_cache[group.sis_source_id] = group if group
unless user && group
add_warning csv, "User #{row['user_id']} didn't exist for group user" unless user
add_warning csv, "Group #{row['group_id']} didn't exist for group user" unless group
next
end
raise ImportError, "User #{user_id} didn't exist for group user" unless user
raise ImportError, "Group #{group_id} didn't exist for group user" unless group
# can't query group.group_memberships, since that excludes deleted memberships
group_membership = GroupMembership.first(:conditions => {
:group_id => group,
:user_id => user })
group_membership = GroupMembership.find_by_group_id_and_user_id(group.id, user.id)
group_membership ||= group.group_memberships.build(:user => user)
group_membership.sis_batch_id = @batch.try(:id)
group_membership.sis_batch_id = @batch_id
case row['status']
case status
when /accepted/i
group_membership.workflow_state = 'accepted'
when /deleted/i
@ -74,11 +74,9 @@ module SIS
end
group_membership.save
@sis.counts[:group_memberships] += 1
@success_count += 1
end
logger.debug("Group Users took #{Time.now - start} seconds")
end
end
end
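
Group memberships are keyed by the user's sis_user_id and the group's sis_source_id, with the group lookup cached per run. A sketch (ids are illustrative):

SIS::GroupMembershipImporter.new(batch_id, root_account, Rails.logger).process do |i|
  i.add_group_membership("U001", "G001", "accepted")
end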

View File

@ -17,113 +17,104 @@
#
module SIS
class SectionImporter < SisImporter
EXPECTED_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
def self.is_section_csv?(row)
#This matcher works because an enrollment doesn't have a name column
row.header?('section_id') && row.header?('name')
class SectionImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
# section ids must be unique across the account
section_ids = (verify[:sections_id] ||= {})
csv_rows(csv) do |row|
section_id = row['section_id']
course_id = row['course_id']
add_error(csv, "Duplicate section id #{section_id}") if section_ids[section_id]
section_ids[section_id] = true
add_error(csv, "No section_id given for a section in course #{course_id}") if section_id.blank?
add_error(csv, "No course_id given for a section #{section_id}") if course_id.blank?
add_error(csv, "No name given for section #{section_id} in course #{course_id}") if row['name'].blank?
add_error(csv, "Improper status \"#{row['status']}\" for section #{section_id} in course #{course_id}") unless row['status'] =~ /\Aactive|\Adeleted/i
end
end
# expected columns
# section_id,course_id,name,status,start_date,end_date
def process(csv)
def process
start = Time.now
sections_to_update_sis_batch_ids = []
course_ids_to_update_associations = [].to_set
importer = Work.new(@batch_id, @root_account, @logger)
Course.skip_updating_account_associations do
yield importer
end
Course.update_account_associations(importer.course_ids_to_update_associations.to_a) unless importer.course_ids_to_update_associations.empty?
CourseSection.update_all({:sis_batch_id => @batch_id}, {:id => importer.sections_to_update_sis_batch_ids}) if @batch_id && !importer.sections_to_update_sis_batch_ids.empty?
@logger.debug("Sections took #{Time.now - start} seconds")
return importer.success_count
end
csv_rows(csv) do |row|
update_progress
Course.skip_updating_account_associations do
logger.debug("Processing Section #{row.inspect}")
private
class Work
attr_accessor :success_count, :sections_to_update_sis_batch_ids, :course_ids_to_update_associations
course = Course.find_by_root_account_id_and_sis_source_id(@root_account.id, row['course_id'])
unless course
add_warning(csv,"Section #{row['section_id']} references course #{row['course_id']} which doesn't exist")
next
end
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@success_count = 0
@sections_to_update_sis_batch_ids = []
@course_ids_to_update_associations = [].to_set
end
name = row['name']
section = CourseSection.find_by_root_account_id_and_sis_source_id(@root_account.id, row['section_id'])
section ||= course.course_sections.find_by_sis_source_id(row['section_id'])
section ||= course.course_sections.new
section.root_account = @root_account
# this is an easy way to load up the cache with data we already have
section.course = course if course.id == section.course_id
def add_section(section_id, course_id, name, status, start_date=nil, end_date=nil, account_id=nil)
@logger.debug("Processing Section #{[section_id, course_id, name, status, start_date, end_date, account_id].inspect}")
section.account = row['account_id'].present? ? Account.find_by_root_account_id_and_sis_source_id(@root_account.id, row['account_id']) : nil
course_ids_to_update_associations.add section.course_id if section.account_id_changed?
raise ImportError, "No section_id given for a section in course #{course_id}" if section_id.blank?
raise ImportError, "No course_id given for a section #{section_id}" if course_id.blank?
raise ImportError, "No name given for section #{section_id} in course #{course_id}" if name.blank?
raise ImportError, "Improper status \"#{status}\" for section #{section_id} in course #{course_id}" unless status =~ /\Aactive|\Adeleted/i
# only update the name on new records, and ones that haven't been changed since the last sis import
if section.new_record? || (section.sis_name && section.sis_name == section.name)
section.name = section.sis_name = row['name']
end
course = Course.find_by_root_account_id_and_sis_source_id(@root_account.id, course_id)
raise ImportError, "Section #{section_id} references course #{course_id} which doesn't exist" unless course
# update the course id if necessary
if section.course_id != course.id
if section.nonxlist_course_id
# this section is crosslisted
if section.nonxlist_course_id != course.id
# but the course id we were given didn't match the crosslist info
# we have, so, uncrosslist and move
course_ids_to_update_associations.merge [course.id, section.course_id, section.nonxlist_course_id]
section.uncrosslist(:run_jobs_immediately)
section.move_to_course(course, :run_jobs_immediately)
end
else
# this section isn't crosslisted and lives on the wrong course. move
course_ids_to_update_associations.merge [section.course_id, course.id]
section = CourseSection.find_by_root_account_id_and_sis_source_id(@root_account.id, section_id)
section ||= course.course_sections.find_by_sis_source_id(section_id)
section ||= course.course_sections.new
section.root_account = @root_account
# this is an easy way to load up the cache with data we already have
section.course = course if course.id == section.course_id
section.account = account_id.present? ? Account.find_by_root_account_id_and_sis_source_id(@root_account.id, account_id) : nil
@course_ids_to_update_associations.add section.course_id if section.account_id_changed?
# only update the name on new records, and ones that haven't been changed since the last sis import
if section.new_record? || (section.sis_name && section.sis_name == section.name)
section.name = section.sis_name = name
end
# update the course id if necessary
if section.course_id != course.id
if section.nonxlist_course_id
# this section is crosslisted
if section.nonxlist_course_id != course.id
# but the course id we were given didn't match the crosslist info
# we have, so, uncrosslist and move
@course_ids_to_update_associations.merge [course.id, section.course_id, section.nonxlist_course_id]
section.uncrosslist(:run_jobs_immediately)
section.move_to_course(course, :run_jobs_immediately)
end
else
# this section isn't crosslisted and lives on the wrong course. move
@course_ids_to_update_associations.merge [section.course_id, course.id]
section.move_to_course(course, :run_jobs_immediately)
end
course_ids_to_update_associations.add section.course_id
section.sis_source_id = row['section_id']
if row['status'] =~ /active/i
section.workflow_state = 'active'
elsif row['status'] =~ /deleted/i
section.workflow_state = 'deleted'
end
begin
section.start_at = row['start_date'].blank? ? nil : DateTime.parse(row['start_date'])
section.end_at = row['end_date'].blank? ? nil : DateTime.parse(row['end_date'])
rescue
add_warning(csv, "Bad date format for section #{row['section_id']}")
end
section.restrict_enrollments_to_section_dates = (section.start_at.present? || section.end_at.present?)
if section.changed?
section.sis_batch_id = @batch.id if @batch
section.save
elsif @batch
sections_to_update_sis_batch_ids << section.id
end
@sis.counts[:sections] += 1
end
end
Course.update_account_associations(course_ids_to_update_associations.to_a) unless course_ids_to_update_associations.empty?
CourseSection.update_all({:sis_batch_id => @batch.id}, {:id => sections_to_update_sis_batch_ids}) if @batch && !sections_to_update_sis_batch_ids.empty?
logger.debug("Sections took #{Time.now - start} seconds")
@course_ids_to_update_associations.add section.course_id
section.sis_source_id = section_id
if status =~ /active/i
section.workflow_state = 'active'
elsif status =~ /deleted/i
section.workflow_state = 'deleted'
end
section.start_at = start_date
section.end_at = end_date
section.restrict_enrollments_to_section_dates = (section.start_at.present? || section.end_at.present?)
if section.changed?
section.sis_batch_id = @batch_id if @batch_id
section.save
elsif @batch_id
@sections_to_update_sis_batch_ids << section.id
end
@success_count += 1
end
end
end
end
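
The section importer defers account-association updates until process returns, so a run touching many sections recomputes course associations only once. A sketch of the calling side (ids and dates are illustrative; both dates may be nil):

SIS::SectionImporter.new(batch_id, root_account, Rails.logger).process do |i|
  i.add_section("S001", "C001", "Section 1", "active",
                Time.zone.parse("2011-08-29"), Time.zone.parse("2011-12-16"))
end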

View File

@ -1,389 +0,0 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require 'faster_csv'
require 'zip/zip'
module SIS
class SisCsv
attr_accessor :verify, :root_account, :batch, :errors, :warnings, :finished, :counts, :updates_every
IGNORE_FILES = /__macosx|desktop d[bf]|\A\..*/i
# The order of this array is important:
# * Account must be imported before Term and Course
# * Course must be imported before Section
# * Course and Section must be imported before Xlist
# * Course, Section, and User must be imported before Enrollment
IMPORTERS = [:account, :term, :abstract_course, :course, :section, :xlist, :user, :enrollment, :group, :group_membership, :grade_publishing_results]
def initialize(root_account, opts = {})
opts = opts.with_indifferent_access
@root_account = root_account
@csvs = {}
IMPORTERS.each { |importer| @csvs[importer] = [] }
@rows = {}
IMPORTERS.each { |importer| @rows[importer] = 0 }
@headers = {}
IMPORTERS.each { |importer| @headers[importer] = Set.new }
@files = opts[:files] || []
@batch = opts[:batch]
@logger = opts[:logger]
@counts = {}
IMPORTERS.each { |importer| @counts[importer.to_s.pluralize.to_sym] = 0 }
@total_rows = 1
@current_row = 0
@rows_since_progress_update = 0
@progress_multiplier = opts[:progress_multiplier] || 1
@progress_offset = opts[:progress_offset] || 0
@errors = []
@warnings = []
@pending = false
@finished = false
settings = PluginSetting.settings_for_plugin('sis_import')
@allow_printing = opts[:allow_printing].nil? ? true : opts[:allow_printing]
@parallelism = opts[:parallelism]
@parallelism ||= settings[:parallelism].to_i
@parallelism = 1 if @parallelism < 1
@parallelism = 1 unless @batch
@minimum_rows_for_parallel = settings[:minimum_rows_for_parallel].to_i
@minimum_rows_for_parallel = 1000 if @minimum_rows_for_parallel < 1
@parallel_queue = settings[:queue_for_parallel_jobs]
@parallel_queue = nil if @parallel_queue.blank?
update_pause_vars
end
def self.process(root_account, opts = {})
importer = SisCsv.new(root_account, opts)
importer.process
importer
end
def process
@tmp_dirs = []
@files.each do |file|
if File.file?(file)
if File.extname(file).downcase == '.zip'
tmp_dir = Dir.mktmpdir
@tmp_dirs << tmp_dir
unzip_file(file, tmp_dir)
Dir[File.join(tmp_dir, "**/**")].each do |fn|
process_file(tmp_dir, fn[tmp_dir.size+1 .. -1])
end
elsif File.extname(file).downcase == '.csv'
process_file(File.dirname(file), File.basename(file))
end
end
end
@files = nil
IMPORTERS.each do |importer|
@csvs[importer].each do |csv|
rows = (%x{wc -l '#{csv[:fullpath]}'}.split.first.to_i rescue 0)
@rows[importer] += rows
@total_rows += rows
end
end
@parallelism = 1 if @total_rows <= @minimum_rows_for_parallel
@verify = {}
IMPORTERS.each do |importer|
importerObject = SIS.const_get(importer.to_s.camelcase + 'Importer').new(self)
@csvs[importer].each { |csv| importerObject.verify(csv, @verify) }
@verify[:user_rows] = nil if importer == :user
end
@verify = nil
return unless @errors.empty?
# calculate how often we should update progress to get 1% resolution
# but don't leave us hanging for more than 500 rows at a time
# and don't do it more often than we have work to do
@updates_every = [ [ @total_rows / @parallelism / 100, 500 ].min, 10 ].max
if (@parallelism > 1)
# re-balance the CSVs
@batch.data[:importers] = {}
IMPORTERS.each do |importer|
if (importer != :account)
rebalance_csvs(importer)
end
@batch.data[:importers][importer] = @csvs[importer].length
end
@batch.save!
@rows = nil
@headers = nil
run_next_importer(IMPORTERS.first)
@batch.reload
while @batch.workflow_state.to_sym == :importing
sleep(0.5)
@batch.reload
end
@finished = [:imported, :imported_with_messages].include?(@batch.workflow_state.to_sym)
else
IMPORTERS.each do |importer|
importerObject = SIS.const_get(importer.to_s.camelcase + 'Importer').new(self)
@csvs[importer].each { |csv| importerObject.process(csv) }
end
@finished = true
end
rescue => e
if @batch
error_report = ErrorReport.log_exception(:sis_import, e,
:message => "Importing CSV for account: #{@root_account.id} (#{@root_account.name}) sis_batch_id: #{@batch.id}: #{e.to_s}",
:during_tests => false
)
add_error(nil, "Error while importing CSV. Please contact support. (Error report #{error_report.id})")
else
add_error(nil, "#{e.message}\n#{e.backtrace.join "\n"}")
raise e
end
ensure
@tmp_dirs.each do |tmp_dir|
FileUtils.rm_rf(tmp_dir, :secure => true) if File.directory?(tmp_dir)
end
if @batch && @parallelism == 1
@batch.data[:counts] = @counts
@batch.processing_errors = @errors
@batch.processing_warnings = @warnings
@batch.save
end
if @allow_printing and !@errors.empty? and !@batch
# If there's no batch, then we must be working via the console and we should just error out
@errors.each { |w| puts w.join ": " }
end
end
def logger
@logger ||= Rails.logger
end
def add_error(csv, message)
@errors << [ csv ? csv[:file] : "", message ]
end
def add_warning(csv, message)
@warnings << [ csv ? csv[:file] : "", message ]
end
def update_progress(count = 1)
@current_row += count
return unless @batch
@rows_since_progress_update += count
if @rows_since_progress_update >= @updates_every
if @parallelism > 1
SisBatch.transaction do
@batch.reload(:select => 'data, progress', :lock => true)
@current_row += @batch.data[:current_row] if @batch.data[:current_row]
@batch.data[:current_row] = @current_row
@batch.progress = (((@current_row.to_f/@total_rows) * @progress_multiplier) + @progress_offset) * 100
@batch.save
@current_row = 0
@rows_since_progress_update = 0
end
else
@batch.fast_update_progress( (((@current_row.to_f/@total_rows) * @progress_multiplier) + @progress_offset) * 100)
end
end
if @current_row.to_i % @pause_every == 0
sleep(@pause_duration)
update_pause_vars
end
end
def run_single_importer(importer, csv)
begin
importerObject = SIS.const_get(importer.to_s.camelcase + 'Importer').new(self)
if csv[:attachment]
file = csv[:attachment].open
csv[:fullpath] = file.path
end
importerObject.process(csv)
run_next_importer(IMPORTERS[IMPORTERS.index(importer) + 1]) if complete_importer(importer)
rescue => e
error_report = ErrorReport.log_exception(:sis_import, e,
:message => "Importing CSV for account: #{@root_account.id} (#{@root_account.name}) sis_batch_id: #{@batch.id}: #{e.to_s}",
:during_tests => false
)
add_error(nil, "Error while importing CSV. Please contact support. (Error report #{error_report.id})")
@batch.processing_errors ||= []
@batch.processing_warnings ||= []
@batch.processing_errors.concat(@errors)
@batch.processing_warnings.concat(@warnings)
@batch.workflow_state = :failed_with_messages
@batch.save!
ensure
file.close if file
end
end
private
def run_next_importer(importer)
return finish if importer.nil?
return run_next_importer(IMPORTERS[IMPORTERS.index(importer) + 1]) if @csvs[importer].empty?
if (importer == :account)
@csvs[importer].each { |csv| run_single_importer(importer, csv) }
return
end
# logger doesn't serialize well
@logger = nil
@csvs[importer].each { |csv| self.send_later_enqueue_args(:run_single_importer, { :queue => @queue, :priority => Delayed::LOW_PRIORITY }, importer, csv) }
end
def complete_importer(importer)
return unless @batch
SisBatch.transaction do
@batch.reload(:lock => true)
@batch.data[:importers][importer] -= 1
@batch.data[:counts] ||= {}
@counts.each do |k, v|
@batch.data[:counts][k] ||= 0
@batch.data[:counts][k] += v
@counts[k] = 0
end
@current_row += @batch.data[:current_row] if @batch.data[:current_row]
@batch.data[:current_row] = @current_row
@batch.progress = (((@current_row.to_f/@total_rows) * @progress_multiplier) + @progress_offset) * 100
@batch.processing_errors ||= []
@batch.processing_warnings ||= []
@batch.processing_errors.concat(@errors)
@batch.processing_warnings.concat(@warnings)
@current_row = 0
@batch.save
return @batch.data[:importers][importer] == 0
end
end
def finish
@batch.finish(true)
@finished = true
end
def update_pause_vars
return unless @batch
# throttling can be set on individual SisBatch instances, and also
# site-wide in the Setting table.
@batch.reload(:select => 'data') # update to catch changes to pause vars
@pause_every = (@batch.data[:pause_every] || Setting.get('sis_batch_pause_every', 100)).to_i
@pause_duration = (@batch.data[:pause_duration] || Setting.get('sis_batch_pause_duration', 0)).to_f
end
def unzip_file(file, dest)
Zip::ZipFile.open(file) do |zip_file|
zip_file.each do |f|
f_path = File.join(dest, f.name)
FileUtils.mkdir_p(File.dirname(f_path))
zip_file.extract(f, f_path) unless File.exist?(f_path)
end
end
end
def rebalance_csvs(importer)
rows_per_batch = (@rows[importer].to_f / @parallelism).ceil.to_i
new_csvs = []
out_csv = nil
tmp_dir = Dir.mktmpdir
@tmp_dirs << tmp_dir
temp_file = 0
headers = @headers[importer].to_a
path = nil
begin
Attachment.skip_scribd_submits
@csvs[importer].each do |csv|
remaining_in_batch = 0
FasterCSV.foreach(csv[:fullpath], SisImporter::PARSE_ARGS) do |row|
if remaining_in_batch == 0
temp_file += 1
if out_csv
out_csv.close
out_csv = nil
att = Attachment.new
att.context = @batch
att.uploaded_data = ActionController::TestUploadedFile.new(path, Attachment.mimetype(path))
att.display_name = new_csvs.last[:file]
att.save!
new_csvs.last.delete(:fullpath)
new_csvs.last[:attachment] = att
end
path = File.join(tmp_dir, "#{importer}#{temp_file}.csv")
out_csv = FasterCSV.open(path, "wb", {:headers => headers, :write_headers => true})
new_csvs << {:file => csv[:file]}
remaining_in_batch = rows_per_batch
end
out_row = FasterCSV::Row.new(headers, []);
headers.each { |header| out_row[header] = row[header] }
out_csv << out_row
remaining_in_batch -= 1
end
end
if out_csv
out_csv.close
out_csv = nil
att = Attachment.new
att.context = @batch
att.uploaded_data = ActionController::TestUploadedFile.new(path, Attachment.mimetype(path))
att.display_name = new_csvs.last[:file]
att.save!
new_csvs.last.delete(:fullpath)
new_csvs.last[:attachment] = att
end
ensure
out_csv.close if out_csv
Attachment.skip_scribd_submits(false)
end
@csvs[importer] = new_csvs
end
def process_file(base, file)
csv = { :base => base, :file => file, :fullpath => File.join(base, file) }
if File.file?(csv[:fullpath]) && File.extname(csv[:fullpath]).downcase == '.csv'
FasterCSV.foreach(csv[:fullpath], SisImporter::PARSE_ARGS) do |row|
importer = IMPORTERS.index do |importer|
if SIS.const_get(importer.to_s.camelcase + 'Importer').send('is_' + importer.to_s + '_csv?', row)
@csvs[importer] << csv
@headers[importer].merge(row.headers)
true
else
false
end
end
add_error(csv, "Couldn't find Canvas CSV import headers") if importer.nil?
break
end
elsif !File.directory?(csv[:fullpath]) && !(csv[:fullpath] =~ IGNORE_FILES)
add_warning(csv, "Skipping unknown file type")
end
end
end
end

View File

@ -1,62 +0,0 @@
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module SIS
class SisImporter
PARSE_ARGS = {:headers => :first_row,
:skip_blanks => true,
:header_converters => :downcase,
:converters => lambda{|field|field ? field.strip : field}
}
def initialize(sis_csv)
@sis = sis_csv
@root_account = @sis.root_account
@batch = @sis.batch
end
def verify(csv)
raise NotImplementedError
end
def process(csv)
raise NotImplementedError
end
def logger
@sis.logger
end
def add_error(csv, message)
@sis.add_error(csv, message)
end
def add_warning(csv, message)
@sis.add_warning(csv, message)
end
def update_progress(count = 1)
@sis.update_progress(count)
end
def csv_rows(csv)
FasterCSV.foreach(csv[:fullpath], PARSE_ARGS) do |row|
yield row
end
end
end
end

View File

@ -17,67 +17,63 @@
#
module SIS
class TermImporter < SisImporter
EXPECTED_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
def self.is_term_csv?(row)
#This matcher works because a course has long_name/short_name
row.header?('term_id') && row.header?('name')
class TermImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
term_ids = (verify[:terms_id] ||= {})
csv_rows(csv) do |row|
term_id = row['term_id']
add_error(csv, "Duplicate term id #{term_id}") if term_ids[term_id]
term_ids[term_id] = true
add_error(csv, "No term_id given for a term") if row['term_id'].blank?
add_error(csv, "No name given for term #{term_id}") if row['name'].blank?
add_error(csv, "Improper status \"#{row['status']}\" for term #{term_id}") unless row['status'] =~ /\Aactive|\Adeleted/i
end
end
# expected columns
# term_id,name,status,start_date,end_date
def process(csv)
def process
start = Time.now
csv_rows(csv) do |row|
update_progress
logger.debug("Processing Term #{row.inspect}")
term = nil
term = @root_account.enrollment_terms.find_by_sis_source_id(row['term_id'])
importer = Work.new(@batch_id, @root_account, @logger)
yield importer
@logger.debug("Terms took #{Time.now - start} seconds")
return importer.success_count
end
private
class Work
attr_accessor :success_count
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@success_count = 0
end
def add_term(term_id, name, status, start_date=nil, end_date=nil)
@logger.debug("Processing Term #{[term_id, name, status, start_date, end_date].inspect}")
raise ImportError, "No term_id given for a term" if term_id.blank?
raise ImportError, "No name given for term #{term_id}" if name.blank?
raise ImportError, "Improper status \"#{status}\" for term #{term_id}" unless status =~ /\Aactive|\Adeleted/i
term = @root_account.enrollment_terms.find_by_sis_source_id(term_id)
term ||= @root_account.enrollment_terms.new
# only update the name on new records, and ones that haven't been changed since the last sis import
# only update the name on new records, and ones that haven't been
# changed since the last sis import
if term.new_record? || (term.sis_name && term.sis_name == term.name)
term.name = term.sis_name = row['name']
term.name = term.sis_name = name
end
term.sis_source_id = row['term_id']
term.sis_batch_id = @batch.id if @batch
if row['status'] =~ /active/i
term.sis_source_id = term_id
term.sis_batch_id = @batch_id if @batch_id
if status =~ /active/i
term.workflow_state = 'active'
elsif row['status'] =~ /deleted/i
elsif status =~ /deleted/i
term.workflow_state = 'deleted'
end
begin
unless row['start_date'].blank?
term.start_at = DateTime.parse(row['start_date'])
end
unless row['end_date'].blank?
term.end_at = DateTime.parse(row['end_date'])
end
rescue
add_warning(csv, "Bad date format for term #{row['term_id']}")
end
term.start_at = start_date
term.end_at = end_date
term.save
@sis.counts[:terms] += 1
@success_count += 1
end
logger.debug("Terms took #{Time.now - start} seconds")
end
end
end
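
Term rows are applied one at a time, and a term's name is only overwritten while it still matches the last SIS-provided name, so local renames stick. A minimal sketch (term id and name are illustrative):

SIS::TermImporter.new(batch_id, root_account, Rails.logger).process do |i|
  i.add_term("T001", "Fall 2011", "active")
end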

View File

@ -17,181 +17,184 @@
#
module SIS
class UserImporter < SisImporter
def self.is_user_csv?(row)
row.header?('user_id') && row.header?('login_id')
class UserImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
user_ids = (verify[:user_ids] ||= {})
identical_row_checker = (verify[:user_rows] ||= {})
csv_rows(csv) do |row|
user_id = row['user_id']
if user_ids[user_id]
if identical_row_checker[user_id] != row
add_error(csv, "Non-identical duplicate user rows for #{user_id}")
else
add_warning(csv, "Duplicate user id #{user_id}")
end
else
identical_row_checker[user_id] = row
end
user_ids[user_id] = true
add_error(csv, "No user_id given for a user") if row['user_id'].blank?
add_error(csv, "No login_id given for user #{user_id}") if row['login_id'].blank?
# add_error(csv, "No email given for user #{user_id}") if row['email'].blank?
add_error(csv, "Improper status for user #{user_id}") unless row['status'] =~ /active|deleted/i
end
end
# expected columns:
# user_id,login_id,first_name,last_name,email,status
def process(csv)
def process(updates_every, messages)
start = Time.now
users_to_set_sis_batch_ids = []
pseudos_to_set_sis_batch_ids = []
users_to_add_account_associations = []
users_to_update_account_associations = []
importer = Work.new(@batch_id, @root_account, @logger, updates_every, messages)
User.skip_updating_account_associations do
FasterCSV.open(csv[:fullpath], "rb", PARSE_ARGS) do |csv_object|
row = csv_object.shift
count = 0
until row.nil?
update_progress(count)
count = 0
# this transaction assumes that the users and pseudonyms are in the same database
User.transaction do
remaining_in_transaction = @sis.updates_every
tx_end_time = Time.now + Setting.get('sis_transaction_seconds', '1').to_i.seconds
yield importer
while importer.any_left_to_process?
importer.process_batch
end
end
User.update_account_associations(importer.users_to_add_account_associations, :incremental => true, :precalculated_associations => {@root_account.id => 0})
User.update_account_associations(importer.users_to_update_account_associations)
User.update_all({:creation_sis_batch_id => @batch_id}, {:id => importer.users_to_set_sis_batch_ids}) if @batch_id && !importer.users_to_set_sis_batch_ids.empty?
Pseudonym.update_all({:sis_batch_id => @batch_id}, {:id => importer.pseudos_to_set_sis_batch_ids}) if @batch_id && !importer.pseudos_to_set_sis_batch_ids.empty?
@logger.debug("Users took #{Time.now - start} seconds")
return importer.success_count
end
begin
logger.debug("Processing User #{row.inspect}")
private
class Work
attr_accessor :success_count, :users_to_set_sis_batch_ids,
:pseudos_to_set_sis_batch_ids, :users_to_add_account_associations,
:users_to_update_account_associations
count += 1
remaining_in_transaction -= 1
def initialize(batch_id, root_account, logger, updates_every, messages)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@updates_every = updates_every
@batched_users = []
@messages = messages
@success_count = 0
update_account_association = false
@users_to_set_sis_batch_ids = []
@pseudos_to_set_sis_batch_ids = []
@users_to_add_account_associations = []
@users_to_update_account_associations = []
end
pseudo = Pseudonym.find_by_account_id_and_sis_user_id(@root_account.id, row['user_id'])
pseudo_by_login = Pseudonym.find_by_unique_id_and_account_id(row['login_id'], @root_account.id)
pseudo ||= pseudo_by_login
pseudo ||= Pseudonym.find_by_unique_id_and_account_id(row['email'], @root_account.id) if row['email'].present?
def add_user(user_id, login_id, status, first_name, last_name, email=nil, password=nil, ssha_password=nil)
@logger.debug("Processing User #{[user_id, login_id, status, first_name, last_name, email, password, ssha_password].inspect}")
if pseudo
if pseudo.sis_user_id.present? && pseudo.sis_user_id != row['user_id']
add_warning(csv, "user #{pseudo.sis_user_id} has already claimed #{row['user_id']}'s requested login information, skipping")
@sis.counts[:users] += 1
next
end
if !pseudo_by_login.nil? && pseudo.unique_id != row['login_id']
add_warning(csv, "user #{pseudo_by_login.sis_user_id} has already claimed #{row['user_id']}'s requested login information, skipping")
@sis.counts[:users] += 1
next
end
raise ImportError, "No user_id given for a user" if user_id.blank?
raise ImportError, "No login_id given for user #{user_id}" if login_id.blank?
raise ImportError, "Improper status for user #{user_id}" unless status =~ /\A(active|deleted)/i
user = pseudo.user
user.name = user.sis_name = "#{row['first_name']} #{row['last_name']}" if user.sis_name && user.sis_name == user.name
@batched_users << [user_id, login_id, status, first_name, last_name, email, password, ssha_password]
process_batch if @batched_users.size >= @updates_every
end
else
user = User.new
user.name = user.sis_name = "#{row['first_name']} #{row['last_name']}"
end
def any_left_to_process?
return @batched_users.size > 0
end
if row['status']=~ /active/i
user.workflow_state = 'registered'
elsif row['status']=~ /deleted/i
user.workflow_state = 'deleted'
user.enrollments.scoped(:conditions => {:root_account_id => @root_account.id }).update_all(:workflow_state => 'deleted')
users_to_update_account_associations << user.id unless user.new_record?
end
def process_batch
return unless any_left_to_process?
transaction_timeout = Setting.get('sis_transaction_seconds', '1').to_i.seconds
User.transaction do
tx_end_time = Time.now + transaction_timeout
user_row = nil
while !(user_row = @batched_users.shift).nil? && tx_end_time > Time.now
@logger.debug("Processing User #{user_row.inspect}")
user_id, login_id, status, first_name, last_name, email, password, ssha_password = user_row
pseudo ||= Pseudonym.new
pseudo.unique_id = row['login_id']
pseudo.sis_source_id = row['login_id']
pseudo.sis_user_id = row['user_id']
pseudo.account = @root_account
pseudo.workflow_state = row['status']=~ /active/i ? 'active' : 'deleted'
# if a password is provided, use it only if this is a new user, or the user hasn't changed the password in canvas *AND* the incoming password has changed
# otherwise the persistence_token will change even though we're setting to the same password, logging the user out
if !row['password'].blank? && (pseudo.new_record? || pseudo.password_auto_generated && !pseudo.valid_password?(row['password']))
pseudo.password = row['password']
pseudo.password_confirmation = row['password']
pseudo.password_auto_generated = true
end
pseudo.sis_ssha = row['ssha_password'] if !row['ssha_password'].blank?
pseudo.reset_persistence_token if pseudo.sis_ssha_changed? && pseudo.password_auto_generated
pseudo = Pseudonym.find_by_account_id_and_sis_user_id(@root_account.id, user_id)
pseudo_by_login = Pseudonym.find_by_unique_id_and_account_id(login_id, @root_account.id)
pseudo ||= pseudo_by_login
pseudo ||= Pseudonym.find_by_unique_id_and_account_id(email, @root_account.id) if email.present?
begin
User.transaction(:requires_new => true) do
if user.changed?
user.creation_sis_batch_id = @batch.id if @batch
new_record = user.new_record?
raise user.errors.first.join(" ") if !user.save_without_broadcasting && user.errors.size > 0
users_to_add_account_associations << user.id if new_record && user.workflow_state != 'deleted'
elsif @batch
users_to_set_sis_batch_ids << user.id
end
pseudo.user_id = user.id
if pseudo.changed?
pseudo.sis_batch_id = @batch.id if @batch
raise pseudo.errors.first.join(" ") if !pseudo.save_without_broadcasting && pseudo.errors.size > 0
# we do the elsif @batch thing later
end
end
rescue => e
add_warning(csv, "Failed saving user. Internal error: #{e}")
next
end
if pseudo
if pseudo.sis_user_id.present? && pseudo.sis_user_id != user_id
@messages << "user #{pseudo.sis_user_id} has already claimed #{user_id}'s requested login information, skipping"
next
end
if !pseudo_by_login.nil? && pseudo.unique_id != login_id
@messages << "user #{pseudo_by_login.sis_user_id} has already claimed #{user_id}'s requested login information, skipping"
next
end
if row['email'].present?
comm = CommunicationChannel.find_by_path_and_workflow_state_and_path_type(row['email'], 'active', 'email')
if !comm and row['status']=~ /active/i
begin
comm = pseudo.sis_communication_channel || CommunicationChannel.new
if comm.new_record?
comm.user_id = user.id
comm.pseudonym_id = pseudo.id
pseudo.sis_communication_channel = comm
end
comm.path = row['email']
comm.workflow_state = 'active'
comm.do_delayed_jobs_immediately = true
comm.save_without_broadcasting if comm.changed?
pseudo.communication_channel_id = comm.id
rescue => e
add_warning(csv, "Failed adding communication channel #{row['email']} to user #{row['login_id']}")
end
elsif row['status'] =~ /active/i
if comm.user_id != pseudo.user_id
add_warning(csv, "E-mail address #{row['email']} for user #{row['login_id']} is already claimed; ignoring")
else
pseudo.sis_communication_channel.destroy if pseudo.sis_communication_channel != comm and !pseudo.sis_communication_channel.nil?
pseudo.sis_communication_channel = comm
pseudo.communication_channel_id = comm.id
comm.do_delayed_jobs_immediately = true
comm.save_without_broadcasting if comm.changed?
end
end
end
user = pseudo.user
user.name = user.sis_name = "#{first_name} #{last_name}" if user.sis_name && user.sis_name == user.name
if pseudo.changed?
pseudo.sis_batch_id = @batch.id if @batch
pseudo.save_without_broadcasting
elsif @batch && pseudo.sis_batch_id != @batch.id
pseudos_to_set_sis_batch_ids << pseudo.id
end
@sis.counts[:users] += 1
end while !(row = csv_object.shift).nil? && remaining_in_transaction > 0 && tx_end_time > Time.now
else
user = User.new
user.name = user.sis_name = "#{first_name} #{last_name}"
end
if status =~ /active/i
user.workflow_state = 'registered'
elsif status =~ /deleted/i
user.workflow_state = 'deleted'
user.enrollments.scoped(:conditions => {:root_account_id => @root_account.id }).update_all(:workflow_state => 'deleted')
@users_to_update_account_associations << user.id unless user.new_record?
end
pseudo ||= Pseudonym.new
pseudo.unique_id = login_id
pseudo.sis_source_id = login_id
pseudo.sis_user_id = user_id
pseudo.account = @root_account
pseudo.workflow_state = status =~ /active/i ? 'active' : 'deleted'
# if a password is provided, use it only if this is a new user, or the user hasn't changed the password in canvas *AND* the incoming password has changed
# otherwise the persistence_token will change even though we're setting to the same password, logging the user out
if !password.blank? && (pseudo.new_record? || pseudo.password_auto_generated && !pseudo.valid_password?(password))
pseudo.password = password
pseudo.password_confirmation = password
pseudo.password_auto_generated = true
end
pseudo.sis_ssha = ssha_password if !ssha_password.blank?
pseudo.reset_persistence_token if pseudo.sis_ssha_changed? && pseudo.password_auto_generated
begin
User.transaction(:requires_new => true) do
if user.changed?
user.creation_sis_batch_id = @batch_id if @batch_id
new_record = user.new_record?
raise user.errors.first.join(" ") if !user.save_without_broadcasting && user.errors.size > 0
@users_to_add_account_associations << user.id if new_record && user.workflow_state != 'deleted'
elsif @batch_id
@users_to_set_sis_batch_ids << user.id
end
pseudo.user_id = user.id
if pseudo.changed?
pseudo.sis_batch_id = @batch_id if @batch_id
raise pseudo.errors.first.join(" ") if !pseudo.save_without_broadcasting && pseudo.errors.size > 0
end
end
rescue => e
@messages << "Failed saving user. Internal error: #{e}"
next
end
if email.present?
comm = CommunicationChannel.find_by_path_and_workflow_state_and_path_type(email, 'active', 'email')
if !comm and status =~ /active/i
begin
comm = pseudo.sis_communication_channel || CommunicationChannel.new
if comm.new_record?
comm.user_id = user.id
comm.pseudonym_id = pseudo.id
pseudo.sis_communication_channel = comm
end
comm.path = email
comm.workflow_state = 'active'
comm.do_delayed_jobs_immediately = true
comm.save_without_broadcasting if comm.changed?
pseudo.communication_channel_id = comm.id
rescue => e
@messages << "Failed adding communication channel #{email} to user #{login_id}"
end
elsif status =~ /active/i
if comm.user_id != pseudo.user_id
@messages << "E-mail address #{email} for user #{login_id} is already claimed; ignoring"
else
pseudo.sis_communication_channel.destroy if pseudo.sis_communication_channel != comm and !pseudo.sis_communication_channel.nil?
pseudo.sis_communication_channel = comm
pseudo.communication_channel_id = comm.id
comm.do_delayed_jobs_immediately = true
comm.save_without_broadcasting if comm.changed?
end
end
end
if pseudo.changed?
pseudo.sis_batch_id = @batch_id if @batch_id
pseudo.save_without_broadcasting
elsif @batch_id && pseudo.sis_batch_id != @batch_id
@pseudos_to_set_sis_batch_ids << pseudo.id
end
@success_count += 1
end
User.update_account_associations(users_to_add_account_associations, :incremental => true, :precalculated_associations => {@root_account.id => 0})
User.update_account_associations(users_to_update_account_associations)
User.update_all({:creation_sis_batch_id => @batch.id}, {:id => users_to_set_sis_batch_ids}) if @batch && !users_to_set_sis_batch_ids.empty?
Pseudonym.update_all({:sis_batch_id => @batch.id}, {:id => pseudos_to_set_sis_batch_ids}) if @batch && !pseudos_to_set_sis_batch_ids.empty?
logger.debug("Users took #{Time.now - start} seconds")
end
end
end

View File

@ -16,113 +16,113 @@
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require 'skip_callback'
module SIS
class XlistImporter < SisImporter
def self.is_xlist_csv?(row)
row.header?('xlist_course_id') && row.header?('section_id')
class XlistImporter
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
end
def verify(csv, verify)
csv_rows(csv) do |row|
add_error(csv, "No xlist_course_id given for a cross-listing") if row['xlist_course_id'].blank?
add_error(csv, "No section_id given for a cross-listing") if row['section_id'].blank?
add_error(csv, "Improper status \"#{row['status']}\" for a cross-listing") unless row['status'] =~ /\A(active|deleted)\z/i
end
end
# possible columns:
# xlist_course_id, section_id, status
def process(csv)
def process
start = Time.now
course = nil
course_ids_to_update_associations = [].to_set
importer = Work.new(@batch_id, @root_account, @logger)
Course.skip_callback(:update_enrollments_later) do
csv_rows(csv) do |row|
update_progress
logger.debug("Processing CrossListing #{row.inspect}")
Course.skip_updating_account_associations do
section = CourseSection.find_by_root_account_id_and_sis_source_id(@root_account.id, row['section_id'])
unless section
add_warning(csv, "A cross-listing referenced a non-existent section #{row['section_id']}")
next
end
# reduce database hits if possible (csv sorted by xlist_course_id will be faster)
unless course && course.sis_source_id == row['xlist_course_id']
course = Course.find_by_root_account_id_and_sis_source_id(@root_account.id, row['xlist_course_id'])
if !course && row['status'] =~ /\Aactive\z/i
# no course with this crosslist id found, make a new course,
# using the section's current course as a template
course = Course.new
course.root_account = @root_account
course.account_id = section.course.account_id
course.name = course.sis_name = section.course.name
course.short_name = course.sis_course_code = section.course.short_name
course.sis_source_id = row['xlist_course_id']
course.enrollment_term_id = section.course.enrollment_term_id
course.sis_batch_id = @batch.id if @batch
course.workflow_state = 'claimed'
course.template_course = section.course
course.save_without_broadcasting!
course_ids_to_update_associations.add course.id
end
end
unless section.sticky_xlist
if row['status'] =~ /\Aactive\z/i
if course.deleted?
course.workflow_state = 'claimed'
course.save_without_broadcasting!
course.update_enrolled_users
course_ids_to_update_associations.add course.id
end
if section.course_id == course.id
@sis.counts[:xlists] += 1
next
end
begin
course_ids_to_update_associations.merge [course.id, section.course_id, section.nonxlist_course_id].compact
section.crosslist_to_course(course, :run_jobs_immediately, :nonsticky)
rescue => e
add_warning(csv, "An active cross-listing failed: #{e}")
next
end
elsif row['status'] =~ /\Adeleted\z/i
if course && section.course_id != course.id
@sis.counts[:xlists] += 1
next
end
begin
course_ids_to_update_associations.merge [section.course_id, section.nonxlist_course_id]
section.uncrosslist(:run_jobs_immediately)
rescue => e
add_warning(csv, "A deleted cross-listing failed: #{e}")
next
end
else
add_error(csv, "Improper status #{row['status']} for a cross-listing")
end
@sis.counts[:xlists] += 1
end
end
Course.skip_updating_account_associations do
yield importer
end
end
Course.update_account_associations(importer.course_ids_to_update_associations.to_a) unless importer.course_ids_to_update_associations.empty?
@logger.debug("Crosslists took #{Time.now - start} seconds")
return importer.success_count
end
private
class Work
attr_accessor :success_count, :course_ids_to_update_associations
def initialize(batch_id, root_account, logger)
@batch_id = batch_id
@root_account = root_account
@logger = logger
@success_count = 0
@course = nil
@course_ids_to_update_associations = [].to_set
end
def add_crosslist(xlist_course_id, section_id, status)
@logger.debug("Processing CrossListing #{[xlist_course_id, section_id, status].inspect}")
raise ImportError, "No xlist_course_id given for a cross-listing" if xlist_course_id.blank?
raise ImportError, "No section_id given for a cross-listing" if section_id.blank?
raise ImportError, "Improper status \"#{status}\" for a cross-listing" unless status =~ /\A(active|deleted)\z/i
section = CourseSection.find_by_root_account_id_and_sis_source_id(@root_account.id, section_id)
raise ImportError, "A cross-listing referenced a non-existent section #{section_id}" unless section
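# reduce database hits if possible (input sorted by xlist_course_id will be faster)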
unless @course && @course.sis_source_id == xlist_course_id
@course = Course.find_by_root_account_id_and_sis_source_id(@root_account.id, xlist_course_id)
if !@course && status =~ /\Aactive\z/i
# no course with this crosslist id found, make a new course,
# using the section's current course as a template
@course = Course.new
@course.root_account = @root_account
@course.account_id = section.course.account_id
@course.name = @course.sis_name = section.course.name
@course.short_name = @course.sis_course_code = section.course.short_name
@course.sis_source_id = xlist_course_id
@course.enrollment_term_id = section.course.enrollment_term_id
@course.sis_batch_id = @batch_id if @batch_id
@course.workflow_state = 'claimed'
@course.template_course = section.course
@course.save_without_broadcasting!
@course_ids_to_update_associations.add @course.id
end
end
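# don't override cross-listings that have been changed manually (sticky)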
unless section.sticky_xlist
if status =~ /\Aactive\z/i
if @course.deleted?
@course.workflow_state = 'claimed'
@course.save_without_broadcasting!
@course.update_enrolled_users
@course_ids_to_update_associations.add @course.id
end
if section.course_id == @course.id
@success_count += 1
return
end
begin
@course_ids_to_update_associations.merge [@course.id, section.course_id, section.nonxlist_course_id].compact
section.crosslist_to_course(@course, :run_jobs_immediately, :nonsticky)
rescue => e
raise ImportError, "An active cross-listing failed: #{e}"
end
elsif status =~ /\Adeleted\z/i
if @course && section.course_id != @course.id
@success_count += 1
return
end
begin
@course_ids_to_update_associations.merge [section.course_id, section.nonxlist_course_id]
section.uncrosslist(:run_jobs_immediately)
rescue => e
raise ImportError, "A deleted cross-listing failed: #{e}"
end
else
raise ImportError, "Improper status #{status} for a cross-listing"
end
@success_count += 1
end
end
Course.update_account_associations(course_ids_to_update_associations.to_a) unless course_ids_to_update_associations.empty?
end
end
end
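Read as a whole, the new layout separates the transport-agnostic work (the Work class, which raises ImportError for bad rows) from the driver (process, which yields the Work object and batches the account-association updates afterwards). A minimal caller sketch follows; the account lookup, the ids, and the assumption that the CSV-side importer (whose diff is suppressed just below) rescues ImportError and downgrades it to a warning are illustrative only, and the exact namespace of ImportError is not shown in this commit:

logger       = Rails.logger
root_account = Account.default   # placeholder; real callers pass the batch's root account
batch_id     = nil               # no SisBatch in this sketch

importer = SIS::XlistImporter.new(batch_id, root_account, logger)
count = importer.process do |work|
  begin
    # one call per parsed CSV row; these values are made up
    work.add_crosslist("X-BIO-101", "S-BIO-101-01", "active")
  rescue ImportError => e
    # per the commit description, bad rows become warnings instead of blocking errors
    logger.warn("cross-listing skipped: #{e}")
  end
end
logger.debug("#{count} cross-listings processed")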

File diff suppressed because it is too large

View File

@ -50,7 +50,7 @@ describe Account do
path = "#{tmp.path}.csv"
tmp.close!
File.open(path, "w+") { |f| f.puts lines.join "\n" }
importer = SIS::SisCsv.process(@account, :files => [path],
importer = SIS::CSV::Import.process(@account, :files => [path],
:allow_printing => false)
File.unlink path
importer.warnings.should == []
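The spec helper above doubles as a usage example for the renamed entry point. Spelled out on its own, assuming the sis code is already loaded as in the specs; the account variable and CSV path are placeholders, and only the :files and :allow_printing options appear in the calls in this commit:

account = Account.default            # placeholder for the account under test
path    = "/tmp/users.csv"           # placeholder CSV written to disk beforehand

importer = SIS::CSV::Import.process(account,
                                    :files => [ path ],
                                    :allow_printing => false)

# with this change, malformed rows no longer block the batch; they surface as warnings
importer.warnings.each { |w| puts w.inspect }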

View File

@ -314,7 +314,7 @@ Spec::Runner.configure do |config|
tmp.close!
File.open(path, "w+") { |f| f.puts lines.join "\n" }
importer = SIS::SisCsv.process(@account, :files => [ path ], :allow_printing=>false)
importer = SIS::CSV::Import.process(@account, :files => [ path ], :allow_printing=>false)
File.unlink path