update all invocations of send_later and friends to new syntax

Change-Id: I7f40ed058b50882121da69f0cb05966854b8e920
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/250924
Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com>
Reviewed-by: Simon Williams <simon@instructure.com>
QA-Review: Simon Williams <simon@instructure.com>
Product-Review: Simon Williams <simon@instructure.com>
Cody Cutrer 2020-10-22 17:32:48 -06:00
parent 29621bcff5
commit 548a2a1732
189 changed files with 621 additions and 1064 deletions
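The translation applied throughout is mechanical: the target method moves out of the symbol argument and is called directly on the proxy returned by delay / delay_if_production, the old enqueue-options hash (run_at, singleton, strand, n_strand, priority, on_conflict) becomes keyword arguments to that proxy, send_at(time, :method, ...) becomes delay(run_at: time).method(...), and no_delay: true becomes ignore_transaction: true; many call sites also drop max_attempts: 1, apparently because it now matches the default. A minimal sketch of the mapping, assuming a hypothetical ExampleModel and do_work method (the option values are placeholders, not taken from this commit):

class ExampleModel < ActiveRecord::Base
  def queue_work(arg)
    # before: send_later(:do_work, arg)
    delay.do_work(arg)

    # before: send_later_if_production(:do_work, arg)
    delay_if_production.do_work(arg)

    # before: send_later_enqueue_args(:do_work, { run_at: 1.hour.from_now, singleton: "do_work_#{global_id}" }, arg)
    delay(run_at: 1.hour.from_now, singleton: "do_work_#{global_id}").do_work(arg)

    # before: send_later_if_production_enqueue_args(:do_work, { priority: Delayed::LOW_PRIORITY }, arg)
    delay_if_production(priority: Delayed::LOW_PRIORITY).do_work(arg)

    # before: send_at(1.hour.from_now, :do_work, arg)
    delay(run_at: 1.hour.from_now).do_work(arg)

    # before: send_later_enqueue_args(:do_work, { no_delay: true }, arg)
    delay(ignore_transaction: true).do_work(arg)
  end

  def do_work(arg); end
end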

@ -74,7 +74,7 @@ gem 'inst_statsd', '2.1.6'
gem 'statsd-ruby', '1.4.0', require: false
gem 'aroi', '0.0.7', require: false
gem 'dogstatsd-ruby', '4.7.0'
gem 'inst-jobs', '1.0.0'
gem 'inst-jobs', '1.0.3'
gem 'fugit', '1.3.3', require: false
gem 'et-orbi', '1.2.2', require: false
gem 'inst-jobs-autoscaling', '2.0.0'
@ -119,7 +119,7 @@ gem 'guardrail', '2.0.0'
gem 'simple_oauth', '0.3.1', require: false
gem 'switchman', '2.0.1'
gem 'open4', '1.3.4', require: false
gem 'switchman-inst-jobs', '3.0.0'
gem 'switchman-inst-jobs', '3.0.2'
gem 'twilio-ruby', '5.36.0', require: false
gem 'tzinfo', '1.2.7'
gem 'vault', '0.13.0', require: false

@ -174,7 +174,7 @@ class AssignmentGroupsController < ApplicationController
if assignment_ids_to_update.any?
assignments.where(:id => assignment_ids_to_update).update_all(assignment_group_id: @group.id, updated_at: Time.now.utc)
tags_to_update += MasterCourses::ChildContentTag.where(:content_type => "Assignment", :content_id => assignment_ids_to_update).to_a
Canvas::LiveEvents.send_later_if_production(:assignments_bulk_updated, assignment_ids_to_update)
Canvas::LiveEvents.delay_if_production.assignments_bulk_updated(assignment_ids_to_update)
end
quizzes = @context.active_quizzes.where(assignment_id: order)
quiz_ids_to_update = quizzes.where.not(:assignment_group_id => @group.id).pluck(:id)

@ -162,7 +162,7 @@ class AssignmentsController < ApplicationController
!@current_user_submission.submission_type
if @current_user_submission
GuardRail.activate(:primary) do
@current_user_submission.send_later(:context_module_action)
@current_user_submission.delay.context_module_action
end
end
end

@ -930,7 +930,7 @@ class CalendarEventsApiController < ApplicationController
event_hashes = builder.generate_event_hashes(timetables)
builder.process_and_validate_event_hashes(event_hashes)
raise "error creating timetable events #{builder.errors.join(", ")}" if builder.errors.present?
builder.send_later(:create_or_update_events, event_hashes) # someday we may want to make this a trackable progress job /shrug
builder.delay.create_or_update_events(event_hashes) # someday we may want to make this a trackable progress job /shrug
end
# delete timetable events for sections missing here
@ -1004,7 +1004,7 @@ class CalendarEventsApiController < ApplicationController
return render :json => {:errors => builder.errors}, :status => :bad_request
end
builder.send_later(:create_or_update_events, event_hashes)
builder.delay.create_or_update_events(event_hashes)
render json: {status: 'ok'}
end
end

@ -303,7 +303,7 @@ class ContextModulesController < ApplicationController
@context.touch
# # Background this, not essential that it happen right away
# ContextModule.send_later(:update_tag_order, @context)
# ContextModule.delay.update_tag_order(@context)
render :json => @modules.map{ |m| m.as_json(include: :content_tags, methods: :workflow_state) }
end
end

@ -927,9 +927,8 @@ class ConversationsController < ApplicationController
message = @conversation.process_new_message(message_args, @recipients, message_ids, @tags)
render :json => conversation_json(@conversation.reload, @current_user, session, :messages => [message])
else
@conversation.send_later_enqueue_args(:process_new_message,
{:strand => "add_message_#{@conversation.global_conversation_id}", :max_attempts => 1},
message_args, @recipients, message_ids, @tags)
@conversation.delay(strand: "add_message_#{@conversation.global_conversation_id}").
process_new_message(message_args, @recipients, message_ids, @tags)
return render :json => [], :status => :accepted
end
else

@ -1544,8 +1544,8 @@ class CoursesController < ApplicationController
:home_page_announcement_limit
)
changes = changed_settings(@course.changes, @course.settings, old_settings)
@course.send_later_if_production_enqueue_args(:touch_content_if_public_visibility_changed,
{ :priority => Delayed::LOW_PRIORITY }, changes)
@course.delay_if_production(priority: Delayed::LOW_PRIORITY).
touch_content_if_public_visibility_changed(changes)
DueDateCacher.with_executing_user(@current_user) do
if @course.save
@ -1614,7 +1614,7 @@ class CoursesController < ApplicationController
def re_send_invitations
get_context
if authorized_action(@context, @current_user, [:manage_students, :manage_admin_users])
@context.send_later_if_production(:re_send_invitations!, @current_user)
@context.delay_if_production.re_send_invitations!(@current_user)
respond_to do |format|
format.html { redirect_to course_settings_url }
@ -2802,7 +2802,8 @@ class CoursesController < ApplicationController
changes = changed_settings(@course.changes, @course.settings, old_settings)
@course.delay_if_production(priority: Delayed::LOW_PRIORITY).touch_content_if_public_visibility_changed(changes)
# RUBY 3.0 - **{} can go away, because data won't implicitly convert to kwargs
@course.delay_if_production(priority: Delayed::LOW_PRIORITY).touch_content_if_public_visibility_changed(changes, **{})
if @course.errors.none? && @course.save
Auditors::Course.record_updated(@course, @current_user, changes, source: logging_source)

@ -190,7 +190,7 @@ class EportfoliosController < ApplicationController
@attachment.file_state = '0'
@attachment.user = @current_user
@attachment.save!
ContentZipper.send_later_enqueue_args(:process_attachment, { :priority => Delayed::LOW_PRIORITY, :max_attempts => 1 }, @attachment)
ContentZipper.delay(priority: Delayed::LOW_PRIORITY).process_attachment(@attachment)
render :json => @attachment
else
respond_to do |format|

@ -158,7 +158,7 @@ class ErrorsController < ApplicationController
report.request_context_id = RequestContextGenerator.request_id
report.assign_data(error)
report.save
report.send_later(:send_to_external)
report.delay.send_to_external
rescue => e
@exception = e
Canvas::Errors.capture(

@ -128,7 +128,7 @@ module LiveAssessments
end
end
@assessment.send_later_if_production(:generate_submissions_for, @results.map(&:user).uniq)
@assessment.delay_if_production.generate_submissions_for(@results.map(&:user).uniq)
render json: serialize_jsonapi(@results)
end

@ -192,7 +192,7 @@ module Lti
else
@report = OriginalityReport.new(create_report_params)
if @report.save
@report.send_later_if_production(:copy_to_group_submissions!)
@report.delay_if_production.copy_to_group_submissions!
render json: api_json(@report, @current_user, session), status: :created
else
render json: @report.errors, status: :bad_request
@ -242,7 +242,7 @@ module Lti
# @returns OriginalityReport
def update
if @report.update(update_report_params)
@report.send_later_if_production(:copy_to_group_submissions!)
@report.delay_if_production.copy_to_group_submissions!
render json: api_json(@report, @current_user, session)
else
render json: @report.errors, status: :bad_request

@ -136,13 +136,8 @@ class LtiApiController < ApplicationController
assignment.update_attribute(:turnitin_enabled, false) if assignment.turnitin_enabled?
request.body.rewind
turnitin_processor = Turnitin::OutcomeResponseProcessor.new(@tool, assignment, user, JSON.parse(request.body.read))
turnitin_processor.send_later_enqueue_args(
:process,
{
max_attempts: Turnitin::OutcomeResponseProcessor.max_attempts,
priority: Delayed::LOW_PRIORITY
}
)
turnitin_processor.delay(max_attempts: Turnitin::OutcomeResponseProcessor.max_attempts,
priority: Delayed::LOW_PRIORITY).process
render json: {}, status: 200
end

@ -207,10 +207,7 @@ class MediaObjectsController < ApplicationController
# Unfortunately, we don't have media_object entities created for everything,
# so we use this opportunity to create the object if it does not exist.
@media_object = MediaObject.create_if_id_exists(params[:media_object_id])
@media_object.send_later_enqueue_args(
:retrieve_details,
{ singleton: "retrieve_media_details:#{@media_object.media_id}" }
)
@media_object.delay(singleton: "retrieve_media_details:#{@media_object.media_id}").retrieve_details
increment_request_cost(Setting.get('missed_media_additional_request_cost', '200').to_i)
end

@ -191,7 +191,7 @@ class Quizzes::QuizSubmissionsApiController < ApplicationController
if quiz_submissions
# trigger delayed grading job for all submission id's which needs grading
quiz_submissions_ids = quiz_submissions.map(&:id).uniq
Quizzes::OutstandingQuizSubmissionManager.new(@quiz).send_later_if_production(:grade_by_ids, quiz_submissions_ids)
Quizzes::OutstandingQuizSubmissionManager.new(@quiz).delay_if_production.grade_by_ids(quiz_submissions_ids)
serialize_and_render quiz_submissions
else
render_unauthorized_action

@ -149,8 +149,8 @@ class Quizzes::QuizzesController < ApplicationController
end
if @current_user.present?
Quizzes::OutstandingQuizSubmissionManager.send_later_if_production(:grade_by_course,
@context)
Quizzes::OutstandingQuizSubmissionManager.delay_if_production.
grade_by_course(@context)
end
log_asset_access([ "quizzes", @context ], "quizzes", 'other')
@ -509,9 +509,7 @@ class Quizzes::QuizzesController < ApplicationController
@quiz.reload
if params[:quiz][:time_limit].present?
@quiz.send_later_if_production_enqueue_args(:update_quiz_submission_end_at_times, {
priority: Delayed::HIGH_PRIORITY
})
@quiz.delay_if_production(priority: Delayed::HIGH_PRIORITY).update_quiz_submission_end_at_times
end
@quiz.publish! if params[:publish]

@ -36,8 +36,8 @@ module BrokenLinkHelper
notification = BroadcastPolicy.notification_finder.by_name('Content Link Error')
error_type = error_type(record.context, request.url)
data = {location: request.referer, url: request.url, anchor: anchor, error_type: error_type}
DelayedNotification.send_later_if_production_enqueue_args(:process, { priority: Delayed::LOW_PRIORITY },
record, notification, recipient_keys, data)
DelayedNotification.delay_if_production(priority: Delayed::LOW_PRIORITY).
process(record, notification, recipient_keys, data)
true
rescue
false

@ -502,7 +502,7 @@ class Account < ActiveRecord::Base
def update_account_associations_if_changed
if self.saved_change_to_parent_account_id? || self.saved_change_to_root_account_id?
self.shard.activate do
send_later_if_production(:update_account_associations)
delay_if_production.update_account_associations
end
end
end
@ -516,7 +516,7 @@ class Account < ActiveRecord::Base
keys_to_clear << :default_locale if self.saved_change_to_default_locale?
if keys_to_clear.any?
self.shard.activate do
send_later_if_production(:clear_downstream_caches, *keys_to_clear)
delay_if_production.clear_downstream_caches(*keys_to_clear)
end
end
end
@ -726,9 +726,8 @@ class Account < ActiveRecord::Base
@invalidations.each do |key|
Rails.cache.delete([key, self.global_id].cache_key)
end
Account.send_later_if_production_enqueue_args(:invalidate_inherited_caches,
{ singleton: "Account.invalidate_inherited_caches_#{global_id}" },
self, @invalidations)
Account.delay_if_production(singleton: "Account.invalidate_inherited_caches_#{global_id}").
invalidate_inherited_caches(self, @invalidations)
end
end
end

@ -44,12 +44,11 @@ class AccountNotification < ActiveRecord::Base
end
def create_alert
if self.start_at > Time.zone.now
self.send_later_enqueue_args(:create_alert, {
:run_at => self.start_at,
:on_conflict => :overwrite,
:singleton => "create_notification_alert:#{self.global_id}"
})
if start_at > Time.zone.now
delay(run_at: start_at,
on_conflict: :overwrite,
singleton: "create_notification_alert:#{self.global_id}").
create_alert
return
end
@ -291,11 +290,9 @@ class AccountNotification < ActiveRecord::Base
def queue_message_broadcast
if self.send_message? && !self.messages_sent_at && !self.message_recipients
self.send_later_enqueue_args(:broadcast_messages, {
:run_at => self.start_at || Time.now.utc,
:on_conflict => :overwrite,
:singleton => "account_notification_broadcast_messages:#{self.global_id}",
:max_attempts => 1})
delay(run_at: start_at || Time.now.utc,
on_conflict: :overwrite,
singleton: "account_notification_broadcast_messages:#{self.global_id}").broadcast_messages
end
end

@ -110,7 +110,7 @@ class AccountReport < ActiveRecord::Base
mark_as_errored
end
end
handle_asynchronously :run_report, priority: Delayed::LOW_PRIORITY, max_attempts: 1,
handle_asynchronously :run_report, priority: Delayed::LOW_PRIORITY,
n_strand: proc {|ar| ['account_reports', ar.account.root_account.global_id]},
on_permanent_failure: :mark_as_errored

@ -23,9 +23,8 @@ module Alerts
Account.root_accounts.active.non_shadow.find_each do |account|
next unless account.settings[:enable_alerts]
account.all_courses.active.find_ids_in_batches(batch_size: 200) do |batch|
self.send_later_if_production_enqueue_args(:evaluate_courses,
{n_strand: ['delayed_alert_sender_evaluate_courses', account.global_id],
priority: Delayed::LOW_PRIORITY}, batch)
delay_if_production(n_strand: ['delayed_alert_sender_evaluate_courses', account.global_id], priority: Delayed::LOW_PRIORITY).
evaluate_courses(batch)
end
end
end

@ -173,7 +173,7 @@ class AssetUserAccessLog
end
def self.reschedule!
AssetUserAccessLog.send_later_enqueue_args(:compact, { strand: strand_name })
AssetUserAccessLog.delay(strand: strand_name).compact
end
def self.strand_name

@ -661,11 +661,10 @@ class Assignment < ActiveRecord::Base
return if run_at.blank?
run_at = 1.minute.from_now if run_at < 1.minute.from_now # delay immediate run in case associated objects are still being saved
self.send_later_enqueue_args(:do_auto_peer_review, {
:run_at => run_at,
:on_conflict => :overwrite,
:singleton => Shard.birth.activate { "assignment:auto_peer_review:#{self.id}" }
})
delay(run_at: run_at,
on_conflict: :overwrite,
singleton: Shard.birth.activate { "assignment:auto_peer_review:#{self.id}" }).
do_auto_peer_review
end
attr_accessor :skip_schedule_peer_reviews
@ -721,7 +720,7 @@ class Assignment < ActiveRecord::Base
# reflect the changes
def update_submissions_and_grades_if_details_changed
if needs_to_update_submissions?
send_later_if_production(:update_student_submissions)
delay_if_production.update_student_submissions
else
update_grades_if_details_changed
end
@ -880,16 +879,9 @@ class Assignment < ActiveRecord::Base
def self.remove_user_as_final_grader(user_id, course_id)
strand_identifier = Course.find(course_id).root_account.global_id
send_later_if_production_enqueue_args(
:remove_user_as_final_grader_immediately,
{
strand: "Assignment.remove_user_as_final_grader:#{strand_identifier}",
max_attempts: 1,
priority: Delayed::LOW_PRIORITY,
},
user_id,
course_id
)
delay_if_production(strand: "Assignment.remove_user_as_final_grader:#{strand_identifier}",
priority: Delayed::LOW_PRIORITY).
remove_user_as_final_grader_immediately(user_id, course_id)
end
def self.remove_user_as_final_grader_immediately(user_id, course_id)
@ -959,7 +951,7 @@ class Assignment < ActiveRecord::Base
end
def update_submissions_later
send_later_if_production(:update_submissions) if saved_change_to_points_possible?
delay_if_production.update_submissions if saved_change_to_points_possible?
end
attr_accessor :updated_submissions # for testing
@ -1049,7 +1041,7 @@ class Assignment < ActiveRecord::Base
modules.each do |mod|
if mod.context_module_progressions.where(current: true, user_id: student_ids).update_all(current: false) > 0
mod.send_later_if_production_enqueue_args(:evaluate_all_progressions, {:strand => "module_reeval_#{mod.global_context_id}"})
mod.delay_if_production(strand: "module_reeval_#{mod.global_context_id}").evaluate_all_progressions
end
end
end
@ -1530,7 +1522,7 @@ class Assignment < ActiveRecord::Base
# a different unlock_at time, so include that in the singleton key so that different
# unlock_at times are properly handled.
singleton = "touch_on_unlock_assignment_#{self.global_id}_#{self.unlock_at}"
send_later_enqueue_args(:touch_assignment_and_submittable, { :run_at => self.unlock_at, :singleton => singleton })
delay(run_at: self.unlock_at, singleton: singleton).touch_assignment_and_submittable
end
end
end
@ -3222,7 +3214,7 @@ class Assignment < ActiveRecord::Base
{ singleton: "assignment_overrides_changed_#{self.global_id}" }
end
self.send_later_if_production_enqueue_args(:run_if_overrides_changed!, enqueuing_args, student_ids, updating_user)
delay_if_production(**enqueuing_args).run_if_overrides_changed!(student_ids, updating_user)
end
def validate_overrides_for_sis(overrides)
@ -3469,7 +3461,7 @@ class Assignment < ActiveRecord::Base
course.recompute_student_scores(submissions.pluck(:user_id))
update_muted_status!
end
self.send_later_if_production(:recalculate_module_progressions, submission_ids)
delay_if_production.recalculate_module_progressions(submission_ids)
progress.set_results(assignment_id: id, posted_at: update_time, user_ids: user_ids) if progress.present?
broadcast_submissions_posted(posting_params) if posting_params.present?
end

@ -103,7 +103,7 @@ class Assignment::BulkUpdate
end
progress.calculate_completion!(progress_count, progress_total)
progress_count += 1
assignment.send_later_if_production :do_notifications!
assignment.delay_if_production.do_notifications!
end
end
end

@ -232,13 +232,11 @@ class Attachment < ActiveRecord::Base
end
# try an infer encoding if it would be useful to do so
send_later(:infer_encoding) if self.encoding.nil? && self.content_type =~ /text/ && self.context_type != 'SisBatch'
delay.infer_encoding if self.encoding.nil? && self.content_type =~ /text/ && self.context_type != 'SisBatch'
if respond_to?(:process_attachment, true)
automatic_thumbnail_sizes.each do |suffix|
send_later_if_production_enqueue_args(
:create_thumbnail_size,
{singleton: "attachment_thumbnail_#{global_id}_#{suffix}"},
suffix)
delay_if_production(singleton: "attachment_thumbnail_#{global_id}_#{suffix}").
create_thumbnail_size(suffix)
end
end
end
@ -1075,10 +1073,7 @@ class Attachment < ActiveRecord::Base
notification = BroadcastPolicy.notification_finder.by_name(count.to_i > 1 ? 'New Files Added' : 'New File Added')
data = { :count => count }
DelayedNotification.send_later_if_production_enqueue_args(
:process,
{ :priority => 30},
record, notification, recipient_keys, data)
DelayedNotification.delay_if_production(priority: 30).process(record, notification, recipient_keys, data)
end
end
end
@ -1283,8 +1278,8 @@ class Attachment < ActiveRecord::Base
# prevent an access attempt shortly before unlock_at from caching permissions beyond that time
def touch_on_unlock
GuardRail.activate(:primary) do
send_later_enqueue_args(:touch, { :run_at => unlock_at,
:singleton => "touch_on_unlock_attachment_#{global_id}" })
delay(run_at: unlock_at,
singleton: "touch_on_unlock_attachment_#{global_id}").touch
end
end
@ -1694,11 +1689,9 @@ class Attachment < ActiveRecord::Base
if submit_to_crocodoc_instead
# get crocodoc off the canvadocs strand
# (maybe :wants_annotation was a dumb idea)
send_later_enqueue_args :submit_to_crocodoc, {
n_strand: 'crocodoc',
max_attempts: 1,
priority: Delayed::LOW_PRIORITY,
}, attempt
delay(n_strand: 'crocodoc',
priority: Delayed::LOW_PRIORITY).
submit_to_crocodoc(attempt)
elsif canvadocable?
doc = canvadoc || create_canvadoc
doc.upload({
@ -1712,12 +1705,10 @@ class Attachment < ActiveRecord::Base
Canvas::Errors.capture(e, type: :canvadocs, attachment_id: id, annotatable: opts[:wants_annotation])
if attempt <= Setting.get('max_canvadocs_attempts', '5').to_i
send_later_enqueue_args :submit_to_canvadocs, {
:n_strand => 'canvadocs_retries',
:run_at => (5 * attempt).minutes.from_now,
:max_attempts => 1,
:priority => Delayed::LOW_PRIORITY,
}, attempt + 1, opts
delay(n_strand: 'canvadocs_retries',
run_at: (5 * attempt).minutes.from_now,
priority: Delayed::LOW_PRIORITY
).submit_to_canvadocs(attempt + 1, opts)
end
end
@ -1732,12 +1723,10 @@ class Attachment < ActiveRecord::Base
Canvas::Errors.capture(e, type: :canvadocs, attachment_id: id)
if attempt <= Setting.get('max_crocodoc_attempts', '5').to_i
send_later_enqueue_args :submit_to_crocodoc, {
:n_strand => 'crocodoc_retries',
:run_at => (5 * attempt).minutes.from_now,
:max_attempts => 1,
:priority => Delayed::LOW_PRIORITY,
}, attempt + 1
delay(n_strand: 'crocodoc_retries',
run_at: (5 * attempt).minutes.from_now,
priority: Delayed::LOW_PRIORITY).
submit_to_crocodoc(attempt + 1)
end
end

@ -153,7 +153,7 @@ class AuthenticationProvider < ActiveRecord::Base
self.workflow_state = 'deleted'
self.save!
enable_canvas_authentication
send_later_if_production(:soft_delete_pseudonyms)
delay_if_production.soft_delete_pseudonyms
true
end
alias destroy_permanently! destroy

@ -344,11 +344,7 @@ class CommunicationChannel < ActiveRecord::Base
true
)
else
send_later_if_production_enqueue_args(
:send_otp_via_sms_gateway!,
{ priority: Delayed::HIGH_PRIORITY, max_attempts: 1 },
message
)
delay_if_production(priority: Delayed::HIGH_PRIORITY).send_otp_via_sms_gateway!(message)
end
end

@ -99,11 +99,11 @@ class CommunicationChannel
def perform!
send_later(:reset_bounce_counts!)
delay.reset_bounce_counts!
{scheduled_reset_approximate_count: count}
end
private def reset_bounce_counts!
def reset_bounce_counts!
matching_channels.to_a.each(&:reset_bounce_count!)
end
end

@ -371,7 +371,7 @@ class ContentMigration < ActiveRecord::Base
# it's ready to be imported
self.workflow_state = :importing
self.save
self.send_later_enqueue_args(:import_content, queue_opts.merge(:on_permanent_failure => :fail_with_error!))
delay(**queue_opts.merge(on_permanent_failure: :fail_with_error!)).import_content
else
# find worker and queue for conversion
begin
@ -417,7 +417,7 @@ class ContentMigration < ActiveRecord::Base
run_at = Setting.get('content_migration_requeue_delay_minutes', '60').to_i.minutes.from_now
# if everything goes right, we'll queue it right away after the currently running one finishes
# but if something goes catastropically wrong, then make sure we recheck it eventually
job = self.send_later_enqueue_args(:queue_migration, {:no_delay => true, :run_at => run_at},
job = delay(ignore_transaction: true, run_at: run_at).queue_migration(
plugin, retry_count: retry_count + 1, expires_at: expires_at)
if self.job_progress

@ -90,7 +90,7 @@ class ContextModule < ActiveRecord::Base
progression_scope = progression_scope.where(:user_id => student_ids) if student_ids
if progression_scope.update_all(["workflow_state = 'locked', lock_version = lock_version + 1, current = ?", false]) > 0
send_later_if_production_enqueue_args(:evaluate_all_progressions, {:strand => "module_reeval_#{self.global_context_id}"})
delay_if_production(strand: "module_reeval_#{self.global_context_id}").evaluate_all_progressions
end
self.context.context_modules.each do |mod|
@ -103,11 +103,11 @@ class ContextModule < ActiveRecord::Base
self.class.connection.after_transaction_commit do
if context_module_progressions.where(current: true).update_all(current: false) > 0
# don't queue a job unless necessary
send_later_if_production_enqueue_args(:evaluate_all_progressions, {:strand => "module_reeval_#{self.global_context_id}"})
delay_if_production(strand: "module_reeval_#{self.global_context_id}").evaluate_all_progressions
end
if @discussion_topics_to_recalculate
@discussion_topics_to_recalculate.each do |dt|
dt.send_later_if_production_enqueue_args(:recalculate_context_module_actions!, {:strand => "module_reeval_#{self.global_context_id}"})
dt.delay_if_production(strand: "module_reeval_#{self.global_context_id}").recalculate_context_module_actions!
end
end
end
@ -288,7 +288,7 @@ class ContextModule < ActiveRecord::Base
self.workflow_state = 'deleted'
self.deleted_at = Time.now.utc
ContentTag.where(:context_module_id => self).where.not(:workflow_state => 'deleted').update_all(:workflow_state => 'deleted', :updated_at => self.deleted_at)
self.send_later_if_production_enqueue_args(:update_downstreams, { max_attempts: 1, n_strand: "context_module_update_downstreams", priority: Delayed::LOW_PRIORITY }, self.position)
delay_if_production(n_strand: "context_module_update_downstreams", priority: Delayed::LOW_PRIORITY).update_downstreams(self.position)
save!
true
end

@ -300,7 +300,7 @@ class ContextModuleProgression < ActiveRecord::Base
begin
if self.update_requirement_met(*args)
self.save!
self.send_later_if_production(:evaluate!)
delay_if_production.evaluate!
end
rescue ActiveRecord::StaleObjectError
# retry up to five times, otherwise return current (stale) data
@ -458,8 +458,8 @@ class ContextModuleProgression < ActiveRecord::Base
User.where(:id => progressions.map(&:user_id)).touch_all
progressions.each do |progression|
progression.send_later_if_production_enqueue_args(:evaluate!,
{:n_strand => ["dependent_progression_reevaluation", context_module.global_context_id]}, self)
progression.delay_if_production(n_strand: ["dependent_progression_reevaluation", context_module.global_context_id]).
evaluate!(self)
end
end
end

@ -289,7 +289,7 @@ class Conversation < ActiveRecord::Base
# now that the message participants are all saved, we can properly broadcast to recipients
message.after_participants_created_broadcast
send_later_if_production(:reset_unread_counts) if options[:reset_unread_counts]
delay_if_production.reset_unread_counts if options[:reset_unread_counts]
message
end

@ -124,11 +124,8 @@ class ConversationBatch < ActiveRecord::Base
end
def queue_delivery
if mode == :async
send_later :deliver
else
deliver(false)
end
sync = (mode != :async)
delay(synchronous: sync).deliver(!sync)
end
workflow do

@ -359,7 +359,7 @@ class ConversationParticipant < ActiveRecord::Base
save
end
# update the stream item data but leave the instances alone
StreamItem.send_later_if_production_enqueue_args(:generate_or_update, {:priority => 25}, self.conversation)
StreamItem.delay_if_production(priority: 25).generate_or_update(self.conversation)
end
def update(hash)
@ -583,9 +583,8 @@ class ConversationParticipant < ActiveRecord::Base
def self.batch_update(user, conversation_ids, update_params)
progress = user.progresses.create! :tag => "conversation_batch_update", :completion => 0.0
job = ConversationParticipant.send_later_enqueue_args(:do_batch_update,
{ no_delay: true },
progress, user, conversation_ids, update_params)
job = ConversationParticipant.delay(ignore_transaction: true).
do_batch_update(progress, user, conversation_ids, update_params)
progress.user_id = user.id
progress.delayed_job_id = job.id
progress.save!

@ -326,8 +326,8 @@ class Course < ActiveRecord::Base
# a lot of things can change the date logic here :/
if self.enrollments.exists?
EnrollmentState.send_later_if_production_enqueue_args(:invalidate_states_for_course_or_section,
{:n_strand => ["invalidate_enrollment_states", self.global_root_account_id]}, self)
EnrollmentState.delay_if_production(n_strand: ["invalidate_enrollment_states", self.global_root_account_id]).
invalidate_states_for_course_or_section(self)
end
# if the course date settings have been changed, we'll end up reprocessing all the access values anyway, so no need to queue below for other setting changes
end
@ -335,7 +335,7 @@ class Course < ActiveRecord::Base
state_settings = [:restrict_student_future_view, :restrict_student_past_view]
changed_keys = saved_change_to_account_id? ? state_settings : (@changed_settings & state_settings)
if changed_keys.any?
EnrollmentState.send_later_if_production(:invalidate_access_for_course, self, changed_keys)
EnrollmentState.delay_if_production.invalidate_access_for_course(self, changed_keys)
end
end
@ -1087,7 +1087,7 @@ class Course < ActiveRecord::Base
EnrollmentState.where(:enrollment_id => locked_ids).
update_all(["state = ?, state_is_current = ?, access_is_current = ?, lock_version = lock_version + 1, updated_at = ?", 'completed', true, false, Time.now.utc])
end
EnrollmentState.send_later_if_production(:process_states_for_ids, enrollment_info.map(&:id)) # recalculate access
EnrollmentState.delay_if_production.process_states_for_ids(enrollment_info.map(&:id)) # recalculate access
end
appointment_participants.active.current.update_all(:workflow_state => 'deleted')
@ -1107,7 +1107,7 @@ class Course < ActiveRecord::Base
update_all(["state = ?, state_is_current = ?, lock_version = lock_version + 1, updated_at = ?", 'deleted', true, Time.now.utc])
end
end
User.send_later_if_production(:update_account_associations, user_ids)
User.delay_if_production.update_account_associations(user_ids)
end
end
end
@ -1233,9 +1233,7 @@ class Course < ActiveRecord::Base
inst_job_opts[:singleton] = "recompute_student_scores:#{global_id}:#{grading_period_id}"
end
send_later_if_production_enqueue_args(
:recompute_student_scores_without_send_later,
inst_job_opts,
delay_if_production(**inst_job_opts).recompute_student_scores_without_send_later(
student_ids,
grading_period_id: grading_period_id,
update_all_grading_period_scores: update_all_grading_period_scores
@ -1327,7 +1325,7 @@ class Course < ActiveRecord::Base
def do_offer
self.start_at ||= Time.now
send_later_if_production(:invite_uninvited_students)
delay_if_production.invite_uninvited_students
end
def do_claim
@ -1396,7 +1394,7 @@ class Course < ActiveRecord::Base
EnrollmentState.where(:enrollment_id => e_batch.map(&:id)).
update_all(["state = ?, state_is_current = ?, lock_version = lock_version + 1, updated_at = ?", 'deleted', true, Time.now.utc])
User.touch_and_clear_cache_keys(user_ids, :enrollments)
User.send_later_if_production(:update_account_associations, user_ids) if user_ids.any?
User.delay_if_production.update_account_associations(user_ids) if user_ids.any?
end
c_data = SisBatchRollBackData.build_dependent_data(sis_batch: sis_batch, contexts: courses, updated_state: 'deleted', batch_mode_delete: batch_mode)
SisBatchRollBackData.bulk_insert_roll_back_data(c_data) if c_data
@ -1864,10 +1862,9 @@ class Course < ActiveRecord::Base
:grade_publishing_message => nil,
:last_publish_attempt_at => last_publish_attempt_at)
send_later_if_production_enqueue_args(:send_final_grades_to_endpoint,
{ n_strand: ["send_final_grades_to_endpoint", global_root_account_id] },
publishing_user, user_ids_to_publish)
send_at(last_publish_attempt_at + settings[:success_timeout].to_i.seconds, :expire_pending_grade_publishing_statuses, last_publish_attempt_at) if should_kick_off_grade_publishing_timeout?
delay_if_production(n_strand: ["send_final_grades_to_endpoint", global_root_account_id]).
send_final_grades_to_endpoint(publishing_user, user_ids_to_publish)
delay(run_at: last_publish_attempt_at + settings[:success_timeout].to_i.seconds).expire_pending_grade_publishing_statuses(last_publish_attempt_at) if should_kick_off_grade_publishing_timeout?
end
def send_final_grades_to_endpoint(publishing_user, user_ids_to_publish = nil)
@ -3298,9 +3295,8 @@ class Course < ActiveRecord::Base
def self.batch_update(account, user, course_ids, update_params, update_source = :manual)
progress = account.progresses.create! :tag => "course_batch_update", :completion => 0.0
job = Course.send_later_enqueue_args(:do_batch_update,
{ no_delay: true },
progress, user, course_ids, update_params, update_source)
job = Course.delay(ignore_transaction: true).
do_batch_update(progress, user, course_ids, update_params, update_source)
progress.user_id = user.id
progress.delayed_job_id = job.id
progress.save!
@ -3344,7 +3340,12 @@ class Course < ActiveRecord::Base
RUBY
end
def touch_content_if_public_visibility_changed(changes)
# only send one
def touch_content_if_public_visibility_changed(changes = {}, **kwargs)
# RUBY 2.7 this can go away (**{} will work at the caller)
raise ArgumentError, "Only send one hash" if !changes.empty? && !kwargs.empty?
changes = kwargs if changes.empty? && !kwargs.empty?
if changes[:is_public] || changes[:is_public_to_auth_users]
self.assignments.touch_all
self.attachments.touch_all
@ -3359,7 +3360,8 @@ class Course < ActiveRecord::Base
def clear_todo_list_cache_later(association_type)
raise "invalid association" unless self.association(association_type).klass == User
send_later_enqueue_args(:clear_todo_list_cache, { :run_at => 15.seconds.from_now, :singleton => "course_clear_cache_#{global_id}_#{association_type}", on_conflict: :loose }, association_type)
delay(run_at: 15.seconds.from_now, singleton: "course_clear_cache_#{global_id}_#{association_type}", on_conflict: :loose).
clear_todo_list_cache(association_type)
end
def clear_todo_list_cache(association_type)

@ -71,7 +71,7 @@ class CourseSection < ActiveRecord::Base
end
def delete_enrollments_later_if_deleted
send_later_if_production(:delete_enrollments_if_deleted) if workflow_state == 'deleted' && saved_change_to_workflow_state?
delay_if_production.delete_enrollments_if_deleted if workflow_state == 'deleted' && saved_change_to_workflow_state?
end
def delete_enrollments_if_deleted
@ -171,9 +171,8 @@ class CourseSection < ActiveRecord::Base
def update_account_associations_if_changed
if (self.saved_change_to_course_id? || self.saved_change_to_nonxlist_course_id?) && !Course.skip_updating_account_associations?
Course.send_later_if_production_enqueue_args(:update_account_associations,
{:n_strand => ["update_account_associations", self.root_account_id]},
[self.course_id, self.course_id_before_last_save, self.nonxlist_course_id, self.nonxlist_course_id_before_last_save].compact.uniq)
Course.delay_if_production(n_strand: ["update_account_associations", self.root_account_id]).
update_account_associations([self.course_id, self.course_id_before_last_save, self.nonxlist_course_id, self.nonxlist_course_id_before_last_save].compact.uniq)
end
end
@ -251,7 +250,7 @@ class CourseSection < ActiveRecord::Base
self.save!
if enrollment_ids.any?
self.all_enrollments.update_all all_attrs
Enrollment.send_later_if_production(:batch_add_to_favorites, enrollment_ids)
Enrollment.delay_if_production.batch_add_to_favorites(enrollment_ids)
end
Assignment.suspend_due_date_caching do
@ -259,11 +258,11 @@ class CourseSection < ActiveRecord::Base
end
User.clear_cache_keys(user_ids, :enrollments)
EnrollmentState.send_later_if_production_enqueue_args(:invalidate_states_for_course_or_section,
{:n_strand => ["invalidate_enrollment_states", self.global_root_account_id]}, self, invalidate_access: true)
User.send_later_if_production(:update_account_associations, user_ids) if old_course.account_id != course.account_id && !User.skip_updating_account_associations?
EnrollmentState.delay_if_production(n_strand: ["invalidate_enrollment_states", self.global_root_account_id]).
invalidate_states_for_course_or_section(self, invalidate_access: true)
User.delay_if_production.update_account_associations(user_ids) if old_course.account_id != course.account_id && !User.skip_updating_account_associations?
if old_course.id != self.course_id && old_course.id != self.nonxlist_course_id
old_course.send_later_if_production(:update_account_associations) unless Course.skip_updating_account_associations?
old_course.delay_if_production.update_account_associations unless Course.skip_updating_account_associations?
end
run_immediately = opts.include?(:run_jobs_immediately)
@ -278,7 +277,7 @@ class CourseSection < ActiveRecord::Base
# it's possible that some enrollments were created using an old copy of the course section before the crosslist,
# so wait a little bit and then make sure they get cleaned up
self.send_later_if_production_enqueue_args(:ensure_enrollments_in_correct_section, {:max_attempts => 1, :run_at => 10.seconds.from_now})
delay_if_production(run_at: 10.seconds.from_now).ensure_enrollments_in_correct_section
end
def ensure_enrollments_in_correct_section
@ -364,8 +363,8 @@ class CourseSection < ActiveRecord::Base
def update_enrollment_states_if_necessary
if self.saved_change_to_restrict_enrollments_to_section_dates? || (self.restrict_enrollments_to_section_dates? && (saved_changes.keys & %w{start_at end_at}).any?)
EnrollmentState.send_later_if_production_enqueue_args(:invalidate_states_for_course_or_section,
{:n_strand => ["invalidate_enrollment_states", self.global_root_account_id]}, self)
EnrollmentState.delay_if_production(n_strand: ["invalidate_enrollment_states", self.global_root_account_id]).
invalidate_states_for_course_or_section(self)
end
end
end

@ -224,9 +224,7 @@ class CrocodocDocument < ActiveRecord::Base
error_docs = CrocodocDocument.where(:uuid => error_uuids)
attachment_ids = error_docs.pluck(:attachment_id)
if Canvadocs.enabled?
Attachment.send_later_enqueue_args :submit_to_canvadocs,
{:n_strand => "canvadocs", :max_attempts => 1},
attachment_ids
Attachment.delay(n_strand: "canvadocs").submit_to_canvadocs(attachment_ids)
end
end
end

@ -153,7 +153,7 @@ module Csp::AccountHelper
def clear_tool_domain_cache
Rails.cache.delete([ACCOUNT_TOOL_CACHE_KEY_PREFIX, self.global_id].cache_key)
Account.send_later_if_production(:invalidate_inherited_caches, self, [ACCOUNT_TOOL_CACHE_KEY_PREFIX])
Account.delay_if_production.invalidate_inherited_caches(self, [ACCOUNT_TOOL_CACHE_KEY_PREFIX])
end
def csp_files_domains(request)

@ -41,7 +41,12 @@ class DelayedNotification < ActiveRecord::Base
state :errored
end
def self.process(asset, notification, recipient_keys, data)
def self.process(asset, notification, recipient_keys, data = {}, **kwargs)
# RUBY 2.7 this can go away (**{} will work at the caller)
raise ArgumentError, "Only send one hash" if !data&.empty? && !kwargs.empty?
data = kwargs if data&.empty? && !kwargs.empty?
DelayedNotification.new(
asset: asset,
notification: notification,

@ -352,29 +352,16 @@ class DeveloperKey < ActiveRecord::Base
if affected_account.blank? || affected_account.site_admin?
# Cleanup tools across all shards
send_later_enqueue_args(
:manage_external_tools_multi_shard,
enqueue_args,
enqueue_args,
method,
affected_account
)
delay(**enqueue_args).
manage_external_tools_multi_shard(enqueue_args, method, affected_account)
else
send_later_enqueue_args(
method,
enqueue_args,
affected_account
)
delay(**enqueue_args).__send__(method, affected_account)
end
end
def manage_external_tools_multi_shard(enqueue_args, method, affected_account)
Shard.with_each_shard do
send_later_enqueue_args(
method,
enqueue_args,
affected_account
)
delay(**enqueue_args).__send__(method, affected_account)
end
end
@ -485,7 +472,7 @@ class DeveloperKey < ActiveRecord::Base
def invalidate_access_tokens_if_scopes_removed!
return unless saved_change_to_scopes?
return if (scopes_before_last_save - scopes).blank?
send_later_if_production(:invalidate_access_tokens!)
delay_if_production.invalidate_access_tokens!
end
def invalidate_access_tokens!

@ -360,7 +360,7 @@ class DiscussionEntry < ActiveRecord::Base
end
def context_module_action_later
self.send_later_if_production(:context_module_action)
delay_if_production.context_module_action
end
protected :context_module_action_later

@ -222,8 +222,8 @@ class DiscussionTopic < ActiveRecord::Base
def schedule_delayed_transitions
return if self.saved_by == :migration
self.send_at(self.delayed_post_at, :update_based_on_date) if @should_schedule_delayed_post
self.send_at(self.lock_at, :update_based_on_date) if @should_schedule_lock_at
delay(run_at: delayed_post_at).update_based_on_date if @should_schedule_delayed_post
delay(run_at: lock_at).update_based_on_date if @should_schedule_lock_at
# need to clear these in case we do a save whilst saving (e.g.
# Announcement#respect_context_lock_rules), so as to avoid the dreaded
# double delayed job ಠ_ಠ
@ -242,7 +242,7 @@ class DiscussionTopic < ActiveRecord::Base
def update_subtopics
if !self.deleted? && (self.has_group_category? || !!self.group_category_id_before_last_save)
send_later_if_production_enqueue_args(:refresh_subtopics, :singleton => "refresh_subtopics_#{self.global_id}")
delay_if_production(singleton: "refresh_subtopics_#{self.global_id}").refresh_subtopics
end
end
@ -954,7 +954,7 @@ class DiscussionTopic < ActiveRecord::Base
# either changed sections or made section specificness
return unless self.is_section_specific? ? @sections_changed : self.is_section_specific_before_last_save
send_later_if_production(:clear_stream_items_for_sections)
delay_if_production.clear_stream_items_for_sections
end
def clear_stream_items_for_sections
@ -972,7 +972,7 @@ class DiscussionTopic < ActiveRecord::Base
def clear_non_applicable_stream_items_for_delayed_posts
if self.is_announcement && self.delayed_post_at? && @delayed_post_at_changed && self.delayed_post_at > Time.now
send_later_if_production(:clear_stream_items_for_delayed_posts)
delay_if_production.clear_stream_items_for_delayed_posts
end
end
@ -987,7 +987,7 @@ class DiscussionTopic < ActiveRecord::Base
def clear_non_applicable_stream_items_for_locked_modules
return unless self.locked_by_module?
send_later_if_production(:clear_stream_items_for_locked_modules)
delay_if_production.clear_stream_items_for_locked_modules
end
def clear_stream_items_for_locked_modules
@ -1003,13 +1003,11 @@ class DiscussionTopic < ActiveRecord::Base
def clear_stream_item_cache_for(user_ids)
if stream_item && user_ids.any?
StreamItemCache.send_later_if_production_enqueue_args(
:invalidate_all_recent_stream_items,
{ :priority => Delayed::LOW_PRIORITY },
user_ids,
stream_item.context_type,
stream_item.context_id
)
StreamItemCache.delay_if_production(priority: Delayed::LOW_PRIORITY).
invalidate_all_recent_stream_items(
user_ids,
stream_item.context_type,
stream_item.context_id)
end
end

@ -141,7 +141,7 @@ class DiscussionTopic::MaterializedView < ActiveRecord::Base
if !self.class.wait_for_replication(start: xlog_location, timeout: timeout)
# failed to replicate - requeue later
run_at = Setting.get("discussion_materialized_view_replication_failure_retry", "300").to_i.seconds.from_now
self.delay(singleton: "materialized_discussion:#{ Shard.birth.activate { self.discussion_topic_id } }", run_at: run_at).
delay(singleton: "materialized_discussion:#{ Shard.birth.activate { self.discussion_topic_id } }", run_at: run_at).
update_materialized_view(synchronous: true, xlog_location: xlog_location, use_master: use_master)
raise "timed out waiting for replication"
end
@ -156,7 +156,7 @@ class DiscussionTopic::MaterializedView < ActiveRecord::Base
end
handle_asynchronously :update_materialized_view,
:singleton => proc { |o| "materialized_discussion:#{ Shard.birth.activate { o.discussion_topic_id } }" }
singleton: proc { |o| "materialized_discussion:#{ Shard.birth.activate { o.discussion_topic_id } }" }
def build_materialized_view(use_master: false)
entry_lookup = {}

@ -221,7 +221,7 @@ class Enrollment < ActiveRecord::Base
if (self.just_created || self.saved_change_to_workflow_state? || @re_send_confirmation) && self.workflow_state == 'invited' && self.inactive? && self.available_at &&
!self.self_enrolled && !(self.observer? && self.user.registered?)
# this won't work if they invite them and then change the course/term/section dates _afterwards_ so hopefully people don't do that
self.send_later_enqueue_args(:re_send_confirmation_if_invited!, {:run_at => self.available_at, :singleton => "send_enrollment_invitations_#{global_id}"})
delay(run_at: self.available_at, singleton: "send_enrollment_invitations_#{global_id}").re_send_confirmation_if_invited!
end
end
@ -659,7 +659,7 @@ class Enrollment < ActiveRecord::Base
def add_to_favorites_later
if self.saved_change_to_workflow_state? && self.workflow_state == 'active'
self.class.connection.after_transaction_commit do
self.send_later_if_production_enqueue_args(:add_to_favorites, { :priority => Delayed::LOW_PRIORITY })
delay_if_production(priority: Delayed::LOW_PRIORITY).add_to_favorites
end
end
end
@ -1004,16 +1004,9 @@ class Enrollment < ActiveRecord::Base
# Guard against getting more than one user_id
raise ArgumentError, "Cannot call with more than one user" if Array(user_id).size > 1
send_later_if_production_enqueue_args(
:recompute_final_score,
{
singleton: "Enrollment.recompute_final_score:#{user_id}:#{course_id}:#{opts[:grading_period_id]}",
max_attempts: 10
},
user_id,
course_id,
**opts
)
delay_if_production(singleton: "Enrollment.recompute_final_score:#{user_id}:#{course_id}:#{opts[:grading_period_id]}",
max_attempts: 10).
recompute_final_score(user_id, course_id, **opts)
end
def self.recompute_due_dates_and_scores(user_id)
@ -1482,14 +1475,9 @@ class Enrollment < ActiveRecord::Base
# running in an n_strand to handle situations where a SIS import could
# update a ton of enrollments from "deleted" to "completed".
send_later_if_production_enqueue_args(
:restore_submissions_and_scores_now,
{
n_strand: "Enrollment#restore_submissions_and_scores#{root_account.global_id}",
max_attempts: 1,
priority: Delayed::LOW_PRIORITY
}
)
delay_if_production(n_strand: "Enrollment#restore_submissions_and_scores#{root_account.global_id}",
priority: Delayed::LOW_PRIORITY).
restore_submissions_and_scores_now
end
def restore_submissions_and_scores_now

@ -194,13 +194,10 @@ class Enrollment::BatchStateUpdater
return if batch.empty?
root_account ||= Enrollment.where(id: batch).take&.root_account
return unless root_account
EnrollmentState.send_later_if_production_enqueue_args(
:force_recalculation,
{run_at: Setting.get("wait_time_to_calculate_enrollment_state", 1).to_f.minute.from_now,
EnrollmentState.delay_if_production(run_at: Setting.get("wait_time_to_calculate_enrollment_state", 1).to_f.minute.from_now,
n_strand: ["restore_states_enrollment_states", root_account.global_id],
max_attempts: 2},
batch
)
max_attempts: 2).
force_recalculation(batch)
students = Enrollment.of_student_type.where(id: batch).preload({user: :linked_observers}, :root_account).to_a
user_ids = Enrollment.where(id: batch).distinct.pluck(:user_id)
courses = Course.where(id: Enrollment.where(id: batch).select(:course_id).distinct).to_a

@ -269,7 +269,7 @@ class EnrollmentState < ActiveRecord::Base
EnrollmentState.where(:enrollment_id => enrollment_ids).
update_all(["lock_version = COALESCE(lock_version, 0) + 1, state_is_current = ?", false])
args = strand ? {n_strand: strand} : {}
EnrollmentState.send_later_if_production_enqueue_args(:process_states_for_ids, args, enrollment_ids)
EnrollmentState.delay_if_production(**args).process_states_for_ids(enrollment_ids)
end
end
@ -290,10 +290,9 @@ class EnrollmentState < ActiveRecord::Base
scope = scope.where(:type => enrollment_type) if enrollment_type
scope.find_ids_in_ranges(:batch_size => ENROLLMENT_BATCH_SIZE) do |min_id, max_id|
if invalidate_states(scope.where(:id => min_id..max_id)) > 0
EnrollmentState.send_later_if_production_enqueue_args(:process_term_states_in_ranges, {
:priority => Delayed::LOW_PRIORITY,
:n_strand => ['invalidate_states_for_term', term.global_root_account_id]
}, min_id, max_id, term, enrollment_type)
EnrollmentState.delay_if_production(priority: Delayed::LOW_PRIORITY,
n_strand: ['invalidate_states_for_term', term.global_root_account_id]).
process_term_states_in_ranges(min_id, max_id, term, enrollment_type)
end
end
end
@ -318,10 +317,9 @@ class EnrollmentState < ActiveRecord::Base
enrollments_for_account_ids(account_ids).find_ids_in_ranges(:batch_size => ENROLLMENT_BATCH_SIZE) do |min_id, max_id|
scope = enrollments_for_account_ids(account_ids).where(:id => min_id..max_id)
if invalidate_access(scope, states_to_update) > 0
EnrollmentState.send_later_if_production_enqueue_args(:process_account_states_in_ranges, {
priority: Delayed::LOW_PRIORITY,
n_strand: ['invalidate_access_for_accounts', Shard.current.id]
}, min_id, max_id, account_ids)
EnrollmentState.delay_if_production(priority: Delayed::LOW_PRIORITY,
n_strand: ['invalidate_access_for_accounts', Shard.current.id]).
process_account_states_in_ranges(min_id, max_id, account_ids)
end
end
end

@ -77,10 +77,11 @@ class EnrollmentTerm < ActiveRecord::Base
def update_courses_and_states_later(enrollment_type=nil)
return if new_record?
self.send_later_if_production_enqueue_args(:touch_all_courses, { singleton: "EnrollmentTerm#touch_all_courses_#{self.global_id}" }) unless @touched_courses
delay_if_production(singleton: "EnrollmentTerm#touch_all_courses_#{self.global_id}").touch_all_courses unless @touched_courses
@touched_courses = true
EnrollmentState.send_later_if_production_enqueue_args(:invalidate_states_for_term, { singleton: "EnrollmentState.invalidate_states_for_term_#{self.global_id}_#{enrollment_type}" }, self, enrollment_type)
EnrollmentState.delay_if_production(singleton: "EnrollmentState.invalidate_states_for_term_#{self.global_id}_#{enrollment_type}").
invalidate_states_for_term(self, enrollment_type)
end
def self.i18n_default_term_name
@ -89,17 +90,9 @@ class EnrollmentTerm < ActiveRecord::Base
def recompute_course_scores_later(update_all_grading_period_scores: true, strand_identifier: "EnrollmentTerm:#{global_id}")
courses_to_recompute.find_ids_in_ranges(batch_size: 1000) do |min_id, max_id|
send_later_if_production_enqueue_args(
:recompute_scores_for_batch,
{
n_strand: "EnrollmentTerm#recompute_scores_for_batch:#{strand_identifier}",
max_attempts: 1,
priority: Delayed::LOW_PRIORITY
},
min_id,
max_id,
update_all_grading_period_scores
)
delay_if_production(n_strand: "EnrollmentTerm#recompute_scores_for_batch:#{strand_identifier}",
priority: Delayed::LOW_PRIORITY).
recompute_scores_for_batch(min_id, max_id, update_all_grading_period_scores)
end
end

@ -124,7 +124,7 @@ class EpubExport < ActiveRecord::Base
content_export.export
true
end
handle_asynchronously :export, priority: Delayed::LOW_PRIORITY, max_attempts: 1, on_permanent_failure: :mark_as_failed
handle_asynchronously :export, priority: Delayed::LOW_PRIORITY, on_permanent_failure: :mark_as_failed
def mark_exported
if content_export.failed?
@ -135,14 +135,14 @@ class EpubExport < ActiveRecord::Base
generate
end
end
handle_asynchronously :mark_exported, priority: Delayed::LOW_PRIORITY, max_attempts: 1
handle_asynchronously :mark_exported, priority: Delayed::LOW_PRIORITY
def generate
job_progress.update_attribute(:completion, PERCENTAGE_COMPLETE[:generating])
update_attribute(:workflow_state, 'generating')
convert_to_epub
end
handle_asynchronously :generate, priority: Delayed::LOW_PRIORITY, max_attempts: 1, on_permanent_failure: :mark_as_failed
handle_asynchronously :generate, priority: Delayed::LOW_PRIORITY, on_permanent_failure: :mark_as_failed
def mark_as_generated
job_progress.complete! if job_progress.running?
@ -184,7 +184,7 @@ class EpubExport < ActiveRecord::Base
mark_as_generated
file_paths.each {|file_path| cleanup_file_path!(file_path) }
end
handle_asynchronously :convert_to_epub, priority: Delayed::LOW_PRIORITY, max_attempts: 1
handle_asynchronously :convert_to_epub, priority: Delayed::LOW_PRIORITY
def create_attachment_from_path!(file_path)
begin

@ -122,15 +122,9 @@ class GradingPeriodGroup < ActiveRecord::Base
def cleanup_associations_and_recompute_scores_later(updating_user: nil)
root_account_id = course_id ? course.root_account.global_id : root_account.global_id
send_later_if_production_enqueue_args(
:cleanup_associations_and_recompute_scores,
{
strand: "GradingPeriodGroup#cleanup_associations_and_recompute_scores:Account#{root_account_id}",
max_attempts: 1,
priority: Delayed::LOW_PRIORITY
},
updating_user: updating_user
)
delay_if_production(strand: "GradingPeriodGroup#cleanup_associations_and_recompute_scores:Account#{root_account_id}",
priority: Delayed::LOW_PRIORITY).
cleanup_associations_and_recompute_scores(updating_user: updating_user)
end
def cleanup_associations_and_recompute_scores(updating_user: nil)

@ -388,13 +388,13 @@ class Group < ActiveRecord::Base
notification = BroadcastPolicy.notification_finder.by_name(notification_name)
users.each_with_index do |user, index|
BroadcastPolicy.notifier.send_later_enqueue_args(:send_notification,
{ :priority => Delayed::LOW_PRIORITY },
new_group_memberships[index],
notification_name.parameterize.underscore.to_sym,
notification,
[user],
broadcast_data)
BroadcastPolicy.notifier.delay(priority: Delayed::LOW_PRIORITY).
send_notification(
new_group_memberships[index],
notification_name.parameterize.underscore.to_sym,
notification,
[user],
broadcast_data)
end
end
new_group_memberships

@ -492,7 +492,7 @@ class GroupCategory < ActiveRecord::Base
def assign_unassigned_members_in_background(by_section=false, updating_user: nil)
start_progress
send_later_enqueue_args(:assign_unassigned_members, {:priority => Delayed::LOW_PRIORITY}, by_section, updating_user: updating_user)
delay(priority: Delayed::LOW_PRIORITY).assign_unassigned_members(by_section, updating_user: updating_user)
end
def clone_groups_and_memberships(new_group_category)

View File

@ -94,7 +94,8 @@ class KalturaMediaFileHandler
end
def refresh_later(bulk_upload_id, attachments, root_account_id)
MediaObject.send_later_enqueue_args(:refresh_media_files, {:run_at => 1.minute.from_now, :priority => Delayed::LOW_PRIORITY}, bulk_upload_id, attachments.map(&:id), root_account_id)
MediaObject.delay(run_at: 1.minute.from_now, priority: Delayed::LOW_PRIORITY).
refresh_media_files(bulk_upload_id, attachments.map(&:id), root_account_id)
end
def send_sis_data_to_kaltura?

@ -391,7 +391,7 @@ class LearningOutcome < ActiveRecord::Base
!self.saved_change_to_short_description? &&
!self.saved_change_to_description?
self.send_later_if_production(:update_associated_rubrics)
delay_if_production.update_associated_rubrics
end
def update_associated_rubrics

@ -93,7 +93,7 @@ module Lti
callbacks = get_logout_callbacks(pseudonym)
return unless callbacks.any?
clear_logout_callbacks(pseudonym)
Delayed::Job.enqueue(Lti::LogoutService::Runner.new(callbacks), max_attempts: 1)
Delayed::Job.enqueue(Lti::LogoutService::Runner.new(callbacks))
end
end
end

@ -68,10 +68,9 @@ class MasterCourses::MasterMigration < ActiveRecord::Base
master_template.lock!
if master_template.active_migration_running?
if opts[:retry_later]
self.send_later_enqueue_args(:start_new_migration!,
{:singleton => "retry_start_master_migration_#{master_template.global_id}",
:run_at => 10.minutes.from_now, :max_attempts => 1},
master_template, user, opts)
delay(singleton: "retry_start_master_migration_#{master_template.global_id}",
run_at: 10.minutes.from_now).
start_new_migration!(master_template, user, opts)
else
raise MigrationRunningError.new("cannot start new migration while another one is running")
end
@ -123,7 +122,7 @@ class MasterCourses::MasterMigration < ActiveRecord::Base
}
self.update_attribute(:workflow_state, 'queued')
self.send_later_enqueue_args(:perform_exports, queue_opts)
delay(**queue_opts).perform_exports
end
def fail_export_with_error!(exception_or_info)

@ -103,10 +103,8 @@ class MasterCourses::MasterTemplate < ActiveRecord::Base
end
def destroy_subscriptions_later
self.send_later_if_production_enqueue_args(:destroy_subscriptions, {
:n_strand => ["master_courses_destroy_subscriptions", self.course.global_root_account_id],
:priority => Delayed::LOW_PRIORITY
})
delay_if_production(n_strand: ["master_courses_destroy_subscriptions", self.course.global_root_account_id],
priority: Delayed::LOW_PRIORITY).destroy_subscriptions
end
def destroy_subscriptions

@ -43,7 +43,7 @@ class MediaObject < ActiveRecord::Base
end
def update_title_on_kaltura_later
send_later(:update_title_on_kaltura) if @push_user_title
delay.update_title_on_kaltura if @push_user_title
@push_user_title = nil
end
@ -72,7 +72,8 @@ class MediaObject < ActiveRecord::Base
client.startSession(CanvasKaltura::SessionType::ADMIN)
res = client.bulkUploadCsv(csv)
if !res[:ready]
MediaObject.send_later_enqueue_args(:refresh_media_files, {:run_at => 1.minute.from_now, :priority => Delayed::LOW_PRIORITY}, res[:id], [], root_account_id)
MediaObject.delay(run_at: 1.minute.from_now, priority: Delayed::LOW_PRIORITY).
refresh_media_files(res[:id], [], root_account_id)
else
build_media_objects(res, root_account_id)
end
@ -133,7 +134,8 @@ class MediaObject < ActiveRecord::Base
if !res[:ready]
if attempt < Setting.get('media_object_bulk_refresh_max_attempts', '5').to_i
wait_period = Setting.get('media_object_bulk_refresh_wait_period', '30').to_i
MediaObject.send_later_enqueue_args(:refresh_media_files, {:run_at => wait_period.minutes.from_now, :priority => Delayed::LOW_PRIORITY}, bulk_upload_id, attachment_ids, root_account_id, attempt + 1)
MediaObject.delay(run_at: wait_period.minutes.from_now, priority: Delayed::LOW_PRIORITY).
refresh_media_files(bulk_upload_id, attachment_ids, root_account_id, attempt + 1)
else
# if it fails, then the attachment should no longer consider itself kalturable
Attachment.where("id IN (?) OR root_attachment_id IN (?)", attachment_ids, attachment_ids).update_all(:media_entry_id => nil) unless attachment_ids.empty?
@ -153,7 +155,7 @@ class MediaObject < ActiveRecord::Base
def self.ensure_media_object(media_id, **create_opts)
if !by_media_id(media_id).any?
self.send_later_enqueue_args(:create_if_id_exists, { :priority => Delayed::LOW_PRIORITY }, media_id, **create_opts)
delay(priority: Delayed::LOW_PRIORITY).create_if_id_exists(media_id, **create_opts)
end
end
@ -176,7 +178,7 @@ class MediaObject < ActiveRecord::Base
end
def retrieve_details_later
send_later(:retrieve_details_ensure_codecs)
delay.retrieve_details_ensure_codecs
end
def media_sources
@ -187,7 +189,7 @@ class MediaObject < ActiveRecord::Base
retrieve_details
if !transcoded_details && self.created_at > 6.hours.ago
if attempt < 10
send_at((5 * attempt).minutes.from_now, :retrieve_details_ensure_codecs, attempt + 1)
delay(run_at: (5 * attempt).minutes.from_now).retrieve_details_ensure_codecs(attempt + 1)
else
Canvas::Errors.capture(:media_object_failure, {
message: "Kaltura flavor retrieval failed",
@ -282,7 +284,7 @@ class MediaObject < ActiveRecord::Base
end
def viewed!
send_later(:updated_viewed_at_and_retrieve_details, Time.now) if !self.data[:last_viewed_at] || self.data[:last_viewed_at] > 1.hour.ago
delay.updated_viewed_at_and_retrieve_details(Time.now) if !self.data[:last_viewed_at] || self.data[:last_viewed_at] > 1.hour.ago
true
end


@ -21,7 +21,8 @@ class Notifier
def send_notification(record, dispatch, messages, to_list, data=nil)
recipient_keys = (to_list || []).compact.map{|o| o.is_a?(String) ? o : o.asset_string}
messages = DelayedNotification.delay_if_production(priority: 30).
process(record, messages, recipient_keys, data)
process(record, messages, recipient_keys, data, **{})
# RUBY 3.0 - **{} can go away, because data won't implicitly convert to kwargs
messages ||= DelayedNotification.new(
:asset => record,


@ -89,9 +89,7 @@ class OutcomeImport < ApplicationRecord
end
def schedule
send_later_enqueue_args(:run, {
strand: "OutcomeImport::run::#{root_account.global_id}"
})
delay(strand: "OutcomeImport::run::#{root_account.global_id}").run
end
def locale


@ -159,10 +159,7 @@ class OutcomeProficiency < ApplicationRecord
return unless @update_rubrics
@update_rubrics = false
send_later_if_production_enqueue_args(
:update_associated_rubrics,
{strand: "update_rubrics_from_mastery_scales_#{global_id}"}
)
delay_if_production(strand: "update_rubrics_from_mastery_scales_#{global_id}").update_associated_rubrics
end
def update_associated_rubrics


@ -69,7 +69,7 @@ class Progress < ActiveRecord::Base
queued? || running?
end
# Tie this Progress model to a delayed job. Rather than `obj.send_later(:long_method)`, use:
# Tie this Progress model to a delayed job. Rather than `obj.delay.long_method`, use:
# `progress.process_job(obj, :long_method)`. This will transition from queued
# => running when the job starts, from running => completed when the job
# finishes, and from running => failed if the job fails.
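# Illustrative sketch (not part of this commit), following the comment above.
# `course`, `exporter`, and :generate are hypothetical stand-ins for the
# context, the object, and the long-running method being backgrounded.
progress = Progress.create!(context: course, tag: "content_export")
progress.process_job(exporter, :generate)
# The Progress record then transitions queued => running when the job starts,
# running => completed when it finishes, and running => failed on error.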


@ -419,7 +419,7 @@ class Quizzes::Quiz < ActiveRecord::Base
attr_accessor :saved_by
def update_assignment
send_later_if_production(:set_unpublished_question_count) if self.id
delay_if_production.set_unpublished_question_count if self.id
if !self.assignment_id && @old_assignment_id
self.context_module_tags.preload(:context_module => :content_tags).each { |tag| tag.confirm_valid_module_requirements }
end
@ -429,12 +429,12 @@ class Quizzes::Quiz < ActiveRecord::Base
submission_types: 'online_quiz'
).update_all(:workflow_state => 'deleted', :updated_at => Time.now.utc)
self.course.recompute_student_scores
send_later_if_production_enqueue_args(:destroy_related_submissions, priority: Delayed::HIGH_PRIORITY)
delay_if_production(priority: Delayed::HIGH_PRIORITY).destroy_related_submissions
::ContentTag.delete_for(::Assignment.find(@old_assignment_id)) if @old_assignment_id
::ContentTag.delete_for(::Assignment.find(self.last_assignment_id)) if self.last_assignment_id
end
send_later_if_production(:update_existing_submissions) if @update_existing_submissions
delay_if_production.update_existing_submissions if @update_existing_submissions
if (self.assignment || @assignment_to_set) && (@assignment_id_set || self.for_assignment?) && @saved_by != :assignment
unless !self.graded? && @old_assignment_id
Quizzes::Quiz.where("assignment_id=? AND id<>?", self.assignment_id, self).update_all(:workflow_state => 'deleted', :assignment_id => nil, :updated_at => Time.now.utc) if self.assignment_id
@ -1342,11 +1342,8 @@ class Quizzes::Quiz < ActiveRecord::Base
version_number: self.version_number
}
if current_quiz_question_regrades.present?
Quizzes::QuizRegrader::Regrader.send_later_enqueue_args(
:regrade!,
{ strand: "quiz:#{self.global_id}:regrading"},
options
)
Quizzes::QuizRegrader::Regrader.delay(strand: "quiz:#{self.global_id}:regrading").
regrade!(options)
end
end
true
@ -1459,10 +1456,7 @@ class Quizzes::Quiz < ActiveRecord::Base
# Assignment#run_if_overrides_changed_later! uses its keyword arguments, but
# this method does not
def run_if_overrides_changed_later!(**)
self.send_later_if_production_enqueue_args(
:run_if_overrides_changed!,
{:singleton => "quiz_overrides_changed_#{self.global_id}"}
)
delay_if_production(singleton: "quiz_overrides_changed_#{self.global_id}").run_if_overrides_changed!
end
# This alias exists to handle cases where a method that expects an


@ -115,7 +115,7 @@ module Quizzes
tagged_bank_ids = Set.new(alignments.map(&:content_id))
question_ids = questions.select { |q| tagged_bank_ids.include?(q.assessment_question_bank_id) }
send_later_if_production(:update_outcomes, question_ids, @submission.id, attempt) unless question_ids.empty?
delay_if_production.update_outcomes(question_ids, @submission.id, attempt) unless question_ids.empty?
end
def update_outcomes(question_ids, submission_id, attempt)


@ -39,9 +39,8 @@ class RoleOverride < ActiveRecord::Base
end
def self.clear_caches(account, role)
account.send_later_if_production_enqueue_args(:clear_downstream_caches,
{:singleton => "clear_downstream_role_caches:#{account.global_id}"},
:role_overrides)
account.delay_if_production(singleton: "clear_downstream_role_caches:#{account.global_id}").
clear_downstream_caches(:role_overrides)
role.touch
end


@ -49,7 +49,7 @@ class RubricAssessment < ActiveRecord::Base
peer_review = self.assessment_type == "peer_review"
provisional_grade = self.artifact_type == "ModeratedGrading::ProvisionalGrade"
update_outcomes = outcome_ids.present? && !peer_review && !provisional_grade
send_later_if_production(:update_outcomes_for_assessment, outcome_ids) if update_outcomes
delay_if_production.update_outcomes_for_assessment(outcome_ids) if update_outcomes
end
def update_outcomes_for_assessment(outcome_ids=[])


@ -25,7 +25,7 @@ class ScheduledSmartAlert < ApplicationRecord
offset = account.settings['smart_alerts_threshold'] || 36
ScheduledSmartAlert.runnable(offset, root_account_id).order(:due_at).find_each do |record|
AssignmentUtil.send_later(:process_due_date_reminder, record.context_type, record.context_id) if account.feature_enabled?(:smart_alerts)
AssignmentUtil.delay.process_due_date_reminder(record.context_type, record.context_id) if account.feature_enabled?(:smart_alerts)
record.destroy
end
end


@ -173,9 +173,8 @@ class SisBatch < ActiveRecord::Base
job_args = {
singleton: "account:update_account_associations:#{Shard.birth.activate { account.id }}",
priority: Delayed::LOW_PRIORITY,
max_attempts: 1,
}
account.send_later_enqueue_args(:update_account_associations, job_args)
account.delay(**job_args).update_account_associations
end
end
@ -384,7 +383,7 @@ class SisBatch < ActiveRecord::Base
@has_errors = self.sis_batch_errors.exists?
import_finished = !(@has_errors && self.sis_batch_errors.failed.exists?) if import_finished
finalize_workflow_state(import_finished)
self.send_later_if_production_enqueue_args(:write_errors_to_file, {max_attempts: 5}) if @has_errors
delay_if_production(max_attempts: 5).write_errors_to_file if @has_errors
populate_old_warnings_and_errors
statistics
self.progress = 100 if import_finished
@ -832,7 +831,8 @@ class SisBatch < ActiveRecord::Base
def finalize_enrollments(ids)
ids.each_slice(1000) do |slice|
Enrollment::BatchStateUpdater.send_later_enqueue_args(:run_call_backs_for, { n_strand: ["restore_states_batch_updater", account.global_id] }, slice, self.account)
Enrollment::BatchStateUpdater.delay(n_strand: ["restore_states_batch_updater", account.global_id]).
run_call_backs_for(slice, self.account)
end
# we know enrollments are not deleted, but we don't know what the previous
# state was, we will assume deleted and restore the scores and submissions


@ -375,8 +375,8 @@ class SplitUsers
model.where(id: other_ids).update_all(user_id: source_user.id)
model.where(id: ids).update_all(user_id: restored_user.id)
end
Enrollment.send_later(:recompute_due_dates_and_scores, source_user.id)
Enrollment.send_later(:recompute_due_dates_and_scores, restored_user.id)
Enrollment.delay.recompute_due_dates_and_scores(source_user.id)
Enrollment.delay.recompute_due_dates_and_scores(restored_user.id)
end
end
end


@ -458,8 +458,8 @@ class StreamItem < ActiveRecord::Base
self.stream_item_instances.shard(self).activate do |scope|
user_ids = scope.pluck(:user_id)
if !self.invalidate_immediately && user_ids.count > 100
StreamItemCache.send_later_if_production_enqueue_args(:invalidate_all_recent_stream_items,
{ :priority => Delayed::LOW_PRIORITY }, user_ids, self.context_type, self.context_id)
StreamItemCache.delay_if_production(priority: Delayed::LOW_PRIORITY).
invalidate_all_recent_stream_items(user_ids, self.context_type, self.context_id)
else
StreamItemCache.invalidate_all_recent_stream_items(user_ids, self.context_type, self.context_id)
end


@ -589,7 +589,7 @@ class Submission < ActiveRecord::Base
)
end
end
self.assignment&.send_later_if_production(:multiple_module_actions, [self.user_id], :scored, self.score)
self.assignment&.delay_if_production&.multiple_module_actions([self.user_id], :scored, self.score)
end
true
end
@ -691,7 +691,7 @@ class Submission < ActiveRecord::Base
self.turnitin_data[asset_string] = data
end
send_at((2 ** attempt).minutes.from_now, :check_turnitin_status, attempt + 1) if needs_retry
delay(run_at: (2 ** attempt).minutes.from_now).check_turnitin_status(attempt + 1) if needs_retry
self.turnitin_data_changed!
self.save
end
@ -699,7 +699,7 @@ class Submission < ActiveRecord::Base
def turnitin_report_url(asset_string, user)
if self.turnitin_data && self.turnitin_data[asset_string] && self.turnitin_data[asset_string][:similarity_score]
turnitin = Turnitin::Client.new(*self.context.turnitin_settings)
self.send_later(:check_turnitin_status)
delay.check_turnitin_status
if self.grants_right?(user, :grade)
turnitin.submissionReportUrl(self, asset_string)
elsif self.grants_right?(user, :view_turnitin_report)
@ -710,7 +710,7 @@ class Submission < ActiveRecord::Base
end
end
TURNITIN_JOB_OPTS = { :n_strand => 'turnitin', :priority => Delayed::LOW_PRIORITY, :max_attempts => 2 }
TURNITIN_JOB_OPTS = { n_strand: 'turnitin', priority: Delayed::LOW_PRIORITY, max_attempts: 2 }
TURNITIN_RETRY = 5
def submit_to_turnitin(attempt=0)
@ -725,7 +725,7 @@ class Submission < ActiveRecord::Base
delete_turnitin_errors
else
if attempt < TURNITIN_RETRY
send_later_enqueue_args(:submit_to_turnitin, { :run_at => 5.minutes.from_now }.merge(TURNITIN_JOB_OPTS), attempt + 1)
delay(run_at: 5.minutes.from_now, **TURNITIN_JOB_OPTS).submit_to_turnitin(attempt + 1)
else
assignment_error = assignment.turnitin_settings[:error]
self.turnitin_data[:status] = 'error'
@ -747,13 +747,13 @@ class Submission < ActiveRecord::Base
end
end
send_later_enqueue_args(:check_turnitin_status, { :run_at => 5.minutes.from_now }.merge(TURNITIN_JOB_OPTS))
delay(run_at: 5.minutes.from_now, **TURNITIN_JOB_OPTS).check_turnitin_status
self.save
# Schedule retry if there were failures
submit_status = submission_response.present? && submission_response.values.all?{ |v| v[:object_id] }
unless submit_status
send_later_enqueue_args(:submit_to_turnitin, { :run_at => 5.minutes.from_now }.merge(TURNITIN_JOB_OPTS), attempt + 1) if attempt < TURNITIN_RETRY
delay(run_at: 5.minutes.from_now, **TURNITIN_JOB_OPTS).submit_to_turnitin(attempt + 1) if attempt < TURNITIN_RETRY
return false
end
@ -1002,7 +1002,7 @@ class Submission < ActiveRecord::Base
retry_mins = 240;
end
# if attempt <= 0, then that means no retries should be attempted
send_at(retry_mins.minutes.from_now, :check_vericite_status, attempt + 1) if attempt > 0 && needs_retry
delay(run_at: retry_mins.minutes.from_now).check_vericite_status(attempt + 1) if attempt > 0 && needs_retry
# if all we did was recheck scores, do not version this save (i.e. increase the attempt number)
if data_changed
self.vericite_data_changed!
@ -1029,7 +1029,7 @@ class Submission < ActiveRecord::Base
end
end
VERICITE_JOB_OPTS = { :n_strand => 'vericite', :priority => Delayed::LOW_PRIORITY, :max_attempts => 2 }
VERICITE_JOB_OPTS = { n_strand: 'vericite', priority: Delayed::LOW_PRIORITY, max_attempts: 2 }
VERICITE_RETRY = 5
def submit_to_vericite(attempt=0)
@ -1077,7 +1077,7 @@ class Submission < ActiveRecord::Base
end
# only save if there were newly submitted attachments
if update
send_later_enqueue_args(:check_vericite_status, { :run_at => 5.minutes.from_now }.merge(VERICITE_JOB_OPTS))
delay(run_at: 5.minutes.from_now, **VERICITE_JOB_OPTS).check_vericite_status
if !self.vericite_data_hash.empty?
# only set vericite provider flag if the hash isn't empty
self.vericite_data_hash[:provider] = :vericite
@ -1087,7 +1087,7 @@ class Submission < ActiveRecord::Base
# Schedule retry if there were failures
submit_status = submission_response.present? && submission_response.values.all?{ |v| v[:object_id] }
unless submit_status
send_later_enqueue_args(:submit_to_vericite, { :run_at => 5.minutes.from_now }.merge(VERICITE_JOB_OPTS), attempt + 1) if attempt < VERICITE_RETRY
delay(run_at: 5.minutes.from_now, **VERICITE_JOB_OPTS).submit_to_vericite(attempt + 1) if attempt < VERICITE_RETRY
return false
end
end
@ -1230,7 +1230,7 @@ class Submission < ActiveRecord::Base
Rails.logger.info("#submit_to_plagiarism_later submission ID: #{self.id}, type: #{plagiarism_service_to_use}, canSubmit? #{canSubmit}, submitPlag? #{submitPlag}")
if canSubmit && submitPlag
delay = Setting.get(delayName, 60.to_s).to_i
send_later_enqueue_args(delayFunction, { :run_at => delay.seconds.from_now }.merge(delayOpts))
delay(run_at: delay.seconds.from_now, **delayOpts).__send__(delayFunction)
end
end
# End Plagiarism functions
@ -1255,10 +1255,8 @@ class Submission < ActiveRecord::Base
def update_assignment
unless @assignment_changed_not_sub
self.send_later_enqueue_args(:context_module_action, {
singleton: "submission_context_module_action_#{self.global_id}",
on_conflict: :loose
})
delay(singleton: "submission_context_module_action_#{self.global_id}",
on_conflict: :loose).context_module_action
end
true
end
@ -1346,11 +1344,10 @@ class Submission < ActiveRecord::Base
opts[:preferred_plugins].unshift Canvadocs::RENDER_O365
end
a.send_later_enqueue_args :submit_to_canvadocs, {
:n_strand => 'canvadocs',
:max_attempts => 1,
:priority => Delayed::LOW_PRIORITY
}, 1, opts
a.delay(
n_strand: 'canvadocs',
priority: Delayed::LOW_PRIORITY).
submit_to_canvadocs(1, opts)
end
end
end
@ -1423,7 +1420,7 @@ class Submission < ActiveRecord::Base
def update_admins_if_just_submitted
if @just_submitted
context.send_later_if_production(:resubmission_for, assignment)
context.delay_if_production.resubmission_for(assignment)
end
true
end
@ -1646,7 +1643,7 @@ class Submission < ActiveRecord::Base
def queue_websnap
if !self.attachment_id && @attempt_changed && self.url && self.submission_type == 'online_url'
self.send_later_enqueue_args(:get_web_snapshot, { :priority => Delayed::LOW_PRIORITY })
delay(priority: Delayed::LOW_PRIORITY).get_web_snapshot
end
end
@ -1920,11 +1917,8 @@ class Submission < ActiveRecord::Base
self.course.feature_enabled?(:conditional_release)
end
if ConditionalRelease::Rule.is_trigger_assignment?(self.assignment)
ConditionalRelease::OverrideHandler.send_later_if_production_enqueue_args(
:handle_grade_change,
{:priority => Delayed::LOW_PRIORITY, :strand => "conditional_release_grade_change:#{self.global_assignment_id}"},
self
)
ConditionalRelease::OverrideHandler.delay_if_production(priority: Delayed::LOW_PRIORITY, strand: "conditional_release_grade_change:#{self.global_assignment_id}").
handle_grade_change(self)
end
end
end


@ -462,16 +462,11 @@ class User < ActiveRecord::Base
end
def update_root_account_ids_later
send_later_enqueue_args(
:update_root_account_ids,
{
max_attempts: MAX_ROOT_ACCOUNT_ID_SYNC_ATTEMPTS
}
)
delay(max_attempts: MAX_ROOT_ACCOUNT_ID_SYNC_ATTEMPTS).update_root_account_ids
end
def update_account_associations_later
self.send_later_if_production(:update_account_associations) unless self.class.skip_updating_account_associations?
delay_if_production.update_account_associations unless self.class.skip_updating_account_associations?
end
def update_account_associations_if_necessary


@ -39,7 +39,7 @@ class WebZipExport < EpubExport
update_attribute(:workflow_state, 'generating')
convert_to_offline_web_zip
end
handle_asynchronously :generate, priority: Delayed::LOW_PRIORITY, max_attempts: 1
handle_asynchronously :generate, priority: Delayed::LOW_PRIORITY
# WebZip Exportable overrides
def content_cartridge
@ -61,7 +61,7 @@ class WebZipExport < EpubExport
mark_as_generated
cleanup_file_path!(file_path)
end
handle_asynchronously :convert_to_offline_web_zip, priority: Delayed::LOW_PRIORITY, max_attempts: 1
handle_asynchronously :convert_to_offline_web_zip, priority: Delayed::LOW_PRIORITY
def cache_key
"web_zip_export_user_progress_#{global_id}"

View File

@ -191,8 +191,9 @@ end
# syntactic sugar and compatibility shims
module CanvasDelayedMessageSending
def delay_if_production(**kwargs)
delay(**kwargs.merge(synchronous: !Rails.env.production?))
def delay_if_production(sender: nil, **kwargs)
sender ||= __calculate_sender_for_delay
delay(sender: sender, **kwargs.merge(synchronous: !Rails.env.production?))
end
def send_later(method, *args, **kwargs)
@ -243,4 +244,4 @@ module CanvasDelayedMessageSending
enqueue_args
end
end
Object.send(:include, CanvasDelayedMessageSending)
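# Illustrative sketch (not part of this commit): with the shim above included
# into Object, delay_if_production builds the same job proxy as delay but runs
# the method synchronously outside production. `grade_summary` and
# :recalculate are hypothetical.
grade_summary.delay_if_production(priority: Delayed::LOW_PRIORITY).recalculate
# Aside from the sender bookkeeping, this is equivalent to:
grade_summary.delay(priority: Delayed::LOW_PRIORITY,
                    synchronous: !Rails.env.production?).recalculate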


@ -54,11 +54,10 @@ class PeriodicJobs
next if Delayed::Job == Delayed::Backend::ActiveRecord::Job && Delayed::Job.where(strand: strand, shard_id: Shard.current.id, locked_by: nil).exists?
dj_params = {
strand: strand,
max_attempts: 1,
priority: 40
}
dj_params[:run_at] = compute_run_at(jitter: jitter, local_offset: local_offset)
klass.send_later_enqueue_args(method, dj_params, *args)
klass.delay(**dj_params).__send__(method, *args)
end
end
end
@ -68,7 +67,6 @@ def with_each_shard_by_database(klass, method, *args, jitter: nil, local_offset:
:with_each_shard_by_database_in_region,
{
singleton: "periodic:region: #{klass}.#{method}",
max_attempts: 1,
}, klass, method, *args, jitter: jitter, local_offset: local_offset)
end


@ -139,13 +139,13 @@ Rails.application.config.after_initialize do
next if db.shards.empty?
regions << db.config[:region]
db.shards.first.activate do
klass.send_later_enqueue_args(method, enqueue_args, *args)
klass.delay(**enqueue_args).__send__(method, *args)
end
end
end
def self.send_in_region(region, klass, method, enqueue_args = {}, *args)
return klass.send_later_enqueue_args(method, enqueue_args, *args) if region.nil?
return klass.delay(**enqueue_args).__send__(method, *args) if region.nil?
shard = nil
all.find { |db| db.config[:region] == region && (shard = db.shards.first) }
@ -153,12 +153,12 @@ Rails.application.config.after_initialize do
# the app server knows what region it's in, but the database servers don't?
# just send locally
if shard.nil? && all.all? { |db| db.config[:region].nil? }
return klass.send_later_enqueue_args(method, enqueue_args, *args)
return klass.delay(**enqueue_args).__send__(method, *args)
end
raise "Could not find a shard in region #{region}" unless shard
shard.activate do
klass.send_later_enqueue_args(method, enqueue_args, *args)
klass.delay(**enqueue_args).__send__(method, *args)
end
end
end
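# Illustrative sketch (not part of this commit), assuming (as the surrounding
# `all` / `db.shards` calls suggest) that this block reopens
# Switchman::DatabaseServer. The job class, method, and options below are
# hypothetical.
Switchman::DatabaseServer.send_in_region(
  'us-east-1',
  NightlyReportJob, :process,
  { singleton: 'nightly_report_job' }
)
# With a nil region, or when no regions are configured anywhere, the job is
# enqueued on the current shard via klass.delay(**enqueue_args) instead.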


@ -19,7 +19,7 @@ class FixOverwrittenFileModuleItems < ActiveRecord::Migration[4.2]
tag :postdeploy
def up
DataFixup::FixOverwrittenFileModuleItems.send_later_if_production_enqueue_args(:run, :priority => Delayed::LOW_PRIORITY)
DataFixup::FixOverwrittenFileModuleItems.delay_if_production(priority: Delayed::LOW_PRIORITY).run
end
def down


@ -19,13 +19,8 @@ class FixNanGroupWeights < ActiveRecord::Migration[4.2]
tag :postdeploy
def up
DataFixup::FixNanGroupWeights.send_later_if_production_enqueue_args(
:run, {
priority: Delayed::LOWER_PRIORITY,
max_attempts: 1,
n_strand: "data_fixups:#{Shard.current.database_server.id}"
}
)
DataFixup::FixNanGroupWeights.delay_if_production(priority: Delayed::LOWER_PRIORITY,
n_strand: "data_fixups:#{Shard.current.database_server.id}").run
end
def down


@ -19,6 +19,6 @@ class DeleteEmptyProgressions < ActiveRecord::Migration[4.2]
tag :postdeploy
def up
DataFixup::DeleteEmptyProgressions.send_later_if_production_enqueue_args(:run, :priority => Delayed::LOW_PRIORITY)
DataFixup::DeleteEmptyProgressions.delay_if_production(priority: Delayed::LOW_PRIORITY).run
end
end


@ -20,7 +20,6 @@ class UpdateDeveloperKeyAccessTokenCounts < ActiveRecord::Migration[4.2]
tag :postdeploy
def up
DataFixup::UpdateDeveloperKeyAccessTokenCounts.send_later_if_production_enqueue_args(:run,
:priority => Delayed::LOW_PRIORITY)
DataFixup::UpdateDeveloperKeyAccessTokenCounts.delay_if_production(priority: Delayed::LOW_PRIORITY).run
end
end


@ -20,9 +20,8 @@ class PopulateGradingPeriodForSubmissions < ActiveRecord::Migration[4.2]
tag :postdeploy
def self.up
DataFixup::InitializeSubmissionCachedDueDate.send_later_if_production_enqueue_args(
:run,
singleton: "DataFixup:InitializeSubmissionCachedDueDate:#{Shard.current.id}"
)
DataFixup::InitializeSubmissionCachedDueDate.
delay_if_production(singleton: "DataFixup:InitializeSubmissionCachedDueDate:#{Shard.current.id}").
run
end
end


@ -19,8 +19,6 @@ class RePopulateAccountReportDateTimes < ActiveRecord::Migration[4.2]
tag :postdeploy
def change
DataFixup::PopulateAccountReportDateTimes.send_later_if_production_enqueue_args(
:run, priority: Delayed::LOW_PRIORITY
)
DataFixup::PopulateAccountReportDateTimes.delay_if_production(priority: Delayed::LOW_PRIORITY).run
end
end


@ -19,11 +19,7 @@ class FixPointsPossibleSumsInQuizzes < ActiveRecord::Migration[4.2]
tag :postdeploy
def up
DataFixup::FixPointsPossibleSumsInQuizzes.send_later_if_production_enqueue_args(
:run, {
priority: Delayed::LOW_PRIORITY,
strand: "fix_points_possible_sums:#{Shard.current.database_server.id}"
}
)
DataFixup::FixPointsPossibleSumsInQuizzes.delay_if_production(priority: Delayed::LOW_PRIORITY,
strand: "fix_points_possible_sums:#{Shard.current.database_server.id}").run
end
end


@ -20,8 +20,7 @@ class FixPlannerOverridesMarkedCompleteData < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def self.up
DataFixup::FixPlannerOverridesMarkedCompleteData.send_later_if_production_enqueue_args(
:run, { priority: Delayed::LOWER_PRIORITY, max_attempts: 1 })
DataFixup::FixPlannerOverridesMarkedCompleteData.delay_if_production(priority: Delayed::LOWER_PRIORITY).run
end
def self.down


@ -20,10 +20,7 @@ class PopulateCourseScoreOnScores < ActiveRecord::Migration[5.0]
tag :postdeploy
def up
DataFixup::PopulateScoresCourseScore.send_later_if_production_enqueue_args(
:run,
priority: Delayed::LOW_PRIORITY,
strand: "populate_scores_course_score_#{Shard.current.database_server.id}"
)
DataFixup::PopulateScoresCourseScore.delay_if_production(priority: Delayed::LOW_PRIORITY,
strand: "populate_scores_course_score_#{Shard.current.database_server.id}").run
end
end


@ -20,11 +20,9 @@ class CreateLtiLinksForLegacyLtiToolSettings < ActiveRecord::Migration[5.0]
tag :postdeploy
def up
DataFixup::CreateLtiLinksForLegacyLtiToolSettings.send_later_if_production_enqueue_args(:run,
priority: Delayed::LOW_PRIORITY,
max_attempts: 1,
n_strand: 'long_datafixups'
)
DataFixup::CreateLtiLinksForLegacyLtiToolSettings.
delay_if_production(priority: Delayed::LOW_PRIORITY, n_strand: 'long_datafixups').
run
end
def down


@ -20,8 +20,7 @@ class MoveMasterImportResults < ActiveRecord::Migration[5.0]
tag :postdeploy
def up
DataFixup::MoveMasterImportResults.send_later_if_production_enqueue_args(
:run, :priority => Delayed::LOW_PRIORITY, :n_strand => 'long_datafixups')
DataFixup::MoveMasterImportResults.delay_if_production(priority: Delayed::LOW_PRIORITY, n_strand: 'long_datafixups').run
end
def down


@ -25,11 +25,7 @@ class InitNewGradeHistoryAuditLogIndexes < ActiveRecord::Migration[4.2]
end
def self.up
DataFixup::InitNewGradeHistoryAuditLogIndexes.send_later_if_production_enqueue_args(
:run, {
priority: Delayed::LOW_PRIORITY,
strand: "init_new_grade_history_audit_log_indexes:#{Shard.current.database_server.id}"
}
)
DataFixup::InitNewGradeHistoryAuditLogIndexes.delay_if_production(priority: Delayed::LOW_PRIORITY,
strand: "init_new_grade_history_audit_log_indexes:#{Shard.current.database_server.id}").run
end
end


@ -19,10 +19,8 @@ class DeleteInvalidCommunicationChannels < ActiveRecord::Migration[4.2]
tag :postdeploy
def up
DataFixup::DeleteInvalidCommunicationChannels.send_later_if_production_enqueue_args(:run,
priority: Delayed::LOW_PRIORITY,
max_attempts: 1,
n_strand: 'long_datafixups'
)
DataFixup::DeleteInvalidCommunicationChannels.
delay_if_production(priority: Delayed::LOW_PRIORITY, n_strand: 'long_datafixups').
run
end
end


@ -19,10 +19,7 @@ class DeleteExtraPlaceholderSubmissions < ActiveRecord::Migration[4.2]
tag :postdeploy
def change
DataFixup::DeleteExtraPlaceholderSubmissions.send_later_if_production_enqueue_args(
:run,
priority: Delayed::LOW_PRIORITY,
strand: "DataFixup:DeleteExtraPlaceholderSubmissions:Migration" # only run one at a time for a job server
)
DataFixup::DeleteExtraPlaceholderSubmissions.delay_if_production(priority: Delayed::LOW_PRIORITY,
strand: "DataFixup:DeleteExtraPlaceholderSubmissions:Migration").run
end
end


@ -20,12 +20,9 @@ class ValidateAssignmentOverrides < ActiveRecord::Migration[5.0]
def self.up
Assignment.find_ids_in_ranges(:batch_size => 10_000) do |start_at, end_at|
DataFixup::ValidateAssignmentOverrides.send_later_if_production_enqueue_args(:run, {
priority: Delayed::LOW_PRIORITY,
max_attempts: 1,
n_strand: ["DataFixup::ValidateAssignmentOverrides", Shard.current.database_server.id]
}, start_at, end_at
)
DataFixup::ValidateAssignmentOverrides.
delay_if_production(priority: Delayed::LOW_PRIORITY, n_strand: ["DataFixup::ValidateAssignmentOverrides", Shard.current.database_server.id]).
run(start_at, end_at)
end
end


@ -20,7 +20,7 @@ class FixAssignmentPeerReviewJobs < ActiveRecord::Migration[5.0]
tag :postdeploy
def up
DataFixup::FixAssignmentPeerReviewJobs.send_later_if_production_enqueue_args(:run, priority: Delayed::LOW_PRIORITY)
DataFixup::FixAssignmentPeerReviewJobs.delay_if_production(priority: Delayed::LOW_PRIORITY).run
end
def down


@ -20,10 +20,6 @@ class PopulateRootAccountIdForGroupCategories < ActiveRecord::Migration[5.0]
tag :postdeploy
def up
DataFixup::PopulateRootAccountIdForGroupCategories.send_later_if_production_enqueue_args(
:run,
priority: Delayed::LOW_PRIORITY,
max_attempts: 1,
n_strand: 'long_datafixups')
DataFixup::PopulateRootAccountIdForGroupCategories.delay_if_production(priority: Delayed::LOW_PRIORITY, n_strand: 'long_datafixups').run
end
end


@ -19,10 +19,7 @@ class PopulateScoresAndMetadataForAssignmentGroupsAndTeacherView < ActiveRecord:
tag :postdeploy
def change
DataFixup::PopulateScoresAndMetadataForAssignmentGroupsAndTeacherView.send_later_if_production_enqueue_args(
:run,
priority: Delayed::LOW_PRIORITY,
strand: "DataFixup::PopulateScoresAndMetadataForAssignmentGroupsAndTeacherView::Migration"
)
DataFixup::PopulateScoresAndMetadataForAssignmentGroupsAndTeacherView.delay_if_production(priority: Delayed::LOW_PRIORITY,
strand: "DataFixup::PopulateScoresAndMetadataForAssignmentGroupsAndTeacherView::Migration").run
end
end


@ -19,9 +19,9 @@ class FixDiscussionTopicMaterializedViews < ActiveRecord::Migration[5.0]
tag :postdeploy
def up
DataFixup::FixDiscussionTopicMaterializedViews.send_later_if_production_enqueue_args(:run,
priority: Delayed::LOW_PRIORITY,
n_strand: 'long_datafixups')
DataFixup::FixDiscussionTopicMaterializedViews.
delay_if_production(priority: Delayed::LOW_PRIORITY, n_strand: 'long_datafixups').
run
end
def down


@ -20,10 +20,7 @@ class PopulatePointsForAllScores < ActiveRecord::Migration[5.0]
tag :postdeploy
def change
DataFixup::PopulatePointsForAllScores.send_later_if_production_enqueue_args(
:run,
priority: Delayed::LOW_PRIORITY,
strand: "DataFixup::PopulatePointsForAllScores::Migration"
)
DataFixup::PopulatePointsForAllScores.delay_if_production(priority: Delayed::LOW_PRIORITY,
strand: "DataFixup::PopulatePointsForAllScores::Migration").run
end
end


@ -19,12 +19,7 @@ class FixupGroupOriginalityReports < ActiveRecord::Migration[5.0]
tag :postdeploy
def change
DataFixup::FixupGroupOriginalityReports.send_later_if_production_enqueue_args(
:run,
{
priority: Delayed::LOWER_PRIORITY,
n_strand: "DataFixup::FixupGroupOriginalityReports:#{Shard.current.database_server.id}"
}
)
DataFixup::FixupGroupOriginalityReports.delay_if_production(priority: Delayed::LOWER_PRIORITY,
n_strand: "DataFixup::FixupGroupOriginalityReports:#{Shard.current.database_server.id}").run
end
end


@ -19,6 +19,6 @@ class AddOutcomesExport < ActiveRecord::Migration[5.0]
tag :postdeploy
def up
DataFixup::AddNewDefaultReport.send_later_if_production(:run, 'outcome_export_csv')
DataFixup::AddNewDefaultReport.delay_if_production.run('outcome_export_csv')
end
end


@ -22,16 +22,9 @@ class PopulateSubmissionAnonymousIds < ActiveRecord::Migration[5.1]
def up
Course.active.find_ids_in_ranges do |start_at, end_at|
DataFixup::PopulateSubmissionAnonymousIds.send_later_if_production_enqueue_args(
:run,
{
priority: Delayed::LOW_PRIORITY,
max_attempts: 1,
n_strand: ['DataFixup::PopulateSubmissionAnonymousIds', Shard.current.database_server.id]
},
start_at,
end_at
)
DataFixup::PopulateSubmissionAnonymousIds.delay_if_production(priority: Delayed::LOW_PRIORITY,
n_strand: ['DataFixup::PopulateSubmissionAnonymousIds', Shard.current.database_server.id]).
run(start_at, end_at)
end
end
end


@ -20,9 +20,7 @@ class BackfillAnonymousModeratedMarkingFields < ActiveRecord::Migration[5.1]
tag :postdeploy
def up
DataFixup::BackfillNulls.send_later_if_production_enqueue_args(
:run,
{priority: Delayed::LOW_PRIORITY, n_strand: 'long_datafixups'},
DataFixup::BackfillNulls.delay_if_production(priority: Delayed::LOW_PRIORITY, n_strand: 'long_datafixups').run(
Assignment,
{
graders_anonymous_to_graders: false,


@ -19,10 +19,7 @@ class UpdateGradingStandardsToFullRange < ActiveRecord::Migration[5.1]
tag :postdeploy
def up
DataFixup::UpdateGradingStandardsToFullRange.send_later_if_production_enqueue_args(
:run,
priority: Delayed::LOW_PRIORITY,
strand: "DataFixup::UpdateGradingStandardsToFullRange::Migration:#{Shard.current.database_server.id}"
)
DataFixup::UpdateGradingStandardsToFullRange.delay_if_production(priority: Delayed::LOW_PRIORITY,
strand: "DataFixup::UpdateGradingStandardsToFullRange::Migration:#{Shard.current.database_server.id}").run
end
end


@ -21,14 +21,8 @@ class PopulateLastCommentAtOnSubmissions < ActiveRecord::Migration[5.0]
def up
Submission.find_ids_in_ranges(:batch_size => 1_000_000) do |start_at, end_at|
DataFixup::PopulateLastCommentAtOnSubmissions.send_later_if_production_enqueue_args(
:run,
{
priority: Delayed::LOW_PRIORITY, max_attempts: 1,
n_strand: ['DataFixup::PopulateLastCommentAtOnSubmissions', Shard.current.database_server.id]
},
start_at, end_at
)
DataFixup::PopulateLastCommentAtOnSubmissions.delay_if_production(priority: Delayed::LOW_PRIORITY,
n_strand: ['DataFixup::PopulateLastCommentAtOnSubmissions', Shard.current.database_server.id]).run(start_at, end_at)
end
end

Some files were not shown because too many files have changed in this diff.