RuboCop: Style/RedundantBegin

[skip-stages=Flakey]

auto-corrected

Change-Id: I6a29a9d2fa1057e2278c105a8331d2c79e496897
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/277904
Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com>
Reviewed-by: Simon Williams <simon@instructure.com>
QA-Review: Cody Cutrer <cody@instructure.com>
Product-Review: Cody Cutrer <cody@instructure.com>
Cody Cutrer 2021-11-10 13:01:30 -07:00
parent f23a4917d7
commit ddfd72ca17
126 changed files with 1405 additions and 1732 deletions
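For context, Style/RedundantBegin flags a begin/end pair that wraps the entire body of a method or block: def and do...end already accept rescue and ensure clauses directly, so the wrapper only adds a level of indentation. A minimal sketch of the two shapes of this autocorrection seen throughout the diffs below (hypothetical method and file names, not from this commit):

    require "yaml"

    # Before: the begin/end pair adds nothing but indentation.
    def load_settings
      begin
        YAML.load_file("settings.yml")
      rescue Errno::ENOENT
        {}
      end
    end

    # After: the rescue clause attaches directly to the method body.
    def load_settings
      YAML.load_file("settings.yml")
    rescue Errno::ENOENT
      {}
    end

    # The same correction applies to memoized assignments: a single-expression
    # begin body collapses onto the assignment, and a multi-branch body becomes
    # an if/else expression, which is why many hunks below only re-indent.
    def theme
      @theme ||= if ENV["DARK_MODE"]
                   "dark"
                 else
                   "light"
                 end
    end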


@@ -138,6 +138,8 @@ Style/ParenthesesAroundCondition:
   Severity: error
 Style/PerlBackrefs:
   Enabled: false # Regexp.last_match(1) is far worse than $1
+Style/RedundantBegin:
+  Severity: error
 Style/RedundantInterpolation:
   Severity: error
 Style/RedundantFreeze:
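With Severity: error, offenses from this cop are reported at RuboCop's error level, so a reintroduced redundant begin fails lint runs that treat errors as failures rather than surfacing as a mere warning.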


@@ -1486,23 +1486,21 @@ class ApplicationController < ActionController::Base
   end

   def log_page_view
-    begin
-      user = @current_user || (@accessed_asset && @accessed_asset[:user])
-      if user && @log_page_views != false
-        add_interaction_seconds
-        log_participation(user)
-        log_gets
-        finalize_page_view
-      else
-        @page_view.destroy if @page_view && !@page_view.new_record?
-      end
-    rescue StandardError, CassandraCQL::Error::InvalidRequestException => e
-      Canvas::Errors.capture_exception(:page_view, e)
-      logger.error "Pageview error!"
-      raise e if Rails.env.development?
-      true
-    end
+    user = @current_user || (@accessed_asset && @accessed_asset[:user])
+    if user && @log_page_views != false
+      add_interaction_seconds
+      log_participation(user)
+      log_gets
+      finalize_page_view
+    else
+      @page_view.destroy if @page_view && !@page_view.new_record?
+    end
+  rescue StandardError, CassandraCQL::Error::InvalidRequestException => e
+    Canvas::Errors.capture_exception(:page_view, e)
+    logger.error "Pageview error!"
+    raise e if Rails.env.development?
+    true
   end

   def add_interaction_seconds
@@ -2144,29 +2142,27 @@ class ApplicationController < ActionController::Base
     feature = feature.to_sym
     return @features_enabled[feature] if @features_enabled[feature] != nil

-    @features_enabled[feature] ||= begin
-      if [:question_banks].include?(feature)
-        true
-      elsif feature == :twitter
-        !!Twitter::Connection.config
-      elsif feature == :diigo
-        !!Diigo::Connection.config
-      elsif feature == :google_drive
-        Canvas::Plugin.find(:google_drive).try(:enabled?)
-      elsif feature == :etherpad
-        !!EtherpadCollaboration.config
-      elsif feature == :kaltura
-        !!CanvasKaltura::ClientV3.config
-      elsif feature == :web_conferences
-        !!WebConference.config
-      elsif feature == :vericite
-        Canvas::Plugin.find(:vericite).try(:enabled?)
-      elsif feature == :lockdown_browser
-        Canvas::Plugin.all_for_tag(:lockdown_browser).any? { |p| p.settings[:enabled] }
-      else
-        !!AccountServices.allowable_services[feature]
-      end
-    end
+    @features_enabled[feature] ||= if [:question_banks].include?(feature)
+                                     true
+                                   elsif feature == :twitter
+                                     !!Twitter::Connection.config
+                                   elsif feature == :diigo
+                                     !!Diigo::Connection.config
+                                   elsif feature == :google_drive
+                                     Canvas::Plugin.find(:google_drive).try(:enabled?)
+                                   elsif feature == :etherpad
+                                     !!EtherpadCollaboration.config
+                                   elsif feature == :kaltura
+                                     !!CanvasKaltura::ClientV3.config
+                                   elsif feature == :web_conferences
+                                     !!WebConference.config
+                                   elsif feature == :vericite
+                                     Canvas::Plugin.find(:vericite).try(:enabled?)
+                                   elsif feature == :lockdown_browser
+                                     Canvas::Plugin.all_for_tag(:lockdown_browser).any? { |p| p.settings[:enabled] }
+                                   else
+                                     !!AccountServices.allowable_services[feature]
+                                   end
   end

 helper_method :feature_enabled?
@@ -2732,15 +2728,13 @@ class ApplicationController < ActionController::Base
   end

   def user_has_google_drive
-    @user_has_google_drive ||= begin
-      if logged_in_user
-        Rails.cache.fetch_with_batched_keys('user_has_google_drive', batch_object: logged_in_user, batched_keys: :user_services) do
-          google_drive_connection.authorized?
-        end
-      else
-        google_drive_connection.authorized?
-      end
-    end
+    @user_has_google_drive ||= if logged_in_user
+                                 Rails.cache.fetch_with_batched_keys('user_has_google_drive', batch_object: logged_in_user, batched_keys: :user_services) do
+                                   google_drive_connection.authorized?
+                                 end
+                               else
+                                 google_drive_connection.authorized?
+                               end
   end

   def setup_live_events_context


@@ -116,11 +116,9 @@ class DeveloperKeyAccountBindingsController < ApplicationController
   end

   def existing_binding
-    @_existing_binding ||= begin
-      account.developer_key_account_bindings.find_by(
-        developer_key_id: params[:developer_key_id]
-      )
-    end
+    @_existing_binding ||= account.developer_key_account_bindings.find_by(
+      developer_key_id: params[:developer_key_id]
+    )
   end

   def developer_key


@@ -59,30 +59,28 @@ class EportfolioCategoriesController < ApplicationController
   end

   def show
-    begin
-      if params[:verifier] == @portfolio.uuid
-        session[:eportfolio_ids] ||= []
-        session[:eportfolio_ids] << @portfolio.id
-        session[:permissions_key] = SecureRandom.uuid
-      end
-      if authorized_action(@portfolio, @current_user, :read)
-        if params[:id]
-          @category = @portfolio.eportfolio_categories.find(params[:id])
-        elsif params[:category_name]
-          @category = @portfolio.eportfolio_categories.where(slug: params[:category_name]).first!
-        end
-        @page = @category.eportfolio_entries.first
-        @page ||= @portfolio.eportfolio_entries.create(:eportfolio_category => @category, :allow_comments => true, :show_comments => true, :name => t(:default_name, "New Page")) if @portfolio.grants_right?(@current_user, session, :update)
-        raise ActiveRecord::RecordNotFound if !@page
-        eportfolio_page_attributes
-        render "eportfolios/show", stream: can_stream_template?
-      end
-    rescue ActiveRecord::RecordNotFound
-      flash[:notice] = t('errors.missing_page', "Couldn't find that page")
-      redirect_to eportfolio_url(@portfolio.id)
-    end
+    if params[:verifier] == @portfolio.uuid
+      session[:eportfolio_ids] ||= []
+      session[:eportfolio_ids] << @portfolio.id
+      session[:permissions_key] = SecureRandom.uuid
+    end
+    if authorized_action(@portfolio, @current_user, :read)
+      if params[:id]
+        @category = @portfolio.eportfolio_categories.find(params[:id])
+      elsif params[:category_name]
+        @category = @portfolio.eportfolio_categories.where(slug: params[:category_name]).first!
+      end
+      @page = @category.eportfolio_entries.first
+      @page ||= @portfolio.eportfolio_entries.create(:eportfolio_category => @category, :allow_comments => true, :show_comments => true, :name => t(:default_name, "New Page")) if @portfolio.grants_right?(@current_user, session, :update)
+      raise ActiveRecord::RecordNotFound if !@page
+      eportfolio_page_attributes
+      render "eportfolios/show", stream: can_stream_template?
+    end
+  rescue ActiveRecord::RecordNotFound
+    flash[:notice] = t('errors.missing_page', "Couldn't find that page")
+    redirect_to eportfolio_url(@portfolio.id)
   end

   def destroy


@@ -45,13 +45,11 @@ module Lti::Concerns
   end

   def jwt_validator
-    @jwt_validator ||= begin
-      Canvas::Security::JwtValidator.new(
-        jwt: verified_jwt,
-        expected_aud: Canvas::Security.config['lti_iss'],
-        require_iss: true
-      )
-    end
+    @jwt_validator ||= Canvas::Security::JwtValidator.new(
+      jwt: verified_jwt,
+      expected_aud: Canvas::Security.config['lti_iss'],
+      require_iss: true
+    )
   end

   def oembed_endpoint


@@ -159,11 +159,9 @@ module Lti
   end

   def id_token
-    @id_token ||= begin
-      Lti::Messages::JwtMessage.generate_id_token(cached_launch_with_nonce).merge({
-        state: oidc_params[:state]
-      })
-    end
+    @id_token ||= Lti::Messages::JwtMessage.generate_id_token(cached_launch_with_nonce).merge({
+      state: oidc_params[:state]
+    })
   end

   def authorize_redirect_url


@@ -221,12 +221,10 @@ module Lti
   private

   def line_item_params
-    @_line_item_params ||= begin
-      params.permit(%i(resourceId resourceLinkId scoreMaximum label tag),
-                    Lti::LineItem::AGS_EXT_SUBMISSION_TYPE => [:type, :external_tool_url]).transform_keys do |k|
-        k.to_s.underscore
-      end.except(:resource_link_id)
-    end
+    @_line_item_params ||= params.permit(%i(resourceId resourceLinkId scoreMaximum label tag),
+                                         Lti::LineItem::AGS_EXT_SUBMISSION_TYPE => [:type, :external_tool_url]).transform_keys do |k|
+      k.to_s.underscore
+    end.except(:resource_link_id)
   end

   def assignment


@@ -301,13 +301,11 @@ module Lti
   end

   def submission
-    @_submission ||= begin
-      if params[:file_id].present?
-        AttachmentAssociation.find_by(attachment_id: params[:file_id])&.context
-      else
-        Submission.active.find(params[:submission_id])
-      end
-    end
+    @_submission ||= if params[:file_id].present?
+                       AttachmentAssociation.find_by(attachment_id: params[:file_id])&.context
+                     else
+                       Submission.active.find(params[:submission_id])
+                     end
   end

   def attachment
@@ -347,20 +345,18 @@ module Lti
   end

   def lti_link_params
-    @_lti_link_params ||= begin
-      if lti_link_settings&.dig('tool_setting', 'resource_type_code')
-        lti_link_settings['tool_setting'].merge({
-          id: @report&.lti_link&.id,
-          product_code: tool_proxy.product_family.product_code,
-          vendor_code: tool_proxy.product_family.vendor_code
-        })
-      else
-        {
-          id: @report&.lti_link&.id,
-          _destroy: true
-        }
-      end
-    end
+    @_lti_link_params ||= if lti_link_settings&.dig('tool_setting', 'resource_type_code')
+                            lti_link_settings['tool_setting'].merge({
+                              id: @report&.lti_link&.id,
+                              product_code: tool_proxy.product_family.product_code,
+                              vendor_code: tool_proxy.product_family.vendor_code
+                            })
+                          else
+                            {
+                              id: @report&.lti_link&.id,
+                              _destroy: true
+                            }
+                          end
   end

   def lti_link_settings


@@ -89,9 +89,7 @@ module Lti
     private

     def subscription_context
-      @_subscription_context ||= begin
-        SubscriptionsValidator.retrieve_context(subscription)
-      end
+      @_subscription_context ||= SubscriptionsValidator.retrieve_context(subscription)
     end
   end
 end


@@ -399,13 +399,11 @@ class Quizzes::QuizQuestionsController < ApplicationController
   private

   def guard_against_big_fields
-    begin
-      yield
-    rescue Quizzes::QuizQuestion::RawFields::FieldTooLongError => ex
-      raise ex unless request.xhr?
-      render_xhr_exception(ex, ex.message)
-    end
+    yield
+  rescue Quizzes::QuizQuestion::RawFields::FieldTooLongError => ex
+    raise ex unless request.xhr?
+    render_xhr_exception(ex, ex.message)
   end

   def require_question


@@ -1224,26 +1224,24 @@ class UsersController < ApplicationController
   ServiceCredentials = Struct.new(:service_user_name, :decrypted_password)

   def create_user_service
-    begin
-      user_name = params[:user_service][:user_name]
-      password = params[:user_service][:password]
-      service = ServiceCredentials.new(user_name, password)
-      case params[:user_service][:service]
-      when 'delicious'
-        delicious_get_last_posted(service)
-      when 'diigo'
-        Diigo::Connection.diigo_get_bookmarks(service)
-      when 'skype'
-        true
-      else
-        raise "Unknown Service"
-      end
-      @service = UserService.register_from_params(@current_user, params[:user_service])
-      render :json => @service
-    rescue => e
-      Canvas::Errors.capture_exception(:user_service, e)
-      render :json => { :errors => true }, :status => :bad_request
-    end
+    user_name = params[:user_service][:user_name]
+    password = params[:user_service][:password]
+    service = ServiceCredentials.new(user_name, password)
+    case params[:user_service][:service]
+    when 'delicious'
+      delicious_get_last_posted(service)
+    when 'diigo'
+      Diigo::Connection.diigo_get_bookmarks(service)
+    when 'skype'
+      true
+    else
+      raise "Unknown Service"
+    end
+    @service = UserService.register_from_params(@current_user, params[:user_service])
+    render :json => @service
+  rescue => e
+    Canvas::Errors.capture_exception(:user_service, e)
+    render :json => { :errors => true }, :status => :bad_request
   end

   def services


@@ -26,23 +26,19 @@ module GraphQLHelpers
   # will get a standard canvas id
   def self.relay_or_legacy_id_prepare_func(expected_type)
     proc do |relay_or_legacy_id|
-      begin
-        self.parse_relay_or_legacy_id(relay_or_legacy_id, expected_type)
-      rescue InvalidIDError => e
-        GraphQL::ExecutionError.new(e.message)
-      end
+      self.parse_relay_or_legacy_id(relay_or_legacy_id, expected_type)
+    rescue InvalidIDError => e
+      GraphQL::ExecutionError.new(e.message)
     end
   end

   def self.relay_or_legacy_ids_prepare_func(expected_type)
     proc do |relay_or_legacy_ids|
-      begin
-        relay_or_legacy_ids.map do |relay_or_legacy_id|
-          self.parse_relay_or_legacy_id(relay_or_legacy_id, expected_type)
-        end
-      rescue InvalidIDError => e
-        GraphQL::ExecutionError.new(e.message)
-      end
+      relay_or_legacy_ids.map do |relay_or_legacy_id|
+        self.parse_relay_or_legacy_id(relay_or_legacy_id, expected_type)
+      end
+    rescue InvalidIDError => e
+      GraphQL::ExecutionError.new(e.message)
     end
   end


@@ -28,19 +28,17 @@ module GraphQLPostgresTimeout
       yield
     else
       ActiveRecord::Base.transaction do
-        begin
-          statement_timeout = Integer(Setting.get('graphql_statement_timeout', '60_000'))
-          ActiveRecord::Base.connection.execute "SET statement_timeout = #{statement_timeout}"
-          yield
-        rescue ActiveRecord::StatementInvalid => e
-          if PG::QueryCanceled === e.cause
-            Rails.logger.warn {
-              "GraphQL Operation failed due to postgres statement_timeout:\n#{query.query_string}"
-            }
-            raise GraphQLPostgresTimeout::Error, "operation timed out"
-          end
-          raise GraphQL::ExecutionError, "Invalid SQL: #{e.message}"
-        end
+        statement_timeout = Integer(Setting.get('graphql_statement_timeout', '60_000'))
+        ActiveRecord::Base.connection.execute "SET statement_timeout = #{statement_timeout}"
+        yield
+      rescue ActiveRecord::StatementInvalid => e
+        if PG::QueryCanceled === e.cause
+          Rails.logger.warn {
+            "GraphQL Operation failed due to postgres statement_timeout:\n#{query.query_string}"
+          }
+          raise GraphQLPostgresTimeout::Error, "operation timed out"
+        end
+        raise GraphQL::ExecutionError, "Invalid SQL: #{e.message}"
       end
     end
   end


@@ -76,11 +76,9 @@ module ApplicationHelper
     Rails
       .cache
      .fetch(['short_name_lookup', code].cache_key) do
-        begin
-          Context.find_by_asset_string(code).short_name
-        rescue StandardError
-          ''
-        end
+        Context.find_by_asset_string(code).short_name
+      rescue StandardError
+        ''
       end
   end
@@ -1113,12 +1111,10 @@ module ApplicationHelper
   def csp_report_uri
     @csp_report_uri ||=
-      begin
-        if (host = csp_context.root_account.csp_logging_config['host'])
-          "; report-uri #{host}report/#{csp_context.root_account.global_id}"
-        else
-          ''
-        end
-      end
+      if (host = csp_context.root_account.csp_logging_config['host'])
+        "; report-uri #{host}report/#{csp_context.root_account.global_id}"
+      else
+        ''
+      end
   end


@@ -63,17 +63,16 @@ module SectionTabHelper
   def section_tabs
     @section_tabs ||=
-      begin
-        if @context && available_section_tabs.any?
-          content_tag(:nav, { role: 'navigation', 'aria-label': nav_name }) do
-            concat(
-              content_tag(:ul, id: 'section-tabs') do
-                available_section_tabs.map { |tab| section_tab_tag(tab, @context, get_active_tab) }
-              end
-            )
-          end
-        end
-      end
+      if @context && available_section_tabs.any?
+        content_tag(:nav, { role: 'navigation', 'aria-label': nav_name }) do
+          concat(
+            content_tag(:ul, id: 'section-tabs') do
+              available_section_tabs.map { |tab| section_tab_tag(tab, @context, get_active_tab) }
+            end
+          )
+        end
+      end
     raw(@section_tabs)
   end


@@ -1274,16 +1274,14 @@ class Account < ActiveRecord::Base
     return [] unless user

     @account_users_cache ||= {}
-    @account_users_cache[user.global_id] ||= begin
-      if self.site_admin?
-        account_users_for(user) # has own cache
-      else
-        Rails.cache.fetch_with_batched_keys(['account_users_for_user', user.cache_key(:account_users)].cache_key,
-                                            batch_object: self, batched_keys: :account_chain, skip_cache_if_disabled: true) do
-          account_users_for(user).each(&:clear_association_cache)
-        end
-      end
-    end
+    @account_users_cache[user.global_id] ||= if self.site_admin?
+                                               account_users_for(user) # has own cache
+                                             else
+                                               Rails.cache.fetch_with_batched_keys(['account_users_for_user', user.cache_key(:account_users)].cache_key,
+                                                                                   batch_object: self, batched_keys: :account_chain, skip_cache_if_disabled: true) do
+                                                 account_users_for(user).each(&:clear_association_cache)
+                                               end
+                                             end
   end

   # returns all active account users for this entire account tree


@@ -317,12 +317,10 @@ class AccountNotification < ActiveRecord::Base
     # don't try to send a message to an entire account in one job
     self.applicable_user_ids.each_slice(self.class.users_per_message_batch) do |sliced_user_ids|
-      begin
-        self.message_recipients = sliced_user_ids.map { |id| "user_#{id}" }
-        self.save # trigger the broadcast policy
-      ensure
-        self.message_recipients = nil
-      end
+      self.message_recipients = sliced_user_ids.map { |id| "user_#{id}" }
+      self.save # trigger the broadcast policy
+    ensure
+      self.message_recipients = nil
     end
     self.update_attribute(:messages_sent_at, Time.now.utc)
   end


@@ -39,19 +39,17 @@ class AssetUserAccess < ActiveRecord::Base
   scope :most_recent, -> { order('updated_at DESC') }

   def infer_root_account_id(asset_for_root_account_id = nil)
-    self.root_account_id ||= begin
-      if context_type != 'User'
-        context&.resolved_root_account_id || 0
-      elsif asset_for_root_account_id.is_a?(User)
-        # Unfillable. Point to the dummy root account with id=0.
-        0
-      else
-        asset_for_root_account_id.try(:resolved_root_account_id) ||
-          asset_for_root_account_id.try(:root_account_id) || 0
-        # We could default `asset_for_root_account_id ||= asset`, but AUAs shouldn't
-        # ever be created outside of .log(), and calling `asset` would add a DB hit
-      end
-    end
+    self.root_account_id ||= if context_type != 'User'
+                               context&.resolved_root_account_id || 0
+                             elsif asset_for_root_account_id.is_a?(User)
+                               # Unfillable. Point to the dummy root account with id=0.
+                               0
+                             else
+                               asset_for_root_account_id.try(:resolved_root_account_id) ||
+                                 asset_for_root_account_id.try(:root_account_id) || 0
+                               # We could default `asset_for_root_account_id ||= asset`, but AUAs shouldn't
+                               # ever be created outside of .log(), and calling `asset` would add a DB hit
+                             end
   end

   def category


@@ -1192,12 +1192,10 @@ class Assignment < ActiveRecord::Base
   private :validate_resource_link_custom_params

   def primary_resource_link
-    @primary_resource_link ||= begin
-      lti_resource_links.find_by(
-        resource_link_uuid: lti_context_id,
-        context: self
-      )
-    end
+    @primary_resource_link ||= lti_resource_links.find_by(
+      resource_link_uuid: lti_context_id,
+      context: self
+    )
   end

   def lti_1_3_external_tool_tag?


@@ -60,18 +60,16 @@ class AssignmentConfigurationToolLookup < ActiveRecord::Base
   end

   def lti_tool
-    @_lti_tool ||= begin
-      if tool_id.present?
-        tool
-      elsif tool_type == 'Lti::MessageHandler'
-        Lti::MessageHandler.by_resource_codes(
-          vendor_code: tool_vendor_code,
-          product_code: tool_product_code,
-          resource_type_code: tool_resource_type_code,
-          context: assignment.course
-        )
-      end
-    end
+    @_lti_tool ||= if tool_id.present?
+                     tool
+                   elsif tool_type == 'Lti::MessageHandler'
+                     Lti::MessageHandler.by_resource_codes(
+                       vendor_code: tool_vendor_code,
+                       product_code: tool_product_code,
+                       resource_type_code: tool_resource_type_code,
+                       context: assignment.course
+                     )
+                   end
   end

   def resource_codes


@@ -537,11 +537,9 @@ class Attachment < ActiveRecord::Base
   end

   def root_account
-    begin
-      root_account_id && Account.find_cached(root_account_id)
-    rescue ::Canvas::AccountCacheError
-      nil
-    end
+    root_account_id && Account.find_cached(root_account_id)
+  rescue ::Canvas::AccountCacheError
+    nil
   end

   def namespace
@@ -1034,21 +1032,17 @@ class Attachment < ActiveRecord::Base
   end

   def save_without_broadcasting
-    begin
-      @skip_broadcasts = true
-      save
-    ensure
-      @skip_broadcasts = false
-    end
+    @skip_broadcasts = true
+    save
+  ensure
+    @skip_broadcasts = false
   end

   def save_without_broadcasting!
-    begin
-      @skip_broadcasts = true
-      save!
-    ensure
-      @skip_broadcasts = false
-    end
+    @skip_broadcasts = true
+    save!
+  ensure
+    @skip_broadcasts = false
   end

   # called before save
@@ -2008,62 +2002,60 @@ class Attachment < ActiveRecord::Base
   end

   def clone_url(url, duplicate_handling, check_quota, opts = {})
-    begin
-      Attachment.clone_url_as_attachment(url, :attachment => self)
-      if check_quota
-        self.save! # save to calculate attachment size, otherwise self.size is nil
-        if Attachment.over_quota?(opts[:quota_context] || self.context, self.size)
-          raise OverQuotaError, t(:over_quota, 'The downloaded file exceeds the quota.')
-        end
-      end
-      self.file_state = 'available'
-      self.save!
-      if opts[:progress]
-        # the UI only needs the id from here
-        opts[:progress].set_results({ id: self.id })
-      end
-      handle_duplicates(duplicate_handling || 'overwrite')
-      nil # the rescue returns true if the file failed and is retryable, nil if successful
-    rescue StandardError => e
-      failed_retryable = false
-      self.file_state = 'errored'
-      self.workflow_state = 'errored'
-      case e
-      when CanvasHttp::TooManyRedirectsError
-        failed_retryable = true
-        self.upload_error_message = t :upload_error_too_many_redirects, "Too many redirects for %{url}", url: url
-      when CanvasHttp::InvalidResponseCodeError
-        failed_retryable = true
-        self.upload_error_message = t :upload_error_invalid_response_code, "Invalid response code, expected 200 got %{code} for %{url}", :code => e.code, url: url
-        Canvas::Errors.capture(e, clone_url_error_info(e, url))
-      when CanvasHttp::RelativeUriError
-        self.upload_error_message = t :upload_error_relative_uri, "No host provided for the URL: %{url}", :url => url
-      when URI::Error, ArgumentError
-        # assigning all ArgumentError to InvalidUri may be incorrect
-        self.upload_error_message = t :upload_error_invalid_url, "Could not parse the URL: %{url}", :url => url
-      when Timeout::Error
-        failed_retryable = true
-        self.upload_error_message = t :upload_error_timeout, "The request timed out: %{url}", :url => url
-      when OverQuotaError
-        self.upload_error_message = t :upload_error_over_quota, "file size exceeds quota limits: %{bytes} bytes", :bytes => self.size
-      else
-        failed_retryable = true
-        self.upload_error_message = t :upload_error_unexpected, "An unknown error occurred downloading from %{url}", :url => url
-        Canvas::Errors.capture(e, clone_url_error_info(e, url))
-      end
-      if opts[:progress]
-        opts[:progress].message = self.upload_error_message
-        opts[:progress].fail!
-      end
-      self.save!
-      failed_retryable
-    end
+    Attachment.clone_url_as_attachment(url, :attachment => self)
+    if check_quota
+      self.save! # save to calculate attachment size, otherwise self.size is nil
+      if Attachment.over_quota?(opts[:quota_context] || self.context, self.size)
+        raise OverQuotaError, t(:over_quota, 'The downloaded file exceeds the quota.')
+      end
+    end
+    self.file_state = 'available'
+    self.save!
+    if opts[:progress]
+      # the UI only needs the id from here
+      opts[:progress].set_results({ id: self.id })
+    end
+    handle_duplicates(duplicate_handling || 'overwrite')
+    nil # the rescue returns true if the file failed and is retryable, nil if successful
+  rescue StandardError => e
+    failed_retryable = false
+    self.file_state = 'errored'
+    self.workflow_state = 'errored'
+    case e
+    when CanvasHttp::TooManyRedirectsError
+      failed_retryable = true
+      self.upload_error_message = t :upload_error_too_many_redirects, "Too many redirects for %{url}", url: url
+    when CanvasHttp::InvalidResponseCodeError
+      failed_retryable = true
+      self.upload_error_message = t :upload_error_invalid_response_code, "Invalid response code, expected 200 got %{code} for %{url}", :code => e.code, url: url
+      Canvas::Errors.capture(e, clone_url_error_info(e, url))
+    when CanvasHttp::RelativeUriError
+      self.upload_error_message = t :upload_error_relative_uri, "No host provided for the URL: %{url}", :url => url
+    when URI::Error, ArgumentError
+      # assigning all ArgumentError to InvalidUri may be incorrect
+      self.upload_error_message = t :upload_error_invalid_url, "Could not parse the URL: %{url}", :url => url
+    when Timeout::Error
+      failed_retryable = true
+      self.upload_error_message = t :upload_error_timeout, "The request timed out: %{url}", :url => url
+    when OverQuotaError
+      self.upload_error_message = t :upload_error_over_quota, "file size exceeds quota limits: %{bytes} bytes", :bytes => self.size
+    else
+      failed_retryable = true
+      self.upload_error_message = t :upload_error_unexpected, "An unknown error occurred downloading from %{url}", :url => url
+      Canvas::Errors.capture(e, clone_url_error_info(e, url))
+    end
+    if opts[:progress]
+      opts[:progress].message = self.upload_error_message
+      opts[:progress].fail!
+    end
+    self.save!
+    failed_retryable
   end

   def crocodoc_available?


@@ -81,9 +81,7 @@ class AuthenticationProvider::LinkedIn < AuthenticationProvider::OAuth2
   end

   def email(token)
-    token.options[:emailAddress] ||= begin
-      token.get("/v2/emailAddress?q=members&projection=(elements*(handle~))").parsed["elements"].first["handle~"]
-    end
+    token.options[:emailAddress] ||= token.get("/v2/emailAddress?q=members&projection=(elements*(handle~))").parsed["elements"].first["handle~"]
   end

   def email_required?


@@ -29,15 +29,13 @@ class AuthenticationProvider::SAML::MetadataRefresher
       .shard(shard_scope)

     providers.each do |provider|
-      begin
-        new_data = refresh_if_necessary(provider.global_id, provider.metadata_uri)
-        next unless new_data
-
-        provider.populate_from_metadata_xml(new_data)
-        provider.save! if provider.changed?
-      rescue => e
-        ::Canvas::Errors.capture_exception(:saml_metadata_refresh, e)
-      end
+      new_data = refresh_if_necessary(provider.global_id, provider.metadata_uri)
+      next unless new_data
+
+      provider.populate_from_metadata_xml(new_data)
+      provider.save! if provider.changed?
+    rescue => e
+      ::Canvas::Errors.capture_exception(:saml_metadata_refresh, e)
     end
   end


@@ -297,18 +297,16 @@ class BigBlueButtonConference < WebConference
   end

   def fetch_recordings
-    @loaded_recordings ||= begin
-      if conference_key && settings[:record]
-        response = send_request(:getRecordings, {
-          :meetingID => conference_key,
-        })
-        result = response[:recordings] if response
-        result = [] if result.is_a?(String)
-        Array(result)
-      else
-        []
-      end
-    end
+    @loaded_recordings ||= if conference_key && settings[:record]
+                             response = send_request(:getRecordings, {
+                               :meetingID => conference_key,
+                             })
+                             result = response[:recordings] if response
+                             result = [] if result.is_a?(String)
+                             Array(result)
+                           else
+                             []
+                           end
   end

   def generate_request(*args)


@@ -614,25 +614,21 @@ class ContextModule < ActiveRecord::Base
   end

   def cached_active_tags
-    @cached_active_tags ||= begin
-      if self.content_tags.loaded?
-        # don't reload the preloaded content
-        self.content_tags.select { |tag| tag.active? }
-      else
-        self.content_tags.active.to_a
-      end
-    end
+    @cached_active_tags ||= if self.content_tags.loaded?
+                              # don't reload the preloaded content
+                              self.content_tags.select { |tag| tag.active? }
+                            else
+                              self.content_tags.active.to_a
+                            end
   end

   def cached_not_deleted_tags
-    @cached_not_deleted_tags ||= begin
-      if self.content_tags.loaded?
-        # don't reload the preloaded content
-        self.content_tags.select { |tag| !tag.deleted? }
-      else
-        self.content_tags.not_deleted.to_a
-      end
-    end
+    @cached_not_deleted_tags ||= if self.content_tags.loaded?
+                                   # don't reload the preloaded content
+                                   self.content_tags.select { |tag| !tag.deleted? }
+                                 else
+                                   self.content_tags.not_deleted.to_a
+                                 end
   end

   def add_item(params, added_item = nil, opts = {})


@@ -1012,15 +1012,13 @@ class Course < ActiveRecord::Base
   def preload_user_roles!
     # plz to use before you make a billion calls to user_has_been_X? with different users
-    @user_ids_by_enroll_type ||= begin
-      self.shard.activate do
-        map = {}
-        self.enrollments.active.pluck(:user_id, :type).each do |user_id, type|
-          map[type] ||= []
-          map[type] << user_id
-        end
-        map
-      end
-    end
+    @user_ids_by_enroll_type ||= self.shard.activate do
+      map = {}
+      self.enrollments.active.pluck(:user_id, :type).each do |user_id, type|
+        map[type] ||= []
+        map[type] << user_id
+      end
+      map
+    end
   end
@@ -2086,18 +2084,16 @@ class Course < ActiveRecord::Base
     timeout_options = { raise_on_timeout: true, fallback_timeout_length: default_timeout }

     posts_to_make.each do |enrollment_ids, res, mime_type, headers = {}|
-      begin
-        posted_enrollment_ids += enrollment_ids
-        if res
-          Canvas.timeout_protection("send_final_grades_to_endpoint:#{global_root_account_id}", timeout_options) do
-            SSLCommon.post_data(settings[:publish_endpoint], res, mime_type, headers)
-          end
-        end
-        Enrollment.where(:id => enrollment_ids).update_all(:grade_publishing_status => (should_kick_off_grade_publishing_timeout? ? "publishing" : "published"), :grade_publishing_message => nil)
-      rescue => e
-        errors << e
-        Enrollment.where(:id => enrollment_ids).update_all(:grade_publishing_status => "error", :grade_publishing_message => e.to_s)
-      end
+      posted_enrollment_ids += enrollment_ids
+      if res
+        Canvas.timeout_protection("send_final_grades_to_endpoint:#{global_root_account_id}", timeout_options) do
+          SSLCommon.post_data(settings[:publish_endpoint], res, mime_type, headers)
+        end
+      end
+      Enrollment.where(:id => enrollment_ids).update_all(:grade_publishing_status => (should_kick_off_grade_publishing_timeout? ? "publishing" : "published"), :grade_publishing_message => nil)
+    rescue => e
+      errors << e
+      Enrollment.where(:id => enrollment_ids).update_all(:grade_publishing_status => "error", :grade_publishing_message => e.to_s)
     end

     Enrollment.where(:id => (all_enrollment_ids.to_set - posted_enrollment_ids.to_set).to_a).update_all(:grade_publishing_status => "unpublishable", :grade_publishing_message => nil)


@@ -177,9 +177,7 @@ class CourseProgress
   def module_requirements_completed(progression)
     @_module_requirements_completed ||= {}
-    @_module_requirements_completed[progression.id] ||= begin
-      progression.requirements_met.select { |req| module_requirements(progression.context_module).include?(req) }.uniq
-    end
+    @_module_requirements_completed[progression.id] ||= progression.requirements_met.select { |req| module_requirements(progression.context_module).include?(req) }.uniq
   end

   def module_reqs_to_complete_count(mod)


@@ -33,12 +33,10 @@ class Csp::Domain < ActiveRecord::Base
   after_save :invalidate_domain_list_cache

   def validate_domain
-    begin
-      URI.parse(self.domain)
-    rescue
-      self.errors.add(:domain, "Invalid domain")
-      return false
-    end
+    URI.parse(self.domain)
+  rescue
+    self.errors.add(:domain, "Invalid domain")
+    return false
   end

   def downcase_domain


@@ -901,18 +901,16 @@ class DiscussionTopic < ActiveRecord::Base
   def can_unpublish?(opts = {})
     return @can_unpublish unless @can_unpublish.nil?

-    @can_unpublish = begin
-      if self.assignment
-        !self.assignment.has_student_submissions?
-      else
-        student_ids = opts[:student_ids] || self.context.all_real_student_enrollments.select(:user_id)
-        if self.for_group_discussion?
-          !DiscussionEntry.active.joins(:discussion_topic).merge(child_topics).where(user_id: student_ids).exists?
-        else
-          !self.discussion_entries.active.where(:user_id => student_ids).exists?
-        end
-      end
-    end
+    @can_unpublish = if self.assignment
+                       !self.assignment.has_student_submissions?
+                     else
+                       student_ids = opts[:student_ids] || self.context.all_real_student_enrollments.select(:user_id)
+                       if self.for_group_discussion?
+                         !DiscussionEntry.active.joins(:discussion_topic).merge(child_topics).where(user_id: student_ids).exists?
+                       else
+                         !self.discussion_entries.active.where(:user_id => student_ids).exists?
+                       end
+                     end
   end
   attr_writer :can_unpublish


@@ -187,22 +187,20 @@ class EpubExport < ActiveRecord::Base
   handle_asynchronously :convert_to_epub, priority: Delayed::LOW_PRIORITY

   def create_attachment_from_path!(file_path)
-    begin
-      mime_type = MIME::Types.type_for(file_path).first
-      file = Rack::Multipart::UploadedFile.new(
-        file_path,
-        mime_type.try(:content_type)
-      )
-      attachment = self.attachments.new
-      attachment.filename = File.basename(file_path)
-      Attachments::Storage.store_for_attachment(attachment, file)
-      attachment.save!
-    rescue Errno::ENOENT => e
-      mark_as_failed
-      raise e
-    ensure
-      file.try(:close)
-    end
+    mime_type = MIME::Types.type_for(file_path).first
+    file = Rack::Multipart::UploadedFile.new(
+      file_path,
+      mime_type.try(:content_type)
+    )
+    attachment = self.attachments.new
+    attachment.filename = File.basename(file_path)
+    Attachments::Storage.store_for_attachment(attachment, file)
+    attachment.save!
+  rescue Errno::ENOENT => e
+    mark_as_failed
+    raise e
+  ensure
+    file.try(:close)
   end

   def cleanup_file_path!(file_path)


@@ -167,12 +167,10 @@ module Importers
     imported_migration_ids = []

     items.each do |tag_hash|
-      begin
-        tags = self.add_module_item_from_migration(item, tag_hash, 0, context, item_map, migration)
-        imported_migration_ids.concat tags.map(&:migration_id)
-      rescue
-        migration.add_import_warning(t(:migration_module_item_type, "Module Item"), tag_hash[:title], $!)
-      end
+      tags = self.add_module_item_from_migration(item, tag_hash, 0, context, item_map, migration)
+      imported_migration_ids.concat tags.map(&:migration_id)
+    rescue
+      migration.add_import_warning(t(:migration_module_item_type, "Module Item"), tag_hash[:title], $!)
     end

     item.content_tags.where.not(:migration_id => nil)


@@ -276,104 +276,102 @@ module Importers
   end

   def self.adjust_dates(course, migration)
-    begin
-      # Adjust dates
-      if (shift_options = migration.date_shift_options)
-        shift_options = self.shift_date_options(course, shift_options)
-        Assignment.suspend_due_date_caching do
-          migration.imported_migration_items_by_class(Assignment).each do |event|
-            event.reload # just in case
-            event.due_at = shift_date(event.due_at, shift_options)
-            event.lock_at = shift_date(event.lock_at, shift_options)
-            event.unlock_at = shift_date(event.unlock_at, shift_options)
-            event.peer_reviews_due_at = shift_date(event.peer_reviews_due_at, shift_options)
-            event.needs_update_cached_due_dates = true if event.update_cached_due_dates?
-            event.save_without_broadcasting
-            if event.errors.any?
-              migration.add_warning(t("Couldn't adjust dates on assignment %{name} (ID %{id})", name: event.name, id: event.id.to_s))
-            end
-          end
-        end
-        migration.imported_migration_items_by_class(Attachment).each do |event|
-          event.lock_at = shift_date(event.lock_at, shift_options)
-          event.unlock_at = shift_date(event.unlock_at, shift_options)
-          event.save_without_broadcasting
-        end
-        migration.imported_migration_items_by_class(Folder).each do |event|
-          event.lock_at = shift_date(event.lock_at, shift_options)
-          event.unlock_at = shift_date(event.unlock_at, shift_options)
-          event.save
-        end
-        (migration.imported_migration_items_by_class(Announcement) +
-          migration.imported_migration_items_by_class(DiscussionTopic)).each do |event|
-          event.reload
-          event.saved_by = :after_migration
-          event.delayed_post_at = shift_date(event.delayed_post_at, shift_options)
-          event.lock_at = shift_date(event.lock_at, shift_options)
-          event.todo_date = shift_date(event.todo_date, shift_options)
-          event.save_without_broadcasting
-        end
-        migration.imported_migration_items_by_class(CalendarEvent).each do |event|
-          event.reload
-          event.start_at = shift_date(event.start_at, shift_options)
-          event.end_at = shift_date(event.end_at, shift_options)
-          if event.all_day_date
-            ad_time = event.all_day_date.in_time_zone(shift_options[:time_zone] || Time.zone)
-            event.all_day_date = shift_date(ad_time, shift_options).try(:to_date)
-          end
-          event.save_without_broadcasting
-        end
-        Assignment.suspend_due_date_caching do
-          migration.imported_migration_items_by_class(Quizzes::Quiz).each do |event|
-            event.reload # have to reload the quiz_data to keep link resolution - the others are just in case
-            event.due_at = shift_date(event.due_at, shift_options)
-            event.lock_at = shift_date(event.lock_at, shift_options)
-            event.unlock_at = shift_date(event.unlock_at, shift_options)
-            event.show_correct_answers_at = shift_date(event.show_correct_answers_at, shift_options)
-            event.hide_correct_answers_at = shift_date(event.hide_correct_answers_at, shift_options)
-            event.saved_by = :migration
-            event.save
-          end
-          migration.imported_migration_items_by_class(AssignmentOverride).each do |event|
-            AssignmentOverride.overridden_dates.each do |field|
-              date = event.send(field)
-              next unless date
-              event.send("#{field}=", shift_date(date, shift_options))
-            end
-            event.save_without_broadcasting
-          end
-        end
-        migration.imported_migration_items_by_class(ContextModule).each do |event|
-          event.unlock_at = shift_date(event.unlock_at, shift_options)
-          event.save
-        end
-        migration.imported_migration_items_by_class(WikiPage).each do |event|
-          event.reload
-          event.todo_date = shift_date(event.todo_date, shift_options)
-          event.save_without_broadcasting
-        end
-        course.set_course_dates_if_blank(shift_options)
-      else
-        (migration.imported_migration_items_by_class(Announcement) +
-          migration.imported_migration_items_by_class(DiscussionTopic)).each do |event|
-          event.saved_by = :after_migration
-          event.schedule_delayed_transitions
-        end
-      end
-    rescue
-      migration.add_warning(t(:due_dates_warning, "Couldn't adjust the due dates."), $!)
-    end
+    # Adjust dates
+    if (shift_options = migration.date_shift_options)
+      shift_options = self.shift_date_options(course, shift_options)
+      Assignment.suspend_due_date_caching do
+        migration.imported_migration_items_by_class(Assignment).each do |event|
+          event.reload # just in case
+          event.due_at = shift_date(event.due_at, shift_options)
+          event.lock_at = shift_date(event.lock_at, shift_options)
+          event.unlock_at = shift_date(event.unlock_at, shift_options)
+          event.peer_reviews_due_at = shift_date(event.peer_reviews_due_at, shift_options)
+          event.needs_update_cached_due_dates = true if event.update_cached_due_dates?
+          event.save_without_broadcasting
+          if event.errors.any?
+            migration.add_warning(t("Couldn't adjust dates on assignment %{name} (ID %{id})", name: event.name, id: event.id.to_s))
+          end
+        end
+      end
+      migration.imported_migration_items_by_class(Attachment).each do |event|
+        event.lock_at = shift_date(event.lock_at, shift_options)
+        event.unlock_at = shift_date(event.unlock_at, shift_options)
+        event.save_without_broadcasting
+      end
+      migration.imported_migration_items_by_class(Folder).each do |event|
+        event.lock_at = shift_date(event.lock_at, shift_options)
+        event.unlock_at = shift_date(event.unlock_at, shift_options)
+        event.save
+      end
+      (migration.imported_migration_items_by_class(Announcement) +
+        migration.imported_migration_items_by_class(DiscussionTopic)).each do |event|
+        event.reload
+        event.saved_by = :after_migration
+        event.delayed_post_at = shift_date(event.delayed_post_at, shift_options)
+        event.lock_at = shift_date(event.lock_at, shift_options)
+        event.todo_date = shift_date(event.todo_date, shift_options)
+        event.save_without_broadcasting
+      end
+      migration.imported_migration_items_by_class(CalendarEvent).each do |event|
+        event.reload
+        event.start_at = shift_date(event.start_at, shift_options)
+        event.end_at = shift_date(event.end_at, shift_options)
+        if event.all_day_date
+          ad_time = event.all_day_date.in_time_zone(shift_options[:time_zone] || Time.zone)
+          event.all_day_date = shift_date(ad_time, shift_options).try(:to_date)
+        end
+        event.save_without_broadcasting
+      end
+      Assignment.suspend_due_date_caching do
+        migration.imported_migration_items_by_class(Quizzes::Quiz).each do |event|
+          event.reload # have to reload the quiz_data to keep link resolution - the others are just in case
+          event.due_at = shift_date(event.due_at, shift_options)
+          event.lock_at = shift_date(event.lock_at, shift_options)
+          event.unlock_at = shift_date(event.unlock_at, shift_options)
+          event.show_correct_answers_at = shift_date(event.show_correct_answers_at, shift_options)
+          event.hide_correct_answers_at = shift_date(event.hide_correct_answers_at, shift_options)
+          event.saved_by = :migration
+          event.save
+        end
+        migration.imported_migration_items_by_class(AssignmentOverride).each do |event|
+          AssignmentOverride.overridden_dates.each do |field|
+            date = event.send(field)
+            next unless date
+            event.send("#{field}=", shift_date(date, shift_options))
+          end
+          event.save_without_broadcasting
+        end
+      end
+      migration.imported_migration_items_by_class(ContextModule).each do |event|
+        event.unlock_at = shift_date(event.unlock_at, shift_options)
+        event.save
+      end
+      migration.imported_migration_items_by_class(WikiPage).each do |event|
+        event.reload
+        event.todo_date = shift_date(event.todo_date, shift_options)
+        event.save_without_broadcasting
+      end
+      course.set_course_dates_if_blank(shift_options)
+    else
+      (migration.imported_migration_items_by_class(Announcement) +
+        migration.imported_migration_items_by_class(DiscussionTopic)).each do |event|
+        event.saved_by = :after_migration
+        event.schedule_delayed_transitions
+      end
+    end
+  rescue
+    migration.add_warning(t(:due_dates_warning, "Couldn't adjust the due dates."), $!)
   end

   def self.clear_assignment_and_quiz_caches(migration)


@@ -39,15 +39,13 @@ module Importers
     load_questions!(link_map)

     link_map.each do |item_key, field_links|
-      begin
-        item_key[:item] ||= retrieve_item(item_key)
-
-        replace_item_placeholders!(item_key, field_links)
-
-        add_missing_link_warnings!(item_key, field_links)
-      rescue
-        @migration.add_warning("An error occurred while translating content links", $!)
-      end
+      item_key[:item] ||= retrieve_item(item_key)
+
+      replace_item_placeholders!(item_key, field_links)
+
+      add_missing_link_warnings!(item_key, field_links)
+    rescue
+      @migration.add_warning("An error occurred while translating content links", $!)
     end
   end


@@ -26,28 +26,26 @@ module Importers
     tool_profiles = data['tool_profiles'] || []

     tool_profiles.each do |tool_profile|
-      begin
-        values = tease_out_required_values!(tool_profile)
-        next unless migration.import_object?('tool_profiles', tool_profile['migration_id'])
-
-        tool_proxies = Lti::ToolProxy.find_active_proxies_for_context_by_vendor_code_and_product_code(
-          context: migration.context,
-          vendor_code: values[:vendor_code],
-          product_code: values[:product_code]
-        )
-        if tool_proxies.empty?
-          if values[:registration_url].blank?
-            migration.add_warning(I18n.t("We were unable to find a tool profile match for \"%{product_name}\".", product_name: values[:product_name]))
-          else
-            migration.add_warning(I18n.t("We were unable to find a tool profile match for \"%{product_name}\". If you would like to use this tool please install it using the following registration url: %{registration_url}", product_name: values[:product_name], registration_url: values[:registration_url]))
-          end
-        elsif tool_proxies.none? { |tool_proxy| tool_proxy.matching_tool_profile?(tool_profile['tool_profile']) }
-          migration.add_warning(I18n.t("We found a different version of \"%{product_name}\" installed for your course. If this tool fails to work as intended, try reregistering or reinstalling it.", product_name: values[:product_name]))
-        end
-      rescue MissingRequiredToolProfileValuesError => e
-        migration.add_import_warning('tool_profile', tool_profile['resource_href'], e)
-      end
+      values = tease_out_required_values!(tool_profile)
+      next unless migration.import_object?('tool_profiles', tool_profile['migration_id'])
+
+      tool_proxies = Lti::ToolProxy.find_active_proxies_for_context_by_vendor_code_and_product_code(
+        context: migration.context,
+        vendor_code: values[:vendor_code],
+        product_code: values[:product_code]
+      )
+      if tool_proxies.empty?
+        if values[:registration_url].blank?
+          migration.add_warning(I18n.t("We were unable to find a tool profile match for \"%{product_name}\".", product_name: values[:product_name]))
+        else
+          migration.add_warning(I18n.t("We were unable to find a tool profile match for \"%{product_name}\". If you would like to use this tool please install it using the following registration url: %{registration_url}", product_name: values[:product_name], registration_url: values[:registration_url]))
+        end
+      elsif tool_proxies.none? { |tool_proxy| tool_proxy.matching_tool_profile?(tool_profile['tool_profile']) }
+        migration.add_warning(I18n.t("We found a different version of \"%{product_name}\" installed for your course. If this tool fails to work as intended, try reregistering or reinstalling it.", product_name: values[:product_name]))
+      end
+    rescue MissingRequiredToolProfileValuesError => e
+      migration.add_import_warning('tool_profile', tool_profile['resource_href'], e)
     end
   end


@@ -32,32 +32,30 @@ module IncomingMail
     from_channel = nil

     original_message.shard.activate do
-      begin
-        context = original_message.context
-        user = original_message.user
-        raise IncomingMail::Errors::UnknownAddress unless valid_user_and_context?(context, user)
-
-        from_channel = sent_from_channel(user, incoming_message)
-        raise IncomingMail::Errors::UnknownSender unless from_channel
-        raise IncomingMail::Errors::MessageTooLong if body.length > ActiveRecord::Base.maximum_text_length
-        raise IncomingMail::Errors::MessageTooLong if html_body.length > ActiveRecord::Base.maximum_text_length
-        raise IncomingMail::Errors::BlankMessage if body.blank?
-
-        Rails.cache.fetch(['incoming_mail_reply_from', context, incoming_message.message_id].cache_key, expires_in: 7.days) do
-          context.reply_from({
-            :purpose => 'general',
-            :user => user,
-            :subject => IncomingMailProcessor::IncomingMessageProcessor.utf8ify(incoming_message.subject, incoming_message.header[:subject].try(:charset)),
-            :html => html_body,
-            :text => body
-          })
-          true
-        end
-      rescue IncomingMail::Errors::ReplyFrom => error
-        bounce_message(original_message, incoming_message, error, outgoing_from_address, from_channel)
-      rescue => e
-        Canvas::Errors.capture_exception("IncomingMailProcessor", e)
-      end
+      context = original_message.context
+      user = original_message.user
+      raise IncomingMail::Errors::UnknownAddress unless valid_user_and_context?(context, user)
+
+      from_channel = sent_from_channel(user, incoming_message)
+      raise IncomingMail::Errors::UnknownSender unless from_channel
+      raise IncomingMail::Errors::MessageTooLong if body.length > ActiveRecord::Base.maximum_text_length
+      raise IncomingMail::Errors::MessageTooLong if html_body.length > ActiveRecord::Base.maximum_text_length
+      raise IncomingMail::Errors::BlankMessage if body.blank?
+
+      Rails.cache.fetch(['incoming_mail_reply_from', context, incoming_message.message_id].cache_key, expires_in: 7.days) do
+        context.reply_from({
+          :purpose => 'general',
+          :user => user,
+          :subject => IncomingMailProcessor::IncomingMessageProcessor.utf8ify(incoming_message.subject, incoming_message.header[:subject].try(:charset)),
+          :html => html_body,
+          :text => body
+        })
+        true
+      end
+    rescue IncomingMail::Errors::ReplyFrom => error
+      bounce_message(original_message, incoming_message, error, outgoing_from_address, from_channel)
+    rescue => e
+      Canvas::Errors.capture_exception("IncomingMailProcessor", e)
     end
   end


@@ -37,13 +37,11 @@ module Lti
     exports = {}

     configured_tools.each do |tool|
-      begin
-        migrator = Lti::ContentMigrationService::Exporter.new(course, tool, options)
-        migrator.start!
-        exports["lti_#{tool.id}"] = migrator if migrator.successfully_started?
-      rescue => e
-        Canvas::Errors.capture_exception(:external_content_migration, e)
-      end
+      migrator = Lti::ContentMigrationService::Exporter.new(course, tool, options)
+      migrator.start!
+      exports["lti_#{tool.id}"] = migrator if migrator.successfully_started?
+    rescue => e
+      Canvas::Errors.capture_exception(:external_content_migration, e)
     end

     exports


@@ -69,11 +69,9 @@ module Lti
   class Runner < Struct.new(:callbacks)
     def perform
       callbacks.each_value do |callback|
-        begin
-          CanvasHttp.get(URI.parse(callback).to_s)
-        rescue => e
-          Rails.logger.error("Failed to call logout callback '#{callback}': #{e.inspect}")
-        end
+        CanvasHttp.get(URI.parse(callback).to_s)
+      rescue => e
+        Rails.logger.error("Failed to call logout callback '#{callback}': #{e.inspect}")
       end
     end
   end


@@ -207,16 +207,14 @@ module Lti
   end

   def resource_link_request
-    @_resource_link_request ||= begin
-      Lti::Messages::ResourceLinkRequest.new(
-        tool: @tool,
-        context: @context,
-        user: @user,
-        expander: @expander,
-        return_url: @return_url,
-        opts: @opts.merge(option_overrides)
-      )
-    end
+    @_resource_link_request ||= Lti::Messages::ResourceLinkRequest.new(
+      tool: @tool,
+      context: @context,
+      user: @user,
+      expander: @expander,
+      return_url: @return_url,
+      opts: @opts.merge(option_overrides)
+    )
   end

   def option_overrides


@@ -263,15 +263,13 @@ class Message < ActiveRecord::Base
   # populate the avatar, name, and email in the conversation email notification
   def author
-    @_author ||= begin
-      if author_context.has_attribute?(:user_id)
-        User.find(context.user_id)
-      elsif author_context.has_attribute?(:author_id)
-        User.find(context.author_id)
-      else
-        nil
-      end
-    end
+    @_author ||= if author_context.has_attribute?(:user_id)
+                   User.find(context.user_id)
+                 elsif author_context.has_attribute?(:author_id)
+                   User.find(context.author_id)
+                 else
+                   nil
+                 end
   end

   def author_context
@@ -1115,18 +1113,16 @@ class Message < ActiveRecord::Base
   #
   # Returns nothing.
   def deliver_via_push
-    begin
-      self.user.notification_endpoints.each do |notification_endpoint|
-        notification_endpoint.destroy unless notification_endpoint.push_json(sns_json)
-      end
-      complete_dispatch
-    rescue StandardError => e
-      @exception = e
-      error_string = "Exception: #{e.class}: #{e.message}\n\t#{e.backtrace.join("\n\t")}"
-      logger.error error_string
-      cancel
-      raise e
-    end
+    self.user.notification_endpoints.each do |notification_endpoint|
+      notification_endpoint.destroy unless notification_endpoint.push_json(sns_json)
+    end
+    complete_dispatch
+  rescue StandardError => e
+    @exception = e
+    error_string = "Exception: #{e.class}: #{e.message}\n\t#{e.backtrace.join("\n\t")}"
+    logger.error error_string
+    cancel
+    raise e
   end

   private


@@ -164,14 +164,12 @@ class ModeratedGrading::ProvisionalGrade < ActiveRecord::Base
   def publish_submission_comments!
     submission_comments.select(&:provisional_grade_id).each do |provisional_comment|
-      begin
-        comment = provisional_comment.dup
-        comment.grade_posting_in_progress = true
-        comment.provisional_grade_id = nil
-        comment.save!
-      ensure
-        comment.grade_posting_in_progress = false
-      end
+      comment = provisional_comment.dup
+      comment.grade_posting_in_progress = true
+      comment.provisional_grade_id = nil
+      comment.save!
+    ensure
+      comment.grade_posting_in_progress = false
     end
   end


@@ -47,18 +47,14 @@ class NotificationEndpoint < ActiveRecord::Base
   end

   def endpoint_attributes
-    @endpoint_attributes ||= begin
-      sns_client.get_endpoint_attributes(endpoint_arn: self.arn).attributes
-    end
+    @endpoint_attributes ||= sns_client.get_endpoint_attributes(endpoint_arn: self.arn).attributes
   end

   def endpoint_exists?
-    begin
-      endpoint_attributes
-      true
-    rescue Aws::SNS::Errors::NotFound
-      false
-    end
+    endpoint_attributes
+    true
+  rescue Aws::SNS::Errors::NotFound
+    false
   end

   def own_endpoint?


@@ -139,13 +139,12 @@ class NotificationPolicy < ActiveRecord::Base
     np = communication_channel.notification_policies.where(notification_id: notification).first
     if !np
       np = communication_channel.notification_policies.build(notification: notification)
-      frequency ||= begin
-        if communication_channel == communication_channel.user.communication_channel
-          notification.default_frequency(communication_channel.user)
-        else
-          'never'
-        end
-      end
+      frequency ||= if communication_channel == communication_channel.user.communication_channel
+                      notification.default_frequency(communication_channel.user)
+                    else
+                      'never'
+                    end
     end
     if frequency
       np.frequency = frequency
@@ -172,20 +171,18 @@ class NotificationPolicy < ActiveRecord::Base
     end
     np = nil
     NotificationPolicy.transaction(requires_new: true) do
-      begin
-        np = communication_channel.notification_policies.build(notification: notification)
-        np.frequency = if frequencies[notification]
-                         frequencies[notification]
-                       elsif communication_channel == communication_channel.user.communication_channel
-                         notification.default_frequency(communication_channel.user)
-                       else
-                         'never'
-                       end
-        np.save!
-      rescue ActiveRecord::RecordNotUnique
-        np = nil
-        raise ActiveRecord::Rollback
-      end
+      np = communication_channel.notification_policies.build(notification: notification)
+      np.frequency = if frequencies[notification]
+                       frequencies[notification]
+                     elsif communication_channel == communication_channel.user.communication_channel
+                       notification.default_frequency(communication_channel.user)
+                     else
+                       'never'
+                     end
+      np.save!
+    rescue ActiveRecord::RecordNotUnique
+      np = nil
+      raise ActiveRecord::Rollback
     end
     np ||= communication_channel.notification_policies.where(notification_id: notification).first
     policies << np


@@ -103,31 +103,29 @@ class OutcomeImport < ApplicationRecord
   def run
     root_account.shard.activate do
-      begin
-        job_started!
-        I18n.locale = locale if locale.present?
-        file = self.attachment.open(need_local_file: true)
-
-        Outcomes::CSVImporter.new(self, file).run do |status|
-          status[:errors].each do |row, error|
-            add_error row, error
-          end
-          self.update!(progress: status[:progress])
-        end
-
-        job_completed!
-      rescue Outcomes::Import::DataFormatError => e
-        add_error(1, e.message, true)
-        job_failed!
-      rescue => e
-        report = ErrorReport.log_exception('outcomes_import', e)
-        # no I18n on error report id
-        add_error(1, I18n.t('An unexpected error has occurred: see error report %{id}', id: report.id.to_s), true)
-        job_failed!
-      ensure
-        file.close
-        notify_user
-      end
+      job_started!
+      I18n.locale = locale if locale.present?
+      file = self.attachment.open(need_local_file: true)
+
+      Outcomes::CSVImporter.new(self, file).run do |status|
+        status[:errors].each do |row, error|
+          add_error row, error
+        end
+        self.update!(progress: status[:progress])
+      end
+
+      job_completed!
+    rescue Outcomes::Import::DataFormatError => e
+      add_error(1, e.message, true)
+      job_failed!
+    rescue => e
+      report = ErrorReport.log_exception('outcomes_import', e)
+      # no I18n on error report id
+      add_error(1, I18n.t('An unexpected error has occurred: see error report %{id}', id: report.id.to_s), true)
+      job_failed!
+    ensure
+      file.close
+      notify_user
    end
  end


@@ -457,32 +457,30 @@ class PageView < ActiveRecord::Base
     return false if rows.empty?

     inserted = rows.count do |attrs|
-      begin
-        created_at = attrs['created_at']
-        created_at = Time.zone.parse(created_at) unless created_at.is_a?(Time)
-        # if the created_at is the same as the last_created_at,
-        # we may have already inserted this page view
-        # use to_i here to avoid sub-second precision problems
-        if created_at.to_i == last_created_at.to_i
-          exists = !!cassandra.select_value("SELECT request_id FROM page_views WHERE request_id = ?", attrs['request_id'])
-        end
-
-        # now instantiate the AR object here, as a brand new record, so
-        # it's saved to cassandra as if it was just created (though
-        # created_at comes from the queried db attributes)
-        # we're bypassing the redis queue here, just saving directly to cassandra
-        if exists
-          false
-        else
-          # assumes PageView.cassandra? is true at this point
-          page_view = PageView.from_attributes(attrs, true)
-          page_view.save!
-          true
-        end
-      rescue
-        logger.error "failed migrating request id to cassandra: #{attrs['request_id']} : #{$!}"
-        false
-      end
+      created_at = attrs['created_at']
+      created_at = Time.zone.parse(created_at) unless created_at.is_a?(Time)
+      # if the created_at is the same as the last_created_at,
+      # we may have already inserted this page view
+      # use to_i here to avoid sub-second precision problems
+      if created_at.to_i == last_created_at.to_i
+        exists = !!cassandra.select_value("SELECT request_id FROM page_views WHERE request_id = ?", attrs['request_id'])
+      end
+
+      # now instantiate the AR object here, as a brand new record, so
+      # it's saved to cassandra as if it was just created (though
+      # created_at comes from the queried db attributes)
+      # we're bypassing the redis queue here, just saving directly to cassandra
+      if exists
+        false
+      else
+        # assumes PageView.cassandra? is true at this point
+        page_view = PageView.from_attributes(attrs, true)
+        page_view.save!
+        true
+      end
+    rescue
+      logger.error "failed migrating request id to cassandra: #{attrs['request_id']} : #{$!}"
+      false
     end

     logger.info "account #{Shard.current.id}~#{account_id}: added #{inserted} page views starting at #{last_created_at}"


@@ -33,11 +33,9 @@ module Quizzes::QuizQuestion::AnswerSerializers
   # @return [Integer|NilClass]
   #   nil if the parameter isn't really an integer.
   def to_integer(number)
-    begin
-      Integer(number)
-    rescue
-      nil
-    end
+    Integer(number)
+  rescue
+    nil
   end

   # Cast a localized string number to a BigDecimal


@@ -55,13 +55,11 @@ class Quizzes::QuizSubmissionHistory
   end

   def kept
-    @kept ||= begin
-      if @submission.score == @submission.kept_score
-        @submission
-      else
-        version_models.detect { |v| v.score == @submission.kept_score }
-      end
-    end
+    @kept ||= if @submission.score == @submission.kept_score
+                @submission
+              else
+                version_models.detect { |v| v.score == @submission.kept_score }
+              end
   end

   private


@@ -81,31 +81,29 @@ class ReportSnapshot < ActiveRecord::Base
   scope :progressive, -> { where(:report_type => 'counts_progressive_detailed') }

   def push_to_instructure_if_collection_enabled
-    begin
-      return if self.report_type != REPORT_TO_SEND
-      return if self.account != Account.default
-
-      collection_type = Setting.get("usage_statistics_collection", "opt_out")
-      return if collection_type == "opt_out"
-
-      require 'lib/ssl_common'
-
-      data = {
-        "collection_type" => collection_type,
-        "installation_uuid" => Canvas.installation_uuid,
-        "report_type" => self.report_type,
-        "data" => read_attribute(:data),
-        "rails_env" => Rails.env
-      }
-      if collection_type == "opt_in"
-        data["account_name"] = Account.default.name
-        data["admin_email"] = Account.site_admin.users.first.pseudonyms.first.unique_id
-      end
-      SSLCommon.post_form(STATS_COLLECTION_URL, data)
-    rescue
-      nil
-    end
+    return if self.report_type != REPORT_TO_SEND
+    return if self.account != Account.default
+
+    collection_type = Setting.get("usage_statistics_collection", "opt_out")
+    return if collection_type == "opt_out"
+
+    require 'lib/ssl_common'
+
+    data = {
+      "collection_type" => collection_type,
+      "installation_uuid" => Canvas.installation_uuid,
+      "report_type" => self.report_type,
+      "data" => read_attribute(:data),
+      "rails_env" => Rails.env
+    }
+    if collection_type == "opt_in"
+      data["account_name"] = Account.default.name
+      data["admin_email"] = Account.site_admin.users.first.pseudonyms.first.unique_id
+    end
+    SSLCommon.post_form(STATS_COLLECTION_URL, data)
+  rescue
+    nil
   end
 end


@@ -29,39 +29,37 @@ class Setting < Switchman::UnshardedRecord
   end

   def self.get(name, default, expires_in: nil, set_if_nx: false)
-    begin
-      cache.fetch(name, expires_in: expires_in) do
-        if @skip_cache && expires_in
-          obj = Setting.find_by(name: name)
-          Setting.set(name, default) if !obj && set_if_nx
-          next obj ? obj.value&.to_s : default&.to_s
-        end
-        fetch = Proc.new { Setting.pluck(:name, :value).to_h }
-        all_settings = if @skip_cache
-                         # we want to skip talking to redis, but it's okay to use the in-proc cache
-                         @all_settings ||= fetch.call
-                       elsif expires_in
-                         # ignore the in-proc cache, but check redis; it will have been properly
-                         # cleared by whoever set it, they just have no way to clear the in-proc cache
-                         @all_settings = MultiCache.fetch("all_settings", &fetch)
-                       else
-                         # use both caches
-                         @all_settings ||= MultiCache.fetch("all_settings", &fetch)
-                       end
-        if all_settings.key?(name)
-          all_settings[name]&.to_s
-        else
-          Setting.set(name, default) if set_if_nx
-          default&.to_s
-        end
-      end
-    rescue ActiveRecord::StatementInvalid, ActiveRecord::ConnectionNotEstablished => e
-      # the db may not exist yet
-      Rails.logger.warn("Unable to read setting: #{e}") if Rails.logger
-      default&.to_s
-    end
+    cache.fetch(name, expires_in: expires_in) do
+      if @skip_cache && expires_in
+        obj = Setting.find_by(name: name)
+        Setting.set(name, default) if !obj && set_if_nx
+        next obj ? obj.value&.to_s : default&.to_s
+      end
+      fetch = Proc.new { Setting.pluck(:name, :value).to_h }
+      all_settings = if @skip_cache
+                       # we want to skip talking to redis, but it's okay to use the in-proc cache
+                       @all_settings ||= fetch.call
+                     elsif expires_in
+                       # ignore the in-proc cache, but check redis; it will have been properly
+                       # cleared by whoever set it, they just have no way to clear the in-proc cache
+                       @all_settings = MultiCache.fetch("all_settings", &fetch)
+                     else
+                       # use both caches
+                       @all_settings ||= MultiCache.fetch("all_settings", &fetch)
+                     end
+      if all_settings.key?(name)
+        all_settings[name]&.to_s
+      else
+        Setting.set(name, default) if set_if_nx
+        default&.to_s
+      end
+    end
+  rescue ActiveRecord::StatementInvalid, ActiveRecord::ConnectionNotEstablished => e
+    # the db may not exist yet
+    Rails.logger.warn("Unable to read setting: #{e}") if Rails.logger
+    default&.to_s
   end

   # Note that after calling this, you should send SIGHUP to all running Canvas processes

View File

@@ -803,13 +803,11 @@ class Submission < ActiveRecord::Base
   end

   def text_entry_originality_reports
-    @text_entry_originality_reports ||= begin
-      if self.association(:originality_reports).loaded?
-        originality_reports.select { |o| o.attachment_id.blank? }
-      else
-        originality_reports.where(attachment_id: nil)
-      end
-    end
+    @text_entry_originality_reports ||= if self.association(:originality_reports).loaded?
+                                          originality_reports.select { |o| o.attachment_id.blank? }
+                                        else
+                                          originality_reports.where(attachment_id: nil)
+                                        end
   end

   # Returns an array of both the versioned originality reports (those with attachments) and

View File

@@ -696,20 +696,18 @@ class User < ActiveRecord::Base
           aa.depth = depth
           aa.shard = Shard.shard_for(account_id)
           aa.shard.activate do
-            begin
-              UserAccountAssociation.transaction(:requires_new => true) do
-                aa.save!
-              end
-            rescue ActiveRecord::RecordNotUnique
-              # race condition - someone else created the UAA after we queried for existing ones
-              old_aa = UserAccountAssociation.where(user_id: aa.user_id, account_id: aa.account_id).first
-              raise unless old_aa # wtf!
-              # make sure we don't need to change the depth
-              if depth < old_aa.depth
-                old_aa.depth = depth
-                old_aa.save!
-              end
-            end
+            UserAccountAssociation.transaction(:requires_new => true) do
+              aa.save!
+            end
+          rescue ActiveRecord::RecordNotUnique
+            # race condition - someone else created the UAA after we queried for existing ones
+            old_aa = UserAccountAssociation.where(user_id: aa.user_id, account_id: aa.account_id).first
+            raise unless old_aa # wtf!
+
+            # make sure we don't need to change the depth
+            if depth < old_aa.depth
+              old_aa.depth = depth
+              old_aa.save!
+            end
           end
         else
@@ -760,9 +758,7 @@ class User < ActiveRecord::Base
   # Returns an array of groups which are currently visible for the user.
   def visible_groups
-    @visible_groups ||= begin
-      filter_visible_groups_for_user(self.current_groups)
-    end
+    @visible_groups ||= filter_visible_groups_for_user(self.current_groups)
   end

   def filter_visible_groups_for_user(groups)

View File

@@ -216,11 +216,9 @@ class GradeSummaryAssignmentPresenter
   end

   def grade_distribution
-    @grade_distribution ||= begin
-      if (stats = @summary.assignment_stats[assignment.id])
-        [stats.maximum, stats.minimum, stats.mean].map { |stat| stat.to_f.round(2) }
-      end
-    end
+    @grade_distribution ||= if (stats = @summary.assignment_stats[assignment.id])
+                              [stats.maximum, stats.minimum, stats.mean].map { |stat| stat.to_f.round(2) }
+                            end
   end

   def graph

View File

@@ -98,17 +98,15 @@ class GradeSummaryPresenter
   end

   def student_enrollment
-    @student_enrollment ||= begin
-      if @id_param # always use id if given
-        validate_id
-        user_id = Shard.relative_id_for(@id_param, @context.shard, @context.shard)
-        @context.shard.activate { student_enrollment_for(@context, user_id) }
-      elsif observed_students.present? # otherwise try to find an observed student
-        observed_student
-      else # or just fall back to @current_user
-        @context.shard.activate { student_enrollment_for(@context, @current_user) }
-      end
-    end
+    @student_enrollment ||= if @id_param # always use id if given
+                              validate_id
+                              user_id = Shard.relative_id_for(@id_param, @context.shard, @context.shard)
+                              @context.shard.activate { student_enrollment_for(@context, user_id) }
+                            elsif observed_students.present? # otherwise try to find an observed student
+                              observed_student
+                            else # or just fall back to @current_user
+                              @context.shard.activate { student_enrollment_for(@context, @current_user) }
+                            end
   end

   def students
@@ -253,44 +251,40 @@ class GradeSummaryPresenter
   end

   def courses_with_grades
-    @courses_with_grades ||= begin
-      student.shard.activate do
-        course_list = if student_is_user?
-                        Course.preload(:enrollment_term, :grading_period_groups)
-                              .where(id: student.participating_student_current_and_concluded_course_ids).to_a
-                      elsif user_an_observer_of_student?
-                        observed_courses = []
-                        Shard.partition_by_shard(student.participating_student_current_and_concluded_course_ids) do |course_ids|
-                          observed_course_ids = ObserverEnrollment
-                                                .not_deleted
-                                                .where(course_id: course_ids, user_id: @current_user, associated_user_id: student)
-                                                .pluck(:course_id)
-                          next unless observed_course_ids.any?
-                          observed_courses += Course.preload(:enrollment_term, :grading_period_groups)
-                                                    .where(id: observed_course_ids).to_a
-                        end
-                        observed_courses
-                      else
-                        []
-                      end
-        course_list.select { |c| c.grants_right?(student, :read) }
-      end
-    end
+    @courses_with_grades ||= student.shard.activate do
+      course_list = if student_is_user?
+                      Course.preload(:enrollment_term, :grading_period_groups)
+                            .where(id: student.participating_student_current_and_concluded_course_ids).to_a
+                    elsif user_an_observer_of_student?
+                      observed_courses = []
+                      Shard.partition_by_shard(student.participating_student_current_and_concluded_course_ids) do |course_ids|
+                        observed_course_ids = ObserverEnrollment
+                                              .not_deleted
+                                              .where(course_id: course_ids, user_id: @current_user, associated_user_id: student)
+                                              .pluck(:course_id)
+                        next unless observed_course_ids.any?
+                        observed_courses += Course.preload(:enrollment_term, :grading_period_groups)
+                                                  .where(id: observed_course_ids).to_a
+                      end
+                      observed_courses
+                    else
+                      []
+                    end
+      course_list.select { |c| c.grants_right?(student, :read) }
+    end
   end

   def unread_submission_ids
-    @unread_submission_ids ||= begin
-      if student_is_user?
-        # remember unread submissions and then mark all as read
-        subs = submissions.select { |s| s.unread?(@current_user) }
-        subs.each { |s| s.change_read_state("read", @current_user) }
-        subs.map(&:id)
-      else
-        []
-      end
-    end
+    @unread_submission_ids ||= if student_is_user?
+                                 # remember unread submissions and then mark all as read
+                                 subs = submissions.select { |s| s.unread?(@current_user) }
+                                 subs.each { |s| s.change_read_state("read", @current_user) }
+                                 subs.map(&:id)
+                               else
+                                 []
+                               end
   end

   def no_calculations?
@@ -298,13 +292,11 @@ class GradeSummaryPresenter
   end

   def total_weight
-    @total_weight ||= begin
-      if @context.group_weighting_scheme == "percent"
-        groups.sum(&:group_weight)
-      else
-        0
-      end
-    end
+    @total_weight ||= if @context.group_weighting_scheme == "percent"
+                        groups.sum(&:group_weight)
+                      else
+                        0
+                      end
   end

   def groups_assignments=(value)

View File

@@ -23,19 +23,15 @@ class GradesPresenter
   end

   def student_enrollments
-    @student_enrollments ||= begin
-      current_enrollments.select { |e| e.student? }.index_by { |e| e.course }
-    end
+    @student_enrollments ||= current_enrollments.select { |e| e.student? }.index_by { |e| e.course }
   end

   def observed_enrollments
-    @observed_enrollments ||= begin
-      observer_enrollments.map { |e|
-        e.shard.activate do
-          StudentEnrollment.active.where(user_id: e.associated_user_id, course_id: e.course_id).first
-        end
-      }.uniq.compact
-    end
+    @observed_enrollments ||= observer_enrollments.map { |e|
+      e.shard.activate do
+        StudentEnrollment.active.where(user_id: e.associated_user_id, course_id: e.course_id).first
+      end
+    }.uniq.compact
   end

   def course_grade_summaries
@@ -76,14 +72,10 @@ class GradesPresenter
   private

   def observer_enrollments
-    @observer_enrollments ||= begin
-      current_enrollments.select { |e| e.is_a?(ObserverEnrollment) && e.associated_user_id }
-    end
+    @observer_enrollments ||= current_enrollments.select { |e| e.is_a?(ObserverEnrollment) && e.associated_user_id }
   end

   def current_enrollments
-    @current_enrollments ||= begin
-      @enrollments.select { |e| e.state_based_on_date == :active }
-    end
+    @current_enrollments ||= @enrollments.select { |e| e.state_based_on_date == :active }
   end
 end

View File

@@ -31,8 +31,6 @@ class GradingPeriodGradeSummaryPresenter < GradeSummaryPresenter
   end

   def groups
-    @groups ||= begin
-      assignments.uniq(&:assignment_group_id).map(&:assignment_group)
-    end
+    @groups ||= assignments.uniq(&:assignment_group_id).map(&:assignment_group)
   end
 end

View File

@@ -173,17 +173,15 @@ module CanvasRails
       hosts = Array(conn_params[:host]).presence || [nil]
       hosts.each_with_index do |host, index|
-        begin
-          conn_params[:host] = host
-          return super(conn_params)
-          # we _shouldn't_ be catching a NoDatabaseError, but that's what Rails raises
-          # for an error where the database name is in the message (i.e. a hostname lookup failure)
-          # CANVAS_RAILS6_0 rails 6.1 switches from PG::Error to ActiveRecord::ConnectionNotEstablished
-          # for any other error
-        rescue ::PG::Error, ::ActiveRecord::NoDatabaseError, ::ActiveRecord::ConnectionNotEstablished
-          raise if index == hosts.length - 1
-          # else try next host
-        end
+        conn_params[:host] = host
+        return super(conn_params)
+        # we _shouldn't_ be catching a NoDatabaseError, but that's what Rails raises
+        # for an error where the database name is in the message (i.e. a hostname lookup failure)
+        # CANVAS_RAILS6_0 rails 6.1 switches from PG::Error to ActiveRecord::ConnectionNotEstablished
+        # for any other error
+      rescue ::PG::Error, ::ActiveRecord::NoDatabaseError, ::ActiveRecord::ConnectionNotEstablished
+        raise if index == hosts.length - 1
+        # else try next host
       end
     end
   end
@@ -199,24 +197,22 @@ module CanvasRails
       def connect
         hosts = Array(@connection_parameters[:host]).presence || [nil]
         hosts.each_with_index do |host, index|
-          begin
-            connection_parameters = @connection_parameters.dup
-            connection_parameters[:host] = host
-            @connection = PG::Connection.connect(connection_parameters)
-            configure_connection
-            raise "Canvas requires PostgreSQL 12 or newer" unless postgresql_version >= 12_00_00
-            break
-          rescue ::PG::Error => error
-            if error.message.include?("does not exist")
-              raise ActiveRecord::NoDatabaseError.new(error.message)
-            elsif index == hosts.length - 1
-              raise
-            end
-            # else try next host
-          end
+          connection_parameters = @connection_parameters.dup
+          connection_parameters[:host] = host
+          @connection = PG::Connection.connect(connection_parameters)
+
+          configure_connection
+
+          raise "Canvas requires PostgreSQL 12 or newer" unless postgresql_version >= 12_00_00
+
+          break
+        rescue ::PG::Error => error
+          if error.message.include?("does not exist")
+            raise ActiveRecord::NoDatabaseError.new(error.message)
+          elsif index == hosts.length - 1
+            raise
+          end
+          # else try next host
         end
       end

View File

@@ -41,14 +41,12 @@ callback_chain.append(cb) if cb
 # be tolerant of using a secondary
 module IgnoreSlaveErrors
   def save_record(alternate_record = nil)
-    begin
-      super
-    rescue ActiveRecord::StatementInvalid => error
-      # "simulated" secondary of a user with read-only access; probably the same error for Slony
-      raise if !error.message.match(/PG(?:::)?Error: ERROR: +permission denied for relation/) &&
-               # real secondary that's in recovery
-               !error.message.match(/PG(?:::)?Error: ERROR: +cannot execute UPDATE in a read-only transaction/)
-    end
+    super
+  rescue ActiveRecord::StatementInvalid => error
+    # "simulated" secondary of a user with read-only access; probably the same error for Slony
+    raise if !error.message.match(/PG(?:::)?Error: ERROR: +permission denied for relation/) &&
+             # real secondary that's in recovery
+             !error.message.match(/PG(?:::)?Error: ERROR: +cannot execute UPDATE in a read-only transaction/)
   end
 end
 Authlogic::Session::Base.prepend(IgnoreSlaveErrors)

View File

@@ -21,17 +21,13 @@ class DataServicesCaliperLoader
   JSON_BASE_PATH = 'doc/api/data_services/json/caliper'

   def self.data
-    @@data ||= begin
-      DataServicesEventsLoader.new(JSON_BASE_PATH).data
-    end
+    @@data ||= DataServicesEventsLoader.new(JSON_BASE_PATH).data
   end

   def self.extensions
-    @@extensions ||= begin
-      {
-        'extensions' => JSON.parse(File.read("#{JSON_BASE_PATH}/extensions.json")),
-        'actor_extensions' => JSON.parse(File.read("#{JSON_BASE_PATH}/actor_extensions.json"))
-      }
-    end
+    @@extensions ||= {
+      'extensions' => JSON.parse(File.read("#{JSON_BASE_PATH}/extensions.json")),
+      'actor_extensions' => JSON.parse(File.read("#{JSON_BASE_PATH}/actor_extensions.json"))
+    }
   end
 end

View File

@@ -21,16 +21,12 @@ class DataServicesCanvasLoader
   JSON_BASE_PATH = 'doc/api/data_services/json/canvas'

   def self.data
-    @@data ||= begin
-      DataServicesEventsLoader.new(JSON_BASE_PATH).data
-    end
+    @@data ||= DataServicesEventsLoader.new(JSON_BASE_PATH).data
   end

   def self.metadata
-    @@metadata ||= begin
-      {
-        examples: JSON.parse(File.read("#{JSON_BASE_PATH}/metadata.json"))
-      }
-    end
+    @@metadata ||= {
+      examples: JSON.parse(File.read("#{JSON_BASE_PATH}/metadata.json"))
+    }
   end
 end

View File

@@ -61,14 +61,12 @@ class DataServicesEventsLoader
   end

   def data
-    @data ||= begin
-      event_types.collect do |event_category, event_files|
-        {
-          event_category: event_category,
-          page_title: page_tile_formatter(event_category),
-          event_payloads: load_json_events(event_files.sort)
-        }
-      end
-    end
+    @data ||= event_types.collect do |event_category, event_files|
+      {
+        event_category: event_category,
+        page_title: page_tile_formatter(event_category),
+        event_payloads: load_json_events(event_files.sort)
+      }
+    end
   end
@@ -79,12 +77,10 @@ class DataServicesEventsLoader
   end

   def event_types
-    @event_types ||= begin
-      files
-        .group_by { |file_path| extrat_category_from_file_path(file_path) }
-        .sort
-        .to_h
-    end
+    @event_types ||= files
+                     .group_by { |file_path| extrat_category_from_file_path(file_path) }
+                     .sort
+                     .to_h
   end

   def extrat_category_from_file_path(file_path)

View File

@@ -90,11 +90,9 @@ class ControllerListView < HashView
   def models
     {}.tap do |m|
       merge = lambda do |name, hash|
-        begin
-          m.merge! hash
-        rescue JSON::ParserError
-          puts "Unable to parse model: #{name} (#{ctrl.raw_name})"
-        end
+        m.merge! hash
+      rescue JSON::ParserError
+        puts "Unable to parse model: #{name} (#{ctrl.raw_name})"
       end

       # If @object tags are available to describe a class of object, we'll

View File

@@ -264,43 +264,41 @@ describe CanvasCache::Redis do
   end

   it "logs the cache fetch block generation time" do
-    begin
-      Timecop.safe_mode = false
-      Timecop.freeze
-      log_lines = capture_log_messages do
-        # make sure this works with fetching nested fetches
-        cache.fetch(key, force: true) do
-          val = +"a1"
-          val << cache.fetch(key2, force: true) do
-            Timecop.travel(Time.zone.now + 1.second)
-            # Cheat to cover the missing ActiveSupport::Notifications.subscription in config/inititalizers/cache_store.rb
-            # TODO: remove this hack when initializer is ported to gem and incorporated
-            Thread.current[:last_cache_generate] = 1
-            "b1"
-          end
-          Timecop.travel(Time.zone.now + 2.seconds)
-          # Cheat to cover the missing ActiveSupport::Notifications.subscription in config/inititalizers/cache_store.rb
-          # TODO: remove this hack when initializer is ported to gem and incorporated
-          Thread.current[:last_cache_generate] = 3
-          val << "a2"
-        end
-      end
-      outer_message = JSON.parse(log_lines.pop)
-      expect(outer_message["command"]).to eq("set")
-      expect(outer_message["key"]).to be_ends_with(key)
-      expect(outer_message["request_time_ms"]).to be_a(Float)
-      # 3000 (3s) == 2s outer fetch + 1s inner fetch
-      expect(outer_message["generate_time_ms"]).to be_within(500).of(3000)
-      inner_message = JSON.parse(log_lines.pop)
-      expect(inner_message["command"]).to eq("set")
-      expect(inner_message["key"]).to be_ends_with(key2)
-      expect(inner_message["request_time_ms"]).to be_a(Float)
-      expect(inner_message["generate_time_ms"]).to be_within(500).of(1000)
-    ensure
-      Timecop.return
-      Timecop.safe_mode = true
-    end
+    Timecop.safe_mode = false
+    Timecop.freeze
+    log_lines = capture_log_messages do
+      # make sure this works with fetching nested fetches
+      cache.fetch(key, force: true) do
+        val = +"a1"
+        val << cache.fetch(key2, force: true) do
+          Timecop.travel(Time.zone.now + 1.second)
+          # Cheat to cover the missing ActiveSupport::Notifications.subscription in config/inititalizers/cache_store.rb
+          # TODO: remove this hack when initializer is ported to gem and incorporated
+          Thread.current[:last_cache_generate] = 1
+          "b1"
+        end
+        Timecop.travel(Time.zone.now + 2.seconds)
+        # Cheat to cover the missing ActiveSupport::Notifications.subscription in config/inititalizers/cache_store.rb
+        # TODO: remove this hack when initializer is ported to gem and incorporated
+        Thread.current[:last_cache_generate] = 3
+        val << "a2"
+      end
+    end
+    outer_message = JSON.parse(log_lines.pop)
+    expect(outer_message["command"]).to eq("set")
+    expect(outer_message["key"]).to be_ends_with(key)
+    expect(outer_message["request_time_ms"]).to be_a(Float)
+    # 3000 (3s) == 2s outer fetch + 1s inner fetch
+    expect(outer_message["generate_time_ms"]).to be_within(500).of(3000)
+    inner_message = JSON.parse(log_lines.pop)
+    expect(inner_message["command"]).to eq("set")
+    expect(inner_message["key"]).to be_ends_with(key2)
+    expect(inner_message["request_time_ms"]).to be_a(Float)
+    expect(inner_message["generate_time_ms"]).to be_within(500).of(1000)
+  ensure
+    Timecop.return
+    Timecop.safe_mode = true
   end

   it "logs zero response size on cache miss" do

View File

@@ -148,13 +148,11 @@ module CanvasPartman::Concerns
       partition_table_name = infer_partition_table_name(attributes)

       @arel_tables ||= {}
-      @arel_tables[partition_table_name] ||= begin
-        if ::ActiveRecord.version < Gem::Version.new('5')
-          Arel::Table.new(partition_table_name, { engine: self.arel_engine })
-        else
-          Arel::Table.new(partition_table_name, type_caster: type_caster)
-        end
-      end
+      @arel_tables[partition_table_name] ||= if ::ActiveRecord.version < Gem::Version.new('5')
+                                               Arel::Table.new(partition_table_name, { engine: self.arel_engine })
+                                             else
+                                               Arel::Table.new(partition_table_name, type_caster: type_caster)
+                                             end
     end

     # @internal

View File

@@ -81,11 +81,9 @@ RSpec.configure do |config|
   config.after do
     connection.tables.grep(/^partman_(?:animals|trails)_/).each do |partition_table_name|
-      begin
-        SchemaHelper.drop_table(partition_table_name)
-      rescue StandardError => e
-        puts "[WARN] Partition table dropping failed: #{e.message}"
-      end
+      SchemaHelper.drop_table(partition_table_name)
+    rescue StandardError => e
+      puts "[WARN] Partition table dropping failed: #{e.message}"
     end
   end
 end

View File

@@ -69,13 +69,11 @@ module CanvasQuizStatistics::Analyzers::Concerns
     def calculate_responses(responses, answers, *args)
       responses.each do |response|
         answer = locate_answer(response, answers, *args)
-        answer ||= begin
-          if answer_present_but_unknown?(response, *args)
-            generate_unknown_answer(answers)
-          else
-            generate_missing_answer(answers)
-          end
-        end
+        answer ||= if answer_present_but_unknown?(response, *args)
+                     generate_unknown_answer(answers)
+                   else
+                     generate_missing_answer(answers)
+                   end

         answer[:user_ids] << response[:user_id]
         answer[:user_names] << response[:user_name]

View File

@@ -253,18 +253,16 @@ module CanvasSecurity
       keys += encryption_keys

       keys.each do |key|
-        begin
-          body = JSON::JWT.decode(token, key)
-          verify_jwt(body, ignore_expiration: ignore_expiration)
-          return body.with_indifferent_access
-        rescue JSON::JWS::VerificationFailed
-          # Keep looping, to try all the keys. If none succeed,
-          # we raise below.
-        rescue CanvasSecurity::TokenExpired
-          raise
-        rescue => e
-          raise CanvasSecurity::InvalidToken, e
-        end
+        body = JSON::JWT.decode(token, key)
+        verify_jwt(body, ignore_expiration: ignore_expiration)
+        return body.with_indifferent_access
+      rescue JSON::JWS::VerificationFailed
+        # Keep looping, to try all the keys. If none succeed,
+        # we raise below.
+      rescue CanvasSecurity::TokenExpired
+        raise
+      rescue => e
+        raise CanvasSecurity::InvalidToken, e
       end

       raise CanvasSecurity::InvalidToken
@@ -287,13 +285,11 @@ module CanvasSecurity
       end

       secrets_to_check.each do |cur_secret|
-        begin
-          raw_jwt = JSON::JWT.decode(signed_coded_jwt.plain_text, cur_secret)
-          verify_jwt(raw_jwt, ignore_expiration: ignore_expiration)
-          return raw_jwt.with_indifferent_access
-        rescue JSON::JWS::VerificationFailed => e
-          CanvasErrors.capture_exception(:security_auth, e, :info)
-        end
+        raw_jwt = JSON::JWT.decode(signed_coded_jwt.plain_text, cur_secret)
+        verify_jwt(raw_jwt, ignore_expiration: ignore_expiration)
+        return raw_jwt.with_indifferent_access
+      rescue JSON::JWS::VerificationFailed => e
+        CanvasErrors.capture_exception(:security_auth, e, :info)
       end

       raise CanvasSecurity::InvalidToken
     end

View File

@@ -43,11 +43,9 @@ module CanvasTime
   end

   def self.try_parse(maybe_time, default = nil)
-    begin
-      Time.zone.parse(maybe_time) || default
-    rescue
-      default
-    end
+    Time.zone.parse(maybe_time) || default
+  rescue
+    default
   end

   def utc_datetime

View File

@@ -234,11 +234,9 @@ module DynamicSettings
       error = nil
       method = options[:recurse] ? :get_all : :get
       ms = 1000 * Benchmark.realtime do
-        begin
-          result = Diplomat::Kv.send(method, full_key, options)
-        rescue => e
-          error = e
-        end
+        result = Diplomat::Kv.send(method, full_key, options)
+      rescue => e
+        error = e
       end
       timing = format("CONSUL (%.2fms)", ms)
       status = 'OK'
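Removing the begin wrapper inside the Benchmark block is legal because, since Ruby 2.5, rescue/else/ensure clauses may appear directly in do/end blocks. A runnable toy version of the shape above (Benchmark is stdlib; the failing call merely stands in for the Consul lookup):

require 'benchmark'

error = nil
result = nil
ms = 1000 * Benchmark.realtime do
  result = Integer("not a number") # placeholder for Diplomat::Kv.send
rescue ArgumentError => e
  error = e
end

format("CONSUL stand-in (%.2fms), error: %s", ms, error.class)

Note the limitation: this syntax only works for do/end blocks, not for { } brace blocks, which is one reason the cop leaves some begin wrappers in place.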

View File

@@ -46,20 +46,18 @@ module Api::V1::Conferences
   def ui_conferences_json(conferences, context, user, session)
     cs = conferences.map do |c|
-      begin
-        c.as_json(
-          permissions: {
-            user: user,
-            session: session,
-          },
-          url: named_context_url(context, :context_conference_url, c)
-        )
-      rescue => e
-        Canvas::Errors.capture_exception(:web_conferences, e)
-        @errors ||= []
-        @errors << e
-        nil
-      end
+      c.as_json(
+        permissions: {
+          user: user,
+          session: session,
+        },
+        url: named_context_url(context, :context_conference_url, c)
+      )
+    rescue => e
+      Canvas::Errors.capture_exception(:web_conferences, e)
+      @errors ||= []
+      @errors << e
+      nil
     end
     cs.compact
   end

View File

@@ -65,12 +65,10 @@ module Canvas::Migration::ExternalContent
         sleep(retry_delay) if retry_count > 0
         pending_keys.each do |service_key|
-          begin
-            pending_keys.delete(service_key) if yield(service_key)
-          rescue => e
-            pending_keys.delete(service_key) # don't retry if failed
-            Canvas::Errors.capture_exception(:external_content_migration, e)
-          end
+          pending_keys.delete(service_key) if yield(service_key)
+        rescue => e
+          pending_keys.delete(service_key) # don't retry if failed
+          Canvas::Errors.capture_exception(:external_content_migration, e)
         end
         retry_count += 1
       end

View File

@@ -64,11 +64,9 @@ module Canvas::Migration
   end

   def delete_unzipped_archive
-    begin
-      @archive.delete_unzipped_archive
-    rescue
-      Rails.logger.warn "Couldn't delete #{@unzipped_file_path} for content_migration #{@settings[:content_migration_id]}"
-    end
+    @archive.delete_unzipped_archive
+  rescue
+    Rails.logger.warn "Couldn't delete #{@unzipped_file_path} for content_migration #{@settings[:content_migration_id]}"
   end

   def get_full_path(file_name)

View File

@@ -81,14 +81,12 @@ module Canvas::Migration::Worker
   end

   def self.clear_exported_data(folder)
-    begin
-      config = ConfigFile.load('external_migration')
-      if !config || !config[:keep_after_complete]
-        FileUtils::rm_rf(folder) if File.exist?(folder)
-      end
-    rescue
-      Rails.logger.warn "Couldn't clear export data for content_migration #{content_migration.id}"
-    end
+    config = ConfigFile.load('external_migration')
+    if !config || !config[:keep_after_complete]
+      FileUtils::rm_rf(folder) if File.exist?(folder)
+    end
+  rescue
+    Rails.logger.warn "Couldn't clear export data for content_migration #{content_migration.id}"
   end

   def self.download_attachment(cm, url)

View File

@@ -30,64 +30,62 @@ class Canvas::Migration::Worker::CourseCopyWorker < Canvas::Migration::Worker::B
     cm.job_progress.start

     cm.shard.activate do
-      begin
-        source = cm.source_course || Course.find(cm.migration_settings[:source_course_id])
-        ce = ContentExport.new
-        ce.shard = source.shard
-        ce.context = source
-        ce.content_migration = cm
-        ce.selected_content = cm.copy_options
-        ce.export_type = ContentExport::COURSE_COPY
-        ce.user = cm.user
-        ce.save!
-        cm.content_export = ce
-        source.shard.activate do
-          ce.export(synchronous: true)
-        end
-        if ce.workflow_state == 'exported_for_course_copy'
-          # use the exported attachment as the import archive
-          cm.attachment = ce.attachment
-          cm.migration_settings[:migration_ids_to_import] ||= { :copy => {} }
-          cm.migration_settings[:migration_ids_to_import][:copy][:everything] = true
-          # set any attachments referenced in html to be copied
-          ce.selected_content['attachments'] ||= {}
-          ce.referenced_files.values.each do |att_mig_id|
-            ce.selected_content['attachments'][att_mig_id] = true
-          end
-          ce.save
-          cm.save
-          worker = CC::Importer::CCWorker.new
-          worker.migration_id = cm.id
-          worker.perform
-          cm.reload
-          if cm.workflow_state == 'exported'
-            cm.workflow_state = :pre_processed
-            cm.update_import_progress(10)
-            cm.context.copy_attachments_from_course(source, :content_export => ce, :content_migration => cm)
-            cm.update_import_progress(20)
-            cm.import_content
-            cm.workflow_state = :imported
-            cm.save
-            cm.update_import_progress(100)
-          end
-        else
-          cm.workflow_state = :failed
-          cm.migration_settings[:last_error] = "ContentExport failed to export course."
-          cm.save
-        end
-      rescue InstFS::ServiceError, ActiveRecord::RecordInvalid => e
-        Canvas::Errors.capture_exception(:course_copy, e, :warn)
-        cm.fail_with_error!(e)
-        raise Delayed::RetriableError, e.message
-      rescue => e
-        cm.fail_with_error!(e)
-        raise e
-      end
+      source = cm.source_course || Course.find(cm.migration_settings[:source_course_id])
+      ce = ContentExport.new
+      ce.shard = source.shard
+      ce.context = source
+      ce.content_migration = cm
+      ce.selected_content = cm.copy_options
+      ce.export_type = ContentExport::COURSE_COPY
+      ce.user = cm.user
+      ce.save!
+      cm.content_export = ce
+
+      source.shard.activate do
+        ce.export(synchronous: true)
+      end
+
+      if ce.workflow_state == 'exported_for_course_copy'
+        # use the exported attachment as the import archive
+        cm.attachment = ce.attachment
+        cm.migration_settings[:migration_ids_to_import] ||= { :copy => {} }
+        cm.migration_settings[:migration_ids_to_import][:copy][:everything] = true
+        # set any attachments referenced in html to be copied
+        ce.selected_content['attachments'] ||= {}
+        ce.referenced_files.values.each do |att_mig_id|
+          ce.selected_content['attachments'][att_mig_id] = true
+        end
+        ce.save
+        cm.save
+        worker = CC::Importer::CCWorker.new
+        worker.migration_id = cm.id
+        worker.perform
+        cm.reload
+        if cm.workflow_state == 'exported'
+          cm.workflow_state = :pre_processed
+          cm.update_import_progress(10)
+          cm.context.copy_attachments_from_course(source, :content_export => ce, :content_migration => cm)
+          cm.update_import_progress(20)
+          cm.import_content
+          cm.workflow_state = :imported
+          cm.save
+          cm.update_import_progress(100)
+        end
+      else
+        cm.workflow_state = :failed
+        cm.migration_settings[:last_error] = "ContentExport failed to export course."
+        cm.save
+      end
+    rescue InstFS::ServiceError, ActiveRecord::RecordInvalid => e
+      Canvas::Errors.capture_exception(:course_copy, e, :warn)
+      cm.fail_with_error!(e)
+      raise Delayed::RetriableError, e.message
+    rescue => e
+      cm.fail_with_error!(e)
+      raise e
     end

View File

@@ -101,21 +101,19 @@ module Canvas::Plugins::TicketingSystem
   end

   def become_user_id_uri
-    begin
-      if url && user_id
-        begin
-          become_user_uri = URI.parse(url)
-          become_user_uri.query = (Hash[*(become_user_uri.query || '')
-            .split('&').map { |part| part.split('=') }.flatten])
-                                  .merge({ 'become_user_id' => user_id }).to_query
-        rescue URI::Error
-          become_user_uri = "unable to parse uri: #{url}"
-        end
-        become_user_uri.to_s
-      end
-    rescue
-      nil
-    end
+    if url && user_id
+      begin
+        become_user_uri = URI.parse(url)
+        become_user_uri.query = (Hash[*(become_user_uri.query || '')
+          .split('&').map { |part| part.split('=') }.flatten])
+                                .merge({ 'become_user_id' => user_id }).to_query
+      rescue URI::Error
+        become_user_uri = "unable to parse uri: #{url}"
+      end
+      become_user_uri.to_s
+    end
+  rescue
+    nil
   end

   def pretty_http_env

View File

@@ -28,11 +28,9 @@ module Canvas::Reloader
     Rails.logger.info("Canvas::Reloader fired")
     @pending_reload = false
     to_reload.each do |block|
-      begin
-        block.call
-      rescue => e
-        Canvas::Errors.capture_exception(:reloader, e)
-      end
+      block.call
+    rescue => e
+      Canvas::Errors.capture_exception(:reloader, e)
     end
   end

View File

@@ -140,13 +140,11 @@ module CC::Importer
   end

   def retrieve_and_convert_blti_url(url)
-    begin
-      response = CanvasHttp.get(url, redirect_limit: 10)
-      config_xml = response.body
-      convert_blti_xml(config_xml)
-    rescue Timeout::Error
-      raise CCImportError.new(I18n.t(:retrieve_timeout, "could not retrieve configuration, the server response timed out"))
-    end
+    response = CanvasHttp.get(url, redirect_limit: 10)
+    config_xml = response.body
+    convert_blti_xml(config_xml)
+  rescue Timeout::Error
+    raise CCImportError.new(I18n.t(:retrieve_timeout, "could not retrieve configuration, the server response timed out"))
   end

   def get_custom_properties(node)

View File

@@ -39,59 +39,57 @@ module CC
       zipper = ContentZipper.new
       zipper.user = @user
       zipper.process_folder(course_folder, @zip_file, [CCHelper::WEB_RESOURCES_FOLDER], :exporter => @manifest.exporter) do |file, folder_names|
-        begin
-          next if file.display_name.blank?
-          if file.is_a? Folder
-            dir = File.join(folder_names[1..-1])
-            files_with_metadata[:folders] << [file, dir] if file_or_folder_restricted?(file)
-            next
-          end
-          path = File.join(folder_names, file.display_name)
-          @added_attachments[file.id] = path
-          migration_id = create_key(file)
-          if file_or_folder_restricted?(file) || file.usage_rights || file.display_name != file.unencoded_filename
-            files_with_metadata[:files] << [file, migration_id]
-          end
-          @resources.resource(
-            "type" => CCHelper::WEBCONTENT,
-            :identifier => migration_id,
-            :href => path
-          ) do |res|
-            if file.locked || file.usage_rights
-              res.metadata do |meta_node|
-                meta_node.lom :lom do |lom_node|
-                  if file.locked
-                    lom_node.lom :educational do |edu_node|
-                      edu_node.lom :intendedEndUserRole do |role_node|
-                        role_node.lom :source, "IMSGLC_CC_Rolesv1p1"
-                        role_node.lom :value, "Instructor"
-                      end
-                    end
-                  end
-                  if file.usage_rights
-                    lom_node.lom :rights do |rights_node|
-                      rights_node.lom :copyrightAndOtherRestrictions do |node|
-                        node.lom :value, (file.usage_rights.license == 'public_domain') ? "no" : "yes"
-                      end
-                      description = []
-                      description << file.usage_rights.legal_copyright if file.usage_rights.legal_copyright.present?
-                      description << file.usage_rights.license_name unless file.usage_rights.license == 'private'
-                      rights_node.lom :description do |desc|
-                        desc.lom :string, description.join('\n')
-                      end
-                    end
-                  end
-                end
-              end
-            end
-            res.file(:href => path)
-          end
-        rescue
-          title = file.unencoded_filename rescue I18n.t('course_exports.unknown_titles.file', "Unknown file")
-          add_error(I18n.t('course_exports.errors.file', "The file \"%{file_name}\" failed to export", :file_name => title), $!)
-        end
+        next if file.display_name.blank?
+
+        if file.is_a? Folder
+          dir = File.join(folder_names[1..-1])
+          files_with_metadata[:folders] << [file, dir] if file_or_folder_restricted?(file)
+          next
+        end
+
+        path = File.join(folder_names, file.display_name)
+        @added_attachments[file.id] = path
+        migration_id = create_key(file)
+        if file_or_folder_restricted?(file) || file.usage_rights || file.display_name != file.unencoded_filename
+          files_with_metadata[:files] << [file, migration_id]
+        end
+        @resources.resource(
+          "type" => CCHelper::WEBCONTENT,
+          :identifier => migration_id,
+          :href => path
+        ) do |res|
+          if file.locked || file.usage_rights
+            res.metadata do |meta_node|
+              meta_node.lom :lom do |lom_node|
+                if file.locked
+                  lom_node.lom :educational do |edu_node|
+                    edu_node.lom :intendedEndUserRole do |role_node|
+                      role_node.lom :source, "IMSGLC_CC_Rolesv1p1"
+                      role_node.lom :value, "Instructor"
+                    end
+                  end
+                end
+                if file.usage_rights
+                  lom_node.lom :rights do |rights_node|
+                    rights_node.lom :copyrightAndOtherRestrictions do |node|
+                      node.lom :value, (file.usage_rights.license == 'public_domain') ? "no" : "yes"
+                    end
+                    description = []
+                    description << file.usage_rights.legal_copyright if file.usage_rights.legal_copyright.present?
+                    description << file.usage_rights.license_name unless file.usage_rights.license == 'private'
+                    rights_node.lom :description do |desc|
+                      desc.lom :string, description.join('\n')
+                    end
+                  end
+                end
+              end
+            end
+          end
+          res.file(:href => path)
+        end
+      rescue
+        title = file.unencoded_filename rescue I18n.t('course_exports.unknown_titles.file', "Unknown file")
+        add_error(I18n.t('course_exports.errors.file', "The file \"%{file_name}\" failed to export", :file_name => title), $!)
       end

       add_meta_info_for_files(files_with_metadata)
@@ -224,42 +222,40 @@ module CC
       tracks = {}

       html_content_exporter.used_media_objects.each do |obj|
-        begin
-          migration_id = create_key(obj)
-          info = html_content_exporter.media_object_infos[obj.id]
-          next unless info && info[:asset]
-          path = File.join(CCHelper::WEB_RESOURCES_FOLDER, info[:path])
-          # download from kaltura if the file wasn't already exported here in add_course_files
-          if !@added_attachments || @added_attachments[obj.attachment_id] != path
-            unless CanvasKaltura::ClientV3::ASSET_STATUSES[info[:asset][:status]] == :READY &&
-                   (url = (client.flavorAssetGetPlaylistUrl(obj.media_id, info[:asset][:id]) || client.flavorAssetGetDownloadUrl(info[:asset][:id])))
-              add_error(I18n.t('course_exports.errors.media_file', "A media file failed to export"))
-              next
-            end
-            CanvasHttp.get(url) do |http_response|
-              raise CanvasHttp::InvalidResponseCodeError.new(http_response.code.to_i) unless http_response.code.to_i == 200
-              @zip_file.get_output_stream(path) do |stream|
-                http_response.read_body(stream)
-              end
-            end
-            @resources.resource(
-              "type" => CCHelper::WEBCONTENT,
-              :identifier => migration_id,
-              :href => path
-            ) do |res|
-              res.file(:href => path)
-            end
-          end
-          process_media_tracks(tracks, migration_id, obj, path)
-        rescue
-          add_error(I18n.t('course_exports.errors.media_file', "A media file failed to export"), $!)
-        end
+        migration_id = create_key(obj)
+        info = html_content_exporter.media_object_infos[obj.id]
+        next unless info && info[:asset]
+
+        path = File.join(CCHelper::WEB_RESOURCES_FOLDER, info[:path])
+
+        # download from kaltura if the file wasn't already exported here in add_course_files
+        if !@added_attachments || @added_attachments[obj.attachment_id] != path
+          unless CanvasKaltura::ClientV3::ASSET_STATUSES[info[:asset][:status]] == :READY &&
+                 (url = (client.flavorAssetGetPlaylistUrl(obj.media_id, info[:asset][:id]) || client.flavorAssetGetDownloadUrl(info[:asset][:id])))
+            add_error(I18n.t('course_exports.errors.media_file', "A media file failed to export"))
+            next
+          end
+
+          CanvasHttp.get(url) do |http_response|
+            raise CanvasHttp::InvalidResponseCodeError.new(http_response.code.to_i) unless http_response.code.to_i == 200
+
+            @zip_file.get_output_stream(path) do |stream|
+              http_response.read_body(stream)
+            end
+          end
+
+          @resources.resource(
+            "type" => CCHelper::WEBCONTENT,
+            :identifier => migration_id,
+            :href => path
+          ) do |res|
+            res.file(:href => path)
+          end
+        end
+        process_media_tracks(tracks, migration_id, obj, path)
+      rescue
+        add_error(I18n.t('course_exports.errors.media_file', "A media file failed to export"), $!)
       end

       add_tracks(tracks) if @canvas_resource_dir

View File

@@ -22,13 +22,11 @@ module CustomValidations
   module ClassMethods
     def validates_as_url(*fields, allowed_schemes: %w{http https})
       validates_each(fields, :allow_nil => true) do |record, attr, value|
-        begin
-          value, = CanvasHttp.validate_url(value, allowed_schemes: allowed_schemes)
-          record.send("#{attr}=", value)
-        rescue CanvasHttp::Error, URI::Error, ArgumentError
-          record.errors.add attr, 'is not a valid URL'
-        end
+        value, = CanvasHttp.validate_url(value, allowed_schemes: allowed_schemes)
+
+        record.send("#{attr}=", value)
+      rescue CanvasHttp::Error, URI::Error, ArgumentError
+        record.errors.add attr, 'is not a valid URL'
       end
     end

View File

@@ -22,12 +22,10 @@ module DataFixup
   def self.run
     Quizzes::Quiz.find_ids_in_ranges(:batch_size => 10000) do |min_id, max_id|
      affected_quizzes.where(id: min_id..max_id).find_each do |quiz|
-        begin
-          possible = Quizzes::Quiz.count_points_possible(quiz.root_entries(true))
-          quiz.update!(points_possible: possible)
-        rescue => e
-          Rails.logger.error("Error occured trying to repair Quiz #{quiz.global_id} #{e}")
-        end
+        possible = Quizzes::Quiz.count_points_possible(quiz.root_entries(true))
+        quiz.update!(points_possible: possible)
+      rescue => e
+        Rails.logger.error("Error occured trying to repair Quiz #{quiz.global_id} #{e}")
      end
    end
  end

View File

@@ -93,17 +93,15 @@ module DataFixup::RebuildQuizSubmissionsFromQuizSubmissionEvents
       find_missing_submissions_on_current_shard
     end
     ids.map do |id|
-      begin
-        Rails.logger.info LOG_PREFIX + "#{id} data fix starting..."
-        success = run(id)
-      rescue => e
-        Rails.logger.warn LOG_PREFIX + "#{id} failed with error: #{e}"
-      ensure
-        if success
-          Rails.logger.info LOG_PREFIX + "#{id} completed successfully"
-        else
-          Rails.logger.warn LOG_PREFIX + "#{id} failed"
-        end
-      end
+      Rails.logger.info LOG_PREFIX + "#{id} data fix starting..."
+      success = run(id)
+    rescue => e
+      Rails.logger.warn LOG_PREFIX + "#{id} failed with error: #{e}"
+    ensure
+      if success
+        Rails.logger.info LOG_PREFIX + "#{id} completed successfully"
+      else
+        Rails.logger.warn LOG_PREFIX + "#{id} failed"
+      end
     end
   end

View File

@@ -35,21 +35,19 @@ module DataFixup::RebuildQuizSubmissionsFromQuizSubmissionVersions
   def run_on_array(submission_ids, timestamp = Time.zone.now)
     base_url = "#{Shard.current.id}/api/v1/"
     submission_ids.map do |id|
-      begin
-        Rails.logger.info LOG_PREFIX + "#{id} data fix starting..."
-        success = run(id, timestamp)
-      rescue => e
-        Rails.logger.warn LOG_PREFIX + "#{id} failed with error: #{e}"
-      ensure
-        if success
-          Rails.logger.info LOG_PREFIX + "#{id} completed successfully"
-          sub = Submission.find(id)
-          assignment = sub.assignment
-          url = "#{base_url}courses/#{assignment.context.id}/assignments/#{assignment.id}/submissions/#{sub.user_id}"
-          Rails.logger.info LOG_PREFIX + "You can investigate #{id} manually at #{url}"
-        else
-          Rails.logger.warn LOG_PREFIX + "#{id} failed"
-        end
-      end
+      Rails.logger.info LOG_PREFIX + "#{id} data fix starting..."
+      success = run(id, timestamp)
+    rescue => e
+      Rails.logger.warn LOG_PREFIX + "#{id} failed with error: #{e}"
+    ensure
+      if success
+        Rails.logger.info LOG_PREFIX + "#{id} completed successfully"
+        sub = Submission.find(id)
+        assignment = sub.assignment
+        url = "#{base_url}courses/#{assignment.context.id}/assignments/#{assignment.id}/submissions/#{sub.user_id}"
+        Rails.logger.info LOG_PREFIX + "You can investigate #{id} manually at #{url}"
+      else
+        Rails.logger.warn LOG_PREFIX + "#{id} failed"
+      end
     end
   end

View File

@@ -80,33 +80,31 @@ class ExternalFeedAggregator
   end

   def process_feed(feed)
-    begin
-      LiveEvents.set_context(Canvas::LiveEvents.amended_context(feed.context))
-      @logger.info("feed found: #{feed.url}")
-      @logger.info('requesting entries')
-      require 'net/http'
-      response = CanvasHttp.get(feed.url)
-      case response
-      when Net::HTTPSuccess
-        success = parse_entries(feed, response.body)
-        @logger.info(success ? 'successful response' : '200 with no data returned')
-        feed.consecutive_failures = 0 if success
-        feed.update_attribute(:refresh_at, success_wait_seconds.seconds.from_now)
-      else
-        @logger.info("request failed #{response.class}")
-        handle_failure(feed)
-      end
-    rescue CanvasHttp::Error,
-           CanvasHttp::RelativeUriError,
-           CanvasHttp::InsecureUriError,
-           Timeout::Error,
-           SocketError,
-           SystemCallError,
-           OpenSSL::SSL::SSLError => e
-      Canvas::Errors.capture_exception(:external_feed, e, :info)
-      handle_failure(feed)
-    end
+    LiveEvents.set_context(Canvas::LiveEvents.amended_context(feed.context))
+    @logger.info("feed found: #{feed.url}")
+    @logger.info('requesting entries')
+    require 'net/http'
+
+    response = CanvasHttp.get(feed.url)
+    case response
+    when Net::HTTPSuccess
+      success = parse_entries(feed, response.body)
+      @logger.info(success ? 'successful response' : '200 with no data returned')
+      feed.consecutive_failures = 0 if success
+      feed.update_attribute(:refresh_at, success_wait_seconds.seconds.from_now)
+    else
+      @logger.info("request failed #{response.class}")
+      handle_failure(feed)
+    end
+  rescue CanvasHttp::Error,
+         CanvasHttp::RelativeUriError,
+         CanvasHttp::InsecureUriError,
+         Timeout::Error,
+         SocketError,
+         SystemCallError,
+         OpenSSL::SSL::SSLError => e
+    Canvas::Errors.capture_exception(:external_feed, e, :info)
+    handle_failure(feed)
   end

   def handle_failure(feed)

View File

@@ -82,11 +82,9 @@ class GradebookImporter
   end

   CSV::Converters[:nil] = lambda do |e|
-    begin
-      e.nil? ? e : raise
-    rescue
-      e
-    end
+    e.nil? ? e : raise
+  rescue
+    e
   end

   CSV::Converters[:decimal_comma_to_period] = ->(field) do
@@ -635,16 +633,14 @@ class GradebookImporter
   def identify_delimiter(rows)
     field_counts = {}
     %w[; ,].each do |separator|
-      begin
-        field_counts[separator] = 0
-        field_count_by_row = rows.map { |line| CSV.parse_line(line, col_sep: separator)&.size || 0 }
-        # If the number of fields generated by this separator is consistent for all lines,
-        # we should be able to assume it's a valid delimiter for this file
-        field_counts[separator] = field_count_by_row.first if field_count_by_row.uniq.size == 1
-      rescue CSV::MalformedCSVError
-        nil
-      end
+      field_counts[separator] = 0
+      field_count_by_row = rows.map { |line| CSV.parse_line(line, col_sep: separator)&.size || 0 }
+      # If the number of fields generated by this separator is consistent for all lines,
+      # we should be able to assume it's a valid delimiter for this file
+      field_counts[separator] = field_count_by_row.first if field_count_by_row.uniq.size == 1
+    rescue CSV::MalformedCSVError
+      nil
     end

     field_counts[';'] > field_counts[','] ? :semicolon : :comma

View File

@@ -199,12 +199,10 @@ module Lti::Messages
       return nil unless @context.is_a?(Course)
       return nil if @user.blank?

-      @_current_observee_list ||= begin
-        @user.observer_enrollments.current
-             .where(course_id: @context.id)
-             .preload(:associated_user)
-             .map { |e| e.try(:associated_user).try(:lti_id) }.compact
-      end
+      @_current_observee_list ||= @user.observer_enrollments.current
+                                       .where(course_id: @context.id)
+                                       .preload(:associated_user)
+                                       .map { |e| e.try(:associated_user).try(:lti_id) }.compact
     end

     def custom_parameters

View File

@@ -82,9 +82,7 @@ module Lti
     end

     def submission_event_service
-      @_submission_event_service ||= begin
-        tool_proxy.find_service(SUBMISSION_EVENT_ID, 'POST')
-      end
+      @_submission_event_service ||= tool_proxy.find_service(SUBMISSION_EVENT_ID, 'POST')
     end

     def format

View File

@@ -138,24 +138,22 @@ module MessageBus
     # expanded. https://guides.rubyonrails.org/threading_and_code_execution.html#wrapping-application-code
     Rails.application.executor.wrap do
       Shard.lookup(shard_id).activate do
-        begin
-          status = produce_message(namespace, topic_name, message)
-        rescue StandardError => e
-          # if we errored, we didn't actually process the message
-          # put it back on the queue to try to get to it later.
-          # Does this screw up ordering? yes, absolutely, but ruby queues are one-way.
-          # If your messages within topics are required to be stricly ordered, you need to
-          # generate a producer and manage error handling yourself.
-          @queue.push(work_tuple)
-          # if this is NOT one of the known error types from pulsar
-          # then we actually need to know about it with a full ":error"
-          # level in sentry.
-          err_level = ::MessageBus.rescuable_pulsar_errors.include?(e.class) ? :warn : :error
-          CanvasErrors.capture_exception(:message_bus, e, err_level)
-          status = :error
-        ensure
-          MessageBus.on_work_unit_end&.call
-        end
+        status = produce_message(namespace, topic_name, message)
+      rescue StandardError => e
+        # if we errored, we didn't actually process the message
+        # put it back on the queue to try to get to it later.
+        # Does this screw up ordering? yes, absolutely, but ruby queues are one-way.
+        # If your messages within topics are required to be stricly ordered, you need to
+        # generate a producer and manage error handling yourself.
+        @queue.push(work_tuple)
+        # if this is NOT one of the known error types from pulsar
+        # then we actually need to know about it with a full ":error"
+        # level in sentry.
+        err_level = ::MessageBus.rescuable_pulsar_errors.include?(e.class) ? :warn : :error
+        CanvasErrors.capture_exception(:message_bus, e, err_level)
+        status = :error
+      ensure
+        MessageBus.on_work_unit_end&.call
      end
    end
    status
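The same block-level syntax covers ensure: the cleanup runs whether the block body raises or not, with no begin/end pair. A self-contained toy of the produce-or-requeue shape above (queue contents and names are invented):

queue = [[:topic_a, "payload"]]
work = queue.shift

[work].each do |tuple|
  raise "broker unavailable" # stands in for produce_message failing
rescue StandardError => e
  queue.push(tuple)          # put the message back for a later attempt
  warn "requeued after: #{e.message}"
ensure
  puts "work unit finished"  # runs on success and failure alike
end

queue # => [[:topic_a, "payload"]]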

View File

@@ -96,17 +96,15 @@ module Outcomes
   def parse_batch(headers, batch)
     Account.transaction do
       results = batch.map do |row, line|
-        begin
-          utf8_row = row.map(&method(:check_encoding))
-          import_row(headers, utf8_row) unless utf8_row.all?(&:blank?)
-          []
-        rescue ParseError, InvalidDataError => e
-          [[line, e.message]]
-        rescue ActiveRecord::RecordInvalid => e
-          errors = e.record.errors
-          errors.set_reporter(:array, :human)
-          errors.to_a.map { |err| [line, err] }
-        end
+        utf8_row = row.map(&method(:check_encoding))
+        import_row(headers, utf8_row) unless utf8_row.all?(&:blank?)
+        []
+      rescue ParseError, InvalidDataError => e
+        [[line, e.message]]
+      rescue ActiveRecord::RecordInvalid => e
+        errors = e.record.errors
+        errors.set_reporter(:array, :human)
+        errors.to_a.map { |err| [line, err] }
      end

      results.flatten(1)

View File

@@ -28,19 +28,17 @@ module QuizMathDataFixup
     end
     questions = questions.where('updated_at>?', check_date) if check_date
     questions.find_each do |quiz_question|
-      begin
-        old_data = quiz_question.question_data.to_hash
-        new_data = fixup_question_data(quiz_question.question_data.to_hash.symbolize_keys)
-        quiz_question.write_attribute(:question_data, new_data) if new_data != old_data
-        if quiz_question.changed?
-          stat = question_bank ? 'updated_math_qb_question' : 'updated_math_question'
-          InstStatsd::Statsd.increment(stat)
-          changed = true
-          quiz_question.save!
-        end
-      rescue => e
-        Canvas::Errors.capture(e)
-      end
+      old_data = quiz_question.question_data.to_hash
+      new_data = fixup_question_data(quiz_question.question_data.to_hash.symbolize_keys)
+      quiz_question.write_attribute(:question_data, new_data) if new_data != old_data
+      if quiz_question.changed?
+        stat = question_bank ? 'updated_math_qb_question' : 'updated_math_question'
+        InstStatsd::Statsd.increment(stat)
+        changed = true
+        quiz_question.save!
+      end
+    rescue => e
+      Canvas::Errors.capture(e)
     end
     qstat = question_bank ? 'updated_math_question_bank' : 'updated_math_quiz'
     InstStatsd::Statsd.increment(qstat) if changed
@@ -49,12 +47,10 @@ module QuizMathDataFixup
   def fixup_submission_questions_with_bad_math(submission)
     submission.questions&.each_with_index do |question, index|
-      begin
-        data = fixup_question_data(question)
-        submission.questions[index] = data
-      rescue => e
-        Canvas::Errors.capture(e)
-      end
+      data = fixup_question_data(question)
+      submission.questions[index] = data
+    rescue => e
+      Canvas::Errors.capture(e)
     end
     begin
       submission.save! if submission.changed?

View File

@@ -30,13 +30,11 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::AbstractCourseImporter.new(@root_account, importer_opts).process do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            importer.add_abstract_course(row['abstract_course_id'], row['short_name'],
-                                         row['long_name'], row['status'], row['term_id'],
-                                         row['account_id'], row['fallback_account_id'])
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          importer.add_abstract_course(row['abstract_course_id'], row['short_name'],
+                                       row['long_name'], row['status'], row['term_id'],
+                                       row['account_id'], row['fallback_account_id'])
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count
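All of the SIS importer hunks that follow share one shape: the rescue sits inside the per-row block, so a bad row is recorded and skipped instead of aborting the whole CSV batch. A minimal sketch of that design (the error collector is a stand-in for SisBatch.add_error):

errors = []
rows = [{ "id" => "a1" }, { "id" => nil }, { "id" => "a3" }]

rows.each_with_index do |row, lineno|
  raise ArgumentError, "missing id" if row["id"].nil?

  puts "imported #{row['id']}"
rescue ArgumentError => e
  errors << [lineno, e.message] # stand-in for SisBatch.add_error(csv, ...)
end

errors # => [[1, "missing id"]]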

View File

@@ -34,12 +34,10 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::AccountImporter.new(@root_account, importer_opts).process do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            importer.add_account(row['account_id'], row['parent_account_id'],
-                                 row['status'], row['name'], row['integration_id'])
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          importer.add_account(row['account_id'], row['parent_account_id'],
+                               row['status'], row['name'], row['integration_id'])
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count

View File

@@ -35,13 +35,11 @@ module SIS
       messages = []
       count = SIS::AdminImporter.new(@root_account, importer_opts).process do |i|
         csv_rows(csv, index, count) do |row|
-          begin
-            i.process_admin(user_id: row['user_id'], account_id: row['account_id'],
-                            role_id: row['role_id'], role: row['role'],
-                            status: row['status'], root_account: row['root_account'])
-          rescue ImportError => e
-            messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          i.process_admin(user_id: row['user_id'], account_id: row['account_id'],
+                          role_id: row['role_id'], role: row['role'],
+                          status: row['status'], root_account: row['root_account'])
+        rescue ImportError => e
+          messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      SisBatch.bulk_insert_sis_errors(messages)

View File

@@ -34,11 +34,9 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::ChangeSisIdImporter.new(@root_account, importer_opts).process do |i|
         csv_rows(csv, index, count) do |row|
-          begin
-            i.process_change_sis_id(create_change_data(row))
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          i.process_change_sis_id(create_change_data(row))
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count

View File

@@ -36,11 +36,9 @@ module SIS
       messages = []
       count = SIS::EnrollmentImporter.new(@root_account, importer_opts).process(messages) do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            importer.add_enrollment(create_enrollment(row, messages, csv: csv))
-          rescue ImportError => e
-            messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          importer.add_enrollment(create_enrollment(row, messages, csv: csv))
+        rescue ImportError => e
+          messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      persist_errors(csv, messages)

View File

@@ -30,11 +30,9 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::GradePublishingResultsImporter.new(@root_account, importer_opts).process do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            importer.add_grade_publishing_result(row['enrollment_id'], row['grade_publishing_status'], row['message'])
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          importer.add_grade_publishing_result(row['enrollment_id'], row['grade_publishing_status'], row['message'])
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count

View File

@@ -35,12 +35,10 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::GroupCategoryImporter.new(@root_account, importer_opts).process do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            importer.add_group_category(row['group_category_id'], row['account_id'],
-                                        row['course_id'], row['category_name'], row['status'])
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          importer.add_group_category(row['group_category_id'], row['account_id'],
+                                      row['course_id'], row['category_name'], row['status'])
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count

View File

@@ -35,12 +35,10 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::GroupImporter.new(@root_account, importer_opts).process do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            importer.add_group(row['group_id'], row['group_category_id'], row['account_id'],
-                               row['course_id'], row['name'], row['status'])
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          importer.add_group(row['group_id'], row['group_category_id'], row['account_id'],
+                             row['course_id'], row['name'], row['status'])
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count

View File

@@ -34,11 +34,9 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::GroupMembershipImporter.new(@root_account, importer_opts).process do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            importer.add_group_membership(row['user_id'], row['group_id'], row['status'])
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          importer.add_group_membership(row['user_id'], row['group_id'], row['status'])
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count

View File

@@ -126,17 +126,15 @@ module SIS
     def number_of_rows(create_importers:)
       IMPORTERS.each do |importer|
         @csvs[importer].reject! do |csv|
-          begin
-            rows = count_rows(csv, importer, create_importers: create_importers)
-            unless create_importers
-              @rows[importer] += rows
-              @total_rows += rows
-            end
-            false
-          rescue ::CSV::MalformedCSVError
-            SisBatch.add_error(csv, I18n.t("Malformed CSV"), sis_batch: @batch, failure: true)
-            true
-          end
+          rows = count_rows(csv, importer, create_importers: create_importers)
+          unless create_importers
+            @rows[importer] += rows
+            @total_rows += rows
+          end
+          false
+        rescue ::CSV::MalformedCSVError
+          SisBatch.add_error(csv, I18n.t("Malformed CSV"), sis_batch: @batch, failure: true)
+          true
        end
      end
    end

View File

@@ -36,12 +36,10 @@ module SIS
       messages = []
       count = SIS::UserImporter.new(@root_account, importer_opts).process(messages, login_only: true) do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            p = create_user(row, csv)
-            importer.add_user(p, login_only: true)
-          rescue ImportError => e
-            messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: p.login_row_info)
-          end
+          p = create_user(row, csv)
+          importer.add_user(p, login_only: true)
+        rescue ImportError => e
+          messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: p.login_row_info)
        end
      end
      SisBatch.bulk_insert_sis_errors(messages)

View File

@@ -36,12 +36,10 @@ module SIS
       messages = []
       count = SIS::UserImporter.new(@root_account, importer_opts).process(messages) do |importer|
         csv_rows(csv, index, count) do |row|
-          begin
-            u = create_user(row, csv)
-            importer.add_user(u)
-          rescue ImportError => e
-            messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: u.row_info)
-          end
+          u = create_user(row, csv)
+          importer.add_user(u)
+        rescue ImportError => e
+          messages << SisBatch.build_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: u.row_info)
        end
      end
      SisBatch.bulk_insert_sis_errors(messages)

View File

@@ -34,11 +34,9 @@ module SIS
     def process(csv, index = nil, count = nil)
       count = SIS::UserObserverImporter.new(@root_account, importer_opts).process do |i|
         csv_rows(csv, index, count) do |row|
-          begin
-            i.process_user_observer(row['observer_id'], row['student_id'], row['status'])
-          rescue ImportError => e
-            SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
-          end
+          i.process_user_observer(row['observer_id'], row['student_id'], row['status'])
+        rescue ImportError => e
+          SisBatch.add_error(csv, e.to_s, sis_batch: @batch, row: row['lineno'], row_info: row)
        end
      end
      count

Some files were not shown because too many files have changed in this diff