RuboCop: Performance/StringInclude
[skip-stages=Flakey] auto-corrected, with manual review to identify possible nilness

Change-Id: I205436e5c3cb37aae99ea552c7d14e6d1a04ef06
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/277893
Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com>
Reviewed-by: Simon Williams <simon@instructure.com>
QA-Review: Cody Cutrer <cody@instructure.com>
Product-Review: Cody Cutrer <cody@instructure.com>
parent 0b67e1d47b
commit 6c2705e1bf
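For context, here is a minimal sketch (not part of this commit; the variable name is hypothetical) of the rewrite the Performance/StringInclude cop performs, and why the "possible nilness" review matters: a regex match against a nil receiver returns nil harmlessly, while calling include? on nil raises, which is why many call sites below use &.include? or .to_s.include?.

# illustrative only
str = nil
str =~ /needle/              # => nil, no error -- the pattern the cop replaces
str&.include?('needle')      # => nil, safe navigation keeps nil-tolerance
str.to_s.include?('needle')  # => false, coercing first also avoids the error
# str.include?('needle')     # would raise NoMethodError on a nil receiver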
@@ -64,6 +64,9 @@ Naming/FileName:
   Exclude:
     - "**/Gemfile.d/~after.rb"

+Performance/StringInclude:
+  Severity: error
+
 Rails/ApplicationRecord:
   Enabled: false # we never bothered creating an ApplicationRecord
 Rails/HasManyOrHasOneDependent:
@@ -1547,7 +1547,7 @@ class ApplicationController < ActionController::Base
   end

   def log_gets
-    if @page_view && !request.xhr? && request.get? && ((response.media_type || "").to_s.match(/html/) ||
+    if @page_view && !request.xhr? && request.get? && ((response.media_type || "").to_s.include?('html') ||
         ((Setting.get('create_get_api_page_views', 'true') == 'true') && api_request?))
       @page_view.render_time ||= (Time.now.utc - @page_before_render) rescue nil
       @page_view_update = true
@@ -2302,7 +2302,7 @@ class ApplicationController < ActionController::Base
   end

   def json_as_text?
-    (request.headers['CONTENT_TYPE'].to_s =~ %r{multipart/form-data}) &&
+    request.headers['CONTENT_TYPE'].to_s.include?('multipart/form-data') &&
       (params[:format].to_s != 'json' || in_app?)
   end

@@ -2334,7 +2334,7 @@ class ApplicationController < ActionController::Base
   end

   def stringify_json_ids?
-    request.headers['Accept'] =~ %r{application/json\+canvas-string-ids}
+    request.headers['Accept']&.include?('application/json+canvas-string-ids')
   end

   def json_cast(obj)
@@ -2529,7 +2529,7 @@ class ApplicationController < ActionController::Base
   end

   def ms_office?
-    !!(request.user_agent.to_s =~ /ms-office/) ||
+    !!request.user_agent.to_s.include?('ms-office') ||
       !!(request.user_agent.to_s =~ %r{Word/\d+\.\d+})
   end

@@ -515,7 +515,7 @@ class ContentMigrationsController < ApplicationController
   end

   def find_migration_plugin(name)
-    if name =~ /context_external_tool/
+    if name.include?('context_external_tool')
       plugin = Canvas::Plugin.new(name)
       plugin.meta[:settings] = { requires_file_upload: true, worker: 'CCWorker', valid_contexts: %w{Course} }.with_indifferent_access
       plugin
@@ -428,7 +428,7 @@ class EnrollmentsApiController < ApplicationController
     enrollments = enrollments.joins(:user).select("enrollments.*")

     has_courses = enrollments.where_clause.instance_variable_get(:@predicates)
-                             .any? { |cond| cond.is_a?(String) && cond =~ /courses\./ }
+                             .any? { |cond| cond.is_a?(String) && cond.include?('courses.') }
     enrollments = enrollments.joins(:course) if has_courses
     enrollments = enrollments.shard(@shard_scope) if @shard_scope

@@ -61,7 +61,7 @@ module Lti::Concerns
     end

     def oembed_object_uri
-      URI.parse(oembed_endpoint + (oembed_endpoint.match(/\?/) ? '&url=' : '?url=') + CGI.escape(oembed_url) + '&format=json')
+      URI.parse(oembed_endpoint + (oembed_endpoint.include?('?') ? '&url=' : '?url=') + CGI.escape(oembed_url) + '&format=json')
     end

     def uri_source
@@ -80,7 +80,7 @@ class ServicesApiController < ApplicationController
     client = CanvasKaltura::ClientV3.new
     uid = "#{@user.id}_#{@domain_root_account.id}"
     res = client.startSession(CanvasKaltura::SessionType::USER, uid)
-    raise "Kaltura session failed to generate" if res =~ /START_SESSION_ERROR/
+    raise "Kaltura session failed to generate" if res.include?('START_SESSION_ERROR')

     hash = {
       :ks => res,
@@ -54,7 +54,7 @@ module AttachmentHelper
   end

   def media_preview_attributes(attachment, attrs = {})
-    attrs[:type] = attachment.content_type.match(/video/) ? 'video' : 'audio'
+    attrs[:type] = attachment.content_type&.include?('video') ? 'video' : 'audio'
     attrs[:download_url] = context_url(attachment.context, :context_file_download_url, attachment.id)
     attrs[:media_entry_id] = attachment.media_entry_id if attachment.media_entry_id
     attrs.inject(+"") { |s, (attr, val)| s << "data-#{attr}=#{val} " }
@@ -36,7 +36,7 @@ module CalendarEventsHelper
    end
    # Use a explicit "return_to" option first, absent that, use calendar_url_for
    clean_return_to(
-      params[:return_to] && params[:return_to].match(/calendar/) && params[:return_to]
+      params[:return_to]&.include?('calendar') && params[:return_to]
    ) ||
      calendar_url_for(options[:context], cal_options)
  end
@@ -68,9 +68,9 @@ module QuizzesHelper

  def render_number(num)
    # if the string representation of this number uses scientific notation,
-    return format('%g', num) if num.to_s =~ /e/ # short circuit if scientific notation
+    return format('%g', num) if num.to_s.include?('e') # short circuit if scientific notation

-    if num.to_s =~ /%/
+    if num.to_s.include?('%')
      I18n.n(round_if_whole(num.delete('%'))) + '%'
    else
      I18n.n(round_if_whole(num))
@@ -94,7 +94,7 @@ class AssetUserAccess < ActiveRecord::Base
  end

  def readable_name(include_group_name: true)
-    if self.asset_code && self.asset_code.match(/:/)
+    if self.asset_code&.include?(':')
      split = self.asset_code.split(/:/)

      if split[1].match(/course_\d+/)
@@ -3071,8 +3071,7 @@ class Assignment < ActiveRecord::Base
  end

  def allow_google_docs_submission?
-    self.submission_types &&
-      self.submission_types.match(/online_upload/)
+    submission_types&.include?('online_upload')
  end

  def <=>(comparable)
@@ -238,7 +238,7 @@ class Attachment < ActiveRecord::Base
    end

    # try an infer encoding if it would be useful to do so
-    delay.infer_encoding if self.encoding.nil? && self.content_type =~ /text/ && self.context_type != 'SisBatch'
+    delay.infer_encoding if self.encoding.nil? && self.content_type&.include?('text') && self.context_type != 'SisBatch'
    if respond_to?(:process_attachment, true)
      automatic_thumbnail_sizes.each do |suffix|
        delay_if_production(singleton: "attachment_thumbnail_#{global_id}_#{suffix}")
@@ -470,12 +470,12 @@ class Attachment < ActiveRecord::Base
  end

  def assert_file_extension
-    self.content_type = nil if self.content_type && (self.content_type == 'application/x-unknown' || self.content_type.match(/ERROR/))
+    self.content_type = nil if content_type == 'application/x-unknown' || content_type&.include?('ERROR')
    self.content_type ||= self.mimetype(self.filename)
    if self.filename && self.filename.split(".").length < 2
      # we actually have better luck assuming zip files without extensions
      # are docx files than assuming they're zip files
-      self.content_type = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' if self.content_type.match(/zip/)
+      self.content_type = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' if content_type&.include?('zip')
      ext = self.extension
      self.write_attribute(:filename, self.filename + ext) unless ext == '.unknown'
    end
@@ -707,7 +707,7 @@ class ContextExternalTool < ActiveRecord::Base
    return "" if url.blank?

    url = url.gsub(/[[:space:]]/, '')
-    url = "http://" + url unless url.match(/:\/\//)
+    url = "http://" + url unless url.include?('://')
    res = Addressable::URI.parse(url).normalize
    res.query = res.query.split(/&/).sort.join('&') if !res.query.blank?
    res.to_s
@@ -63,8 +63,9 @@ module Importers
      def convert_link(node, attr, item_type, mig_id, field)
        return unless node[attr].present?

-        if attr == 'value'
-          return unless (node[attr] && node[attr] =~ %r{IMS(?:-|_)CC(?:-|_)FILEBASE}) || node[attr] =~ %r{CANVAS_COURSE_REFERENCE}
+        if attr == 'value' &&
+           !(node[attr] =~ %r{IMS(?:-|_)CC(?:-|_)FILEBASE} || node[attr].include?('CANVAS_COURSE_REFERENCE'))
+          return
        end

        url = node[attr].dup
@@ -144,13 +145,13 @@ module Importers

        elsif url =~ %r{\$IMS(?:-|_)CC(?:-|_)FILEBASE\$/(.*)}
          rel_path = URI.unescape($1)
-          if (attr == 'href' && node['class'] && node['class'] =~ /instructure_inline_media_comment/) ||
+          if (attr == 'href' && node['class']&.include?('instructure_inline_media_comment')) ||
             (attr == 'src' && node.name == 'iframe' && node['data-media-id'])
            unresolved(:media_object, :rel_path => rel_path)
          else
            unresolved(:file, :rel_path => rel_path)
          end
-        elsif (attr == 'href' && node['class'] && node['class'] =~ /instructure_inline_media_comment/) ||
+        elsif (attr == 'href' && node['class']&.include?('instructure_inline_media_comment')) ||
              (attr == 'src' && node.name == 'iframe' && node['data-media-id'])
          # Course copy media reference, leave it alone
          resolved
@@ -216,7 +216,7 @@ class MediaObject < ActiveRecord::Base
      self.data[:plays] = entry[:plays].to_i
      self.data[:download_url] = entry[:downloadUrl]
      tags = (entry[:tags] || "").split(/,/).map(&:strip)
-      old_id = tags.detect { |t| t.match(/old_id_/) }
+      old_id = tags.detect { |t| t.include?('old_id_') }
      self.old_media_id = old_id.sub(/old_id_/, '') if old_id
    end
    self.data[:extensions] ||= {}
@@ -323,7 +323,7 @@ class Quizzes::Quiz < ActiveRecord::Base
    val = val.in_time_zone.end_of_day if val.is_a?(Date)
    if val.is_a?(String)
      super(Time.zone.parse(val))
-      self.lock_at = CanvasTime.fancy_midnight(self.lock_at) unless val =~ /:/
+      self.lock_at = CanvasTime.fancy_midnight(self.lock_at) unless val.include?(':')
    else
      super(val)
    end
@@ -333,7 +333,7 @@ class Quizzes::Quiz < ActiveRecord::Base
    val = val.in_time_zone.end_of_day if val.is_a?(Date)
    if val.is_a?(String)
      super(Time.zone.parse(val))
-      infer_times unless val =~ /:/
+      infer_times unless val.include?(':')
    else
      super(val)
    end
@@ -99,7 +99,7 @@ class Quizzes::QuizRegrader::Answer

    fake_submission_data = if question_data[:question_type] == 'multiple_answers_question'
                             hash = {}
-                             answer.each { |k, v| hash["question_#{question_id}_#{k}"] = v if /answer/ =~ k.to_s }
+                             answer.each { |k, v| hash["question_#{question_id}_#{k}"] = v if k.to_s.include?('answer') }
                             answer.merge(hash)
                           else
                             answer.merge("question_#{question_id}" => answer[:text])
@@ -2072,7 +2072,7 @@ class Submission < ActiveRecord::Base

  def processed?
    if submission_type == "online_url"
-      return attachment && attachment.content_type.match(/image/)
+      return attachment&.content_type&.include?('image')
    end

    false
@@ -317,7 +317,7 @@ module PostgreSQLAdapterExtensions
  end

  I18n.available_locales.each do |locale|
-    next if locale =~ /-x-/
+    next if locale.to_s.include?('-x-')

    I18n.locale = locale
    next if Canvas::ICU.collator.rules.empty?
@@ -70,7 +70,7 @@ module AttachmentFu # :nodoc:
      commands.limit("disk", "1000MB") # because arbitrary numbers are arbitrary

      # gif are not handled correct, this is a hack, but it seems to work.
-      if img[:format] =~ /GIF/
+      if img[:format].include?('GIF')
        img.format("png")
      end

@@ -122,7 +122,7 @@ module CanvasCache
    def self.redis_failure?(redis_name)
      return false unless last_redis_failure[redis_name]
      # i feel this dangling rescue is justifiable, given the try-to-be-failsafe nature of this code
-      if redis_name =~ /localhost/
+      if redis_name.include?('localhost')
        # talking to local redis should not short ciruit as long
        return (Time.zone.now - last_redis_failure[redis_name]) < (settings_store.get('redis_local_failure_time', '2').to_i rescue 2)
      end
@@ -360,7 +360,7 @@ describe CanvasCache::Redis do
      end
      # we don't log the second message under spring, cause reasons; we only
      # care about the primary message anyway
-      msgs = messages.select { |m| m =~ /Query failure/ }
+      msgs = messages.select { |m| m.include?('Query failure') }
      expect(msgs.length).to eq(1)
      m = msgs.first
      expect(m).to match(/\[REDIS\] Query failure/)
@@ -180,7 +180,7 @@ module CanvasHttp
    begin
      uri = URI.parse(value)
    rescue URI::InvalidURIError => e
-      if e.message =~ /URI must be ascii only/
+      if e.message.include?('URI must be ascii only')
        uri = URI.parse(Addressable::URI.normalized_encode(value).chomp("/"))
        value = uri.to_s
      else
@@ -35,12 +35,10 @@ module LuckySneaks
      if defined?(RedCloth)
        if lite_mode
          RedCloth.new(self, [:lite_mode]).to_html
+        elsif self.include?('<pre>')
+          RedCloth.new(self).to_html.tr("\t", "")
        else
-          if self =~ /<pre>/
-            RedCloth.new(self).to_html.tr("\t", "")
-          else
-            RedCloth.new(self).to_html.tr("\t", "").gsub(/\n\n/, "")
-          end
+          RedCloth.new(self).to_html.tr("\t", "").gsub(/\n\n/, "")
        end
      else
        self
@@ -142,7 +140,7 @@ module LuckySneaks
        /(\s|^)\$(\d+)\.(\d+)(\s|$)/ => '\2 dollars \3 cents',
        /(\s|^)£(\d+)\.(\d+)(\s|$)/u => '\2 pounds \3 pence',
      }.each do |found, replaced|
-        replaced = " #{replaced} " unless replaced =~ /\\1/
+        replaced = " #{replaced} " unless replaced.include?('\\1')
        dummy.gsub!(found, replaced)
      end
      # Back to normal rules
@@ -158,7 +156,7 @@ module LuckySneaks
        /\s*%\s*/ => "percent",
        /\s*(\\|\/)\s*/ => "slash",
      }.each do |found, replaced|
-        replaced = " #{replaced} " unless replaced =~ /\\1/
+        replaced = " #{replaced} " unless replaced.include?('\\1')
        dummy.gsub!(found, replaced)
      end
      dummy = dummy.gsub(/(^|\w)'(\w|$)/, '\1\2').gsub(/[.,:;()\[\]\/?!\^'"_]/, " ")
@@ -304,7 +304,7 @@ module HtmlTextHelper
  def add_notification_to_link(url, notification_id)
    parts = url.to_s.split("#", 2)
    link = parts[0]
-    link += link.match(/\?/) ? "&" : "?"
+    link += link.include?('?') ? "&" : "?"
    link += "clear_notification_id=#{notification_id}"
    link += parts[1] if parts[1]
    link
@@ -190,7 +190,7 @@ module I18nExtraction::Extensions
      remove_whitespace = true
      scope = case filename
              when /app\/messages\//
-                remove_whitespace = false unless filename =~ /html/
+                remove_whitespace = false unless filename.include?('html')
                filename.gsub(/.*app\/|\.erb/, '').gsub(/\/_?/, '.')
              when /app\/views\//
                filename.gsub(/.*app\/views\/|\.(html\.|fbml\.)?erb\z/, '').gsub(/\/_?/, '.')
@@ -144,7 +144,7 @@ module I18nTasks
             .concat(scan_and_report(dashed_str, /(!?\[)[^\]]+\]\(([^)"']+).*?\)/).map { |m| "link:#{m.last}" }) # links

    # only do fancy markdown checks on multi-line strings
-    if dashed_str =~ /\n/
+    if dashed_str.include?("\n")
      matches.concat(scan_and_report(dashed_str, /^(\#{1,6})\s+[^#]*#*$/).map { |m| "h#{m.first.size}" }) # headings
             .concat(scan_and_report(dashed_str, /^[^=\-\n]+\n^(=+|-+)$/).map { |m| m.first[0] == '=' ? 'h1' : 'h2' }) # moar headings
             .concat(scan_and_report(dashed_str, /^((\s*\*\s*){3,}|(\s*-\s*){3,}|(\s*_\s*){3,})$/).map { "hr" })
@@ -414,7 +414,7 @@ namespace :i18n do
    import = I18nTasks::I18nImport.new(source_translations, new_translations)

    complete_translations = import.compile_complete_translations do |error_items, description|
-      if description =~ /mismatches/
+      if description.include?('mismatches')
        # Output malformed stuff and don't import them
        errors.concat error_items
        :discard
@@ -62,7 +62,7 @@ describe RuboCop::Cop::Specs::NoBeforeOnceStubs do
      end
    })
    expect(cop.offenses.size).to eq(5)
-    expect(cop.messages.all? { |msg| msg =~ /Use `before\(:once\)`/ })
+    expect(cop.messages.all? { |msg| msg.include?('Use `before(:once)`') })
    expect(cop.offenses.all? { |off| off.severity.name == :warning })
  end
end
@@ -8,7 +8,7 @@ module TatlTael
    class << self
      def inherited(subclass)
        super
-        Linters.linters << subclass unless subclass.name =~ /SimpleLinter/
+        Linters.linters << subclass unless subclass.name&.include?('SimpleLinter')
      end
    end

@@ -434,7 +434,7 @@ module Api
  end

  def self.build_links_hash(base_url, opts = {})
-    base_url += (base_url =~ /\?/ ? '&' : '?')
+    base_url += (base_url.include?('?') ? '&' : '?')
    qp = opts[:query_parameters] || {}
    qp = qp.with_indifferent_access.except(*EXCLUDE_IN_PAGINATION_LINKS)
    base_url += "#{qp.to_query}&" if qp.present?
@@ -652,7 +652,7 @@ module Api
  end

  def accepts_jsonapi?
-    !!(/application\/vnd\.api\+json/ =~ request.headers['Accept'].to_s)
+    !!request.headers['Accept'].to_s.include?('application/vnd.api+json')
  end

  # Return a template url that follows the root links key for the jsonapi.org
@@ -227,7 +227,7 @@ module Api::V1::Submission

    # include the discussion topic entries
    if other_fields.include?('discussion_entries') &&
-       assignment.submission_types =~ /discussion_topic/ &&
+       assignment.submission_types&.include?('discussion_topic') &&
       assignment.discussion_topic
      # group assignments will have a child topic for each group.
      # it's also possible the student posted in the main topic, as well as the
@@ -138,7 +138,7 @@ module Canvas::OAuth
      if is_oob?(redirect_uri)
        controller.oauth2_auth_url(opts)
      else
-        has_params = redirect_uri =~ %r{\?}
+        has_params = redirect_uri.include?('?')
        redirect_uri + (has_params ? "&" : "?") + opts.to_query
      end
    end
@@ -60,9 +60,9 @@ module CC
                      http://www.imsglobal.org/xsd/imslticp_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticp_v1p0.xsd") do |blti_node|
        blti_node.blti :title, tool.name
        blti_node.blti :description, tool.description
-        if tool.url =~ %r{http://}
+        if tool.url&.include?('http://')
          blti_node.blti :launch_url, tool.url
-        elsif tool.url =~ %r{https://}
+        elsif tool.url&.include?('https://')
          blti_node.blti :secure_launch_url, tool.url
        end
        blti_node.blti(:icon, tool.settings[:icon_url]) if tool.settings[:icon_url]
@@ -48,7 +48,7 @@ module CC::Exporter::Epub::Converters
      return nil unless match.present?

      if sort_by_content
-        match[1] =~ /module/ ? nil : "#{match[1]}.xhtml##{match[2]}"
+        match[1].include?('module') ? nil : "#{match[1]}.xhtml##{match[2]}"
      else
        item = get_item(match[1], match[2])
        item[:href]
@@ -28,7 +28,7 @@ module CC::Importer::Standard
      resources_by_type(WEBCONTENT, "associatedcontent").each do |res|
        if res[:intended_use] || @convert_html_to_pages
          path = get_full_path(res[:href])
-          if path && File.exist?(path) && Attachment.mimetype(path) =~ /html/
+          if path && File.exist?(path) && Attachment.mimetype(path).include?('html')
            case res[:intended_use]
            when "assignment"
              new_assignments << { :migration_id => res[:migration_id], :description => File.read(path) }
@@ -256,12 +256,12 @@ class CourseLinkValidator
  # makes sure that links to course objects exist and are in a visible state
  def check_object_status(url, object: nil)
    return :missing_item unless valid_route?(url)
-    return :missing_item if url =~ /\/test_error/
+    return :missing_item if url.include?('/test_error')

    object ||= Context.find_asset_by_url(url)
    unless object
      return :missing_item unless [nil, 'syllabus'].include?(url.match(/\/courses\/\d+\/\w+\/(.+)/)&.[](1))
-      return :missing_item if url =~ /\/media_objects_iframe\//
+      return :missing_item if url.include?('/media_objects_iframe/')

      return nil
    end
@@ -22,7 +22,7 @@
module FileSplitter
  # OK, lame, but if there's a commas, call it comma-seperated
  def format
-    @format = @txt =~ /,/ ? :each_record : :each_line
+    @format = @txt.include?(',') ? :each_record : :each_line
  end

  # Send it a block, expects @txt to be set in the parser.
@@ -390,7 +390,7 @@ class GradebookImporter
  end

  def row_has_student_headers?(row)
-    row.length > 3 && row[0] =~ /Student/ && row[1] =~ /ID/
+    row.length > 3 && row[0].include?('Student') && row[1].include?('ID')
  end

  def update_column_count(row)
@@ -666,7 +666,7 @@ class GradebookImporter
      return true
    end

-    if row[0] =~ /Points Possible/
+    if row[0]&.include?('Points Possible')
      # this row is describing the assignment, has no student data
      process_pp(row)
      return true
@@ -72,7 +72,7 @@ module MicrosoftSync

      response.parsed_response
    rescue Errors::HTTPBadRequest
-      if response.body =~ /Tenant .* not found/ || response.body =~ /is neither a valid DNS name/
+      if response.body =~ /Tenant .* not found/ || response.body.include?('is neither a valid DNS name')
        raise TenantDoesNotExist
      end

@ -126,7 +126,7 @@ describe "Announcements API", type: :request do
|
|||
@params.merge(:context_codes => ["course_#{@course1.id}", "course_#{@course2.id}"],
|
||||
:start_date => start_date, :end_date => end_date, :per_page => 1))
|
||||
expect(json.length).to eq 1
|
||||
next_link = response.headers['Link'].split(",").detect { |link| link =~ /rel="next"/ }
|
||||
next_link = response.headers['Link'].split(",").detect { |link| link.include?('rel="next"') }
|
||||
expect(next_link).to match(/\/api\/v1\/announcements/)
|
||||
expect(next_link).to include "page=2"
|
||||
end
|
||||
|
|
|
@ -177,22 +177,22 @@ describe ConversationsController, type: :request do
|
|||
|
||||
expect(json.size).to eql 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/conversations/ }).to be_truthy
|
||||
expect(links.all? { |l| l.include?('api/v1/conversations') }).to be_truthy
|
||||
expect(links.all? { |l| l.scan(/scope=default/).size == 1 }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
|
||||
# get the last page
|
||||
json = api_call(:get, "/api/v1/conversations.json?scope=default&page=3&per_page=3",
|
||||
{ :controller => 'conversations', :action => 'index', :format => 'json', :scope => 'default', :page => '3', :per_page => '3' })
|
||||
expect(json.size).to eql 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/conversations/ }).to be_truthy
|
||||
expect(links.all? { |l| l.include?('api/v1/conversations') }).to be_truthy
|
||||
expect(links.all? { |l| l.scan(/scope=default/).size == 1 }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
end
|
||||
|
||||
it "filters conversations by scope" do
|
||||
|
@ -2440,10 +2440,10 @@ describe ConversationsController, type: :request do
|
|||
:user_id => @bob.id })
|
||||
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/conversations\/deleted/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="current"/) }).to match(/page=1&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=1&per_page=10>/)
|
||||
expect(links.all? { |l| l.include?('api/v1/conversations/deleted') }).to be_truthy
|
||||
expect(links.find { |l| l.include?('rel="current"') }).to match(/page=1&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=1&per_page=10>/)
|
||||
end
|
||||
|
||||
it 'can respond with multiple users data' do
|
||||
|
|
|
@ -3435,7 +3435,7 @@ describe CoursesController, type: :request do
|
|||
)
|
||||
links = response['Link'].split(",")
|
||||
expect(links).not_to be_empty
|
||||
expect(links.all? { |l| l =~ /enrollment_type=student/ }).to be_truthy
|
||||
expect(links.all? { |l| l.include?('enrollment_type=student') }).to be_truthy
|
||||
expect(links.first.scan(/per_page/).length).to eq 1
|
||||
end
|
||||
|
||||
|
|
|
@ -1578,9 +1578,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/discussion_topics/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
|
||||
# get the last page
|
||||
json = api_call(:get, "/api/v1/courses/#{@course.id}/discussion_topics.json?page=3&per_page=3",
|
||||
|
@ -1588,9 +1588,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/discussion_topics/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
end
|
||||
|
||||
it "works with groups" do
|
||||
|
@ -1681,9 +1681,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/groups\/#{group.id}\/discussion_topics/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
|
||||
# get the last page
|
||||
json = api_call(:get, "/api/v1/groups/#{group.id}/discussion_topics.json?page=3&per_page=3",
|
||||
|
@ -1691,9 +1691,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/groups\/#{group.id}\/discussion_topics/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
end
|
||||
|
||||
it "fulfills module viewed requirements when marking a topic read" do
|
||||
|
@ -1985,9 +1985,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.map { |e| e['id'] }).to eq entries.last(3).reverse.map(&:id)
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/discussion_topics\/#{@topic.id}\/entries/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
|
||||
# last page
|
||||
json = api_call(
|
||||
|
@ -1999,9 +1999,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.map { |e| e['id'] }).to eq [entries.first, @entry].map(&:id)
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/discussion_topics\/#{@topic.id}\/entries/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
end
|
||||
|
||||
it "only includes the first 10 replies for each top-level entry" do
|
||||
|
@ -2081,9 +2081,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.map { |e| e['id'] }).to eq replies.last(3).reverse.map(&:id)
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/discussion_topics\/#{@topic.id}\/entries\/#{@entry.id}\/replies/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
|
||||
# last page
|
||||
json = api_call(
|
||||
|
@ -2095,9 +2095,9 @@ describe DiscussionTopicsController, type: :request do
|
|||
expect(json.map { |e| e['id'] }).to eq [replies.first, @reply].map(&:id)
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/discussion_topics\/#{@topic.id}\/entries\/#{@entry.id}\/replies/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -514,9 +514,9 @@ describe ExternalToolsController, type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/groups\/#{group.id}\/external_tools/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
|
||||
# get the last page
|
||||
json = api_call(:get, "/api/v1/groups/#{group.id}/external_tools",
|
||||
|
@ -526,9 +526,9 @@ describe ExternalToolsController, type: :request do
|
|||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/groups\/#{group.id}\/external_tools/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
end
|
||||
|
||||
def index_call(context, type = "course")
|
||||
|
@ -647,9 +647,9 @@ describe ExternalToolsController, type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/#{type}s\/#{context.id}\/external_tools/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
|
||||
# get the last page
|
||||
json = api_call(:get, "/api/v1/#{type}s/#{context.id}/external_tools.json?page=3&per_page=3",
|
||||
|
@ -657,9 +657,9 @@ describe ExternalToolsController, type: :request do
|
|||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/#{type}s\/#{context.id}\/external_tools/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
end
|
||||
|
||||
def tool_with_everything(context, opts = {})
|
||||
|
|
|
@ -608,17 +608,17 @@ describe "Files API", type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/folders\/#{@root.id}\/files/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
|
||||
json = api_call(:get, "/api/v1/folders/#{@root.id}/files?per_page=3&page=3", @files_path_options.merge(:id => @root.id.to_param, :per_page => '3', :page => '3'), {})
|
||||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/folders\/#{@root.id}\/files/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
end
|
||||
|
||||
it "only returns names if requested" do
|
||||
|
@ -862,17 +862,17 @@ describe "Files API", type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/files/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
|
||||
json = api_call(:get, "/api/v1/courses/#{@course.id}/files?per_page=3&page=3", @files_path_options.merge(:per_page => '3', :page => '3'), {})
|
||||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/files/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
end
|
||||
|
||||
context "content_types" do
|
||||
|
|
|
@ -85,17 +85,17 @@ describe "Folders API", type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/folders\/#{@root.id}\/folders/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
|
||||
json = api_call(:get, @folders_path + "/#{@root.id}/folders?per_page=3&page=3", @folders_path_options.merge(:per_page => '3', :page => '3'), {})
|
||||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/folders\/#{@root.id}\/folders/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
end
|
||||
|
||||
context "student" do
|
||||
|
@ -979,18 +979,18 @@ describe "Folders API", type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/folders/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
|
||||
json = api_call(:get, "/api/v1/courses/#{@course.id}/folders",
|
||||
{ :controller => "folders", :action => "list_all_folders", :format => "json", :course_id => @course.id.to_param, :per_page => 3, :page => 3 })
|
||||
expect(json.length).to eq 1
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/courses\/#{@course.id}\/folders/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=3>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3&per_page=3>/)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -137,7 +137,7 @@ describe "Groups API", type: :request do
|
|||
json = api_call(:get, "/api/v1/users/self/groups", @category_path_options.merge(:action => "index"))
|
||||
expect(json).to eq [group_json(@community), group_json(@group)]
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/users\/self\/groups/ }).to be_truthy
|
||||
expect(links.all? { |l| l.include?('api/v1/users/self/groups') }).to be_truthy
|
||||
end
|
||||
|
||||
describe "show SIS fields based on manage_sis permissions" do
|
||||
|
|
|
@ -152,9 +152,9 @@ describe "Rubrics API", type: :request do
|
|||
expect(json.length).to eq 3
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/#{type}s\/#{context.id}\/rubrics/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
|
||||
# get the last page
|
||||
json = rubrics_api_call(context, { :per_page => '3', :page => '3' }, type)
|
||||
|
@ -162,9 +162,9 @@ describe "Rubrics API", type: :request do
|
|||
expect(json.length).to eq 2
|
||||
links = response.headers['Link'].split(",")
|
||||
expect(links.all? { |l| l =~ /api\/v1\/#{type}s\/#{context.id}\/rubrics/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=3/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=3/)
|
||||
end
|
||||
|
||||
describe "course level rubrics" do
|
||||
|
|
|
@ -77,7 +77,7 @@ describe AlignmentsHelper do
|
|||
let(:string) { link_to_outcome_alignment(@course, outcome) }
|
||||
|
||||
it "does not include an icon-* html class" do
|
||||
expect(string.match(/icon-/)).to be_falsey
|
||||
expect(string.include?('icon-')).to be_falsey
|
||||
end
|
||||
|
||||
it "is a blank link tag" do
|
||||
|
@ -92,7 +92,7 @@ describe AlignmentsHelper do
|
|||
}
|
||||
|
||||
it "does not include an icon-* html class" do
|
||||
expect(string.match(/icon-/)).to be_truthy
|
||||
expect(string.include?('icon-')).to be_truthy
|
||||
end
|
||||
|
||||
it "is a blank link tag" do
|
||||
|
@ -108,7 +108,7 @@ describe AlignmentsHelper do
|
|||
let(:html) { Nokogiri::HTML5.fragment(string).children[0] }
|
||||
|
||||
it "includes an id of 'alignment_blank'" do
|
||||
expect(string.match(/alignment_blank/)).to be_truthy
|
||||
expect(string.include?('alignment_blank')).to be_truthy
|
||||
end
|
||||
|
||||
it "includes class alignment" do
|
||||
|
@ -117,7 +117,7 @@ describe AlignmentsHelper do
|
|||
|
||||
it "includes 1 data-* attribute" do
|
||||
expect(html.keys.select { |k|
|
||||
k.match(/data-/)
|
||||
k.include?('data-')
|
||||
}).to include('data-url')
|
||||
end
|
||||
|
||||
|
@ -141,7 +141,7 @@ describe AlignmentsHelper do
|
|||
|
||||
it "data-ids & data-url attributes" do
|
||||
expect(html.keys.select { |k|
|
||||
k.match(/data-/)
|
||||
k.include?('data-')
|
||||
}).to include('data-id', 'data-url')
|
||||
end
|
||||
|
||||
|
@ -172,7 +172,7 @@ describe AlignmentsHelper do
|
|||
|
||||
it "has html 'data-has-rubric-association' data attritbute" do
|
||||
expect(html.keys.find { |k|
|
||||
k.match(/data-has-rubric-association/)
|
||||
k.include?('data-has-rubric-association')
|
||||
}).to be_truthy
|
||||
end
|
||||
end
|
||||
|
|
|
@ -997,11 +997,11 @@ describe Api do
|
|||
:last => 10,
|
||||
})
|
||||
expect(links.all? { |l| l =~ /www.example.com\/\?/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="current"/) }).to match(/page=8&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=4&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to match(/page=1&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=10&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="current"') }).to match(/page=8&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=4&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to match(/page=1&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=10&per_page=10>/)
|
||||
end
|
||||
|
||||
it "maintains query parameters" do
|
||||
|
@ -1043,11 +1043,11 @@ describe Api do
|
|||
:last => 10,
|
||||
})
|
||||
expect(links.all? { |l| l =~ /www.example.com\/\?/ }).to be_truthy
|
||||
expect(links.find { |l| l.match(/rel="current"/) }).to be_nil
|
||||
expect(links.find { |l| l.match(/rel="next"/) }).to match(/page=4&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="prev"/) }).to match(/page=2&per_page=10>/)
|
||||
expect(links.find { |l| l.match(/rel="first"/) }).to be_nil
|
||||
expect(links.find { |l| l.match(/rel="last"/) }).to match(/page=10&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="current"') }).to be_nil
|
||||
expect(links.find { |l| l.include?('rel="next"') }).to match(/page=4&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="prev"') }).to match(/page=2&per_page=10>/)
|
||||
expect(links.find { |l| l.include?('rel="first"') }).to be_nil
|
||||
expect(links.find { |l| l.include?('rel="last"') }).to match(/page=10&per_page=10>/)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -152,7 +152,7 @@ describe Canvas::ICU do
|
|||
include_examples "ICU Collator"
|
||||
|
||||
before do
|
||||
skip "Postgres does not have collation support" if ActiveRecord::Base.best_unicode_collation_key('col') =~ /LOWER/
|
||||
skip "Postgres does not have collation support" if ActiveRecord::Base.best_unicode_collation_key('col').include?('LOWER')
|
||||
end
|
||||
|
||||
def collate(values)
|
||||
|
|
|
@ -73,7 +73,7 @@ describe TokenScopes do
|
|||
|
||||
it "does not include the optional format part of the route path" do
|
||||
generated_scopes.each do |scope|
|
||||
expect(/\(\.:format\)/ =~ scope).to be_nil
|
||||
expect(scope.include?('(.:format)')).to be false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -5060,7 +5060,7 @@ describe Assignment do
|
|||
assignment_model(:due_at => "", :course => @course)
|
||||
res = @assignment.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTSTART/)).to be_nil
|
||||
expect(res.include?('DTSTART')).to eq false
|
||||
end
|
||||
|
||||
it ".to_ics should not return data for null due dates" do
|
||||
|
@ -5076,9 +5076,9 @@ describe Assignment do
|
|||
@assignment.updated_at = Time.at(1220443500) # 3 Sep 2008 12:05pm (UTC)
|
||||
res = @assignment.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTEND:20080903T115500Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTART:20080903T115500Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTAMP:20080903T120500Z/)).not_to be_nil
|
||||
expect(res.include?('DTEND:20080903T115500Z')).not_to be_nil
|
||||
expect(res.include?('DTSTART:20080903T115500Z')).not_to be_nil
|
||||
expect(res.include?('DTSTAMP:20080903T120500Z')).not_to be_nil
|
||||
end
|
||||
|
||||
it ".to_ics should return correct dates even with different time_zone_edited" do
|
||||
|
@ -5088,9 +5088,9 @@ describe Assignment do
|
|||
@assignment.updated_at = Time.at(1220443500) # 3 Sep 2008 12:05pm (UTC)
|
||||
res = @assignment.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTEND:20080903T115500Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTART:20080903T115500Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTAMP:20080903T120500Z/)).not_to be_nil
|
||||
expect(res.include?('DTEND:20080903T115500Z')).not_to be_nil
|
||||
expect(res.include?('DTSTART:20080903T115500Z')).not_to be_nil
|
||||
expect(res.include?('DTSTAMP:20080903T120500Z')).not_to be_nil
|
||||
end
|
||||
|
||||
it ".to_ics should return correct dates even with different timezone on call midnight" do
|
||||
|
@ -5101,9 +5101,9 @@ describe Assignment do
|
|||
Time.zone = 'HST'
|
||||
res = @assignment.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTEND:20080903T235900Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTART:20080903T235900Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTAMP:20080903T120500Z/)).not_to be_nil
|
||||
expect(res.include?('DTEND:20080903T235900Z')).not_to be_nil
|
||||
expect(res.include?('DTSTART:20080903T235900Z')).not_to be_nil
|
||||
expect(res.include?('DTSTAMP:20080903T120500Z')).not_to be_nil
|
||||
end
|
||||
|
||||
it ".to_ics should return string data for assignments with due dates in correct tz" do
|
||||
|
@ -5113,9 +5113,9 @@ describe Assignment do
|
|||
@assignment.updated_at = Time.at(1220472300) # 3 Sep 2008 12:05pm (AKDT)
|
||||
res = @assignment.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTEND:20080903T195500Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTART:20080903T195500Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTAMP:20080903T200500Z/)).not_to be_nil
|
||||
expect(res.include?('DTEND:20080903T195500Z')).not_to be_nil
|
||||
expect(res.include?('DTSTART:20080903T195500Z')).not_to be_nil
|
||||
expect(res.include?('DTSTAMP:20080903T200500Z')).not_to be_nil
|
||||
end
|
||||
|
||||
it ".to_ics should return data for assignments with due dates" do
|
||||
|
@ -5153,8 +5153,8 @@ describe Assignment do
|
|||
assignment_model(:due_at => "Sep 3 2008 11:59pm", :course => @course)
|
||||
expect(@assignment.all_day).to eql(true)
|
||||
res = @assignment.to_ics
|
||||
expect(res.match(/DTSTART;VALUE=DATE:20080903/)).not_to be_nil
|
||||
expect(res.match(/DTEND;VALUE=DATE:20080903/)).not_to be_nil
|
||||
expect(res.include?('DTSTART;VALUE=DATE:20080903')).not_to be_nil
|
||||
expect(res.include?('DTEND;VALUE=DATE:20080903')).not_to be_nil
|
||||
end
|
||||
|
||||
it ".to_ics should populate uid and summary fields" do
|
||||
|
|
|
@ -149,7 +149,7 @@ describe CalendarEvent do
|
|||
calendar_event_model(:start_at => "", :end_at => "")
|
||||
res = @event.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTSTART/)).to be_nil
|
||||
expect(res.include?('DTSTART')).to be false
|
||||
end
|
||||
|
||||
it "does not return data for null times" do
|
||||
|
@ -165,9 +165,9 @@ describe CalendarEvent do
|
|||
@event.updated_at = Time.at(1220443500) # 3 Sep 2008 12:05pm (UTC)
|
||||
res = @event.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTSTART:20080903T115500Z/)).not_to be_nil
|
||||
expect(res.match(/DTEND:20080903T120000Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTAMP:20080903T120500Z/)).not_to be_nil
|
||||
expect(res.include?('DTSTART:20080903T115500Z')).not_to be_nil
|
||||
expect(res.include?('DTEND:20080903T120000Z')).not_to be_nil
|
||||
expect(res.include?('DTSTAMP:20080903T120500Z')).not_to be_nil
|
||||
end
|
||||
|
||||
it "returns string data for events with times in correct tz" do
|
||||
|
@ -177,9 +177,9 @@ describe CalendarEvent do
|
|||
@event.updated_at = Time.at(1220472300) # 3 Sep 2008 12:05pm (AKDT)
|
||||
res = @event.to_ics
|
||||
expect(res).not_to be_nil
|
||||
expect(res.match(/DTSTART:20080903T195500Z/)).not_to be_nil
|
||||
expect(res.match(/DTEND:20080903T200000Z/)).not_to be_nil
|
||||
expect(res.match(/DTSTAMP:20080903T200500Z/)).not_to be_nil
|
||||
expect(res.include?('DTSTART:20080903T195500Z')).not_to be_nil
|
||||
expect(res.include?('DTEND:20080903T200000Z')).not_to be_nil
|
||||
expect(res.include?('DTSTAMP:20080903T200500Z')).not_to be_nil
|
||||
end
|
||||
|
||||
it "returns data for events with times" do
|
||||
|
@ -222,8 +222,8 @@ describe CalendarEvent do
|
|||
expect(@event.all_day).to eql(true)
|
||||
expect(@event.end_at).to eql(@event.start_at)
|
||||
res = @event.to_ics
|
||||
expect(res.match(/DTSTART;VALUE=DATE:20080903/)).not_to be_nil
|
||||
expect(res.match(/DTEND;VALUE=DATE:20080903/)).not_to be_nil
|
||||
expect(res.include?('DTSTART;VALUE=DATE:20080903')).not_to be_nil
|
||||
expect(res.include?('DTEND;VALUE=DATE:20080903')).not_to be_nil
|
||||
end
|
||||
|
||||
it "returns a plain-text description" do
|
||||
|
|
|
@ -102,8 +102,8 @@ describe ContentMigration do
|
|||
|
||||
expect(@cm.warnings.detect { |w| w =~ /account External Tool.+must be configured/ }).not_to be_nil
|
||||
expect(@cm.warnings.detect { |w| w =~ /external Learning Outcome couldn't be found.+creating a copy/ }).not_to be_nil
|
||||
expect(@cm.warnings.detect { |w| w =~ /Couldn't find the question bank/ }).not_to be_nil
|
||||
expect(@cm.warnings.detect { |w| w =~ /referenced a grading scheme that was not found/ }).not_to be_nil
|
||||
expect(@cm.warnings.detect { |w| w.include?("Couldn't find the question bank") }).not_to be_nil
|
||||
expect(@cm.warnings.detect { |w| w.include?('referenced a grading scheme that was not found') }).not_to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -189,7 +189,7 @@ describe ConversationMessage do
|
|||
@submission = @assignment.submit_homework(@user, :body => 'some message')
|
||||
@submission.add_comment(:author => @user, :comment => "hello")
|
||||
|
||||
expect(StreamItem.all.select { |i| i.asset_string =~ /conversation_/ }).to be_empty
|
||||
expect(StreamItem.all.select { |i| i.asset_string.include?('conversation_') }).to be_empty
|
||||
end
|
||||
|
||||
it "does not create additional stream_items for additional messages in the same conversation" do
|
||||
|
|
|
@ -2137,7 +2137,7 @@ describe Course, "gradebook_to_csv" do
|
|||
assignments, groups = [], []
|
||||
rows.headers.each do |column|
|
||||
assignments << column.sub(/ \([0-9]+\)/, '') if column =~ /Assignment \d+/
|
||||
groups << column if column =~ /Some Assignment Group/
|
||||
groups << column if column.include?('Some Assignment Group')
|
||||
end
|
||||
expect(assignments).to eq ["Assignment 02", "Assignment 03", "Assignment 01", "Assignment 05", "Assignment 04", "Assignment 06", "Assignment 07", "Assignment 09", "Assignment 11", "Assignment 12", "Assignment 13", "Assignment 14", "Assignment 08", "Assignment 10"]
|
||||
expect(groups).to eq [
|
||||
|
|
|
@ -78,7 +78,7 @@ describe Quizzes::QuizRegrader::Answer do
|
|||
sent_params, sent_answer_data = args
|
||||
if question.question_data[:question_type] == 'multiple_answers_question'
|
||||
answer.each do |k, v|
|
||||
next unless /answer/ =~ k
|
||||
next unless k.to_s.include?('answer')
|
||||
|
||||
key = "question_#{question.id}_#{k}"
|
||||
expect(sent_answer_data[key]).to eq v
|
||||
|
|
|
@ -96,8 +96,8 @@ describe "calendar2" do
|
|||
|
||||
get "/calendar2"
|
||||
events = ff('.fc-event')
|
||||
event1 = events.detect { |e| e.text =~ /Assignment 1/ }
|
||||
event2 = events.detect { |e| e.text =~ /Assignment 2/ }
|
||||
event1 = events.detect { |e| e.text.include?('Assignment 1') }
|
||||
event2 = events.detect { |e| e.text.include?('Assignment 2') }
|
||||
expect(event1).not_to be_nil
|
||||
expect(event2).not_to be_nil
|
||||
expect(event1).not_to eq event2
|
||||
|
|
|
@ -109,7 +109,7 @@ module ManageGroupsCommon
|
|||
|
||||
def expand_group(group_id)
|
||||
group_selector = (group_id == "unassigned" ? ".unassigned-students" : ".group[data-id=\"#{group_id}\"]")
|
||||
return if group_selector == ".unassigned-students" || f(group_selector).attribute(:class) =~ /group-expanded/
|
||||
return if group_selector == ".unassigned-students" || f(group_selector).attribute(:class).include?('group-expanded')
|
||||
|
||||
fj("#{group_selector} .toggle-group").click
|
||||
wait_for_ajax_requests
|
||||
|
|
|
@ -595,7 +595,7 @@ module QuizzesCommon
|
|||
data = []
|
||||
els.each do |el|
|
||||
# its a question
|
||||
if el['class'].match(/question_holder/)
|
||||
if el['class'].include?('question_holder')
|
||||
id = el.find_element(:css, 'a')['name'].gsub(/question_/, '')
|
||||
question = {
|
||||
:id => id.to_i,
|
||||
|
@ -612,7 +612,7 @@ module QuizzesCommon
|
|||
end
|
||||
|
||||
# its a group
|
||||
elsif el['class'].match(/group_top/)
|
||||
elsif el['class'].include?('group_top')
|
||||
last_group_id = el['id'].gsub(/group_top_/, '').to_i
|
||||
data << {
|
||||
:id => last_group_id,
|
||||
|
@ -622,7 +622,7 @@ module QuizzesCommon
|
|||
}
|
||||
|
||||
# group ended
|
||||
elsif el['class'].match(/group_bottom/)
|
||||
elsif el['class'].include?('group_bottom')
|
||||
last_group_id = nil
|
||||
end
|
||||
end
|
||||
|
|
|
@ -75,7 +75,7 @@ describe "profile" do
|
|||
# check to see if error box popped up
|
||||
errorboxes = ff('.error_text')
|
||||
expect(errorboxes.length).to be > 1
|
||||
expect(errorboxes.any? { |errorbox| errorbox.text =~ /Invalid old password for the login/ }).to be_truthy
|
||||
expect(errorboxes.any? { |errorbox| errorbox.text.include?('Invalid old password for the login') }).to be_truthy
|
||||
end
|
||||
|
||||
it "changes the password" do
|
||||
|
@ -89,14 +89,14 @@ describe "profile" do
|
|||
log_in_to_settings
|
||||
change_password('asdfasdf', SecureRandom.hex(128))
|
||||
errorboxes = ff('.error_text')
|
||||
expect(errorboxes.any? { |errorbox| errorbox.text =~ /Can't exceed 255 characters/ }).to be_truthy
|
||||
expect(errorboxes.any? { |errorbox| errorbox.text.include?("Can't exceed 255 characters") }).to be_truthy
|
||||
end
|
||||
|
||||
it "rejects passwords shorter than 8 characters", priority: "2", test_id: 1055503 do
|
||||
log_in_to_settings
|
||||
change_password('asdfasdf', SecureRandom.hex(2))
|
||||
errorboxes = ff('.error_text')
|
||||
expect(errorboxes.any? { |errorbox| errorbox.text =~ /Must be at least 8 characters/ }).to be_truthy
|
||||
expect(errorboxes.any? { |errorbox| errorbox.text.include?('Must be at least 8 characters') }).to be_truthy
|
||||
end
|
||||
|
||||
context "non password tests" do
|
||||
|
|
|
@ -282,7 +282,7 @@ describe 'user_content post processing' do
|
|||
browser_tabs = driver.window_handles
|
||||
expect(browser_tabs.length).to eq(1)
|
||||
|
||||
modifier_key = /mac/ =~ driver.capabilities.platform ? :meta : :control
|
||||
modifier_key = driver.capabilities.platform.include?('mac') ? :meta : :control
|
||||
file_link = f('a#thelink')
|
||||
driver.action.key_down(modifier_key).click(file_link).key_up(modifier_key).perform
|
||||
|
||||
|
@ -307,7 +307,7 @@ describe 'user_content post processing' do
|
|||
browser_tabs = driver.window_handles
|
||||
expect(browser_tabs.length).to eq(1)
|
||||
|
||||
modifier_key = /mac/ =~ driver.capabilities.platform ? :meta : :control
|
||||
modifier_key = driver.capabilities.platform.include?('mac') ? :meta : :control
|
||||
file_link = f('a#thelink')
|
||||
driver.action.key_down(modifier_key).click(file_link).key_up(modifier_key).perform
|
||||
|
||||
|
|
|
@@ -502,7 +502,7 @@ module CustomSeleniumActions
    fj("#ui-datepicker-div a:contains(#{day_text})").click
  end

-  MODIFIER_KEY = RUBY_PLATFORM =~ /darwin/ ? :command : :control
+  MODIFIER_KEY = RUBY_PLATFORM.include?('darwin') ? :command : :control
  def replace_content(el, value, options = {})
    # el.clear doesn't work with textboxes that have a pattern attribute that's why we have :backspace.
    # We are treating the chrome browser different because Selenium cannot send :command key to chrome on Mac.