apply autoformatter to all files
refs DE-643

Change-Id: I9482afa27e6b8734640741fca784f971f782af26
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/264050
Reviewed-by: Andrea Cirulli <andrea.cirulli@instructure.com>
QA-Review: Aaron Ogata <aogata@instructure.com>
Product-Review: Aaron Ogata <aogata@instructure.com>
Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com>
commit 70b06aaa0b
parent 957e6417da
@@ -0,0 +1,14 @@
+{
+  "extends": "recommended",
+  "rules": {
+    "formatting.ClassStartsWithBlankLine": {
+      "enabled": false
+    },
+    "formatting.ClassEndsWithBlankLine": {
+      "enabled": false
+    },
+    "formatting.Indentation": {
+      "spacesPerIndentLevel": 2
+    }
+  }
+}
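The config added above tunes the formatter's CodeNarc-style rule set: it starts from the "recommended" rules, disables the ClassStartsWithBlankLine and ClassEndsWithBlankLine checks, and pins indentation at two spaces per level. For reference, a small illustrative Groovy snippet (not taken from the repository) written in the style the hunks below converge on: single-quoted literals for non-interpolated strings, a space between control keywords and their parentheses, no trailing semicolons, and two-space indentation.

// Illustrative only; this function is not part of canvas-lms.
def greet(name) {
  def prefix = 'Hello'      // plain literal: single quotes
  if (name) {               // space between keyword and parenthesis
    return "$prefix, $name" // double quotes only where interpolation is needed
  }
  return prefix             // no trailing semicolon
}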
@@ -17,9 +17,9 @@
  * You should have received a copy of the GNU Affero General Public License along
  * with this program. If not, see <http://www.gnu.org/licenses/>.
  */
-def FILES_CHANGED_STAGE = "Detect Files Changed"
-def JS_BUILD_IMAGE_STAGE = "Javascript (Build Image)"
-def RUN_MIGRATIONS_STAGE = "Run Migrations"
+def FILES_CHANGED_STAGE = 'Detect Files Changed'
+def JS_BUILD_IMAGE_STAGE = 'Javascript (Build Image)'
+def RUN_MIGRATIONS_STAGE = 'Run Migrations'
 
 def buildParameters = [
   string(name: 'GERRIT_REFSPEC', value: "${env.GERRIT_REFSPEC}"),
@@ -51,11 +51,11 @@ def getSummaryUrl()
 }
 
 def getDockerWorkDir() {
-  return env.GERRIT_PROJECT == "canvas-lms" ? "/usr/src/app" : "/usr/src/app/gems/plugins/${env.GERRIT_PROJECT}"
+  return env.GERRIT_PROJECT == 'canvas-lms' ? '/usr/src/app' : "/usr/src/app/gems/plugins/${env.GERRIT_PROJECT}"
 }
 
 def getLocalWorkDir() {
-  return env.GERRIT_PROJECT == "canvas-lms" ? "." : "gems/plugins/${env.GERRIT_PROJECT}"
+  return env.GERRIT_PROJECT == 'canvas-lms' ? '.' : "gems/plugins/${env.GERRIT_PROJECT}"
 }
 
 // return false if the current patchset tag doesn't match the
@@ -65,7 +65,7 @@ def isPatchsetPublishable()
 }
 
 def isPatchsetRetriggered() {
-  if(env.IS_AUTOMATIC_RETRIGGER == '1') {
+  if (env.IS_AUTOMATIC_RETRIGGER == '1') {
     return true
   }
 
@@ -88,11 +88,11 @@ def postFn(status)
 
     buildSummaryReport.publishReport('Build Summary Report', status)
 
-    if(isPatchsetPublishable()) {
+    if (isPatchsetPublishable()) {
       dockerUtils.tagRemote(env.PATCHSET_TAG, env.EXTERNAL_TAG)
     }
 
-    if(status == 'SUCCESS' && configuration.isChangeMerged() && isPatchsetPublishable()) {
+    if (status == 'SUCCESS' && configuration.isChangeMerged() && isPatchsetPublishable()) {
       dockerUtils.tagRemote(env.PATCHSET_TAG, env.MERGE_TAG)
       dockerUtils.tagRemote(env.CASSANDRA_IMAGE_TAG, env.CASSANDRA_MERGE_IMAGE)
       dockerUtils.tagRemote(env.DYNAMODB_IMAGE_TAG, env.DYNAMODB_MERGE_IMAGE)
@@ -100,7 +100,7 @@ def postFn(status)
       }
     }
   } finally {
-    if(status == 'SUCCESS') {
+    if (status == 'SUCCESS') {
       maybeSlackSendSuccess()
     } else {
       maybeSlackSendFailure()
@@ -119,21 +119,21 @@ def shouldPatchsetRetrigger()
 }
 
 def maybeRetrigger() {
-  if(shouldPatchsetRetrigger() && !isPatchsetRetriggered()) {
+  if (shouldPatchsetRetrigger() && !isPatchsetRetriggered()) {
     def retriggerParams = currentBuild.rawBuild.getAction(ParametersAction).getParameters()
 
     retriggerParams = retriggerParams.findAll { record ->
       record.name != 'IS_AUTOMATIC_RETRIGGER'
     }
 
-    retriggerParams << new StringParameterValue('IS_AUTOMATIC_RETRIGGER', "1")
+    retriggerParams << new StringParameterValue('IS_AUTOMATIC_RETRIGGER', '1')
 
     build(job: env.JOB_NAME, parameters: retriggerParams, propagate: false, wait: false)
   }
 }
 
 def maybeSlackSendFailure() {
-  if(configuration.isChangeMerged()) {
+  if (configuration.isChangeMerged()) {
     def branchSegment = env.GERRIT_BRANCH ? "[$env.GERRIT_BRANCH]" : ''
     def authorSlackId = env.GERRIT_EVENT_ACCOUNT_EMAIL ? slackUserIdFromEmail(email: env.GERRIT_EVENT_ACCOUNT_EMAIL, botUser: true, tokenCredentialId: 'slack-user-id-lookup') : ''
     def authorSlackMsg = authorSlackId ? "<@$authorSlackId>" : env.GERRIT_EVENT_ACCOUNT_NAME
@@ -155,7 +155,7 @@ def maybeSlackSendFailure()
 }
 
 def maybeSlackSendSuccess() {
-  if(configuration.isChangeMerged() && isPatchsetRetriggered()) {
+  if (configuration.isChangeMerged() && isPatchsetRetriggered()) {
     slackSend(
       channel: getSlackChannel(),
       color: 'good',
@@ -171,7 +171,7 @@ def maybeSlackSendSuccess()
 }
 
 def maybeSlackSendRetrigger() {
-  if(configuration.isChangeMerged() && isPatchsetRetriggered()) {
+  if (configuration.isChangeMerged() && isPatchsetRetriggered()) {
     slackSend(
       channel: getSlackChannel(),
       color: 'warning',
@@ -194,7 +194,7 @@ def getSlackChannel()
 def getCanvasBuildsRefspec() {
   def commitMessage = env.GERRIT_CHANGE_COMMIT_MESSAGE ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : null
 
-  if(env.GERRIT_EVENT_TYPE == 'change-merged' || !commitMessage || !(commitMessage =~ CANVAS_BUILDS_REFSPEC_REGEX).find()) {
+  if (env.GERRIT_EVENT_TYPE == 'change-merged' || !commitMessage || !(commitMessage =~ CANVAS_BUILDS_REFSPEC_REGEX).find()) {
     return env.GERRIT_BRANCH.contains('stable/') ? env.GERRIT_BRANCH : 'master'
   }
 
@@ -204,9 +204,9 @@ def getCanvasBuildsRefspec()
 @groovy.transform.Field def CANVAS_LMS_REFSPEC_REGEX = /\[canvas\-lms\-refspec=(.+?)\]/
 def getCanvasLmsRefspec() {
   // If stable branch, first search commit message for canvas-lms-refspec. If not present use stable branch head on origin.
-  if(env.GERRIT_BRANCH.contains('stable/')) {
+  if (env.GERRIT_BRANCH.contains('stable/')) {
     def commitMessage = env.GERRIT_CHANGE_COMMIT_MESSAGE ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : null
-    if((commitMessage =~ CANVAS_LMS_REFSPEC_REGEX).find()) {
+    if ((commitMessage =~ CANVAS_LMS_REFSPEC_REGEX).find()) {
       return configuration.canvasLmsRefspec()
     }
     return "+refs/heads/$GERRIT_BRANCH:refs/remotes/origin/$GERRIT_BRANCH"
@@ -216,7 +216,7 @@ def getCanvasLmsRefspec()
 // =========
 
 library "canvas-builds-library@${getCanvasBuildsRefspec()}"
-loadLocalLibrary("local-lib", "build/new-jenkins/library")
+loadLocalLibrary('local-lib', 'build/new-jenkins/library')
 
 configuration.setUseCommitMessageFlags(env.GERRIT_EVENT_TYPE != 'change-merged')
 protectedNode.setReportUnhandledExceptions(!env.JOB_NAME.endsWith('Jenkinsfile'))
@@ -241,7 +241,6 @@ pipeline {
     // e.g. postgres-12-ruby-2.6
     TAG_SUFFIX = imageTag.suffix()
 
-
     // e.g. canvas-lms:01.123456.78-postgres-12-ruby-2.6
     PATCHSET_TAG = imageTag.patchset()
 
@@ -259,17 +258,17 @@ pipeline {
     RUBY = configuration.ruby() // RUBY_VERSION is a reserved keyword for ruby installs
     RSPEC_PROCESSES = 4
 
-    LINTER_DEBUG_IMAGE = "${configuration.buildRegistryPath("linter-debug")}:${imageTagVersion()}-$TAG_SUFFIX"
+    LINTER_DEBUG_IMAGE = "${configuration.buildRegistryPath('linter-debug')}:${imageTagVersion()}-$TAG_SUFFIX"
 
     CASSANDRA_PREFIX = configuration.buildRegistryPath('cassandra-migrations')
     DYNAMODB_PREFIX = configuration.buildRegistryPath('dynamodb-migrations')
-    KARMA_BUILDER_PREFIX = configuration.buildRegistryPath("karma-builder")
-    KARMA_RUNNER_PREFIX = configuration.buildRegistryPath("karma-runner")
+    KARMA_BUILDER_PREFIX = configuration.buildRegistryPath('karma-builder')
+    KARMA_RUNNER_PREFIX = configuration.buildRegistryPath('karma-runner')
     POSTGRES_PREFIX = configuration.buildRegistryPath('postgres-migrations')
-    RUBY_RUNNER_PREFIX = configuration.buildRegistryPath("ruby-runner")
-    YARN_RUNNER_PREFIX = configuration.buildRegistryPath("yarn-runner")
-    WEBPACK_BUILDER_PREFIX = configuration.buildRegistryPath("webpack-builder")
-    WEBPACK_CACHE_PREFIX = configuration.buildRegistryPath("webpack-cache")
+    RUBY_RUNNER_PREFIX = configuration.buildRegistryPath('ruby-runner')
+    YARN_RUNNER_PREFIX = configuration.buildRegistryPath('yarn-runner')
+    WEBPACK_BUILDER_PREFIX = configuration.buildRegistryPath('webpack-builder')
+    WEBPACK_CACHE_PREFIX = configuration.buildRegistryPath('webpack-cache')
 
     IMAGE_CACHE_BUILD_SCOPE = configuration.gerritChangeNumber()
     IMAGE_CACHE_MERGE_SCOPE = configuration.gerritBranchSanitized()
@@ -300,19 +299,19 @@ pipeline {
       node('master') {
         if (configuration.skipCi()) {
           currentBuild.result = 'NOT_BUILT'
-          gerrit.submitLintReview("-2", "Build not executed due to [skip-ci] flag")
-          error "[skip-ci] flag enabled: skipping the build"
+          gerrit.submitLintReview('-2', 'Build not executed due to [skip-ci] flag')
+          error '[skip-ci] flag enabled: skipping the build'
           return
-        } else if(extendedStage.isAllowStagesFilterUsed() || extendedStage.isIgnoreStageResultsFilterUsed() || extendedStage.isSkipStagesFilterUsed()) {
-          gerrit.submitLintReview("-2", "One or more build flags causes a subset of the build to be run")
+        } else if (extendedStage.isAllowStagesFilterUsed() || extendedStage.isIgnoreStageResultsFilterUsed() || extendedStage.isSkipStagesFilterUsed()) {
+          gerrit.submitLintReview('-2', 'One or more build flags causes a subset of the build to be run')
         } else {
-          gerrit.submitLintReview("0")
+          gerrit.submitLintReview('0')
         }
       }
 
       // Ensure that all build flags are compatible.
-      if(configuration.getBoolean('change-merged') && configuration.isValueDefault('build-registry-path')) {
-        error "Manually triggering the change-merged build path must be combined with a custom build-registry-path"
+      if (configuration.getBoolean('change-merged') && configuration.isValueDefault('build-registry-path')) {
+        error 'Manually triggering the change-merged build path must be combined with a custom build-registry-path'
         return
       }
 
@@ -320,7 +319,7 @@ pipeline {
 
       def buildSummaryReportHooks = [
         onStageEnded: { stageName, _, buildResult ->
-          if(buildResult) {
+          if (buildResult) {
             buildSummaryReport.addFailureRun(stageName, buildResult)
             buildSummaryReport.addRunTestActions(stageName, buildResult)
             buildSummaryReport.setStageIgnored(stageName)
@@ -342,18 +341,18 @@ pipeline {
       buildParameters += string(name: 'PATCHSET_TAG', value: "${env.PATCHSET_TAG}")
       buildParameters += string(name: 'POSTGRES', value: "${env.POSTGRES}")
       buildParameters += string(name: 'RUBY', value: "${env.RUBY}")
-      buildParameters += string(name: 'CANVAS_RAILS6_0', value: "1")
+      buildParameters += string(name: 'CANVAS_RAILS6_0', value: '1')
 
       // If modifying any of our Jenkinsfiles set JENKINSFILE_REFSPEC for sub-builds to use Jenkinsfiles in
       // the gerrit rather than master. Stable branches also need to check out the JENKINSFILE_REFSPEC to prevent
       // the job default from pulling master.
-      if(env.GERRIT_PROJECT == 'canvas-lms' && env.JOB_NAME.endsWith('Jenkinsfile')) {
+      if (env.GERRIT_PROJECT == 'canvas-lms' && env.JOB_NAME.endsWith('Jenkinsfile')) {
        buildParameters += string(name: 'JENKINSFILE_REFSPEC', value: "${env.GERRIT_REFSPEC}")
-      } else if(env.GERRIT_PROJECT == 'canvas-lms' && env.JOB_NAME.endsWith('stable')) {
+      } else if (env.GERRIT_PROJECT == 'canvas-lms' && env.JOB_NAME.endsWith('stable')) {
        buildParameters += string(name: 'JENKINSFILE_REFSPEC', value: "${env.GERRIT_REFSPEC}")
       }
 
-      if (env.GERRIT_PROJECT != "canvas-lms") {
+      if (env.GERRIT_PROJECT != 'canvas-lms') {
        // the plugin builds require the canvas lms refspec to be different. so only
        // set this refspec if the main build is requesting it to be set.
        // NOTE: this is only being set in main-from-plugin build. so main-canvas wont run this.
@@ -382,7 +381,7 @@ pipeline {
 
       // Remove the @tmp directory created by dir() for plugin builds, so bundler doesn't get confused.
       // https://issues.jenkins.io/browse/JENKINS-52750
-      if(env.GERRIT_PROJECT != "canvas-lms") {
+      if (env.GERRIT_PROJECT != 'canvas-lms') {
        sh "rm -vrf $LOCAL_WORKDIR@tmp"
       }
 
@@ -428,7 +427,7 @@ pipeline {
       extendedStage('Linters - Run Tests - Code').queue(nestedStages, lintersStage.&codeStage)
       extendedStage('Linters - Run Tests - Webpack').queue(nestedStages, lintersStage.&webpackStage)
       extendedStage('Linters - Run Tests - Yarn')
-        .required(env.GERRIT_PROJECT == "canvas-lms" && git.changedFiles(['package.json', 'yarn.lock'], 'HEAD^'))
+        .required(env.GERRIT_PROJECT == 'canvas-lms' && git.changedFiles(['package.json', 'yarn.lock'], 'HEAD^'))
        .queue(nestedStages, lintersStage.&yarnStage)
 
       parallel(nestedStages)
@@ -468,28 +467,28 @@ pipeline {
       parallel(nestedStages)
     }
 
-    extendedStage("Javascript (Waiting for Dependencies)").obeysAllowStages(false).waitsFor(JS_BUILD_IMAGE_STAGE, 'Builder').queue(rootStages) {
+    extendedStage('Javascript (Waiting for Dependencies)').obeysAllowStages(false).waitsFor(JS_BUILD_IMAGE_STAGE, 'Builder').queue(rootStages) {
       def nestedStages = [:]
 
       extendedStage('Javascript (Jest)')
        .hooks(buildSummaryReportHooks)
        .queue(nestedStages, jobName: '/Canvas/test-suites/JS', buildParameters: buildParameters + [
          string(name: 'KARMA_RUNNER_IMAGE', value: env.KARMA_RUNNER_IMAGE),
-          string(name: 'TEST_SUITE', value: "jest"),
+          string(name: 'TEST_SUITE', value: 'jest'),
        ])
 
      extendedStage('Javascript (Coffeescript)')
        .hooks(buildSummaryReportHooks)
        .queue(nestedStages, jobName: '/Canvas/test-suites/JS', buildParameters: buildParameters + [
          string(name: 'KARMA_RUNNER_IMAGE', value: env.KARMA_RUNNER_IMAGE),
-          string(name: 'TEST_SUITE', value: "coffee"),
+          string(name: 'TEST_SUITE', value: 'coffee'),
        ])
 
      extendedStage('Javascript (Karma)')
        .hooks(buildSummaryReportHooks)
        .queue(nestedStages, jobName: '/Canvas/test-suites/JS', buildParameters: buildParameters + [
          string(name: 'KARMA_RUNNER_IMAGE', value: env.KARMA_RUNNER_IMAGE),
-          string(name: 'TEST_SUITE', value: "karma"),
+          string(name: 'TEST_SUITE', value: 'karma'),
        ])
 
      parallel(nestedStages)

@@ -22,7 +22,6 @@ import org.junit.runner.Description
 import org.junit.Rule
 import com.lesfurets.jenkins.unit.*
 
-
 class BaseTest extends BasePipelineTest {
 
   // Implement a rule to intercept test failures and print the callStack

@@ -21,7 +21,7 @@ def getRailsLoadAllLocales()
 }
 
 def handleDockerBuildFailure(imagePrefix, e) {
-  if(configuration.isChangeMerged() || configuration.getBoolean('upload-docker-image-failures', 'false')) {
+  if (configuration.isChangeMerged() || configuration.getBoolean('upload-docker-image-failures', 'false')) {
     // DEBUG: In some cases, such as the the image build failing only on Jenkins, it can be useful to be able to
     // download the last successful layer to debug locally. If we ever start using buildkit for the relevant
     // images, then this approach will have to change as buildkit doesn't save the intermediate layers as images.
@@ -53,10 +53,10 @@ def slackSendCacheBuild(block)
   def cur_partition = []
   def max_entries = 5
 
-  while(i < buildLogPartsLength) {
+  while (i < buildLogPartsLength) {
     cur_partition.add(buildLogParts[i])
 
-    if(cur_partition.size() >= max_entries) {
+    if (cur_partition.size() >= max_entries) {
       partitions.add(cur_partition)
 
       cur_partition = []
@@ -65,11 +65,11 @@ def slackSendCacheBuild(block)
     i++
   }
 
-  if(cur_partition.size() > 0) {
+  if (cur_partition.size() > 0) {
     partitions.add(cur_partition)
   }
 
-  for(i = 0; i < partitions.size(); i++) {
+  for (i = 0; i < partitions.size(); i++) {
     slackSend(
       channel: '#jenkins_cache_noisy',
       message: """<${env.GERRIT_CHANGE_URL}|#${env.GERRIT_CHANGE_NUMBER}> on ${env.GERRIT_PROJECT}. Build <${env.BUILD_URL}|#${env.BUILD_NUMBER}> (${i} / ${partitions.size() - 1})
@@ -103,7 +103,7 @@ def jsImage()
       ./build/new-jenkins/docker-with-flakey-network-protection.sh push $KARMA_RUNNER_IMAGE
       ./build/new-jenkins/docker-with-flakey-network-protection.sh push $KARMA_BUILDER_PREFIX
     """
-  } catch(e) {
+  } catch (e) {
     handleDockerBuildFailure(KARMA_RUNNER_IMAGE, e)
   }
 }
@@ -115,9 +115,9 @@ def premergeCacheImage()
     "CACHE_LOAD_SCOPE=${env.IMAGE_CACHE_MERGE_SCOPE}",
     "CACHE_LOAD_FALLBACK_SCOPE=${env.IMAGE_CACHE_BUILD_SCOPE}",
     "CACHE_SAVE_SCOPE=${env.IMAGE_CACHE_MERGE_SCOPE}",
-    "COMPILE_ADDITIONAL_ASSETS=0",
-    "JS_BUILD_NO_UGLIFY=1",
-    "RAILS_LOAD_ALL_LOCALES=0",
+    'COMPILE_ADDITIONAL_ASSETS=0',
+    'JS_BUILD_NO_UGLIFY=1',
+    'RAILS_LOAD_ALL_LOCALES=0',
     "RUBY_RUNNER_PREFIX=${env.RUBY_RUNNER_PREFIX}",
     "WEBPACK_BUILDER_PREFIX=${env.WEBPACK_BUILDER_PREFIX}",
     "WEBPACK_CACHE_PREFIX=${env.WEBPACK_CACHE_PREFIX}",
@@ -125,8 +125,8 @@ def premergeCacheImage()
   ]) {
     slackSendCacheBuild {
       try {
-        sh "build/new-jenkins/docker-build.sh"
-      } catch(e) {
+        sh 'build/new-jenkins/docker-build.sh'
+      } catch (e) {
         handleDockerBuildFailure("$PATCHSET_TAG-pre-merge-failed", e)
       }
     }
@@ -163,7 +163,7 @@ def patchsetImage()
   ]) {
     try {
       sh "build/new-jenkins/docker-build.sh $PATCHSET_TAG"
-    } catch(e) {
+    } catch (e) {
       handleDockerBuildFailure(PATCHSET_TAG, e)
     }
   }
@@ -171,7 +171,7 @@ def patchsetImage()
 
   sh "./build/new-jenkins/docker-with-flakey-network-protection.sh push $PATCHSET_TAG"
 
-  if(configuration.isChangeMerged()) {
+  if (configuration.isChangeMerged()) {
     def GIT_REV = sh(script: 'git rev-parse HEAD', returnStdout: true).trim()
     sh "docker tag \$PATCHSET_TAG \$BUILD_IMAGE:${GIT_REV}"
 

@@ -19,11 +19,11 @@
 def call() {
   catchError (buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
     try {
-      snyk("canvas-lms:ruby", "Gemfile.lock", "$PATCHSET_TAG")
+      snyk('canvas-lms:ruby', 'Gemfile.lock', "$PATCHSET_TAG")
     }
     catch (err) {
       if (err.toString().contains('Gemfile.lock does not exist')) {
-        snyk("canvas-lms:ruby", "Gemfile.lock.next", "$PATCHSET_TAG")
+        snyk('canvas-lms:ruby', 'Gemfile.lock.next', "$PATCHSET_TAG")
       } else {
         throw err
       }

@@ -33,10 +33,10 @@ def appendStagesAsBuildNodes(nodes,
     stage_name_prefix,
     test_label,
     stage_block) {
-  for(int i = 0; i < stage_count; i++) {
+  for (int i = 0; i < stage_count; i++) {
     // make this a local variable so when the closure resolves
     // it gets the correct number
-    def index = i;
+    def index = i
     // we cant use String.format, so... yea
     def stage_name = "$stage_name_prefix ${(index + 1).toString().padLeft(2, '0')}"
     def timeStart = new Date()
@@ -44,12 +44,12 @@ def appendStagesAsBuildNodes(nodes,
       echo "Running on node ${env.NODE_NAME}"
       def duration = TimeCategory.minus(new Date(), timeStart).toMilliseconds()
       // make sure to unstash
-      unstash name: "build-dir"
-      unstash name: "build-docker-compose"
+      unstash name: 'build-dir'
+      unstash name: 'build-docker-compose'
       stage_block(index)
      }
    }
  }
 }
 
 /**
 * use this in combination with appendStagesAsBuildNodes. this will
@@ -57,8 +57,8 @@ def appendStagesAsBuildNodes(nodes,
 * the build scripts
 */
 def stashBuildScripts() {
-  stash name: "build-dir", includes: 'build/**/*'
-  stash name: "build-docker-compose", includes: 'docker-compose.*.yml'
+  stash name: 'build-dir', includes: 'build/**/*'
+  stash name: 'build-docker-compose', includes: 'docker-compose.*.yml'
 }
 
 /**

@@ -23,7 +23,7 @@ import groovy.transform.Field
 def _getDockerInputs() {
   def inputVars = [
     "--volume $WORKSPACE/.git:/usr/src/app/.git",
-    "--env GERGICH_DB_PATH=/home/docker/gergich",
+    '--env GERGICH_DB_PATH=/home/docker/gergich',
     "--env GERGICH_PUBLISH=$GERGICH_PUBLISH",
     "--env GERGICH_KEY=$GERGICH_KEY",
     "--env GERRIT_HOST=$GERRIT_HOST",
@@ -37,7 +37,7 @@ def _getDockerInputs()
     "--env GERRIT_REFSPEC=$GERRIT_REFSPEC",
   ]
 
-  if(env.GERRIT_PROJECT != "canvas-lms") {
+  if (env.GERRIT_PROJECT != 'canvas-lms') {
     inputVars.addAll([
       "--volume $WORKSPACE/gems/plugins/$GERRIT_PROJECT/.git:/usr/src/app/gems/plugins/$GERRIT_PROJECT/.git",
       "--env GERGICH_GIT_PATH=/usr/src/app/gems/plugins/$GERRIT_PROJECT",
@@ -49,9 +49,9 @@ def _getDockerInputs()
 
 def setupNode() {
   credentials.withStarlordDockerLogin {
-    sh "./build/new-jenkins/linters/docker-build.sh local/gergich"
+    sh './build/new-jenkins/linters/docker-build.sh local/gergich'
 
-    if(configuration.getBoolean('upload-linter-debug-image', 'false')) {
+    if (configuration.getBoolean('upload-linter-debug-image', 'false')) {
       sh """
         docker tag local/gergich $LINTER_DEBUG_IMAGE
         docker push $LINTER_DEBUG_IMAGE
@@ -80,8 +80,8 @@ def codeStage()
     sh './build/new-jenkins/linters/run-gergich-linters.sh'
   }
 
-  if(configuration.getBoolean('force-failure-linters', 'false')) {
-    error "lintersStage: force failing due to flag"
+  if (configuration.getBoolean('force-failure-linters', 'false')) {
+    error 'lintersStage: force failing due to flag'
   }
 }
 
@@ -93,8 +93,8 @@ def webpackStage()
     sh './build/new-jenkins/linters/run-gergich-webpack.sh'
   }
 
-  if(configuration.getBoolean('force-failure-linters', 'false')) {
-    error "lintersStage: force failing due to flag"
+  if (configuration.getBoolean('force-failure-linters', 'false')) {
+    error 'lintersStage: force failing due to flag'
   }
 }
 
@@ -107,7 +107,7 @@ def yarnStage()
     sh './build/new-jenkins/linters/run-gergich-yarn.sh'
   }
 
-  if(configuration.getBoolean('force-failure-linters', 'false')) {
-    error "lintersStage: force failing due to flag"
+  if (configuration.getBoolean('force-failure-linters', 'false')) {
+    error 'lintersStage: force failing due to flag'
   }
 }

@@ -46,4 +46,4 @@ def _rebase(String branch, Integer commitHistory)
   if (!git.rebase(branch)) {
     error "Error: Rebase couldn't resolve changes automatically, please resolve these conflicts locally."
   }
-}
+}

@@ -21,7 +21,7 @@ def seleniumConfig()
     node_total: configuration.getInteger('selenium-ci-node-total'),
     max_fail: configuration.getInteger('selenium-max-fail'),
     reruns_retry: configuration.getInteger('selenium-rerun-retry'),
-    force_failure: configuration.isForceFailureSelenium() ? "1" : '',
+    force_failure: configuration.isForceFailureSelenium() ? '1' : '',
     patchsetTag: env.PATCHSET_TAG,
   ]
 }
@@ -49,7 +49,7 @@ def rspecConfig()
     node_total: configuration.getInteger('rspec-ci-node-total'),
     max_fail: configuration.getInteger('rspec-max-fail'),
     reruns_retry: configuration.getInteger('rspec-rerun-retry'),
-    force_failure: configuration.isForceFailureRSpec() ? "1" : '',
+    force_failure: configuration.isForceFailureRSpec() ? '1' : '',
     patchsetTag: env.PATCHSET_TAG,
   ]
 }
@@ -97,8 +97,8 @@ def _runRspecTestSuite(
     "DOCKER_PROCESSES=$docker_processes",
     "RSPEC_PROCESSES=$rspec_processes",
     "FORCE_FAILURE=$force_failure",
-    "POSTGRES_PASSWORD=sekret",
-    "SELENIUM_VERSION=3.141.59-20201119",
+    'POSTGRES_PASSWORD=sekret',
+    'SELENIUM_VERSION=3.141.59-20201119',
     "PATCHSET_TAG=$patchsetTag",
     "ENABLE_AXE_SELENIUM=${env.ENABLE_AXE_SELENIUM}",
   ]) {
@@ -111,7 +111,7 @@ def _runRspecTestSuite(
       sh(script: 'build/new-jenkins/docker-compose-build-up.sh', label: 'Start Containers')
       sh(script: 'build/new-jenkins/docker-compose-rspec-parallel.sh', label: 'Run Tests')
     }
-  } catch(org.jenkinsci.plugins.workflow.steps.FlowInterruptedException e) {
+  } catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException e) {
     if (e.causes[0] instanceof org.jenkinsci.plugins.workflow.steps.TimeoutStepExecution.ExceededTimeout) {
       sh '''#!/bin/bash
         ids=( $(docker ps -aq --filter "name=canvas_") )
@@ -128,7 +128,7 @@ def _runRspecTestSuite(
     sh "build/new-jenkins/docker-copy-files.sh /usr/src/app/log/spec_failures/ tmp/spec_failures/$prefix canvas_ --allow-error --clean-dir"
     sh 'build/new-jenkins/docker-copy-files.sh /usr/src/app/log/results tmp/rspec_results canvas_ --allow-error --clean-dir'
 
-    if(configuration.getBoolean('upload-docker-logs', 'false')) {
+    if (configuration.getBoolean('upload-docker-logs', 'false')) {
       sh "docker ps -aq | xargs -I{} -n1 -P1 docker logs --timestamps --details {} 2>&1 > tmp/docker-${prefix}-${index}.log"
       archiveArtifacts(artifacts: "tmp/docker-${prefix}-${index}.log")
     }
@@ -138,7 +138,7 @@ def _runRspecTestSuite(
       // node_18/spec_failures/canvas__9224fba6fc34/spec_failures/Initial/spec/selenium/force_failure_spec.rb:20/index
       // split on the 5th to give us the rerun category (Initial, Rerun_1, Rerun_2...)
 
-      def pathCategory = file.getPath().split("/")[5]
+      def pathCategory = file.getPath().split('/')[5]
       def finalCategory = reruns_retry.toInteger() == 0 ? 'Initial' : "Rerun_${reruns_retry.toInteger()}"
       def splitPath = file.getPath().split('/').toList()
       def specTitle = splitPath.subList(6, splitPath.size() - 1).join('/')
@@ -146,7 +146,7 @@ def _runRspecTestSuite(
 
       buildSummaryReport.addFailurePath(specTitle, artifactsPath, pathCategory)
 
-      if(pathCategory == finalCategory) {
+      if (pathCategory == finalCategory) {
         buildSummaryReport.setFailureCategory(specTitle, buildSummaryReport.FAILURE_TYPE_TEST_NEVER_PASSED)
       } else {
         buildSummaryReport.setFailureCategoryUnlessExists(specTitle, buildSummaryReport.FAILURE_TYPE_TEST_PASSED_ON_RETRY)
@@ -156,13 +156,12 @@ def _runRspecTestSuite(
     // junit publishing will set build status to unstable if failed tests found, if so set it back to the original value
     def preStatus = currentBuild.rawBuild.@result
 
-    junit allowEmptyResults: true, testResults: "tmp/rspec_results/**/*.xml"
+    junit allowEmptyResults: true, testResults: 'tmp/rspec_results/**/*.xml'
 
-    if(currentBuild.getResult() == 'UNSTABLE' && preStatus != 'UNSTABLE') {
+    if (currentBuild.getResult() == 'UNSTABLE' && preStatus != 'UNSTABLE') {
       currentBuild.rawBuild.@result = preStatus
     }
 
-
     if (env.COVERAGE == '1') {
       sh 'build/new-jenkins/docker-copy-files.sh /usr/src/app/coverage/ tmp/spec_coverage canvas_ --clean-dir'
       archiveArtifacts(artifacts: 'tmp/spec_coverage/**/*')
@@ -192,7 +191,7 @@ def _uploadCoverage(prefix, coverage_name)
 
 def uploadParallelLog() {
   reports.copyParallelLogs('tmp/parallel_runtime_rspec_tests/**/*.log')
-  archiveArtifacts(artifacts: "parallel_logs/**")
+  archiveArtifacts(artifacts: 'parallel_logs/**')
 }
 
 return this

@@ -27,12 +27,12 @@ def call()
     "CACHE_UNIQUE_SCOPE=${env.IMAGE_CACHE_UNIQUE_SCOPE}",
     "CASSANDRA_IMAGE_TAG=${imageTag.cassandra()}",
     "CASSANDRA_PREFIX=${env.CASSANDRA_PREFIX}",
-    "COMPOSE_FILE=docker-compose.new-jenkins.yml",
+    'COMPOSE_FILE=docker-compose.new-jenkins.yml',
     "DYNAMODB_IMAGE_TAG=${imageTag.dynamodb()}",
     "DYNAMODB_PREFIX=${env.DYNAMODB_PREFIX}",
     "POSTGRES_IMAGE_TAG=${imageTag.postgres()}",
     "POSTGRES_PREFIX=${env.POSTGRES_PREFIX}",
-    "POSTGRES_PASSWORD=sekret"
+    'POSTGRES_PASSWORD=sekret'
   ]) {
     sh """
       # Due to https://issues.jenkins.io/browse/JENKINS-15146, we have to set it to empty string here
@@ -45,7 +45,7 @@ def call()
     """
   }
 
-  archiveArtifacts(artifacts: "migrate-*.log", allowEmptyArchive: true)
+  archiveArtifacts(artifacts: 'migrate-*.log', allowEmptyArchive: true)
   sh 'docker-compose down --remove-orphans'
  }
 }

@@ -17,10 +17,10 @@
  */
 
 def call() {
-  def refspecToCheckout = env.GERRIT_PROJECT == "canvas-lms" ? env.GERRIT_REFSPEC : env.CANVAS_LMS_REFSPEC
-  checkoutRepo("canvas-lms", refspecToCheckout, 100)
+  def refspecToCheckout = env.GERRIT_PROJECT == 'canvas-lms' ? env.GERRIT_REFSPEC : env.CANVAS_LMS_REFSPEC
+  checkoutRepo('canvas-lms', refspecToCheckout, 100)
 
-  if(env.GERRIT_PROJECT != "canvas-lms") {
+  if (env.GERRIT_PROJECT != 'canvas-lms') {
     dir(env.LOCAL_WORKDIR) {
       checkoutRepo(GERRIT_PROJECT, env.GERRIT_REFSPEC, 2)
     }
@@ -39,17 +39,16 @@ def call()
     }
   }
 
-  pluginsToPull.add([name: 'qti_migration_tool', version: _getPluginVersion('qti_migration_tool'), target: "vendor/qti_migration_tool"])
+  pluginsToPull.add([name: 'qti_migration_tool', version: _getPluginVersion('qti_migration_tool'), target: 'vendor/qti_migration_tool'])
 
   pullRepos(pluginsToPull)
 
   libraryScript.load('bash/docker-tag-remote.sh', './build/new-jenkins/docker-tag-remote.sh')
-
 }
 
 def _getPluginVersion(plugin) {
-  if(env.GERRIT_BRANCH.contains('stable/')) {
+  if (env.GERRIT_BRANCH.contains('stable/')) {
     return configuration.getString("pin-commit-$plugin", env.GERRIT_BRANCH)
   }
-  return env.GERRIT_EVENT_TYPE == 'change-merged' ? 'master' : configuration.getString("pin-commit-$plugin", "master")
+  return env.GERRIT_EVENT_TYPE == 'change-merged' ? 'master' : configuration.getString("pin-commit-$plugin", 'master')
 }
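The commit message does not name the formatter that produced this sweep, but the JSON file added at the top matches the shape of an npm-groovy-lint config (an "extends" entry plus per-rule overrides of CodeNarc rules). If that is the tool in use, a comparable pass could plausibly be run from a pipeline step along these lines; the package name and flag are assumptions, so verify them against the tool's help output before relying on this.

// Hypothetical invocation; tool and flag are assumed, not confirmed by this commit.
sh 'npx npm-groovy-lint --format'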