#!/usr/bin/env groovy
/*
* Copyright (C) 2019 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
2021-03-23 03:05:58 +08:00
// Stage name constant for the Karma/JS image build; referenced again when the
// stage is queued so the name stays consistent in the build summary report.
def JS_BUILD_IMAGE_STAGE = "Javascript (Build Image)"
2021-03-04 03:35:07 +08:00
2020-02-15 06:02:23 +08:00
// Gerrit/build metadata forwarded to every downstream job (test suites,
// contract tests, CDC schema check). Extended later in the Setup stage.
def buildParameters = [
  string(name: 'GERRIT_REFSPEC', value: "${env.GERRIT_REFSPEC}"),
  string(name: 'GERRIT_EVENT_TYPE', value: "${env.GERRIT_EVENT_TYPE}"),
  string(name: 'GERRIT_PROJECT', value: "${env.GERRIT_PROJECT}"),
  string(name: 'GERRIT_BRANCH', value: "${env.GERRIT_BRANCH}"),
  string(name: 'GERRIT_CHANGE_NUMBER', value: "${env.GERRIT_CHANGE_NUMBER}"),
  string(name: 'GERRIT_PATCHSET_NUMBER', value: "${env.GERRIT_PATCHSET_NUMBER}"),
  string(name: 'GERRIT_EVENT_ACCOUNT_NAME', value: "${env.GERRIT_EVENT_ACCOUNT_NAME}"),
  string(name: 'GERRIT_EVENT_ACCOUNT_EMAIL', value: "${env.GERRIT_EVENT_ACCOUNT_EMAIL}"),
  string(name: 'GERRIT_CHANGE_COMMIT_MESSAGE', value: "${env.GERRIT_CHANGE_COMMIT_MESSAGE}"),
  string(name: 'GERRIT_HOST', value: "${env.GERRIT_HOST}"),
  string(name: 'GERGICH_PUBLISH', value: "${env.GERGICH_PUBLISH}"),
  string(name: 'MASTER_BOUNCER_RUN', value: "${env.MASTER_BOUNCER_RUN}")
]
2020-11-24 01:55:14 +08:00
// Path patterns identifying changes to the local docker dev environment
// (compose files, setup scripts, Dockerfile, rake tasks).
def dockerDevFiles = [
  '^docker-compose/',
  '^script/common/',
  '^script/canvas_update',
  '^docker-compose.yml',
  '^Dockerfile$',
  '^lib/tasks/',
  'Jenkinsfile.docker-smoke'
]
// Patterns matching Jenkins build infrastructure files; used below to decide
// whether sub-builds should use the patchset's Jenkinsfiles (JENKINSFILE_REFSPEC)
// and whether a rebase invalidates the current Jenkinsfile.
def jenkinsFiles = [
'Jenkinsfile*',
'^docker-compose.new-jenkins*.yml',
'build/new-jenkins/*'
]
2021-03-16 22:52:49 +08:00
// URL of this build's summary report page, used in slack notifications.
def getSummaryUrl() {
  "${env.BUILD_URL}/build-summary-report"
}
2020-08-28 20:08:22 +08:00
// Working directory inside the container: the app root for canvas-lms
// itself, or the plugin's directory under gems/plugins for plugin builds.
def getDockerWorkDir() {
  if (env.GERRIT_PROJECT == "canvas-lms") {
    return "/usr/src/app"
  }

  return "/usr/src/app/gems/plugins/${env.GERRIT_PROJECT}"
}
// Workspace-relative checkout directory; mirrors getDockerWorkDir().
def getLocalWorkDir() {
  if (env.GERRIT_PROJECT == "canvas-lms") {
    return "."
  }

  return "gems/plugins/${env.GERRIT_PROJECT}"
}
2021-02-17 08:25:14 +08:00
// 1 when all Rails locales should be compiled (always on merged builds,
// or when the rails-load-all-locales flag is set), otherwise 0.
def getRailsLoadAllLocales() {
  def loadAll = configuration.isChangeMerged() || configuration.getBoolean('rails-load-all-locales', 'false')

  return loadAll ? 1 : 0
}
2020-04-23 01:30:45 +08:00
// If the build never starts or never enters a node block, no workspace
// (hudson.FilePath) context exists and file operations throw a very noisy,
// confusing MissingContextVariableException. Run the block and swallow
// exactly that error; everything else is rethrown.
def ignoreBuildNeverStartedError(block) {
  try {
    block()
  } catch (org.jenkinsci.plugins.workflow.steps.MissingContextVariableException ex) {
    // Only the missing-FilePath variant is expected and safe to ignore.
    if (ex.message.startsWith('Required context class hudson.FilePath is missing')) {
      echo "ignored MissingContextVariableException: \n${ex.message}"
    } else {
      throw ex
    }
  }
}
2020-03-16 20:23:58 +08:00
// Returns false when the current patchset tag doesn't match the mainline
// publishable tag — i.e. ignore pg-9.5 (non-default suffix) builds.
def isPatchsetPublishable() {
  return env.PATCHSET_TAG == env.PUBLISHABLE_TAG
}
2020-09-02 22:46:17 +08:00
// True when this run is a re-run: either the automatic-retrigger flag was
// passed in, or a user manually retriggered via the Gerrit trigger plugin.
def isPatchsetRetriggered() {
  if (env.IS_AUTOMATIC_RETRIGGER == '1') {
    return true
  }

  def causes = currentBuild.getBuildCauses('com.sonyericsson.hudson.plugins.gerrit.trigger.hudsontrigger.GerritUserCause')

  return causes && causes[0].shortDescription.contains('Retriggered')
}
2020-07-27 00:18:52 +08:00
// Node cleanup hook: prune docker resources, tolerating failures and builds
// that never acquired a workspace.
def cleanupFn(status) {
  ignoreBuildNeverStartedError {
    libraryScript.execute 'bash/docker-cleanup.sh --allow-failure'
  }
}
2020-07-29 03:14:53 +08:00
// Post-build hook: publish the build summary report, promote publishable
// image tags, and send slack notifications / retrigger based on final status.
def postFn(status) {
  try {
    def nodeWaitStart = System.currentTimeMillis()
    node('master') {
      def nodeWaitEnd = System.currentTimeMillis()
      // Record how long we waited for the master node to become available.
      reportToSplunk('node_request_time', [
        'nodeName': 'master',
        'nodeLabel': 'master',
        'requestTime': nodeWaitEnd - nodeWaitStart,
      ])

      buildSummaryReport.publishReport('Build Summary Report', status)

      if (isPatchsetPublishable()) {
        dockerUtils.tagRemote(env.PATCHSET_TAG, env.EXTERNAL_TAG)
      }

      // Merge tags are only promoted for fully-successful merged builds.
      if (status == 'SUCCESS' && configuration.isChangeMerged() && isPatchsetPublishable()) {
        dockerUtils.tagRemote(env.PATCHSET_TAG, env.MERGE_TAG)
        dockerUtils.tagRemote(env.CASSANDRA_IMAGE, env.CASSANDRA_MERGE_IMAGE)
        dockerUtils.tagRemote(env.DYNAMODB_IMAGE, env.DYNAMODB_MERGE_IMAGE)
        dockerUtils.tagRemote(env.POSTGRES_IMAGE, env.POSTGRES_MERGE_IMAGE)
      }
    }
  } finally {
    // Notifications run even if tag promotion above fails.
    if (status == 'FAILURE') {
      maybeSlackSendFailure()
      maybeRetrigger()
    } else if (status == 'SUCCESS') {
      maybeSlackSendSuccess()
    }
  }
}
2020-09-03 22:31:09 +08:00
// Whether this build qualifies for an automatic re-run on failure.
def shouldPatchsetRetrigger() {
  // NOTE: The IS_AUTOMATIC_RETRIGGER check is here to ensure that the parameter is properly defined for the triggering job.
  // If it isn't, we have the risk of triggering this job over and over in an infinite loop.
  if (env.IS_AUTOMATIC_RETRIGGER != '0') {
    return false
  }

  return env.GERRIT_EVENT_TYPE == 'change-merged' ||
    (configuration.getBoolean('change-merged') && configuration.getBoolean('enable-automatic-retrigger', '0'))
}
// Fire-and-forget a single automatic re-run of this job when the patchset
// qualifies (shouldPatchsetRetrigger) and has not already been retriggered.
def maybeRetrigger() {
  if(shouldPatchsetRetrigger() && !isPatchsetRetriggered()) {
    // Copy this build's parameters, dropping any stale IS_AUTOMATIC_RETRIGGER
    // entry so the replacement value below is the only one present.
    def retriggerParams = currentBuild.rawBuild.getAction(ParametersAction).getParameters()
    retriggerParams = retriggerParams.findAll { record ->
      record.name != 'IS_AUTOMATIC_RETRIGGER'
    }
    // Mark the child build as the automatic retry; isPatchsetRetriggered()
    // sees this flag and prevents an infinite retrigger loop.
    retriggerParams << new StringParameterValue('IS_AUTOMATIC_RETRIGGER', "1")
    // propagate/wait false: the retry runs independently and does not affect
    // this build's result or duration.
    build(job: env.JOB_NAME, parameters: retriggerParams, propagate: false, wait: false)
  }
}
2020-09-02 22:46:17 +08:00
// On a post-merge failure, ping the patchset author in slack with a link to
// the build summary report and triage instructions. No-op pre-merge.
def maybeSlackSendFailure() {
  if (!configuration.isChangeMerged()) {
    return
  }

  def branchSegment = env.GERRIT_BRANCH ? "[$env.GERRIT_BRANCH]" : ''
  // Resolve the author's slack ID from their gerrit email; fall back to the
  // plain account name when lookup isn't possible.
  def authorSlackId = env.GERRIT_EVENT_ACCOUNT_EMAIL ? slackUserIdFromEmail(email: env.GERRIT_EVENT_ACCOUNT_EMAIL, botUser: true, tokenCredentialId: 'slack-user-id-lookup') : ''
  def authorSlackMsg = authorSlackId ? "<@$authorSlackId>" : env.GERRIT_EVENT_ACCOUNT_NAME
  def authorSegment = "Patchset <${env.GERRIT_CHANGE_URL}|#${env.GERRIT_CHANGE_NUMBER}> by ${authorSlackMsg} failed against ${branchSegment}"
  def extra = "Please investigate the cause of the failure, and respond to this message with your diagnosis. If you need help, don't hesitate to tag @ oncall and our on call will assist in looking at the build. Further details of our post-merge failure process can be found at this <${configuration.getFailureWiki()}|link>. Thanks!"

  slackSend(
    channel: getSlackChannel(),
    color: 'danger',
    message: "${authorSegment}. Build <${getSummaryUrl()}|#${env.BUILD_NUMBER}>\n\n$extra"
  )
}
2020-09-02 22:46:17 +08:00
// Announce in slack when a retriggered post-merge build recovers, so the
// earlier failure notification can be closed out.
def maybeSlackSendSuccess() {
  if (!(configuration.isChangeMerged() && isPatchsetRetriggered())) {
    return
  }

  slackSend(
    channel: getSlackChannel(),
    color: 'good',
    message: "Patchset <${env.GERRIT_CHANGE_URL}|#${env.GERRIT_CHANGE_NUMBER}> succeeded on re-trigger. Build <${getSummaryUrl()}|#${env.BUILD_NUMBER}>"
  )
}
2020-09-02 01:18:16 +08:00
2020-09-02 22:46:17 +08:00
// Warn in slack that a merged patchset's build has been re-triggered.
def maybeSlackSendRetrigger() {
  if (!(configuration.isChangeMerged() && isPatchsetRetriggered())) {
    return
  }

  slackSend(
    channel: getSlackChannel(),
    color: 'warning',
    message: "Patchset <${env.GERRIT_CHANGE_URL}|#${env.GERRIT_CHANGE_NUMBER}> by ${env.GERRIT_EVENT_ACCOUNT_EMAIL} has been re-triggered. Build <${env.BUILD_URL}|#${env.BUILD_NUMBER}>"
  )
}
2020-12-17 07:35:20 +08:00
// Runs the given docker-build block, then posts its duration and the captured
// build log (tmp/docker-build.log) to #jenkins_cache_noisy.
//
// slackSend() has a ridiculously low limit of 2k characters per message, so
// the log is split into chunks of at most five lines and posted as a
// numbered series of messages.
def slackSendCacheBuild(block) {
  def buildStartTime = System.currentTimeMillis()
  block()
  def buildEndTime = System.currentTimeMillis()

  def buildLog = sh(script: 'cat tmp/docker-build.log', returnStdout: true).trim()
  // collate() replaces the previous hand-rolled partitioning loop: it yields
  // successive sublists of up to maxLinesPerMessage entries each.
  def maxLinesPerMessage = 5
  def partitions = buildLog.split('\n').toList().collate(maxLinesPerMessage)

  for (def i = 0; i < partitions.size(); i++) {
    slackSend(
      channel: '#jenkins_cache_noisy',
      message: """<${env.GERRIT_CHANGE_URL}|#${env.GERRIT_CHANGE_NUMBER}> on ${env.GERRIT_PROJECT}. Build <${env.BUILD_URL}|#${env.BUILD_NUMBER}> (${i} / ${partitions.size() - 1})

Duration: ${buildEndTime - buildStartTime}ms
Instance: ${env.NODE_NAME}

```${partitions[i].join('\n\n')}```
"""
    )
  }
}
2020-08-05 05:42:10 +08:00
// These functions are intentionally pinned to GERRIT_EVENT_TYPE == 'change-merged' to ensure that real post-merge
// builds always run correctly. We intentionally ignore overrides for version pins, docker image paths, etc when
// running real post-merge builds.
// =========
2020-08-01 00:30:29 +08:00
// Ref to check out for the named plugin repo. Stable branches default to the
// branch itself; real post-merge builds (change-merged event) intentionally
// ignore pin-commit overrides and always use master.
def getPluginVersion(plugin) {
  if (env.GERRIT_BRANCH.contains('stable/')) {
    return configuration.getString("pin-commit-$plugin", env.GERRIT_BRANCH)
  }

  if (env.GERRIT_EVENT_TYPE == 'change-merged') {
    return 'master'
  }

  return configuration.getString("pin-commit-$plugin", "master")
}
2020-09-02 05:02:38 +08:00
// Post-merge builds notify the shared builds channel; everything else goes
// to the devx bots channel.
def getSlackChannel() {
  if (env.GERRIT_EVENT_TYPE == 'change-merged') {
    return '#canvas_builds'
  }

  return '#devx-bots'
}
2020-09-02 05:02:38 +08:00
2020-11-02 23:39:02 +08:00
// Matches a [canvas-builds-refspec=...] override in the commit message.
@groovy.transform.Field def CANVAS_BUILDS_REFSPEC_REGEX = /\[canvas\-builds\-refspec=(.+?)\]/

// Refspec of the canvas-builds shared library: the commit-message override on
// pre-merge builds when present, otherwise the stable branch or master.
def getCanvasBuildsRefspec() {
  def commitMessage = env.GERRIT_CHANGE_COMMIT_MESSAGE ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : null
  def hasOverride = commitMessage && (commitMessage =~ CANVAS_BUILDS_REFSPEC_REGEX).find()

  // Post-merge builds intentionally ignore the override.
  if (env.GERRIT_EVENT_TYPE == 'change-merged' || !hasOverride) {
    return env.GERRIT_BRANCH.contains('stable/') ? env.GERRIT_BRANCH : 'master'
  }

  return (commitMessage =~ CANVAS_BUILDS_REFSPEC_REGEX).findAll()[0][1]
}
2020-11-10 00:03:25 +08:00
// Matches a [canvas-lms-refspec=...] override in the commit message.
@groovy.transform.Field def CANVAS_LMS_REFSPEC_REGEX = /\[canvas\-lms\-refspec=(.+?)\]/

// Refspec used to check out canvas-lms (relevant for plugin builds).
def getCanvasLmsRefspec() {
  // If stable branch, first search commit message for canvas-lms-refspec. If not present use stable branch head on origin.
  if(env.GERRIT_BRANCH.contains('stable/')) {
    def commitMessage = env.GERRIT_CHANGE_COMMIT_MESSAGE ? new String(env.GERRIT_CHANGE_COMMIT_MESSAGE.decodeBase64()) : null
    // Guard against a missing commit message: matching null against the regex
    // throws an NPE. Mirrors the !commitMessage guard in getCanvasBuildsRefspec.
    if(commitMessage && (commitMessage =~ CANVAS_LMS_REFSPEC_REGEX).find()) {
      return configuration.canvasLmsRefspec()
    }
    return "+refs/heads/$GERRIT_BRANCH:refs/remotes/origin/$GERRIT_BRANCH"
  }
  // Post-merge builds intentionally ignore refspec overrides.
  return env.GERRIT_EVENT_TYPE == 'change-merged' ? configuration.canvasLmsRefspecDefault() : configuration.canvasLmsRefspec()
}
2020-08-05 05:42:10 +08:00
// =========
2021-03-19 23:14:58 +08:00
// On a docker build failure, optionally push the last successful intermediate
// layer as "<imagePrefix>-failed" for local debugging, then rethrow the
// original exception so the stage still fails.
def handleDockerBuildFailure(imagePrefix, e) {
  // Uploads happen for merged builds, or when explicitly requested via flag.
  if(configuration.isChangeMerged() || configuration.getBoolean('upload-docker-image-failures', 'false')) {
    // DEBUG: In some cases, such as the the image build failing only on Jenkins, it can be useful to be able to
    // download the last successful layer to debug locally. If we ever start using buildkit for the relevant
    // images, then this approach will have to change as buildkit doesn't save the intermediate layers as images.
    sh(script: """
      docker tag \$(docker images | awk '{print \$3}' | awk 'NR==2') $imagePrefix-failed
      ./build/new-jenkins/docker-with-flakey-network-protection.sh push $imagePrefix-failed
    """, label: 'upload failed image')
  }
  // Always propagate the failure to the caller.
  throw e
}
2020-11-02 23:39:02 +08:00
// Load the shared canvas-builds library at the resolved refspec, plus the
// repo-local library, before the pipeline definition below runs.
library "canvas-builds-library@${getCanvasBuildsRefspec()}"
loadLocalLibrary("local-lib", "build/new-jenkins/library")

// Commit-message flags (e.g. [skip-ci]) are honored only on pre-merge builds.
configuration.setUseCommitMessageFlags(env.GERRIT_EVENT_TYPE != 'change-merged')
2019-02-16 04:16:49 +08:00
pipeline {
2020-07-27 00:18:52 +08:00
agent none
2020-05-09 02:23:07 +08:00
options {
ansiColor('xterm')
2021-01-07 01:11:17 +08:00
timeout(time: 1, unit: 'HOURS')
2020-05-09 02:23:07 +08:00
timestamps()
}
2019-02-20 23:41:00 +08:00
2019-02-21 01:06:06 +08:00
environment {
2019-03-12 09:55:34 +08:00
GERRIT_PORT = '29418'
2019-02-21 01:22:05 +08:00
GERRIT_URL = "$GERRIT_HOST:$GERRIT_PORT"
2020-06-17 02:23:25 +08:00
BUILD_REGISTRY_FQDN = configuration.buildRegistryFQDN()
2020-12-09 06:57:08 +08:00
BUILD_IMAGE = configuration.buildRegistryPath()
2020-06-17 02:23:25 +08:00
POSTGRES = configuration.postgres()
2020-03-16 20:23:58 +08:00
POSTGRES_CLIENT = configuration.postgresClient()
2020-07-31 00:27:16 +08:00
SKIP_CACHE = configuration.skipCache()
2019-02-16 04:16:49 +08:00
2020-08-12 01:14:14 +08:00
// e.g. postgres-12-ruby-2.6
2020-07-22 10:06:57 +08:00
TAG_SUFFIX = imageTag.suffix()
2019-02-21 01:22:05 +08:00
2020-05-14 23:19:56 +08:00
2020-03-16 20:23:58 +08:00
// e.g. canvas-lms:01.123456.78-postgres-12-ruby-2.6
2020-12-09 06:57:08 +08:00
PATCHSET_TAG = imageTag.patchset()
2020-02-15 06:02:23 +08:00
2020-08-12 01:14:14 +08:00
// e.g. canvas-lms:01.123456.78-postgres-12-ruby-2.6
2020-12-09 06:57:08 +08:00
PUBLISHABLE_TAG = imageTag.publishableTag()
2020-02-15 06:02:23 +08:00
// e.g. canvas-lms:master when not on another branch
2020-12-09 06:57:08 +08:00
MERGE_TAG = imageTag.mergeTag()
2020-04-04 04:05:52 +08:00
// e.g. canvas-lms:01.123456.78; this is for consumers like Portal 2 who want to build a patchset
2020-12-09 06:57:08 +08:00
EXTERNAL_TAG = imageTag.externalTag()
2020-03-16 20:23:58 +08:00
ALPINE_MIRROR = configuration.alpineMirror()
NODE = configuration.node()
RUBY = configuration.ruby() // RUBY_VERSION is a reserved keyword for ruby installs
2021-01-20 03:15:52 +08:00
RSPEC_PROCESSES = 4
2020-06-27 03:37:32 +08:00
2021-01-14 07:49:14 +08:00
LINTER_DEBUG_IMAGE = "${configuration.buildRegistryPath("linter-debug")}:${imageTagVersion()}-$TAG_SUFFIX"
2020-12-11 06:08:08 +08:00
2021-01-12 08:54:32 +08:00
CASSANDRA_PREFIX = configuration.buildRegistryPath('cassandra-migrations')
DYNAMODB_PREFIX = configuration.buildRegistryPath('dynamodb-migrations')
2021-02-02 01:47:21 +08:00
KARMA_BUILDER_PREFIX = configuration.buildRegistryPath("karma-builder")
KARMA_RUNNER_PREFIX = configuration.buildRegistryPath("karma-runner")
2021-01-12 08:54:32 +08:00
POSTGRES_PREFIX = configuration.buildRegistryPath('postgres-migrations')
2021-01-05 01:24:26 +08:00
RUBY_RUNNER_PREFIX = configuration.buildRegistryPath("ruby-runner")
YARN_RUNNER_PREFIX = configuration.buildRegistryPath("yarn-runner")
WEBPACK_BUILDER_PREFIX = configuration.buildRegistryPath("webpack-builder")
WEBPACK_CACHE_PREFIX = configuration.buildRegistryPath("webpack-cache")
2020-08-10 03:30:45 +08:00
2020-12-09 04:02:29 +08:00
IMAGE_CACHE_BUILD_SCOPE = configuration.gerritChangeNumber()
IMAGE_CACHE_MERGE_SCOPE = configuration.gerritBranchSanitized()
2021-01-12 08:54:32 +08:00
IMAGE_CACHE_UNIQUE_SCOPE = "${imageTagVersion()}-$TAG_SUFFIX"
CASSANDRA_IMAGE = "$CASSANDRA_PREFIX:$IMAGE_CACHE_UNIQUE_SCOPE"
DYNAMODB_IMAGE = "$DYNAMODB_PREFIX:$IMAGE_CACHE_UNIQUE_SCOPE"
POSTGRES_IMAGE = "$POSTGRES_PREFIX:$IMAGE_CACHE_UNIQUE_SCOPE"
WEBPACK_BUILDER_IMAGE = "$WEBPACK_BUILDER_PREFIX:$IMAGE_CACHE_UNIQUE_SCOPE"
2020-12-03 01:02:24 +08:00
2021-01-20 03:15:52 +08:00
CASSANDRA_MERGE_IMAGE = "$CASSANDRA_PREFIX:$IMAGE_CACHE_MERGE_SCOPE-$RSPEC_PROCESSES"
DYNAMODB_MERGE_IMAGE = "$DYNAMODB_PREFIX:$IMAGE_CACHE_MERGE_SCOPE-$RSPEC_PROCESSES"
2021-02-02 01:47:21 +08:00
KARMA_RUNNER_IMAGE = "$KARMA_RUNNER_PREFIX:$IMAGE_CACHE_UNIQUE_SCOPE"
2021-01-20 03:15:52 +08:00
POSTGRES_MERGE_IMAGE = "$POSTGRES_PREFIX:$IMAGE_CACHE_MERGE_SCOPE-$RSPEC_PROCESSES"
2020-08-15 00:13:24 +08:00
// This is primarily for the plugin build
// for testing canvas-lms changes against plugin repo changes
2020-11-02 23:39:02 +08:00
CANVAS_BUILDS_REFSPEC = getCanvasBuildsRefspec()
2020-10-08 22:19:24 +08:00
CANVAS_LMS_REFSPEC = getCanvasLmsRefspec()
2020-08-28 20:08:22 +08:00
DOCKER_WORKDIR = getDockerWorkDir()
LOCAL_WORKDIR = getLocalWorkDir()
2020-02-15 06:02:23 +08:00
}
2019-12-10 02:37:05 +08:00
2020-02-15 06:02:23 +08:00
stages {
2020-07-27 00:18:52 +08:00
stage('Environment') {
2019-03-12 09:55:34 +08:00
steps {
2020-07-27 00:18:52 +08:00
script {
2021-03-17 02:38:05 +08:00
node('master') {
if (configuration.skipCi()) {
2021-01-06 01:24:43 +08:00
currentBuild.result = 'NOT_BUILT'
2021-03-17 02:38:05 +08:00
gerrit.submitLintReview("-2", "Build not executed due to [skip-ci] flag")
error "[skip-ci] flag enabled: skipping the build"
2021-01-06 01:24:43 +08:00
return
2021-04-06 04:17:59 +08:00
} else if(extendedStage.isAllowStagesFilterUsed() || extendedStage.isIgnoreStageResultsFilterUsed()) {
gerrit.submitLintReview("-2", "One or more build flags causes a subset of the build to be run")
2021-03-17 02:38:05 +08:00
} else {
gerrit.submitLintReview("0")
2021-03-11 04:12:29 +08:00
}
2021-01-06 01:24:43 +08:00
}
2021-03-11 04:12:29 +08:00
2020-08-05 05:42:10 +08:00
// Ensure that all build flags are compatible.
if(configuration.getBoolean('change-merged') && configuration.isValueDefault('build-registry-path')) {
error "Manually triggering the change-merged build path must be combined with a custom build-registry-path"
return
}
2020-09-02 22:46:17 +08:00
maybeSlackSendRetrigger()
2020-09-02 01:18:16 +08:00
2021-03-22 23:11:42 +08:00
def rootStages = [:]
2021-04-08 22:29:43 +08:00
extendedStage('Builder').obeysAllowStages(false).timings(false).queue(rootStages) {
2021-03-23 05:36:59 +08:00
// Use a nospot instance for now to avoid really bad UX. Jenkins currently will
// wait for the current steps to complete (even wait to spin up a node), causing
// extremely long wait times for a restart. Investigation in DE-166 / DE-158.
protectedNode('canvas-docker-nospot', { status -> cleanupFn(status) }, { status -> postFn(status) }) {
2021-04-08 22:29:43 +08:00
extendedStage('Setup').obeysAllowStages(false).handler(buildSummaryReport).execute {
2021-03-23 05:36:59 +08:00
timeout(time: 2) {
echo "Cleaning Workspace From Previous Runs"
sh 'ls -A1 | xargs rm -rf'
sh 'find .'
cleanAndSetup()
def refspecToCheckout = env.GERRIT_PROJECT == "canvas-lms" ? env.GERRIT_REFSPEC : env.CANVAS_LMS_REFSPEC
checkoutRepo("canvas-lms", refspecToCheckout, 100)
if(env.GERRIT_PROJECT != "canvas-lms") {
dir(env.LOCAL_WORKDIR) {
checkoutRepo(GERRIT_PROJECT, env.GERRIT_REFSPEC, 2)
2021-03-22 23:11:42 +08:00
}
2020-12-08 00:52:44 +08:00
2021-03-23 05:36:59 +08:00
// Plugin builds using the dir step above will create this @tmp file, we need to remove it
// https://issues.jenkins.io/browse/JENKINS-52750
sh 'rm -vr gems/plugins/*@tmp'
}
2020-12-08 00:52:44 +08:00
2021-03-23 05:36:59 +08:00
buildParameters += string(name: 'CANVAS_BUILDS_REFSPEC', value: "${env.CANVAS_BUILDS_REFSPEC}")
buildParameters += string(name: 'PATCHSET_TAG', value: "${env.PATCHSET_TAG}")
buildParameters += string(name: 'POSTGRES', value: "${env.POSTGRES}")
buildParameters += string(name: 'RUBY', value: "${env.RUBY}")
2020-12-08 00:52:44 +08:00
2021-03-23 05:36:59 +08:00
// if (currentBuild.projectName.contains("rails-6")) {
// when updating this for future rails versions, change the value back to ${env.CANVAS_RAILSX_Y}
buildParameters += string(name: 'CANVAS_RAILS6_0', value: "1")
// }
2020-10-23 00:21:11 +08:00
2021-03-23 05:36:59 +08:00
// If modifying any of our Jenkinsfiles set JENKINSFILE_REFSPEC for sub-builds to use Jenkinsfiles in
// the gerrit rather than master.
if(env.GERRIT_PROJECT == 'canvas-lms' && git.changedFiles(jenkinsFiles, 'HEAD^') ) {
buildParameters += string(name: 'JENKINSFILE_REFSPEC', value: "${env.GERRIT_REFSPEC}")
}
2020-03-21 10:56:24 +08:00
2021-03-23 05:36:59 +08:00
if (env.GERRIT_PROJECT != "canvas-lms") {
// the plugin builds require the canvas lms refspec to be different. so only
// set this refspec if the main build is requesting it to be set.
// NOTE: this is only being set in main-from-plugin build. so main-canvas wont run this.
buildParameters += string(name: 'CANVAS_LMS_REFSPEC', value: env.CANVAS_LMS_REFSPEC)
}
gems = configuration.plugins()
echo "Plugin list: ${gems}"
def pluginsToPull = []
gems.each {
if (env.GERRIT_PROJECT != it) {
pluginsToPull.add([name: it, version: getPluginVersion(it), target: "gems/plugins/$it"])
2021-03-22 23:11:42 +08:00
}
2021-03-23 05:36:59 +08:00
}
2020-12-08 00:52:44 +08:00
2021-03-23 05:36:59 +08:00
pluginsToPull.add([name: 'qti_migration_tool', version: getPluginVersion('qti_migration_tool'), target: "vendor/qti_migration_tool"])
2020-12-08 00:52:44 +08:00
2021-03-23 05:36:59 +08:00
pullRepos(pluginsToPull)
2021-01-20 06:00:04 +08:00
2021-03-23 05:36:59 +08:00
libraryScript.load('bash/docker-tag-remote.sh', './build/new-jenkins/docker-tag-remote.sh')
2021-03-22 23:11:42 +08:00
}
2021-03-23 05:36:59 +08:00
}
2021-03-22 23:11:42 +08:00
2021-03-23 05:36:59 +08:00
if(!configuration.isChangeMerged() && env.GERRIT_PROJECT == 'canvas-lms' && !configuration.skipRebase()) {
2021-04-08 22:29:43 +08:00
extendedStage('Rebase').obeysAllowStages(false).handler(buildSummaryReport).execute {
2021-03-23 05:36:59 +08:00
timeout(time: 2) {
rebaseHelper(GERRIT_BRANCH)
2021-03-22 23:11:42 +08:00
2021-03-23 05:36:59 +08:00
if(!env.JOB_NAME.endsWith('Jenkinsfile') && git.changedFiles(jenkinsFiles, 'origin/master')) {
error "Jenkinsfile has been updated. Please retrigger your patchset for the latest updates."
2021-03-22 23:11:42 +08:00
}
2020-11-05 22:54:12 +08:00
}
2020-03-17 01:10:43 +08:00
}
2021-03-23 05:36:59 +08:00
}
2019-07-29 23:55:07 +08:00
2021-03-23 05:36:59 +08:00
if (configuration.isChangeMerged()) {
2021-04-08 22:29:43 +08:00
extendedStage('Build Docker Image (Pre-Merge)').obeysAllowStages(false).handler(buildSummaryReport).execute {
2021-03-22 23:11:42 +08:00
timeout(time: 20) {
2021-03-23 02:07:08 +08:00
credentials.withStarlordCredentials {
2021-03-23 05:36:59 +08:00
withEnv([
"CACHE_LOAD_SCOPE=${env.IMAGE_CACHE_MERGE_SCOPE}",
"CACHE_LOAD_FALLBACK_SCOPE=${env.IMAGE_CACHE_BUILD_SCOPE}",
"CACHE_SAVE_SCOPE=${env.IMAGE_CACHE_MERGE_SCOPE}",
"COMPILE_ADDITIONAL_ASSETS=0",
"JS_BUILD_NO_UGLIFY=1",
"RAILS_LOAD_ALL_LOCALES=0",
"RUBY_RUNNER_PREFIX=${env.RUBY_RUNNER_PREFIX}",
"WEBPACK_BUILDER_PREFIX=${env.WEBPACK_BUILDER_PREFIX}",
"WEBPACK_CACHE_PREFIX=${env.WEBPACK_CACHE_PREFIX}",
"YARN_RUNNER_PREFIX=${env.YARN_RUNNER_PREFIX}",
]) {
slackSendCacheBuild {
2021-03-22 23:11:42 +08:00
try {
2021-03-23 05:36:59 +08:00
sh "build/new-jenkins/docker-build.sh"
2021-03-22 23:11:42 +08:00
} catch(e) {
2021-03-23 05:36:59 +08:00
handleDockerBuildFailure("$PATCHSET_TAG-pre-merge-failed", e)
2021-03-22 23:11:42 +08:00
}
2021-03-19 23:14:58 +08:00
}
2021-03-18 23:00:32 +08:00
2021-03-23 05:36:59 +08:00
// We need to attempt to upload all prefixes here in case instructure/ruby-passenger
// has changed between the post-merge build and this pre-merge build.
sh(script: """
./build/new-jenkins/docker-with-flakey-network-protection.sh push $WEBPACK_BUILDER_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $YARN_RUNNER_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $RUBY_RUNNER_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $WEBPACK_CACHE_PREFIX
""", label: 'upload cache images')
2021-02-20 01:09:24 +08:00
}
2021-03-23 02:07:08 +08:00
}
2020-07-27 00:18:52 +08:00
}
2021-03-22 23:11:42 +08:00
}
2021-03-23 05:36:59 +08:00
}
2020-10-20 02:44:43 +08:00
2021-04-08 22:29:43 +08:00
extendedStage('Build Docker Image').obeysAllowStages(false).handler(buildSummaryReport).execute {
2021-03-23 05:36:59 +08:00
timeout(time: 20) {
credentials.withStarlordCredentials {
def cacheScope = configuration.isChangeMerged() ? env.IMAGE_CACHE_MERGE_SCOPE : env.IMAGE_CACHE_BUILD_SCOPE
2020-11-12 03:21:00 +08:00
2021-03-23 05:36:59 +08:00
slackSendCacheBuild {
2021-03-22 23:11:42 +08:00
withEnv([
2021-03-23 05:36:59 +08:00
"CACHE_LOAD_SCOPE=${env.IMAGE_CACHE_MERGE_SCOPE}",
"CACHE_LOAD_FALLBACK_SCOPE=${env.IMAGE_CACHE_BUILD_SCOPE}",
"CACHE_SAVE_SCOPE=${cacheScope}",
2021-03-22 23:11:42 +08:00
"CACHE_UNIQUE_SCOPE=${env.IMAGE_CACHE_UNIQUE_SCOPE}",
2021-03-23 05:36:59 +08:00
"COMPILE_ADDITIONAL_ASSETS=${configuration.isChangeMerged() ? 1 : 0}",
"JS_BUILD_NO_UGLIFY=${configuration.isChangeMerged() ? 0 : 1}",
"RAILS_LOAD_ALL_LOCALES=${getRailsLoadAllLocales()}",
"RUBY_RUNNER_PREFIX=${env.RUBY_RUNNER_PREFIX}",
"WEBPACK_BUILDER_PREFIX=${env.WEBPACK_BUILDER_PREFIX}",
"WEBPACK_CACHE_PREFIX=${env.WEBPACK_CACHE_PREFIX}",
"YARN_RUNNER_PREFIX=${env.YARN_RUNNER_PREFIX}",
2021-03-22 23:11:42 +08:00
]) {
2021-03-23 05:36:59 +08:00
try {
sh "build/new-jenkins/docker-build.sh $PATCHSET_TAG"
} catch(e) {
handleDockerBuildFailure(PATCHSET_TAG, e)
}
2021-03-22 23:11:42 +08:00
}
2021-03-23 05:36:59 +08:00
}
sh "./build/new-jenkins/docker-with-flakey-network-protection.sh push $PATCHSET_TAG"
if(configuration.isChangeMerged()) {
def GIT_REV = sh(script: 'git rev-parse HEAD', returnStdout: true).trim()
sh "docker tag \$PATCHSET_TAG \$BUILD_IMAGE:${GIT_REV}"
sh "./build/new-jenkins/docker-with-flakey-network-protection.sh push \$BUILD_IMAGE:${GIT_REV}"
}
sh(script: """
./build/new-jenkins/docker-with-flakey-network-protection.sh push $WEBPACK_BUILDER_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $YARN_RUNNER_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $RUBY_RUNNER_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $WEBPACK_CACHE_PREFIX
""", label: 'upload cache images')
}
}
}
2020-12-17 06:59:02 +08:00
2021-04-08 22:29:43 +08:00
extendedStage('Run Migrations').obeysAllowStages(false).handler(buildSummaryReport).execute {
2021-03-23 05:36:59 +08:00
timeout(time: 10) {
credentials.withStarlordCredentials {
def cacheLoadScope = configuration.isChangeMerged() || configuration.getBoolean('skip-cache') ? '' : env.IMAGE_CACHE_MERGE_SCOPE
def cacheSaveScope = configuration.isChangeMerged() ? env.IMAGE_CACHE_MERGE_SCOPE : ''
withEnv([
"CACHE_LOAD_SCOPE=${cacheLoadScope}",
"CACHE_SAVE_SCOPE=${cacheSaveScope}",
"CACHE_UNIQUE_SCOPE=${env.IMAGE_CACHE_UNIQUE_SCOPE}",
"CASSANDRA_IMAGE_TAG=${imageTag.cassandra()}",
"CASSANDRA_PREFIX=${env.CASSANDRA_PREFIX}",
"COMPOSE_FILE=docker-compose.new-jenkins.yml",
"DYNAMODB_IMAGE_TAG=${imageTag.dynamodb()}",
"DYNAMODB_PREFIX=${env.DYNAMODB_PREFIX}",
"POSTGRES_IMAGE_TAG=${imageTag.postgres()}",
"POSTGRES_PREFIX=${env.POSTGRES_PREFIX}",
"POSTGRES_PASSWORD=sekret"
]) {
sh """
# Due to https://issues.jenkins.io/browse/JENKINS-15146, we have to set it to empty string here
export CACHE_LOAD_SCOPE=\${CACHE_LOAD_SCOPE:-}
export CACHE_SAVE_SCOPE=\${CACHE_SAVE_SCOPE:-}
./build/new-jenkins/run-migrations.sh
./build/new-jenkins/docker-with-flakey-network-protection.sh push $CASSANDRA_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $DYNAMODB_PREFIX || true
./build/new-jenkins/docker-with-flakey-network-protection.sh push $POSTGRES_PREFIX || true
"""
2021-03-23 02:07:08 +08:00
}
2021-03-23 05:36:59 +08:00
archiveArtifacts(artifacts: "migrate-*.log", allowEmptyArchive: true)
sh 'docker-compose down --remove-orphans'
2021-03-10 00:07:15 +08:00
}
2021-03-22 23:11:42 +08:00
}
2021-03-23 05:36:59 +08:00
}
2021-03-10 00:07:15 +08:00
2021-03-23 05:36:59 +08:00
stage('Parallel Run Tests') {
withEnv([
"CASSANDRA_IMAGE_TAG=${env.CASSANDRA_IMAGE}",
"DYNAMODB_IMAGE_TAG=${env.DYNAMODB_IMAGE}",
"POSTGRES_IMAGE_TAG=${env.POSTGRES_IMAGE}",
]) {
// Assemble the map of parallel stages that run against the patchset image,
// then fan them out with parallel(). Stray git-blame timestamp lines that had
// been interleaved into this section (invalid Groovy) have been removed.
def stages = [:]

// Linters and the master bouncer only make sense pre-merge.
if (!configuration.isChangeMerged()) {
  echo 'adding Linters'
  extendedStage('Linters').handler(buildSummaryReport).queue(stages) {
    credentials.withStarlordCredentials {
      credentials.withGerritCredentials {
        withEnv([
          "FORCE_FAILURE=${configuration.getBoolean('force-failure-linters', 'false')}",
          "PLUGINS_LIST=${configuration.plugins().join(' ')}",
          "SKIP_ESLINT=${configuration.getString('skip-eslint', 'false')}",
          "UPLOAD_DEBUG_IMAGE=${configuration.getBoolean('upload-linter-debug-image', 'false')}",
        ]) {
          sh 'build/new-jenkins/linters/run-gergich.sh'
        }
      }
      // MASTER_BOUNCER_RUN is an env toggle; the isChangeMerged() check is
      // redundant inside this branch but kept for safety.
      if (env.MASTER_BOUNCER_RUN == '1' && !configuration.isChangeMerged()) {
        credentials.withMasterBouncerCredentials {
          sh 'build/new-jenkins/linters/run-master-bouncer.sh'
        }
      }
    }
  }
}

echo 'adding Consumer Smoke Test'
extendedStage('Consumer Smoke Test').handler(buildSummaryReport).queue(stages) {
  sh 'build/new-jenkins/consumer-smoke-test.sh'
}

echo 'adding Vendored Gems'
extendedStage('Vendored Gems')
  .handler(buildSummaryReport)
  .queue(stages, jobName: '/Canvas/test-suites/vendored-gems', buildParameters: buildParameters + [
    string(name: 'CASSANDRA_IMAGE_TAG', value: "${env.CASSANDRA_IMAGE_TAG}"),
    string(name: 'DYNAMODB_IMAGE_TAG', value: "${env.DYNAMODB_IMAGE_TAG}"),
    string(name: 'POSTGRES_IMAGE_TAG', value: "${env.POSTGRES_IMAGE_TAG}"),
  ])

// Builds and pushes the karma runner image; downstream JS suites wait on this
// stage (see the "Javascript (Waiting for Dependencies)" stage below).
extendedStage(JS_BUILD_IMAGE_STAGE).handler(buildSummaryReport).queue(stages) {
  credentials.withStarlordCredentials {
    try {
      // Post-merge builds save into the merge-scope cache; patchset builds
      // save into the build scope but may still load from the merge scope.
      def cacheScope = configuration.isChangeMerged() ? env.IMAGE_CACHE_MERGE_SCOPE : env.IMAGE_CACHE_BUILD_SCOPE

      withEnv([
        "CACHE_LOAD_SCOPE=${env.IMAGE_CACHE_MERGE_SCOPE}",
        "CACHE_LOAD_FALLBACK_SCOPE=${env.IMAGE_CACHE_BUILD_SCOPE}",
        "CACHE_SAVE_SCOPE=${cacheScope}",
        "KARMA_BUILDER_PREFIX=${env.KARMA_BUILDER_PREFIX}",
        "PATCHSET_TAG=${env.PATCHSET_TAG}",
        "RAILS_LOAD_ALL_LOCALES=${getRailsLoadAllLocales()}",
        "WEBPACK_BUILDER_IMAGE=${env.WEBPACK_BUILDER_IMAGE}",
      ]) {
        sh "./build/new-jenkins/js/docker-build.sh $KARMA_RUNNER_IMAGE"
      }

      sh """
      ./build/new-jenkins/docker-with-flakey-network-protection.sh push $KARMA_RUNNER_IMAGE
      ./build/new-jenkins/docker-with-flakey-network-protection.sh push $KARMA_BUILDER_PREFIX
      """
    } catch (e) {
      // Collects debug output / uploads the failed image before rethrowing.
      handleDockerBuildFailure(KARMA_RUNNER_IMAGE, e)
    }
  }
}

echo 'adding Contract Tests'
extendedStage('Contract Tests')
  .handler(buildSummaryReport)
  .queue(stages, jobName: '/Canvas/test-suites/contract-tests', buildParameters: buildParameters + [
    string(name: 'CASSANDRA_IMAGE_TAG', value: "${env.CASSANDRA_IMAGE_TAG}"),
    string(name: 'DYNAMODB_IMAGE_TAG', value: "${env.DYNAMODB_IMAGE_TAG}"),
    string(name: 'POSTGRES_IMAGE_TAG', value: "${env.POSTGRES_IMAGE_TAG}"),
  ])

// Only run the CDC schema check when the patchset actually contains migrations
// (the helper script exits 0 in that case).
if (sh(script: 'build/new-jenkins/check-for-migrations.sh', returnStatus: true) == 0) {
  echo 'adding CDC Schema check'
  extendedStage('CDC Schema Check')
    .handler(buildSummaryReport)
    .queue(stages, jobName: '/Canvas/cdc-event-transformer-master', buildParameters: buildParameters + [
      string(name: 'CANVAS_LMS_IMAGE_PATH', value: "${env.PATCHSET_TAG}"),
    ])
}
else {
  echo 'no migrations added, skipping CDC Schema check'
}

// Pre-merge only: run the flakey spec catcher when specs changed (script exits
// 0) or when forced via build configuration. Note: '${WORKSPACE}' is expanded
// by the shell, not Groovy — single quotes are intentional.
if (
  !configuration.isChangeMerged() &&
  (
    dir(env.LOCAL_WORKDIR) { (sh(script: '${WORKSPACE}/build/new-jenkins/spec-changes.sh', returnStatus: true) == 0) } ||
    configuration.forceFailureFSC() == '1'
  )
) {
  echo 'adding Flakey Spec Catcher'
  extendedStage('Flakey Spec Catcher')
    .handler(buildSummaryReport)
    .queue(stages, jobName: '/Canvas/test-suites/flakey-spec-catcher', buildParameters: buildParameters + [
      string(name: 'CASSANDRA_IMAGE_TAG', value: "${env.CASSANDRA_IMAGE_TAG}"),
      string(name: 'DYNAMODB_IMAGE_TAG', value: "${env.DYNAMODB_IMAGE_TAG}"),
      string(name: 'POSTGRES_IMAGE_TAG', value: "${env.POSTGRES_IMAGE_TAG}"),
    ])
}

// Flakey spec catcher using the dir step above will create this @tmp file, we need to remove it
// https://issues.jenkins.io/browse/JENKINS-52750
if (!configuration.isChangeMerged() && env.GERRIT_PROJECT != "canvas-lms") {
  sh "rm -vrf $LOCAL_WORKDIR@tmp"
}

// Only rebuild the local docker dev environment when files it depends on
// (dockerDevFiles, defined near the top of this file) changed in this commit.
if (env.GERRIT_PROJECT == 'canvas-lms' && git.changedFiles(dockerDevFiles, 'HEAD^')) {
  echo 'adding Local Docker Dev Build'
  extendedStage('Local Docker Dev Build')
    .handler(buildSummaryReport)
    .queue(stages, jobName: '/Canvas/test-suites/local-docker-dev-smoke', buildParameters: buildParameters)
}

// Post-merge only: snyk dependency scan. Failures mark the stage UNSTABLE
// without failing the build. Falls back to Gemfile.lock.next when the primary
// lockfile is absent from the image.
if (configuration.isChangeMerged()) {
  extendedStage('Dependency Check').handler(buildSummaryReport).queue(stages) {
    catchError (buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
      try {
        snyk("canvas-lms:ruby", "Gemfile.lock", "$PATCHSET_TAG")
      }
      catch (err) {
        if (err.toString().contains('Gemfile.lock does not exist')) {
          snyk("canvas-lms:ruby", "Gemfile.lock.next", "$PATCHSET_TAG")
        } else {
          throw err
        }
      }
    }
  }
}

// Make build scripts available to downstream nodes, add the rspec/selenium
// test distributions, then run everything queued above in parallel.
distribution.stashBuildScripts()

distribution.addRSpecSuites(stages)
distribution.addSeleniumSuites(stages)

parallel(stages)
}
}
}
}
// JS suites need the karma runner image, so this stage blocks until the
// JS_BUILD_IMAGE_STAGE builder has published it, then fans out the three
// karma-based suites (jest / coffee / karma) against the same downstream job.
// Stray git-blame timestamp lines that had been interleaved here (invalid
// Groovy) have been removed.
extendedStage("Javascript (Waiting for Dependencies)").waitsFor(JS_BUILD_IMAGE_STAGE, 'Builder').queue(rootStages) {
  def nestedStages = [:]

  echo 'adding Javascript (Jest)'
  extendedStage('Javascript (Jest)')
    .handler(buildSummaryReport)
    .queue(nestedStages, jobName: '/Canvas/test-suites/JS', buildParameters: buildParameters + [
      string(name: 'KARMA_RUNNER_IMAGE', value: env.KARMA_RUNNER_IMAGE),
      string(name: 'TEST_SUITE', value: "jest"),
    ])

  echo 'adding Javascript (Coffeescript)'
  extendedStage('Javascript (Coffeescript)')
    .handler(buildSummaryReport)
    .queue(nestedStages, jobName: '/Canvas/test-suites/JS', buildParameters: buildParameters + [
      string(name: 'KARMA_RUNNER_IMAGE', value: env.KARMA_RUNNER_IMAGE),
      string(name: 'TEST_SUITE', value: "coffee"),
    ])

  echo 'adding Javascript (Karma)'
  extendedStage('Javascript (Karma)')
    .handler(buildSummaryReport)
    .queue(nestedStages, jobName: '/Canvas/test-suites/JS', buildParameters: buildParameters + [
      string(name: 'KARMA_RUNNER_IMAGE', value: env.KARMA_RUNNER_IMAGE),
      string(name: 'TEST_SUITE', value: "karma"),
    ])

  parallel(nestedStages)
}

// Run all queued root stages (including the JS waiter above) in parallel.
parallel(rootStages)
2020-08-22 03:42:49 +08:00
}//script
}//steps
}//environment
}//stages
}//pipeline