#!/usr/bin/env groovy

/*
 * Copyright (C) 2019 - present Instructure, Inc.
 *
 * This file is part of Canvas.
 *
 * Canvas is free software: you can redistribute it and/or modify it under
 * the terms of the GNU Affero General Public License as published by the Free
 * Software Foundation, version 3 of the License.
 *
 * Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
 * details.
 *
 * You should have received a copy of the GNU Affero General Public License along
 * with this program. If not, see <http://www.gnu.org/licenses/>.
 */

import org.jenkinsci.plugins.workflow.support.steps.build.DownstreamFailureCause
import org.jenkinsci.plugins.workflow.steps.FlowInterruptedException

def buildParameters = [
  string(name: 'GERRIT_REFSPEC', value: "${env.GERRIT_REFSPEC}"),
  string(name: 'GERRIT_EVENT_TYPE', value: "${env.GERRIT_EVENT_TYPE}"),
  string(name: 'GERRIT_PROJECT', value: "${env.GERRIT_PROJECT}"),
  string(name: 'GERRIT_BRANCH', value: "${env.GERRIT_BRANCH}"),
  string(name: 'GERRIT_CHANGE_NUMBER', value: "${env.GERRIT_CHANGE_NUMBER}"),
  string(name: 'GERRIT_PATCHSET_NUMBER', value: "${env.GERRIT_PATCHSET_NUMBER}"),
  string(name: 'GERRIT_EVENT_ACCOUNT_NAME', value: "${env.GERRIT_EVENT_ACCOUNT_NAME}"),
  string(name: 'GERRIT_EVENT_ACCOUNT_EMAIL', value: "${env.GERRIT_EVENT_ACCOUNT_EMAIL}"),
  string(name: 'GERRIT_CHANGE_COMMIT_MESSAGE', value: "${env.GERRIT_CHANGE_COMMIT_MESSAGE}"),
  string(name: 'GERRIT_HOST', value: "${env.GERRIT_HOST}"),
  string(name: 'GERGICH_PUBLISH', value: "${env.GERGICH_PUBLISH}"),
  string(name: 'MASTER_BOUNCER_RUN', value: "${env.MASTER_BOUNCER_RUN}")
]
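
// These Gerrit trigger values are forwarded to every downstream test-suite job
// triggered below; the Setup stage appends build-specific parameters
// (PATCHSET_TAG, POSTGRES, RUBY, and optionally CANVAS_LMS_REFSPEC) to this same list.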

library "canvas-builds-library"

def runDatadogMetric(name, body) {
  def dd = load('build/new-jenkins/groovy/datadog.groovy')
  dd.runDataDogForMetric(name, body)
}
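
// Minimal usage sketch (assuming datadog.groovy's runDataDogForMetric wraps the
// closure and reports its timing under the given metric name):
//   runDatadogMetric('SomeStage') { /* work to be measured */ }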

def skipIfPreviouslySuccessful(name, block) {
  runDatadogMetric(name) {
    def successes = load('build/new-jenkins/groovy/successes.groovy')
    successes.skipIfPreviouslySuccessful(name, true, block)
  }
}
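
// Wraps a stage body so it is skipped when a success for `name` was already
// recorded for this build (see successes.groovy), reporting the stage timing
// to Datadog either way.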

def wrapBuildExecution(jobName, parameters, propagate, urlExtra) {
  try {
    build(job: jobName, parameters: parameters, propagate: propagate)
  }
  catch(FlowInterruptedException ex) {
    // if it's this type, then that means it's a build failure.
    // other reasons can be the user cancelling or jenkins aborting, etc...
    def failure = ex.causes.find { it instanceof DownstreamFailureCause }
    if (failure != null) {
      def downstream = failure.getDownstreamBuild()
      def url = downstream.getAbsoluteUrl() + urlExtra
      load('build/new-jenkins/groovy/reports.groovy').appendFailMessageReport(jobName, url)
    }
    throw ex
  }
}
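
// When the downstream job fails, a link to the failed downstream build (plus
// `urlExtra`, e.g. "testReport") is added to the failure report via reports.groovy
// before the interruption is re-thrown to the parent build.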

// if the build never starts or gets into a node block, then we
// can never load a file, and a very noisy/confusing error is thrown.
def ignoreBuildNeverStartedError(block) {
  try {
    block()
  }
  catch (org.jenkinsci.plugins.workflow.steps.MissingContextVariableException ex) {
    if (!ex.message.startsWith('Required context class hudson.FilePath is missing')) {
      throw ex
    }
    else {
      echo "ignored MissingContextVariableException: \n${ex.message}"
    }
    // we can ignore this very noisy error
  }
}

// return false if the current patchset tag doesn't match the
// mainline publishable tag. i.e. ignore ruby-2.6/pg-12 upgrade builds
def isPatchsetPublishable() {
  env.PATCHSET_TAG == env.PUBLISHABLE_TAG
}

def isPatchsetSlackableOnFailure() {
  env.SLACK_MESSAGE_ON_FAILURE == 'true' && env.GERRIT_EVENT_TYPE == 'change-merged'
}
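
// Only change-merged failures ping Slack, and only when SLACK_MESSAGE_ON_FAILURE
// is enabled for the job; see the post { failure { ... } } block at the bottom.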

pipeline {
  agent { label 'canvas-docker' }
  options {
    ansiColor('xterm')
    timestamps()
  }

  environment {
    GERRIT_PORT = '29418'
    GERRIT_URL = "$GERRIT_HOST:$GERRIT_PORT"
    NAME = imageTagVersion()
    CANVAS_LMS_IMAGE = "$DOCKER_REGISTRY_FQDN/jenkins/canvas-lms"
    BUILD_REGISTRY_FQDN = configuration.buildRegistryFQDN()
    BUILD_IMAGE = "$BUILD_REGISTRY_FQDN/jenkins/canvas-lms"
    POSTGRES = configuration.postgres()
    POSTGRES_CLIENT = configuration.postgresClient()

    // e.g. postgres-9.5-ruby-2.6
    TAG_SUFFIX = "postgres-$POSTGRES-ruby-$RUBY"

    // this is found in the PUBLISHABLE_TAG_SUFFIX config file on jenkins
    PUBLISHABLE_TAG_SUFFIX = configuration.publishableTagSuffixNew()

    // e.g. canvas-lms:01.123456.78-postgres-12-ruby-2.6
    PATCHSET_TAG = "$BUILD_IMAGE:$NAME-$TAG_SUFFIX"

    // e.g. canvas-lms:01.123456.78-postgres-9.5-ruby-2.6
    PUBLISHABLE_TAG = "$BUILD_IMAGE:$NAME-$PUBLISHABLE_TAG_SUFFIX"

    // e.g. canvas-lms:master when not on another branch
    MERGE_TAG = "$CANVAS_LMS_IMAGE:$GERRIT_BRANCH"

    // e.g. canvas-lms:01.123456.78; this is for consumers like Portal 2 who want to build a patchset
    EXTERNAL_TAG = "$CANVAS_LMS_IMAGE:$NAME"

    ALPINE_MIRROR = configuration.alpineMirror()
    NODE = configuration.node()
    RUBY = configuration.ruby() // RUBY_VERSION is a reserved keyword for ruby installs
    RUBY_IMAGE = "$BUILD_IMAGE-ruby"
    RUBY_MERGE_IMAGE = "$RUBY_IMAGE:$GERRIT_BRANCH"
    RUBY_PATCHSET_IMAGE = "$RUBY_IMAGE:$NAME-$TAG_SUFFIX"

    CASSANDRA_IMAGE_TAG = imageTag.cassandra()
    DYNAMODB_IMAGE_TAG = imageTag.dynamodb()
    POSTGRES_IMAGE_TAG = imageTag.postgres()
  }

  stages {
    stage('Setup') {
      steps {
        timeout(time: 5) {
          script {
            runDatadogMetric("Setup") {
              cleanAndSetup()

              buildParameters += string(name: 'PATCHSET_TAG', value: "${env.PATCHSET_TAG}")
              buildParameters += string(name: 'POSTGRES', value: "${env.POSTGRES}")
              buildParameters += string(name: 'RUBY', value: "${env.RUBY}")
              if (env.CANVAS_LMS_REFSPEC) {
                // the plugin builds require the canvas lms refspec to be different, so only
                // set this refspec if the main build is requesting it to be set.
                // NOTE: this is only set in the main-from-plugin build, so main-canvas won't run this.
                buildParameters += string(name: 'CANVAS_LMS_REFSPEC', value: env.CANVAS_LMS_REFSPEC)
              }

              pullGerritRepo('gerrit_builder', 'master', '.')
              gems = readFile('gerrit_builder/canvas-lms/config/plugins_list').split()
              echo "Plugin list: ${gems}"
              // fetch plugins
              gems.each { gem ->
                if (env.GERRIT_PROJECT == gem) {
                  /* this is the commit we're testing */
                  pullGerritRepo(gem, env.GERRIT_REFSPEC, 'gems/plugins')
                } else {
                  pullGerritRepo(gem, 'master', 'gems/plugins')
                }
              }
              pullGerritRepo("qti_migration_tool", "master", "vendor")

              sh 'mv -v gerrit_builder/canvas-lms/config/* config/'
              sh 'rm -v config/cache_store.yml'
              sh 'rm -vr gerrit_builder'
              sh 'rm -v config/database.yml'
              sh 'rm -v config/security.yml'
              sh 'rm -v config/selenium.yml'
              sh 'rm -v config/file_store.yml'
              sh 'cp -v docker-compose/config/selenium.yml config/'
              sh 'cp -vR docker-compose/config/new-jenkins/* config/'
              sh 'cp -v config/delayed_jobs.yml.example config/delayed_jobs.yml'
              sh 'cp -v config/domain.yml.example config/domain.yml'
              sh 'cp -v config/external_migration.yml.example config/external_migration.yml'
              sh 'cp -v config/outgoing_mail.yml.example config/outgoing_mail.yml'
            }
          }
        }
      }
    }

    stage('Rebase') {
      when { expression { env.GERRIT_EVENT_TYPE == 'patchset-created' && env.GERRIT_PROJECT == 'canvas-lms' } }
      steps {
        timeout(time: 2) {
          script {
            runDatadogMetric("Rebase") {
              credentials.withGerritCredentials({ ->
                sh '''#!/bin/bash
                  set -o errexit -o errtrace -o nounset -o pipefail -o xtrace

                  GIT_SSH_COMMAND='ssh -i \"$SSH_KEY_PATH\" -l \"$SSH_USERNAME\"' \
                    git fetch origin $GERRIT_BRANCH:origin/$GERRIT_BRANCH

                  git config user.name "$GERRIT_EVENT_ACCOUNT_NAME"
                  git config user.email "$GERRIT_EVENT_ACCOUNT_EMAIL"

                  # this helps current build issues where cleanup is needed before proceeding.
                  # however the later git rebase --abort should be enough once this has
                  # been on jenkins for long enough to hit all nodes, maybe a couple days?
                  if [ -d .git/rebase-merge ]; then
                    echo "A previous build's rebase failed and the build exited without cleaning up. Aborting the previous rebase now..."
                    git rebase --abort
                    git checkout $GERRIT_REFSPEC
                  fi

                  # store exit_status inline to ensure the script doesn't exit here on failures
                  git rebase --preserve-merges origin/$GERRIT_BRANCH; exit_status=$?
                  if [ $exit_status != 0 ]; then
                    echo "Warning: Rebase couldn't resolve changes automatically, please resolve these conflicts locally."
                    git rebase --abort
                    exit $exit_status
                  fi
                '''
              })
            }
          }
        }
      }
    }
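
    // Docker image build strategy: the $MERGE_TAG image is pulled first (unless
    // 'skip-cache' is set), presumably so docker-build.sh can reuse its layers,
    // then the patchset-tagged images are pushed to the build registry for the
    // downstream test jobs.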

    stage('Build Docker Image') {
      steps {
        timeout(time: 30) {
          skipIfPreviouslySuccessful('docker-build-and-push') {
            script {
              if (configuration.getBoolean('skip-docker-build')) {
                sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $MERGE_TAG'
                sh 'docker tag $MERGE_TAG $PATCHSET_TAG'
              } else {
                if (!configuration.getBoolean('skip-cache')) {
                  sh "./build/new-jenkins/docker-with-flakey-network-protection.sh pull $MERGE_TAG || true"
                }
                sh 'build/new-jenkins/docker-build.sh'
                sh "./build/new-jenkins/docker-with-flakey-network-protection.sh push $RUBY_PATCHSET_IMAGE"
              }
              sh "./build/new-jenkins/docker-with-flakey-network-protection.sh push $PATCHSET_TAG"
              if (isPatchsetPublishable()) {
                sh 'docker tag $PATCHSET_TAG $EXTERNAL_TAG'
                sh './build/new-jenkins/docker-with-flakey-network-protection.sh push $EXTERNAL_TAG'
              }
            }
          }
        }
      }
    }

    stage('Parallel Run Tests') {
      steps {
        script {
          def stages = [:]
          if (env.GERRIT_EVENT_TYPE != 'change-merged' && env.GERRIT_PROJECT == 'canvas-lms') {
            echo 'adding Linters'
            stages['Linters'] = {
              skipIfPreviouslySuccessful("linters") {
                credentials.withGerritCredentials {
                  sh 'build/new-jenkins/linters/run-gergich.sh'
                }
                if (env.MASTER_BOUNCER_RUN == '1' && env.GERRIT_EVENT_TYPE == 'patchset-created') {
                  credentials.withMasterBouncerCredentials {
                    sh 'build/new-jenkins/linters/run-master-bouncer.sh'
                  }
                }
              }
            }
          }

          echo 'adding Consumer Smoke Test'
          stages['Consumer Smoke Test'] = {
            skipIfPreviouslySuccessful("consumer-smoke-test") {
              sh 'build/new-jenkins/consumer-smoke-test.sh'
            }
          }

          echo 'adding Vendored Gems'
          stages['Vendored Gems'] = {
            skipIfPreviouslySuccessful("vendored-gems") {
              wrapBuildExecution('/Canvas/test-suites/vendored-gems', buildParameters + [
                string(name: 'CASSANDRA_IMAGE_TAG', value: "${env.CASSANDRA_IMAGE_TAG}"),
                string(name: 'DYNAMODB_IMAGE_TAG', value: "${env.DYNAMODB_IMAGE_TAG}"),
                string(name: 'POSTGRES_IMAGE_TAG', value: "${env.POSTGRES_IMAGE_TAG}"),
              ], true, "")
            }
          }

          echo 'adding Javascript (Jest)'
          stages['Javascript (Jest)'] = {
            skipIfPreviouslySuccessful("javascript_jest") {
              wrapBuildExecution('/Canvas/test-suites/JS', buildParameters + [
                string(name: 'TEST_SUITE', value: "jest"),
              ], true, "testReport")
            }
          }

          echo 'adding Javascript (Karma)'
          stages['Javascript (Karma)'] = {
            skipIfPreviouslySuccessful("javascript_karma") {
              wrapBuildExecution('/Canvas/test-suites/JS', buildParameters + [
                string(name: 'TEST_SUITE', value: "karma"),
              ], true, "testReport")
            }
          }

          echo 'adding Contract Tests'
          stages['Contract Tests'] = {
            skipIfPreviouslySuccessful("contract-tests") {
              wrapBuildExecution('/Canvas/test-suites/contract-tests', buildParameters + [
                string(name: 'CASSANDRA_IMAGE_TAG', value: "${env.CASSANDRA_IMAGE_TAG}"),
                string(name: 'DYNAMODB_IMAGE_TAG', value: "${env.DYNAMODB_IMAGE_TAG}"),
                string(name: 'POSTGRES_IMAGE_TAG', value: "${env.POSTGRES_IMAGE_TAG}"),
              ], true, "")
            }
          }

          if (env.GERRIT_EVENT_TYPE != 'change-merged') {
            echo 'adding Flakey Spec Catcher'
            stages['Flakey Spec Catcher'] = {
              skipIfPreviouslySuccessful("flakey-spec-catcher") {
                def propagate = configuration.fscPropagate()
                echo "fsc propagation: $propagate"
                wrapBuildExecution('/Canvas/test-suites/flakey-spec-catcher', buildParameters + [
                  string(name: 'CASSANDRA_IMAGE_TAG', value: "${env.CASSANDRA_IMAGE_TAG}"),
                  string(name: 'DYNAMODB_IMAGE_TAG', value: "${env.DYNAMODB_IMAGE_TAG}"),
                  string(name: 'POSTGRES_IMAGE_TAG', value: "${env.POSTGRES_IMAGE_TAG}"),
                ], propagate, "")
              }
            }
          }

          // // keep this around in case there are changes to the subbuilds that need to happen
          // // and you have no other way to test it except by running a test build.
          // stages['Test Subbuild'] = {
          //   skipIfPreviouslySuccessful("test-subbuild") {
          //     build(job: '/Canvas/proofs-of-concept/test-subbuild', parameters: buildParameters)
          //   }
          // }

          // // Don't run these on all patch sets until we have them ready to report results.
          // // Uncomment stage to run when developing.
          // stages['Xbrowser'] = {
          //   skipIfPreviouslySuccessful("xbrowser") {
          //     build(job: '/Canvas/proofs-of-concept/xbrowser', propagate: false, parameters: buildParameters)
          //   }
          // }
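
          // distribution.groovy (loaded below) is expected to add one stage per
          // RSpec/Selenium shard to the `stages` map before everything runs in parallel.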
          def distribution = load 'build/new-jenkins/groovy/distribution.groovy'
          distribution.stashBuildScripts()

          distribution.addRSpecSuites(stages)
          distribution.addSeleniumSuites(stages)

          parallel(stages)
        }
      }
    }

    stage('Publish Image on Merge') {
      when {
        allOf {
          expression { isPatchsetPublishable() }
          expression { env.GERRIT_EVENT_TYPE == 'change-merged' }
        }
      }
      steps {
        timeout(time: 10) {
          script {
            runDatadogMetric("publishImageOnMerge") {
              // Retriggers won't have an image to tag/push, so pull the images
              // if they don't already exist locally ('docker images -q' prints
              // nothing when the image is missing).
              if (!sh(script: 'docker images -q $RUBY_PATCHSET_IMAGE', returnStdout: true).trim()) {
                sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $RUBY_PATCHSET_IMAGE'
              }

              if (!sh(script: 'docker images -q $PATCHSET_TAG', returnStdout: true).trim()) {
                sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $PATCHSET_TAG'
              }

              // publish canvas-lms:$GERRIT_BRANCH (i.e. canvas-lms:master)
              sh 'docker tag $PUBLISHABLE_TAG $MERGE_TAG'
              sh 'docker tag $RUBY_PATCHSET_IMAGE $RUBY_MERGE_IMAGE'
              // push *all* canvas-lms images (i.e. all canvas-lms prefixed tags)
              sh './build/new-jenkins/docker-with-flakey-network-protection.sh push $MERGE_TAG'
              sh './build/new-jenkins/docker-with-flakey-network-protection.sh push $RUBY_MERGE_IMAGE'
            }
          }
        }
      }
    }

    stage('Dependency Check') {
      when { expression { env.GERRIT_EVENT_TYPE == 'change-merged' } }
      steps {
        script {
          runDatadogMetric("dependencyCheck") {
            def reports = load 'build/new-jenkins/groovy/reports.groovy'
            reports.snykCheckDependencies("$PATCHSET_TAG", "/usr/src/app/")
          }
        }
      }
    }

    stage('Mark Build as Successful') {
      steps {
        script {
          runDatadogMetric("markBuildAsSuccessful") {
            def successes = load 'build/new-jenkins/groovy/successes.groovy'
            successes.markBuildAsSuccessful()
          }
        }
      }
    }
  }

  post {
    failure {
      script {
        if (isPatchsetSlackableOnFailure()) {
          def branchSegment = env.GERRIT_BRANCH ? "[$env.GERRIT_BRANCH]" : ''
          def authorSlackId = env.GERRIT_EVENT_ACCOUNT_EMAIL ? slackUserIdFromEmail(email: env.GERRIT_EVENT_ACCOUNT_EMAIL, botUser: true, tokenCredentialId: 'slack-user-id-lookup') : ''
          def authorSlackMsg = authorSlackId ? "<@$authorSlackId>" : env.GERRIT_EVENT_ACCOUNT_NAME
          def authorSegment = authorSlackMsg ? "Patchset by ${authorSlackMsg}. " : ''
          slackSend(
            channel: '#canvas_builds',
            color: 'danger',
            message: "${branchSegment}${env.JOB_NAME} failed on merge. ${authorSegment}(<${env.BUILD_URL}|${env.BUILD_NUMBER}>)"
          )
        }
      }
    }

    always {
      script {
        ignoreBuildNeverStartedError {
          def rspec = load 'build/new-jenkins/groovy/rspec.groovy'
          rspec.uploadJunitReports()
          rspec.uploadSeleniumFailures()
          rspec.uploadRSpecFailures()
          load('build/new-jenkins/groovy/reports.groovy').sendFailureMessageIfPresent()
          def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
          splunk.upload([splunk.eventForBuildDuration(currentBuild.duration)])
        }
      }
    }

    cleanup {
      ignoreBuildNeverStartedError {
        execute 'bash/docker-cleanup.sh --allow-failure'
      }
    }
  }
}