remove deprecated datadog / splunk logic
refs DE-150

Change-Id: I07ee65b349e64c6d4279929fbbfb2fa71042330b
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/243361
Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com>
Reviewed-by: James Butters <jbutters@instructure.com>
QA-Review: James Butters <jbutters@instructure.com>
Product-Review: James Butters <jbutters@instructure.com>
parent d0df807673
commit dc039e2567
@@ -37,16 +37,9 @@ def buildParameters = [
 library "canvas-builds-library"
 
-def runDatadogMetric(name, body) {
-  def dd = load('build/new-jenkins/groovy/datadog.groovy')
-  dd.runDataDogForMetric(name,body)
-}
-
 def skipIfPreviouslySuccessful(name, block) {
-  runDatadogMetric(name) {
-    def successes = load('build/new-jenkins/groovy/successes.groovy')
-    successes.skipIfPreviouslySuccessful(name, true, block)
-  }
+  def successes = load('build/new-jenkins/groovy/successes.groovy')
+  successes.skipIfPreviouslySuccessful(name, true, block)
 }
 
 def wrapBuildExecution(jobName, parameters, propagate, urlExtra) {
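With the Datadog wrapper gone, skipIfPreviouslySuccessful delegates straight to successes.groovy. A minimal sketch of how a stage invokes the helper (the stage name and body are illustrative, not taken from this change):

    skipIfPreviouslySuccessful('linters') {
      // the block only runs when this stage has not already passed for the patchset
      sh 'build/new-jenkins/linters/run-gergich.sh'
    }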
@@ -145,47 +138,45 @@ pipeline {
       steps {
         timeout(time: 5) {
           script {
-            runDatadogMetric("Setup") {
-              cleanAndSetup()
-
-              buildParameters += string(name: 'PATCHSET_TAG', value: "${env.PATCHSET_TAG}")
-              buildParameters += string(name: 'POSTGRES', value: "${env.POSTGRES}")
-              buildParameters += string(name: 'RUBY', value: "${env.RUBY}")
-              if (env.CANVAS_LMS_REFSPEC) {
-                // the plugin builds require the canvas lms refspec to be different. so only
-                // set this refspec if the main build is requesting it to be set.
-                // NOTE: this is only being set in main-from-plugin build. so main-canvas wont run this.
-                buildParameters += string(name: 'CANVAS_LMS_REFSPEC', value: env.CANVAS_LMS_REFSPEC)
-              }
-
-              pullGerritRepo('gerrit_builder', 'master', '.')
-              gems = readFile('gerrit_builder/canvas-lms/config/plugins_list').split()
-              echo "Plugin list: ${gems}"
-              // fetch plugins
-              gems.each { gem ->
-                if (env.GERRIT_PROJECT == gem) {
-                  /* this is the commit we're testing */
-                  pullGerritRepo(gem, env.GERRIT_REFSPEC, 'gems/plugins')
-                } else {
-                  pullGerritRepo(gem, 'master', 'gems/plugins')
-                }
-              }
-              pullGerritRepo("qti_migration_tool", "master", "vendor")
-
-              sh 'mv -v gerrit_builder/canvas-lms/config/* config/'
-              sh 'rm -v config/cache_store.yml'
-              sh 'rm -vr gerrit_builder'
-              sh 'rm -v config/database.yml'
-              sh 'rm -v config/security.yml'
-              sh 'rm -v config/selenium.yml'
-              sh 'rm -v config/file_store.yml'
-              sh 'cp -v docker-compose/config/selenium.yml config/'
-              sh 'cp -vR docker-compose/config/new-jenkins/* config/'
-              sh 'cp -v config/delayed_jobs.yml.example config/delayed_jobs.yml'
-              sh 'cp -v config/domain.yml.example config/domain.yml'
-              sh 'cp -v config/external_migration.yml.example config/external_migration.yml'
-              sh 'cp -v config/outgoing_mail.yml.example config/outgoing_mail.yml'
-            }
+            cleanAndSetup()
+
+            buildParameters += string(name: 'PATCHSET_TAG', value: "${env.PATCHSET_TAG}")
+            buildParameters += string(name: 'POSTGRES', value: "${env.POSTGRES}")
+            buildParameters += string(name: 'RUBY', value: "${env.RUBY}")
+            if (env.CANVAS_LMS_REFSPEC) {
+              // the plugin builds require the canvas lms refspec to be different. so only
+              // set this refspec if the main build is requesting it to be set.
+              // NOTE: this is only being set in main-from-plugin build. so main-canvas wont run this.
+              buildParameters += string(name: 'CANVAS_LMS_REFSPEC', value: env.CANVAS_LMS_REFSPEC)
+            }
+
+            pullGerritRepo('gerrit_builder', 'master', '.')
+            gems = readFile('gerrit_builder/canvas-lms/config/plugins_list').split()
+            echo "Plugin list: ${gems}"
+            // fetch plugins
+            gems.each { gem ->
+              if (env.GERRIT_PROJECT == gem) {
+                /* this is the commit we're testing */
+                pullGerritRepo(gem, env.GERRIT_REFSPEC, 'gems/plugins')
+              } else {
+                pullGerritRepo(gem, 'master', 'gems/plugins')
+              }
+            }
+            pullGerritRepo("qti_migration_tool", "master", "vendor")
+
+            sh 'mv -v gerrit_builder/canvas-lms/config/* config/'
+            sh 'rm -v config/cache_store.yml'
+            sh 'rm -vr gerrit_builder'
+            sh 'rm -v config/database.yml'
+            sh 'rm -v config/security.yml'
+            sh 'rm -v config/selenium.yml'
+            sh 'rm -v config/file_store.yml'
+            sh 'cp -v docker-compose/config/selenium.yml config/'
+            sh 'cp -vR docker-compose/config/new-jenkins/* config/'
+            sh 'cp -v config/delayed_jobs.yml.example config/delayed_jobs.yml'
+            sh 'cp -v config/domain.yml.example config/domain.yml'
+            sh 'cp -v config/external_migration.yml.example config/external_migration.yml'
+            sh 'cp -v config/outgoing_mail.yml.example config/outgoing_mail.yml'
           }
         }
       }
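The Setup stage only accumulates buildParameters; they are consumed later when the pipeline triggers downstream jobs. A hedged sketch of such a call, reusing the wrapBuildExecution signature from the first hunk (the job name and the extra parameter are illustrative):

    wrapBuildExecution('test-suites/JS', buildParameters + string(name: 'TEST_SUITE', value: 'jest'), true, '')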
@@ -196,36 +187,34 @@ pipeline {
       steps {
         timeout(time: 2) {
           script {
-            runDatadogMetric("Rebase") {
-              credentials.withGerritCredentials({ ->
-                sh '''#!/bin/bash
-                  set -o errexit -o errtrace -o nounset -o pipefail -o xtrace
-
-                  GIT_SSH_COMMAND='ssh -i \"$SSH_KEY_PATH\" -l \"$SSH_USERNAME\"' \
-                    git fetch origin $GERRIT_BRANCH:origin/$GERRIT_BRANCH
-
-                  git config user.name "$GERRIT_EVENT_ACCOUNT_NAME"
-                  git config user.email "$GERRIT_EVENT_ACCOUNT_EMAIL"
-
-                  # this helps current build issues where cleanup is needed before proceeding.
-                  # however the later git rebase --abort should be enough once this has
-                  # been on jenkins for long enough to hit all nodes, maybe a couple days?
-                  if [ -d .git/rebase-merge ]; then
-                    echo "A previous build's rebase failed and the build exited without cleaning up. Aborting the previous rebase now..."
-                    git rebase --abort
-                    git checkout $GERRIT_REFSPEC
-                  fi
-
-                  # store exit_status inline to ensures the script doesn't exit here on failures
-                  git rebase --preserve-merges origin/$GERRIT_BRANCH; exit_status=$?
-                  if [ $exit_status != 0 ]; then
-                    echo "Warning: Rebase couldn't resolve changes automatically, please resolve these conflicts locally."
-                    git rebase --abort
-                    exit $exit_status
-                  fi
-                '''
-              })
-            }
+            credentials.withGerritCredentials({ ->
+              sh '''#!/bin/bash
+                set -o errexit -o errtrace -o nounset -o pipefail -o xtrace
+
+                GIT_SSH_COMMAND='ssh -i \"$SSH_KEY_PATH\" -l \"$SSH_USERNAME\"' \
+                  git fetch origin $GERRIT_BRANCH:origin/$GERRIT_BRANCH
+
+                git config user.name "$GERRIT_EVENT_ACCOUNT_NAME"
+                git config user.email "$GERRIT_EVENT_ACCOUNT_EMAIL"
+
+                # this helps current build issues where cleanup is needed before proceeding.
+                # however the later git rebase --abort should be enough once this has
+                # been on jenkins for long enough to hit all nodes, maybe a couple days?
+                if [ -d .git/rebase-merge ]; then
+                  echo "A previous build's rebase failed and the build exited without cleaning up. Aborting the previous rebase now..."
+                  git rebase --abort
+                  git checkout $GERRIT_REFSPEC
+                fi
+
+                # store exit_status inline to ensures the script doesn't exit here on failures
+                git rebase --preserve-merges origin/$GERRIT_BRANCH; exit_status=$?
+                if [ $exit_status != 0 ]; then
+                  echo "Warning: Rebase couldn't resolve changes automatically, please resolve these conflicts locally."
+                  git rebase --abort
+                  exit $exit_status
+                fi
+              '''
+            })
           }
         }
       }
@@ -376,25 +365,23 @@ pipeline {
       steps {
         timeout(time: 10) {
           script {
-            runDatadogMetric("publishImageOnMerge") {
-              // Retriggers won't have an image to tag/push, pull that
-              // image if doesn't exist. If image is not found it will
-              // return NULL
-              if (!sh (script: 'docker images -q $RUBY_PATCHSET_IMAGE')) {
-                sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $RUBY_PATCHSET_IMAGE'
-              }
-
-              if (!sh (script: 'docker images -q $PATCHSET_TAG')) {
-                sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $PATCHSET_TAG'
-              }
-
-              // publish canvas-lms:$GERRIT_BRANCH (i.e. canvas-lms:master)
-              sh 'docker tag $PUBLISHABLE_TAG $MERGE_TAG'
-              sh 'docker tag $RUBY_PATCHSET_IMAGE $RUBY_MERGE_IMAGE'
-              // push *all* canvas-lms images (i.e. all canvas-lms prefixed tags)
-              sh './build/new-jenkins/docker-with-flakey-network-protection.sh push $MERGE_TAG'
-              sh './build/new-jenkins/docker-with-flakey-network-protection.sh push $RUBY_MERGE_IMAGE'
-            }
+            // Retriggers won't have an image to tag/push, pull that
+            // image if doesn't exist. If image is not found it will
+            // return NULL
+            if (!sh (script: 'docker images -q $RUBY_PATCHSET_IMAGE')) {
+              sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $RUBY_PATCHSET_IMAGE'
+            }
+
+            if (!sh (script: 'docker images -q $PATCHSET_TAG')) {
+              sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $PATCHSET_TAG'
+            }
+
+            // publish canvas-lms:$GERRIT_BRANCH (i.e. canvas-lms:master)
+            sh 'docker tag $PUBLISHABLE_TAG $MERGE_TAG'
+            sh 'docker tag $RUBY_PATCHSET_IMAGE $RUBY_MERGE_IMAGE'
+            // push *all* canvas-lms images (i.e. all canvas-lms prefixed tags)
+            sh './build/new-jenkins/docker-with-flakey-network-protection.sh push $MERGE_TAG'
+            sh './build/new-jenkins/docker-with-flakey-network-protection.sh push $RUBY_MERGE_IMAGE'
           }
         }
       }
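The retrigger guard relies on docker images -q printing an image ID only when the image exists locally. A minimal sketch of the idiom, assuming the sh step is invoked with returnStdout: true (that flag is not visible in this diff):

    def imageId = sh(script: 'docker images -q $PATCHSET_TAG', returnStdout: true).trim()
    if (!imageId) { // an empty string is falsy in Groovy, so pull only when the image is missing
      sh './build/new-jenkins/docker-with-flakey-network-protection.sh pull $PATCHSET_TAG'
    }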
@@ -404,10 +391,8 @@
       when { expression { env.GERRIT_EVENT_TYPE == 'change-merged' } }
       steps {
         script {
-          runDatadogMetric("dependencyCheck") {
-            def reports = load 'build/new-jenkins/groovy/reports.groovy'
-            reports.snykCheckDependencies("$PATCHSET_TAG", "/usr/src/app/")
-          }
+          def reports = load 'build/new-jenkins/groovy/reports.groovy'
+          reports.snykCheckDependencies("$PATCHSET_TAG", "/usr/src/app/")
         }
       }
     }
@@ -415,10 +400,8 @@
     stage('Mark Build as Successful') {
       steps {
         script {
-          runDatadogMetric("markBuildAsSuccessful") {
-            def successes = load 'build/new-jenkins/groovy/successes.groovy'
-            successes.markBuildAsSuccessful()
-          }
+          def successes = load 'build/new-jenkins/groovy/successes.groovy'
+          successes.markBuildAsSuccessful()
         }
       }
     }
@@ -448,8 +431,6 @@
           rspec.uploadSeleniumFailures()
           rspec.uploadRSpecFailures()
           load('build/new-jenkins/groovy/reports.groovy').sendFailureMessageIfPresent()
-          def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
-          splunk.upload([splunk.eventForBuildDuration(currentBuild.duration)])
         }
       }
     }
build/new-jenkins/groovy/datadog.groovy (deleted)
@@ -1,61 +0,0 @@
-/*
- * Copyright (C) 2020 - present Instructure, Inc.
- *
- * This file is part of Canvas.
- *
- * Canvas is free software: you can redistribute it and/or modify it under
- * the terms of the GNU Affero General Public License as published by the Free
- * Software Foundation, version 3 of the License.
- *
- * Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
- * A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
- * details.
- *
- * You should have received a copy of the GNU Affero General Public License along
- * with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-import groovy.time.*
-
-// Datadog metric format is the following:
-// <METRIC_NAME>:<VALUE>|<TYPE>|@<SAMPLE_RATE>|#<TAG_KEY_1>:<TAG_VALUE_1>,<TAG_2>
-// We are just allowing counts for now to be simple.
-// Source https://docs.datadoghq.com/developers/dogstatsd/datagram_shell/?tab=metrics
-def hackyMetricSend(metric, value, tags) {
-  def metric_string = "${metric}:${value}|d|1|#${tags.join(',')}"
-  echo "sending metric: $metric_string"
-  def script = """#!/bin/bash
-    echo -n "$metric_string" > /dev/udp/localhost/8125
-  """
-  // exit code is captured in case we want upstream caller status correction
-  return sh(script: script, returnStatus: true)
-}
-
-def runDataDogForMetric(name, block) {
-  def timeStart = new Date()
-  try {
-    block.call()
-  }
-  finally {
-    def duration = TimeCategory.minus(new Date(), timeStart).toMilliseconds()
-    hackyMetricSend("jenkins.stage.elapsedTimeDist", duration, ["stage:${name}"])
-    def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
-    splunk.upload([splunk.eventForStageDuration(name, duration)])
-  }
-}
-
-def runDataDogForMetricWithExtraTags(name, extraTags, block) {
-  def timeStart = new Date()
-  try {
-    block.call()
-  }
-  finally {
-    def duration = TimeCategory.minus(new Date(), timeStart).toMilliseconds()
-    hackyMetricSend("jenkins.stage.elapsedTimeDist", duration, ["stage:${name}", extraTags].flatten())
-    def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
-    splunk.upload([splunk.eventForStageDuration(name, duration)])
-  }
-}
-
-return this
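For reference, the deleted helper timed a closure and wrote a DogStatsD distribution datagram to UDP localhost:8125. A worked example with an illustrative 5123 ms "Setup" stage:

    hackyMetricSend('jenkins.stage.elapsedTimeDist', 5123, ['stage:Setup'])
    // datagram sent: jenkins.stage.elapsedTimeDist:5123|d|1|#stage:Setup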
@@ -18,11 +18,6 @@
 
 import groovy.time.*
 
-def runDatadogMetric(name, extraTags, body) {
-  def dd = load('build/new-jenkins/groovy/datadog.groovy')
-  dd.runDataDogForMetricWithExtraTags(name,extraTags,body)
-}
-
 /**
  * appends stages to the nodes based on the count passed into
  * the closure.
@@ -51,12 +46,7 @@ def appendStagesAsBuildNodes(nodes,
       // make sure to unstash
       unstash name: "build-dir"
       unstash name: "build-docker-compose"
-      def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
-      splunk.upload([splunk.eventForNodeWait(stage_name, duration)])
-      def extraTags = ["parallelStageName:${stage_name}"]
-      runDatadogMetric(test_label,extraTags) {
-        stage_block(index)
-      }
+      stage_block(index)
     }
 
     // mark with instance index.
build/new-jenkins/groovy/reports.groovy
@@ -57,7 +57,7 @@ def publishSpecCoverageToS3(prefix, ci_node_total, coverage_type) {
   sh "mv coverage ${prefix}_coverage"
-  archiveArtifacts(artifacts: "${prefix}_coverage_nodes/**")
+  archiveArtifacts(artifacts: "${prefix}_coverage/**")
 
 
   cleanupCoverage(prefix)
 }
@@ -135,7 +135,6 @@ def publishSpecFailuresAsHTML(prefix, ci_node_total, report_title) {
     buildIndexPage(failureCategories)
     htmlFiles = findFiles glob: '**/index.html'
   }
-  uploadSplunkFailures(failureCategories)
 
   def report_name = "spec-failure-$prefix"
   def report_url = "${BUILD_URL}${report_name}"
@@ -186,18 +185,6 @@ def buildIndexPage(failureCategories) {
   writeFile file: "index.html", text: indexHtml
 }
 
-def uploadSplunkFailures(failureCategories) {
-  def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
-  def splunkFailureEvents = []
-  failureCategories.each {category, failures ->
-    failures.each { failure ->
-      def spec = (failure =~ /.*spec_failures\/(.*)\/index/)[0][1]
-      splunkFailureEvents.add(splunk.eventForTestFailure(spec, category))
-    }
-  }
-  splunk.upload(splunkFailureEvents)
-}
-
 def snykCheckDependencies(projectImage, projectDirectory) {
   def projectContainer = sh(script: "docker run -d -it -v snyk_volume:${projectDirectory} ${projectImage}", returnStdout: true).trim()
   runSnyk(
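The removed uploadSplunkFailures derived the spec name from the path segment between spec_failures/ and /index. A worked example of the capture (the path is illustrative):

    def failure = 'spec_failures/Initial/spec/selenium/dashboard_spec.rb/index.html'
    def spec = (failure =~ /.*spec_failures\/(.*)\/index/)[0][1]
    assert spec == 'Initial/spec/selenium/dashboard_spec.rb'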
build/new-jenkins/groovy/splunk.groovy (deleted)
@@ -1,63 +0,0 @@
-/*
- * Copyright (C) 2020 - present Instructure, Inc.
- *
- * This file is part of Canvas.
- *
- * Canvas is free software: you can redistribute it and/or modify it under
- * the terms of the GNU Affero General Public License as published by the Free
- * Software Foundation, version 3 of the License.
- *
- * Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
- * A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
- * details.
- *
- * You should have received a copy of the GNU Affero General Public License along
- * with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-// https://docs.splunk.com/Documentation/Splunk/8.0.3/Data/FormateventsforHTTPEventCollector#Event_metadata
-
-import groovy.json.*
-
-// Splunk can take one or more events:
-// the json objects are simply concatenated if there are multiple (no [] and no commas)
-def upload(events) {
-  logEvents(events)
-  credentials.withSplunkCredentials({
-    sh "build/new-jenkins/splunk_event.sh '${new JsonBuilder(events).toString()}'"
-  })
-}
-
-def event(name, fields) {
-  return [
-    "sourcetype": "jenkins",
-    "event": name,
-    "fields": fields
-  ]
-}
-
-// Rerun category is a string describing which rerun retry this test failure was
-def eventForTestFailure(test, rerun_category) {
-  return event('jenkins.test.failure', ['test': test, 'rerun_category': rerun_category])
-}
-
-def eventForBuildDuration(duration) {
-  return event('jenkins.build.duration', ['duration': duration])
-}
-
-def eventForStageDuration(name, duration) {
-  return event('jenkins.stage.duration', ['stage': name, 'duration': duration])
-}
-
-def eventForNodeWait(node, duration) {
-  return event('jenkins.node.wait', ['node': node, 'duration': duration])
-}
-
-def logEvents(events) {
-  def displaySize = 10
-  def displayEventsString = new JsonBuilder(events.take(displaySize)).toPrettyString()
-  println("Uploading ${events.size()} events to splunk (showing ${displaySize} events): ${displayEventsString}")
-}
-
-return this
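For reference, an event built by the deleted event() helper is a plain map that JsonBuilder serializes into the HEC payload (the duration value is illustrative):

    def e = event('jenkins.build.duration', ['duration': 123456])
    // serializes to: {"sourcetype":"jenkins","event":"jenkins.build.duration","fields":{"duration":123456}}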
build/new-jenkins/splunk_event.sh (deleted)
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-set -o errexit -o nounset -o xtrace -o errtrace -o pipefail
-
-SPLUNK_URL=${SPLUNK_URL:-"https://http-inputs-inst.splunkcloud.com/services/collector"}
-curl -k "$SPLUNK_URL" -H "Authorization: Splunk $SPLUNK_HEC_KEY" -d "$1"
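The deleted script amounted to a single Splunk HEC POST. A sketch of an equivalent manual invocation (the token and payload are illustrative):

    SPLUNK_HEC_KEY=abc123 build/new-jenkins/splunk_event.sh \
      '{"sourcetype":"jenkins","event":"jenkins.build.duration","fields":{"duration":123456}}'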