remove deprecated datadog / splunk logic

refs DE-150

Change-Id: I07ee65b349e64c6d4279929fbbfb2fa71042330b
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/243361
Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com>
Reviewed-by: James Butters <jbutters@instructure.com>
QA-Review: James Butters <jbutters@instructure.com>
Product-Review: James Butters <jbutters@instructure.com>
This commit is contained in:
Aaron Ogata 2020-07-24 08:34:44 -07:00
parent d0df807673
commit dc039e2567
6 changed files with 85 additions and 257 deletions

19
Jenkinsfile vendored
View File

@ -37,17 +37,10 @@ def buildParameters = [
library "canvas-builds-library"
// Loads the shared Datadog helper and times `body` as a named stage metric.
def runDatadogMetric(name, body) {
  def datadog = load('build/new-jenkins/groovy/datadog.groovy')
  datadog.runDataDogForMetric(name, body)
}
// Runs `block` only if stage `name` has not already succeeded in a prior
// build, and reports the elapsed time to Datadog either way.
def skipIfPreviouslySuccessful(name, block) {
  runDatadogMetric(name) {
    load('build/new-jenkins/groovy/successes.groovy')
        .skipIfPreviouslySuccessful(name, true, block)
  }
}
def wrapBuildExecution(jobName, parameters, propagate, urlExtra) {
try {
@ -145,7 +138,6 @@ pipeline {
steps {
timeout(time: 5) {
script {
runDatadogMetric("Setup") {
cleanAndSetup()
buildParameters += string(name: 'PATCHSET_TAG', value: "${env.PATCHSET_TAG}")
@ -189,14 +181,12 @@ pipeline {
}
}
}
}
stage('Rebase') {
when { expression { env.GERRIT_EVENT_TYPE == 'patchset-created' && env.GERRIT_PROJECT == 'canvas-lms' } }
steps {
timeout(time: 2) {
script {
runDatadogMetric("Rebase") {
credentials.withGerritCredentials({ ->
sh '''#!/bin/bash
set -o errexit -o errtrace -o nounset -o pipefail -o xtrace
@ -229,7 +219,6 @@ pipeline {
}
}
}
}
stage('Build Docker Image') {
steps {
@ -376,7 +365,6 @@ pipeline {
steps {
timeout(time: 10) {
script {
runDatadogMetric("publishImageOnMerge") {
// Retriggers won't have an image to tag/push, pull that
// image if doesn't exist. If image is not found it will
// return NULL
@ -398,31 +386,26 @@ pipeline {
}
}
}
}
stage('Dependency Check') {
when { expression { env.GERRIT_EVENT_TYPE == 'change-merged' } }
steps {
script {
runDatadogMetric("dependencyCheck") {
def reports = load 'build/new-jenkins/groovy/reports.groovy'
reports.snykCheckDependencies("$PATCHSET_TAG", "/usr/src/app/")
}
}
}
}
stage('Mark Build as Successful') {
steps {
script {
runDatadogMetric("markBuildAsSuccessful") {
def successes = load 'build/new-jenkins/groovy/successes.groovy'
successes.markBuildAsSuccessful()
}
}
}
}
}
post {
failure {
@ -448,8 +431,6 @@ pipeline {
rspec.uploadSeleniumFailures()
rspec.uploadRSpecFailures()
load('build/new-jenkins/groovy/reports.groovy').sendFailureMessageIfPresent()
def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
splunk.upload([splunk.eventForBuildDuration(currentBuild.duration)])
}
}
}

View File

@ -1,61 +0,0 @@
/*
* Copyright (C) 2020 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import groovy.time.*
// Datadog metric format is the following:
// <METRIC_NAME>:<VALUE>|<TYPE>|@<SAMPLE_RATE>|#<TAG_KEY_1>:<TAG_VALUE_1>,<TAG_2>
// We are just allowing counts for now to be simple.
// Source https://docs.datadoghq.com/developers/dogstatsd/datagram_shell/?tab=metrics
// Sends a single dogstatsd distribution metric ('d') over UDP to the local
// agent on port 8125.
// Datagram format: <METRIC_NAME>:<VALUE>|<TYPE>|@<SAMPLE_RATE>|#<TAG>,<TAG>
// Returns the shell exit code so callers can react to send failures.
def hackyMetricSend(metric, value, tags) {
  // BUG FIX: the sample-rate field must be prefixed with '@' per the
  // dogstatsd datagram spec documented above; '|1|' (no '@') is malformed.
  def metric_string = "${metric}:${value}|d|@1|#${tags.join(',')}"
  echo "sending metric: $metric_string"
  def script = """#!/bin/bash
  echo -n "$metric_string" > /dev/udp/localhost/8125
  """
  // exit code is captured in case we want upstream caller status correction
  return sh(script: script, returnStatus: true)
}
// Executes `block` and always reports its wall-clock duration (ms) as a
// Datadog distribution metric and a Splunk stage-duration event, even when
// the block throws.
def runDataDogForMetric(name, block) {
  def startedAt = new Date()
  try {
    block.call()
  } finally {
    def elapsedMs = TimeCategory.minus(new Date(), startedAt).toMilliseconds()
    hackyMetricSend('jenkins.stage.elapsedTimeDist', elapsedMs, ["stage:${name}"])
    def splunk = load('build/new-jenkins/groovy/splunk.groovy')
    splunk.upload([splunk.eventForStageDuration(name, elapsedMs)])
  }
}
// Same contract as runDataDogForMetric, but `extraTags` (a tag or list of
// tags) is flattened into the metric's tag list alongside the stage tag.
def runDataDogForMetricWithExtraTags(name, extraTags, block) {
  def startedAt = new Date()
  try {
    block.call()
  } finally {
    def elapsedMs = TimeCategory.minus(new Date(), startedAt).toMilliseconds()
    def allTags = (["stage:${name}"] + [extraTags]).flatten()
    hackyMetricSend('jenkins.stage.elapsedTimeDist', elapsedMs, allTags)
    def splunk = load('build/new-jenkins/groovy/splunk.groovy')
    splunk.upload([splunk.eventForStageDuration(name, elapsedMs)])
  }
}
return this

View File

@ -18,11 +18,6 @@
import groovy.time.*
// Delegates to the shared Datadog helper, timing `body` with extra tags.
def runDatadogMetric(name, extraTags, body) {
  load('build/new-jenkins/groovy/datadog.groovy')
      .runDataDogForMetricWithExtraTags(name, extraTags, body)
}
/**
* appends stages to the nodes based on the count passed into
* the closure.
@ -51,13 +46,8 @@ def appendStagesAsBuildNodes(nodes,
// make sure to unstash
unstash name: "build-dir"
unstash name: "build-docker-compose"
def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
splunk.upload([splunk.eventForNodeWait(stage_name, duration)])
def extraTags = ["parallelStageName:${stage_name}"]
runDatadogMetric(test_label,extraTags) {
stage_block(index)
}
}
// mark with instance index.
// we need to do this on the main node so we dont run into

View File

@ -135,7 +135,6 @@ def publishSpecFailuresAsHTML(prefix, ci_node_total, report_title) {
buildIndexPage(failureCategories)
htmlFiles = findFiles glob: '**/index.html'
}
uploadSplunkFailures(failureCategories)
def report_name = "spec-failure-$prefix"
def report_url = "${BUILD_URL}${report_name}"
@ -186,18 +185,6 @@ def buildIndexPage(failureCategories) {
writeFile file: "index.html", text: indexHtml
}
// Converts collected spec-failure index paths into Splunk test-failure
// events and uploads them in one batch.
// failureCategories: map of rerun-category -> list of paths shaped like
// ".../spec_failures/<spec>/index..."; the <spec> segment names the test.
def uploadSplunkFailures(failureCategories) {
  def splunk = load 'build/new-jenkins/groovy/splunk.groovy'
  def splunkFailureEvents = []
  failureCategories.each { category, failures ->
    failures.each { failure ->
      def matcher = (failure =~ /.*spec_failures\/(.*)\/index/)
      // Guard: an unexpected path previously threw on the unchecked [0][1]
      // access; skip paths that don't match instead of failing the stage.
      if (matcher) {
        splunkFailureEvents.add(splunk.eventForTestFailure(matcher[0][1], category))
      }
    }
  }
  // Skip the credentials fetch and shell-out when there is nothing to send.
  if (splunkFailureEvents) {
    splunk.upload(splunkFailureEvents)
  }
}
def snykCheckDependencies(projectImage, projectDirectory) {
def projectContainer = sh(script: "docker run -d -it -v snyk_volume:${projectDirectory} ${projectImage}", returnStdout: true).trim()
runSnyk(

View File

@ -1,63 +0,0 @@
/*
* Copyright (C) 2020 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// https://docs.splunk.com/Documentation/Splunk/8.0.3/Data/FormateventsforHTTPEventCollector#Event_metadata
import groovy.json.*
// Splunk can take one or more events:
// the json objects are simply concatenated if there are multiple (no [] and no commas)
// Uploads one or more event maps to the Splunk HTTP Event Collector.
// Splunk accepts multiple events as concatenated JSON objects, so the
// list is serialized as-is and handed to the upload script.
def upload(events) {
  logEvents(events)
  def payload = new JsonBuilder(events).toString()
  credentials.withSplunkCredentials({
    sh "build/new-jenkins/splunk_event.sh '${payload}'"
  })
}
// Builds a Splunk HEC event map with the standard "jenkins" sourcetype.
def event(name, fields) {
  def hecEvent = [:]
  hecEvent['sourcetype'] = 'jenkins'
  hecEvent['event'] = name
  hecEvent['fields'] = fields
  return hecEvent
}
// Rerun category is a string describing which rerun retry this test failure was
// Event for a single test failure; rerun_category labels which retry pass
// the failure occurred in.
def eventForTestFailure(test, rerun_category) {
  return event('jenkins.test.failure', [test: test, rerun_category: rerun_category])
}
// Event recording the total duration of a build.
def eventForBuildDuration(duration) {
  return event('jenkins.build.duration', [duration: duration])
}
// Event recording the duration of a single named pipeline stage.
def eventForStageDuration(name, duration) {
  return event('jenkins.stage.duration', [stage: name, duration: duration])
}
// Event recording how long a stage waited for a build node.
def eventForNodeWait(node, duration) {
  return event('jenkins.node.wait', [node: node, duration: duration])
}
// Prints a pretty-printed preview of the first few events being uploaded,
// so the build log shows what went to Splunk without flooding it.
def logEvents(events) {
  def displaySize = 10
  def displayEventsString = new JsonBuilder(events.take(displaySize)).toPrettyString()
  def summary = "Uploading ${events.size()} events to splunk (showing ${displaySize} events): ${displayEventsString}"
  println(summary)
}
return this

View File

@ -1,6 +0,0 @@
#!/bin/bash
# Posts pre-serialized Splunk HEC event JSON (passed as $1) to the collector.
# Requires SPLUNK_HEC_KEY in the environment; SPLUNK_URL may override the
# default collector endpoint.
set -o errexit -o nounset -o xtrace -o errtrace -o pipefail
SPLUNK_URL=${SPLUNK_URL:-"https://http-inputs-inst.splunkcloud.com/services/collector"}
# --fail makes curl exit non-zero on HTTP 4xx/5xx so errexit actually catches
# upload failures (previously HTTP errors were silently ignored).
# NOTE(review): -k disables TLS certificate verification; confirm whether the
# collector's certificate chain really requires this before keeping it.
curl -k --fail --show-error "$SPLUNK_URL" -H "Authorization: Splunk $SPLUNK_HEC_KEY" -d "$1"