diff --git a/.ci/Jenkinsfile_flaky b/.ci/Jenkinsfile_flaky
new file mode 100644
index 0000000000000..06260b9b89587
--- /dev/null
+++ b/.ci/Jenkinsfile_flaky
@@ -0,0 +1,128 @@
+#!/bin/groovy
+
+library 'kibana-pipeline-library'
+kibanaLibrary.load()
+
+// Looks like 'oss:ciGroup:1' or 'oss:firefoxSmoke'
+def JOB_PARTS = params.CI_GROUP.split(':')
+def IS_XPACK = JOB_PARTS[0] == 'xpack'
+def JOB = JOB_PARTS[1]
+def CI_GROUP = JOB_PARTS.size() > 2 ? JOB_PARTS[2] : ''
+def EXECUTIONS = params.NUMBER_EXECUTIONS.toInteger()
+def AGENT_COUNT = getAgentCount(EXECUTIONS)
+
+def worker = getWorkerFromParams(IS_XPACK, JOB, CI_GROUP)
+
+def workerFailures = []
+
+currentBuild.displayName += trunc(" ${params.GITHUB_OWNER}:${params.branch_specifier}", 24)
+currentBuild.description = """${params.CI_GROUP}
+Agents: ${AGENT_COUNT}
+Executions: ${params.NUMBER_EXECUTIONS}"""
+
+stage("Kibana Pipeline") {
+ timeout(time: 180, unit: 'MINUTES') {
+ timestamps {
+ ansiColor('xterm') {
+ def agents = [:]
+ for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
+ def agentNumberInside = agentNumber
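+          // Spread executions as evenly as possible; the first (EXECUTIONS % AGENT_COUNT) agents each take one extra run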
+ def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
+ agents["agent-${agentNumber}"] = {
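+            // catchError records a failed agent but lets the pipeline continue on to the failure summary below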
+ catchError {
+ print "Agent ${agentNumberInside} - ${agentExecutions} executions"
+
+ kibanaPipeline.withWorkers('flaky-test-runner', {
+ if (!IS_XPACK) {
+ kibanaPipeline.buildOss()
+ } else {
+ kibanaPipeline.buildXpack()
+ }
+ }, getWorkerMap(agentNumberInside, agentExecutions, worker, workerFailures))()
+ }
+ }
+ }
+
+ parallel(agents)
+
+ currentBuild.description += ", Failures: ${workerFailures.size()}"
+
+ if (workerFailures.size() > 0) {
+ print "There were ${workerFailures.size()} test suite failures."
+ print "The executions that failed were:"
+ print workerFailures.join("\n")
+ print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
+ }
+ }
+ }
+ }
+}
+
+def getWorkerFromParams(isXpack, job, ciGroup) {
+ if (!isXpack) {
+ if (job == 'firefoxSmoke') {
+ return kibanaPipeline.getPostBuildWorker('firefoxSmoke', { runbld('./test/scripts/jenkins_firefox_smoke.sh', 'Execute kibana-firefoxSmoke') })
+ } else if(job == 'visualRegression') {
+ return kibanaPipeline.getPostBuildWorker('visualRegression', { runbld('./test/scripts/jenkins_visual_regression.sh', 'Execute kibana-visualRegression') })
+ } else {
+ return kibanaPipeline.getOssCiGroupWorker(ciGroup)
+ }
+ }
+
+ if (job == 'firefoxSmoke') {
+ return kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', { runbld('./test/scripts/jenkins_xpack_firefox_smoke.sh', 'Execute xpack-firefoxSmoke') })
+ } else if(job == 'visualRegression') {
+ return kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld('./test/scripts/jenkins_xpack_visual_regression.sh', 'Execute xpack-visualRegression') })
+ } else {
+ return kibanaPipeline.getXpackCiGroupWorker(ciGroup)
+ }
+}
+
+def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWorkerProcesses = 12) {
+ def workerMap = [:]
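+  // Run at most maxWorkerProcesses parallel worker processes on this agent; each worker loops through its share of the executions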
+ def numberOfWorkers = Math.min(numberOfExecutions, maxWorkerProcesses)
+
+ for(def i = 1; i <= numberOfWorkers; i++) {
+    def workerExecutions = floor(numberOfExecutions/numberOfWorkers) + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0)
+
+ workerMap["agent-${agentNumber}-worker-${i}"] = { workerNumber ->
+ for(def j = 0; j < workerExecutions; j++) {
+ print "Execute agent-${agentNumber} worker-${workerNumber}: ${j}"
+ withEnv([
+ "JOB=agent-${agentNumber}-worker-${workerNumber}-${j}",
+ "REMOVE_KIBANA_INSTALL_DIR=1",
+ ]) {
+ catchError {
+ try {
+ worker(workerNumber)
+ } catch (ex) {
+ workerFailures << "agent-${agentNumber} worker-${workerNumber}-${j}"
+ throw ex
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return workerMap
+}
+
+def getAgentCount(executions) {
+  // Increase agent count every 24 worker processes, up to 3 agents maximum
+ return Math.min(3, 1 + floor(executions/24))
+}
+
+def trunc(str, length) {
+ if (str.size() >= length) {
+ return str.take(length) + "..."
+ }
+
+ return str;
+}
+
+// All of the real rounding/truncating methods are sandboxed
+def floor(num) {
+ return num
+ .toString()
+ .split('\\.')[0]
+ .toInteger()
+}
diff --git a/Jenkinsfile b/Jenkinsfile
index 3cb2d75e50229..6d859e75c2b73 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,300 +1,42 @@
#!/bin/groovy
library 'kibana-pipeline-library'
+kibanaLibrary.load()
stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
- timeout(time: 180, unit: 'MINUTES') {
+ timeout(time: 120, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
parallel([
- 'kibana-intake-agent': legacyJobRunner('kibana-intake'),
- 'x-pack-intake-agent': legacyJobRunner('x-pack-intake'),
- 'kibana-oss-agent': withWorkers('kibana-oss-tests', { buildOss() }, [
- 'oss-ciGroup1': getOssCiGroupWorker(1),
- 'oss-ciGroup2': getOssCiGroupWorker(2),
- 'oss-ciGroup3': getOssCiGroupWorker(3),
- 'oss-ciGroup4': getOssCiGroupWorker(4),
- 'oss-ciGroup5': getOssCiGroupWorker(5),
- 'oss-ciGroup6': getOssCiGroupWorker(6),
- 'oss-ciGroup7': getOssCiGroupWorker(7),
- 'oss-ciGroup8': getOssCiGroupWorker(8),
- 'oss-ciGroup9': getOssCiGroupWorker(9),
- 'oss-ciGroup10': getOssCiGroupWorker(10),
- 'oss-ciGroup11': getOssCiGroupWorker(11),
- 'oss-ciGroup12': getOssCiGroupWorker(12),
+ 'kibana-intake-agent': kibanaPipeline.legacyJobRunner('kibana-intake'),
+ 'x-pack-intake-agent': kibanaPipeline.legacyJobRunner('x-pack-intake'),
+ 'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
+ 'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
+ 'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
+ 'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
+ 'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
+ 'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
+ 'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
+ 'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
+ 'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
+ 'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
+ 'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
+ 'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
+ 'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
- 'kibana-xpack-agent': withWorkers('kibana-xpack-tests', { buildXpack() }, [
- 'xpack-ciGroup1': getXpackCiGroupWorker(1),
- 'xpack-ciGroup2': getXpackCiGroupWorker(2),
- 'xpack-ciGroup3': getXpackCiGroupWorker(3),
- 'xpack-ciGroup4': getXpackCiGroupWorker(4),
- 'xpack-ciGroup5': getXpackCiGroupWorker(5),
- 'xpack-ciGroup6': getXpackCiGroupWorker(6),
+ 'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
+ 'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
+ 'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
+ 'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
+ 'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
+ 'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
+ 'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
]),
])
}
- node('flyweight') {
- // If the build doesn't have a result set by this point, there haven't been any errors and it can be marked as a success
- // The e-mail plugin for the infra e-mail depends upon this being set
- currentBuild.result = currentBuild.result ?: 'SUCCESS'
-
- sendMail()
- }
+ kibanaPipeline.sendMail()
}
}
}
}
-
-def withWorkers(name, preWorkerClosure = {}, workerClosures = [:]) {
- return {
- jobRunner('tests-xl', true) {
- try {
- doSetup()
- preWorkerClosure()
-
- def nextWorker = 1
- def worker = { workerClosure ->
- def workerNumber = nextWorker
- nextWorker++
-
- return {
- workerClosure(workerNumber)
- }
- }
-
- def workers = [:]
- workerClosures.each { workerName, workerClosure ->
- workers[workerName] = worker(workerClosure)
- }
-
- parallel(workers)
- } finally {
- catchError {
- uploadAllGcsArtifacts(name)
- }
-
- catchError {
- runbldJunit()
- }
-
- catchError {
- publishJunit()
- }
-
- catchError {
- runErrorReporter()
- }
- }
- }
- }
-}
-
-def getPostBuildWorker(name, closure) {
- return { workerNumber ->
- def kibanaPort = "61${workerNumber}1"
- def esPort = "61${workerNumber}2"
- def esTransportPort = "61${workerNumber}3"
-
- withEnv([
- "PARALLEL_PIPELINE_WORKER_INDEX=${workerNumber}",
- "TEST_KIBANA_HOST=localhost",
- "TEST_KIBANA_PORT=${kibanaPort}",
- "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
- "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
- "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
- "IS_PIPELINE_JOB=1",
- ]) {
- closure()
- }
- }
-}
-
-def getOssCiGroupWorker(ciGroup) {
- return getPostBuildWorker("ciGroup" + ciGroup, {
- withEnv([
- "CI_GROUP=${ciGroup}",
- "JOB=kibana-ciGroup${ciGroup}",
- ]) {
- runbld "./test/scripts/jenkins_ci_group.sh"
- }
- })
-}
-
-def getXpackCiGroupWorker(ciGroup) {
- return getPostBuildWorker("xpack-ciGroup" + ciGroup, {
- withEnv([
- "CI_GROUP=${ciGroup}",
- "JOB=xpack-kibana-ciGroup${ciGroup}",
- ]) {
- runbld "./test/scripts/jenkins_xpack_ci_group.sh"
- }
- })
-}
-
-def legacyJobRunner(name) {
- return {
- parallel([
- "${name}": {
- withEnv([
- "JOB=${name}",
- ]) {
- jobRunner('linux && immutable', false) {
- try {
- runbld('.ci/run.sh', true)
- } finally {
- catchError {
- uploadAllGcsArtifacts(name)
- }
- catchError {
- publishJunit()
- }
- catchError {
- runErrorReporter()
- }
- }
- }
- }
- }
- ])
- }
-}
-
-def jobRunner(label, useRamDisk, closure) {
- node(label) {
- if (useRamDisk) {
- // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
- def originalWorkspace = env.WORKSPACE
- ws('/tmp/workspace') {
- sh """
- mkdir -p /dev/shm/workspace
- mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
- rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
- ln -s /dev/shm/workspace '${originalWorkspace}'
- """
- }
- }
-
- def scmVars = checkout scm
-
- withEnv([
- "CI=true",
- "HOME=${env.JENKINS_HOME}",
- "PR_SOURCE_BRANCH=${env.ghprbSourceBranch}",
- "PR_TARGET_BRANCH=${env.ghprbTargetBranch}",
- "PR_AUTHOR=${env.ghprbPullAuthorLogin}",
- "TEST_BROWSER_HEADLESS=1",
- "GIT_BRANCH=${scmVars.GIT_BRANCH}",
- ]) {
- withCredentials([
- string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
- string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
- string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
- ]) {
- // scm is configured to check out to the ./kibana directory
- dir('kibana') {
- closure()
- }
- }
- }
- }
-}
-
-// TODO what should happen if GCS, Junit, or email publishing fails? Unstable build? Failed build?
-
-def uploadGcsArtifact(workerName, pattern) {
- def storageLocation = "gs://kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}" // TODO
- // def storageLocation = "gs://kibana-pipeline-testing/jobs/pipeline-test/${BUILD_NUMBER}/${workerName}"
-
- googleStorageUpload(
- credentialsId: 'kibana-ci-gcs-plugin',
- bucket: storageLocation,
- pattern: pattern,
- sharedPublicly: true,
- showInline: true,
- )
-}
-
-def uploadAllGcsArtifacts(workerName) {
- def ARTIFACT_PATTERNS = [
- 'target/kibana-*',
- 'target/junit/**/*',
- 'test/**/screenshots/**/*.png',
- 'test/functional/failure_debug/html/*.html',
- 'x-pack/test/**/screenshots/**/*.png',
- 'x-pack/test/functional/failure_debug/html/*.html',
- 'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
- ]
-
- ARTIFACT_PATTERNS.each { pattern ->
- uploadGcsArtifact(workerName, pattern)
- }
-}
-
-def publishJunit() {
- junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
-}
-
-def sendMail() {
- sendInfraMail()
- sendKibanaMail()
-}
-
-def sendInfraMail() {
- catchError {
- step([
- $class: 'Mailer',
- notifyEveryUnstableBuild: true,
- recipients: 'infra-root+build@elastic.co',
- sendToIndividuals: false
- ])
- }
-}
-
-def sendKibanaMail() {
- catchError {
- def buildStatus = buildUtils.getBuildStatus()
-
- if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
- emailext(
- to: 'build-kibana@elastic.co',
- subject: "${env.JOB_NAME} - Build # ${env.BUILD_NUMBER} - ${buildStatus}",
- body: '${SCRIPT,template="groovy-html.template"}',
- mimeType: 'text/html',
- )
- }
- }
-}
-
-def runbld(script, enableJunitProcessing = false) {
- def extraConfig = enableJunitProcessing ? "" : "--config ${env.WORKSPACE}/kibana/.ci/runbld_no_junit.yml"
-
- sh "/usr/local/bin/runbld -d '${pwd()}' ${extraConfig} ${script}"
-}
-
-def runbldJunit() {
- sh "/usr/local/bin/runbld -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh"
-}
-
-def bash(script) {
- sh "#!/bin/bash\n${script}"
-}
-
-def doSetup() {
- runbld "./test/scripts/jenkins_setup.sh"
-}
-
-def buildOss() {
- runbld "./test/scripts/jenkins_build_kibana.sh"
-}
-
-def buildXpack() {
- runbld "./test/scripts/jenkins_xpack_build_kibana.sh"
-}
-
-def runErrorReporter() {
- bash """
- source src/dev/ci_setup/setup_env.sh
- node src/dev/failed_tests/cli
- """
-}
diff --git a/src/dev/ci_setup/setup_env.sh b/src/dev/ci_setup/setup_env.sh
index 3b239bd3ff731..805b77365e624 100644
--- a/src/dev/ci_setup/setup_env.sh
+++ b/src/dev/ci_setup/setup_env.sh
@@ -2,6 +2,10 @@
set -e
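+# Skip the (relatively slow) setup below if it has already run in this shell; CI scripts may source this file more than once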
+if [[ "$CI_ENV_SETUP" ]]; then
+ return 0
+fi
+
installNode=$1
dir="$(pwd)"
@@ -152,3 +156,5 @@ if [[ -d "$ES_DIR" && -f "$ES_JAVA_PROP_PATH" ]]; then
echo "Setting JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA"
export JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
fi
+
+export CI_ENV_SETUP=true
diff --git a/src/dev/precommit_hook/casing_check_config.js b/src/dev/precommit_hook/casing_check_config.js
index ec8eb3bcfb66e..66621916de2dd 100644
--- a/src/dev/precommit_hook/casing_check_config.js
+++ b/src/dev/precommit_hook/casing_check_config.js
@@ -43,8 +43,9 @@ export const IGNORE_FILE_GLOBS = [
'**/{webpackShims,__mocks__}/**/*',
'x-pack/docs/**/*',
'src/dev/tslint/rules/*',
- 'Jenkinsfile',
+ '**/Jenkinsfile*',
'Dockerfile*',
+ 'vars/*',
// filename must match language code which requires capital letters
'**/translations/*.json',
diff --git a/test/scripts/jenkins_ci_group.sh b/test/scripts/jenkins_ci_group.sh
index bd26ec1ff4cc1..2cd61ee4b7155 100755
--- a/test/scripts/jenkins_ci_group.sh
+++ b/test/scripts/jenkins_ci_group.sh
@@ -1,14 +1,6 @@
#!/usr/bin/env bash
-set -e
-
-if [[ -z "$IS_PIPELINE_JOB" ]] ; then
- trap 'node "$KIBANA_DIR/src/dev/failed_tests/cli"' EXIT
-else
- source src/dev/ci_setup/setup_env.sh
-fi
-
-export TEST_BROWSER_HEADLESS=1
+source test/scripts/jenkins_test_setup.sh
if [[ -z "$IS_PIPELINE_JOB" ]] ; then
yarn run grunt functionalTests:ensureAllTestsInCiGroup;
diff --git a/test/scripts/jenkins_test_setup.sh b/test/scripts/jenkins_test_setup.sh
new file mode 100644
index 0000000000000..8ceccebbdfc16
--- /dev/null
+++ b/test/scripts/jenkins_test_setup.sh
@@ -0,0 +1,20 @@
+set -e
+
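+# Runs on exit: report failed tests for legacy (non-pipeline) jobs and clean up the Kibana install dir when requested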
+function post_work() {
+ set +e
+ if [[ -z "$IS_PIPELINE_JOB" ]] ; then
+ node "$KIBANA_DIR/src/dev/failed_tests/cli"
+ fi
+
+  if [[ -n "$REMOVE_KIBANA_INSTALL_DIR" && -n "$KIBANA_INSTALL_DIR" && -d "$KIBANA_INSTALL_DIR" ]]; then
+    rm -rf "$KIBANA_INSTALL_DIR"
+  fi
+}
+
+trap 'post_work' EXIT
+
+export TEST_BROWSER_HEADLESS=1
+
+if [[ -n "$IS_PIPELINE_JOB" ]] ; then
+ source src/dev/ci_setup/setup_env.sh
+fi
diff --git a/test/scripts/jenkins_xpack_ci_group.sh b/test/scripts/jenkins_xpack_ci_group.sh
index 83c0042df4315..dfefe578f3e07 100755
--- a/test/scripts/jenkins_xpack_ci_group.sh
+++ b/test/scripts/jenkins_xpack_ci_group.sh
@@ -1,14 +1,6 @@
#!/usr/bin/env bash
-set -e
-
-if [[ -z "$IS_PIPELINE_JOB" ]] ; then
- trap 'node "$KIBANA_DIR/src/dev/failed_tests/cli"' EXIT
-else
- source src/dev/ci_setup/setup_env.sh
-fi
-
-export TEST_BROWSER_HEADLESS=1
+source test/scripts/jenkins_test_setup.sh
if [[ -z "$IS_PIPELINE_JOB" ]] ; then
echo " -> Ensuring all functional tests are in a ciGroup"
diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy
new file mode 100644
index 0000000000000..5220dea599920
--- /dev/null
+++ b/vars/kibanaPipeline.groovy
@@ -0,0 +1,270 @@
+def withWorkers(name, preWorkerClosure = {}, workerClosures = [:]) {
+ return {
+ jobRunner('tests-xl', true) {
+ try {
+ doSetup()
+ preWorkerClosure()
+
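+        // Hand each worker closure a unique, sequential worker number (getPostBuildWorker uses it to derive unique ports)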
+ def nextWorker = 1
+ def worker = { workerClosure ->
+ def workerNumber = nextWorker
+ nextWorker++
+
+ return {
+ workerClosure(workerNumber)
+ }
+ }
+
+ def workers = [:]
+ workerClosures.each { workerName, workerClosure ->
+ workers[workerName] = worker(workerClosure)
+ }
+
+ parallel(workers)
+ } finally {
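+        // Each post-build step gets its own catchError so one failure doesn't keep the others from running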
+ catchError {
+ uploadAllGcsArtifacts(name)
+ }
+
+ catchError {
+ runbld.junit()
+ }
+
+ catchError {
+ publishJunit()
+ }
+
+ catchError {
+ runErrorReporter()
+ }
+ }
+ }
+ }
+}
+
+def getPostBuildWorker(name, closure) {
+ return { workerNumber ->
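+    // Derive ports from the worker number so parallel workers on one agent don't collide,
+    // e.g. worker 2 gets Kibana 6121, Elasticsearch 6122, ES transport 6123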
+ def kibanaPort = "61${workerNumber}1"
+ def esPort = "61${workerNumber}2"
+ def esTransportPort = "61${workerNumber}3"
+
+ withEnv([
+ "PARALLEL_PIPELINE_WORKER_INDEX=${workerNumber}",
+ "TEST_KIBANA_HOST=localhost",
+ "TEST_KIBANA_PORT=${kibanaPort}",
+ "TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
+ "TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
+ "TEST_ES_TRANSPORT_PORT=${esTransportPort}",
+ "IS_PIPELINE_JOB=1",
+ ]) {
+ closure()
+ }
+ }
+}
+
+def getOssCiGroupWorker(ciGroup) {
+ return getPostBuildWorker("ciGroup" + ciGroup, {
+ withEnv([
+ "CI_GROUP=${ciGroup}",
+ "JOB=kibana-ciGroup${ciGroup}",
+ ]) {
+ runbld("./test/scripts/jenkins_ci_group.sh", "Execute kibana-ciGroup${ciGroup}")
+ }
+ })
+}
+
+def getXpackCiGroupWorker(ciGroup) {
+ return getPostBuildWorker("xpack-ciGroup" + ciGroup, {
+ withEnv([
+ "CI_GROUP=${ciGroup}",
+ "JOB=xpack-kibana-ciGroup${ciGroup}",
+ ]) {
+ runbld("./test/scripts/jenkins_xpack_ci_group.sh", "Execute xpack-kibana-ciGroup${ciGroup}")
+ }
+ })
+}
+
+def legacyJobRunner(name) {
+ return {
+ parallel([
+ "${name}": {
+ withEnv([
+ "JOB=${name}",
+ ]) {
+ jobRunner('linux && immutable', false) {
+ try {
+ runbld('.ci/run.sh', "Execute ${name}", true)
+ } finally {
+ catchError {
+ uploadAllGcsArtifacts(name)
+ }
+ catchError {
+ publishJunit()
+ }
+ catchError {
+ runErrorReporter()
+ }
+ }
+ }
+ }
+ }
+ ])
+ }
+}
+
+def jobRunner(label, useRamDisk, closure) {
+ node(label) {
+ if (useRamDisk) {
+ // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
+ def originalWorkspace = env.WORKSPACE
+ ws('/tmp/workspace') {
+ sh(
+ script: """
+ mkdir -p /dev/shm/workspace
+ mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
+ rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
+ ln -s /dev/shm/workspace '${originalWorkspace}'
+ """,
+ label: "Move workspace to RAM - /dev/shm/workspace"
+ )
+ }
+ }
+
+ def scmVars
+
+    // Try to clone from GitHub up to 8 times, waiting 15 seconds between attempts
+ retry(8) {
+ try {
+ scmVars = checkout scm
+ } catch (ex) {
+ sleep 15
+ throw ex
+ }
+ }
+
+ withEnv([
+ "CI=true",
+ "HOME=${env.JENKINS_HOME}",
+ "PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
+ "PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
+ "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
+ "TEST_BROWSER_HEADLESS=1",
+ "GIT_BRANCH=${scmVars.GIT_BRANCH}",
+ ]) {
+ withCredentials([
+ string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
+ string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
+ string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
+ ]) {
+ // scm is configured to check out to the ./kibana directory
+ dir('kibana') {
+ closure()
+ }
+ }
+ }
+ }
+}
+
+// TODO what should happen if GCS, Junit, or email publishing fails? Unstable build? Failed build?
+
+def uploadGcsArtifact(workerName, pattern) {
+ def storageLocation = "gs://kibana-ci-artifacts/jobs/${env.JOB_NAME}/${BUILD_NUMBER}/${workerName}" // TODO
+
+ googleStorageUpload(
+ credentialsId: 'kibana-ci-gcs-plugin',
+ bucket: storageLocation,
+ pattern: pattern,
+ sharedPublicly: true,
+ showInline: true,
+ )
+}
+
+def uploadAllGcsArtifacts(workerName) {
+ def ARTIFACT_PATTERNS = [
+ 'target/kibana-*',
+ 'target/junit/**/*',
+ 'test/**/screenshots/**/*.png',
+ 'test/functional/failure_debug/html/*.html',
+ 'x-pack/test/**/screenshots/**/*.png',
+ 'x-pack/test/functional/failure_debug/html/*.html',
+ 'x-pack/test/functional/apps/reporting/reports/session/*.pdf',
+ ]
+
+ ARTIFACT_PATTERNS.each { pattern ->
+ uploadGcsArtifact(workerName, pattern)
+ }
+}
+
+def publishJunit() {
+ junit(testResults: 'target/junit/**/*.xml', allowEmptyResults: true, keepLongStdio: true)
+}
+
+def sendMail() {
+ // If the build doesn't have a result set by this point, there haven't been any errors and it can be marked as a success
+ // The e-mail plugin for the infra e-mail depends upon this being set
+ currentBuild.result = currentBuild.result ?: 'SUCCESS'
+
+ def buildStatus = buildUtils.getBuildStatus()
+ if (buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
+ node('flyweight') {
+ sendInfraMail()
+ sendKibanaMail()
+ }
+ }
+}
+
+def sendInfraMail() {
+ catchError {
+ step([
+ $class: 'Mailer',
+ notifyEveryUnstableBuild: true,
+ recipients: 'infra-root+build@elastic.co',
+ sendToIndividuals: false
+ ])
+ }
+}
+
+def sendKibanaMail() {
+ catchError {
+ def buildStatus = buildUtils.getBuildStatus()
+ if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
+ emailext(
+ to: 'build-kibana@elastic.co',
+ subject: "${env.JOB_NAME} - Build # ${env.BUILD_NUMBER} - ${buildStatus}",
+ body: '${SCRIPT,template="groovy-html.template"}',
+ mimeType: 'text/html',
+ )
+ }
+ }
+}
+
+def bash(script, label) {
+ sh(
+ script: "#!/bin/bash\n${script}",
+ label: label
+ )
+}
+
+def doSetup() {
+ runbld("./test/scripts/jenkins_setup.sh", "Setup Build Environment and Dependencies")
+}
+
+def buildOss() {
+ runbld("./test/scripts/jenkins_build_kibana.sh", "Build OSS/Default Kibana")
+}
+
+def buildXpack() {
+ runbld("./test/scripts/jenkins_xpack_build_kibana.sh", "Build X-Pack Kibana")
+}
+
+def runErrorReporter() {
+ bash(
+ """
+ source src/dev/ci_setup/setup_env.sh
+ node src/dev/failed_tests/cli
+ """,
+ "Report failed tests, if necessary"
+ )
+}
+
+return this
diff --git a/vars/runbld.groovy b/vars/runbld.groovy
new file mode 100644
index 0000000000000..e52bc244c65cb
--- /dev/null
+++ b/vars/runbld.groovy
@@ -0,0 +1,17 @@
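+// Defining call() lets pipeline code invoke this shared-library var directly as runbld(script, label)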
+def call(script, label, enableJunitProcessing = false) {
+ def extraConfig = enableJunitProcessing ? "" : "--config ${env.WORKSPACE}/kibana/.ci/runbld_no_junit.yml"
+
+ sh(
+ script: "/usr/local/bin/runbld -d '${pwd()}' ${extraConfig} ${script}",
+ label: label ?: script
+ )
+}
+
+def junit() {
+ sh(
+ script: "/usr/local/bin/runbld -d '${pwd()}' ${env.WORKSPACE}/kibana/test/scripts/jenkins_runbld_junit.sh",
+ label: "Process JUnit reports with runbld"
+ )
+}
+
+return this