#!/usr/bin/env groovy
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

import groovy.json.JsonOutput

PIPELINE_CONFIG_FILE = 'scripts/build/Jenkins/lumberyard.json'
INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py'
PIPELINE_RETRY_ATTEMPTS = 3

EMPTY_JSON = readJSON text: '{}'

ENGINE_REPOSITORY_NAME = 'o3de'

// Branches with build snapshots
BUILD_SNAPSHOTS = ['development', 'stabilization/2110']

// Build snapshots plus an empty entry (for use with the 'SNAPSHOT' pipeline parameter)
BUILD_SNAPSHOTS_WITH_EMPTY = BUILD_SNAPSHOTS + ''

// The default build snapshot to be selected in the 'SNAPSHOT' pipeline parameter
DEFAULT_BUILD_SNAPSHOT = BUILD_SNAPSHOTS_WITH_EMPTY.get(0)

// Branches with build snapshots as a comma-separated string
env.BUILD_SNAPSHOTS = BUILD_SNAPSHOTS.join(",")

def pipelineProperties = []

def pipelineParameters = [
    // Build/clean parameters
    // CLEAN_OUTPUT_DIRECTORY is used by the ci_build scripts. Creating the parameter here passes it to jobs as an environment variable, and it is consumed that way.
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
    booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
    booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME')
]

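// Platform abstraction ("pal*") helpers: wrap sh/bat and basic filesystem commands so the
// rest of this file stays platform-agnostic. Illustrative usage (hypothetical command):
// palSh('echo hello', 'Greeting') runs `sh` on nodes where IS_UNIX is set, otherwise `bat`
// with '/' converted to '\' unless winSlashReplacement is false.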
def palSh(cmd, lbl = '', winSlashReplacement = true) {
    if (env.IS_UNIX) {
        sh label: lbl,
           script: cmd
    } else if (winSlashReplacement) {
        bat label: lbl,
            script: cmd.replace('/','\\')
    } else {
        bat label: lbl,
            script: cmd
    }
}

def palMkdir(path) {
    if (env.IS_UNIX) {
        sh label: "Making directories ${path}",
           script: "mkdir -p ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Making directories ${win_path}",
            script: "mkdir ${win_path}"
    }
}

def palRm(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "del /Q ${win_path}"
    }
}

def palRmDir(path) {
    if (env.IS_UNIX) {
        sh label: "Removing ${path}",
           script: "rm -rf ${path}"
    } else {
        def win_path = path.replace('/','\\')
        bat label: "Removing ${win_path}",
            script: "rd /s /q ${win_path}"
    }
}

def IsPullRequest(branchName) {
    // Temporarily using the branch name to detect if we are in a PR.
    // In the future we will check with GitHub.
    return branchName.startsWith('PR-')
}

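// Decides whether a build job runs: PRs run only jobs tagged with the current pipeline;
// otherwise JOB_LIST_OVERRIDE (comma-separated job names) filters the jobs, falling back
// to the pipeline tags. The per-platform boolean parameter can disable a whole platform.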
def IsJobEnabled(branchName, buildTypeMap, pipelineName, platformName) {
    if (IsPullRequest(branchName)) {
        return buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
    }
    def job_list_override = params.JOB_LIST_OVERRIDE ? params.JOB_LIST_OVERRIDE.tokenize(',') : ''
    if (!job_list_override.isEmpty()) {
        return params[platformName] && job_list_override.contains(buildTypeMap.key)
    } else {
        return params[platformName] && buildTypeMap.value.TAGS && buildTypeMap.value.TAGS.contains(pipelineName)
    }
}

def IsAPLogUpload(branchName, jobName) {
    return !IsPullRequest(branchName) && jobName.toLowerCase().contains('asset') && env.AP_LOGS_S3_BUCKET
}

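// Splits the Jenkins job name into (project, pipeline). For example, a job named
// "o3de_periodic-incremental-daily/development" yields ['o3de', 'periodic-incremental-daily'],
// while "o3de/development" yields ['o3de', 'default'] (example names are illustrative).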
def GetRunningPipelineName(JENKINS_JOB_NAME) {
    // If the job name contains an underscore, the last token is the pipeline name
    def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
    if (job_parts.size() > 1) {
        return [job_parts.take(job_parts.size() - 1).join('_'), job_parts[job_parts.size()-1]]
    }
    return [job_parts[0], 'default']
}

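// @NonCPS: regex Matcher objects are not serializable, so this helper must run
// outside Jenkins' CPS-transformed pipeline code.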
@NonCPS
def RegexMatcher(str, regex) {
    def matcher = (str =~ regex)
    return matcher ? matcher.group(1) : null
}

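// Loads lumberyard.json, then merges in the per-platform pipeline and build configs it points
// at (PIPELINE_CONFIGS / BUILD_CONFIGS are glob patterns whose '*' captures the platform name).
// Illustrative result shape (actual keys come from the JSON files in the repo):
//   pipelineConfig.platforms['Windows'].PIPELINE_ENV  -> contents of that platform's pipeline config
//   pipelineConfig.platforms['Windows'].build_types   -> contents of that platform's build config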
def LoadPipelineConfig(String pipelineName, String branchName) {
    echo 'Loading pipeline config'
    def pipelineConfig = {}
    pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
    palRm(PIPELINE_CONFIG_FILE)
    pipelineConfig.platforms = EMPTY_JSON

    // Load the pipeline configs per platform
    pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
        def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
        for (pipeline_config_path in findFiles(glob: pipeline_config)) {
            echo "\tFound platform pipeline config ${pipeline_config_path}"
            def platform = RegexMatcher(pipeline_config_path, platform_regex)
            if(platform) {
                pipelineConfig.platforms[platform] = EMPTY_JSON
                pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
            }
            palRm(pipeline_config_path.toString())
        }
    }

    // Load the build configs
    pipelineConfig.BUILD_CONFIGS.each { build_config ->
        def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
        if (!env.IS_UNIX) {
            platform_regex = platform_regex.replace('/','\\\\')
        }
        echo "Searching configs in ${build_config} using ${platform_regex}"
        for (build_config_path in findFiles(glob: build_config)) {
            echo "\tFound config ${build_config_path}"
            def platform = RegexMatcher(build_config_path, platform_regex)
            if(platform) {
                pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
            }
        }
    }
    return pipelineConfig
}

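// Merge order (lowest to highest precedence): the platform's ENV block, then its
// PIPELINE_ENV_OVERRIDE entry for the current pipeline, then the build type's own env.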
def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
    def envVarMap = [:]
    platformPipelineEnv = platformEnv['ENV'] ?: [:]
    platformPipelineEnv.each { var ->
        envVarMap[var.key] = var.value
    }
    platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
    platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
    platformPipelineEnvOverride.each { var ->
        envVarMap[var.key] = var.value
    }
    buildTypeEnv.each { var ->
        // This may override the values above if the job defines the same entry
        envVarMap[var.key] = var.value
    }

    // Environment that only applies to Jenkins tweaks.
    // 3rdParty downloads are stored on the EBS volume so we can reuse them across node
    // instances. This allows us to scale up and down without re-downloading 3rdParty.
    envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages"
    envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages"

    return envVarMap
}

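// Converts an env map into the 'KEY=value' string list expected by withEnv,
// e.g. [PATH: '/usr/bin'] becomes ['PATH=/usr/bin'] (illustrative entry).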
def GetEnvStringList(Map envVarMap) {
    def strList = []
    envVarMap.each { var ->
        strList.add("${var.key}=${var.value}")
    }
    return strList
}

def CheckoutBootstrapScripts(String branchName) {
    checkout([$class: 'GitSCM',
        branches: [[name: "*/${branchName}"]],
        doGenerateSubmoduleConfigurations: false,
        extensions: [
            [$class: 'PruneStaleBranch'],
            [$class: 'AuthorInChangelog'],
            [$class: 'SparseCheckoutPaths', sparseCheckoutPaths: [
                [ $class: 'SparseCheckoutPath', path: 'scripts/build/Jenkins/' ],
                [ $class: 'SparseCheckoutPath', path: 'scripts/build/bootstrap/' ],
                [ $class: 'SparseCheckoutPath', path: 'scripts/build/Platform' ]
            ]],
            // Shallow checkouts break changelog computation. Do not enable.
            [$class: 'CloneOption', noTags: false, reference: '', shallow: false]
        ],
        submoduleCfg: [],
        userRemoteConfigs: scm.userRemoteConfigs
    ])
}

def CheckoutRepo(boolean disableSubmodules = false) {

    if (!fileExists(ENGINE_REPOSITORY_NAME)) {
        palMkdir(ENGINE_REPOSITORY_NAME)
    }

    if(fileExists('.git')) {
        // If the repository is locked after checkout, we likely took the volume snapshot while git
        // was running. Garbage-collect to leave the repo in a usable state; this also helps when a
        // previous git process was interrupted and left the lock file behind.
        def indexLockFile = '.git/index.lock'
        if(fileExists(indexLockFile)) {
            palSh('git gc', 'Git GarbageCollect')
        }
        if(fileExists(indexLockFile)) { // if it is still there, remove it
            palRm(indexLockFile)
        }
    }

    def random = new Random()
    def retryAttempt = 0
    retry(5) {
        if (retryAttempt > 0) {
            sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from CodeCommit
        }
        retryAttempt = retryAttempt + 1
        checkout scm: [
            $class: 'GitSCM',
            branches: scm.branches,
            extensions: [
                [$class: 'PruneStaleBranch'],
                [$class: 'AuthorInChangelog'],
                [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
                [$class: 'CheckoutOption', timeout: 60]
            ],
            userRemoteConfigs: scm.userRemoteConfigs
        ]
    }

    // CHANGE_ID is used by some scripts to uniquely identify the current change (usually metric jobs)
    palSh('git rev-parse HEAD > commitid', 'Getting commit id')
    env.CHANGE_ID = readFile file: 'commitid'
    env.CHANGE_ID = env.CHANGE_ID.trim()
    palRm('commitid')

    // CHANGE_DATE is used by the installer to provide some ability to sort tagged builds in addition to BRANCH_NAME and CHANGE_ID
    commitDateFmt = '%%cI'
    if (env.IS_UNIX) commitDateFmt = '%cI'

    palSh("git show -s --format=${commitDateFmt} ${env.CHANGE_ID} > commitdate", 'Getting commit date')
    env.CHANGE_DATE = readFile file: 'commitdate'
    env.CHANGE_DATE = env.CHANGE_DATE.trim()
    palRm('commitdate')
}

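// Mounts the per-branch/platform/build-type EBS workspace volume via the incremental build
// script; when recreateVolume is set, the existing volume is deleted first so a fresh one is
// created from the given snapshot. (Description inferred from the script's --action flags.)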
def HandleDriveMount(String snapshot, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean recreateVolume = false) {
    unstash name: 'incremental_build_script'

    def pythonCmd = ''
    if(env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
    else pythonCmd = 'python3 -u '

    if(recreateVolume) {
        palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume', false)
    }
    palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --snapshot ${snapshot} --repository_name ${repositoryName} --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume', false)

    if(env.IS_UNIX) {
        sh label: 'Setting volume\'s ownership',
           script: """
                if sudo test ! -d "${workspace}"; then
                    sudo mkdir -p ${workspace}
                    cd ${workspace}/..
                    sudo chown -R lybuilder:root .
                fi
            """
    }
}

def PreBuildCommonSteps(Map pipelineConfig, String snapshot, String repositoryName, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
    echo 'Starting pre-build common steps...'

    if (mount) {
        if(env.RECREATE_VOLUME?.toBoolean()){
            echo 'Recreating drive...'
            HandleDriveMount(snapshot, repositoryName, projectName, pipeline, branchName, platform, buildType, workspace, true)
        } else {
            echo 'Mounting drive...'
            HandleDriveMount(snapshot, repositoryName, projectName, pipeline, branchName, platform, buildType, workspace, false)
        }
    }

    // Clean up the previous repo location. We are currently at the root of the workspace; if a
    // .git folder exists here we need to clean up. Once all branches take this relocation, this can be removed.
    if(env.CLEAN_WORKSPACE?.toBoolean() || fileExists("${workspace}/.git")) {
        if(fileExists(workspace)) {
            palRmDir(workspace)
        }
    }

    dir(workspace) {
        // Add the folder where we will store the 3rdParty downloads and packages
        if(!fileExists('3rdParty')) {
            palMkdir('3rdParty')
        }
    }
    dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
        CheckoutRepo(disableSubmodules)

        // Get python
        if(env.IS_UNIX) {
            sh label: 'Getting python',
               script: 'python/get_python.sh'
        } else {
            bat label: 'Getting python',
                script: 'python/get_python.bat'
        }

        // Always run the clean step; the scripts detect which variables were set. It also cleans
        // if the NODE_LABEL has changed.
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        if(env.IS_UNIX) command += '.sh'
        else command += '.cmd'
        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
        palSh(command, "Running ${platform} clean")

        if(fileExists('.lfsconfig')) {
            palSh("git lfs install", "LFS config exists. Installing LFS hooks to local repo")
            palSh("git lfs pull", "Pulling new LFS objects")
        }
    }
}

def Build(Map pipelineConfig, String platform, String type, String workspace) {
    timeout(time: env.TIMEOUT, unit: 'MINUTES', activity: true) {
        def command = "${pipelineConfig.PYTHON_DIR}/python"
        if(env.IS_UNIX) command += '.sh'
        else command += '.cmd'
        command += " -u ${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            palSh(command, "Running ${platform} ${type}")
        }
    }
}

def TestMetrics(Map pipelineConfig, String workspace, String branchName, String repoName, String buildJobName, String outputDirectory, String configuration) {
    catchError(buildResult: null, stageResult: null) {
        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            checkout scm: [
                $class: 'GitSCM',
                branches: [[name: '*/main']],
                extensions: [
                    [$class: 'AuthorInChangelog'],
                    [$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']
                ],
                userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
            ]
            withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) {
                def command = "${pipelineConfig.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py " +
                    '-e jenkins.creds.user %username% -e jenkins.creds.pass %apitoken% ' +
                    "-e jenkins.base_url ${env.JENKINS_URL} " +
                    "${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} --url ${env.BUILD_URL.replace('%','%%')}"
                bat label: "Publishing ${buildJobName} Test Metrics",
                    script: command
            }
        }
    }
}

def BenchmarkMetrics(Map pipelineConfig, String workspace, String branchName, String outputDirectory) {
    catchError(buildResult: null, stageResult: null) {
        def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            checkout scm: [
                $class: 'GitSCM',
                branches: [[name: '*/main']],
                extensions: [
                    [$class: 'AuthorInChangelog'],
                    [$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']
                ],
                userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
            ]
            def command = "${pipelineConfig.PYTHON_DIR}/python.cmd -u mars/scripts/python/benchmark_scraper.py ${cmakeBuildDir} ${branchName}"
            palSh(command, "Publishing Benchmark Metrics")
        }
    }
}

def ExportTestResults(Map options, String platform, String type, String workspace, Map params) {
    catchError(message: "Error exporting tests results (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        def o3deroot = "${workspace}/${ENGINE_REPOSITORY_NAME}"
        dir("${o3deroot}/${params.OUTPUT_DIRECTORY}") {
            junit testResults: "Testing/**/*.xml"
            palRmDir("Testing")
            // Recreate the test runner xml directories that need to be pre-generated
            palMkdir("Testing/Pytest")
            palMkdir("Testing/Gtest")
        }
    }
}

def ExportTestScreenshots(Map options, String branchName, String platformName, String jobName, String workspace, Map params) {
    catchError(message: "Error exporting test screenshots (this won't fail the build)", buildResult: 'SUCCESS', stageResult: 'FAILURE') {
        dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
            def screenshotsFolder = "AutomatedTesting/user/PythonTests/Automated/Screenshots"
            def s3Uploader = "scripts/build/tools/upload_to_s3.py"
            def command = "${options.PYTHON_DIR}/python.cmd -u ${s3Uploader} --base_dir ${screenshotsFolder} " +
                '--file_regex \\"(.*zip\$)\\" ' +
                "--bucket ${env.TEST_SCREENSHOT_BUCKET} " +
                "--search_subdirectories True --key_prefix ${branchName}_${env.BUILD_NUMBER} " +
                '--extra_args {\\"ACL\\":\\"bucket-owner-full-control\\"}'
            palSh(command, "Uploading test screenshots for ${jobName}")
        }
    }
}

def UploadAPLogs(Map options, String branchName, String platformName, String jobName, String workspace, Map params) {
    dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
        projects = params.CMAKE_LY_PROJECTS.split(",")
        projects.each{ project ->
            def apLogsPath = "${project}/user/log"
            def s3UploadScriptPath = "scripts/build/tools/upload_to_s3.py"
            if(env.IS_UNIX) {
                pythonPath = "${options.PYTHON_DIR}/python.sh"
            }
            else {
                pythonPath = "${options.PYTHON_DIR}/python.cmd"
            }
            def command = "${pythonPath} -u ${s3UploadScriptPath} --base_dir ${apLogsPath} " +
                "--file_regex \".*\" --bucket ${env.AP_LOGS_S3_BUCKET} " +
                "--search_subdirectories True --key_prefix ${env.JENKINS_JOB_NAME}/${branchName}/${env.BUILD_NUMBER}/${platformName}/${jobName} " +
                '--extra_args {\\"ACL\\":\\"bucket-owner-full-control\\"}'
            palSh(command, "Uploading AP logs for job ${jobName} for branch ${branchName}", false)
        }
    }
}

def PostBuildCommonSteps(String workspace, boolean mount = true) {
    echo 'Starting post-build common steps...'

    if (mount) {
        def pythonCmd = ''
        if(env.IS_UNIX) pythonCmd = 'sudo -E python3 -u '
        else pythonCmd = 'python3 -u '

        try {
            timeout(5) {
                palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
            }
        } catch (Exception e) {
            echo "Unmount script error ${e}"
        }
    }
}
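
// Stage factories: each Create*Stage helper returns a closure that runs a named stage,
// so CreateSingleNode can compose setup, build, metrics, export, and teardown stages
// in order on a node.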
def CreateSetupStage(Map pipelineConfig, String snapshot, String repositoryName, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars, boolean onlyMountEBSVolume = false) {
    return {
        stage('Setup') {
            if(onlyMountEBSVolume) {
                HandleDriveMount(snapshot, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], false)
            } else {
                PreBuildCommonSteps(pipelineConfig, snapshot, repositoryName, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
            }
        }
    }
}

def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
    return {
        stage("${jobName}") {
            Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
        }
    }
}

def CreateTestMetricsStage(Map pipelineConfig, String branchName, Map environmentVars, String buildJobName, String outputDirectory, String configuration) {
    return {
        stage("${buildJobName}_metrics") {
            TestMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, env.DEFAULT_REPOSITORY_NAME, buildJobName, outputDirectory, configuration)
            BenchmarkMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, outputDirectory)
        }
    }
}

def CreateExportTestResultsStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars, Map params) {
    return {
        stage("${jobName}_results") {
            ExportTestResults(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'], params)
        }
    }
}

def CreateExportTestScreenshotsStage(Map pipelineConfig, String branchName, String platformName, String jobName, Map environmentVars, Map params) {
    return {
        stage("${jobName}_screenshots") {
            ExportTestScreenshots(pipelineConfig, branchName, platformName, jobName, environmentVars['WORKSPACE'], params)
        }
    }
}

def CreateUploadAPLogsStage(Map pipelineConfig, String branchName, String platformName, String jobName, String workspace, Map params) {
    return {
        stage("${jobName}_upload_ap_logs") {
            UploadAPLogs(pipelineConfig, branchName, platformName, jobName, workspace, params)
        }
    }
}

def CreateTeardownStage(Map environmentVars) {
    return {
        stage('Teardown') {
            PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
        }
    }
}

def CreateSingleNode(Map pipelineConfig, def platform, def build_job, Map envVars, String branchName, String pipelineName, String repositoryName, String projectName, boolean onlyMountEBSVolume = false) {
    def nodeLabel = envVars['NODE_LABEL']
    return {
        def currentResult = ''
        def currentException = ''
        retry(PIPELINE_RETRY_ATTEMPTS) {
            node("${nodeLabel}") {
                if(isUnix()) { // Has to happen inside a node
                    envVars['IS_UNIX'] = 1
                }
                withEnv(GetEnvStringList(envVars)) {
                    def build_job_name = build_job.key
                    try {
                        CreateSetupStage(pipelineConfig, snapshot, repositoryName, projectName, pipelineName, branchName, platform.key, build_job.key, envVars, onlyMountEBSVolume).call()

                        if(build_job.value.steps) { // this is a pipeline with multiple steps, so create all the build stages
                            pipelineEnvVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                            build_job.value.steps.each { build_step ->
                                build_job_name = build_step
                                // Map addition: entries in the right operand override overlapping entries in the left operand
                                envVars = pipelineEnvVars + GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, platform.value.build_types[build_step].PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                                try {
                                    CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call()
                                }
                                catch (Exception e) {
                                    if (envVars['NONBLOCKING_STEP']?.toBoolean()) {
                                        unstable(message: "Build step ${build_step} failed but it's a non-blocking step in build job ${build_job.key}")
                                    } else {
                                        throw e
                                    }
                                }
                            }
                        } else {
                            CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
                        }
                    }
                    catch(Exception e) {
                        if (e instanceof org.jenkinsci.plugins.workflow.steps.FlowInterruptedException) {
                            def causes = e.getCauses().toString()
                            if (causes.contains('RemovedNodeCause')) {
                                error "Node disconnected during build: ${e}" // Error raised to retry stage on a new node
                            }
                        }
                        if (IsAPLogUpload(branchName, build_job_name)) {
                            CreateUploadAPLogsStage(pipelineConfig, branchName, platform.key, build_job_name, envVars['WORKSPACE'], platform.value.build_types[build_job_name].PARAMETERS).call()
                        }
                        // All other errors will be raised outside the retry block
                        currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
                        currentException = e.toString()
                    }
                    finally {
                        def params = platform.value.build_types[build_job_name].PARAMETERS
                        if (env.MARS_REPO && params && params.containsKey('TEST_METRICS') && params.TEST_METRICS == 'True') {
                            def output_directory = params.OUTPUT_DIRECTORY
                            def configuration = params.CONFIGURATION
                            CreateTestMetricsStage(pipelineConfig, branchName, envVars, build_job_name, output_directory, configuration).call()
                        }
                        if (params && params.containsKey('TEST_RESULTS') && params.TEST_RESULTS == 'True') {
                            CreateExportTestResultsStage(pipelineConfig, platform.key, build_job_name, envVars, params).call()
                        }
                        if (params && params.containsKey('TEST_SCREENSHOTS') && params.TEST_SCREENSHOTS == 'True' && currentResult == 'FAILURE') {
                            CreateExportTestScreenshotsStage(pipelineConfig, branchName, platform.key, build_job_name, envVars, params).call()
                        }
                        CreateTeardownStage(envVars).call()
                    }
                }
            }
        }
        // https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
        // {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
        if (currentResult == 'FAILURE') {
            currentBuild.result = 'FAILURE'
            error "FAILURE: ${currentException}"
        } else if (currentResult == 'UNSTABLE') {
            currentBuild.result = 'UNSTABLE'
            unstable(message: "UNSTABLE: ${currentException}")
        }
    }
}

// Used in CreateBuildJobs() to preprocess the build_job steps and programmatically create
// node sections, each with the set of steps that can run on that node.
class PipeStepJobData {
    String m_nodeLabel = ""
    def m_steps = []

    PipeStepJobData(String label) {
        this.m_nodeLabel = label
    }

    def addStep(def step) {
        this.m_steps.add(step)
    }
}

def CreateBuildJobs(Map pipelineConfig, def platform, def build_job, Map envVars, String branchName, String pipelineName, String repositoryName, String projectName) {

    // if this is a pipeline, split jobs based on the NODE_LABEL
    if(build_job.value.steps) {
        def defaultLabel = envVars['NODE_LABEL']
        def lastNodeLabel = ""
        def jobList = []
        def currentIdx = -1

        // Iterate the steps to build the ordered sets of node label + steps.
        // Order matters, as it is executed from first to last.
        // Example layout:
        //   node A
        //     step 1
        //     step 2
        //   node B
        //     step 3
        //   node C
        //     step 4
        build_job.value.steps.each { build_step ->
            // if a node label is defined
            if(platform.value.build_types[build_step] && platform.value.build_types[build_step].PIPELINE_ENV &&
               platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL']) {

                // if the last node label doesn't match the new one, append it
                if(platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL'] != lastNodeLabel) {
                    lastNodeLabel = platform.value.build_types[build_step].PIPELINE_ENV['NODE_LABEL']
                    jobList.add(new PipeStepJobData(lastNodeLabel))
                    currentIdx++
                }
            }
            // no label defined, so it needs to run on the default node label
            else if(lastNodeLabel != defaultLabel) { // if the last node is not the default, append the default
                lastNodeLabel = defaultLabel
                jobList.add(new PipeStepJobData(lastNodeLabel))
                currentIdx++
            }
            // add the build_step to the current node
            jobList[currentIdx].addStep(build_step)
        }

        return {
            jobList.eachWithIndex{ element, idx ->
                // update the node label + steps to the discovered data
                envVars['NODE_LABEL'] = element.m_nodeLabel
                build_job.value.steps = element.m_steps
                // Additional nodes only mount the drive; clean parameters are handled by the first node.
                boolean onlyMountEBSVolume = idx != 0
                // add this node
                CreateSingleNode(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName, onlyMountEBSVolume).call()
            }
        }
    } else {
        return CreateSingleNode(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName)
    }
}

def projectName = ''
def pipelineName = ''
def branchName = ''
def pipelineConfig = {}

// Start Pipeline
try {
    stage('Setup Pipeline') {
        node('controller') {
            def envVarList = []
            if(isUnix()) {
                envVarList.add('IS_UNIX=1')
            }
            withEnv(envVarList) {
                timestamps {
                    repositoryUrl = scm.getUserRemoteConfigs()[0].getUrl()
                    // repositoryName is the full repository name
                    repositoryName = (repositoryUrl =~ /https:\/\/github.com\/(.*)\.git/)[0][1]
                    env.REPOSITORY_NAME = repositoryName
                    (projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
                    env.PIPELINE_NAME = pipelineName
                    if(env.BRANCH_NAME) {
                        branchName = env.BRANCH_NAME
                    } else {
                        branchName = scm.branches[0].name // for non-multibranch pipelines
                        env.BRANCH_NAME = branchName // so scripts that read this environment have it (e.g. incremental_build_util.py)
                    }
                    if(env.CHANGE_TARGET) {
                        // PR builds
                        if(BUILD_SNAPSHOTS.contains(env.CHANGE_TARGET)) {
                            snapshot = env.CHANGE_TARGET
                            echo "Snapshot for destination branch \"${env.CHANGE_TARGET}\" found."
                        } else {
                            snapshot = DEFAULT_BUILD_SNAPSHOT
                            echo "Snapshot for destination branch \"${env.CHANGE_TARGET}\" does not exist, defaulting to snapshot \"${snapshot}\""
                        }
                    } else {
                        // Non-PR builds
                        pipelineParameters.add(choice(defaultValue: DEFAULT_BUILD_SNAPSHOT, name: 'SNAPSHOT', choices: BUILD_SNAPSHOTS_WITH_EMPTY, description: 'Selects the build snapshot to use. A more divergent snapshot will cause longer build times, but will not cause build failures.'))
                        snapshot = env.SNAPSHOT
                        echo "Snapshot \"${snapshot}\" selected."
                    }
                    pipelineProperties.add(disableConcurrentBuilds())

                    echo "Running repository: \"${repositoryName}\", pipeline: \"${pipelineName}\", branch: \"${branchName}\", CHANGE_ID: \"${env.CHANGE_ID}\", GIT_COMMIT: \"${scm.GIT_COMMIT}\"..."

                    CheckoutBootstrapScripts(branchName)

                    // Load configs
                    pipelineConfig = LoadPipelineConfig(pipelineName, branchName)

                    // Add each platform as a parameter that the user can disable if needed
                    if (!IsPullRequest(branchName)) {
                        pipelineParameters.add(stringParam(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'))

                        pipelineConfig.platforms.each { platform ->
                            pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
                        }
                    }
                    // Add additional Jenkins parameters
                    pipelineConfig.platforms.each { platform ->
                        platformEnv = platform.value.PIPELINE_ENV
                        pipelineJenkinsParameters = platformEnv['PIPELINE_JENKINS_PARAMETERS'] ?: [:]
                        jenkinsParametersToAdd = pipelineJenkinsParameters[pipelineName] ?: [:]
                        jenkinsParametersToAdd.each{ jenkinsParameter ->
                            defaultValue = jenkinsParameter['default_value']
                            // Use the last run's value as the default so values can be saved across different Jenkins environments
                            if (jenkinsParameter['use_last_run_value']?.toBoolean()) {
                                defaultValue = params."${jenkinsParameter['parameter_name']}" ?: jenkinsParameter['default_value']
                            }
                            switch (jenkinsParameter['parameter_type']) {
                                case 'string':
                                    pipelineParameters.add(stringParam(defaultValue: defaultValue,
                                        description: jenkinsParameter['description'],
                                        name: jenkinsParameter['parameter_name']
                                    ))
                                    break
                                case 'boolean':
                                    pipelineParameters.add(booleanParam(defaultValue: defaultValue,
                                        description: jenkinsParameter['description'],
                                        name: jenkinsParameter['parameter_name']
                                    ))
                                    break
                                case 'password':
                                    pipelineParameters.add(password(defaultValue: defaultValue,
                                        description: jenkinsParameter['description'],
                                        name: jenkinsParameter['parameter_name']
                                    ))
                                    break
                            }
                        }
                    }

                    pipelineProperties.add(parameters(pipelineParameters))
                    properties(pipelineProperties)

                    // Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
                    stash name: 'incremental_build_script',
                          includes: INCREMENTAL_BUILD_SCRIPT_PATH
                }
            }
        }
    }

    if(env.BUILD_NUMBER == '1' && !IsPullRequest(branchName)) {
        // Exit the pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
        // to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
        currentBuild.result = 'SUCCESS'
        return
    }

    def someBuildHappened = false

    // Build and Post-Build Testing Stage
    def buildConfigs = [:]

    // Platform Builds run on EC2
    pipelineConfig.platforms.each { platform ->
        platform.value.build_types.each { build_job ->
            if (IsJobEnabled(branchName, build_job, pipelineName, platform.key)) { // User can filter jobs, jobs are tagged by pipeline
                def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
                envVars['JENKINS_JOB_NAME'] = env.JOB_NAME // Save original Jenkins job name to JENKINS_JOB_NAME
                envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
                someBuildHappened = true

                buildConfigs["${platform.key} [${build_job.key}]"] = CreateBuildJobs(pipelineConfig, platform, build_job, envVars, branchName, pipelineName, repositoryName, projectName)
            }
        }
    }

    timestamps {

        stage('Build') {
            parallel buildConfigs // Run parallel builds
        }

        echo 'All builds successful'
    }
    if (!someBuildHappened) {
        currentBuild.result = 'NOT_BUILT'
    }
}
catch(Exception e) {
    error "Exception: ${e}"
}
finally {
    try {
        node('controller') {
            if("${currentBuild.currentResult}" == "SUCCESS") {
                buildFailure = ""
                emailBody = "${BUILD_URL}\nSuccess!"
            } else {
                buildFailure = tm('${BUILD_FAILURE_ANALYZER}')
                emailBody = "${BUILD_URL}\n${buildFailure}!"
            }
            if(env.POST_AR_BUILD_SNS_TOPIC) {
                message_json = [
                    "build_url": env.BUILD_URL,
                    "build_number": env.BUILD_NUMBER,
                    "repository_name": env.REPOSITORY_NAME,
                    "branch_name": env.BRANCH_NAME,
                    "build_result": "${currentBuild.currentResult}",
                    "build_failure": buildFailure,
                    "recreate_volume": env.RECREATE_VOLUME,
                    "clean_output_directory": env.CLEAN_OUTPUT_DIRECTORY,
                    "clean_assets": env.CLEAN_ASSETS
                ]
                snsPublish(
                    topicArn: env.POST_AR_BUILD_SNS_TOPIC,
                    subject: 'Build Result',
                    message: JsonOutput.toJson(message_json)
                )
            }
            emailext (
                body: "${emailBody}",
                subject: "${currentBuild.currentResult}: ${JOB_NAME} - Build # ${BUILD_NUMBER}",
                recipientProviders: [
                    [$class: 'RequesterRecipientProvider']
                ]
            )
        }
    } catch(Exception e) {
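        // Intentionally ignored (assumption): notification failures should not change the build result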
    }
}