diff --git a/AutomatedReview/Jenkinsfile b/AutomatedReview/Jenkinsfile index 54ec974917..9132b7558b 100644 --- a/AutomatedReview/Jenkinsfile +++ b/AutomatedReview/Jenkinsfile @@ -488,8 +488,6 @@ def TestMetrics(Map options, String workspace, String branchName, String repoNam ] withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) { def command = "${options.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py -e jenkins.creds.user ${username} -e jenkins.creds.pass ${apitoken} ${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} " - if (params.DESTINATION_BRANCH) - command += "--destination-branch ${params.DESTINATION_BRANCH} " bat label: "Publishing ${buildJobName} Test Metrics", script: command } diff --git a/scripts/build/Jenkins/Jenkinsfile b/scripts/build/Jenkins/Jenkinsfile new file mode 100644 index 0000000000..08bf5e90f1 --- /dev/null +++ b/scripts/build/Jenkins/Jenkinsfile @@ -0,0 +1,711 @@ +#!/usr/bin/env groovy +/* +* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +* its licensors. +* +* For complete copyright and license terms please see the LICENSE at the root of this +* distribution (the "License"). All use of this software is governed by the License, +* or, if provided, by the license below or the license accompanying this file. Do not +* remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* +*/ + +PIPELINE_CONFIG_FILE = 'scripts/build/Jenkins/lumberyard.json' +INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py' + +EMPTY_JSON = readJSON text: '{}' + +ENGINE_REPOSITORY_NAME = 'o3de' + +def pipelineProperties = [] + +def pipelineParameters = [ + // Build/clean Parameters + // The CLEAN_OUTPUT_DIRECTORY is used by ci_build scripts. Creating the parameter here passes it as an environment variable to jobs and is consumed that way + booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'), + booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'), + booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'), + booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'), + string(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). 
Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'), + + // Pull Request Parameters + string(defaultValue: '', description: '', name: 'DESTINATION_BRANCH'), + string(defaultValue: '', description: '', name: 'DESTINATION_COMMIT'), + string(defaultValue: '', description: '', name: 'PULL_REQUEST_ID'), + string(defaultValue: '', description: '', name: 'REPOSITORY_NAME'), + string(defaultValue: '', description: '', name: 'SOURCE_BRANCH'), + string(defaultValue: '', description: '', name: 'SOURCE_COMMIT') +] + +def palSh(cmd, lbl = '', winSlashReplacement = true) { + if (env.IS_UNIX) { + sh label: lbl, + script: cmd + } else if (winSlashReplacement) { + bat label: lbl, + script: cmd.replace('/','\\') + } else { + bat label: lbl, + script: cmd + } +} + +def palMkdir(path) { + if (env.IS_UNIX) { + sh label: "Making directories ${path}", + script: "mkdir -p ${path}" + } else { + def win_path = path.replace('/','\\') + bat label: "Making directories ${win_path}", + script: "mkdir ${win_path}." + } +} + +def palRm(path) { + if (env.IS_UNIX) { + sh label: "Removing ${path}", + script: "rm ${path}" + } else { + def win_path = path.replace('/','\\') + bat label: "Removing ${win_path}", + script: "del ${win_path}" + } +} + +def palRmDir(path) { + if (env.IS_UNIX) { + sh label: "Removing ${path}", + script: "rm -rf ${path}" + } else { + def win_path = path.replace('/','\\') + bat label: "Removing ${win_path}", + script: "rd /s /q ${win_path}" + } +} + +def IsJobEnabled(buildTypeMap, pipelineName, platformName) { + def job_list_override = params.JOB_LIST_OVERRIDE.tokenize(',') + if(params.PULL_REQUEST_ID) { // dont allow pull requests to filter platforms/jobs + if(buildTypeMap.value.TAGS) { + return buildTypeMap.value.TAGS.contains(pipelineName) + } + } else if (!job_list_override.isEmpty()) { + return params[platformName] && job_list_override.contains(buildTypeMap.key); + } else { + if (params[platformName]) { + if(buildTypeMap.value.TAGS) { + return buildTypeMap.value.TAGS.contains(pipelineName) + } + } + } + return false +} + +def GetRunningPipelineName(JENKINS_JOB_NAME) { + // If the job name has an underscore + def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_') + if (job_parts.size() > 1) { + return [job_parts.take(job_parts.size() - 1).join('_'), job_parts[job_parts.size()-1]] + } + return [job_parts[0], 'default'] +} + +@NonCPS +def RegexMatcher(str, regex) { + def matcher = (str =~ regex) + return matcher ? 
matcher.group(1) : null +} + +def LoadPipelineConfig(String pipelineName, String branchName, String scmType) { + echo 'Loading pipeline config' + if (scmType == 'codecommit') { + PullFilesFromGit(PIPELINE_CONFIG_FILE, branchName, true, ENGINE_REPOSITORY_NAME) + } + def pipelineConfig = {} + pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE + palRm(PIPELINE_CONFIG_FILE) + pipelineConfig.platforms = EMPTY_JSON + + // Load the pipeline configs per platform + pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config -> + def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)') + if (!env.IS_UNIX) { + platform_regex = platform_regex.replace('/','\\\\') + } + echo "Downloading platform pipeline configs ${pipeline_config}" + if (scmType == 'codecommit') { + PullFilesFromGit(pipeline_config, branchName, false, ENGINE_REPOSITORY_NAME) + } + echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}" + for (pipeline_config_path in findFiles(glob: pipeline_config)) { + echo "\tFound platform pipeline config ${pipeline_config_path}" + def platform = RegexMatcher(pipeline_config_path, platform_regex) + if(platform) { + pipelineConfig.platforms[platform] = EMPTY_JSON + pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString() + } + palRm(pipeline_config_path.toString()) + } + } + + // Load the build configs + pipelineConfig.BUILD_CONFIGS.each { build_config -> + def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)') + if (!env.IS_UNIX) { + platform_regex = platform_regex.replace('/','\\\\') + } + echo "Downloading configs ${build_config}" + if (scmType == 'codecommit') { + PullFilesFromGit(build_config, branchName, false, ENGINE_REPOSITORY_NAME) + } + echo "Searching configs in ${build_config} using ${platform_regex}" + for (build_config_path in findFiles(glob: build_config)) { + echo "\tFound config ${build_config_path}" + def platform = RegexMatcher(build_config_path, platform_regex) + if(platform) { + pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString() + } + } + } + return pipelineConfig +} + +def GetSCMType() { + def gitUrl = scm.getUserRemoteConfigs()[0].getUrl() + if (gitUrl ==~ /https:\/\/git-codecommit.*/) { + return 'codecommit' + } else if (gitUrl ==~ /https:\/\/github.com.*/) { + return 'github' + } + return 'unknown' +} + +def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) { + def envVarMap = [:] + platformPipelineEnv = platformEnv['ENV'] ?: [:] + platformPipelineEnv.each { var -> + envVarMap[var.key] = var.value + } + platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:] + platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:] + platformPipelineEnvOverride.each { var -> + envVarMap[var.key] = var.value + } + buildTypeEnv.each { var -> + // This may override the above one if there is an entry defined by the job + envVarMap[var.key] = var.value + } + + // Environment that only applies to to Jenkins tweaks. + // For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node + // instances. 
This allow us to scale up and down without having to re-download 3rdParty + envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages" + envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages" + + return envVarMap +} + +def GetEnvStringList(Map envVarMap) { + def strList = [] + envVarMap.each { var -> + strList.add("${var.key}=${var.value}") + } + return strList +} + +// Pulls/downloads files from the repo through codecommit. Despite Glob matching is NOT supported, '*' is supported +// as a folder or filename (not a portion, it has to be the whole folder or filename) +def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFound = true, String repositoryName = env.DEFAULT_REPOSITORY_NAME) { + echo "PullFilesFromGit filenamePath=${filenamePath} branchName=${branchName} repositoryName=${repositoryName}" + def folderPathParts = filenamePath.tokenize('/') + def filename = folderPathParts[folderPathParts.size()-1] + folderPathParts.remove(folderPathParts.size()-1) // remove the filename + def folderPath = folderPathParts.join('/') + if (folderPath.contains('*')) { + + def currentPath = '' + for (int i = 0; i < folderPathParts.size(); i++) { + if (folderPathParts[i] == '*') { + palMkdir(currentPath) + retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${currentPath} > ${currentPath}/.codecommit", "GetFolder ${currentPath}") } + def folderInfo = readJSON file: "${currentPath}/.codecommit" + folderInfo.subFolders.each { folder -> + def newSubPath = currentPath + '/' + folder.relativePath + for (int j = i+1; j < folderPathParts.size(); j++) { + newSubPath = newSubPath + '/' + folderPathParts[j] + } + newSubPath = newSubPath + '/' + filename + PullFilesFromGit(newSubPath, branchName, false, repositoryName) + } + palRm("${currentPath}/.codecommit") + } + if (i == 0) { + currentPath = folderPathParts[i] + } else { + currentPath = currentPath + '/' + folderPathParts[i] + } + } + + } else if (filename.contains('*')) { + + palMkdir(folderPath) + retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${folderPath} > ${folderPath}/.codecommit", "GetFolder ${folderPath}") } + def folderInfo = readJSON file: "${folderPath}/.codecommit" + folderInfo.files.each { file -> + PullFilesFromGit("${folderPath}/${filename}", branchName, false, repositoryName) + } + palRm("${folderPath}/.codecommit") + + } else { + + def errorFile = "${folderPath}/error.txt" + palMkdir(folderPath) + retry(3) { + try { + if(env.IS_UNIX) { + sh label: "Downloading ${filenamePath}", + script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${filenamePath}_encoded" + sh label: 'Decoding', + script: "base64 --decode ${filenamePath}_encoded > ${filenamePath}" + } else { + errorFile = errorFile.replace('/','\\') + win_filenamePath = filenamePath.replace('/', '\\') + bat label: "Downloading ${win_filenamePath}", + script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${win_filenamePath}_encoded" + bat label: 'Decoding', + script: "certutil -decode ${win_filenamePath}_encoded ${win_filenamePath}" + } + palRm("${filenamePath}_encoded") + } catch 
(Exception ex) { + def error = '' + if(fileExists(errorFile)) { + error = readFile errorFile + } + if (!error || !(!failIfNotFound && error.contains('FileDoesNotExistException'))) { + palRm("${errorFile} ${filenamePath}.encoded ${filenamePath}") + throw new Exception("Could not get file: ${filenamePath}, ex: ${ex}, stderr: ${error}") + } + } + palRm(errorFile) + } + } +} + +def SetLfsCredentials(cmd, lbl = '') { + if (env.IS_UNIX) { + sh label: lbl, + script: cmd + } else { + bat label: lbl, + script: cmd + } +} + +def CheckoutBootstrapScripts(String branchName) { + checkout([$class: "GitSCM", + branches: [[name: "*/${branchName}"]], + doGenerateSubmoduleConfigurations: false, + extensions: [ + [ + $class: "SparseCheckoutPaths", + sparseCheckoutPaths: [ + [ $class: "SparseCheckoutPath", path: "scripts/build/Jenkins/" ], + [ $class: "SparseCheckoutPath", path: "scripts/build/bootstrap/" ], + [ $class: "SparseCheckoutPath", path: "Tools/build/JenkinsScripts/build/Platform" ] + ] + ], + [ + $class: "CloneOption", depth: 1, noTags: false, reference: "", shallow: true + ] + ], + submoduleCfg: [], + userRemoteConfigs: scm.userRemoteConfigs + ]) +} + +def CheckoutRepo(boolean disableSubmodules = false) { + dir(ENGINE_REPOSITORY_NAME) { + palSh('git lfs uninstall', 'Git LFS Uninstall') // Prevent git from pulling lfs objects during checkout + + if(fileExists('.git')) { + // If the repository after checkout is locked, likely we took a snapshot while git was running, + // to leave the repo in a usable state, garbagecollect. This also helps in situations where + def indexLockFile = '.git/index.lock' + if(fileExists(indexLockFile)) { + palSh('git gc', 'Git GarbageCollect') + } + if(fileExists(indexLockFile)) { // if it is still there, remove it + palRm(indexLockFile) + } + } + } + + def random = new Random() + def retryAttempt = 0 + retry(5) { + if (retryAttempt > 0) { + sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from CodeCommit + } + retryAttempt = retryAttempt + 1 + if(params.PULL_REQUEST_ID) { + // This is a pull request build. Perform merge with destination branch before building. + dir(ENGINE_REPOSITORY_NAME) { + checkout scm: [ + $class: 'GitSCM', + branches: scm.branches, + extensions: [ + [$class: 'PreBuildMerge', options: [mergeRemote: 'origin', mergeTarget: params.DESTINATION_BRANCH]], + [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true], + [$class: 'CheckoutOption', timeout: 60] + ], + userRemoteConfigs: scm.userRemoteConfigs + ] + } + } else { + dir(ENGINE_REPOSITORY_NAME) { + checkout scm: [ + $class: 'GitSCM', + branches: scm.branches, + extensions: [ + [$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true], + [$class: 'CheckoutOption', timeout: 60] + ], + userRemoteConfigs: scm.userRemoteConfigs + ] + } + } + } + + // Add folder where we will store the 3rdParty downloads and packages + if(!fileExists('3rdParty')) { + palMkdir('3rdParty') + } + + dir(ENGINE_REPOSITORY_NAME) { + // Run lfs in a separate step. 
Jenkins is unable to load the credentials for the custom LFS endpoint + withCredentials([usernamePassword(credentialsId: "${env.GITHUB_USER}", passwordVariable: 'accesstoken', usernameVariable: 'username')]) { + SetLfsCredentials("git config -f .lfsconfig lfs.url https://${username}:${accesstoken}@${env.LFS_URL}", 'Set credentials') + } + palSh('git lfs install', 'Git LFS Install') + palSh('git lfs pull', 'Git LFS Pull') + + // CHANGE_ID is used by some scripts to identify uniquely the current change (usually metric jobs) + palSh('git rev-parse HEAD > commitid', 'Getting commit id') + env.CHANGE_ID = readFile file: 'commitid' + env.CHANGE_ID = env.CHANGE_ID.trim() + palRm('commitid') + } +} + +def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) { + echo 'Starting pre-build common steps...' + + if (mount) { + unstash name: 'incremental_build_script' + + def pythonCmd = '' + if(env.IS_UNIX) pythonCmd = 'sudo -E python -u ' + else pythonCmd = 'python -u ' + + if(env.RECREATE_VOLUME.toBoolean()) { + palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume') + } + timeout(5) { + palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume') + } + + if(env.IS_UNIX) { + sh label: 'Setting volume\'s ownership', + script: """ + if sudo test ! -d "${workspace}"; then + sudo mkdir -p ${workspace} + cd ${workspace}/.. + sudo chown -R lybuilder:root . + fi + """ + } + } + + // Cleanup previous repo location, we are currently at the root of the workspace, if we have a .git folder + // we need to cleanup. 
Once all branches take this relocation, we can remove this + if(env.CLEAN_WORKSPACE.toBoolean() || fileExists("${workspace}/.git")) { + if(fileExists(workspace)) { + palRmDir(workspace) + } + } + + dir(workspace) { + + CheckoutRepo(disableSubmodules) + + // Get python + dir(ENGINE_REPOSITORY_NAME) { + if(env.IS_UNIX) { + sh label: 'Getting python', + script: 'python/get_python.sh' + } else { + bat label: 'Getting python', + script: 'python/get_python.bat' + } + + if(env.CLEAN_OUTPUT_DIRECTORY.toBoolean() || env.CLEAN_ASSETS.toBoolean()) { + def command = "${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean" + if (env.IS_UNIX) { + sh label: "Running ${platform} clean", + script: "${pipelineConfig.PYTHON_DIR}/python.sh -u ${command}" + } else { + bat label: "Running ${platform} clean", + script: "${pipelineConfig.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\') + } + } + } + } +} + +def Build(Map options, String platform, String type, String workspace) { + def command = "${options.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}" + dir("${workspace}/${ENGINE_REPOSITORY_NAME}") { + if (env.IS_UNIX) { + sh label: "Running ${platform} ${type}", + script: "${options.PYTHON_DIR}/python.sh -u ${command}" + } else { + bat label: "Running ${platform} ${type}", + script: "${options.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\') + } + } +} + +def TestMetrics(Map options, String workspace, String branchName, String repoName, String buildJobName, String outputDirectory, String configuration) { + catchError(buildResult: null, stageResult: null) { + def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/') + dir("${workspace}/${ENGINE_REPOSITORY_NAME}") { + checkout scm: [ + $class: 'GitSCM', + branches: [[name: '*/main']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']], + userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]] + ] + withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) { + def command = "${options.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py -e jenkins.creds.user ${username} -e jenkins.creds.pass ${apitoken} ${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} " + if (params.DESTINATION_BRANCH) + command += "--destination-branch ${params.DESTINATION_BRANCH} " + bat label: "Publishing ${buildJobName} Test Metrics", + script: command + } + } + } +} + +def PostBuildCommonSteps(String workspace, boolean mount = true) { + echo 'Starting post-build common steps...' 
+ + if(params.PULL_REQUEST_ID) { + dir("${workspace}/${ENGINE_REPOSITORY_NAME}") { + if(fileExists('.git')) { + palSh('git reset --hard HEAD', 'Discard PR merge, git reset') + } + } + } + + if (mount) { + def pythonCmd = '' + if(env.IS_UNIX) pythonCmd = 'sudo -E python -u ' + else pythonCmd = 'python -u ' + + try { + timeout(5) { + palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume') + } + } catch (Exception e) { + echo "Unmount script error ${e}" + } + } +} + +def CreateSetupStage(Map pipelineConfig, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars) { + return { + stage("Setup") { + PreBuildCommonSteps(pipelineConfig, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME']) + } + } +} + +def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) { + return { + stage("${jobName}") { + Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE']) + } + } +} + +def CreateTestMetricsStage(Map pipelineConfig, String branchName, Map environmentVars, String buildJobName, String outputDirectory, String configuration) { + return { + stage("${buildJobName}_metrics") { + TestMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, env.DEFAULT_REPOSITORY_NAME, buildJobName, outputDirectory, configuration) + } + } +} + +def CreateTeardownStage(Map environmentVars) { + return { + stage("Teardown") { + PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME']) + } + } +} + +def projectName = '' +def pipelineName = '' +def branchName = '' +def pipelineConfig = {} + +// Start Pipeline +try { + stage('Setup Pipeline') { + node('controller') { + def envVarList = [] + if(isUnix()) { + envVarList.add('IS_UNIX=1') + } + withEnv(envVarList) { + timestamps { + (projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins + scmType = GetSCMType() + + if(env.BRANCH_NAME) { + branchName = env.BRANCH_NAME + } else { + branchName = scm.branches[0].name // for non-multibranch pipelines + env.BRANCH_NAME = branchName // so scripts that read this environment have it (e.g. incremental_build_util.py) + } + pipelineProperties.add(disableConcurrentBuilds()) + + echo "Running \"${pipelineName}\" for \"${branchName}\"..." + + if (scmType == 'github') { + CheckoutBootstrapScripts(branchName) + } + + // Load configs + pipelineConfig = LoadPipelineConfig(pipelineName, branchName, scmType) + + // Add each platform as a parameter that the user can disable if needed + pipelineConfig.platforms.each { platform -> + pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key)) + } + pipelineProperties.add(parameters(pipelineParameters)) + properties(pipelineProperties) + + // Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it + if (scmType == 'codecommit') { + PullFilesFromGit(INCREMENTAL_BUILD_SCRIPT_PATH, branchName, true, ENGINE_REPOSITORY_NAME) + } + stash name: 'incremental_build_script', + includes: INCREMENTAL_BUILD_SCRIPT_PATH + } + } + } + } + + if(env.BUILD_NUMBER == '1') { + // Exit pipeline early on the intial build. This allows Jenkins to load the pipeline for the branch and enables users + // to select build parameters on their first actual build. 
See https://issues.jenkins.io/browse/JENKINS-41929 + currentBuild.result = 'SUCCESS' + return + } + + // Build and Post-Build Testing Stage + def buildConfigs = [:] + + // Platform Builds run on EC2 + pipelineConfig.platforms.each { platform -> + platform.value.build_types.each { build_job -> + if (IsJobEnabled(build_job, pipelineName, platform.key)) { // User can filter jobs, jobs are tagged by pipeline + def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName) + envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this + def nodeLabel = envVars['NODE_LABEL'] + + buildConfigs["${platform.key} [${build_job.key}]"] = { + node("${nodeLabel}") { + if(isUnix()) { // Has to happen inside a node + envVars['IS_UNIX'] = 1 + } + withEnv(GetEnvStringList(envVars)) { + timeout(time: envVars['TIMEOUT'], unit: 'MINUTES', activity: true) { + try { + def build_job_name = build_job.key + + CreateSetupStage(pipelineConfig, projectName, pipelineName, branchName, platform.key, build_job.key, envVars).call() + + if(build_job.value.steps) { //this is a pipe with many steps so create all the build stages + build_job.value.steps.each { build_step -> + build_job_name = build_step + CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call() + } + } else { + CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call() + } + + if (env.MARS_REPO && platform.key == 'Windows' && build_job_name.startsWith('test')) { + def output_directory = platform.value.build_types[build_job_name].PARAMETERS.OUTPUT_DIRECTORY + def configuration = platform.value.build_types[build_job_name].PARAMETERS.CONFIGURATION + CreateTestMetricsStage(pipelineConfig, branchName, envVars, build_job_name, output_directory, configuration).call() + } + } + catch(Exception e) { + // https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java + // {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED} + def currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE' + if (currentResult == 'FAILURE') { + currentBuild.result = 'FAILURE' + error "FAILURE: ${e}" + } else if (currentResult == 'UNSTABLE') { + currentBuild.result = 'UNSTABLE' + unstable(message: "UNSTABLE: ${e}") + } + } + finally { + CreateTeardownStage(envVars).call() + } + } + } + } + } + } + } + } + + timestamps { + + stage('Build') { + parallel buildConfigs // Run parallel builds + } + + echo 'All builds successful' + } +} +catch(Exception e) { + error "Exception: ${e}" +} +finally { + try { + if(env.SNS_TOPIC) { + snsPublish( + topicArn: env.SNS_TOPIC, + subject:'Build Result', + message:"${currentBuild.currentResult}:${params.REPOSITORY_NAME}:${params.SOURCE_BRANCH}:${params.SOURCE_COMMIT}:${params.DESTINATION_COMMIT}:${params.PULL_REQUEST_ID}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}" + ) + } + step([ + $class: 'Mailer', + notifyEveryUnstableBuild: true, + sendToIndividuals: true, + recipients: emailextrecipients([ + [$class: 'CulpritsRecipientProvider'], + [$class: 'RequesterRecipientProvider'] + ]) + ]) + } catch(Exception e) { + } +} diff --git a/scripts/build/Jenkins/lumberyard.json b/scripts/build/Jenkins/lumberyard.json new file mode 100644 index 0000000000..a174b7b449 --- /dev/null +++ b/scripts/build/Jenkins/lumberyard.json @@ -0,0 +1,12 @@ +{ + "BUILD_ENTRY_POINT": "Tools/build/JenkinsScripts/build/ci_build.py", + "PIPELINE_CONFIGS": [ + 
"Tools/build/JenkinsScripts/build/Platform/*/pipeline.json", + "restricted/*/Tools/build/JenkinsScripts/build/pipeline.json" + ], + "BUILD_CONFIGS": [ + "Tools/build/JenkinsScripts/build/Platform/*/build_config.json", + "restricted/*/Tools/build/JenkinsScripts/build/build_config.json" + ], + "PYTHON_DIR": "python" +}