mbalfour 5 years ago
commit 12725feb48

@@ -1,709 +0,0 @@
#!/usr/bin/env groovy
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
PIPELINE_CONFIG_FILE = 'AutomatedReview/lumberyard.json'
INCREMENTAL_BUILD_SCRIPT_PATH = 'scripts/build/bootstrap/incremental_build_util.py'
EMPTY_JSON = readJSON text: '{}'
ENGINE_REPOSITORY_NAME = 'o3de'
def pipelineProperties = []
def pipelineParameters = [
// Build/clean Parameters
// CLEAN_OUTPUT_DIRECTORY is used by the ci_build scripts. Creating the parameter here passes it to jobs as an environment variable, where it is consumed.
booleanParam(defaultValue: false, description: 'Deletes the contents of the output directory before building. This will cause a \"clean\" build. NOTE: does not imply CLEAN_ASSETS', name: 'CLEAN_OUTPUT_DIRECTORY'),
booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
string(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'),
// Pull Request Parameters
string(defaultValue: '', description: '', name: 'DESTINATION_BRANCH'),
string(defaultValue: '', description: '', name: 'DESTINATION_COMMIT'),
string(defaultValue: '', description: '', name: 'PULL_REQUEST_ID'),
string(defaultValue: '', description: '', name: 'REPOSITORY_NAME'),
string(defaultValue: '', description: '', name: 'SOURCE_BRANCH'),
string(defaultValue: '', description: '', name: 'SOURCE_COMMIT')
]
def palSh(cmd, lbl = '', winSlashReplacement = true) {
if (env.IS_UNIX) {
sh label: lbl,
script: cmd
} else if (winSlashReplacement) {
bat label: lbl,
script: cmd.replace('/','\\')
} else {
bat label: lbl,
script: cmd
}
}
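// Usage (illustrative): palSh('cmake --version', 'Print CMake version') runs via sh on Unix
// nodes and via bat on Windows, replacing '/' with '\' unless winSlashReplacement is false.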
def palMkdir(path) {
if (env.IS_UNIX) {
sh label: "Making directories ${path}",
script: "mkdir -p ${path}"
} else {
def win_path = path.replace('/','\\')
bat label: "Making directories ${win_path}",
script: "mkdir ${win_path}."
}
}
def palRm(path) {
if (env.IS_UNIX) {
sh label: "Removing ${path}",
script: "rm ${path}"
} else {
def win_path = path.replace('/','\\')
bat label: "Removing ${win_path}",
script: "del ${win_path}"
}
}
def palRmDir(path) {
if (env.IS_UNIX) {
sh label: "Removing ${path}",
script: "rm -rf ${path}"
} else {
def win_path = path.replace('/','\\')
bat label: "Removing ${win_path}",
script: "rd /s /q ${win_path}"
}
}
def IsJobEnabled(buildTypeMap, pipelineName, platformName) {
def job_list_override = params.JOB_LIST_OVERRIDE.tokenize(',')
if(params.PULL_REQUEST_ID) { // don't allow pull requests to filter platforms/jobs
if(buildTypeMap.value.TAGS) {
return buildTypeMap.value.TAGS.contains(pipelineName)
}
} else if (!job_list_override.isEmpty()) {
return params[platformName] && job_list_override.contains(buildTypeMap.key);
} else {
if (params[platformName]) {
if(buildTypeMap.value.TAGS) {
return buildTypeMap.value.TAGS.contains(pipelineName)
}
}
}
return false
}
def GetRunningPipelineName(JENKINS_JOB_NAME) {
// If the job name has an underscore, everything before the last '_' is the project name and the
// last segment is the pipeline name; otherwise the pipeline falls back to 'default'
def job_parts = JENKINS_JOB_NAME.tokenize('/')[0].tokenize('_')
if (job_parts.size() > 1) {
return [job_parts.take(job_parts.size() - 1).join('_'), job_parts[job_parts.size()-1]]
}
return [job_parts[0], 'default']
}
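// Worked example (hedged) for GetRunningPipelineName: a job named 'Lumberyard_Metrics/main'
// yields ['Lumberyard', 'Metrics']; a name with no underscore, e.g. 'o3de/main', yields
// ['o3de', 'default'].

// @NonCPS below: java.util.regex.Matcher is not serializable, so the match has to run outside
// Jenkins' CPS-transformed pipeline code.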
@NonCPS
def RegexMatcher(str, regex) {
def matcher = (str =~ regex)
return matcher ? matcher.group(1) : null
}
def LoadPipelineConfig(String pipelineName, String branchName, String scmType) {
echo 'Loading pipeline config'
if (scmType == 'codecommit') {
PullFilesFromGit(PIPELINE_CONFIG_FILE, branchName, true, ENGINE_REPOSITORY_NAME)
}
def pipelineConfig = {}
pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
palRm(PIPELINE_CONFIG_FILE)
pipelineConfig.platforms = EMPTY_JSON
// Load the pipeline configs per platform
pipelineConfig.PIPELINE_CONFIGS.each { pipeline_config ->
def platform_regex = pipeline_config.replace('.','\\.').replace('*', '(.*)')
if (!env.IS_UNIX) {
platform_regex = platform_regex.replace('/','\\\\')
}
echo "Downloading platform pipeline configs ${pipeline_config}"
if (scmType == 'codecommit') {
PullFilesFromGit(pipeline_config, branchName, false, ENGINE_REPOSITORY_NAME)
}
echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
for (pipeline_config_path in findFiles(glob: pipeline_config)) {
echo "\tFound platform pipeline config ${pipeline_config_path}"
def platform = RegexMatcher(pipeline_config_path, platform_regex)
if(platform) {
pipelineConfig.platforms[platform] = EMPTY_JSON
pipelineConfig.platforms[platform].PIPELINE_ENV = readJSON file: pipeline_config_path.toString()
}
palRm(pipeline_config_path.toString())
}
}
// Load the build configs
pipelineConfig.BUILD_CONFIGS.each { build_config ->
def platform_regex = build_config.replace('.','\\.').replace('*', '(.*)')
if (!env.IS_UNIX) {
platform_regex = platform_regex.replace('/','\\\\')
}
echo "Downloading configs ${build_config}"
if (scmType == 'codecommit') {
PullFilesFromGit(build_config, branchName, false, ENGINE_REPOSITORY_NAME)
}
echo "Searching configs in ${build_config} using ${platform_regex}"
for (build_config_path in findFiles(glob: build_config)) {
echo "\tFound config ${build_config_path}"
def platform = RegexMatcher(build_config_path, platform_regex)
if(platform) {
pipelineConfig.platforms[platform].build_types = readJSON file: build_config_path.toString()
}
}
}
return pipelineConfig
}
def GetSCMType() {
def gitUrl = scm.getUserRemoteConfigs()[0].getUrl()
if (gitUrl ==~ /https:\/\/git-codecommit.*/) {
return 'codecommit'
} else if (gitUrl ==~ /https:\/\/github.com.*/) {
return 'github'
}
return 'unknown'
}
def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
def envVarMap = [:]
platformPipelineEnv = platformEnv['ENV'] ?: [:]
platformPipelineEnv.each { var ->
envVarMap[var.key] = var.value
}
platformEnvOverride = platformEnv['PIPELINE_ENV_OVERRIDE'] ?: [:]
platformPipelineEnvOverride = platformEnvOverride[pipelineName] ?: [:]
platformPipelineEnvOverride.each { var ->
envVarMap[var.key] = var.value
}
buildTypeEnv.each { var ->
// This may override the above one if there is an entry defined by the job
envVarMap[var.key] = var.value
}
// Environment that only applies to Jenkins tweaks.
// For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node
// instances. This allows us to scale up and down without having to re-download 3rdParty
envVarMap['LY_PACKAGE_DOWNLOAD_CACHE_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/downloaded_packages"
envVarMap['LY_PACKAGE_UNPACK_LOCATION'] = "${envVarMap['WORKSPACE']}/3rdParty/packages"
return envVarMap
}
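// Precedence example (hedged): if the platform ENV sets FOO=a, the pipeline's
// PIPELINE_ENV_OVERRIDE sets FOO=b, and the build type env sets FOO=c, the job runs with
// FOO=c (later writers win).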
def GetEnvStringList(Map envVarMap) {
def strList = []
envVarMap.each { var ->
strList.add("${var.key}=${var.value}")
}
return strList
}
// Pulls/downloads files from the repo through CodeCommit. Glob matching is NOT supported; however, '*' is supported
// as a whole folder or filename (it cannot be a portion of a name, it has to be the entire folder or filename)
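// Example (illustrative): 'restricted/*/build_config.json' fetches build_config.json from every
// immediate subfolder of 'restricted/'; a pattern like 'restricted/config*.json' would NOT
// match, since '*' cannot be part of a name.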
def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFound = true, String repositoryName = env.DEFAULT_REPOSITORY_NAME) {
echo "PullFilesFromGit filenamePath=${filenamePath} branchName=${branchName} repositoryName=${repositoryName}"
def folderPathParts = filenamePath.tokenize('/')
def filename = folderPathParts[folderPathParts.size()-1]
folderPathParts.remove(folderPathParts.size()-1) // remove the filename
def folderPath = folderPathParts.join('/')
if (folderPath.contains('*')) {
def currentPath = ''
for (int i = 0; i < folderPathParts.size(); i++) {
if (folderPathParts[i] == '*') {
palMkdir(currentPath)
retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${currentPath} > ${currentPath}/.codecommit", "GetFolder ${currentPath}") }
def folderInfo = readJSON file: "${currentPath}/.codecommit"
folderInfo.subFolders.each { folder ->
def newSubPath = currentPath + '/' + folder.relativePath
for (int j = i+1; j < folderPathParts.size(); j++) {
newSubPath = newSubPath + '/' + folderPathParts[j]
}
newSubPath = newSubPath + '/' + filename
PullFilesFromGit(newSubPath, branchName, false, repositoryName)
}
palRm("${currentPath}/.codecommit")
}
if (i == 0) {
currentPath = folderPathParts[i]
} else {
currentPath = currentPath + '/' + folderPathParts[i]
}
}
} else if (filename.contains('*')) {
palMkdir(folderPath)
retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${folderPath} > ${folderPath}/.codecommit", "GetFolder ${folderPath}") }
def folderInfo = readJSON file: "${folderPath}/.codecommit"
folderInfo.files.each { file ->
PullFilesFromGit("${folderPath}/${filename}", branchName, false, repositoryName)
}
palRm("${folderPath}/.codecommit")
} else {
def errorFile = "${folderPath}/error.txt"
palMkdir(folderPath)
retry(3) {
try {
if(env.IS_UNIX) {
sh label: "Downloading ${filenamePath}",
script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${filenamePath}_encoded"
sh label: 'Decoding',
script: "base64 --decode ${filenamePath}_encoded > ${filenamePath}"
} else {
errorFile = errorFile.replace('/','\\')
win_filenamePath = filenamePath.replace('/', '\\')
bat label: "Downloading ${win_filenamePath}",
script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${win_filenamePath}_encoded"
bat label: 'Decoding',
script: "certutil -decode ${win_filenamePath}_encoded ${win_filenamePath}"
}
palRm("${filenamePath}_encoded")
} catch (Exception ex) {
def error = ''
if(fileExists(errorFile)) {
error = readFile errorFile
}
if (!error || !(!failIfNotFound && error.contains('FileDoesNotExistException'))) {
palRm("${errorFile} ${filenamePath}.encoded ${filenamePath}")
throw new Exception("Could not get file: ${filenamePath}, ex: ${ex}, stderr: ${error}")
}
}
palRm(errorFile)
}
}
}
def SetLfsCredentials(cmd, lbl = '') {
if (env.IS_UNIX) {
sh label: lbl,
script: cmd
} else {
bat label: lbl,
script: cmd
}
}
def CheckoutBootstrapScripts(String branchName) {
checkout([$class: "GitSCM",
branches: [[name: "*/${branchName}"]],
doGenerateSubmoduleConfigurations: false,
extensions: [
[
$class: "SparseCheckoutPaths",
sparseCheckoutPaths: [
[ $class: "SparseCheckoutPath", path: "AutomatedReview/" ],
[ $class: "SparseCheckoutPath", path: "scripts/build/bootstrap/" ],
[ $class: "SparseCheckoutPath", path: "Tools/build/JenkinsScripts/build/Platform" ]
]
],
[
$class: "CloneOption", depth: 1, noTags: false, reference: "", shallow: true
]
],
submoduleCfg: [],
userRemoteConfigs: scm.userRemoteConfigs
])
}
def CheckoutRepo(boolean disableSubmodules = false) {
dir(ENGINE_REPOSITORY_NAME) {
palSh('git lfs uninstall', 'Git LFS Uninstall') // Prevent git from pulling lfs objects during checkout
if(fileExists('.git')) {
// If the repository is locked after checkout, we likely took a snapshot while git was running;
// garbage-collect to leave the repo in a usable state. This also helps when a stale
// index.lock was left behind by an interrupted git operation.
def indexLockFile = '.git/index.lock'
if(fileExists(indexLockFile)) {
palSh('git gc', 'Git GarbageCollect')
}
if(fileExists(indexLockFile)) { // if it is still there, remove it
palRm(indexLockFile)
}
}
}
def random = new Random()
def retryAttempt = 0
retry(5) {
if (retryAttempt > 0) {
sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from CodeCommit
}
retryAttempt = retryAttempt + 1
if(params.PULL_REQUEST_ID) {
// This is a pull request build. Perform merge with destination branch before building.
dir(ENGINE_REPOSITORY_NAME) {
checkout scm: [
$class: 'GitSCM',
branches: scm.branches,
extensions: [
[$class: 'PreBuildMerge', options: [mergeRemote: 'origin', mergeTarget: params.DESTINATION_BRANCH]],
[$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
[$class: 'CheckoutOption', timeout: 60]
],
userRemoteConfigs: scm.userRemoteConfigs
]
}
} else {
dir(ENGINE_REPOSITORY_NAME) {
checkout scm: [
$class: 'GitSCM',
branches: scm.branches,
extensions: [
[$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
[$class: 'CheckoutOption', timeout: 60]
],
userRemoteConfigs: scm.userRemoteConfigs
]
}
}
}
// Add folder where we will store the 3rdParty downloads and packages
if(!fileExists('3rdParty')) {
palMkdir('3rdParty')
}
dir(ENGINE_REPOSITORY_NAME) {
// Run lfs in a separate step. Jenkins is unable to load the credentials for the custom LFS endpoint
withCredentials([usernamePassword(credentialsId: "${env.GITHUB_USER}", passwordVariable: 'accesstoken', usernameVariable: 'username')]) {
SetLfsCredentials("git config -f .lfsconfig lfs.url https://${username}:${accesstoken}@${env.LFS_URL}", 'Set credentials')
}
palSh('git lfs install', 'Git LFS Install')
palSh('git lfs pull', 'Git LFS Pull')
// CHANGE_ID is used by some scripts (usually metric jobs) to uniquely identify the current change
palSh('git rev-parse HEAD > commitid', 'Getting commit id')
env.CHANGE_ID = readFile file: 'commitid'
env.CHANGE_ID = env.CHANGE_ID.trim()
palRm('commitid')
}
}
def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
echo 'Starting pre-build common steps...'
if (mount) {
unstash name: 'incremental_build_script'
def pythonCmd = ''
if(env.IS_UNIX) pythonCmd = 'sudo -E python -u '
else pythonCmd = 'python -u '
if(env.RECREATE_VOLUME.toBoolean()) {
palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action delete --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Deleting volume')
}
timeout(5) {
palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action mount --project ${projectName} --pipeline ${pipeline} --branch ${branchName} --platform ${platform} --build_type ${buildType}", 'Mounting volume')
}
if(env.IS_UNIX) {
sh label: 'Setting volume\'s ownership',
script: """
if sudo test ! -d "${workspace}"; then
sudo mkdir -p ${workspace}
cd ${workspace}/..
sudo chown -R lybuilder:root .
fi
"""
}
}
// Clean up the previous repo location. We are currently at the root of the workspace; if a .git
// folder is present here, we need to clean up. Once all branches take this relocation, this block can be removed
if(env.CLEAN_WORKSPACE.toBoolean() || fileExists("${workspace}/.git")) {
if(fileExists(workspace)) {
palRmDir(workspace)
}
}
dir(workspace) {
CheckoutRepo(disableSubmodules)
// Get python
dir(ENGINE_REPOSITORY_NAME) {
if(env.IS_UNIX) {
sh label: 'Getting python',
script: 'python/get_python.sh'
} else {
bat label: 'Getting python',
script: 'python/get_python.bat'
}
if(env.CLEAN_OUTPUT_DIRECTORY.toBoolean() || env.CLEAN_ASSETS.toBoolean()) {
def command = "${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
if (env.IS_UNIX) {
sh label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.sh -u ${command}"
} else {
bat label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
}
}
}
}
}
def Build(Map options, String platform, String type, String workspace) {
def command = "${options.BUILD_ENTRY_POINT} --platform ${platform} --type ${type}"
dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
if (env.IS_UNIX) {
sh label: "Running ${platform} ${type}",
script: "${options.PYTHON_DIR}/python.sh -u ${command}"
} else {
bat label: "Running ${platform} ${type}",
script: "${options.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
}
}
}
def TestMetrics(Map options, String workspace, String branchName, String repoName, String buildJobName, String outputDirectory, String configuration) {
catchError(buildResult: null, stageResult: null) {
def cmakeBuildDir = [workspace, ENGINE_REPOSITORY_NAME, outputDirectory].join('/')
dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
checkout scm: [
$class: 'GitSCM',
branches: [[name: '*/main']],
extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'mars']],
userRemoteConfigs: [[url: "${env.MARS_REPO}", name: 'mars', credentialsId: "${env.GITHUB_USER}"]]
]
withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) {
def command = "${options.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py -e jenkins.creds.user ${username} -e jenkins.creds.pass ${apitoken} ${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} "
bat label: "Publishing ${buildJobName} Test Metrics",
script: command
}
}
}
}
def PostBuildCommonSteps(String workspace, boolean mount = true) {
echo 'Starting post-build common steps...'
if(params.PULL_REQUEST_ID) {
dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
if(fileExists('.git')) {
palSh('git reset --hard HEAD', 'Discard PR merge, git reset')
}
}
}
if (mount) {
def pythonCmd = ''
if(env.IS_UNIX) pythonCmd = 'sudo -E python -u '
else pythonCmd = 'python -u '
try {
timeout(5) {
palSh("${pythonCmd} ${INCREMENTAL_BUILD_SCRIPT_PATH} --action unmount", 'Unmounting volume')
}
} catch (Exception e) {
echo "Unmount script error ${e}"
}
}
}
def CreateSetupStage(Map pipelineConfig, String projectName, String pipelineName, String branchName, String platformName, String jobName, Map environmentVars) {
return {
stage("Setup") {
PreBuildCommonSteps(pipelineConfig, projectName, pipelineName, branchName, platformName, jobName, environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
}
}
}
def CreateBuildStage(Map pipelineConfig, String platformName, String jobName, Map environmentVars) {
return {
stage("${jobName}") {
Build(pipelineConfig, platformName, jobName, environmentVars['WORKSPACE'])
}
}
}
def CreateTestMetricsStage(Map pipelineConfig, String branchName, Map environmentVars, String buildJobName, String outputDirectory, String configuration) {
return {
stage("${buildJobName}_metrics") {
TestMetrics(pipelineConfig, environmentVars['WORKSPACE'], branchName, env.DEFAULT_REPOSITORY_NAME, buildJobName, outputDirectory, configuration)
}
}
}
def CreateTeardownStage(Map environmentVars) {
return {
stage("Teardown") {
PostBuildCommonSteps(environmentVars['WORKSPACE'], environmentVars['MOUNT_VOLUME'])
}
}
}
def projectName = ''
def pipelineName = ''
def branchName = ''
def pipelineConfig = {}
// Start Pipeline
try {
stage('Setup Pipeline') {
node('controller') {
def envVarList = []
if(isUnix()) {
envVarList.add('IS_UNIX=1')
}
withEnv(envVarList) {
timestamps {
(projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
scmType = GetSCMType()
if(env.BRANCH_NAME) {
branchName = env.BRANCH_NAME
} else {
branchName = scm.branches[0].name // for non-multibranch pipelines
env.BRANCH_NAME = branchName // so scripts that read this environment have it (e.g. incremental_build_util.py)
}
pipelineProperties.add(disableConcurrentBuilds())
echo "Running \"${pipelineName}\" for \"${branchName}\"..."
if (scmType == 'github') {
CheckoutBootstrapScripts(branchName)
}
// Load configs
pipelineConfig = LoadPipelineConfig(pipelineName, branchName, scmType)
// Add each platform as a parameter that the user can disable if needed
pipelineConfig.platforms.each { platform ->
pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
}
pipelineProperties.add(parameters(pipelineParameters))
properties(pipelineProperties)
// Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
if (scmType == 'codecommit') {
PullFilesFromGit(INCREMENTAL_BUILD_SCRIPT_PATH, branchName, true, ENGINE_REPOSITORY_NAME)
}
stash name: 'incremental_build_script',
includes: INCREMENTAL_BUILD_SCRIPT_PATH
}
}
}
}
if(env.BUILD_NUMBER == '1') {
// Exit the pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
// to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
currentBuild.result = 'SUCCESS'
return
}
// Build and Post-Build Testing Stage
def buildConfigs = [:]
// Platform Builds run on EC2
pipelineConfig.platforms.each { platform ->
platform.value.build_types.each { build_job ->
if (IsJobEnabled(build_job, pipelineName, platform.key)) { // User can filter jobs, jobs are tagged by pipeline
def envVars = GetBuildEnvVars(platform.value.PIPELINE_ENV ?: EMPTY_JSON, build_job.value.PIPELINE_ENV ?: EMPTY_JSON, pipelineName)
envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
def nodeLabel = envVars['NODE_LABEL']
buildConfigs["${platform.key} [${build_job.key}]"] = {
node("${nodeLabel}") {
if(isUnix()) { // Has to happen inside a node
envVars['IS_UNIX'] = 1
}
withEnv(GetEnvStringList(envVars)) {
timeout(time: envVars['TIMEOUT'], unit: 'MINUTES', activity: true) {
try {
def build_job_name = build_job.key
CreateSetupStage(pipelineConfig, projectName, pipelineName, branchName, platform.key, build_job.key, envVars).call()
if(build_job.value.steps) { // this is a pipeline with multiple steps, so create all the build stages
build_job.value.steps.each { build_step ->
build_job_name = build_step
CreateBuildStage(pipelineConfig, platform.key, build_step, envVars).call()
}
} else {
CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
}
if (env.MARS_REPO && platform.key == 'Windows' && build_job_name.startsWith('test')) {
def output_directory = platform.value.build_types[build_job_name].PARAMETERS.OUTPUT_DIRECTORY
def configuration = platform.value.build_types[build_job_name].PARAMETERS.CONFIGURATION
CreateTestMetricsStage(pipelineConfig, branchName, envVars, build_job_name, output_directory, configuration).call()
}
}
catch(Exception e) {
// https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/Result.java
// {SUCCESS,UNSTABLE,FAILURE,NOT_BUILT,ABORTED}
def currentResult = envVars['ON_FAILURE_MARK'] ?: 'FAILURE'
if (currentResult == 'FAILURE') {
currentBuild.result = 'FAILURE'
error "FAILURE: ${e}"
} else if (currentResult == 'UNSTABLE') {
currentBuild.result = 'UNSTABLE'
unstable(message: "UNSTABLE: ${e}")
}
}
finally {
CreateTeardownStage(envVars).call()
}
}
}
}
}
}
}
}
timestamps {
stage('Build') {
parallel buildConfigs // Run parallel builds
}
echo 'All builds successful'
}
}
catch(Exception e) {
error "Exception: ${e}"
}
finally {
try {
if(env.SNS_TOPIC) {
snsPublish(
topicArn: env.SNS_TOPIC,
subject:'Build Result',
message:"${currentBuild.currentResult}:${params.REPOSITORY_NAME}:${params.SOURCE_BRANCH}:${params.SOURCE_COMMIT}:${params.DESTINATION_COMMIT}:${params.PULL_REQUEST_ID}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}"
)
}
step([
$class: 'Mailer',
notifyEveryUnstableBuild: true,
sendToIndividuals: true,
recipients: emailextrecipients([
[$class: 'CulpritsRecipientProvider'],
[$class: 'RequesterRecipientProvider']
])
])
} catch(Exception e) {
}
}

@@ -1,12 +0,0 @@
{
"BUILD_ENTRY_POINT": "Tools/build/JenkinsScripts/build/ci_build.py",
"PIPELINE_CONFIGS": [
"Tools/build/JenkinsScripts/build/Platform/*/pipeline.json",
"restricted/*/Tools/build/JenkinsScripts/build/pipeline.json"
],
"BUILD_CONFIGS": [
"Tools/build/JenkinsScripts/build/Platform/*/build_config.json",
"restricted/*/Tools/build/JenkinsScripts/build/build_config.json"
],
"PYTHON_DIR": "python"
}

@@ -922,7 +922,7 @@ void CVars::Init()
"Will not render CGFs past the given amount of drawcalls\n"
"(<=0 off (default), >0 draw calls limit)");
REGISTER_CVAR(e_CheckOctreeObjectsBoxSize, 1, VF_NULL, "CryWarning for crazy sized COctreeNode m_objectsBoxes");
REGISTER_CVAR(e_CheckOctreeObjectsBoxSize, 1, VF_NULL, "Warning for crazy sized COctreeNode m_objectsBoxes");
REGISTER_CVAR(e_DebugGeomPrep, 0, VF_NULL, "enable logging of Geom preparation");
DefineConstIntCVar(e_GeomCaches, 1, VF_NULL, "Activates drawing of geometry caches");
REGISTER_CVAR(e_GeomCacheBufferSize, 128, VF_CHEAT, "Geometry cache stream buffer upper limit size in MB. Default: 128");

@@ -35,7 +35,7 @@ namespace AZ
}
AZ::OSString msgBoxMessage;
msgBoxMessage.append("CrySystem could not initialize correctly for the following reason(s):");
msgBoxMessage.append("O3DE could not initialize correctly for the following reason(s):");
for (const AZ::OSString& errMsg : m_errorStringsCollected)
{
@@ -47,7 +47,7 @@ namespace AZ
Trace::Output(nullptr, msgBoxMessage.c_str());
Trace::Output(nullptr, "\n==================================================================\n");
EBUS_EVENT(AZ::NativeUI::NativeUIRequestBus, DisplayOkDialog, "CrySystem Initialization Failed", msgBoxMessage.c_str(), false);
EBUS_EVENT(AZ::NativeUI::NativeUIRequestBus, DisplayOkDialog, "O3DE Initialization Failed", msgBoxMessage.c_str(), false);
}
} // namespace Debug
} // namespace AZ

@@ -605,7 +605,7 @@ void CSystem::DebugStats([[maybe_unused]] bool checkpoint, [[maybe_unused]] bool
{
if (!dbgmodules[i].handle)
{
CryLogAlways("WARNING: <CrySystem> CSystem::DebugStats: NULL handle for %s", dbgmodules[i].name.c_str());
CryLogAlways("WARNING: CSystem::DebugStats: NULL handle for %s", dbgmodules[i].name.c_str());
nolib++;
continue;
}
@@ -642,7 +642,7 @@ void CSystem::DebugStats([[maybe_unused]] bool checkpoint, [[maybe_unused]] bool
}
else
{
CryLogAlways("WARNING: <CrySystem> CSystem::DebugStats: could not retrieve function from DLL %s", dbgmodules[i].name.c_str());
CryLogAlways("WARNING: CSystem::DebugStats: could not retrieve function from DLL %s", dbgmodules[i].name.c_str());
nolib++;
};
#endif
@@ -1066,7 +1066,7 @@ void CSystem::FatalError(const char* format, ...)
if (szSysErrorMessage)
{
CryLogAlways("<CrySystem> Last System Error: %s", szSysErrorMessage);
CryLogAlways("Last System Error: %s", szSysErrorMessage);
}
if (GetUserCallback())

@@ -1117,7 +1117,7 @@ namespace AZ
return asset;
}
void AssetManager::UpdateDebugStatus(AZ::Data::Asset<AZ::Data::AssetData> asset)
void AssetManager::UpdateDebugStatus(const AZ::Data::Asset<AZ::Data::AssetData>& asset)
{
if(!m_debugAssetEvents)
{

@@ -358,7 +358,7 @@ namespace AZ
Asset<AssetData> GetAssetInternal(const AssetId& assetId, const AssetType& assetType, AssetLoadBehavior assetReferenceLoadBehavior, const AssetLoadParameters& loadParams = AssetLoadParameters{}, AssetInfo assetInfo = AssetInfo(), bool signalLoaded = false);
void UpdateDebugStatus(AZ::Data::Asset<AZ::Data::AssetData> asset);
void UpdateDebugStatus(const AZ::Data::Asset<AZ::Data::AssetData>& asset);
/**
* Gets a root asset and dependencies as individual async loads if necessary.

@@ -42,9 +42,8 @@ namespace AZ
}
}
#define AZ_TRACE_METHOD_NAME_CATEGORY(name, category) AZ::Debug::EventTrace::ScopedSlice AZ_JOIN(ScopedSlice__, __LINE__)(name, category);
#ifdef AZ_PROFILE_TELEMETRY
# define AZ_TRACE_METHOD_NAME_CATEGORY(name, category) AZ::Debug::EventTrace::ScopedSlice AZ_JOIN(ScopedSlice__, __LINE__)(name, category);
# define AZ_TRACE_METHOD_NAME(name) \
AZ_TRACE_METHOD_NAME_CATEGORY(name, "") \
AZ_PROFILE_SCOPE(AZ::Debug::ProfileCategory::AzTrace, name)
@@ -53,6 +52,7 @@ namespace AZ
AZ_TRACE_METHOD_NAME_CATEGORY(AZ_FUNCTION_SIGNATURE, "") \
AZ_PROFILE_FUNCTION(AZ::Debug::ProfileCategory::AzTrace)
#else
# define AZ_TRACE_METHOD_NAME_CATEGORY(name, category)
# define AZ_TRACE_METHOD_NAME(name) AZ_TRACE_METHOD_NAME_CATEGORY(name, "")
# define AZ_TRACE_METHOD() AZ_TRACE_METHOD_NAME(AZ_FUNCTION_SIGNATURE)
#endif

@@ -313,7 +313,7 @@ namespace O3DELauncher
return "Failed to initialize the CrySystem Interface";
case ReturnCode::ErrCryEnvironment:
return "Failed to initialize the CryEngine global environment";
return "Failed to initialize the global environment";
case ReturnCode::ErrAssetProccessor:
return "Failed to connect to AssetProcessor while the /Amazon/AzCore/Bootstrap/wait_for_connect value is 1\n."

@@ -5702,7 +5702,7 @@ extern "C" int AZ_DLL_EXPORT CryEditMain(int argc, char* argv[])
int exitCode = 0;
BOOL didCryEditStart = CCryEditApp::instance()->InitInstance();
AZ_Error("Editor", didCryEditStart, "CryEditor did not initialize correctly, and will close."
AZ_Error("Editor", didCryEditStart, "O3DE Editor did not initialize correctly, and will close."
"\nThis could be because of incorrectly configured components, or missing required gems."
"\nSee other errors for more details.");

@@ -1931,7 +1931,7 @@ void CCryEditDoc::Fetch(const QString& holdName, const QString& relativeHoldPath
if (!LoadXmlArchiveArray(arrXmlAr, holdFilename, holdPath))
{
QMessageBox::critical(QApplication::activeWindow(), "Error", "The temporary 'Hold' level failed to load successfully. Your level might be corrupted, you should restart the Editor.", QMessageBox::Ok);
AZ_Error("CryEditDoc", false, "Fetch failed to load the Xml Archive");
AZ_Error("EditDoc", false, "Fetch failed to load the Xml Archive");
return;
}

@@ -25,6 +25,7 @@ namespace ImageProcessingAtom
// EBusTraits overrides
static const AZ::EBusHandlerPolicy HandlerPolicy = AZ::EBusHandlerPolicy::Single;
static const AZ::EBusAddressPolicy AddressPolicy = AZ::EBusAddressPolicy::Single;
typedef AZStd::recursive_mutex MutexType;
//////////////////////////////////////////////////////////////////////////
// Loads an image from a source file path

@@ -145,24 +145,36 @@ namespace SurfaceData
void EditorSurfaceDataSystemComponent::OnCatalogLoaded(const char* /*catalogFile*/)
{
//automatically register all surface tag list assets
//automatically register all existing surface tag list assets at Editor startup
// First run through all the assets and trigger loads on them.
AZStd::vector<AZ::Data::AssetId> surfaceTagAssetIds;
// First run through all the assets and gather up the asset IDs for all surface tag list assets
AZ::Data::AssetCatalogRequestBus::Broadcast(&AZ::Data::AssetCatalogRequestBus::Events::EnumerateAssets,
nullptr,
[this](const AZ::Data::AssetId assetId, const AZ::Data::AssetInfo& assetInfo) {
[&surfaceTagAssetIds](const AZ::Data::AssetId assetId, const AZ::Data::AssetInfo& assetInfo) {
const auto assetType = azrtti_typeid<EditorSurfaceTagListAsset>();
if (assetInfo.m_assetType == assetType)
{
m_surfaceTagNameAssets[assetId] = AZ::Data::AssetManager::Instance().GetAsset(assetId, assetType, AZ::Data::AssetLoadBehavior::Default);
surfaceTagAssetIds.emplace_back(assetId);
}
},
nullptr);
// After all the loads are triggered, block to make sure they've all completed.
for (auto& asset : m_surfaceTagNameAssets)
// Next, trigger all the loads. This is done outside of EnumerateAssets to ensure that we don't have any deadlocks caused by
// lock inversion. If this thread locks AssetCatalogRequestBus mutex with EnumerateAssets, then locks m_assetMutex in
// AssetManager::FindOrCreateAsset, it's possible for those locks to get locked in reverse on a loading thread, causing a deadlock.
for (auto& assetId : surfaceTagAssetIds)
{
asset.second.BlockUntilLoadComplete();
m_surfaceTagNameAssets[assetId] = AZ::Data::AssetManager::Instance().GetAsset(
assetId, azrtti_typeid<EditorSurfaceTagListAsset>(), AZ::Data::AssetLoadBehavior::Default);
// If any assets are still loading (which they likely will be), listen for the OnAssetReady event and refresh the Editor
// UI as each one finishes loading.
if (!m_surfaceTagNameAssets[assetId].IsReady())
{
AZ::Data::AssetBus::MultiHandler::BusConnect(assetId);
}
}
}
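// Lock-inversion shape avoided above (illustrative):
//   enumerating thread: catalog mutex (EnumerateAssets) -> m_assetMutex (FindOrCreateAsset)
//   loading thread:     m_assetMutex                    -> catalog mutex
// Triggering GetAsset outside of EnumerateAssets keeps each thread holding one lock at a time.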

@@ -12,7 +12,7 @@
"default"
],
"steps": [
"profile",
"profile_nounity",
"asset_profile",
"test_profile"
]
@@ -43,6 +43,7 @@
},
"profile": {
"TAGS": [
"nightly",
"daily-pipeline-metrics",
"weekly-build-metrics"
],
@@ -57,7 +58,6 @@
},
"profile_nounity": {
"TAGS": [
"nightly",
"weekly-build-metrics"
],
"COMMAND": "build_linux.sh",

@@ -17,8 +17,7 @@ set(LY_3RDPARTY_PATH "" CACHE PATH "Path to the 3rdParty folder")
if(LY_3RDPARTY_PATH)
file(TO_CMAKE_PATH ${LY_3RDPARTY_PATH} LY_3RDPARTY_PATH)
endif()
if(NOT EXISTS ${LY_3RDPARTY_PATH}/3rdParty.txt)
message(FATAL_ERROR "3rdParty.txt not found in ${LY_3RDPARTY_PATH}, call cmake defining a valid LY_3RDPARTY_PATH or use cmake-gui to configure it")
if(NOT EXISTS ${LY_3RDPARTY_PATH})
message(FATAL_ERROR "3rdParty folder: ${LY_3RDPARTY_PATH} does not exist, call cmake defining a valid LY_3RDPARTY_PATH or use cmake-gui to configure it")
endif()
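# Example (illustrative): configure with an explicit third-party root, e.g.
#   cmake -B build -S . -DLY_3RDPARTY_PATH=/path/to/3rdParty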
#! ly_add_external_target_path: adds a path to module path so 3rdparty Find files can be added from paths different than cmake/3rdParty

@@ -1,18 +0,0 @@
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
ly_add_external_target(
NAME etc2comp
VERSION 2017_04_24-az.2
INCLUDE_DIRECTORIES
EtcLib/Etc
EtcLib/EtcCodec
)

@@ -39,6 +39,7 @@ ly_associate_package(PACKAGE_NAME freetype-2.10.4.14-linux TARGETS free
ly_associate_package(PACKAGE_NAME tiff-4.2.0.15-linux TARGETS tiff PACKAGE_HASH ae92b4d3b189c42ef644abc5cac865d1fb2eb7cb5622ec17e35642b00d1a0a76)
ly_associate_package(PACKAGE_NAME AWSNativeSDK-1.7.167-rev3-linux TARGETS AWSNativeSDK PACKAGE_HASH e69c55682638dc1e7fa571a61a82c8a69d395c74a008543a5188f4bd2b6b10c4)
ly_associate_package(PACKAGE_NAME PhysX-4.1.0.25992954-rev1-linux TARGETS PhysX PACKAGE_HASH e3ca36106a8dbf1524709f8bb82d520920ebd3ff3a92672d382efff406c75ee3)
ly_associate_package(PACKAGE_NAME etc2comp-9cd0f9cae0-rev1-linux TARGETS etc2comp PACKAGE_HASH 9283aa5db5bb7fb90a0ddb7a9f3895317c8ebe8044943124bbb3673a41407430)
ly_associate_package(PACKAGE_NAME mikkelsen-1.0.0.4-linux TARGETS mikkelsen PACKAGE_HASH 5973b1e71a64633588eecdb5b5c06ca0081f7be97230f6ef64365cbda315b9c8)
ly_associate_package(PACKAGE_NAME googletest-1.8.1-rev4-linux TARGETS googletest PACKAGE_HASH 7b7ad330f369450c316a4c4592d17fbb4c14c731c95bd8f37757203e8c2bbc1b)
ly_associate_package(PACKAGE_NAME googlebenchmark-1.5.0-rev2-linux TARGETS GoogleBenchmark PACKAGE_HASH 4038878f337fc7e0274f0230f71851b385b2e0327c495fc3dd3d1c18a807928d)

@@ -15,7 +15,6 @@ set(FILES
civetweb_linux.cmake
Clang_linux.cmake
dyad_linux.cmake
etc2comp_linux.cmake
FbxSdk_linux.cmake
OpenSSL_linux.cmake
Wwise_linux.cmake

@@ -1,12 +0,0 @@
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
set(ETC2COMP_LIBS ${BASE_PATH}/EtcLib/Linux_x64/libEtcLib.a)

@@ -44,6 +44,7 @@ ly_associate_package(PACKAGE_NAME freetype-2.10.4.14-mac-ios TARGETS fre
ly_associate_package(PACKAGE_NAME tiff-4.2.0.15-mac-ios TARGETS tiff PACKAGE_HASH a23ae1f8991a29f8e5df09d6d5b00d7768a740f90752cef465558c1768343709)
ly_associate_package(PACKAGE_NAME AWSNativeSDK-1.7.167-rev3-mac TARGETS AWSNativeSDK PACKAGE_HASH 21920372e90355407578b45ac19580df1463a39a25a867bcd0ffd8b385c8254a)
ly_associate_package(PACKAGE_NAME PhysX-4.1.0.25992954-rev1-mac TARGETS PhysX PACKAGE_HASH 149f5e9b44bd27291b1c4772f5e89a1e0efa88eef73c7e0b188935ed4d0c4a70)
ly_associate_package(PACKAGE_NAME etc2comp-9cd0f9cae0-rev1-mac TARGETS etc2comp PACKAGE_HASH 1966ab101c89db7ecf30984917e0a48c0d02ee0e4d65b798743842b9469c0818)
ly_associate_package(PACKAGE_NAME mikkelsen-1.0.0.4-mac TARGETS mikkelsen PACKAGE_HASH 83af99ca8bee123684ad254263add556f0cf49486c0b3e32e6d303535714e505)
ly_associate_package(PACKAGE_NAME googletest-1.8.1-rev4-mac TARGETS googletest PACKAGE_HASH cbf020d5ef976c5db8b6e894c6c63151ade85ed98e7c502729dd20172acae5a8)
ly_associate_package(PACKAGE_NAME googlebenchmark-1.5.0-rev2-mac TARGETS GoogleBenchmark PACKAGE_HASH ad25de0146769c91e179953d845de2bec8ed4a691f973f47e3eb37639381f665)

@@ -14,8 +14,7 @@ set(FILES
civetweb_mac.cmake
Clang_mac.cmake
DirectXShaderCompiler_mac.cmake
etc2comp_mac.cmake
FbxSdk_mac.cmake
OpenSSL_mac.cmake
Wwise_mac.cmake
)

@@ -1,12 +0,0 @@
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
set(ETC2COMP_LIBS ${BASE_PATH}/EtcLib/OSX_x86/$<IF:$<CONFIG:Debug>,Debug,Release>/libEtcLib.a)

@@ -46,6 +46,7 @@ ly_associate_package(PACKAGE_NAME freetype-2.10.4.14-windows TARGETS fre
ly_associate_package(PACKAGE_NAME tiff-4.2.0.14-windows TARGETS tiff PACKAGE_HASH ab60d1398e4e1e375ec0f1a00cdb1d812a07c0096d827db575ce52dd6d714207)
ly_associate_package(PACKAGE_NAME AWSNativeSDK-1.7.167-rev3-windows TARGETS AWSNativeSDK PACKAGE_HASH 929873d4252c464620a9d288e41bd5d47c0bd22750aeb3a1caa68a3da8247c48)
ly_associate_package(PACKAGE_NAME PhysX-4.1.0.25992954-rev1-windows TARGETS PhysX PACKAGE_HASH 198bed89d1aae7caaf5dadba24cee56235fe41725d004b64040d4e50d0f3aa1a)
ly_associate_package(PACKAGE_NAME etc2comp-9cd0f9cae0-rev1-windows TARGETS etc2comp PACKAGE_HASH fc9ae937b2ec0d42d5e7d0e9e8c80e5e4d257673fb33bc9b7d6db76002117123)
ly_associate_package(PACKAGE_NAME mikkelsen-1.0.0.4-windows TARGETS mikkelsen PACKAGE_HASH 872c4d245a1c86139aa929f2b465b63ea4ea55b04ced50309135dd4597457a4e)
ly_associate_package(PACKAGE_NAME googletest-1.8.1-rev4-windows TARGETS googletest PACKAGE_HASH 7e8f03ae8a01563124e3daa06386f25a2b311c10bb95bff05cae6c41eff83837)
ly_associate_package(PACKAGE_NAME googlebenchmark-1.5.0-rev2-windows TARGETS GoogleBenchmark PACKAGE_HASH 0c94ca69ae8e7e4aab8e90032b5c82c5964410429f3dd9dbb1f9bf4fe032b1d4)

@@ -16,7 +16,6 @@ set(FILES
Crashpad_windows.cmake
DirectXShaderCompiler_windows.cmake
dyad_windows.cmake
etc2comp_windows.cmake
FbxSdk_windows.cmake
libav_windows.cmake
OpenSSL_windows.cmake

@@ -1,13 +0,0 @@
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
set(ETC2COMP_LIBS ${BASE_PATH}/EtcLib/Windows_x86_64/vc140/$<IF:$<CONFIG:Debug>,Debug,Release>/EtcLib.lib)
set(ETC2COMP_LINK_OPTIONS $<$<STREQUAL:${PAL_TRAIT_COMPILER_ID},Clang>:-Wl,>/ignore:4099)
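# Note (hedged): /ignore:4099 suppresses the MSVC linker's LNK4099 "PDB not found" warning.
# When the compiler is Clang, the option must be prefixed with -Wl, so the compiler driver
# forwards it to the linker, hence the generator expression above.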

@@ -16,7 +16,6 @@ set(FILES
FindClang.cmake
FindDirectXShaderCompiler.cmake
Finddyad.cmake
Findetc2comp.cmake
FindFbxSdk.cmake
Findlibav.cmake
FindOpenSSL.cmake
