Merge pull request #10 from aws-lumberyard-dev/SPEC-6230

Pipeline fails when using a new empty EBS volume
main
Shirang Jia committed 5 years ago via GitHub
commit a25f5b1317

@@ -96,7 +96,7 @@ def IsJobEnabled(buildTypeMap, pipelineName, platformName) {
if (params[platformName]) {
if(buildTypeMap.value.TAGS) {
return buildTypeMap.value.TAGS.contains(pipelineName)
}
}
}
return false
@@ -194,9 +194,9 @@ def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
}
buildTypeEnv.each { var ->
// This may override the above one if there is an entry defined by the job
envVarMap[var.key] = var.value
}
// Environment that only applies to Jenkins tweaks.
// For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node
// instances. This allows us to scale up and down without having to re-download 3rdParty
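The merge order in GetBuildEnvVars is: platform defaults first, then per-job entries (which win on conflict), then the Jenkins-only tweaks such as pointing 3rdParty at the mounted EBS volume. A minimal Groovy sketch of that ordering, where LY_3RDPARTY_PATH is an assumed variable name used only for illustration:

// Sketch of the GetBuildEnvVars merge order: platform defaults, then job overrides,
// then Jenkins-only tweaks such as the 3rdParty cache on the mounted EBS volume.
def mergeBuildEnv(Map platformEnv, Map buildTypeEnv, String workspace) {
    def envVarMap = [:]
    platformEnv.each { var -> envVarMap[var.key] = var.value }
    buildTypeEnv.each { var -> envVarMap[var.key] = var.value } // job entry overrides the platform default
    envVarMap['LY_3RDPARTY_PATH'] = "${workspace}/3rdParty"     // assumed name, illustration only
    return envVarMap
}

assert mergeBuildEnv([A: '1', B: '2'], [B: '3'], '/data/workspace')['B'] == '3'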
@@ -223,7 +223,7 @@ def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFo
folderPathParts.remove(folderPathParts.size()-1) // remove the filename
def folderPath = folderPathParts.join('/')
if (folderPath.contains('*')) {
def currentPath = ''
for (int i = 0; i < folderPathParts.size(); i++) {
if (folderPathParts[i] == '*') {
@@ -259,7 +259,7 @@ def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFo
} else {
def errorFile = "${folderPath}/error.txt"
palMkdir(folderPath)
retry(3) {
try {
@@ -273,7 +273,7 @@ def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFo
win_filenamePath = filenamePath.replace('/', '\\')
bat label: "Downloading ${win_filenamePath}",
script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${win_filenamePath}_encoded"
bat label: 'Decoding',
script: "certutil -decode ${win_filenamePath}_encoded ${win_filenamePath}"
}
palRm("${filenamePath}_encoded")
@@ -296,7 +296,7 @@ def SetLfsCredentials(cmd, lbl = '') {
if (env.IS_UNIX) {
sh label: lbl,
script: cmd
} else {
bat label: lbl,
script: cmd
}
@@ -330,7 +330,7 @@ def CheckoutRepo(boolean disableSubmodules = false) {
if(fileExists('.git')) {
// If the repository after checkout is locked, likely we took a snapshot while git was running,
// to leave the repo in a usable state, garbagecollect. This also helps in situations where
def indexLockFile = '.git/index.lock'
if(fileExists(indexLockFile)) {
palSh('git gc', 'Git GarbageCollect')
@@ -380,7 +380,7 @@ def CheckoutRepo(boolean disableSubmodules = false) {
// Add folder where we will store the 3rdParty downloads and packages
if(!fileExists('3rdParty')) {
palMkdir('3rdParty')
}
dir(ENGINE_REPOSITORY_NAME) {
// Run lfs in a separate step. Jenkins is unable to load the credentials for the custom LFS endpoint
@@ -419,7 +419,7 @@ def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline,
sh label: 'Setting volume\'s ownership',
script: """
if sudo test ! -d "${workspace}"; then
sudo mkdir -p ${workspace}
cd ${workspace}/..
sudo chown -R lybuilder:root .
fi
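This guard is the heart of the fix: on a freshly attached, empty EBS volume the workspace directory does not exist yet, so it is created and re-owned before anything else runs. A standalone sketch of the same step, assuming a Linux agent with passwordless sudo and the lybuilder build user shown above:

// Sketch: prepare a new, empty EBS volume before the first checkout (must run on a node).
// 'lybuilder' is taken from the snippet above; adjust for the actual build user.
def prepareVolume(String workspace) {
    sh label: 'Preparing empty EBS volume',
       script: """
            if sudo test ! -d "${workspace}"; then
                sudo mkdir -p "${workspace}"
                cd "${workspace}/.."
                sudo chown -R lybuilder:root .
            fi
       """
}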

@@ -84,7 +84,7 @@ def IsJobEnabled(buildTypeMap, pipelineName, platformName) {
if (params[platformName]) {
if(buildTypeMap.value.TAGS) {
return buildTypeMap.value.TAGS.contains(pipelineName)
}
}
}
return false
@@ -161,9 +161,9 @@ def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
}
buildTypeEnv.each { var ->
// This may override the above one if there is an entry defined by the job
envVarMap[var.key] = var.value
}
// Environment that only applies to Jenkins tweaks.
// For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node
// instances. This allows us to scale up and down without having to re-download 3rdParty
@@ -185,7 +185,7 @@ def SetLfsCredentials(cmd, lbl = '') {
if (env.IS_UNIX) {
sh label: lbl,
script: cmd
} else {
bat label: lbl,
script: cmd
}
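SetLfsCredentials uses the same dispatch pattern the rest of the script wraps in helpers such as palSh, palMkdir and palRm: run sh on Unix agents and bat on Windows, keyed off env.IS_UNIX, which is set once the node is allocated. The helpers' bodies are not part of this diff; a plausible sketch of the pattern, with runStep as a hypothetical name:

// Sketch of the cross-platform dispatch used throughout (palSh itself is not shown in this diff).
// env.IS_UNIX is only set inside a node block, so this must run on an allocated agent.
def runStep(String cmd, String lbl = '') {
    if (env.IS_UNIX) {
        sh label: lbl, script: cmd
    } else {
        bat label: lbl, script: cmd
    }
}

runStep('git --version', 'Print git version')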
@@ -214,19 +214,17 @@ def CheckoutBootstrapScripts(String branchName) {
}
def CheckoutRepo(boolean disableSubmodules = false) {
dir(ENGINE_REPOSITORY_NAME) {
palSh('git lfs uninstall', 'Git LFS Uninstall') // Prevent git from pulling lfs objects during checkout
if(fileExists('.git')) {
// If the repository after checkout is locked, likely we took a snapshot while git was running,
// to leave the repo in a usable state, garbagecollect. This also helps in situations where
def indexLockFile = '.git/index.lock'
if(fileExists(indexLockFile)) {
palSh('git gc', 'Git GarbageCollect')
}
if(fileExists(indexLockFile)) { // if it is still there, remove it
palRm(indexLockFile)
}
palSh('git lfs uninstall', 'Git LFS Uninstall') // Prevent git from pulling lfs objects during checkout
if(fileExists('.git')) {
// If the repository after checkout is locked, likely we took a snapshot while git was running,
// to leave the repo in a usable state, garbagecollect. This also helps in situations where
def indexLockFile = '.git/index.lock'
if(fileExists(indexLockFile)) {
palSh('git gc', 'Git GarbageCollect')
}
if(fileExists(indexLockFile)) { // if it is still there, remove it
palRm(indexLockFile)
}
}
@@ -237,38 +235,29 @@ def CheckoutRepo(boolean disableSubmodules = false) {
sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from CodeCommit
}
retryAttempt = retryAttempt + 1
dir(ENGINE_REPOSITORY_NAME) {
checkout scm: [
$class: 'GitSCM',
branches: scm.branches,
extensions: [
[$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
[$class: 'CheckoutOption', timeout: 60]
],
userRemoteConfigs: scm.userRemoteConfigs
]
}
checkout scm: [
$class: 'GitSCM',
branches: scm.branches,
extensions: [
[$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
[$class: 'CheckoutOption', timeout: 60]
],
userRemoteConfigs: scm.userRemoteConfigs
]
}
// Add folder where we will store the 3rdParty downloads and packages
if(!fileExists('3rdParty')) {
palMkdir('3rdParty')
}
dir(ENGINE_REPOSITORY_NAME) {
// Run lfs in a separate step. Jenkins is unable to load the credentials for the custom LFS endpoint
withCredentials([usernamePassword(credentialsId: "${env.GITHUB_USER}", passwordVariable: 'accesstoken', usernameVariable: 'username')]) {
SetLfsCredentials("git config -f .lfsconfig lfs.url https://${username}:${accesstoken}@${env.LFS_URL}", 'Set credentials')
}
palSh('git lfs install', 'Git LFS Install')
palSh('git lfs pull', 'Git LFS Pull')
// CHANGE_ID is used by some scripts to identify uniquely the current change (usually metric jobs)
palSh('git rev-parse HEAD > commitid', 'Getting commit id')
env.CHANGE_ID = readFile file: 'commitid'
env.CHANGE_ID = env.CHANGE_ID.trim()
palRm('commitid')
// Run lfs in a separate step. Jenkins is unable to load the credentials for the custom LFS endpoint
withCredentials([usernamePassword(credentialsId: "${env.GITHUB_USER}", passwordVariable: 'accesstoken', usernameVariable: 'username')]) {
SetLfsCredentials("git config -f .lfsconfig lfs.url https://${username}:${accesstoken}@${env.LFS_URL}", 'Set credentials')
}
palSh('git lfs install', 'Git LFS Install')
palSh('git lfs pull', 'Git LFS Pull')
// CHANGE_ID is used by some scripts to identify uniquely the current change (usually metric jobs)
palSh('git rev-parse HEAD > commitid', 'Getting commit id')
env.CHANGE_ID = readFile file: 'commitid'
env.CHANGE_ID = env.CHANGE_ID.trim()
palRm('commitid')
}
def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
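The retry wrapper near the top of this hunk staggers checkout attempts with sleep random.nextInt(60 * retryAttempt) so that parallel agents do not all hit CodeCommit at the same moment and trigger HTTP 429 responses. Reduced to its essentials, and with the retry count picked only for illustration, the pattern looks like this:

// Sketch of the staggered-retry checkout: each retry waits a random, growing delay (in seconds)
// before touching the remote again, spreading out concurrent re-checkouts across agents.
def random = new Random()
def retryAttempt = 0
retry(3) { // retry count here is illustrative, not necessarily the script's value
    if (retryAttempt > 0) {
        sleep random.nextInt(60 * retryAttempt)
    }
    retryAttempt = retryAttempt + 1
    checkout scm // the real script passes explicit GitSCM branches, submodule and timeout options
}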
@@ -292,7 +281,7 @@ def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline,
sh label: 'Setting volume\'s ownership',
script: """
if sudo test ! -d "${workspace}"; then
sudo mkdir -p ${workspace}
cd ${workspace}/..
sudo chown -R lybuilder:root .
fi
@@ -309,28 +298,31 @@ def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline,
}
dir(workspace) {
// Add folder where we will store the 3rdParty downloads and packages
if(!fileExists('3rdParty')) {
palMkdir('3rdParty')
}
}
dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
CheckoutRepo(disableSubmodules)
// Get python
dir(ENGINE_REPOSITORY_NAME) {
if(env.IS_UNIX) {
sh label: 'Getting python',
script: 'python/get_python.sh'
} else {
bat label: 'Getting python',
script: 'python/get_python.bat'
}
if(env.IS_UNIX) {
sh label: 'Getting python',
script: 'python/get_python.sh'
} else {
bat label: 'Getting python',
script: 'python/get_python.bat'
}
if(env.CLEAN_OUTPUT_DIRECTORY.toBoolean() || env.CLEAN_ASSETS.toBoolean()) {
def command = "${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
if (env.IS_UNIX) {
sh label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.sh -u ${command}"
} else {
bat label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
}
if(env.CLEAN_OUTPUT_DIRECTORY.toBoolean() || env.CLEAN_ASSETS.toBoolean()) {
def command = "${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
if (env.IS_UNIX) {
sh label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.sh -u ${command}"
} else {
bat label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
}
}
}
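Both clean flags are ordinary Jenkins environment values, i.e. strings, which is why the script converts them with toBoolean() before using them as a condition; the guard only fires when a flag parses as true. For example, in plain Groovy:

// Jenkins env vars are strings; boolean build parameters arrive as the text 'true' or 'false'.
assert 'true'.toBoolean() == true
assert 'false'.toBoolean() == false
// so a guard like the one above only triggers when either flag reads as true:
def cleanRequested = 'false'.toBoolean() || 'true'.toBoolean()
assert cleanRequested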
@@ -446,10 +438,10 @@ try {
echo "Running \"${pipelineName}\" for \"${branchName}\"..."
CheckoutBootstrapScripts(branchName)
// Load configs
pipelineConfig = LoadPipelineConfig(pipelineName, branchName)
// Add each platform as a parameter that the user can disable if needed
pipelineConfig.platforms.each { platform ->
pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
@@ -460,13 +452,13 @@ try {
// Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
stash name: 'incremental_build_script',
includes: INCREMENTAL_BUILD_SCRIPT_PATH
}
}
}
}
}
if(env.BUILD_NUMBER == '1') {
// Exit pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
// to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
currentBuild.result = 'SUCCESS'
return
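The stash at the top of this hunk exists because the per-platform builds later run on different agents: stash captures the incremental build script on the coordinating node and unstash retrieves it wherever it is needed. A generic sketch of that hand-off, with the node labels, file name and stash name all illustrative:

// Sketch of the stash/unstash hand-off between agents (names are illustrative only).
node('coordinator') {
    writeFile file: 'notes.txt', text: 'hello'
    stash name: 'build_helper', includes: 'notes.txt'
}
node('builder') {
    unstash 'build_helper'
    echo readFile('notes.txt')
}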
@@ -483,16 +475,16 @@ try {
envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
def nodeLabel = envVars['NODE_LABEL']
buildConfigs["${platform.key} [${build_job.key}]"] = {
node("${nodeLabel}") {
if(isUnix()) { // Has to happen inside a node
envVars['IS_UNIX'] = 1
}
withEnv(GetEnvStringList(envVars)) {
timeout(time: envVars['TIMEOUT'], unit: 'MINUTES', activity: true) {
try {
def build_job_name = build_job.key
CreateSetupStage(pipelineConfig, projectName, pipelineName, branchName, platform.key, build_job.key, envVars).call()
if(build_job.value.steps) { //this is a pipe with many steps so create all the build stages
@@ -503,7 +495,7 @@ try {
} else {
CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
}
if (env.MARS_REPO && platform.key == 'Windows' && build_job_name.startsWith('test')) {
def output_directory = platform.value.build_types[build_job_name].PARAMETERS.OUTPUT_DIRECTORY
def configuration = platform.value.build_types[build_job_name].PARAMETERS.CONFIGURATION
@@ -549,8 +541,8 @@ finally {
try {
if(env.SNS_TOPIC) {
snsPublish(
topicArn: env.SNS_TOPIC,
subject:'Build Result',
message:"${currentBuild.currentResult}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}"
)
}
