Merge branch 'main' into nvsickle/EditorViewportWidgetFixes

Nicholas Van Sickle 5 years ago committed by GitHub
commit 22684af141

@@ -96,7 +96,7 @@ def IsJobEnabled(buildTypeMap, pipelineName, platformName) {
if (params[platformName]) {
if(buildTypeMap.value.TAGS) {
return buildTypeMap.value.TAGS.contains(pipelineName)
}
}
}
}
return false
@@ -194,9 +194,9 @@ def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
}
buildTypeEnv.each { var ->
// This may override the above one if there is an entry defined by the job
envVarMap[var.key] = var.value
}
// Environment that only applies to Jenkins tweaks.
// For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node
// instances. This allows us to scale up and down without having to re-download 3rdParty
@@ -223,7 +223,7 @@ def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFo
folderPathParts.remove(folderPathParts.size()-1) // remove the filename
def folderPath = folderPathParts.join('/')
if (folderPath.contains('*')) {
def currentPath = ''
for (int i = 0; i < folderPathParts.size(); i++) {
if (folderPathParts[i] == '*') {
@@ -259,7 +259,7 @@ def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFo
} else {
def errorFile = "${folderPath}/error.txt"
palMkdir(folderPath)
retry(3) {
try {
@@ -273,7 +273,7 @@ def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFo
win_filenamePath = filenamePath.replace('/', '\\')
bat label: "Downloading ${win_filenamePath}",
script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${win_filenamePath}_encoded"
bat label: 'Decoding',
script: "certutil -decode ${win_filenamePath}_encoded ${win_filenamePath}"
}
palRm("${filenamePath}_encoded")
@@ -296,7 +296,7 @@ def SetLfsCredentials(cmd, lbl = '') {
if (env.IS_UNIX) {
sh label: lbl,
script: cmd
} else {
bat label: lbl,
script: cmd
}
@@ -330,7 +330,7 @@ def CheckoutRepo(boolean disableSubmodules = false) {
if(fileExists('.git')) {
// If the repository is locked after checkout, we likely took a snapshot while git was running;
// garbage-collect to leave the repo in a usable state. This also helps in situations where
def indexLockFile = '.git/index.lock'
if(fileExists(indexLockFile)) {
palSh('git gc', 'Git GarbageCollect')
@@ -380,7 +380,7 @@ def CheckoutRepo(boolean disableSubmodules = false) {
// Add folder where we will store the 3rdParty downloads and packages
if(!fileExists('3rdParty')) {
palMkdir('3rdParty')
}
dir(ENGINE_REPOSITORY_NAME) {
// Run lfs in a separate step. Jenkins is unable to load the credentials for the custom LFS endpoint
@@ -419,7 +419,7 @@ def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline,
sh label: 'Setting volume\'s ownership',
script: """
if sudo test ! -d "${workspace}"; then
sudo mkdir -p ${workspace}
cd ${workspace}/..
sudo chown -R lybuilder:root .
fi

@@ -1549,97 +1549,15 @@ namespace AzToolsFramework
AZ::Vector3::CreateAxisZ());
scaleManipulators->ConfigureView(
2.0f,
AzFramework::ViewportColors::XAxisColor,
AzFramework::ViewportColors::YAxisColor,
AzFramework::ViewportColors::ZAxisColor);
AZ::Color::CreateOne(),
AZ::Color::CreateOne(),
AZ::Color::CreateOne());
// lambdas capture shared_ptr by value to increment ref count
auto manipulatorEntityIds = AZStd::make_shared<ManipulatorEntityIds>();
scaleManipulators->InstallAxisLeftMouseDownCallback(
[this, manipulatorEntityIds](const LinearManipulator::Action& action)
{
// important to sort entityIds based on hierarchy order when updating transforms
BuildSortedEntityIdVectorFromEntityIdMap(m_entityIdManipulators.m_lookups, manipulatorEntityIds->m_entityIds);
// here we are calling SetLocalScale, so order we visit entities in hierarchy is important
for (AZ::EntityId entityId : manipulatorEntityIds->m_entityIds)
{
auto entityIdLookupIt = m_entityIdManipulators.m_lookups.find(entityId);
if (entityIdLookupIt == m_entityIdManipulators.m_lookups.end())
{
continue;
}
AZ::Vector3 localScale = AZ::Vector3::CreateZero();
AZ::TransformBus::EventResult(
localScale, entityId, &AZ::TransformBus::Events::GetLocalScale);
SetEntityLocalScale(entityId, localScale + action.m_start.m_scaleSnapOffset);
AZ::Transform worldFromLocal = AZ::Transform::CreateIdentity();
AZ::TransformBus::EventResult(
worldFromLocal, entityId, &AZ::TransformBus::Events::GetWorldTM);
entityIdLookupIt->second.m_initial = worldFromLocal;
}
m_axisPreview.m_translation = m_entityIdManipulators.m_manipulators->GetLocalTransform().GetTranslation();
m_axisPreview.m_orientation =
QuaternionFromTransformNoScaling(m_entityIdManipulators.m_manipulators->GetLocalTransform());
});
scaleManipulators->InstallAxisLeftMouseUpCallback(
[this, manipulatorEntityIds](const LinearManipulator::Action& /*action*/)
{
m_entityIdManipulators.m_manipulators->SetLocalTransform(
RecalculateAverageManipulatorTransform(
m_entityIdManipulators.m_lookups, m_pivotOverrideFrame, m_pivotMode, m_referenceFrame));
});
scaleManipulators->InstallAxisMouseMoveCallback(
[this, manipulatorEntityIds](const LinearManipulator::Action& action)
{
// note: must use sorted entityIds based on hierarchy order when updating transforms
for (AZ::EntityId entityId : manipulatorEntityIds->m_entityIds)
{
auto entityIdLookupIt = m_entityIdManipulators.m_lookups.find(entityId);
if (entityIdLookupIt == m_entityIdManipulators.m_lookups.end())
{
continue;
}
const AZ::Transform initial = entityIdLookupIt->second.m_initial;
const AZ::Vector3 scale = (AZ::Vector3::CreateOne() +
((action.LocalScaleOffset() *
action.m_start.m_sign) / initial.GetScale())).GetMax(AZ::Vector3(0.01f));
const AZ::Transform scaleTransform = AZ::Transform::CreateScale(scale);
if (action.m_modifiers.Alt())
{
const AZ::Quaternion pivotRotation = entityIdLookupIt->second.m_initial.GetRotation().GetNormalized();
const AZ::Vector3 pivotPosition = entityIdLookupIt->second.m_initial.TransformPoint(CalculateCenterOffset(entityId, m_pivotMode));
const AZ::Transform pivotTransform = AZ::Transform::CreateFromQuaternionAndTranslation(pivotRotation, pivotPosition);
const AZ::Transform transformInPivotSpace = pivotTransform.GetInverse() * initial;
SetEntityWorldTransform(entityId, pivotTransform * scaleTransform * transformInPivotSpace);
}
else
{
const AZ::Transform pivotTransform =
TransformNormalizedScale(m_entityIdManipulators.m_manipulators->GetLocalTransform());
const AZ::Transform transformInPivotSpace = pivotTransform.GetInverse() * initial;
// pivot
SetEntityWorldTransform(entityId, pivotTransform * scaleTransform * transformInPivotSpace);
}
}
});
scaleManipulators->InstallUniformLeftMouseDownCallback(
[this, manipulatorEntityIds](const LinearManipulator::Action& /*action*/)
auto uniformLeftMouseDownCallback =
[this, manipulatorEntityIds]([[maybe_unused]] const LinearManipulator::Action& action)
{
// important to sort entityIds based on hierarchy order when updating transforms
BuildSortedEntityIdVectorFromEntityIdMap(m_entityIdManipulators.m_lookups, manipulatorEntityIds->m_entityIds);
@@ -1656,18 +1574,16 @@ namespace AzToolsFramework
m_axisPreview.m_translation = m_entityIdManipulators.m_manipulators->GetLocalTransform().GetTranslation();
m_axisPreview.m_orientation = QuaternionFromTransformNoScaling(
m_entityIdManipulators.m_manipulators->GetLocalTransform());
});
};
scaleManipulators->InstallUniformLeftMouseUpCallback(
[this, manipulatorEntityIds](const LinearManipulator::Action& /*action*/)
auto uniformLeftMouseUpCallback = [this, manipulatorEntityIds]([[maybe_unused]] const LinearManipulator::Action& action)
{
m_entityIdManipulators.m_manipulators->SetLocalTransform(
RecalculateAverageManipulatorTransform(
m_entityIdManipulators.m_lookups, m_pivotOverrideFrame, m_pivotMode, m_referenceFrame));
});
};
scaleManipulators->InstallUniformMouseMoveCallback(
[this, manipulatorEntityIds](const LinearManipulator::Action& action)
auto uniformLeftMouseMoveCallback = [this, manipulatorEntityIds](const LinearManipulator::Action& action)
{
// note: must use sorted entityIds based on hierarchy order when updating transforms
for (AZ::EntityId entityId : manipulatorEntityIds->m_entityIds)
@@ -1685,7 +1601,7 @@ namespace AzToolsFramework
return vec.GetX() + vec.GetY() + vec.GetZ();
};
const AZ::Vector3 uniformScale = AZ::Vector3(sumVectorElements(action.LocalScaleOffset()));
const AZ::Vector3 uniformScale = AZ::Vector3(action.m_start.m_sign * sumVectorElements(action.LocalScaleOffset()));
const AZ::Vector3 scale = (AZ::Vector3::CreateOne() +
(uniformScale / initialScale)).GetMax(AZ::Vector3(0.01f));
const AZ::Transform scaleTransform = AZ::Transform::CreateScale(scale);
@@ -1709,7 +1625,14 @@ namespace AzToolsFramework
SetEntityWorldTransform(entityId, pivotTransform * scaleTransform * transformInPivotSpace);
}
}
});
};
scaleManipulators->InstallAxisLeftMouseDownCallback(uniformLeftMouseDownCallback);
scaleManipulators->InstallAxisLeftMouseUpCallback(uniformLeftMouseUpCallback);
scaleManipulators->InstallAxisMouseMoveCallback(uniformLeftMouseMoveCallback);
scaleManipulators->InstallUniformLeftMouseDownCallback(uniformLeftMouseDownCallback);
scaleManipulators->InstallUniformLeftMouseUpCallback(uniformLeftMouseUpCallback);
scaleManipulators->InstallUniformMouseMoveCallback(uniformLeftMouseMoveCallback);
// transfer ownership
m_entityIdManipulators.m_manipulators = AZStd::move(scaleManipulators);
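Note: the change above folds the previously separate axis- and uniform-scale handlers into shared lambdas installed on both sets of hooks, each capturing the shared entity-id list by value to keep it alive. A minimal, self-contained C++ sketch of that pattern (hypothetical Manipulator and Action types, not the AzToolsFramework API):

#include <functional>
#include <iostream>
#include <memory>
#include <vector>

struct Action { float m_offset = 0.0f; };

struct Manipulator
{
    std::function<void(const Action&)> m_onMouseDown;
};

int main()
{
    // shared state: every callback that captures this by value bumps the ref count
    auto entityIds = std::make_shared<std::vector<int>>();

    auto onMouseDown = [entityIds](const Action&)
    {
        entityIds->assign({1, 2, 3}); // e.g. rebuild the sorted id list once per click
    };

    Manipulator axis, uniform;
    axis.m_onMouseDown = onMouseDown;    // one lambda reused for both hooks,
    uniform.m_onMouseDown = onMouseDown; // mirroring the Install*Callback calls above

    axis.m_onMouseDown(Action{});
    std::cout << "ids tracked: " << entityIds->size() << '\n'; // prints 3
}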

@@ -213,6 +213,9 @@ namespace PhysXDebug
void SystemComponent::GetRequiredServices(AZ::ComponentDescriptor::DependencyArrayType& required)
{
required.push_back(AZ_CRC("PhysXService"));
#ifdef PHYSXDEBUG_GEM_EDITOR
required.push_back(AZ_CRC("PhysXEditorService"));
#endif // PHYSXDEBUG_GEM_EDITOR
}
void SystemComponent::GetDependentServices(AZ::ComponentDescriptor::DependencyArrayType& dependent)

@@ -27,15 +27,7 @@ def pipelineParameters = [
booleanParam(defaultValue: false, description: 'Deletes the contents of the output directories of the AssetProcessor before building.', name: 'CLEAN_ASSETS'),
booleanParam(defaultValue: false, description: 'Deletes the contents of the workspace and forces a complete pull.', name: 'CLEAN_WORKSPACE'),
booleanParam(defaultValue: false, description: 'Recreates the volume used for the workspace. The volume will be created out of a snapshot taken from main.', name: 'RECREATE_VOLUME'),
string(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE'),
// Pull Request Parameters
string(defaultValue: '', description: '', name: 'DESTINATION_BRANCH'),
string(defaultValue: '', description: '', name: 'DESTINATION_COMMIT'),
string(defaultValue: '', description: '', name: 'PULL_REQUEST_ID'),
string(defaultValue: '', description: '', name: 'REPOSITORY_NAME'),
string(defaultValue: '', description: '', name: 'SOURCE_BRANCH'),
string(defaultValue: '', description: '', name: 'SOURCE_COMMIT')
string(defaultValue: '', description: 'Filters and overrides the list of jobs to run for each of the below platforms (comma-separated). Can\'t be used during a pull request.', name: 'JOB_LIST_OVERRIDE')
]
def palSh(cmd, lbl = '', winSlashReplacement = true) {
@@ -86,17 +78,13 @@ def palRmDir(path) {
def IsJobEnabled(buildTypeMap, pipelineName, platformName) {
def job_list_override = params.JOB_LIST_OVERRIDE.tokenize(',')
if(params.PULL_REQUEST_ID) { // don't allow pull requests to filter platforms/jobs
if(buildTypeMap.value.TAGS) {
return buildTypeMap.value.TAGS.contains(pipelineName)
}
} else if (!job_list_override.isEmpty()) {
if (!job_list_override.isEmpty()) {
return params[platformName] && job_list_override.contains(buildTypeMap.key);
} else {
if (params[platformName]) {
if(buildTypeMap.value.TAGS) {
return buildTypeMap.value.TAGS.contains(pipelineName)
}
}
}
}
return false
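Note: with the pull-request branch gone, the gating above reduces to: platform enabled, then JOB_LIST_OVERRIDE if present, else TAGS. A standalone Groovy sketch of that decision with made-up data:

def isJobEnabled(Map params, List jobListOverride, Map buildType, String pipelineName, String platformName) {
    if (!jobListOverride.isEmpty()) {
        return params[platformName] && jobListOverride.contains(buildType.key)
    }
    if (params[platformName] && buildType.value.TAGS) {
        return buildType.value.TAGS.contains(pipelineName)
    }
    return false
}

// a 'profile' job tagged for the 'default' pipeline, platform checkbox enabled
def buildType = [key: 'profile', value: [TAGS: ['default', 'nightly']]]
assert isJobEnabled([Windows: true], [], buildType, 'default', 'Windows')
assert !isJobEnabled([Windows: true], ['debug'], buildType, 'default', 'Windows')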
@@ -117,11 +105,8 @@ def RegexMatcher(str, regex) {
return matcher ? matcher.group(1) : null
}
def LoadPipelineConfig(String pipelineName, String branchName, String scmType) {
def LoadPipelineConfig(String pipelineName, String branchName) {
echo 'Loading pipeline config'
if (scmType == 'codecommit') {
PullFilesFromGit(PIPELINE_CONFIG_FILE, branchName, true, ENGINE_REPOSITORY_NAME)
}
def pipelineConfig = {}
pipelineConfig = readJSON file: PIPELINE_CONFIG_FILE
palRm(PIPELINE_CONFIG_FILE)
@@ -133,10 +118,6 @@ def LoadPipelineConfig(String pipelineName, String branchName, String scmType) {
if (!env.IS_UNIX) {
platform_regex = platform_regex.replace('/','\\\\')
}
echo "Downloading platform pipeline configs ${pipeline_config}"
if (scmType == 'codecommit') {
PullFilesFromGit(pipeline_config, branchName, false, ENGINE_REPOSITORY_NAME)
}
echo "Searching platform pipeline configs in ${pipeline_config} using ${platform_regex}"
for (pipeline_config_path in findFiles(glob: pipeline_config)) {
echo "\tFound platform pipeline config ${pipeline_config_path}"
@@ -155,10 +136,6 @@ def LoadPipelineConfig(String pipelineName, String branchName, String scmType) {
if (!env.IS_UNIX) {
platform_regex = platform_regex.replace('/','\\\\')
}
echo "Downloading configs ${build_config}"
if (scmType == 'codecommit') {
PullFilesFromGit(build_config, branchName, false, ENGINE_REPOSITORY_NAME)
}
echo "Searching configs in ${build_config} using ${platform_regex}"
for (build_config_path in findFiles(glob: build_config)) {
echo "\tFound config ${build_config_path}"
@@ -171,16 +148,6 @@ def LoadPipelineConfig(String pipelineName, String branchName, String scmType) {
return pipelineConfig
}
def GetSCMType() {
def gitUrl = scm.getUserRemoteConfigs()[0].getUrl()
if (gitUrl ==~ /https:\/\/git-codecommit.*/) {
return 'codecommit'
} else if (gitUrl ==~ /https:\/\/github.com.*/) {
return 'github'
}
return 'unknown'
}
def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
def envVarMap = [:]
platformPipelineEnv = platformEnv['ENV'] ?: [:]
@@ -194,9 +161,9 @@ def GetBuildEnvVars(Map platformEnv, Map buildTypeEnv, String pipelineName) {
}
buildTypeEnv.each { var ->
// This may override the above one if there is an entry defined by the job
envVarMap[var.key] = var.value
}
// Environment that only applies to Jenkins tweaks.
// For 3rdParty downloads, we store them in the EBS volume so we can reuse them across node
// instances. This allows us to scale up and down without having to re-download 3rdParty
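Note: the two .each loops above implement a simple last-writer-wins merge, so a build type's entry overrides the platform's. A tiny Groovy sketch with hypothetical keys:

def platformEnv  = [CMAKE_ARGS: '-G Ninja', BUILD_DIR: 'build/platform']
def buildTypeEnv = [BUILD_DIR: 'build/profile']

def envVarMap = [:]
platformEnv.each { var -> envVarMap[var.key] = var.value }
buildTypeEnv.each { var -> envVarMap[var.key] = var.value } // overrides the platform entry

assert envVarMap.BUILD_DIR == 'build/profile'
assert envVarMap.CMAKE_ARGS == '-G Ninja'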
@@ -214,89 +181,11 @@ def GetEnvStringList(Map envVarMap) {
return strList
}
// Pulls/downloads files from the repo through CodeCommit. Glob matching is NOT supported, but '*' is supported
// as a whole folder or filename (it cannot be a portion of one)
def PullFilesFromGit(String filenamePath, String branchName, boolean failIfNotFound = true, String repositoryName = env.DEFAULT_REPOSITORY_NAME) {
echo "PullFilesFromGit filenamePath=${filenamePath} branchName=${branchName} repositoryName=${repositoryName}"
def folderPathParts = filenamePath.tokenize('/')
def filename = folderPathParts[folderPathParts.size()-1]
folderPathParts.remove(folderPathParts.size()-1) // remove the filename
def folderPath = folderPathParts.join('/')
if (folderPath.contains('*')) {
def currentPath = ''
for (int i = 0; i < folderPathParts.size(); i++) {
if (folderPathParts[i] == '*') {
palMkdir(currentPath)
retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${currentPath} > ${currentPath}/.codecommit", "GetFolder ${currentPath}") }
def folderInfo = readJSON file: "${currentPath}/.codecommit"
folderInfo.subFolders.each { folder ->
def newSubPath = currentPath + '/' + folder.relativePath
for (int j = i+1; j < folderPathParts.size(); j++) {
newSubPath = newSubPath + '/' + folderPathParts[j]
}
newSubPath = newSubPath + '/' + filename
PullFilesFromGit(newSubPath, branchName, false, repositoryName)
}
palRm("${currentPath}/.codecommit")
}
if (i == 0) {
currentPath = folderPathParts[i]
} else {
currentPath = currentPath + '/' + folderPathParts[i]
}
}
} else if (filename.contains('*')) {
palMkdir(folderPath)
retry(3) { palSh("aws codecommit get-folder --repository-name ${repositoryName} --commit-specifier ${branchName} --folder-path ${folderPath} > ${folderPath}/.codecommit", "GetFolder ${folderPath}") }
def folderInfo = readJSON file: "${folderPath}/.codecommit"
folderInfo.files.each { file ->
PullFilesFromGit("${folderPath}/${file.relativePath}", branchName, false, repositoryName) // pull each listed file; '*' only matches whole filenames
}
palRm("${folderPath}/.codecommit")
} else {
def errorFile = "${folderPath}/error.txt"
palMkdir(folderPath)
retry(3) {
try {
if(env.IS_UNIX) {
sh label: "Downloading ${filenamePath}",
script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${filenamePath}_encoded"
sh label: 'Decoding',
script: "base64 --decode ${filenamePath}_encoded > ${filenamePath}"
} else {
errorFile = errorFile.replace('/','\\')
win_filenamePath = filenamePath.replace('/', '\\')
bat label: "Downloading ${win_filenamePath}",
script: "aws codecommit get-file --repository-name ${repositoryName} --commit-specifier ${branchName} --file-path ${filenamePath} --query fileContent --output text 2>${errorFile} > ${win_filenamePath}_encoded"
bat label: 'Decoding',
script: "certutil -decode ${win_filenamePath}_encoded ${win_filenamePath}"
}
palRm("${filenamePath}_encoded")
} catch (Exception ex) {
def error = ''
if(fileExists(errorFile)) {
error = readFile errorFile
}
if (!error || !(!failIfNotFound && error.contains('FileDoesNotExistException'))) {
palRm("${errorFile} ${filenamePath}.encoded ${filenamePath}")
throw new Exception("Could not get file: ${filenamePath}, ex: ${ex}, stderr: ${error}")
}
}
palRm(errorFile)
}
}
}
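Note: some hypothetical invocations of the removed helper, to illustrate the '*' rule documented above (a '*' may only stand in for a whole path segment, never part of one):

// PullFilesFromGit('Tools/build/Jenkins/pipeline.json', 'main')           // single file
// PullFilesFromGit('Tools/build/Platform/*/build_config.json', 'main')    // '*' as a whole folder
// PullFilesFromGit('Tools/build/Platform/Windows/*', 'main', false)       // '*' as a whole filename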
def SetLfsCredentials(cmd, lbl = '') {
if (env.IS_UNIX) {
sh label: lbl,
script: cmd
} else {
bat label: lbl,
script: cmd
}
@@ -325,19 +214,17 @@ def CheckoutBootstrapScripts(String branchName) {
}
def CheckoutRepo(boolean disableSubmodules = false) {
dir(ENGINE_REPOSITORY_NAME) {
palSh('git lfs uninstall', 'Git LFS Uninstall') // Prevent git from pulling lfs objects during checkout
if(fileExists('.git')) {
// If the repository is locked after checkout, we likely took a snapshot while git was running;
// garbage-collect to leave the repo in a usable state. This also helps in situations where
def indexLockFile = '.git/index.lock'
if(fileExists(indexLockFile)) {
palSh('git gc', 'Git GarbageCollect')
}
if(fileExists(indexLockFile)) { // if it is still there, remove it
palRm(indexLockFile)
}
}
@@ -348,54 +235,29 @@ def CheckoutRepo(boolean disableSubmodules = false) {
sleep random.nextInt(60 * retryAttempt) // Stagger checkouts to prevent HTTP 429 (Too Many Requests) response from CodeCommit
}
retryAttempt = retryAttempt + 1
if(params.PULL_REQUEST_ID) {
// This is a pull request build. Perform merge with destination branch before building.
dir(ENGINE_REPOSITORY_NAME) {
checkout scm: [
$class: 'GitSCM',
branches: scm.branches,
extensions: [
[$class: 'PreBuildMerge', options: [mergeRemote: 'origin', mergeTarget: params.DESTINATION_BRANCH]],
[$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
[$class: 'CheckoutOption', timeout: 60]
],
userRemoteConfigs: scm.userRemoteConfigs
]
}
} else {
dir(ENGINE_REPOSITORY_NAME) {
checkout scm: [
$class: 'GitSCM',
branches: scm.branches,
extensions: [
[$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
[$class: 'CheckoutOption', timeout: 60]
],
userRemoteConfigs: scm.userRemoteConfigs
]
}
}
checkout scm: [
$class: 'GitSCM',
branches: scm.branches,
extensions: [
[$class: 'SubmoduleOption', disableSubmodules: disableSubmodules, recursiveSubmodules: true],
[$class: 'CheckoutOption', timeout: 60]
],
userRemoteConfigs: scm.userRemoteConfigs
]
}
// Add folder where we will store the 3rdParty downloads and packages
if(!fileExists('3rdParty')) {
palMkdir('3rdParty')
}
dir(ENGINE_REPOSITORY_NAME) {
// Run lfs in a separate step. Jenkins is unable to load the credentials for the custom LFS endpoint
withCredentials([usernamePassword(credentialsId: "${env.GITHUB_USER}", passwordVariable: 'accesstoken', usernameVariable: 'username')]) {
SetLfsCredentials("git config -f .lfsconfig lfs.url https://${username}:${accesstoken}@${env.LFS_URL}", 'Set credentials')
}
palSh('git lfs install', 'Git LFS Install')
palSh('git lfs pull', 'Git LFS Pull')
// CHANGE_ID is used by some scripts to identify uniquely the current change (usually metric jobs)
palSh('git rev-parse HEAD > commitid', 'Getting commit id')
env.CHANGE_ID = readFile file: 'commitid'
env.CHANGE_ID = env.CHANGE_ID.trim()
palRm('commitid')
}
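Note: the sleep/retryAttempt lines above stagger concurrent checkouts with a randomized, growing delay. A standalone Groovy sketch of the same back-off idea (plain Groovy, no Jenkins steps):

def random = new Random()
def withStagger = { int maxAttempts, Closure work ->
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
        if (attempt > 1) {
            def delay = random.nextInt(60 * (attempt - 1)) // seconds; window widens per retry
            println "Staggering ${delay}s before attempt ${attempt}"
            Thread.sleep(delay * 1000L)
        }
        try {
            work()
            return true
        } catch (Exception ex) {
            println "Attempt ${attempt} failed: ${ex.message}"
        }
    }
    return false
}

assert withStagger(3, { println 'checkout scm ...' })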
def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline, String branchName, String platform, String buildType, String workspace, boolean mount = true, boolean disableSubmodules = false) {
@@ -419,7 +281,7 @@ def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline,
sh label: 'Setting volume\'s ownership',
script: """
if sudo test ! -d "${workspace}"; then
sudo mkdir -p ${workspace}
cd ${workspace}/..
sudo chown -R lybuilder:root .
fi
@@ -436,28 +298,31 @@ def PreBuildCommonSteps(Map pipelineConfig, String projectName, String pipeline,
}
dir(workspace) {
// Add folder where we will store the 3rdParty downloads and packages
if(!fileExists('3rdParty')) {
palMkdir('3rdParty')
}
}
dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
CheckoutRepo(disableSubmodules)
// Get python
dir(ENGINE_REPOSITORY_NAME) {
if(env.IS_UNIX) {
sh label: 'Getting python',
script: 'python/get_python.sh'
} else {
bat label: 'Getting python',
script: 'python/get_python.bat'
}
if(env.CLEAN_OUTPUT_DIRECTORY.toBoolean() || env.CLEAN_ASSETS.toBoolean()) {
def command = "${pipelineConfig.BUILD_ENTRY_POINT} --platform ${platform} --type clean"
if (env.IS_UNIX) {
sh label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.sh -u ${command}"
} else {
bat label: "Running ${platform} clean",
script: "${pipelineConfig.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
}
}
}
@@ -470,7 +335,7 @@ def Build(Map options, String platform, String type, String workspace) {
sh label: "Running ${platform} ${type}",
script: "${options.PYTHON_DIR}/python.sh -u ${command}"
} else {
bat label: "Running ${platform} ${type}",
bat label: "Running ${platform} ${type}",
script: "${options.PYTHON_DIR}/python.cmd -u ${command}".replace('/','\\')
}
}
@@ -488,10 +353,8 @@ def TestMetrics(Map options, String workspace, String branchName, String repoNam
]
withCredentials([usernamePassword(credentialsId: "${env.SERVICE_USER}", passwordVariable: 'apitoken', usernameVariable: 'username')]) {
def command = "${options.PYTHON_DIR}/python.cmd -u mars/scripts/python/ctest_test_metric_scraper.py -e jenkins.creds.user ${username} -e jenkins.creds.pass ${apitoken} ${cmakeBuildDir} ${branchName} %BUILD_NUMBER% AR ${configuration} ${repoName} "
if (params.DESTINATION_BRANCH)
command += "--destination-branch ${params.DESTINATION_BRANCH} "
bat label: "Publishing ${buildJobName} Test Metrics",
script: command
}
}
}
@@ -500,14 +363,6 @@ def TestMetrics(Map options, String workspace, String branchName, String repoNam
def PostBuildCommonSteps(String workspace, boolean mount = true) {
echo 'Starting post-build common steps...'
if(params.PULL_REQUEST_ID) {
dir("${workspace}/${ENGINE_REPOSITORY_NAME}") {
if(fileExists('.git')) {
palSh('git reset --hard HEAD', 'Discard PR merge, git reset')
}
}
}
if (mount) {
def pythonCmd = ''
if(env.IS_UNIX) pythonCmd = 'sudo -E python -u '
@@ -571,7 +426,6 @@ try {
withEnv(envVarList) {
timestamps {
(projectName, pipelineName) = GetRunningPipelineName(env.JOB_NAME) // env.JOB_NAME is the name of the job given by Jenkins
scmType = GetSCMType()
if(env.BRANCH_NAME) {
branchName = env.BRANCH_NAME
@@ -583,13 +437,11 @@ try {
echo "Running \"${pipelineName}\" for \"${branchName}\"..."
if (scmType == 'github') {
CheckoutBootstrapScripts(branchName)
}
CheckoutBootstrapScripts(branchName)
// Load configs
pipelineConfig = LoadPipelineConfig(pipelineName, branchName, scmType)
pipelineConfig = LoadPipelineConfig(pipelineName, branchName)
// Add each platform as a parameter that the user can disable if needed
pipelineConfig.platforms.each { platform ->
pipelineParameters.add(booleanParam(defaultValue: true, description: '', name: platform.key))
@@ -598,18 +450,15 @@ try {
properties(pipelineProperties)
// Stash the INCREMENTAL_BUILD_SCRIPT_PATH since all nodes will use it
if (scmType == 'codecommit') {
PullFilesFromGit(INCREMENTAL_BUILD_SCRIPT_PATH, branchName, true, ENGINE_REPOSITORY_NAME)
}
stash name: 'incremental_build_script',
includes: INCREMENTAL_BUILD_SCRIPT_PATH
}
}
}
}
}
if(env.BUILD_NUMBER == '1') {
// Exit pipeline early on the initial build. This allows Jenkins to load the pipeline for the branch and enables users
// to select build parameters on their first actual build. See https://issues.jenkins.io/browse/JENKINS-41929
currentBuild.result = 'SUCCESS'
return
@@ -626,16 +475,16 @@ try {
envVars['JOB_NAME'] = "${branchName}_${platform.key}_${build_job.key}" // backwards compatibility, some scripts rely on this
def nodeLabel = envVars['NODE_LABEL']
buildConfigs["${platform.key} [${build_job.key}]"] = {
buildConfigs["${platform.key} [${build_job.key}]"] = {
node("${nodeLabel}") {
if(isUnix()) { // Has to happen inside a node
envVars['IS_UNIX'] = 1
}
withEnv(GetEnvStringList(envVars)) {
timeout(time: envVars['TIMEOUT'], unit: 'MINUTES', activity: true) {
try {
def build_job_name = build_job.key
CreateSetupStage(pipelineConfig, projectName, pipelineName, branchName, platform.key, build_job.key, envVars).call()
if(build_job.value.steps) { //this is a pipe with many steps so create all the build stages
@@ -646,7 +495,7 @@ try {
} else {
CreateBuildStage(pipelineConfig, platform.key, build_job.key, envVars).call()
}
if (env.MARS_REPO && platform.key == 'Windows' && build_job_name.startsWith('test')) {
def output_directory = platform.value.build_types[build_job_name].PARAMETERS.OUTPUT_DIRECTORY
def configuration = platform.value.build_types[build_job_name].PARAMETERS.CONFIGURATION
@@ -692,9 +541,9 @@ finally {
try {
if(env.SNS_TOPIC) {
snsPublish(
topicArn: env.SNS_TOPIC,
subject:'Build Result',
message:"${currentBuild.currentResult}:${params.REPOSITORY_NAME}:${params.SOURCE_BRANCH}:${params.SOURCE_COMMIT}:${params.DESTINATION_COMMIT}:${params.PULL_REQUEST_ID}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}"
topicArn: env.SNS_TOPIC,
subject:'Build Result',
message:"${currentBuild.currentResult}:${BUILD_URL}:${env.RECREATE_VOLUME}:${env.CLEAN_OUTPUT_DIRECTORY}:${env.CLEAN_ASSETS}"
)
}
step([

@@ -10,10 +10,8 @@
#
import os
import sys
import glob_to_regex
import zipfile
import timeit
import stat
import progressbar
from optparse import OptionParser
from PackageEnv import PackageEnv
@@ -26,18 +24,6 @@ from glob3 import glob
def package(options):
package_env = PackageEnv(options.platform, options.type, options.package_env)
engine_root = package_env.get('ENGINE_ROOT')
if not package_env.get('SKIP_SCRUBBING'):
# Ask the validator code to tell us which files need to be removed from the package
prohibited_file_mask = get_prohibited_file_mask(options.platform, engine_root)
# Scrub files. This is destructive, but necessary for the current file existence checks to work properly. It would be better to copy
# and then build, or to mask on sync, but this is what we have for now
scrub_files(package_env, prohibited_file_mask)
# validate files
validate_restricted_files(options.platform, options.type, package_env)
# Override values in bootstrap.cfg for PC package
override_bootstrap_cfg(package_env)
@@ -93,61 +79,6 @@ def override_bootstrap_cfg(package_env):
print('{} updated with value {}'.format(bootstrap_path, replace_values))
def get_prohibited_file_mask(platform, engine_root):
sys.path.append(os.path.join(engine_root, 'Tools', 'build', 'JenkinsScripts', 'distribution', 'scrubbing'))
from validator_data_LEGAL_REVIEW_REQUIRED import get_prohibited_platforms_for_package
# The list of prohibited platforms is controlled by the validator on a per-package basis
prohibited_platforms = get_prohibited_platforms_for_package(platform)
prohibited_platforms.append('all')
excludes_list = []
for p in prohibited_platforms:
platform_excludes = glob_to_regex.generate_excludes_for_platform(engine_root, p)
excludes_list.extend(platform_excludes)
prohibited_file_mask = re.compile('|'.join(excludes_list), re.IGNORECASE)
return prohibited_file_mask
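Note: the removed mask builder above OR-joins per-platform exclude patterns into one case-insensitive regex. A standalone Python sketch with made-up patterns (the real ones come from glob_to_regex):

import re

excludes_list = [
    r'.*/Platform/SecretPlatform/.*',  # hypothetical restricted folder
    r'.*_secretplatform\.cpp$',        # hypothetical restricted file suffix
]
prohibited_file_mask = re.compile('|'.join(excludes_list), re.IGNORECASE)

assert prohibited_file_mask.match('/repo/Code/Platform/SecretPlatform/foo.cpp')
assert prohibited_file_mask.match('/repo/Code/bar_SECRETPLATFORM.CPP')
assert not prohibited_file_mask.match('/repo/Code/Platform/Windows/foo.cpp')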
def scrub_files(package_env, prohibited_file_mask):
print('Perform the Code Scrubbing')
engine_root = package_env.get('ENGINE_ROOT')
success = True
for dirname, subFolders, files in os.walk(engine_root):
for filename in files:
full_path = os.path.join(dirname, filename)
if prohibited_file_mask.match(full_path):
try:
print('Deleting: {}'.format(full_path))
os.chmod(full_path, stat.S_IWRITE)
os.unlink(full_path)
except:
e = sys.exc_info()[0]
sys.stderr.write('Error: could not delete {} ... aborting.\n'.format(full_path))
sys.stderr.write('{}\n'.format(str(e)))
success = False
if not success:
sys.stderr.write('ERROR: scrub_files failed\n')
sys.exit(1)
def validate_restricted_files(package_platform, package_type, package_env):
print('Perform the restricted file validation')
engine_root = package_env.get('ENGINE_ROOT')
# Run validator
success = True
validator_path = os.path.join(engine_root, 'Tools/build/JenkinsScripts/distribution/scrubbing/validator.py')
python = get_python_path(package_env)
args = [python, validator_path, '--package_platform', package_platform, '--package_type', package_type, engine_root]
return_code = safe_execute_system_call(args)
if return_code != 0:
success = False
if not success:
error('Restricted file validator failed.')
print('Restricted file validator completed successfully.')
def cmake_build(package_env):
build_targets = package_env.get('BUILD_TARGETS')
for build_target in build_targets:
@@ -161,15 +92,7 @@ def create_package(package_env, package_target):
if file_list_type == 'All':
filelist = os.path.join(cur_dir, 'package_filelists', package_target['FILE_LIST'])
else:
# Search non-restricted platform first
filelist = os.path.join(cur_dir, 'Platform', file_list_type, 'package_filelists', package_target['FILE_LIST'])
if not os.path.exists(filelist):
engine_root = package_env.get('ENGINE_ROOT')
# Use real path in case engine root is a symlink path
if os.name == 'posix' and os.path.islink(engine_root):
engine_root = os.readlink(engine_root)
rel_path = os.path.relpath(cur_dir, engine_root)
filelist = os.path.join(engine_root, 'restricted', file_list_type, rel_path, 'package_filelists', package_target['FILE_LIST'])
with open(filelist, 'r') as source:
data = json.load(source)
lyengine = package_env.get('ENGINE_ROOT')
@@ -296,7 +219,3 @@ def parse_args():
if __name__ == "__main__":
(options, args) = parse_args()
package(options)
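Note: the removed lookup in create_package first tries the non-restricted Platform tree, then falls back to the restricted tree, resolving a symlinked engine root so the relative path stays correct. A standalone Python sketch with hypothetical paths:

import os

def resolve_filelist(cur_dir, engine_root, file_list_type, file_list_name):
    candidate = os.path.join(cur_dir, 'Platform', file_list_type,
                             'package_filelists', file_list_name)
    if os.path.exists(candidate):
        return candidate
    # fall back to the restricted tree, mirroring cur_dir's location under the root
    if os.name == 'posix' and os.path.islink(engine_root):
        engine_root = os.readlink(engine_root)  # real path, so relpath() is stable
    rel_path = os.path.relpath(cur_dir, engine_root)
    return os.path.join(engine_root, 'restricted', file_list_type, rel_path,
                        'package_filelists', file_list_name)

print(resolve_filelist('/ly/Tools/build', '/ly', 'SecretPlatform', 'engine.json'))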
