Adds a TEST_SCREENSHOTS boolean toggle to build_config.json and subdirectory searching to the upload_to_s3.py script

monroegm-disable-blank-issue-2
jromnoa 5 years ago
parent 63bb8aa8a9
commit b2bcd6a326

@@ -177,7 +177,8 @@
 "CMAKE_NATIVE_BUILD_ARGS": "/m /nologo",
 "CTEST_OPTIONS": "-L \"(SUITE_smoke_REQUIRES_gpu|SUITE_main_REQUIRES_gpu)\" -T Test",
 "TEST_METRICS": "True",
-"TEST_RESULTS": "True"
+"TEST_RESULTS": "True",
+"TEST_SCREENSHOTS": "True"
 }
 },
 "asset_profile_vs2019": {

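For context, a pipeline step could key off this toggle before running the screenshot upload. A minimal sketch in Python, assuming the toggles sit in a nested parameter block as the surrounding brackets suggest (the schema and helper name here are assumptions, not part of the commit):

import json

def screenshots_enabled(config_path, profile):
    # Toggles in build_config.json are stored as the strings "True"/"False",
    # not JSON booleans, so compare against the string form.
    with open(config_path) as f:
        config = json.load(f)
    # Assumed schema: profile -> inner parameter block -> "TEST_SCREENSHOTS".
    for block in config.get(profile, {}).values():
        if isinstance(block, dict) and "TEST_SCREENSHOTS" in block:
            return block["TEST_SCREENSHOTS"] == "True"
    return False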
@@ -17,6 +17,9 @@ python upload_to_s3.py --base_dir %WORKSPACE% --file_regex "(.*zip$|.*MD5$)" --b
 Use profile to upload all .zip and .MD5 files in %WORKSPACE% folder to bucket ly-packages-mainline:
 python upload_to_s3.py --base_dir %WORKSPACE% --profile profile --file_regex "(.*zip$|.*MD5$)" --bucket ly-packages-mainline
+Another example: upload all .png and .ppm files inside base_dir and every subdirectory beneath it:
+python upload_to_s3.py --base_dir %WORKSPACE%/path/to/files --file_regex "(.*png$|.*ppm$)" --bucket screenshot-test-bucket --search_subdirectories --key_prefix Test
 '''
@@ -34,6 +37,8 @@ def parse_args():
     parser.add_option("--profile", dest="profile", default=None, help="The name of a profile to use. If not given, then the default profile is used.")
     parser.add_option("--bucket", dest="bucket", default=None, help="S3 bucket the files are uploaded to.")
     parser.add_option("--key_prefix", dest="key_prefix", default='', help="Object key prefix.")
+    parser.add_option("--search_subdirectories", dest="search_subdirectories", action='store_true',
+                      help="Toggle for searching for files in subdirectories beneath base_dir; defaults to False.")
     '''
     ExtraArgs used to call s3.upload_file(), should be in json format. extra_args key must be one of: ACL, CacheControl, ContentDisposition, ContentEncoding, ContentLanguage, ContentType, Expires,
     GrantFullControl, GrantRead, GrantReadACP, GrantWriteACP, Metadata, RequestPayer, ServerSideEncryption, StorageClass,
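Because the new option uses action='store_true', the flag takes no value on the command line; a bare --search_subdirectories enables it, which is why the usage example above omits a trailing True. A self-contained check of that behavior, trimmed to the relevant options:

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--base_dir", dest="base_dir")
parser.add_option("--search_subdirectories", dest="search_subdirectories", action='store_true',
                  help="Toggle for searching for files in subdirectories beneath base_dir; defaults to False.")

# The bare flag flips the option to True.
opts, _ = parser.parse_args(["--base_dir", "C:/ws", "--search_subdirectories"])
assert opts.search_subdirectories is True

# Without the flag, optparse leaves the value at None (falsy) since no explicit
# default=False is set; truthiness checks downstream still behave as intended.
opts, _ = parser.parse_args(["--base_dir", "C:/ws"])
assert opts.search_subdirectories is None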
@@ -62,48 +67,74 @@ def get_client(service_name, profile_name):
     return client
 
-def get_files_to_upload(base_dir, regex):
+def get_files_to_upload(base_dir, regex, search_subdirectories):
+    """
+    Uses a regular expression pattern to return a list of file paths for files to upload to the s3 bucket.
+    :param base_dir: path to the base directory; when search_subdirectories is True this acts as the parent directory.
+    :param regex: pattern to use for the regex search, e.g. "(.*zip$|.*MD5$)"
+    :param search_subdirectories: False to collect only the files directly in base_dir, True to also collect files in
+    every subdirectory beneath base_dir; defaults to False via the parse_args() function.
+    :return: a list of string file paths, matching the regex pattern, for files to upload to the s3 bucket.
+    """
     # Get all file names in base directory
-    files = [x for x in os.listdir(base_dir) if os.path.isfile(os.path.join(base_dir, x))]
-    # strip the surround quotes, if they exist
+    files = [os.path.join(base_dir, x) for x in os.listdir(base_dir) if os.path.isfile(os.path.join(base_dir, x))]
+    if search_subdirectories:  # Get all file names in base directory and any subdirectories.
+        # os.walk also yields base_dir itself, so reset the list to avoid duplicating the entries gathered above.
+        files = []
+        for subdirectory in os.walk(base_dir):
+            # Example output for subdirectory:
+            # ('C:\path\to\base_dir', ['Subfolder1', 'Subfolder2'], ['file1', 'file2'])
+            subdirectory_file_path = subdirectory[0]
+            subdirectory_files = subdirectory[2]
+            subdirectory_file_paths = _build_file_paths(subdirectory_file_path, subdirectory_files)
+            files.extend(subdirectory_file_paths)
     try:
-        regex = json.loads(regex)
+        regex = json.loads(regex)  # strip the surrounding quotes, if they exist
     except:
         pass
     # Get all file names matching the regular expression, those files will be uploaded to S3
-    files_to_upload = [x for x in files if re.match(regex, x)]
-    return files_to_upload
+    regex_files_to_upload = [x for x in files if re.match(regex, x)]
+    return regex_files_to_upload
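The same traversal, written as a standalone function for reference; collect_files is a hypothetical name, and the else branch mirrors the flat listing above:

import os
import re

def collect_files(base_dir, pattern, search_subdirectories=False):
    # os.walk yields a (dirpath, dirnames, filenames) tuple for base_dir and
    # for every directory beneath it, so the recursive branch covers base_dir too.
    if search_subdirectories:
        files = []
        for dirpath, _dirnames, filenames in os.walk(base_dir):
            files.extend(os.path.join(dirpath, name) for name in filenames)
    else:
        files = [os.path.join(base_dir, name) for name in os.listdir(base_dir)
                 if os.path.isfile(os.path.join(base_dir, name))]
    # re.match anchors at the start of the full path, so patterns such as
    # "(.*png$|.*ppm$)" still match once paths include directories.
    return [path for path in files if re.match(pattern, path)]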
-def s3_upload_file(client, base_dir, file, bucket, key_prefix=None, extra_args=None, max_retry=1):
-    print(('Uploading file {} to bucket {}.'.format(file, bucket)))
+def s3_upload_file(client, file, bucket, key_prefix=None, extra_args=None, max_retry=1):
     key = file if key_prefix is None else '{}/{}'.format(key_prefix, file)
     for x in range(max_retry):
         try:
-            client.upload_file(
-                os.path.join(base_dir, file), bucket, key,
-                ExtraArgs=extra_args
-            )
-            print('Upload succeeded')
+            client.upload_file(file, bucket, key, ExtraArgs=extra_args)
             return True
         except Exception as err:
-            print(('exception while uploading: {}'.format(err)))
-            print('Retrying upload...')
+            print('Upload failed: Exception while uploading: {}'.format(err))
-    print('Upload failed')
     return False
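The retry logic reduces to a small loop; a sketch under a hypothetical name, using only the boto3 upload_file call already present in the script:

def upload_with_retry(client, path, bucket, key, extra_args=None, max_retry=2):
    # Attempt the upload up to max_retry times; report each failure and return
    # True on the first success, False once every attempt has failed.
    for attempt in range(1, max_retry + 1):
        try:
            client.upload_file(path, bucket, key, ExtraArgs=extra_args)
            return True
        except Exception as err:
            print('Attempt {} of {} failed: {}'.format(attempt, max_retry, err))
    return False

One side effect of this commit worth noting: get_files_to_upload now returns full paths, so the object key built from key_prefix and file embeds the local path. If shorter keys are wanted, trimming with os.path.relpath(file, base_dir) before building the key would be one option.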
 
+def _build_file_paths(path_to_files, files_in_path):
+    """
+    Given a path containing files, returns a list of strings giving the complete path to each file.
+    :param path_to_files: path to the directory that holds the files to build path strings for
+    :param files_in_path: list of file names located inside the path_to_files directory
+    :return: list of complete file path strings rooted at path_to_files.
+    """
+    parsed_file_paths = []
+    for file_in_path in files_in_path:
+        complete_file_path = os.path.join(path_to_files, file_in_path)
+        parsed_file_paths.append(complete_file_path)
+    return parsed_file_paths
 
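For reference, the helper collapses to a single comprehension; build_file_paths below is a renamed sketch, not the script's function:

import os

def build_file_paths(path_to_files, files_in_path):
    # One-line equivalent of _build_file_paths above.
    return [os.path.join(path_to_files, f) for f in files_in_path]

assert build_file_paths('base', ['a.png', 'b.ppm']) == [
    os.path.join('base', 'a.png'), os.path.join('base', 'b.ppm')]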
 if __name__ == "__main__":
     options = parse_args()
     client = get_client('s3', options.profile)
-    files_to_upload = get_files_to_upload(options.base_dir, options.file_regex)
+    files_to_upload = get_files_to_upload(options.base_dir, options.file_regex, options.search_subdirectories)
     extra_args = json.loads(options.extra_args) if options.extra_args else None
     print(('Uploading {} files to bucket {}.'.format(len(files_to_upload), options.bucket)))
     failure = []
     success = []
     for file in files_to_upload:
-        if not s3_upload_file(client, options.base_dir, file, options.bucket, options.key_prefix, extra_args, 2):
+        if not s3_upload_file(client, file, options.bucket, options.key_prefix, extra_args, 2):
             failure.append(file)
         else:
             success.append(file)
