Merge branch 'main' into Atom/dmcdiar/ATOM-15517
commit
35d7ee5e53
@ -0,0 +1,10 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
@ -0,0 +1,237 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import pytest
|
||||
import time
|
||||
import typing
|
||||
|
||||
from datetime import datetime
|
||||
import ly_test_tools.log.log_monitor
|
||||
|
||||
from assetpipeline.ap_fixtures.asset_processor_fixture import asset_processor as asset_processor
|
||||
from AWS.common.aws_utils import aws_utils
|
||||
from AWS.common.aws_credentials import aws_credentials
|
||||
from AWS.Windows.resource_mappings.resource_mappings import resource_mappings
|
||||
from AWS.Windows.cdk.cdk import cdk
|
||||
from .aws_metrics_utils import aws_metrics_utils
|
||||
|
||||
# Name of the gem under test; also used when composing metrics file paths and
# as the CloudWatch namespace for the custom metrics.
AWS_METRICS_FEATURE_NAME = 'AWSMetrics'
# Client log file monitored for metrics submission success/failure messages.
GAME_LOG_NAME = 'Game.log'

logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup(launcher: ly_test_tools.launchers.Launcher,
          cdk: cdk,
          asset_processor: asset_processor,
          resource_mappings: resource_mappings,
          context_variable: str = '') -> typing.Tuple[ly_test_tools.log.log_monitor.LogMonitor, str, str]:
    """
    Deploy the CDK application, prime the asset processor and attach a log monitor.

    :param launcher: Client launcher for running the test level.
    :param cdk: CDK application for deploying the AWS resources.
    :param asset_processor: asset_processor fixture.
    :param resource_mappings: resource_mappings fixture.
    :param context_variable: Optional CDK context variable used to enable optional features.
    :return: Tuple of (log monitor, metrics file path, metrics stack name).
    """
    logger.info(f'Cdk stack names:\n{cdk.list()}')
    deployed_stacks = cdk.deploy(context_variable=context_variable)
    resource_mappings.populate_output_keys(deployed_stacks)

    # Make sure project assets are processed before the client is launched.
    asset_processor.start()
    asset_processor.wait_for_idle()

    # Remove stale artifacts from previous runs so monitoring starts clean.
    metrics_path = os.path.join(launcher.workspace.paths.project(), 'user',
                                AWS_METRICS_FEATURE_NAME, 'metrics.json')
    remove_file(metrics_path)

    game_log_path = os.path.join(launcher.workspace.paths.project_log(), GAME_LOG_NAME)
    remove_file(game_log_path)

    # Initialize the log monitor on the (freshly removed) game log.
    monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=game_log_path)

    return monitor, metrics_path, deployed_stacks[0]
|
||||
|
||||
|
||||
def monitor_metrics_submission(log_monitor: ly_test_tools.log.log_monitor.LogMonitor) -> None:
    """
    Watch the game log for the metrics submission success and failure messages.

    :param log_monitor: Log monitor to check the log messages.
    """
    success_messages = [
        '(Script) - Submitted metrics without buffer.',
        '(Script) - Submitted metrics with buffer.',
        '(Script) - Metrics is sent successfully.'
    ]

    failure_messages = [
        '(Script) - Failed to submit metrics without buffer.',
        '(Script) - Failed to submit metrics with buffer.',
        '(Script) - Failed to send metrics.'
    ]

    found_expected = log_monitor.monitor_log_for_lines(
        expected_lines=success_messages,
        unexpected_lines=failure_messages,
        halt_on_unexpected=True)

    # Fail with the full message lists so a broken run is easy to triage.
    assert found_expected, (
        f'Log monitoring failed. Used expected_lines values: {success_messages} & '
        f'unexpected_lines values: {failure_messages}')
|
||||
|
||||
|
||||
def remove_file(file_path: str) -> None:
    """
    Remove a local file, then its containing directory if that left it empty.

    :param file_path: Path to the local file.
    """
    parent_dir = os.path.dirname(file_path)

    if os.path.exists(file_path):
        os.remove(file_path)

    # Only prune the directory when nothing else lives in it.
    if os.path.exists(parent_dir) and not os.listdir(parent_dir):
        os.rmdir(parent_dir)
|
||||
|
||||
|
||||
@pytest.mark.SUITE_periodic
@pytest.mark.usefixtures('automatic_process_killer')
@pytest.mark.parametrize('project', ['AutomatedTesting'])
@pytest.mark.parametrize('level', ['AWS/Metrics'])
@pytest.mark.parametrize('feature_name', [AWS_METRICS_FEATURE_NAME])
@pytest.mark.parametrize('resource_mappings_filename', ['aws_resource_mappings.json'])
@pytest.mark.parametrize('profile_name', ['AWSAutomationTest'])
@pytest.mark.parametrize('region_name', ['us-west-2'])
@pytest.mark.parametrize('assume_role_arn', ['arn:aws:iam::645075835648:role/o3de-automation-tests'])
@pytest.mark.parametrize('session_name', ['o3de-Automation-session'])
class TestAWSMetrics_Windows(object):
    """End-to-end tests for the AWSMetrics gem against deployed AWS resources."""

    def test_AWSMetrics_RealTimeAnalytics_MetricsSentToCloudWatch(self,
                                                                  level: str,
                                                                  launcher: ly_test_tools.launchers.Launcher,
                                                                  asset_processor: pytest.fixture,
                                                                  workspace: pytest.fixture,
                                                                  aws_utils: aws_utils,
                                                                  aws_credentials: aws_credentials,
                                                                  resource_mappings: resource_mappings,
                                                                  cdk: cdk,
                                                                  aws_metrics_utils: aws_metrics_utils,
                                                                  ):
        """
        Tests that the submitted metrics are sent to CloudWatch for real-time analytics.
        """
        game_log_monitor, _metrics_file, stack = setup(launcher, cdk, asset_processor, resource_mappings)

        # Real-time analytics requires the Kinesis Data Analytics application to be running.
        analytics_app_name = f'{stack}-AnalyticsApplication'
        aws_metrics_utils.start_kinesis_data_analytics_application(analytics_app_name)

        launcher.args = ['+LoadLevel', level]
        launcher.args.append('-rhi=null')

        with launcher.start(launch_ap=False):
            capture_start = datetime.utcnow()
            monitor_metrics_submission(game_log_monitor)

            # Verify that operational health metrics are delivered to CloudWatch.
            aws_metrics_utils.verify_cloud_watch_delivery(
                'AWS/Lambda',
                'Invocations',
                [{'Name': 'FunctionName',
                  'Value': f'{stack}-AnalyticsProcessingLambda'}],
                capture_start)
            logger.info('Operational health metrics sent to CloudWatch.')

            aws_metrics_utils.verify_cloud_watch_delivery(
                AWS_METRICS_FEATURE_NAME,
                'TotalLogins',
                [],
                capture_start)
            logger.info('Real-time metrics sent to CloudWatch.')

            # Stop the Kinesis Data Analytics application.
            aws_metrics_utils.stop_kinesis_data_analytics_application(analytics_app_name)

    def test_AWSMetrics_UnauthorizedUser_RequestRejected(self,
                                                         level: str,
                                                         launcher: ly_test_tools.launchers.Launcher,
                                                         cdk: cdk,
                                                         aws_credentials: aws_credentials,
                                                         asset_processor: pytest.fixture,
                                                         resource_mappings: resource_mappings,
                                                         workspace: pytest.fixture):
        """
        Tests that unauthorized users cannot send metrics events to the AWS backed backend.
        """
        game_log_monitor, _metrics_file, _stack = setup(launcher, cdk, asset_processor, resource_mappings)

        # Set invalid AWS credentials.
        launcher.args = ['+LoadLevel', level, '+cl_awsAccessKey', 'AKIAIOSFODNN7EXAMPLE',
                         '+cl_awsSecretKey', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY']
        launcher.args.append('-rhi=null')

        with launcher.start(launch_ap=False):
            rejected = game_log_monitor.monitor_log_for_lines(
                expected_lines=['(Script) - Failed to send metrics.'],
                unexpected_lines=['(Script) - Metrics is sent successfully.'],
                halt_on_unexpected=True)
            assert rejected, 'Metrics events are sent successfully by unauthorized user'
            logger.info('Unauthorized user is rejected to send metrics.')

    def test_AWSMetrics_BatchAnalytics_MetricsDeliveredToS3(self,
                                                            level: str,
                                                            launcher: ly_test_tools.launchers.Launcher,
                                                            cdk: cdk,
                                                            aws_credentials: aws_credentials,
                                                            asset_processor: pytest.fixture,
                                                            resource_mappings: resource_mappings,
                                                            aws_utils: aws_utils,
                                                            aws_metrics_utils: aws_metrics_utils,
                                                            workspace: pytest.fixture):
        """
        Tests that the submitted metrics are sent to the data lake for batch analytics.
        """
        game_log_monitor, _metrics_file, stack = setup(launcher, cdk, asset_processor, resource_mappings,
                                                       context_variable='batch_processing=true')

        analytics_bucket = aws_metrics_utils.get_analytics_bucket_name(stack)

        launcher.args = ['+LoadLevel', level]
        launcher.args.append('-rhi=null')

        with launcher.start(launch_ap=False):
            capture_start = datetime.utcnow()
            monitor_metrics_submission(game_log_monitor)

            # Verify that operational health metrics are delivered to CloudWatch.
            aws_metrics_utils.verify_cloud_watch_delivery(
                'AWS/Lambda',
                'Invocations',
                [{'Name': 'FunctionName',
                  'Value': f'{stack}-EventsProcessingLambda'}],
                capture_start)
            logger.info('Operational health metrics sent to CloudWatch.')

            aws_metrics_utils.verify_s3_delivery(analytics_bucket)
            logger.info('Metrics sent to S3.')

            # Run the glue crawler to populate the AWS Glue Data Catalog with tables.
            aws_metrics_utils.run_glue_crawler(f'{stack}-EventsCrawler')
            # Run named queries on the table to verify the batch analytics.
            aws_metrics_utils.run_named_queries(f'{stack}-AthenaWorkGroup')
            logger.info('Query metrics from S3 successfully.')

            # Kinesis Data Firehose buffers incoming data before it delivers it to Amazon S3. Sleep for the
            # default interval (60s) to make sure that all the metrics are sent to the bucket before cleanup.
            time.sleep(60)
            # Empty the S3 bucket. S3 buckets can only be deleted successfully when it doesn't contain any object.
            aws_metrics_utils.empty_s3_bucket(analytics_bucket)
|
||||
|
||||
@ -0,0 +1,252 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import pathlib
|
||||
import pytest
|
||||
import typing
|
||||
|
||||
from datetime import datetime
|
||||
from botocore.exceptions import WaiterError
|
||||
|
||||
from AWS.common.aws_utils import AwsUtils
|
||||
from .aws_metrics_waiters import KinesisAnalyticsApplicationUpdatedWaiter, \
|
||||
CloudWatchMetricsDeliveredWaiter, DataLakeMetricsDeliveredWaiter, GlueCrawlerReadyWaiter
|
||||
|
||||
# Silence noisy boto logging in test output.
logging.getLogger('boto').setLevel(logging.CRITICAL)

# Expected directory and file extension for the S3 objects.
EXPECTED_S3_DIRECTORY = 'firehose_events/'
EXPECTED_S3_OBJECT_EXTENSION = '.parquet'
|
||||
|
||||
|
||||
class AWSMetricsUtils:
    """
    Provide utils functions for the AWSMetrics gem to interact with the deployed resources.
    """

    def __init__(self, aws_utils: 'AwsUtils'):
        """
        :param aws_utils: aws_utils fixture object used to create boto3 clients/resources.
            (Annotation is a forward reference so this module imports without AwsUtils in scope.)
        """
        self._aws_util = aws_utils

    def start_kinesis_data_analytics_application(self, application_name: str) -> None:
        """
        Start the Kinesis Data Analytics application for real-time analytics.

        :param application_name: Name of the Kinesis Data Analytics application.
        """
        input_id = self.get_kinesis_analytics_application_input_id(application_name)
        assert input_id, 'invalid Kinesis Data Analytics application input.'

        client = self._aws_util.client('kinesisanalytics')
        try:
            client.start_application(
                ApplicationName=application_name,
                InputConfigurations=[
                    {
                        'Id': input_id,
                        'InputStartingPositionConfiguration': {
                            'InputStartingPosition': 'NOW'
                        }
                    },
                ]
            )
        except client.exceptions.ResourceInUseException:
            # The application has been started.
            return

        try:
            KinesisAnalyticsApplicationUpdatedWaiter(client, 'RUNNING').wait(application_name=application_name)
        except WaiterError as e:
            # Raise instead of `assert False` so the failure survives `python -O`.
            raise AssertionError(f'Failed to start the Kinesis Data Analytics application: {str(e)}.') from e

    def get_kinesis_analytics_application_input_id(self, application_name: str) -> str:
        """
        Get the input ID for the Kinesis Data Analytics application.

        :param application_name: Name of the Kinesis Data Analytics application.
        :return: Input ID for the Kinesis Data Analytics application, or '' when the
            application has no (or more than one) input description.
        """
        client = self._aws_util.client('kinesisanalytics')
        response = client.describe_application(
            ApplicationName=application_name
        )
        if not response:
            return ''
        input_descriptions = response.get('ApplicationDetail', {}).get('InputDescriptions', [])
        if len(input_descriptions) != 1:
            return ''

        return input_descriptions[0].get('InputId', '')

    def stop_kinesis_data_analytics_application(self, application_name: str) -> None:
        """
        Stop the Kinesis Data Analytics application.

        :param application_name: Name of the Kinesis Data Analytics application.
        """
        client = self._aws_util.client('kinesisanalytics')
        client.stop_application(
            ApplicationName=application_name
        )

        try:
            KinesisAnalyticsApplicationUpdatedWaiter(client, 'READY').wait(application_name=application_name)
        except WaiterError as e:
            raise AssertionError(f'Failed to stop the Kinesis Data Analytics application: {str(e)}.') from e

    def verify_cloud_watch_delivery(self, namespace: str, metrics_name: str,
                                    dimensions: typing.List[dict], start_time: datetime) -> None:
        """
        Verify that the expected metrics is delivered to CloudWatch.

        :param namespace: Namespace of the metrics.
        :param metrics_name: Name of the metrics.
        :param dimensions: Dimensions of the metrics.
        :param start_time: Start time for generating the metrics.
        """
        client = self._aws_util.client('cloudwatch')

        try:
            CloudWatchMetricsDeliveredWaiter(client).wait(
                namespace=namespace,
                metrics_name=metrics_name,
                dimensions=dimensions,
                start_time=start_time
            )
        except WaiterError as e:
            raise AssertionError(f'Failed to deliver metrics to CloudWatch: {str(e)}.') from e

    def verify_s3_delivery(self, analytics_bucket_name: str) -> None:
        """
        Verify that metrics are delivered to S3 for batch analytics successfully.

        :param analytics_bucket_name: Name of the deployed S3 bucket.
        """
        client = self._aws_util.client('s3')

        try:
            DataLakeMetricsDeliveredWaiter(client).wait(
                bucket_name=analytics_bucket_name, prefix=EXPECTED_S3_DIRECTORY)
        except WaiterError as e:
            raise AssertionError(f'Failed to find the S3 directory for storing metrics data: {str(e)}.') from e

        # Check whether the data is converted to the expected data format.
        response = client.list_objects_v2(
            Bucket=analytics_bucket_name,
            Prefix=EXPECTED_S3_DIRECTORY
        )
        assert response.get('KeyCount', 0) != 0, \
            f'Failed to deliver metrics to the S3 bucket {analytics_bucket_name}.'

        s3_objects = response.get('Contents', [])
        for s3_object in s3_objects:
            key = s3_object.get('Key', '')
            assert pathlib.Path(key).suffix == EXPECTED_S3_OBJECT_EXTENSION, \
                f'Invalid data format is found in the S3 bucket {analytics_bucket_name}'

    def run_glue_crawler(self, crawler_name: str) -> None:
        """
        Run the Glue crawler and wait for it to finish.

        :param crawler_name: Name of the Glue crawler
        """
        client = self._aws_util.client('glue')
        try:
            client.start_crawler(
                Name=crawler_name
            )
        except client.exceptions.CrawlerRunningException:
            # The crawler has already been started.
            return

        try:
            GlueCrawlerReadyWaiter(client).wait(crawler_name=crawler_name)
        except WaiterError as e:
            raise AssertionError(f'Failed to run the Glue crawler: {str(e)}.') from e

    def run_named_queries(self, work_group: str) -> None:
        """
        Run the named queries under the specific Athena work group.

        :param work_group: Name of the Athena work group.
        """
        import time  # Local import to leave the module-level import block untouched.

        client = self._aws_util.client('athena')
        # List all the named queries.
        response = client.list_named_queries(
            WorkGroup=work_group
        )
        named_query_ids = response.get('NamedQueryIds', [])

        # Run each of the queries.
        for named_query_id in named_query_ids:
            get_named_query_response = client.get_named_query(
                NamedQueryId=named_query_id
            )
            named_query = get_named_query_response.get('NamedQuery', {})

            start_query_execution_response = client.start_query_execution(
                QueryString=named_query.get('QueryString', ''),
                QueryExecutionContext={
                    'Database': named_query.get('Database', '')
                },
                WorkGroup=work_group
            )

            # Poll until the query leaves the QUEUED/RUNNING states; sleep between
            # polls so we don't hammer the Athena API in a busy loop.
            state = 'RUNNING'
            while state == 'QUEUED' or state == 'RUNNING':
                get_query_execution_response = client.get_query_execution(
                    QueryExecutionId=start_query_execution_response.get('QueryExecutionId', '')
                )

                state = get_query_execution_response.get('QueryExecution', {}).get('Status', {}).get('State', '')
                if state in ('QUEUED', 'RUNNING'):
                    time.sleep(1)

            assert state == 'SUCCEEDED', f'Failed to run the named query {named_query.get("Name", {})}'

    def empty_s3_bucket(self, bucket_name: str) -> None:
        """
        Empty the S3 bucket following:
        https://boto3.amazonaws.com/v1/documentation/api/latest/guide/migrations3.html

        :param bucket_name: Name of the S3 bucket.
        """
        s3 = self._aws_util.resource('s3')
        bucket = s3.Bucket(bucket_name)

        for key in bucket.objects.all():
            key.delete()

    def get_analytics_bucket_name(self, stack_name: str) -> str:
        """
        Get the name of the deployed S3 bucket.

        :param stack_name: Name of the CloudFormation stack.
        :return: Name of the deployed S3 bucket, or '' when the stack has no bucket.
        """
        client = self._aws_util.client('cloudformation')

        response = client.describe_stack_resources(
            StackName=stack_name
        )
        resources = response.get('StackResources', [])

        for resource in resources:
            if resource.get('ResourceType') == 'AWS::S3::Bucket':
                return resource.get('PhysicalResourceId', '')

        return ''
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
def aws_metrics_utils(
        request: pytest.fixture,
        aws_utils: pytest.fixture):
    """
    Fixture for the AWS metrics util functions.

    :param request: _pytest.fixtures.SubRequest class that handles getting
        a pytest fixture from a pytest function/fixture.
    :param aws_utils: aws_utils fixture.
    """
    return AWSMetricsUtils(aws_utils)
|
||||
@ -0,0 +1,142 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
import botocore.client
|
||||
import logging
|
||||
|
||||
from datetime import timedelta
|
||||
from AWS.common.custom_waiter import CustomWaiter, WaitState
|
||||
|
||||
logging.getLogger('boto').setLevel(logging.CRITICAL)
|
||||
|
||||
|
||||
class KinesisAnalyticsApplicationUpdatedWaiter(CustomWaiter):
    """
    Custom waiter that polls DescribeApplication until the Kinesis analytics
    application reports the requested status.
    """

    def __init__(self, client: botocore.client, status: str):
        """
        Initialize the waiter.

        :param client: Boto3 client to use.
        :param status: Expected status.
        """
        acceptors = {status: WaitState.SUCCESS}
        super().__init__(
            'KinesisAnalyticsApplicationUpdated',
            'DescribeApplication',
            'ApplicationDetail.ApplicationStatus',
            acceptors,
            client)

    def wait(self, application_name: str):
        """
        Block until the application reaches the expected status.

        :param application_name: Name of the Kinesis analytics application.
        """
        self._wait(ApplicationName=application_name)
|
||||
|
||||
|
||||
class GlueCrawlerReadyWaiter(CustomWaiter):
    """
    Custom waiter that polls GetCrawler until the Glue crawler returns to the
    READY state (i.e. finished its processing).
    """

    def __init__(self, client: botocore.client):
        """
        Initialize the waiter.

        :param client: Boto3 client to use.
        """
        acceptors = {'READY': WaitState.SUCCESS}
        super().__init__(
            'GlueCrawlerReady',
            'GetCrawler',
            'Crawler.State',
            acceptors,
            client)

    def wait(self, crawler_name):
        """
        Block until the crawler is READY.

        :param crawler_name: Name of the Glue crawler.
        """
        self._wait(Name=crawler_name)
|
||||
|
||||
|
||||
class DataLakeMetricsDeliveredWaiter(CustomWaiter):
    """
    Custom waiter that polls ListObjectsV2 until at least one object exists
    under the expected prefix in the S3 bucket.
    """

    def __init__(self, client: botocore.client):
        """
        Initialize the waiter.

        :param client: Boto3 client to use.
        """
        acceptors = {True: WaitState.SUCCESS}
        super().__init__(
            'DataLakeMetricsDelivered',
            'ListObjectsV2',
            'KeyCount > `0`',
            acceptors,
            client)

    def wait(self, bucket_name, prefix):
        """
        Block until the expected directory exists in the bucket.

        :param bucket_name: Name of the S3 bucket.
        :param prefix: Name of the expected directory prefix.
        """
        self._wait(Bucket=bucket_name, Prefix=prefix)
|
||||
|
||||
|
||||
class CloudWatchMetricsDeliveredWaiter(CustomWaiter):
    """
    Custom waiter that polls GetMetricStatistics until at least one datapoint
    exists for the expected metrics.
    """

    def __init__(self, client: botocore.client):
        """
        Initialize the waiter.

        :param client: Boto3 client to use.
        """
        acceptors = {True: WaitState.SUCCESS}
        super().__init__(
            'CloudWatchMetricsDelivered',
            'GetMetricStatistics',
            'length(Datapoints) > `0`',
            acceptors,
            client)

    def wait(self, namespace, metrics_name, dimensions, start_time):
        """
        Block until the expected metrics have datapoints in CloudWatch.

        :param namespace: Namespace of the metrics.
        :param metrics_name: Name of the metrics.
        :param dimensions: Dimensions of the metrics.
        :param start_time: Start time for generating the metrics.
        """
        # Query the window [start_time, start_time + waiter timeout].
        query_end = start_time + timedelta(seconds=self.timeout)
        self._wait(
            Namespace=namespace,
            MetricName=metrics_name,
            Dimensions=dimensions,
            StartTime=start_time,
            EndTime=query_end,
            Period=60,
            Statistics=['SampleCount'],
            Unit='Count'
        )
|
||||
@ -0,0 +1,134 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
import boto3
|
||||
import configparser
|
||||
import logging
|
||||
import os
|
||||
import pytest
|
||||
import typing
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logging.getLogger('boto').setLevel(logging.CRITICAL)
|
||||
|
||||
|
||||
class AwsCredentials:
    """
    Read and write AWS credentials for a named profile in the shared credentials file.
    """

    def __init__(self, profile_name: str):
        """
        :param profile_name: Named AWS profile to read/write.
        """
        self._profile_name = profile_name

        self._credentials_path = os.environ.get('AWS_SHARED_CREDENTIALS_FILE')
        if not self._credentials_path:
            # Home directory location varies based on the operating system, but is referred to using the environment
            # variables %UserProfile% in Windows and $HOME or ~ (tilde) in Unix-based systems.
            self._credentials_path = os.path.join(os.environ.get('UserProfile', os.path.expanduser('~')),
                                                  '.aws', 'credentials')
        # Remember whether the file pre-existed so cleanup only deletes files this object created.
        self._credentials_file_exists = os.path.exists(self._credentials_path)

        self._credentials = configparser.ConfigParser()
        self._credentials.read(self._credentials_path)

    def get_aws_credentials(self) -> typing.Tuple[str, str, str]:
        """
        Get aws credentials stored in the specific named profile.

        :return AWS credentials as (access key id, secret access key, session token);
            each element is None when the attribute is missing.
        """
        access_key_id = self._get_aws_credential_attribute_value('aws_access_key_id')
        secret_access_key = self._get_aws_credential_attribute_value('aws_secret_access_key')
        session_token = self._get_aws_credential_attribute_value('aws_session_token')

        return access_key_id, secret_access_key, session_token

    def set_aws_credentials_by_session(self, session: 'boto3.Session') -> None:
        """
        Set AWS credentials stored in the specific named profile using an assumed role session.
        (Annotation is a forward reference so this module imports even without boto3.)

        :param session: assumed role session.
        """
        credentials = session.get_credentials().get_frozen_credentials()
        self.set_aws_credentials(credentials.access_key, credentials.secret_key, credentials.token)

    def set_aws_credentials(self, aws_access_key_id: str, aws_secret_access_key: str,
                            aws_session_token: str) -> None:
        """
        Set AWS credentials stored in the specific named profile.

        Passing None for an attribute removes it; when the whole file becomes empty
        and it did not exist before this object was created, the file is removed
        instead of being written out empty.

        :param aws_access_key_id: AWS access key id.
        :param aws_secret_access_key: AWS secret access key.
        :param aws_session_token: AWS assumed role session token.
        """
        self._set_aws_credential_attribute_value('aws_access_key_id', aws_access_key_id)
        self._set_aws_credential_attribute_value('aws_secret_access_key', aws_secret_access_key)
        self._set_aws_credential_attribute_value('aws_session_token', aws_session_token)

        if (len(self._credentials.sections()) == 0) and (not self._credentials_file_exists):
            # Only a file this object created is removed here. Guard with exists():
            # previously os.remove raised FileNotFoundError when nothing had been
            # written yet (file never existed and credentials were cleared).
            if os.path.exists(self._credentials_path):
                os.remove(self._credentials_path)
            return

        with open(self._credentials_path, 'w+') as credential_file:
            self._credentials.write(credential_file)

    def _get_aws_credential_attribute_value(self, attribute_name: str) -> str:
        """
        Get the value of an AWS credential attribute stored in the specific named profile.

        :param attribute_name: Name of the AWS credential attribute.
        :return Value of the AWS credential attribute, or None when missing.
        """
        try:
            value = self._credentials.get(self._profile_name, attribute_name)
        except configparser.NoSectionError:
            # Named profile doesn't exist.
            value = None
        except configparser.NoOptionError:
            # Named profile doesn't have the specified attribute.
            value = None

        return value

    def _set_aws_credential_attribute_value(self, attribute_name: str, attribute_value: str) -> None:
        """
        Set the value of an AWS credential attribute stored in the specific named profile.

        :param attribute_name: Name of the AWS credential attribute.
        :param attribute_value: Value of the AWS credential attribute; None removes it.
        """
        if self._profile_name not in self._credentials:
            self._credentials[self._profile_name] = {}

        if attribute_value is None:
            self._credentials.remove_option(self._profile_name, attribute_name)
            # Remove the named profile if it doesn't have any AWS credential attribute.
            if len(self._credentials[self._profile_name]) == 0:
                self._credentials.remove_section(self._profile_name)
        else:
            self._credentials[self._profile_name][attribute_name] = attribute_value
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
def aws_credentials(request: pytest.fixture, aws_utils: pytest.fixture, profile_name: str):
    """
    Fixture for setting up temporary AWS credentials from assume role.

    :param request: _pytest.fixtures.SubRequest class that handles getting
        a pytest fixture from a pytest function/fixture.
    :param aws_utils: aws_utils fixture.
    :param profile_name: Named AWS profile to store temporary credentials.
    """
    credentials = AwsCredentials(profile_name)
    original = credentials.get_aws_credentials()

    credentials.set_aws_credentials_by_session(aws_utils.assume_session())

    # Reset the named profile to the original AWS credentials after the test.
    request.addfinalizer(lambda: credentials.set_aws_credentials(*original))

    return credentials
|
||||
@ -1,82 +1,90 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
import boto3
|
||||
import pytest
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AwsUtils:
    """
    Assumes an IAM role from the local 'default' profile and exposes boto3
    clients/resources created from the assumed-role session.
    """

    def __init__(self, arn: str, session_name: str, region_name: str):
        """
        Assume the given role using the local 'default' profile's STS client.

        :param arn: ARN of the IAM role to assume.
        :param session_name: Session name to attach to the assume-role call.
        :param region_name: AWS region configured on the assumed session.
        """
        local_session = boto3.Session(profile_name='default')
        local_sts_client = local_session.client('sts')
        self._local_account_id = local_sts_client.get_caller_identity()["Account"]
        logger.info(f'Local Account Id: {self._local_account_id}')

        response = local_sts_client.assume_role(RoleArn=arn, RoleSessionName=session_name)

        # Build a session from the temporary credentials returned by STS.
        self._assume_session = boto3.Session(aws_access_key_id=response['Credentials']['AccessKeyId'],
                                             aws_secret_access_key=response['Credentials']['SecretAccessKey'],
                                             aws_session_token=response['Credentials']['SessionToken'],
                                             region_name=region_name)

        assume_sts_client = self._assume_session.client('sts')
        assume_account_id = assume_sts_client.get_caller_identity()["Account"]
        logger.info(f'Assume Account Id: {assume_account_id}')
        self._assume_account_id = assume_account_id

    def client(self, service: str):
        """
        Get the client for a specific AWS service from configured session
        :return: Client for the AWS service.
        """
        return self._assume_session.client(service)

    def resource(self, service: str):
        """
        Get the resource for a specific AWS service from configured session
        (added for consistency with the newer version of this class).
        :return: Resource for the AWS service.
        """
        return self._assume_session.resource(service)

    def assume_session(self):
        # Session built from the assumed-role credentials.
        return self._assume_session

    def local_account_id(self):
        # Account id of the local 'default' profile caller.
        return self._local_account_id

    def assume_account_id(self):
        # Account id reached through the assumed role.
        return self._assume_account_id

    def destroy(self) -> None:
        """
        clears stored session
        """
        self._assume_session = None
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
def aws_utils(
        request: pytest.fixture,
        assume_role_arn: str,
        session_name: str,
        region_name: str):
    """
    Fixture that yields an AwsUtils helper configured from an assumed role.

    :param request: _pytest.fixtures.SubRequest class that handles getting
        a pytest fixture from a pytest function/fixture.
    :param assume_role_arn: Role used to fetch temporary aws credentials, configure service clients with obtained credentials.
    :param session_name: Session name to set.
    :param region_name: AWS account region to set for session.
    :return AWSUtils class object.
    """
    utils = AwsUtils(assume_role_arn, session_name, region_name)

    def _drop_session():
        # Release the assumed-role session once the test is done.
        utils.destroy()

    request.addfinalizer(_drop_session)
    return utils
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
import boto3
|
||||
import pytest
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logging.getLogger('boto').setLevel(logging.CRITICAL)
|
||||
|
||||
|
||||
class AwsUtils:
    """
    Assumes an IAM role from the local 'default' profile and exposes boto3
    clients/resources created from the assumed-role session.
    """

    def __init__(self, arn: str, session_name: str, region_name: str):
        """
        Assume the given role using the local 'default' profile's STS client.

        :param arn: ARN of the IAM role to assume.
        :param session_name: Session name to attach to the assume-role call.
        :param region_name: AWS region configured on the assumed session.
        """
        default_session = boto3.Session(profile_name='default')
        sts = default_session.client('sts')
        self._local_account_id = sts.get_caller_identity()["Account"]
        logger.info(f'Local Account Id: {self._local_account_id}')

        assumed = sts.assume_role(RoleArn=arn, RoleSessionName=session_name)
        creds = assumed['Credentials']

        # Build a session from the temporary credentials returned by STS.
        self._assume_session = boto3.Session(
            aws_access_key_id=creds['AccessKeyId'],
            aws_secret_access_key=creds['SecretAccessKey'],
            aws_session_token=creds['SessionToken'],
            region_name=region_name)

        assume_account_id = self._assume_session.client('sts').get_caller_identity()["Account"]
        logger.info(f'Assume Account Id: {assume_account_id}')
        self._assume_account_id = assume_account_id

    def client(self, service: str):
        """
        Get the client for a specific AWS service from configured session
        :return: Client for the AWS service.
        """
        return self._assume_session.client(service)

    def resource(self, service: str):
        """
        Get the resource for a specific AWS service from configured session
        :return: Client for the AWS service.
        """
        return self._assume_session.resource(service)

    def assume_session(self):
        # Session built from the assumed-role credentials.
        return self._assume_session

    def local_account_id(self):
        # Account id of the local 'default' profile caller.
        return self._local_account_id

    def assume_account_id(self):
        # Account id reached through the assumed role.
        return self._assume_account_id

    def destroy(self) -> None:
        """
        clears stored session
        """
        self._assume_session = None
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
def aws_utils(
        request: pytest.fixture,
        assume_role_arn: str,
        session_name: str,
        region_name: str):
    """
    Fixture for AWS util functions

    :param request: _pytest.fixtures.SubRequest class that handles getting
        a pytest fixture from a pytest function/fixture.
    :param assume_role_arn: Role used to fetch temporary aws credentials, configure service clients with obtained credentials.
    :param session_name: Session name to set.
    :param region_name: AWS account region to set for session.
    :return AWSUtils class object.
    """
    aws_helper = AwsUtils(assume_role_arn, session_name, region_name)
    # Drop the assumed-role session when the test finishes.
    request.addfinalizer(aws_helper.destroy)
    return aws_helper
|
||||
|
||||
@ -0,0 +1,91 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
import botocore.client
|
||||
import botocore.waiter
|
||||
import logging
|
||||
|
||||
logging.getLogger('boto').setLevel(logging.CRITICAL)
|
||||
|
||||
|
||||
class WaitState(Enum):
    """Terminal states that a custom waiter acceptor can map a response to."""
    SUCCESS = 'success'
    FAILURE = 'failure'
|
||||
|
||||
|
||||
class CustomWaiter:
    """
    Base class for a custom waiter.

    Modified from:
    https://docs.aws.amazon.com/code-samples/latest/catalog/python-demo_tools-custom_waiter.py.html
    """
    def __init__(
            self, name: str, operation: str, argument: str,
            acceptors: dict, client: botocore.client, delay: int = 30, max_tries: int = 10,
            matcher='path'):
        """
        Subclasses should pass specific operations, arguments, and acceptors to
        their superclass.

        :param name: The name of the waiter. This can be any descriptive string.
        :param operation: The operation to wait for. This must match the casing of
                          the underlying operation model, which is typically in
                          CamelCase.
        :param argument: The dict keys used to access the result of the operation, in
                         dot notation. For example, 'Job.Status' will access
                         result['Job']['Status'].
        :param acceptors: The list of acceptors that indicate the wait is over. These
                          can indicate either success or failure. The acceptor values
                          are compared to the result of the operation after the
                          argument keys are applied.
        :param client: The Boto3 client.
        :param delay: The number of seconds to wait between each call to the operation. Default to 30 seconds.
        :param max_tries: The maximum number of tries before exiting. Default to 10.
        :param matcher: The kind of matcher to use. Default to 'path'.
        """
        self.name = name
        self.operation = operation
        self.argument = argument
        self.client = client
        # Build a single-waiter botocore WaiterModel: one acceptor per
        # (expected value -> WaitState) entry, all sharing the same matcher
        # and result-path argument.
        self.waiter_model = botocore.waiter.WaiterModel({
            'version': 2,
            'waiters': {
                name: {
                    "delay": delay,
                    "operation": operation,
                    "maxAttempts": max_tries,
                    "acceptors": [{
                        "state": state.value,
                        "matcher": matcher,
                        "argument": argument,
                        "expected": expected
                    } for expected, state in acceptors.items()]
                }}})
        self.waiter = botocore.waiter.create_waiter_with_client(
            self.name, self.waiter_model, self.client)

        # Worst-case total wait in seconds (polling interval * attempts).
        self._timeout = delay * max_tries

    def _wait(self, **kwargs):
        """
        Starts the botocore wait loop.

        :param kwargs: Keyword arguments that are passed to the operation being polled.
        """
        self.waiter.wait(**kwargs)

    @property
    def timeout(self):
        # Maximum number of seconds this waiter can block before giving up.
        return self._timeout
|
||||
|
||||
|
||||
@ -0,0 +1,131 @@
|
||||
"""
|
||||
All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
its licensors.
|
||||
|
||||
For complete copyright and license terms please see the LICENSE at the root of this
|
||||
distribution (the "License"). All use of this software is governed by the License,
|
||||
or, if provided, by the license below or the license accompanying this file. Do not
|
||||
remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
"""
|
||||
|
||||
|
||||
# fmt: off
class Tests():
    # Each attribute is a (success message, failure message) tuple consumed by
    # Report.result(...) in the test below.
    new_event_created = ("Successfully created a new event", "Failed to create a new event")
    child_event_created = ("Successfully created Child Event", "Failed to create Child Event")
    file_saved = ("Successfully saved event asset", "Failed to save event asset")
    parameter_created = ("Successfully added parameter", "Failed to add parameter")
    parameter_removed = ("Successfully removed parameter", "Failed to remove parameter")
# fmt: on
|
||||
|
||||
|
||||
def ScriptEvent_AddRemoveParameter_ActionsSuccessful():
    """
    Summary:
    Parameter can be removed from a Script Event method

    Expected Behavior:
    Upon saving the updated .scriptevents asset the removed parameter should no longer be present on the Script Event

    Test Steps:
    1) Open Asset Editor
    2) Get Asset Editor Qt object
    3) Create new Script Event Asset
    4) Add Parameter to Event
    5) Remove Parameter from Event

    Note:
    - This test file must be called from the Open 3D Engine Editor command terminal
    - Any passed and failed tests are written to the Editor.log file.
    Parsing the file or running a log_monitor are required to observe the test results.

    :return: None
    """
    import os
    from PySide2 import QtWidgets

    from editor_python_test_tools.utils import Report
    from editor_python_test_tools.utils import TestHelper as helper
    import editor_python_test_tools.pyside_utils as pyside_utils

    import azlmbr.bus as bus
    import azlmbr.editor as editor
    import azlmbr.legacy.general as general

    GENERAL_WAIT = 1.0  # seconds
    FILE_PATH = os.path.join("AutomatedTesting", "ScriptCanvas", "test_file.scriptevent")
    QtObject = object  # alias used only for readability in the helper signatures

    # NOTE: the nested helpers below close over `container`, `menu_bar` and
    # `asset_editor_widget`, which are assigned later in this function (step 2)
    # but before any helper is called.
    def create_script_event(asset_editor: QtObject, file_path: str) -> None:
        # Trigger File > New > "Script Events" from the Asset Editor's menu bar.
        action = pyside_utils.find_child_by_pattern(menu_bar, {"type": QtWidgets.QAction, "text": "Script Events"})
        action.trigger()
        result = helper.wait_for_condition(
            lambda: container.findChild(QtWidgets.QFrame, "Events") is not None, 3 * GENERAL_WAIT
        )
        Report.result(Tests.new_event_created, result)

        # Add new child event
        add_event = container.findChild(QtWidgets.QFrame, "Events").findChild(QtWidgets.QToolButton, "")
        add_event.click()
        result = helper.wait_for_condition(
            lambda: asset_editor.findChild(QtWidgets.QFrame, "EventName") is not None, GENERAL_WAIT
        )
        Report.result(Tests.child_event_created, result)
        # Save the Script Event file
        editor.AssetEditorWidgetRequestsBus(bus.Broadcast, "SaveAssetAs", file_path)

        # Verify if file is created
        result = helper.wait_for_condition(lambda: os.path.exists(file_path), 3 * GENERAL_WAIT)
        Report.result(Tests.file_saved, result)

    def create_parameter(file_path: str) -> None:
        # Click the add-parameter tool button; the new parameter shows up as a
        # frame named "[0]" (first parameter row).
        add_param = container.findChild(QtWidgets.QFrame, "Parameters").findChild(QtWidgets.QToolButton, "")
        add_param.click()
        result = helper.wait_for_condition(
            lambda: asset_editor_widget.findChild(QtWidgets.QFrame, "[0]") is not None, GENERAL_WAIT
        )
        Report.result(Tests.parameter_created, result)
        editor.AssetEditorWidgetRequestsBus(bus.Broadcast, "SaveAssetAs", file_path)

    def remove_parameter(file_path: str) -> None:
        # Remove the "[0]" parameter row and wait for it to disappear.
        remove_param = container.findChild(QtWidgets.QFrame, "[0]").findChild(QtWidgets.QToolButton, "")
        remove_param.click()
        result = helper.wait_for_condition(
            lambda: asset_editor_widget.findChild(QtWidgets.QFrame, "[0]") is None, GENERAL_WAIT
        )
        Report.result(Tests.parameter_removed, result)
        editor.AssetEditorWidgetRequestsBus(bus.Broadcast, "SaveAssetAs", file_path)

    # 1) Open Asset Editor
    general.idle_enable(True)
    # Initially close the Asset Editor and then reopen to ensure we don't have any existing assets open
    general.close_pane("Asset Editor")
    general.open_pane("Asset Editor")
    helper.wait_for_condition(lambda: general.is_pane_visible("Asset Editor"), 5.0)

    # 2) Get Asset Editor Qt object
    editor_window = pyside_utils.get_editor_main_window()
    asset_editor_widget = editor_window.findChild(QtWidgets.QDockWidget, "Asset Editor").findChild(
        QtWidgets.QWidget, "AssetEditorWindowClass"
    )
    container = asset_editor_widget.findChild(QtWidgets.QWidget, "ContainerForRows")
    menu_bar = asset_editor_widget.findChild(QtWidgets.QMenuBar)

    # 3) Create new Script Event Asset
    create_script_event(asset_editor_widget, FILE_PATH)

    # 4) Add Parameter to Event
    create_parameter(FILE_PATH)

    # 5) Remove Parameter from Event
    remove_parameter(FILE_PATH)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Set up sys.path for the editor's embedded Python before importing test tools.
    import ImportPathHelper as imports

    imports.init()
    from editor_python_test_tools.utils import Report

    # Run the test and write pass/fail lines to the Editor log.
    Report.start_test(ScriptEvent_AddRemoveParameter_ActionsSuccessful)
|
||||
@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:804193a2afd68cd1e6bec8155ea11400566f2941fbd6eb0c324839ebcd10192d
|
||||
size 8492
|
||||
oid sha256:302d6172156e8ed665e44e206d81f54f1b0f1008d73327300ea92f8c1159780b
|
||||
size 11820
|
||||
|
||||
@ -1,3 +1,3 @@
|
||||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:40949893ed7009eeaa90b7ce6057cb6be9dfaf7b162e3c26ba9dadf985939d7d
|
||||
size 2038
|
||||
oid sha256:b9cd9d6f67440c193a85969ec5c082c6343e6d1fff3b6f209a0a6931eb22dd47
|
||||
size 2949
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,48 @@
|
||||
/*
|
||||
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
* its licensors.
|
||||
*
|
||||
* For complete copyright and license terms please see the LICENSE at the root of this
|
||||
* distribution (the "License"). All use of this software is governed by the License,
|
||||
* or, if provided, by the license below or the license accompanying this file. Do not
|
||||
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
*
|
||||
*/
|
||||
|
||||
#include <Atom/Features/SrgSemantics.azsli>
|
||||
|
||||
// Material data made available to ray tracing hit shaders.
ShaderResourceGroup RayTracingMaterialSrg : SRG_RayTracingMaterial
{
    // Shared trilinear wrap sampler used for all material textures.
    Sampler LinearSampler
    {
        AddressU = Wrap;
        AddressV = Wrap;
        MinFilter = Linear;
        MagFilter = Linear;
        MipFilter = Linear;
        MaxAnisotropy = 16;
    };

    // material info structured buffer
    struct MaterialInfo
    {
        float4 m_baseColor;        // fallback base color when no base color texture is bound
        float m_metallicFactor;    // fallback metallic value
        float m_roughnessFactor;   // fallback roughness value
        uint m_textureFlags;       // TEXTURE_FLAG_* bits for the optional textures below
        uint m_textureStartIndex;  // first slot of this material's textures in m_materialTextures
    };

    // hit shaders can retrieve the MaterialInfo for a mesh hit using: RayTracingMaterialSrg::m_materialInfo[InstanceIndex()]
    StructuredBuffer<MaterialInfo> m_materialInfo;

    // texture flag bits indicating if optional textures are present
    #define TEXTURE_FLAG_BASECOLOR 1
    #define TEXTURE_FLAG_NORMAL 2
    #define TEXTURE_FLAG_METALLIC 4
    #define TEXTURE_FLAG_ROUGHNESS 8

    // unbounded array of Material textures
    Texture2D m_materialTextures[];
}
|
||||
@ -0,0 +1,69 @@
|
||||
/*
|
||||
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
* its licensors.
|
||||
*
|
||||
* For complete copyright and license terms please see the LICENSE at the root of this
|
||||
* distribution (the "License"). All use of this software is governed by the License,
|
||||
* or, if provided, by the license below or the license accompanying this file. Do not
|
||||
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
*
|
||||
*/
|
||||
|
||||
// Material values resolved for a single ray hit.
struct TextureData
{
    float4 m_baseColor;  // sampled base color, or MaterialInfo.m_baseColor fallback
    float3 m_normal;     // presumably tangent-space (defaults to (0, 0, 1)) — confirm
    float m_metallic;    // metallic factor
    float m_roughness;   // roughness factor
};
|
||||
|
||||
// Resolves the effective material values for a ray hit: reads each optional
// texture when its flag bit is set, otherwise falls back to the MaterialInfo
// constants.
//
// Present textures are packed consecutively starting at m_textureStartIndex in
// flag order (base color, normal, metallic, roughness); each present texture
// consumes one slot, so the textureIndex++ order below must match the CPU-side
// packing.
TextureData GetHitTextureData(RayTracingMaterialSrg::MaterialInfo materialInfo, float2 uv)
{
    TextureData textureData = (TextureData)0;

    uint textureIndex = materialInfo.m_textureStartIndex;

    // base color
    if (materialInfo.m_textureFlags & TEXTURE_FLAG_BASECOLOR)
    {
        textureData.m_baseColor = RayTracingMaterialSrg::m_materialTextures[textureIndex++].SampleLevel(RayTracingMaterialSrg::LinearSampler, uv, 0);
    }
    else
    {
        textureData.m_baseColor = materialInfo.m_baseColor;
    }

    // normal: take .xyz explicitly instead of relying on implicit float4 -> float3 truncation
    if (materialInfo.m_textureFlags & TEXTURE_FLAG_NORMAL)
    {
        textureData.m_normal = RayTracingMaterialSrg::m_materialTextures[textureIndex++].SampleLevel(RayTracingMaterialSrg::LinearSampler, uv, 0).xyz;
    }
    else
    {
        // unperturbed +Z normal when no normal map is bound
        textureData.m_normal = float3(0.0f, 0.0f, 1.0f);
    }

    // metallic: take .r explicitly (implicit truncation reads the same component)
    if (materialInfo.m_textureFlags & TEXTURE_FLAG_METALLIC)
    {
        textureData.m_metallic = RayTracingMaterialSrg::m_materialTextures[textureIndex++].SampleLevel(RayTracingMaterialSrg::LinearSampler, uv, 0).r;
    }
    else
    {
        textureData.m_metallic = materialInfo.m_metallicFactor;
    }

    // roughness: take .r explicitly (implicit truncation reads the same component)
    if (materialInfo.m_textureFlags & TEXTURE_FLAG_ROUGHNESS)
    {
        textureData.m_roughness = RayTracingMaterialSrg::m_materialTextures[textureIndex++].SampleLevel(RayTracingMaterialSrg::LinearSampler, uv, 0).r;
    }
    else
    {
        textureData.m_roughness = materialInfo.m_roughnessFactor;
    }

    return textureData;
}
|
||||
|
||||
@ -0,0 +1,126 @@
|
||||
/*
|
||||
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
|
||||
* its licensors.
|
||||
*
|
||||
* For complete copyright and license terms please see the LICENSE at the root of this
|
||||
* distribution (the "License"). All use of this software is governed by the License,
|
||||
* or, if provided, by the license below or the license accompanying this file. Do not
|
||||
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
*
|
||||
*/
|
||||
|
||||
// returns the normalized camera view ray into the scene for this raytracing dispatch thread
float3 GetViewRayDirection(float4x4 viewProjectionInverseMatrix)
{
    // pixel center in [0,1] viewport coordinates for this dispatch thread
    float2 pixel = ((float2)DispatchRaysIndex().xy + float2(0.5f, 0.5f)) / (float2)DispatchRaysDimensions();
    // map to NDC, flipping y so +y points up
    float2 ndc = pixel * float2(2.0f, -2.0f) + float2(-1.0f, 1.0f);
    // NOTE(review): the unprojected point is normalized without dividing by .w;
    // confirm this matches the projection convention used by the scene camera.
    return normalize(mul(viewProjectionInverseMatrix, float4(ndc, 0.0f, 1.0f)).xyz);
}
|
||||
|
||||
// returns the vertex indices for the primitive hit by the ray
// Note: usable only in a raytracing Hit shader
uint3 GetHitIndices(RayTracingSceneSrg::MeshInfo meshInfo)
{
    // compute the array index of the index buffer for this mesh in the m_meshBuffers unbounded array
    uint meshIndexBufferArrayIndex = meshInfo.m_bufferStartIndex + MESH_INDEX_BUFFER_OFFSET;

    // compute the offset into the index buffer for this primitive of the mesh
    // (12 bytes per triangle: three 32-bit indices, matching the Load3 below)
    uint offsetBytes = meshInfo.m_indexOffset + (PrimitiveIndex() * 12);

    // load the indices for this primitive from the index buffer
    return RayTracingSceneSrg::m_meshBuffers[meshIndexBufferArrayIndex].Load3(offsetBytes);
}
|
||||
|
||||
// returns the interpolated vertex data for the primitive hit by the ray
// Note: usable only in a raytracing hit shader
struct VertexData
{
    float3 m_position;   // barycentric-interpolated position
    float3 m_normal;     // interpolated, renormalized normal
    float3 m_tangent;    // interpolated tangent
    float3 m_bitangent;  // interpolated bitangent
    float2 m_uv;         // interpolated UV; zero when the mesh has no UV stream
};
|
||||
|
||||
// Interpolates the hit triangle's vertex attributes at the hit point using the
// built-in barycentric coordinates. Position/normal/tangent/bitangent streams
// are always read; UV is read only when MESH_BUFFER_FLAG_UV is set (otherwise
// m_uv stays zero). Streams are raw byte buffers in the m_meshBuffers
// unbounded array: float3 stride = 12 bytes, float2 stride = 8 bytes.
VertexData GetHitInterpolatedVertexData(RayTracingSceneSrg::MeshInfo meshInfo, float2 builtInBarycentrics)
{
    // retrieve the poly indices
    uint3 indices = GetHitIndices(meshInfo);

    // compute barycentrics (weight of vertex 0 is 1 - u - v)
    float3 barycentrics = float3((1.0f - builtInBarycentrics.x - builtInBarycentrics.y), builtInBarycentrics.x, builtInBarycentrics.y);

    // compute the vertex data using barycentric interpolation
    VertexData vertexData = (VertexData)0;
    for (uint i = 0; i < 3; ++i)
    {
        // position
        {
            // array index of the position buffer for this mesh in the m_meshBuffers unbounded array
            uint meshVertexPositionArrayIndex = meshInfo.m_bufferStartIndex + MESH_POSITION_BUFFER_OFFSET;

            // offset into the position buffer for this vertex
            uint positionOffset = meshInfo.m_positionOffset + (indices[i] * 12);

            // load the position data
            vertexData.m_position += asfloat(RayTracingSceneSrg::m_meshBuffers[meshVertexPositionArrayIndex].Load3(positionOffset)) * barycentrics[i];
        }

        // normal
        {
            // array index of the normal buffer for this mesh in the m_meshBuffers unbounded array
            uint meshVertexNormalArrayIndex = meshInfo.m_bufferStartIndex + MESH_NORMAL_BUFFER_OFFSET;

            // offset into the normal buffer for this vertex
            uint normalOffset = meshInfo.m_normalOffset + (indices[i] * 12);

            // load the normal data
            vertexData.m_normal += asfloat(RayTracingSceneSrg::m_meshBuffers[meshVertexNormalArrayIndex].Load3(normalOffset)) * barycentrics[i];
        }

        // tangent
        {
            // array index of the tangent buffer for this mesh in the m_meshBuffers unbounded array
            uint meshVertexTangentArrayIndex = meshInfo.m_bufferStartIndex + MESH_TANGENT_BUFFER_OFFSET;

            // offset into the tangent buffer for this vertex
            uint tangentOffset = meshInfo.m_tangentOffset + (indices[i] * 12);

            // load the tangent data
            vertexData.m_tangent += asfloat(RayTracingSceneSrg::m_meshBuffers[meshVertexTangentArrayIndex].Load3(tangentOffset)) * barycentrics[i];
        }

        // bitangent
        {
            // array index of the bitangent buffer for this mesh in the m_meshBuffers unbounded array
            uint meshVertexBitangentArrayIndex = meshInfo.m_bufferStartIndex + MESH_BITANGENT_BUFFER_OFFSET;

            // offset into the bitangent buffer for this vertex
            uint bitangentOffset = meshInfo.m_bitangentOffset + (indices[i] * 12);

            // load the bitangent data
            vertexData.m_bitangent += asfloat(RayTracingSceneSrg::m_meshBuffers[meshVertexBitangentArrayIndex].Load3(bitangentOffset)) * barycentrics[i];
        }

        // optional streams begin after MESH_BITANGENT_BUFFER_OFFSET
        // (reset each iteration; each present optional stream consumes one slot)
        uint optionalBufferOffset = MESH_BITANGENT_BUFFER_OFFSET + 1;

        // UV
        if (meshInfo.m_bufferFlags & MESH_BUFFER_FLAG_UV)
        {
            // array index of the UV buffer for this mesh in the m_meshBuffers unbounded array
            uint meshVertexUVArrayIndex = meshInfo.m_bufferStartIndex + optionalBufferOffset++;

            // offset into the UV buffer for this vertex
            uint uvOffset = meshInfo.m_uvOffset + (indices[i] * 8);

            // load the UV data
            vertexData.m_uv += asfloat(RayTracingSceneSrg::m_meshBuffers[meshVertexUVArrayIndex].Load2(uvOffset)) * barycentrics[i];
        }
    }

    // NOTE(review): only the normal is renormalized after interpolation;
    // tangent/bitangent are left as interpolated — confirm downstream code
    // normalizes them if required.
    vertexData.m_normal = normalize(vertexData.m_normal);

    return vertexData;
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue