@@ -9,18 +9,18 @@ import logging
 import os
 import pytest
 import typing
 from datetime import datetime
 
 import ly_test_tools.log.log_monitor
+from AWS.common import constants
+from .aws_metrics_custom_thread import AWSMetricsThread
 
 # fixture imports
 from assetpipeline.ap_fixtures.asset_processor_fixture import asset_processor
 from .aws_metrics_utils import aws_metrics_utils
-from .aws_metrics_custom_thread import AWSMetricsThread
 
 AWS_METRICS_FEATURE_NAME = 'AWSMetrics'
-GAME_LOG_NAME = 'Game.log'
+CONTEXT_VARIABLE = ['-c', 'batch_processing=true']
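+# CONTEXT_VARIABLE is handed to the CDK deployment through the 'deployment_params'
+# parametrization below; 'batch_processing=true' is a CDK context flag that, per the
+# batch analytics flow this test exercises, enables the batch processing resources.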
 
 logger = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ def setup(launcher: pytest.fixture,
     asset_processor.start()
     asset_processor.wait_for_idle()
 
-    file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), GAME_LOG_NAME)
+    file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), constants.GAME_LOG_NAME)
 
     # Initialize the log monitor.
     log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor)
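+    # monitor_metrics_submission() drives this monitor later, asserting on the
+    # expected/unexpected lines that the launcher writes to Game.log.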
@@ -73,23 +73,26 @@ def monitor_metrics_submission(log_monitor: pytest.fixture) -> None:
                     f'unexpected_lines values: {unexpected_lines}')
 
 
-def query_metrics_from_s3(aws_metrics_utils: pytest.fixture, stack_name: str) -> None:
+def query_metrics_from_s3(aws_metrics_utils: pytest.fixture, resource_mappings: pytest.fixture, stack_name: str) -> None:
     """
     Verify that the metrics events are delivered to the S3 bucket and can be queried.
-    aws_metrics_utils: aws_metrics_utils fixture.
-    stack_name: name of the CloudFormation stack.
+    :param aws_metrics_utils: aws_metrics_utils fixture.
+    :param resource_mappings: resource_mappings fixture.
+    :param stack_name: name of the CloudFormation stack.
     """
-    analytics_bucket_name = aws_metrics_utils.get_analytics_bucket_name(stack_name)
-    aws_metrics_utils.verify_s3_delivery(analytics_bucket_name)
+    aws_metrics_utils.verify_s3_delivery(
+        resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsBucketName')
+    )
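+    # 'AWSMetrics.AnalyticsBucketName' is a key in the resource mapping file, so the
+    # bucket name no longer has to be derived from the CloudFormation stack name.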
     logger.info('Metrics are sent to S3.')
 
-    aws_metrics_utils.run_glue_crawler(f'{stack_name}-EventsCrawler')
+    aws_metrics_utils.run_glue_crawler(
+        resource_mappings.get_resource_name_id('AWSMetrics.EventsCrawlerName'))
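+    # Running the Glue crawler catalogs the newly delivered S3 objects so that the
+    # Athena named queries below have an up-to-date table to run against.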
+    # Remove the events_json table if it exists so that the sample query can create a table with the same name.
+    aws_metrics_utils.delete_table(f'{stack_name}-eventsdatabase', 'events_json')
     aws_metrics_utils.run_named_queries(f'{stack_name}-AthenaWorkGroup')
     logger.info('Query metrics from S3 successfully.')
-
-    # Empty the S3 bucket. S3 buckets can only be deleted successfully when they don't contain any objects.
-    aws_metrics_utils.empty_batch_analytics_bucket(analytics_bucket_name)
 
 
 def verify_operational_metrics(aws_metrics_utils: pytest.fixture, stack_name: str, start_time: datetime) -> None:
     """
@@ -102,7 +105,7 @@ def verify_operational_metrics(aws_metrics_utils: pytest.fixture, stack_name: str, start_time: datetime) -> None:
         'AWS/Lambda',
         'Invocations',
         [{'Name': 'FunctionName',
-          'Value': f'{stack_name}-AnalyticsProcessingLambdaName'}],
+          'Value': f'{stack_name}-AnalyticsProcessingLambda'}],
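+          # The FunctionName dimension must match the Lambda function's exact name,
+          # otherwise the CloudWatch metric query returns no datapoints; the old value
+          # carried a stray 'Name' suffix.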
         start_time)
     logger.info('AnalyticsProcessingLambda metrics are sent to CloudWatch.')
@@ -115,50 +118,59 @@ def verify_operational_metrics(aws_metrics_utils: pytest.fixture, stack_name: str, start_time: datetime) -> None:
     logger.info('EventsProcessingLambda metrics are sent to CloudWatch.')
 
 
-def start_kinesis_analytics_application(aws_metrics_utils: pytest.fixture, stack_name: str) -> None:
+def update_kinesis_analytics_application_status(aws_metrics_utils: pytest.fixture,
+                                                resource_mappings: pytest.fixture, start_application: bool) -> None:
     """
-    Start the Kinesis analytics application for real-time analytics.
-    aws_metrics_utils: aws_metrics_utils fixture.
-    stack_name: name of the CloudFormation stack.
+    Update the Kinesis analytics application to start or stop it.
+    :param aws_metrics_utils: aws_metrics_utils fixture.
+    :param resource_mappings: resource_mappings fixture.
+    :param start_application: whether to start or stop the application.
     """
-    analytics_application_name = f'{stack_name}-AnalyticsApplication'
-    aws_metrics_utils.start_kinesis_data_analytics_application(analytics_application_name)
+    if start_application:
+        aws_metrics_utils.start_kinesis_data_analytics_application(
+            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsApplicationName'))
+    else:
+        aws_metrics_utils.stop_kinesis_data_analytics_application(
+            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsApplicationName'))
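+    # The application only processes records while it is in the RUNNING state, so the
+    # test starts it before launching the game and stops it again during cleanup.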
 
 
 @pytest.mark.SUITE_periodic
 @pytest.mark.usefixtures('automatic_process_killer')
-@pytest.mark.parametrize('project', ['AutomatedTesting'])
-@pytest.mark.parametrize('level', ['AWS/Metrics'])
-@pytest.mark.parametrize('feature_name', [AWS_METRICS_FEATURE_NAME])
-@pytest.mark.usefixtures('resource_mappings')
-@pytest.mark.parametrize('resource_mappings_filename', ['default_aws_resource_mappings.json'])
 @pytest.mark.usefixtures('aws_credentials')
+@pytest.mark.usefixtures('resource_mappings')
+@pytest.mark.parametrize('assume_role_arn', [constants.ASSUME_ROLE_ARN])
+@pytest.mark.parametrize('feature_name', [AWS_METRICS_FEATURE_NAME])
+@pytest.mark.parametrize('level', ['AWS/Metrics'])
 @pytest.mark.parametrize('profile_name', ['AWSAutomationTest'])
-@pytest.mark.parametrize('region_name', ['us-west-2'])
-@pytest.mark.parametrize('assume_role_arn', ['arn:aws:iam::645075835648:role/o3de-automation-tests'])
-@pytest.mark.usefixtures('cdk')
-@pytest.mark.parametrize('session_name', ['o3de-Automation-session'])
+@pytest.mark.parametrize('project', ['AutomatedTesting'])
+@pytest.mark.parametrize('region_name', [constants.AWS_REGION])
+@pytest.mark.parametrize('resource_mappings_filename', [constants.AWS_RESOURCE_MAPPING_FILE_NAME])
+@pytest.mark.parametrize('session_name', [constants.SESSION_NAME])
+@pytest.mark.parametrize('deployment_params', [CONTEXT_VARIABLE])
+@pytest.mark.parametrize('stacks', [[f'{constants.AWS_PROJECT_NAME}-{AWS_METRICS_FEATURE_NAME}-{constants.AWS_REGION}']])
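+# The expected stack name follows the <project>-<feature>-<region> pattern that the
+# CDK application presumably uses when deploying the AWSMetrics feature stack.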
 class TestAWSMetricsWindows(object):
     """
     Test class to verify the real-time and batch analytics for metrics.
     """
+    @pytest.mark.parametrize('destroy_stacks_on_teardown', [False])
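+    # destroy_stacks_on_teardown=False keeps the deployed stacks alive for reuse;
+    # the final test in this class passes True to tear them down.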
     def test_realtime_and_batch_analytics(self,
                                           level: str,
                                           launcher: pytest.fixture,
                                           asset_processor: pytest.fixture,
                                           workspace: pytest.fixture,
                                           aws_utils: pytest.fixture,
-                                          cdk: pytest.fixture,
+                                          resource_mappings: pytest.fixture,
+                                          stacks: typing.List,
                                           aws_metrics_utils: pytest.fixture):
         """
         Verify that the metrics events are sent to CloudWatch and S3 for analytics.
         """
         # Start the Kinesis analytics application on a separate thread to avoid blocking the test.
-        kinesis_analytics_application_thread = AWSMetricsThread(target=start_kinesis_analytics_application,
-                                                                args=(aws_metrics_utils, cdk.stacks[0]))
+        kinesis_analytics_application_thread = AWSMetricsThread(target=update_kinesis_analytics_application_status,
+                                                                args=(aws_metrics_utils, resource_mappings, True))
         kinesis_analytics_application_thread.start()
+        # Clear the analytics bucket objects before sending new metrics.
+        aws_metrics_utils.empty_bucket(
+            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsBucketName'))
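+        # Starting from an empty bucket ensures verify_s3_delivery() only sees objects
+        # produced by this run, and an empty bucket can later be deleted cleanly.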
 
         log_monitor = setup(launcher, asset_processor)
 
         # The Kinesis analytics application needs to be in the running state before we start the game launcher.
@@ -177,18 +189,22 @@ class TestAWSMetricsWindows(object):
             start_time)
         logger.info('Real-time metrics are sent to CloudWatch.')
 
-        # Run time-consuming verifications on separate threads to avoid blocking the test.
-        verification_threads = list()
-        verification_threads.append(
-            AWSMetricsThread(target=query_metrics_from_s3, args=(aws_metrics_utils, cdk.stacks[0])))
-        verification_threads.append(
-            AWSMetricsThread(target=verify_operational_metrics, args=(aws_metrics_utils, cdk.stacks[0], start_time)))
-        for thread in verification_threads:
+        # Run time-consuming operations on separate threads to avoid blocking the test.
+        operational_threads = list()
+        operational_threads.append(
+            AWSMetricsThread(target=query_metrics_from_s3,
+                             args=(aws_metrics_utils, resource_mappings, stacks[0])))
+        operational_threads.append(
+            AWSMetricsThread(target=verify_operational_metrics,
+                             args=(aws_metrics_utils, stacks[0], start_time)))
+        operational_threads.append(
+            AWSMetricsThread(target=update_kinesis_analytics_application_status,
+                             args=(aws_metrics_utils, resource_mappings, False)))
+        for thread in operational_threads:
             thread.start()
-        for thread in verification_threads:
+        for thread in operational_threads:
             thread.join()
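+        # AWSMetricsThread is the suite's custom thread wrapper; joining it presumably
+        # re-raises exceptions from the target so a failed verification fails the test.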
 
+    @pytest.mark.parametrize('destroy_stacks_on_teardown', [True])
     def test_unauthorized_user_request_rejected(self,
                                                 level: str,
                                                 launcher: pytest.fixture,