Update the AWS automation tests to use existing CloudFormation stacks (#3092)

Junbo Liang 4 years ago committed by GitHub
parent f7831be7ce
commit c93a18ab82

@@ -0,0 +1,26 @@
# AWS Gem Automation Tests
## Prerequisites
1. Build the O3DE Editor and AutomatedTesting.GameLauncher in Profile.
2. AWS CLI is installed and configured following [Configuration and Credential File Settings](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html); see the profile note after this list.
3. [AWS Cloud Development Kit (CDK)](https://docs.aws.amazon.com/cdk/latest/guide/getting_started.html#getting_started_install) is installed.
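
The automation tests below assume a named credentials profile called `AWSAutomationTest` (see the `profile_name` parametrize values in the test files). If you do not have one yet, it can be created with the AWS CLI, for example:
```
aws configure --profile AWSAutomationTest
```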
## Deploy CDK Applications
1. Go to the AWS IAM console and create an IAM role called o3de-automation-tests which adds your own account as a trusted entity and uses the "AdministratorAccess" permissions policy (a scripted alternative is sketched after this list).
2. Copy {engine_root}\scripts\build\Platform\Windows\deploy_cdk_applications.cmd to your engine root folder.
3. Open a Command Prompt window at the engine root and set the following environment variables:
   ```
   Set O3DE_AWS_PROJECT_NAME=AWSAUTO
   Set O3DE_AWS_DEPLOY_REGION=us-east-1
   Set ASSUME_ROLE_ARN="arn:aws:iam::{your_aws_account_id}:role/o3de-automation-tests"
   Set COMMIT_ID=HEAD
   ```
4. Deploy the CDK applications for AWS gems by running deploy_cdk_applications.cmd in the same Command Prompt window.
5. Edit AWS\common\constants.py to replace the assume role ARN with your own:
arn:aws:iam::{your_aws_account_id}:role/o3de-automation-tests
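
If you prefer to script step 1, here is a minimal boto3 sketch that creates the role with your own account as the trusted entity and attaches the AdministratorAccess managed policy. The trust-policy shape and role name follow the step above, but treat this as an assumption to review for your own account before use:

```python
import json

import boto3

iam = boto3.client('iam')
# Use the current account as the trusted entity, matching step 1.
account_id = boto3.client('sts').get_caller_identity()['Account']

trust_policy = {
    'Version': '2012-10-17',
    'Statement': [{
        'Effect': 'Allow',
        'Principal': {'AWS': f'arn:aws:iam::{account_id}:root'},
        'Action': 'sts:AssumeRole',
    }],
}

iam.create_role(
    RoleName='o3de-automation-tests',
    AssumeRolePolicyDocument=json.dumps(trust_policy),
)
iam.attach_role_policy(
    RoleName='o3de-automation-tests',
    PolicyArn='arn:aws:iam::aws:policy/AdministratorAccess',
)
```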
## Run Automation Tests
### CLI
Open a Command Prompt window at the engine root and run the following CLI command:
```
python\python.cmd -m pytest {path_to_the_test_file} --build-directory {directory_to_the_profile_build}
```
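For example, to run the AWSMetrics tests against a profile build (both paths below are hypothetical; substitute your own test file and build output directory):
```
rem Hypothetical paths; substitute your own test file and profile build output directory.
python\python.cmd -m pytest AutomatedTesting\Gem\PythonTests\AWS\Windows\Metrics --build-directory build\windows_vs2019\bin\profile
```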
### PyCharm
You can also run any specific automation test directly from PyCharm by providing the "--build-directory" argument in the Run Configuration.

@@ -9,18 +9,18 @@ import logging
 import os
 import pytest
 import typing
 from datetime import datetime
 import ly_test_tools.log.log_monitor
+from AWS.common import constants
+from .aws_metrics_custom_thread import AWSMetricsThread
 # fixture imports
 from assetpipeline.ap_fixtures.asset_processor_fixture import asset_processor
 from .aws_metrics_utils import aws_metrics_utils
-from .aws_metrics_custom_thread import AWSMetricsThread
 AWS_METRICS_FEATURE_NAME = 'AWSMetrics'
-GAME_LOG_NAME = 'Game.log'
-CONTEXT_VARIABLE = ['-c', 'batch_processing=true']
 logger = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ def setup(launcher: pytest.fixture,
     asset_processor.start()
     asset_processor.wait_for_idle()
-    file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), GAME_LOG_NAME)
+    file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), constants.GAME_LOG_NAME)
     # Initialize the log monitor.
     log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor)
@@ -73,23 +73,26 @@ def monitor_metrics_submission(log_monitor: pytest.fixture) -> None:
                     f'unexpected_lines values: {unexpected_lines}')


-def query_metrics_from_s3(aws_metrics_utils: pytest.fixture, stack_name: str) -> None:
+def query_metrics_from_s3(aws_metrics_utils: pytest.fixture, resource_mappings: pytest.fixture, stack_name: str) -> None:
     """
     Verify that the metrics events are delivered to the S3 bucket and can be queried.
-    aws_metrics_utils: aws_metrics_utils fixture.
-    stack_name: name of the CloudFormation stack.
+    :param aws_metrics_utils: aws_metrics_utils fixture.
+    :param resource_mappings: resource_mappings fixture.
+    :param stack_name: name of the CloudFormation stack.
     """
-    analytics_bucket_name = aws_metrics_utils.get_analytics_bucket_name(stack_name)
-    aws_metrics_utils.verify_s3_delivery(analytics_bucket_name)
+    aws_metrics_utils.verify_s3_delivery(
+        resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsBucketName')
+    )
     logger.info('Metrics are sent to S3.')

-    aws_metrics_utils.run_glue_crawler(f'{stack_name}-EventsCrawler')
+    aws_metrics_utils.run_glue_crawler(
+        resource_mappings.get_resource_name_id('AWSMetrics.EventsCrawlerName'))
+    # Remove the events_json table if exists so that the sample query can create a table with the same name.
+    aws_metrics_utils.delete_table(f'{stack_name}-eventsdatabase', 'events_json')
     aws_metrics_utils.run_named_queries(f'{stack_name}-AthenaWorkGroup')
     logger.info('Query metrics from S3 successfully.')
-    # Empty the S3 bucket. S3 buckets can only be deleted successfully when it doesn't contain any object.
-    aws_metrics_utils.empty_batch_analytics_bucket(analytics_bucket_name)


 def verify_operational_metrics(aws_metrics_utils: pytest.fixture, stack_name: str, start_time: datetime) -> None:
     """
@@ -102,7 +105,7 @@ def verify_operational_metrics(aws_metrics_utils: pytest.fixture, stack_name: st
         'AWS/Lambda',
         'Invocations',
         [{'Name': 'FunctionName',
-          'Value': f'{stack_name}-AnalyticsProcessingLambdaName'}],
+          'Value': f'{stack_name}-AnalyticsProcessingLambda'}],
         start_time)
     logger.info('AnalyticsProcessingLambda metrics are sent to CloudWatch.')
@@ -115,50 +118,59 @@ def verify_operational_metrics(aws_metrics_utils: pytest.fixture, stack_name: st
     logger.info('EventsProcessingLambda metrics are sent to CloudWatch.')


-def start_kinesis_analytics_application(aws_metrics_utils: pytest.fixture, stack_name: str) -> None:
+def update_kinesis_analytics_application_status(aws_metrics_utils: pytest.fixture,
+                                                resource_mappings: pytest.fixture, start_application: bool) -> None:
     """
-    Start the Kinesis analytics application for real-time analytics.
-    aws_metrics_utils: aws_metrics_utils fixture.
-    stack_name: name of the CloudFormation stack.
+    Update the Kinesis analytics application to start or stop it.
+    :param aws_metrics_utils: aws_metrics_utils fixture.
+    :param resource_mappings: resource_mappings fixture.
+    :param start_application: whether to start or stop the application.
     """
-    analytics_application_name = f'{stack_name}-AnalyticsApplication'
-    aws_metrics_utils.start_kinesis_data_analytics_application(analytics_application_name)
+    if start_application:
+        aws_metrics_utils.start_kinesis_data_analytics_application(
+            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsApplicationName'))
+    else:
+        aws_metrics_utils.stop_kinesis_data_analytics_application(
+            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsApplicationName'))


 @pytest.mark.SUITE_periodic
 @pytest.mark.usefixtures('automatic_process_killer')
-@pytest.mark.parametrize('project', ['AutomatedTesting'])
-@pytest.mark.parametrize('level', ['AWS/Metrics'])
-@pytest.mark.parametrize('feature_name', [AWS_METRICS_FEATURE_NAME])
-@pytest.mark.usefixtures('resource_mappings')
-@pytest.mark.parametrize('resource_mappings_filename', ['default_aws_resource_mappings.json'])
 @pytest.mark.usefixtures('aws_credentials')
+@pytest.mark.usefixtures('resource_mappings')
+@pytest.mark.parametrize('assume_role_arn', [constants.ASSUME_ROLE_ARN])
+@pytest.mark.parametrize('feature_name', [AWS_METRICS_FEATURE_NAME])
+@pytest.mark.parametrize('level', ['AWS/Metrics'])
 @pytest.mark.parametrize('profile_name', ['AWSAutomationTest'])
-@pytest.mark.parametrize('region_name', ['us-west-2'])
-@pytest.mark.parametrize('assume_role_arn', ['arn:aws:iam::645075835648:role/o3de-automation-tests'])
-@pytest.mark.usefixtures('cdk')
-@pytest.mark.parametrize('session_name', ['o3de-Automation-session'])
-@pytest.mark.parametrize('deployment_params', [CONTEXT_VARIABLE])
+@pytest.mark.parametrize('project', ['AutomatedTesting'])
+@pytest.mark.parametrize('region_name', [constants.AWS_REGION])
+@pytest.mark.parametrize('resource_mappings_filename', [constants.AWS_RESOURCE_MAPPING_FILE_NAME])
+@pytest.mark.parametrize('session_name', [constants.SESSION_NAME])
+@pytest.mark.parametrize('stacks', [[f'{constants.AWS_PROJECT_NAME}-{AWS_METRICS_FEATURE_NAME}-{constants.AWS_REGION}']])
 class TestAWSMetricsWindows(object):
     """
     Test class to verify the real-time and batch analytics for metrics.
     """

-    @pytest.mark.parametrize('destroy_stacks_on_teardown', [False])
     def test_realtime_and_batch_analytics(self,
                                           level: str,
                                           launcher: pytest.fixture,
                                           asset_processor: pytest.fixture,
                                           workspace: pytest.fixture,
                                           aws_utils: pytest.fixture,
-                                          cdk: pytest.fixture,
+                                          resource_mappings: pytest.fixture,
+                                          stacks: typing.List,
                                           aws_metrics_utils: pytest.fixture):
         """
         Verify that the metrics events are sent to CloudWatch and S3 for analytics.
         """
         # Start Kinesis analytics application on a separate thread to avoid blocking the test.
-        kinesis_analytics_application_thread = AWSMetricsThread(target=start_kinesis_analytics_application,
-                                                                args=(aws_metrics_utils, cdk.stacks[0]))
+        kinesis_analytics_application_thread = AWSMetricsThread(target=update_kinesis_analytics_application_status,
+                                                                args=(aws_metrics_utils, resource_mappings, True))
         kinesis_analytics_application_thread.start()
+        # Clear the analytics bucket objects before sending new metrics.
+        aws_metrics_utils.empty_bucket(
+            resource_mappings.get_resource_name_id('AWSMetrics.AnalyticsBucketName'))
         log_monitor = setup(launcher, asset_processor)

         # Kinesis analytics application needs to be in the running state before we start the game launcher.
@@ -177,18 +189,22 @@ class TestAWSMetricsWindows(object):
                                                start_time)
         logger.info('Real-time metrics are sent to CloudWatch.')

-        # Run time-consuming verifications on separate threads to avoid blocking the test.
-        verification_threads = list()
-        verification_threads.append(
-            AWSMetricsThread(target=query_metrics_from_s3, args=(aws_metrics_utils, cdk.stacks[0])))
-        verification_threads.append(
-            AWSMetricsThread(target=verify_operational_metrics, args=(aws_metrics_utils, cdk.stacks[0], start_time)))
-        for thread in verification_threads:
+        # Run time-consuming operations on separate threads to avoid blocking the test.
+        operational_threads = list()
+        operational_threads.append(
+            AWSMetricsThread(target=query_metrics_from_s3,
+                             args=(aws_metrics_utils, resource_mappings, stacks[0])))
+        operational_threads.append(
+            AWSMetricsThread(target=verify_operational_metrics,
+                             args=(aws_metrics_utils, stacks[0], start_time)))
+        operational_threads.append(
+            AWSMetricsThread(target=update_kinesis_analytics_application_status,
+                             args=(aws_metrics_utils, resource_mappings, False)))
+        for thread in operational_threads:
             thread.start()
-        for thread in verification_threads:
+        for thread in operational_threads:
             thread.join()

-    @pytest.mark.parametrize('destroy_stacks_on_teardown', [True])
     def test_unauthorized_user_request_rejected(self,
                                                 level: str,
                                                 launcher: pytest.fixture,

@@ -198,7 +198,7 @@ class AWSMetricsUtils:
         assert state == 'SUCCEEDED', f'Failed to run the named query {named_query.get("Name", {})}'

-    def empty_batch_analytics_bucket(self, bucket_name: str) -> None:
+    def empty_bucket(self, bucket_name: str) -> None:
         """
         Empty the S3 bucket following:
         https://boto3.amazonaws.com/v1/documentation/api/latest/guide/migrations3.html
@@ -211,25 +211,18 @@ class AWSMetricsUtils:
         for key in bucket.objects.all():
             key.delete()

-    def get_analytics_bucket_name(self, stack_name: str) -> str:
+    def delete_table(self, database_name: str, table_name: str) -> None:
         """
-        Get the name of the deployed S3 bucket.
-        :param stack_name: Name of the CloudFormation stack.
-        :return: Name of the deployed S3 bucket.
-        """
-        client = self._aws_util.client('cloudformation')
-        response = client.describe_stack_resources(
-            StackName=stack_name
+        Delete an existing Glue table.
+        :param database_name: Name of the Glue database.
+        :param table_name: Name of the table to delete.
+        """
+        client = self._aws_util.client('glue')
+        client.delete_table(
+            DatabaseName=database_name,
+            Name=table_name
         )
-        resources = response.get('StackResources', [])
-        for resource in resources:
-            if resource.get('ResourceType') == 'AWS::S3::Bucket':
-                return resource.get('PhysicalResourceId', '')
-        return ''

 @pytest.fixture(scope='function')

@@ -45,7 +45,8 @@ class KinesisAnalyticsApplicationUpdatedWaiter(CustomWaiter):
 class GlueCrawlerReadyWaiter(CustomWaiter):
     """
     Subclass of the base custom waiter class.
-    Wait for the Glue crawler to finish its processing.
+    Wait for the Glue crawler to finish its processing. Return when the crawler is in the "Stopping" status
+    to avoid wasting too much time in the automation tests on its shutdown process.
     """

     def __init__(self, client: botocore.client):
         """
@@ -57,7 +58,7 @@ class GlueCrawlerReadyWaiter(CustomWaiter):
             'GlueCrawlerReady',
             'GetCrawler',
             'Crawler.State',
-            {'READY': WaitState.SUCCESS},
+            {'STOPPING': WaitState.SUCCESS},
             client)

     def wait(self, crawler_name):
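
For reference, the acceptor above can also be expressed with botocore's generic waiter model. This is a minimal sketch; the polling delay, attempt limit, and crawler name are illustrative assumptions, not values from this commit:

```python
import boto3
from botocore.waiter import WaiterModel, create_waiter_with_client

# Succeed as soon as GetCrawler reports the 'STOPPING' state instead of
# waiting for the crawler to settle back into 'READY'.
waiter_config = {
    'version': 2,
    'waiters': {
        'GlueCrawlerReady': {
            'operation': 'GetCrawler',
            'delay': 10,         # seconds between polls (assumed)
            'maxAttempts': 60,   # assumed upper bound
            'acceptors': [{
                'matcher': 'path',
                'argument': 'Crawler.State',
                'expected': 'STOPPING',
                'state': 'success',
            }],
        }
    }
}

glue = boto3.client('glue')
waiter = create_waiter_with_client('GlueCrawlerReady', WaiterModel(waiter_config), glue)
waiter.wait(Name='hypothetical-events-crawler')  # kwargs are forwarded to GetCrawler
```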

@@ -1,7 +0,0 @@
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""

@@ -1,248 +0,0 @@
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.

SPDX-License-Identifier: Apache-2.0 OR MIT
"""

import os
import pytest
import boto3
import uuid
import logging
import subprocess
import botocore

import ly_test_tools.environment.process_utils as process_utils
from typing import List

BOOTSTRAP_STACK_NAME = 'CDKToolkit'
BOOTSTRAP_STAGING_BUCKET_LOGIC_ID = 'StagingBucket'

logger = logging.getLogger(__name__)


class Cdk:
    """
    Cdk class that provides methods to run cdk application commands.
    Expects system to have NodeJS, AWS CLI and CDK installed globally and have their paths setup as env variables.
    """

    def __init__(self):
        self._cdk_env = ''
        self._stacks = []
        self._cdk_path = os.path.dirname(os.path.realpath(__file__))
        self._session = ''

        cdk_npm_latest_version_cmd = ['npm', 'view', 'aws-cdk', 'version']
        output = process_utils.check_output(
            cdk_npm_latest_version_cmd,
            cwd=self._cdk_path,
            shell=True)
        cdk_npm_latest_version = output.split()[0]

        cdk_version_cmd = ['cdk', 'version']
        output = process_utils.check_output(
            cdk_version_cmd,
            cwd=self._cdk_path,
            shell=True)
        cdk_version = output.split()[0]
        logger.info(f'Current CDK version {cdk_version}')

        if cdk_version != cdk_npm_latest_version:
            try:
                logger.info(f'Updating CDK to latest')
                # uninstall and reinstall cdk in case npm has been updated.
                output = process_utils.check_output(
                    'npm uninstall -g aws-cdk',
                    cwd=self._cdk_path,
                    shell=True)
                logger.info(f'Uninstall CDK output: {output}')
                output = process_utils.check_output(
                    'npm install -g aws-cdk@latest',
                    cwd=self._cdk_path,
                    shell=True)
                logger.info(f'Install CDK output: {output}')
            except subprocess.CalledProcessError as error:
                logger.warning(f'Failed reinstalling latest CDK on npm'
                               f'\nError:{error.stderr}')

    def setup(self, cdk_path: str, project: str, account_id: str,
              workspace: pytest.fixture, session: boto3.session.Session):
        """
        :param cdk_path: Path where cdk app.py is stored.
        :param project: Project name used for cdk project name env variable.
        :param account_id: AWS account id to use with cdk application.
        :param workspace: ly_test_tools workspace fixture.
        :param session: Current boto3 session, provides credentials and region.
        """
        self._cdk_env = os.environ.copy()
        unique_id = uuid.uuid4().hex[-4:]
        self._cdk_env['O3DE_AWS_PROJECT_NAME'] = project[:4] + unique_id if len(project) > 4 else project + unique_id
        self._cdk_env['O3DE_AWS_DEPLOY_REGION'] = session.region_name
        self._cdk_env['O3DE_AWS_DEPLOY_ACCOUNT'] = account_id
        self._cdk_env['PATH'] = f'{workspace.paths.engine_root()}\\python;' + self._cdk_env['PATH']
        credentials = session.get_credentials().get_frozen_credentials()
        self._cdk_env['AWS_ACCESS_KEY_ID'] = credentials.access_key
        self._cdk_env['AWS_SECRET_ACCESS_KEY'] = credentials.secret_key
        self._cdk_env['AWS_SESSION_TOKEN'] = credentials.token
        self._cdk_path = cdk_path
        self._session = session

        output = process_utils.check_output(
            'python -m pip install -r requirements.txt',
            cwd=self._cdk_path,
            env=self._cdk_env,
            shell=True)
        logger.info(f'Installing cdk python dependencies: {output}')

        self.bootstrap()

    def bootstrap(self) -> None:
        """
        Deploy the bootstrap stack.
        """
        try:
            bootstrap_cmd = ['cdk', 'bootstrap',
                             f'aws://{self._cdk_env["O3DE_AWS_DEPLOY_ACCOUNT"]}/{self._cdk_env["O3DE_AWS_DEPLOY_REGION"]}']
            process_utils.check_call(
                bootstrap_cmd,
                cwd=self._cdk_path,
                env=self._cdk_env,
                shell=True)
        except botocore.exceptions.ClientError as clientError:
            logger.warning(f'Failed creating Bootstrap stack {BOOTSTRAP_STACK_NAME} not found. '
                           f'\nError:{clientError["Error"]["Message"]}')

    def list(self, deployment_params: List[str] = None) -> List[str]:
        """
        lists cdk stack names.
        :param deployment_params: Deployment parameters like --all can be passed in this way.
        :return List of cdk stack names.
        """
        if not self._cdk_path:
            return []

        list_cdk_application_cmd = ['cdk', 'list']
        if deployment_params:
            list_cdk_application_cmd.extend(deployment_params)
        output = process_utils.check_output(
            list_cdk_application_cmd,
            cwd=self._cdk_path,
            env=self._cdk_env,
            shell=True)
        return output.splitlines()

    def synthesize(self, deployment_params: List[str] = None) -> None:
        """
        Synthesizes all cdk stacks.
        :param deployment_params: Deployment parameters like --all can be passed in this way.
        """
        if not self._cdk_path:
            return

        synth_cdk_application_cmd = ['cdk', 'synth']
        if deployment_params:
            synth_cdk_application_cmd.extend(deployment_params)
        process_utils.check_output(
            synth_cdk_application_cmd,
            cwd=self._cdk_path,
            env=self._cdk_env,
            shell=True)

    def deploy(self, deployment_params: List[str] = None) -> List[str]:
        """
        Deploys all the CDK stacks.
        :param deployment_params: Deployment parameters like --all can be passed in this way.
        :return List of deployed stack arns.
        """
        if not self._cdk_path:
            return []

        deploy_cdk_application_cmd = ['cdk', 'deploy', '--require-approval', 'never']
        if deployment_params:
            deploy_cdk_application_cmd.extend(deployment_params)
        output = process_utils.check_output(
            deploy_cdk_application_cmd,
            cwd=self._cdk_path,
            env=self._cdk_env,
            shell=True)
        for line in output.splitlines():
            line_sections = line.split('/')
            assert len(line_sections), 3
            self._stacks.append(line.split('/')[-2])
        return self._stacks

    def destroy(self, deployment_params: List[str] = None) -> None:
        """
        Destroys the cdk application.
        :param deployment_params: Deployment parameters like --all can be passed in this way.
        """
        logger.info(f'CDK Path {self._cdk_path}')
        destroy_cdk_application_cmd = ['cdk', 'destroy', '-f']
        if deployment_params:
            destroy_cdk_application_cmd.extend(deployment_params)
        try:
            process_utils.check_output(
                destroy_cdk_application_cmd,
                cwd=self._cdk_path,
                env=self._cdk_env,
                shell=True)
        except subprocess.CalledProcessError as e:
            logger.error(e.output)
            raise e
        self._stacks = []

    def remove_bootstrap_stack(self) -> None:
        """
        Remove the CDK bootstrap stack.
        :param aws_utils: aws_utils fixture.
        """
        # Check if the bootstrap stack exists.
        response = self._session.client('cloudformation').describe_stacks(
            StackName=BOOTSTRAP_STACK_NAME
        )
        stacks = response.get('Stacks', [])
        if not stacks or len(stacks) is 0:
            return

        # Clear the bootstrap staging bucket before deleting the bootstrap stack.
        response = self._session.client('cloudformation').describe_stack_resource(
            StackName=BOOTSTRAP_STACK_NAME,
            LogicalResourceId=BOOTSTRAP_STAGING_BUCKET_LOGIC_ID
        )
        staging_bucket_name = response.get('StackResourceDetail', {}).get('PhysicalResourceId', '')
        if staging_bucket_name:
            s3 = self._session.resource('s3')
            bucket = s3.Bucket(staging_bucket_name)
            for key in bucket.objects.all():
                key.delete()

        # Delete the bootstrap stack.
        # Should not need to delete the stack if S3 bucket can be cleaned.
        # self._session.client('cloudformation').delete_stack(
        #     StackName=BOOTSTRAP_STACK_NAME
        # )

    @property
    def stacks(self):
        return self._stacks

@@ -4,45 +4,42 @@ For complete copyright and license terms please see the LICENSE at the root of t
 SPDX-License-Identifier: Apache-2.0 OR MIT
 """

-import pytest
-import os
 import logging
+import os
+import pytest

 import ly_test_tools.log.log_monitor

+from AWS.common import constants
+
 # fixture imports
 from assetpipeline.ap_fixtures.asset_processor_fixture import asset_processor

-AWS_PROJECT_NAME = 'AWS-AutomationTest'
 AWS_CLIENT_AUTH_FEATURE_NAME = 'AWSClientAuth'
-AWS_CLIENT_AUTH_DEFAULT_PROFILE_NAME = 'default'
-GAME_LOG_NAME = 'Game.log'

 logger = logging.getLogger(__name__)


 @pytest.mark.SUITE_periodic
-@pytest.mark.usefixtures('automatic_process_killer')
 @pytest.mark.usefixtures('asset_processor')
+@pytest.mark.usefixtures('automatic_process_killer')
+@pytest.mark.usefixtures('aws_utils')
 @pytest.mark.usefixtures('workspace')
-@pytest.mark.parametrize('project', ['AutomatedTesting'])
-@pytest.mark.usefixtures('cdk')
+@pytest.mark.parametrize('assume_role_arn', [constants.ASSUME_ROLE_ARN])
 @pytest.mark.parametrize('feature_name', [AWS_CLIENT_AUTH_FEATURE_NAME])
+@pytest.mark.parametrize('project', ['AutomatedTesting'])
 @pytest.mark.usefixtures('resource_mappings')
-@pytest.mark.parametrize('resource_mappings_filename', ['default_aws_resource_mappings.json'])
-@pytest.mark.usefixtures('aws_utils')
-@pytest.mark.parametrize('region_name', ['us-west-2'])
-@pytest.mark.parametrize('assume_role_arn', ['arn:aws:iam::645075835648:role/o3de-automation-tests'])
-@pytest.mark.parametrize('session_name', ['o3de-Automation-session'])
-@pytest.mark.usefixtures('cdk')
-@pytest.mark.parametrize('deployment_params', [[]])
+@pytest.mark.parametrize('resource_mappings_filename', [constants.AWS_RESOURCE_MAPPING_FILE_NAME])
+@pytest.mark.parametrize('region_name', [constants.AWS_REGION])
+@pytest.mark.parametrize('session_name', [constants.SESSION_NAME])
+@pytest.mark.parametrize('stacks', [[f'{constants.AWS_PROJECT_NAME}-{AWS_CLIENT_AUTH_FEATURE_NAME}-Stack-{constants.AWS_REGION}']])
 class TestAWSClientAuthWindows(object):
     """
     Test class to verify AWS Client Auth gem features on Windows.
     """

     @pytest.mark.parametrize('level', ['AWS/ClientAuth'])
-    @pytest.mark.parametrize('destroy_stacks_on_teardown', [False])
     def test_anonymous_credentials(self,
                                    level: str,
                                    launcher: pytest.fixture,
@@ -53,14 +50,14 @@ class TestAWSClientAuthWindows(object):
         """
         Test to verify AWS Cognito Identity pool anonymous authorization.

-        Setup: Deploys cdk and updates resource mapping file.
+        Setup: Updates resource mapping file using existing CloudFormation stacks.
         Tests: Getting credentials when no credentials are configured
         Verification: Log monitor looks for success credentials log.
         """
         asset_processor.start()
         asset_processor.wait_for_idle()
-        file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), GAME_LOG_NAME)
+        file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), constants.GAME_LOG_NAME)
         log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor)

         launcher.args = ['+LoadLevel', level]
@@ -74,10 +71,8 @@ class TestAWSClientAuthWindows(object):
         )
         assert result, 'Anonymous credentials fetched successfully.'

-    @pytest.mark.parametrize('destroy_stacks_on_teardown', [True])
     def test_password_signin_credentials(self,
                                          launcher: pytest.fixture,
-                                         cdk: pytest.fixture,
                                          resource_mappings: pytest.fixture,
                                          workspace: pytest.fixture,
                                          asset_processor: pytest.fixture,
@@ -86,16 +81,29 @@ class TestAWSClientAuthWindows(object):
         """
         Test to verify AWS Cognito IDP Password sign in and Cognito Identity pool authenticated authorization.

-        Setup: Deploys cdk and updates resource mapping file.
+        Setup: Updates resource mapping file using existing CloudFormation stacks.
         Tests: Sign up new test user, admin confirm the user, sign in and get aws credentials.
         Verification: Log monitor looks for success credentials log.
         """
         asset_processor.start()
         asset_processor.wait_for_idle()
-        file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), GAME_LOG_NAME)
+        file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), constants.GAME_LOG_NAME)
         log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor)

+        cognito_idp = aws_utils.client('cognito-idp')
+        user_pool_id = resource_mappings.get_resource_name_id(f'{AWS_CLIENT_AUTH_FEATURE_NAME}.CognitoUserPoolId')
+        logger.info(f'UserPoolId:{user_pool_id}')
+        # Remove the user if already exists
+        try:
+            cognito_idp.admin_delete_user(
+                UserPoolId=user_pool_id,
+                Username='test1'
+            )
+        except cognito_idp.exceptions.UserNotFoundException:
+            pass
+
         launcher.args = ['+LoadLevel', 'AWS/ClientAuthPasswordSignUp']
         launcher.args.extend(['-rhi=null'])
@@ -109,9 +117,6 @@ class TestAWSClientAuthWindows(object):
         launcher.stop()

-        cognito_idp = aws_utils.client('cognito-idp')
-        user_pool_id = resource_mappings.get_resource_name_id(f'{AWS_CLIENT_AUTH_FEATURE_NAME}.CognitoUserPoolId')
-        print(f'UserPoolId:{user_pool_id}')
         cognito_idp.admin_confirm_sign_up(
             UserPoolId=user_pool_id,
             Username='test1'

@@ -5,10 +5,11 @@ For complete copyright and license terms please see the LICENSE at the root of t
 SPDX-License-Identifier: Apache-2.0 OR MIT
 """

-import os
 import logging
-import typing
+import os
 import shutil
+import typing
+from botocore.exceptions import ClientError

 import pytest

 import ly_test_tools
@@ -16,16 +17,15 @@ import ly_test_tools.log.log_monitor
 import ly_test_tools.environment.process_utils as process_utils
 import ly_test_tools.o3de.asset_processor_utils as asset_processor_utils

-from botocore.exceptions import ClientError
+from AWS.common import constants

+# fixture imports
 from assetpipeline.ap_fixtures.asset_processor_fixture import asset_processor

 AWS_CORE_FEATURE_NAME = 'AWSCore'
-AWS_RESOURCE_MAPPING_FILE_NAME = 'default_aws_resource_mappings.json'

 process_utils.kill_processes_named("o3de", ignore_extensions=True)  # Kill ProjectManager windows

-GAME_LOG_NAME = 'Game.log'

 logger = logging.getLogger(__name__)
@@ -46,7 +46,7 @@ def setup(launcher: pytest.fixture, asset_processor: pytest.fixture) -> typing.T
     asset_processor.start()
     asset_processor.wait_for_idle()
-    file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), GAME_LOG_NAME)
+    file_to_monitor = os.path.join(launcher.workspace.paths.project_log(), constants.GAME_LOG_NAME)
     log_monitor = ly_test_tools.log.log_monitor.LogMonitor(launcher=launcher, log_file_path=file_to_monitor)
     return log_monitor, s3_download_dir
@@ -58,7 +58,7 @@ def write_test_data_to_dynamodb_table(resource_mappings: pytest.fixture, aws_uti
     :param resource_mappings: resource_mappings fixture.
     :param aws_utils: aws_utils fixture.
     """
-    table_name = resource_mappings.get_resource_name_id("AWSCore.ExampleDynamoTableOutput")
+    table_name = resource_mappings.get_resource_name_id(f'{AWS_CORE_FEATURE_NAME}.ExampleDynamoTableOutput')
     try:
         aws_utils.client('dynamodb').put_item(
             TableName=table_name,
@@ -77,21 +77,19 @@ def write_test_data_to_dynamodb_table(resource_mappings: pytest.fixture, aws_uti
 @pytest.mark.SUITE_periodic
 @pytest.mark.usefixtures('automatic_process_killer')
 @pytest.mark.usefixtures('asset_processor')
-@pytest.mark.usefixtures('cdk')
 @pytest.mark.parametrize('feature_name', [AWS_CORE_FEATURE_NAME])
-@pytest.mark.parametrize('region_name', ['us-west-2'])
-@pytest.mark.parametrize('assume_role_arn', ['arn:aws:iam::645075835648:role/o3de-automation-tests'])
-@pytest.mark.parametrize('session_name', ['o3de-Automation-session'])
+@pytest.mark.parametrize('region_name', [constants.AWS_REGION])
+@pytest.mark.parametrize('assume_role_arn', [constants.ASSUME_ROLE_ARN])
+@pytest.mark.parametrize('session_name', [constants.SESSION_NAME])
 @pytest.mark.usefixtures('workspace')
 @pytest.mark.parametrize('project', ['AutomatedTesting'])
 @pytest.mark.parametrize('level', ['AWS/Core'])
 @pytest.mark.usefixtures('resource_mappings')
-@pytest.mark.parametrize('resource_mappings_filename', [AWS_RESOURCE_MAPPING_FILE_NAME])
+@pytest.mark.parametrize('resource_mappings_filename', [constants.AWS_RESOURCE_MAPPING_FILE_NAME])
+@pytest.mark.parametrize('stacks', [[f'{constants.AWS_PROJECT_NAME}-{AWS_CORE_FEATURE_NAME}',
+                                     f'{constants.AWS_PROJECT_NAME}-{AWS_CORE_FEATURE_NAME}-Example-{constants.AWS_REGION}']])
 @pytest.mark.usefixtures('aws_credentials')
 @pytest.mark.parametrize('profile_name', ['AWSAutomationTest'])
-@pytest.mark.usefixtures('cdk')
-@pytest.mark.parametrize('deployment_params', [['--all']])
-@pytest.mark.parametrize('destroy_stacks_on_teardown', [True])
 class TestAWSCoreAWSResourceInteraction(object):
     """
     Test class to verify the scripting behavior for the AWSCore gem.
@@ -119,7 +117,7 @@ class TestAWSCoreAWSResourceInteraction(object):
                                          expected_lines: typing.List[str],
                                          unexpected_lines: typing.List[str]):
         """
-        Setup: Deploys cdk and updates resource mapping file.
+        Setup: Updates resource mapping file using existing CloudFormation stacks.
         Tests: Interact with AWS S3, DynamoDB and Lambda services.
         Verification: Script canvas nodes can communicate with AWS services successfully.
         """

@@ -1,6 +0,0 @@
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""

@@ -0,0 +1,19 @@
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
# ARN of the IAM role to assume for retrieving temporary AWS credentials
ASSUME_ROLE_ARN = 'arn:aws:iam::645075835648:role/o3de-automation-tests'
# Name of the AWS project deployed by the CDK applications
AWS_PROJECT_NAME = 'AWSAUTO'
# Region for the existing CloudFormation stacks used by the automation tests
AWS_REGION = 'us-east-1'
# Name of the default resource mapping config file used by the automation tests
AWS_RESOURCE_MAPPING_FILE_NAME = 'default_aws_resource_mappings.json'
# Name of the game launcher log
GAME_LOG_NAME = 'Game.log'
# Name of the IAM role session for retrieving temporary AWS credentials
SESSION_NAME = 'o3de-Automation-session'

@@ -6,9 +6,9 @@ SPDX-License-Identifier: Apache-2.0 OR MIT
 """
 import os
-import pytest
 import json
 import logging

+from AWS.common import constants

 logger = logging.getLogger(__name__)
@@ -22,18 +22,14 @@ class ResourceMappings:
     ResourceMappings class that handles writing Cloud formation outputs to resource mappings json file in a project.
     """

-    def __init__(self, file_path: str, region: str, feature_name: str, account_id: str, workspace: pytest.fixture,
-                 cloud_formation_client):
+    def __init__(self, file_path: str, region: str, feature_name: str, account_id: str, cloud_formation_client):
         """
         :param file_path: Path for the resource mapping file.
         :param region: Region value for the resource mapping file.
         :param feature_name: Feature gem name to use to append name to mappings key.
         :param account_id: AWS account id value for the resource mapping file.
-        :param workspace: ly_test_tools workspace fixture.
         :param cloud_formation_client: AWS cloud formation client.
         """
-        self._cdk_env = os.environ.copy()
-        self._cdk_env['PATH'] = f'{workspace.paths.engine_root()}\\python;' + self._cdk_env['PATH']
         self._resource_mapping_file_path = file_path
         self._region = region
         self._feature_name = feature_name
@@ -44,7 +40,7 @@ class ResourceMappings:
             f'Invalid resource mapping file path {self._resource_mapping_file_path}'
         self._client = cloud_formation_client

-    def populate_output_keys(self, stacks=[]) -> None:
+    def populate_output_keys(self, stacks=None) -> None:
         """
         Calls describe stacks on cloud formation service and persists outputs to resource mappings file.
         :param stacks List of stack arns to describe and populate resource mappings with.
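
As an aside, a minimal boto3 sketch of what this population step amounts to; the helper below is illustrative, not part of this commit, and the feature-name prefixing follows the `feature_name` parameter described above:

```python
import boto3

def collect_stack_outputs(stack_names, feature_name):
    """Map '<feature_name>.<OutputKey>' -> OutputValue for each stack (illustrative helper)."""
    cloud_formation = boto3.client('cloudformation')
    mappings = {}
    for stack_name in stack_names:
        stacks = cloud_formation.describe_stacks(StackName=stack_name).get('Stacks', [])
        for output in stacks[0].get('Outputs', []):
            mappings[f"{feature_name}.{output['OutputKey']}"] = output['OutputValue']
    return mappings

# e.g. collect_stack_outputs(['AWSAUTO-AWSMetrics-us-east-1'], 'AWSMetrics')
#      -> {'AWSMetrics.AnalyticsBucketName': '...', 'AWSMetrics.EventsCrawlerName': '...'}
```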
@@ -58,7 +54,7 @@ class ResourceMappings:
         self._write_resource_mappings(stacks[0].get('Outputs', []))

-    def _write_resource_mappings(self, outputs, append_feature_name = True) -> None:
+    def _write_resource_mappings(self, outputs, append_feature_name=True) -> None:
         with open(self._resource_mapping_file_path) as file_content:
             resource_mappings = json.load(file_content)
@@ -91,7 +87,7 @@ class ResourceMappings:
             resource_mappings = json.load(file_content)

         resource_mappings[AWS_RESOURCE_MAPPINGS_ACCOUNT_ID_KEY] = ''
-        resource_mappings[AWS_RESOURCE_MAPPINGS_REGION_KEY] = 'us-west-2'
+        resource_mappings[AWS_RESOURCE_MAPPINGS_REGION_KEY] = constants.AWS_REGION

         # Append new mappings.
         resource_mappings[AWS_RESOURCE_MAPPINGS_KEY] = resource_mappings.get(AWS_RESOURCE_MAPPINGS_KEY, {})

@@ -11,8 +11,7 @@ import typing
 from AWS.common.aws_utils import AwsUtils
 from AWS.common.aws_credentials import AwsCredentials
-from AWS.Windows.cdk.cdk_utils import Cdk
-from AWS.Windows.resource_mappings.resource_mappings import ResourceMappings
+from AWS.common.resource_mappings import ResourceMappings

 logger = logging.getLogger(__name__)
@@ -52,6 +51,7 @@ def resource_mappings(
         project: str,
         feature_name: str,
         resource_mappings_filename: str,
+        stacks: typing.List,
         workspace: pytest.fixture,
         aws_utils: pytest.fixture) -> ResourceMappings:
     """
@@ -61,6 +61,7 @@ def resource_mappings(
     :param project: Project to find resource mapping file.
     :param feature_name: AWS Gem name that is prepended to resource mapping keys.
     :param resource_mappings_filename: Name of resource mapping file.
+    :param stacks: List of stack names to describe and populate resource mappings with.
     :param workspace: ly_test_tools workspace fixture.
     :param aws_utils: AWS utils fixture.
     :return: ResourceMappings class object.
@@ -70,8 +71,8 @@ def resource_mappings(
     logger.info(f'Resource mapping path : {path}')
     logger.info(f'Resource mapping resolved path : {abspath(path)}')
     resource_mappings_obj = ResourceMappings(abspath(path), aws_utils.assume_session().region_name, feature_name,
-                                             aws_utils.assume_account_id(), workspace,
-                                             aws_utils.client('cloudformation'))
+                                             aws_utils.assume_account_id(), aws_utils.client('cloudformation'))
+    resource_mappings_obj.populate_output_keys(stacks)

     def teardown():
         resource_mappings_obj.clear_output_keys()
@@ -81,56 +82,6 @@ def resource_mappings(
     return resource_mappings_obj

-@pytest.fixture(scope='function')
-def cdk(
-        request: pytest.fixture,
-        project: str,
-        feature_name: str,
-        workspace: pytest.fixture,
-        aws_utils: pytest.fixture,
-        resource_mappings: pytest.fixture,
-        deployment_params: typing.List[str],
-        destroy_stacks_on_teardown: bool) -> Cdk:
-    """
-    Fixture for setting up a Cdk
-    :param request: _pytest.fixtures.SubRequest class that handles getting
-        a pytest fixture from a pytest function/fixture.
-    :param project: Project name used for cdk project name env variable.
-    :param feature_name: Feature gem name to expect cdk folder in.
-    :param workspace: ly_test_tools workspace fixture.
-    :param aws_utils: aws_utils fixture.
-    :param resource_mappings: resource_mappings fixture.
-    :param deployment_params: Parameters for the CDK application deployment.
-    :param destroy_stacks_on_teardown: option to control calling destroy ot the end of test.
-    :return Cdk class object.
-    """
-    cdk_path = f'{workspace.paths.engine_root()}/Gems/{feature_name}/cdk'
-    logger.info(f'CDK Path {cdk_path}')
-    if pytest.cdk_obj is None:
-        pytest.cdk_obj = Cdk()
-    pytest.cdk_obj.setup(cdk_path, project, aws_utils.assume_account_id(), workspace, aws_utils.assume_session())
-    stacks = pytest.cdk_obj.deploy(deployment_params=deployment_params)
-    logger.info(f'Cdk stack names:\n{stacks}')
-    resource_mappings.populate_output_keys(stacks)
-
-    def teardown():
-        if destroy_stacks_on_teardown:
-            pytest.cdk_obj.destroy(deployment_params=deployment_params)
-            # Enable after https://github.com/aws/aws-cdk/issues/986 is fixed.
-            # Until then clean the bootstrap bucket manually.
-            # pytest.cdk_obj.remove_bootstrap_stack()
-            pytest.cdk_obj = None
-
-    request.addfinalizer(teardown)
-    return pytest.cdk_obj

 @pytest.fixture(scope='function')
 def aws_credentials(request: pytest.fixture, aws_utils: pytest.fixture, profile_name: str):
     """

@@ -25,7 +25,7 @@ ACCOUNT = os.environ.get('O3DE_AWS_DEPLOY_ACCOUNT', os.environ.get('CDK_DEFAULT_
 PROJECT_NAME = os.environ.get('O3DE_AWS_PROJECT_NAME', f'O3DE-AWS-PROJECT').upper()

 # The name of this feature
-FEATURE_NAME = 'Core'
+FEATURE_NAME = 'AWSCore'

 # The name of this CDK application
 PROJECT_FEATURE_NAME = f'{PROJECT_NAME}-{FEATURE_NAME}'

@@ -96,9 +96,9 @@ class BatchAnalytics:
             ),
             athena.CfnNamedQuery(
                 self._stack,
-                id='NamedQuery-NewUsersLastMonth',
+                id='NamedQuery-LoginLastMonth',
                 name=resource_name_sanitizer.sanitize_resource_name(
-                    f'{self._stack.stack_name}-NamedQuery-NewUsersLastMonth', 'athena_named_query'),
+                    f'{self._stack.stack_name}-NamedQuery-LoginLastMonth', 'athena_named_query'),
                 database=self._events_database_name,
                 query_string="WITH detail AS ("
                              "SELECT date_trunc('month', date(date_parse(CONCAT(year, '-', month, '-', day), '%Y-%m-%d'))) as event_month, * "
@@ -107,9 +107,9 @@ class BatchAnalytics:
                              "date_trunc('month', event_month) as month, "
                              "count(*) as new_accounts "
                              "FROM detail "
-                             "WHERE event_name = 'user_registration' "
+                             "WHERE event_name = 'login' "
                              "GROUP BY date_trunc('month', event_month)",
-                description='New users over the last month',
+                description='Total number of login events over the last month',
                 work_group=self._athena_work_group.name
             )
         ]

@@ -50,8 +50,7 @@ class DataLakeIntegration:
         # a specific name here, only one customer can deploy the bucket successfully.
         self._analytics_bucket = s3.Bucket(
             self._stack,
-            id=f'AnalyticsBucket'.lower(),
-            bucket_name=resource_name_sanitizer.sanitize_resource_name(
+            id=resource_name_sanitizer.sanitize_resource_name(
                 f'{self._stack.stack_name}-AnalyticsBucket'.lower(), 's3_bucket'),
             encryption=s3.BucketEncryption.S3_MANAGED,
             block_public_access=s3.BlockPublicAccess(
@@ -68,6 +67,13 @@ class DataLakeIntegration:
         cfn_bucket = self._analytics_bucket.node.find_child('Resource')
         cfn_bucket.apply_removal_policy(core.RemovalPolicy.DESTROY)

+        analytics_bucket_output = core.CfnOutput(
+            self._stack,
+            id='AnalyticsBucketName',
+            description='Name of the S3 bucket for storing metrics event data',
+            export_name=f"{self._application_name}:AnalyticsBucket",
+            value=self._analytics_bucket.bucket_name)
+
     def _create_events_database(self) -> None:
         """
         Create the Glue database for metrics events.

@@ -182,7 +182,7 @@ class RealTimeDataProcessing:
         Generate the analytics processing lambda to send processed data to CloudWatch for visualization.
         """
         analytics_processing_function_name = resource_name_sanitizer.sanitize_resource_name(
-            f'{self._stack.stack_name}-AnalyticsProcessingLambdaName', 'lambda_function')
+            f'{self._stack.stack_name}-AnalyticsProcessingLambda', 'lambda_function')
         self._analytics_processing_lambda_role = self._create_analytics_processing_lambda_role(
             analytics_processing_function_name
         )

@@ -24,28 +24,28 @@
             "parameter_type": "string",
             "default_value": "",
             "use_last_run_value": true,
-            "description": ""
+            "description": "The name of the O3DE project that stacks should be deployed for."
         },
         {
             "parameter_name": "O3DE_AWS_DEPLOY_REGION",
             "parameter_type": "string",
             "default_value": "",
             "use_last_run_value": true,
-            "description": ""
+            "description": "The region to deploy the stacks into."
         },
         {
             "parameter_name": "ASSUME_ROLE_ARN",
             "parameter_type": "string",
             "default_value": "",
             "use_last_run_value": true,
-            "description": ""
+            "description": "The ARN of the IAM role to assume to retrieve temporary AWS credentials."
         },
         {
             "parameter_name": "COMMIT_ID",
             "parameter_type": "string",
             "default_value": "",
             "use_last_run_value": true,
-            "description": ""
+            "description": "The commit ID for locking the version of CDK applications to deploy."
         }
     ]
 }
