From 79e32ce4e5ba62099bdbacb24d26e606663c93bf Mon Sep 17 00:00:00 2001
From: jromnoa <80134229+jromnoa@users.noreply.github.com>
Date: Tue, 15 Feb 2022 16:29:31 -0800
Subject: [PATCH] moving benchmark tests into their own file to prevent
 timeout failures of other tests potentially stopping the benchmarks

Signed-off-by: jromnoa <80134229+jromnoa@users.noreply.github.com>
---
 .../Gem/PythonTests/Atom/CMakeLists.txt      | 14 +++++
 .../Atom/TestSuite_Benchmark_GPU.py          | 59 +++++++++++++++++++
 .../PythonTests/Atom/TestSuite_Main_GPU.py   | 44 --------------
 3 files changed, 73 insertions(+), 44 deletions(-)
 create mode 100644 AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Benchmark_GPU.py

diff --git a/AutomatedTesting/Gem/PythonTests/Atom/CMakeLists.txt b/AutomatedTesting/Gem/PythonTests/Atom/CMakeLists.txt
index 26eb96e7ec..5c85dda9c0 100644
--- a/AutomatedTesting/Gem/PythonTests/Atom/CMakeLists.txt
+++ b/AutomatedTesting/Gem/PythonTests/Atom/CMakeLists.txt
@@ -47,4 +47,18 @@ if(PAL_TRAIT_BUILD_HOST_TOOLS AND PAL_TRAIT_BUILD_TESTS_SUPPORTED)
         COMPONENT
             Atom
     )
+    ly_add_pytest(
+        NAME AutomatedTesting::Atom_TestSuite_Benchmark_GPU
+        TEST_SUITE main
+        TEST_REQUIRES gpu
+        TEST_SERIAL
+        TIMEOUT 700
+        PATH ${CMAKE_CURRENT_LIST_DIR}/TestSuite_Benchmark_GPU.py
+        RUNTIME_DEPENDENCIES
+            AssetProcessor
+            AutomatedTesting.Assets
+            Editor
+        COMPONENT
+            Atom
+    )
 endif()
diff --git a/AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Benchmark_GPU.py b/AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Benchmark_GPU.py
new file mode 100644
index 0000000000..08c8f4b0e7
--- /dev/null
+++ b/AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Benchmark_GPU.py
@@ -0,0 +1,59 @@
+"""
+Copyright (c) Contributors to the Open 3D Engine Project.
+For complete copyright and license terms please see the LICENSE at the root of this distribution.
+
+SPDX-License-Identifier: Apache-2.0 OR MIT
+"""
+import logging
+import os
+
+import pytest
+
+import editor_python_test_tools.hydra_test_utils as hydra
+from ly_test_tools.benchmark.data_aggregator import BenchmarkDataAggregator
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.mark.parametrize('rhi', ['dx12', 'vulkan'])
+@pytest.mark.parametrize("project", ["AutomatedTesting"])
+@pytest.mark.parametrize("launcher_platform", ["windows_editor"])
+@pytest.mark.parametrize("level", ["AtomFeatureIntegrationBenchmark"])
+class TestPerformanceBenchmarkSuite(object):
+    def test_AtomFeatureIntegrationBenchmarkTest_UploadMetrics(
+            self, request, editor, workspace, rhi, project, launcher_platform, level):
+        """
+        Please review the hydra script run by this test for more specific test info.
+        Tests the performance of the AtomFeatureIntegrationBenchmark level.
+        """
+        expected_lines = [
+            "Benchmark metadata captured.",
+            "Pass timestamps captured.",
+            "CPU frame time captured.",
+            "Captured data successfully.",
+            "Exited game mode"
+        ]
+
+        unexpected_lines = [
+            "Failed to capture data.",
+            "Failed to capture pass timestamps.",
+            "Failed to capture CPU frame time.",
+            "Failed to capture benchmark metadata."
+        ]
+
+        hydra.launch_and_validate_results(
+            request,
+            os.path.join(os.path.dirname(__file__), "tests"),
+            editor,
+            "hydra_GPUTest_AtomFeatureIntegrationBenchmark.py",
+            timeout=600,
+            expected_lines=expected_lines,
+            unexpected_lines=unexpected_lines,
+            halt_on_unexpected=True,
+            cfg_args=[level],
+            null_renderer=False,
+            enable_prefab_system=False,
+        )
+
+        aggregator = BenchmarkDataAggregator(workspace, logger, 'periodic')
+        aggregator.upload_metrics(rhi)
diff --git a/AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Main_GPU.py b/AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Main_GPU.py
index ad2662113b..fd1f3ca2b6 100644
--- a/AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Main_GPU.py
+++ b/AutomatedTesting/Gem/PythonTests/Atom/TestSuite_Main_GPU.py
@@ -156,50 +156,6 @@ class TestAutomation(EditorTestSuite):
                                  similarity_threshold=0.96) is True
 
 
-@pytest.mark.parametrize('rhi', ['dx12', 'vulkan'])
-@pytest.mark.parametrize("project", ["AutomatedTesting"])
-@pytest.mark.parametrize("launcher_platform", ["windows_editor"])
-@pytest.mark.parametrize("level", ["AtomFeatureIntegrationBenchmark"])
-class TestPerformanceBenchmarkSuite(object):
-    def test_AtomFeatureIntegrationBenchmark(
-            self, request, editor, workspace, rhi, project, launcher_platform, level):
-        """
-        Please review the hydra script run by this test for more specific test info.
-        Tests the performance of the Simple level.
-        """
-        expected_lines = [
-            "Benchmark metadata captured.",
-            "Pass timestamps captured.",
-            "CPU frame time captured.",
-            "Captured data successfully.",
-            "Exited game mode"
-        ]
-
-        unexpected_lines = [
-            "Failed to capture data.",
-            "Failed to capture pass timestamps.",
-            "Failed to capture CPU frame time.",
-            "Failed to capture benchmark metadata."
-        ]
-
-        hydra.launch_and_validate_results(
-            request,
-            os.path.join(os.path.dirname(__file__), "tests"),
-            editor,
-            "hydra_GPUTest_AtomFeatureIntegrationBenchmark.py",
-            timeout=600,
-            expected_lines=expected_lines,
-            unexpected_lines=unexpected_lines,
-            halt_on_unexpected=True,
-            cfg_args=[level],
-            null_renderer=False,
-            enable_prefab_system=False,
-        )
-
-        aggregator = BenchmarkDataAggregator(workspace, logger, 'periodic')
-        aggregator.upload_metrics(rhi)
-
-
 @pytest.mark.parametrize("project", ["AutomatedTesting"])
 @pytest.mark.parametrize("launcher_platform", ['windows_generic'])
 class TestMaterialEditor(object):