diff --git a/AutomatedTesting/Gem/PythonTests/CMakeLists.txt b/AutomatedTesting/Gem/PythonTests/CMakeLists.txt index 42fe9ac4a1..7931aa7178 100644 --- a/AutomatedTesting/Gem/PythonTests/CMakeLists.txt +++ b/AutomatedTesting/Gem/PythonTests/CMakeLists.txt @@ -68,7 +68,7 @@ if(PAL_TRAIT_BUILD_TESTS_SUPPORTED AND PAL_TRAIT_BUILD_HOST_TOOLS) TEST_SUITE periodic TEST_SERIAL PATH ${CMAKE_CURRENT_LIST_DIR}/scripting/TestSuite_Active.py - TIMEOUT 1500 + TIMEOUT 3000 RUNTIME_DEPENDENCIES Legacy::Editor AZ::AssetProcessor diff --git a/AutomatedTesting/Gem/PythonTests/EditorPythonTestTools/editor_python_test_tools/utils.py b/AutomatedTesting/Gem/PythonTests/EditorPythonTestTools/editor_python_test_tools/utils.py index a9f6d0aa02..c7f1aba031 100644 --- a/AutomatedTesting/Gem/PythonTests/EditorPythonTestTools/editor_python_test_tools/utils.py +++ b/AutomatedTesting/Gem/PythonTests/EditorPythonTestTools/editor_python_test_tools/utils.py @@ -265,6 +265,7 @@ class Tracer: self.warnings = [] self.errors = [] self.asserts = [] + self.prints = [] self.has_warnings = False self.has_errors = False self.has_asserts = False @@ -310,6 +311,11 @@ class Tracer: def __repr__(self): return f"[Assert: {self.message}]" + + class PrintInfo: + def __init__(self, args): + self.window = args[0] + self.message = args[1] def _on_warning(self, args): warningInfo = Tracer.WarningInfo(args) @@ -331,13 +337,19 @@ class Tracer: Report.info("Tracer caught Assert: %s:%i[%s] \"%s\"" % (assertInfo.filename, assertInfo.line, assertInfo.function, assertInfo.message)) self.has_asserts = True return False - + + def _on_printf(self, args): + printInfo = Tracer.PrintInfo(args) + self.prints.append(printInfo) + return False + def __enter__(self): self.handler = azlmbr.debug.TraceMessageBusHandler() self.handler.connect(None) self.handler.add_callback("OnPreAssert", self._on_assert) self.handler.add_callback("OnPreWarning", self._on_warning) self.handler.add_callback("OnPreError", self._on_error) + self.handler.add_callback("OnPrintf", self._on_printf) return self def __exit__(self, type, value, traceback): diff --git a/AutomatedTesting/Gem/PythonTests/scripting/AssetEditor_CreateScriptEventFile.py b/AutomatedTesting/Gem/PythonTests/scripting/AssetEditor_CreateScriptEventFile.py new file mode 100644 index 0000000000..dcbbf47f0c --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/AssetEditor_CreateScriptEventFile.py @@ -0,0 +1,124 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ +Test case ID: T92569013 +Test Case Title: Script Event file can be created +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569013 +""" + + +# fmt: off +class Tests(): + new_event_created = ("New Script Event created", "New Script Event not created") + child_event_created = ("Child Event created", "Child not created") + file_saved = ("Script event file saved", "Script event file did not save") + console_error = ("No unexpected error in console", "Error found in console") + console_warning = ("No unexpected warning in console", "Warning found in console") +# fmt: on + + +def CreateScriptEventFile(): + """ + Summary: + Script Event file can be created + + Expected Behavior: + File is created without any errors and warnings in Console + + Test Steps: + 1) Open Asset Editor + 2) Get Asset Editor Qt object + 3) Create new Script Event Asset + 4) Add new child event + 5) Save the Script Event file + 6) Verify if file is created + 7) Verify console for errors/warnings + 8) Close Asset Editor + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + import os + from utils import Report + from utils import TestHelper as helper + from utils import Tracer + import pyside_utils + + # Open 3D Engine imports + import azlmbr.legacy.general as general + import azlmbr.editor as editor + import azlmbr.bus as bus + + # Pyside imports + from PySide2 import QtWidgets + + GENERAL_WAIT = 1.0 # seconds + + FILE_PATH = os.path.join("AutomatedTesting", "ScriptCanvas", "test_file.scriptevent") + + # 1) Open Asset Editor + general.idle_enable(True) + # Initially close the Asset Editor and then reopen to ensure we don't have any existing assets open + general.close_pane("Asset Editor") + general.open_pane("Asset Editor") + helper.wait_for_condition(lambda: general.is_pane_visible("Asset Editor"), 5.0) + + # 2) Get Asset Editor Qt object + editor_window = pyside_utils.get_editor_main_window() + asset_editor_widget = editor_window.findChild(QtWidgets.QDockWidget, "Asset Editor").findChild( + QtWidgets.QWidget, "AssetEditorWindowClass" + ) + container = asset_editor_widget.findChild(QtWidgets.QWidget, "ContainerForRows") + menu_bar = asset_editor_widget.findChild(QtWidgets.QMenuBar) + + # 3) Create new Script Event Asset + action = pyside_utils.find_child_by_pattern(menu_bar, {"type": QtWidgets.QAction, "text": "Script Events"}) + action.trigger() + result = helper.wait_for_condition( + lambda: container.findChild(QtWidgets.QFrame, "Events") is not None, 3 * GENERAL_WAIT + ) + Report.result(Tests.new_event_created, result) + + # 4) Add new child event + add_event = container.findChild(QtWidgets.QFrame, "Events").findChild(QtWidgets.QToolButton, "") + add_event.click() + result = helper.wait_for_condition( + lambda: asset_editor_widget.findChild(QtWidgets.QFrame, "EventName") is not None, GENERAL_WAIT + ) + Report.result(Tests.child_event_created, result) + + with Tracer() as section_tracer: + # 5) Save the Script Event file + editor.AssetEditorWidgetRequestsBus(bus.Broadcast, "SaveAssetAs", FILE_PATH) + + # 6) Verify if file is created + result = helper.wait_for_condition(lambda: os.path.exists(FILE_PATH), 3 * GENERAL_WAIT) + Report.result(Tests.file_saved, result) + + # 7) Verify console for errors/warnings + Report.result(Tests.console_error, not section_tracer.has_errors) + 
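+        # Illustrative sketch (hypothetical, not required by this test): the Tracer
+        # extended in this change also captures OnPrintf output in section_tracer.prints,
+        # where each PrintInfo carries .window and .message:
+        #     for p in section_tracer.prints:
+        #         Report.info(f"Captured print [{p.window}]: {p.message}")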
Report.result(Tests.console_warning, not section_tracer.has_warnings) + + # 8) Close Asset Editor + general.close_pane("Asset Editor") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(CreateScriptEventFile) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/AssetEditor_NewScriptEvent.py b/AutomatedTesting/Gem/PythonTests/scripting/AssetEditor_NewScriptEvent.py new file mode 100644 index 0000000000..1f801d4eeb --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/AssetEditor_NewScriptEvent.py @@ -0,0 +1,129 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + + +Test case ID: T92568942 +Test Case Title: Clicking the "+" button and selecting "New Script Event" opens the +Asset Editor with a new Script Event asset +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92568942 +""" + +from PySide2 import QtWidgets +import azlmbr.legacy.general as general + +import editor_python_test_tools.pyside_utils as pyside_utils +from editor_python_test_tools.utils import TestHelper as helper +from editor_python_test_tools.utils import Report + + +class Tests: + action_found = "New Script event action found" + asset_editor_opened = "Asset Editor opened" + new_asset = "Asset Editor created with new asset" + script_event = "New Script event created in Asset Editor" + + +GENERAL_WAIT = 0.5 # seconds + + +class TestAssetEditor_NewScriptEvent: + """ + Summary: + Clicking the "+" button in Node Palette and creating New Script Event opens Asset Editor + + Expected Behavior: + Clicking the "+" button and selecting "New Script Event" opens the Asset Editor with a + new Script Event asset + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Close any existing AssetEditor window + 3) Get the SC window object + 4) Click on New Script Event on Node palette + 5) Verify if Asset Editor opened + 6) Verify if a new asset with Script Canvas category is opened + 7) Close Script Canvas and Asset Editor + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. 
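+
+    Example (hypothetical sketch of the async helper pattern this test relies on;
+    names are the ones used below):
+
+        @pyside_utils.wrap_async
+        async def run_test(self):
+            # polls the callable until it returns True or the timeout (in seconds) elapses
+            await pyside_utils.wait_for_condition(menu_has_focus, GENERAL_WAIT)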
+ + :return: None + """ + + @pyside_utils.wrap_async + async def run_test(self): + # 1) Open Script Canvas window (Tools > Script Canvas) + general.idle_enable(True) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + + # 2) Close any existing AssetEditor window + general.close_pane("Asset Editor") + helper.wait_for_condition(lambda: not general.is_pane_visible("Asset Editor"), 5.0) + + # 3) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + node_palette = sc.findChild(QtWidgets.QDockWidget, "NodePalette") + frame = node_palette.findChild(QtWidgets.QFrame, "searchCustomization") + button = frame.findChild(QtWidgets.QToolButton) + pyside_utils.click_button_async(button) + + # 4) Click on New Script Event on Node palette + menu = None + + def menu_has_focus(): + nonlocal menu + for fw in [ + QtWidgets.QApplication.activePopupWidget(), + QtWidgets.QApplication.activeModalWidget(), + QtWidgets.QApplication.focusWidget(), + QtWidgets.QApplication.activeWindow(), + ]: + if fw and isinstance(fw, QtWidgets.QMenu) and fw.isVisible(): + menu = fw + return True + return False + + await pyside_utils.wait_for_condition(menu_has_focus, GENERAL_WAIT) + action = await pyside_utils.wait_for_action_in_menu(menu, {"text": "New Script Event"}) + Report.info(f"{Tests.action_found}: {action is not None}") + action.trigger() + pyside_utils.queue_hide_event(menu) + + # 5) Verify if Asset Editor opened + result = helper.wait_for_condition(lambda: general.is_pane_visible("Asset Editor"), GENERAL_WAIT) + Report.info(f"{Tests.asset_editor_opened}: {result}") + + # 6) Verify if a new asset with Script Canvas category is opened + asset_editor = editor_window.findChild(QtWidgets.QDockWidget, "Asset Editor") + row_container = asset_editor.findChild(QtWidgets.QWidget, "ContainerForRows") + # NOTE: QWidget ContainerForRows will have frames of Name, Category, ToolTip etc. + # To validate if a new script event file is generated, we check for + # QFrame Category and its value + categories = row_container.findChildren(QtWidgets.QFrame, "Category") + Report.info(f"{Tests.new_asset}: {len(categories) > 0}") + result = False + for frame in categories: + line_edit = frame.findChild(QtWidgets.QLineEdit) + if line_edit and line_edit.text() == "Script Events": + result = True + break + Report.info(f"{Tests.script_event}: {result}") + + # 7) Close Script Canvas and Asset Editor + general.close_pane("Script Canvas") + general.close_pane("Asset Editor") + + +test = TestAssetEditor_NewScriptEvent() +test.run_test() diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Debugging_TargetMultipleEntities.py b/AutomatedTesting/Gem/PythonTests/scripting/Debugging_TargetMultipleEntities.py new file mode 100644 index 0000000000..a26cb4f923 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/Debugging_TargetMultipleEntities.py @@ -0,0 +1,144 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ + +Test case ID: T92568856 +Test Case Title: Multiple Entities can be targeted in the Debugger tool +URLs of the test case: https://testrail.agscollab.com/index.php?/tests/view/92568856 +""" + + +# fmt: off
class Tests(): + level_created = ("New level created", "New level not created") + entities_found = ("Entities are found in Logging window", "Entities are not found in Logging window") + select_multiple_targets = ("Multiple targets are selected", "Multiple targets are not selected") +# fmt: on + + +GENERAL_WAIT = 0.5 # seconds + + +def Debugging_TargetMultipleEntities(): + """ + Summary: + Multiple Entities can be targeted in the Debugger tool + + Expected Behavior: + Selected entities can be checked for logging. + Upon checking, checkboxes of the parent folders change to either full or partial check. + + Test Steps: + 1) Create temp level + 2) Create two entities with scriptcanvas components + 3) Set values for scriptcanvas + 4) Open Script Canvas window and get sc object + 5) Open Debugging(Logging) window + 6) Click on Entities tab in logging window + 7) Verify if the scriptcanvas assets exist under entities + 8) Verify if the entities can be selected + 9) Close Debugging window and Script Canvas window + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + from PySide2 import QtWidgets + from PySide2.QtCore import Qt + import azlmbr.legacy.general as general + import azlmbr.math as math + import azlmbr.asset as asset + import azlmbr.bus as bus + + import os + import pyside_utils + import hydra_editor_utils as hydra + from utils import TestHelper as helper + from utils import Report + + LEVEL_NAME = "tmp_level" + ASSET_NAME_1 = "ScriptCanvas_TwoComponents0.scriptcanvas" + ASSET_NAME_2 = "ScriptCanvas_TwoComponents1.scriptcanvas" + ASSET_1 = os.path.join("scriptcanvas", ASSET_NAME_1) + ASSET_2 = os.path.join("scriptcanvas", ASSET_NAME_2) + WAIT_TIME = 3.0 + + def get_asset(asset_path): + return asset.AssetCatalogRequestBus(bus.Broadcast, "GetAssetIdByPath", asset_path, math.Uuid(), False) + + # 1) Create temp level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Create two entities with scriptcanvas components + position = math.Vector3(512.0, 512.0, 32.0) + test_entity_1 = hydra.Entity("test_entity_1") + test_entity_1.create_entity(position, ["Script Canvas"]) + test_entity_2 = hydra.Entity("test_entity_2") + test_entity_2.create_entity(position, ["Script Canvas"]) + + # 3) Set values for scriptcanvas + test_entity_1.get_set_test(0, "Script Canvas Asset|Script Canvas Asset", get_asset(ASSET_1)) + test_entity_2.get_set_test(0, "Script Canvas Asset|Script Canvas Asset", get_asset(ASSET_2)) + + # 4) Open Script Canvas window and get sc object + general.open_pane("Script Canvas") + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + + # 5) Open Debugging(Logging) window + if ( + sc.findChild(QtWidgets.QDockWidget, "LoggingWindow") is None + or not sc.findChild(QtWidgets.QDockWidget, "LoggingWindow").isVisible() + ): + action =
pyside_utils.find_child_by_pattern(sc, {"text": "Debugging", "type": QtWidgets.QAction}) + action.trigger() + logging_window = sc.findChild(QtWidgets.QDockWidget, "LoggingWindow") + + # 6) Click on Entities tab in logging window + button = pyside_utils.find_child_by_pattern(logging_window, {"type": QtWidgets.QPushButton, "text": "Entities"}) + button.click() + + # 7) Verify if the scriptcanvas assets exist under entities + entities = logging_window.findChild(QtWidgets.QWidget, "entitiesPage") + tree = entities.findChild(QtWidgets.QTreeView, "pivotTreeView") + asset_1_mi = pyside_utils.find_child_by_pattern(tree, ASSET_NAME_1.lower()) + asset_2_mi = pyside_utils.find_child_by_pattern(tree, ASSET_NAME_2.lower()) + result = asset_1_mi is not None and asset_2_mi is not None + Report.critical_result(Tests.entities_found, result) + + # 8) Verify if the entities can be selected + tree.expandAll() + tree.model().setData(asset_1_mi, 2, Qt.CheckStateRole) + tree.model().setData(asset_2_mi, 2, Qt.CheckStateRole) + checklist = [asset_1_mi, asset_1_mi.parent(), asset_2_mi, asset_2_mi.parent()] + result = all([index.data(Qt.CheckStateRole) == 2 for index in checklist]) + Report.critical_result(Tests.select_multiple_targets, result) + + # 9) Close Debugging window and Script Canvas window + logging_window.close() + general.close_pane("Script Canvas") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + from utils import Report + + Report.start_test(Debugging_TargetMultipleEntities) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Debugging_TargetMultipleGraphs.py b/AutomatedTesting/Gem/PythonTests/scripting/Debugging_TargetMultipleGraphs.py new file mode 100644 index 0000000000..4aa5c822a7 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/Debugging_TargetMultipleGraphs.py @@ -0,0 +1,113 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + + +Test case ID: T92569137 +Test Case Title: Multiple Graphs can be targeted in the Debugger tool +URLs of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569137 +""" + + +# fmt: off +class Tests(): + select_multiple_targets = ("Multiple targets are selected", "Multiple targets are not selected") +# fmt: on + + +GENERAL_WAIT = 0.5 # seconds + + +def Debugging_TargetMultipleGraphs(): + """ + Summary: + Multiple Graphs can be targeted in the Debugger tool + + Expected Behavior: + Selected files can be checked for logging. + Upon checking, checkboxes of the parent folders change to either full or partial check. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Open Debugging Tool if not opened already + 4) Select Graphs tab under logging window + 5) Select multiple targets from levels and scriptcanvas + 6) Verify if multiple targets are selected + 7) Close Debugging window and Script Canvas window + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file.
+ Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + from PySide2 import QtWidgets + from PySide2.QtCore import Qt + import azlmbr.legacy.general as general + + import pyside_utils + from utils import TestHelper as helper + from utils import Report + + # 1) Open Script Canvas window (Tools > Script Canvas) + general.idle_enable(True) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 6.0) + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + + # 3) Open Debugging Tool if not opened already + if ( + sc.findChild(QtWidgets.QDockWidget, "LoggingWindow") is None + or not sc.findChild(QtWidgets.QDockWidget, "LoggingWindow").isVisible() + ): + action = pyside_utils.find_child_by_pattern(sc, {"text": "Debugging", "type": QtWidgets.QAction}) + action.trigger() + logging_window = sc.findChild(QtWidgets.QDockWidget, "LoggingWindow") + + # 4) Select Graphs tab under logging window + button = pyside_utils.find_child_by_pattern(logging_window, {"type": QtWidgets.QPushButton, "text": "Graphs"}) + button.click() + + # 5) Select multiple targets from levels and scriptcanvas + graphs = logging_window.findChild(QtWidgets.QWidget, "graphsPage") + tree = graphs.findChild(QtWidgets.QTreeView, "pivotTreeView") + # Select the first child under levels + level_model_index = pyside_utils.find_child_by_pattern(tree, "levels") + level_child_index = pyside_utils.get_item_view_index(tree, 0, 0, level_model_index) + tree.model().setData(level_child_index, 2, Qt.CheckStateRole) + # Select the first child under scriptcanvas + sc_model_index = pyside_utils.find_child_by_pattern(tree, "scriptcanvas") + sc_child_index = pyside_utils.get_item_view_index(tree, 0, 0, sc_model_index) + tree.model().setData(sc_child_index, 2, Qt.CheckStateRole) + + # 6) Verify if multiple targets are selected + result = all([index.data(Qt.CheckStateRole) != 0 for index in (level_model_index, sc_model_index)]) + result = result and all([index.data(Qt.CheckStateRole) == 2 for index in (level_child_index, sc_child_index)]) + Report.result(Tests.select_multiple_targets, result) + + # 7) Close Debugging window and Script Canvas window + logging_window.close() + general.close_pane("Script Canvas") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(Debugging_TargetMultipleGraphs) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Docking_Pane.py b/AutomatedTesting/Gem/PythonTests/scripting/Docking_Pane.py index 25dd83e5e2..4fa1e1257f 100755 --- a/AutomatedTesting/Gem/PythonTests/scripting/Docking_Pane.py +++ b/AutomatedTesting/Gem/PythonTests/scripting/Docking_Pane.py @@ -16,7 +16,6 @@ URLs of the test case: https://testrail.agscollab.com/index.php?/cases/view/1702 # fmt: off class Tests(): - open_sc_window = ("Script Canvas window is opened", "Failed to open Script Canvas window") pane_opened = ("Pane is opened successfully", "Failed to open pane") dock_pane = ("Pane is docked successfully", "Failed to dock Pane into one or more allowed area") # fmt: on @@ -78,8 +77,7 @@ def Docking_Pane(): # 1) Open Script Canvas window (Tools > Script Canvas) general.open_pane("Script Canvas") - is_sc_visible = helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) - Report.result(Tests.open_sc_window, is_sc_visible) + 
helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) # 2) Make sure Node Palette pane is opened editor_window = pyside_utils.get_editor_main_window() diff --git a/AutomatedTesting/Gem/PythonTests/scripting/EditMenu_UndoRedo.py b/AutomatedTesting/Gem/PythonTests/scripting/EditMenu_UndoRedo.py new file mode 100644 index 0000000000..a87d9e9be9 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/EditMenu_UndoRedo.py @@ -0,0 +1,123 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + + +Test case ID: T92569049 +Test Case Title: Edit > Undo undoes the last action +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569049 +Test case ID: T92569051 +Test Case Title: Edit > Redo redoes the last undone action +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569051 +""" + + +# fmt: off +class Tests(): + variable_created = ("New variable created", "New variable not created") + undo_worked = ("Undo action working", "Undo action did not work") + redo_worked = ("Redo action working", "Redo action did not work") +# fmt: on + + +def EditMenu_UndoRedo(): + """ + Summary: + Edit > Undo undoes the last action + Edit > Redo redoes the last undone action + We create a new variable in variable manager, undo and verify if variable is removed, + redo it and verify if the variable is created again. + + Expected Behavior: + The last action is undone. + The last undone action is redone. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Open Variable Manager if not opened already + 4) Create Graph + 5) Create new variable + 6) Verify if the variable is created initially + 7) Trigger Undo action and verify if variable is removed in Variable Manager + 8) Trigger Redo action and verify if variable is readded in Variable Manager + 9) Close SC window + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. 
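+
+    Example (hypothetical): steps 6-8 below read the variable count straight from the
+    Variable Manager's table model, so undo/redo can be verified without scraping the UI:
+
+        count = graph_vars.model().rowCount(QtCore.QModelIndex())
+        # expected: 1 after create, 0 after undo, 1 after redo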
+ + :return: None + """ + + from PySide2 import QtWidgets, QtCore + + import azlmbr.legacy.general as general + + import pyside_utils + from utils import Report + + # 1) Open Script Canvas window + general.idle_enable(True) + general.open_pane("Script Canvas") + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + + # 3) Open Variable Manager if not opened already + if sc.findChild(QtWidgets.QDockWidget, "VariableManager") is None: + action = pyside_utils.find_child_by_pattern(sc, {"text": "Variable Manager", "type": QtWidgets.QAction}) + action.trigger() + variable_manager = sc.findChild(QtWidgets.QDockWidget, "VariableManager") + + # 4) Create Graph + action = pyside_utils.find_child_by_pattern(sc, {"objectName": "action_New_Script", "type": QtWidgets.QAction}) + action.trigger() + + # 5) Create new variable + add_button = variable_manager.findChild(QtWidgets.QPushButton, "addButton") + add_button.click() # Click on Create Variable button + # Select variable type + table_view = variable_manager.findChild(QtWidgets.QTableView, "variablePalette") + model_index = pyside_utils.find_child_by_pattern(table_view, "Boolean") + # Click on it to create variable + pyside_utils.item_view_index_mouse_click(table_view, model_index) + + # 6) Verify if the variable is created initially + graph_vars = variable_manager.findChild(QtWidgets.QTableView, "graphVariables") + result = graph_vars.model().rowCount(QtCore.QModelIndex()) == 1 # since we added 1 variable, rowcount=1 + Report.result(Tests.variable_created, result) + + # 7) Trigger Undo action and verify if variable is removed in Variable Manager + action = sc.findChild(QtWidgets.QAction, "action_Undo") + action.trigger() + result = graph_vars.model().rowCount(QtCore.QModelIndex()) == 0 # since we triggered undo, rowcount=0 + Report.result(Tests.undo_worked, result) + + # 8) Trigger Redo action and verify if variable is readded in Variable Manager + action = sc.findChild(QtWidgets.QAction, "action_Redo") + action.trigger() + result = ( + graph_vars.model().rowCount(QtCore.QModelIndex()) == 1 + ) # since action is redone 1 variable is readded, rowcount=1 + Report.result(Tests.redo_worked, result) + + # 9) Close SC window + general.close_pane("Script Canvas") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(EditMenu_UndoRedo) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Entity_AddScriptCanvasComponent.py b/AutomatedTesting/Gem/PythonTests/scripting/Entity_AddScriptCanvasComponent.py new file mode 100644 index 0000000000..fd8e9b1173 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/Entity_AddScriptCanvasComponent.py @@ -0,0 +1,88 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ +Test case ID: T92562978 +Test Case Title: Script Canvas Component can be added to an entity +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92562978 +""" + + +# fmt: off +class Tests(): + level_created = ("New level created", "Failed to create new level") + entity_created = ("Test Entity created", "Failed to create test entity") + add_sc_component = ("Script Canvas component added to entity", "Failed to add SC component to entity") + no_errors_found = ("Tracer found no errors", "One or more errors found by Tracer") + no_warnings_found = ("Tracer found no warnings", "One or more warnings found by Tracer") +# fmt: on + + +def Entity_AddScriptCanvasComponent(): + """ + Summary: + verify if Script Canvas component can be added to Entity without any issue + + Expected Behavior: + Script Canvas Component is added to the entity successfully without issue. + + Test Steps: + 1) Create temp level + 2) Create test entity + 3) Start Tracer + 4) Add Script Canvas component to test entity + 5) Search for errors and warnings + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + from utils import TestHelper as helper + from utils import Tracer + from utils import Report + from editor_entity_utils import EditorEntity + import azlmbr.legacy.general as general + + LEVEL_NAME = "tmp_level" + WAIT_TIME = 3.0 # SECONDS + + # 1) Create temp level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Create test entity + test_entity = EditorEntity.create_editor_entity("test_entity") + Report.result(Tests.entity_created, test_entity.id.IsValid()) + + # 3) Start Tracer + with Tracer() as section_tracer: + + # 4) Add Script Canvas component to test entity + test_entity.add_component("Script Canvas") + Report.result(Tests.add_sc_component, test_entity.has_component("Script Canvas")) + + # 5) Search for errors and warnings + Report.result(Tests.no_errors_found, not section_tracer.has_errors) + Report.result(Tests.no_warnings_found, not section_tracer.has_warnings) + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + from utils import Report + + Report.start_test(Entity_AddScriptCanvasComponent) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/FileMenu_New_Open.py b/AutomatedTesting/Gem/PythonTests/scripting/FileMenu_New_Open.py new file mode 100644 index 0000000000..f72ac8ea01 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/FileMenu_New_Open.py @@ -0,0 +1,98 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ + +Test case ID: T92569037 +Test Case Title: File > New Script creates a new script +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569037 +Test case ID: T92569039 +Test Case Title: File > Open opens the Open... dialog +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569039 +""" + +import os +import sys +from PySide2 import QtWidgets +import azlmbr.legacy.general as general + +import editor_python_test_tools.pyside_utils as pyside_utils +from editor_python_test_tools.utils import Report + +# fmt: off +class Tests(): + new_action = "File->New action working as expected" + open_action = "File->Open action working as expected" +# fmt: on + + +GENERAL_WAIT = 0.5 # seconds + + +class TestFileMenuNewOpen: + """ + Summary: + When clicked on File->New, new script opens and File->Open should open the FileBrowser + + Expected Behavior: + New and Open actions should work as expected. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Trigger File->New action + 4) Verify if New tab is opened + 5) Trigger File->Open action + 6) Close Script Canvas window + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + @pyside_utils.wrap_async + async def run_test(self): + # 1) Open Script Canvas window (Tools > Script Canvas) + general.open_pane("Script Canvas") + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + sc_main = sc.findChild(QtWidgets.QMainWindow) + sc_tabs = sc_main.findChild(QtWidgets.QTabWidget, "ScriptCanvasTabs") + + # 3) Trigger File->New action + initial_tabs_count = sc_tabs.count() + action = pyside_utils.find_child_by_pattern( + sc_main, {"objectName": "action_New_Script", "type": QtWidgets.QAction} + ) + action.trigger() + + # 4) Verify if New tab is opened + general.idle_wait(GENERAL_WAIT) + Report.info(f"{Tests.new_action}: {sc_tabs.count() == initial_tabs_count + 1}") + + # 5) Trigger File->Open action + action = pyside_utils.find_child_by_pattern(sc_main, {"objectName": "action_Open", "type": QtWidgets.QAction}) + pyside_utils.trigger_action_async(action) + general.idle_wait(GENERAL_WAIT) + popup = await pyside_utils.wait_for_modal_widget() + Report.info(f"{Tests.open_action}: {popup and 'Open' in popup.windowTitle()}") + popup.close() + + # 6) Close Script Canvas window + general.close_pane("Script Canvas") + + +test = TestFileMenuNewOpen() +test.run_test() diff --git a/AutomatedTesting/Gem/PythonTests/scripting/GraphClose_SavePrompt.py b/AutomatedTesting/Gem/PythonTests/scripting/GraphClose_SavePrompt.py new file mode 100644 index 0000000000..ce610b159b --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/GraphClose_SavePrompt.py @@ -0,0 +1,110 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. 
This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + + +Test case ID: T92563070 +Test Case Title: Graphs can be closed by clicking X on the Graph name tab +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92563070 +Test case ID: T92563068 +Test Case Title: Save Prompt: User is prompted to save a graph on close after +creating a new graph +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92563068 +""" + +import os +import sys +from PySide2 import QtWidgets +import azlmbr.legacy.general as general + +import editor_python_test_tools.pyside_utils as pyside_utils +from editor_python_test_tools.utils import TestHelper as helper +from editor_python_test_tools.utils import Report + +# fmt: off +class Tests(): + new_graph = "New graph created" + save_prompt = "Save prompt opened as expected" + close_graph = "Close button worked as expected" +# fmt: on + + +GENERAL_WAIT = 0.5 # seconds + + +class TestGraphCloseSavePrompt: + """ + Summary: + The graph is closed when the X button is clicked. + Save Prompt is opened before closing. + + Expected Behavior: + A save prompt opens before the unsaved graph is closed, and the graph tab closes as expected. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Trigger File->New action + 4) Verify if New tab is opened + 5) Close new tab using X on top of graph and check for save dialog + 6) Check if tab is closed + 7) Close Script Canvas window + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + @pyside_utils.wrap_async + async def run_test(self): + # 1) Open Script Canvas window (Tools > Script Canvas) + general.idle_enable(True) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + sc_main = sc.findChild(QtWidgets.QMainWindow) + sc_tabs = sc_main.findChild(QtWidgets.QTabWidget, "ScriptCanvasTabs") + tab_bar = sc_tabs.findChild(QtWidgets.QTabBar) + + # 3) Trigger File->New action + initial_tabs_count = sc_tabs.count() + action = pyside_utils.find_child_by_pattern( + sc_main, {"objectName": "action_New_Script", "type": QtWidgets.QAction} + ) + action.trigger() + + # 4) Verify if New tab is opened + result = helper.wait_for_condition(lambda: sc_tabs.count() == initial_tabs_count + 1, GENERAL_WAIT) + Report.info(f"{Tests.new_graph}: {result}") + + # 5) Close new tab using X on top of graph and check for save dialog + close_button = tab_bar.findChildren(QtWidgets.QAbstractButton)[0] + pyside_utils.click_button_async(close_button) + popup = await pyside_utils.wait_for_modal_widget() + if popup: + Report.info(f"{Tests.save_prompt}: {popup.findChild(QtWidgets.QDialog, 'SaveChangesDialog') is not None}") + dont_save = popup.findChild(QtWidgets.QPushButton, "m_continueButton") + dont_save.click() + + # 6) Check if tab is closed + await pyside_utils.wait_for_condition(lambda: sc_tabs.count() == initial_tabs_count, 5.0) + Report.info(f"{Tests.close_graph}: {sc_tabs.count()==initial_tabs_count}") + + # 7) Close Script Canvas window + general.close_pane("Script Canvas") + + +test = TestGraphCloseSavePrompt()
+test.run_test() diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Graph_ZoomInZoomOut.py b/AutomatedTesting/Gem/PythonTests/scripting/Graph_ZoomInZoomOut.py new file mode 100644 index 0000000000..a93b6e61d1 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/Graph_ZoomInZoomOut.py @@ -0,0 +1,120 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + + +Test case ID: T92569079 +Test Case Title: View > Zoom In zooms the graph in +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569079 +Test case ID: T92569081 +Test Case Title: View > Zoom Out zooms the graph out +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569081 +""" + + +# fmt: off +class Tests(): + zoom_in = ("Zoom In action working as expected", "Zoom In action not working as expected") + zoom_out = ("Zoom Out action working as expected", "Zoom Out action not working as expected") +# fmt: on + + +GENERAL_WAIT = 0.5 # seconds + + +def Graph_ZoomInZoomOut(): + """ + Summary: + The graph can be zoomed in and zoomed out. + + Expected Behavior: + The graph is zoomed in when we click View->ZoomIn + The graph is zoomed out when we click View->ZoomOut + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Create new graph + 4) Get initial graph transform values + 5) Trigger Zoom In and verify if the graph transform scale is increased + 6) Trigger Zoom Out and verify if the graph transform scale is decreased + 7) Close Script Canvas window + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results.
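+
+    Example (hypothetical): the zoom level is sampled from the QGraphicsView transform,
+    whose m11/m22 components are the horizontal/vertical scale factors:
+
+        scale_x = graphics_view.transform().m11()  # grows on Zoom In, shrinks on Zoom Out
+        scale_y = graphics_view.transform().m22()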
+ + :return: None + """ + + # Helper imports + import ImportPathHelper as imports + + imports.init() + + from PySide2 import QtWidgets + import azlmbr.legacy.general as general + + import pyside_utils + from utils import TestHelper as helper + from utils import Report + + # 1) Open Script Canvas window (Tools > Script Canvas) + general.idle_enable(True) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + sc_main = sc.findChild(QtWidgets.QMainWindow) + + # 3) Create new graph + create_new_graph = pyside_utils.find_child_by_pattern( + sc_main, {"objectName": "action_New_Script", "type": QtWidgets.QAction} + ) + create_new_graph.trigger() + + # 4) Get initial graph transform values + graphics_view = sc_main.findChild(QtWidgets.QGraphicsView) + # NOTE: transform m11 and m22 are horizontal and vertical scales of graph + # they increase when zoomed in and decrease when zoomed out + curr_m11, curr_m22 = graphics_view.transform().m11(), graphics_view.transform().m22() + + # 5) Trigger Zoom In and verify if the graph transform scale is increased + zin = pyside_utils.find_child_by_pattern(sc_main, {"objectName": "action_ZoomIn", "type": QtWidgets.QAction}) + zin.trigger() + result = helper.wait_for_condition( + lambda: curr_m11 < graphics_view.transform().m11() and curr_m22 < graphics_view.transform().m22(), GENERAL_WAIT, + ) + Report.result(Tests.zoom_in, result) + + # 6) Trigger Zoom Out and verify if the graph transform scale is decreased + curr_m11, curr_m22 = graphics_view.transform().m11(), graphics_view.transform().m22() + zout = pyside_utils.find_child_by_pattern(sc_main, {"objectName": "action_ZoomOut", "type": QtWidgets.QAction}) + zout.trigger() + result = helper.wait_for_condition( + lambda: curr_m11 > graphics_view.transform().m11() and curr_m22 > graphics_view.transform().m22(), GENERAL_WAIT, + ) + Report.result(Tests.zoom_out, result) + + # 7) Close Script Canvas window + general.close_pane("Script Canvas") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(Graph_ZoomInZoomOut) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/ImportPathHelper.py b/AutomatedTesting/Gem/PythonTests/scripting/ImportPathHelper.py index 8aede24d0e..a45024cebf 100755 --- a/AutomatedTesting/Gem/PythonTests/scripting/ImportPathHelper.py +++ b/AutomatedTesting/Gem/PythonTests/scripting/ImportPathHelper.py @@ -13,4 +13,5 @@ def init(): import os import sys sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../automatedtesting_shared') + sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../EditorPythonTestTools/editor_python_test_tools') \ No newline at end of file diff --git a/AutomatedTesting/Gem/PythonTests/scripting/NodeInspector_RenameVariable.py b/AutomatedTesting/Gem/PythonTests/scripting/NodeInspector_RenameVariable.py new file mode 100644 index 0000000000..33d3f4137a --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/NodeInspector_RenameVariable.py @@ -0,0 +1,132 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License").
All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +Test case ID: T92568982 +Test Case Title: Renaming variables in the Node Inspector +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92568982 +""" + + +# fmt: off +class Tests(): + variable_created = ("New variable created", "New variable is not created") + node_inspector_rename = ("Variable is renamed in Node Inspector", "Variable is not renamed in Node Inspector") + variable_manager_rename = ("Variable is renamed in Variable Manager", "Variable is not renamed in Variable Manager") +# fmt: on + + +GENERAL_WAIT = 0.5 # seconds + + +def NodeInspector_RenameVariable(): + """ + Summary: + Renaming variables in the Node Inspector, renames the actual variable. + + Expected Behavior: + The Variable's name is changed in both Node Inspector and Variable Manager. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Open Variable Manager if not opened already + 4) Open Node Inspector if not opened already + 5) Create new graph and a new variable in Variable manager + 6) Click on the variable + 7) Update name in Node Inspector and click on ENTER + 8) Verify if the name is updated in Node inspector and Variable manager + 9) Close Script Canvas window + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. 
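+
+    Example (hypothetical): step 7 below commits the rename by simulating an ENTER
+    key press on the name field via QtTest:
+
+        QtTest.QTest.keyClick(name_line_edit, Qt.Key_Return, Qt.NoModifier)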
+ + :return: None + """ + + TEST_NAME = "test name" + + from PySide2 import QtWidgets, QtCore, QtTest + from PySide2.QtCore import Qt + import azlmbr.legacy.general as general + + import pyside_utils + from utils import TestHelper as helper + from utils import Report + + def open_tool(sc, dock_widget_name, pane_name): + if sc.findChild(QtWidgets.QDockWidget, dock_widget_name) is None: + action = pyside_utils.find_child_by_pattern(sc, {"text": pane_name, "type": QtWidgets.QAction}) + action.trigger() + tool = sc.findChild(QtWidgets.QDockWidget, dock_widget_name) + return tool + + # 1) Open Script Canvas window + general.idle_enable(True) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + + # 3) Open Variable Manager if not opened already + variable_manager = open_tool(sc, "VariableManager", "Variable Manager") + + # 4) Open Node Inspector if not opened already + node_inspector = open_tool(sc, "NodeInspector", "Node Inspector") + + # 5) Create new graph and a new variable in Variable manager + action = pyside_utils.find_child_by_pattern(sc, {"objectName": "action_New_Script", "type": QtWidgets.QAction}) + action.trigger() + graph_vars = variable_manager.findChild(QtWidgets.QTableView, "graphVariables") + add_button = variable_manager.findChild(QtWidgets.QPushButton, "addButton") + add_button.click() + # Select variable type + table_view = variable_manager.findChild(QtWidgets.QTableView, "variablePalette") + model_index = pyside_utils.find_child_by_pattern(table_view, "Boolean") + # Click on it to create variable + pyside_utils.item_view_index_mouse_click(table_view, model_index) + result = graph_vars.model().rowCount(QtCore.QModelIndex()) == 1 + var_mi = pyside_utils.find_child_by_pattern(graph_vars, "Variable 1") + result = result and (var_mi is not None) + Report.critical_result(Tests.variable_created, result) + + # 6) Click on the variable + pyside_utils.item_view_index_mouse_click(graph_vars, var_mi) + + # 7) Update name in Node Inspector and click on ENTER + helper.wait_for_condition( + lambda: node_inspector.findChild(QtWidgets.QWidget, "ContainerForRows") is not None, GENERAL_WAIT + ) + row_container = node_inspector.findChild(QtWidgets.QWidget, "ContainerForRows") + name_frame = row_container.findChild(QtWidgets.QWidget, "Name") + name_line_edit = name_frame.findChild(QtWidgets.QLineEdit) + name_line_edit.setText(TEST_NAME) + QtTest.QTest.keyClick(name_line_edit, Qt.Key_Return, Qt.NoModifier) + + # 8) Verify if the name is updated in Node inspector and Variable manager + helper.wait_for_condition(lambda: var_mi.data(Qt.DisplayRole) == TEST_NAME, GENERAL_WAIT) + Report.critical_result(Tests.node_inspector_rename, name_line_edit.text() == TEST_NAME) + Report.critical_result(Tests.variable_manager_rename, var_mi.data(Qt.DisplayRole) == TEST_NAME) + + # 9) Close Script Canvas window + general.close_pane("Script Canvas") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(NodeInspector_RenameVariable) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/NodePalette_ClearSelection.py b/AutomatedTesting/Gem/PythonTests/scripting/NodePalette_ClearSelection.py new file mode 100644 index 0000000000..de30e46767 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/NodePalette_ClearSelection.py
@@ -0,0 +1,94 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +Test case ID: T92562993 +Test Case Title: Clicking the X button on the Search Box clears the currently entered string +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92562993 +""" + + +# fmt: off +class Tests(): + set_search_string = ("Search string is set", "Search string is not set") + search_string_cleared = ("Search string cleared as expected", "Search string not cleared") +# fmt: on + + +def NodePalette_ClearSelection(): + """ + Summary: + We enter some string in the Node Palette Search box, and click on the X button to verify if the + search string got cleared. + + Expected Behavior: + Clicking the X button on the Search Box clears the currently entered string + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Open Node Palette if not opened already + 4) Set some string in the Search box + 5) Verify if the test string is set + 6) Clear search string and verify if it is cleared + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + from PySide2 import QtWidgets + + from utils import TestHelper as helper + from utils import Report + + import azlmbr.legacy.general as general + + import pyside_utils + + TEST_STRING = "Test String" + + # 1) Open Script Canvas window (Tools > Script Canvas) + general.idle_enable(True) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 3.0) + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + + # 3) Open Node Palette if not opened already + if sc.findChild(QtWidgets.QDockWidget, "NodePalette") is None: + action = pyside_utils.find_child_by_pattern(sc, {"text": "Node Palette", "type": QtWidgets.QAction}) + action.trigger() + node_palette = sc.findChild(QtWidgets.QDockWidget, "NodePalette") + search_frame = node_palette.findChild(QtWidgets.QFrame, "searchFrame") + + # 4) Set some string in the Search box + search_box = search_frame.findChild(QtWidgets.QLineEdit, "searchFilter") + search_box.setText(TEST_STRING) + + # 5) Verify if the test string is set + Report.result(Tests.set_search_string, search_box.text() == TEST_STRING) + + # 6) Clear search string and verify if it is cleared + clear_text_button = search_frame.findChild(QtWidgets.QToolButton, "ClearToolButton") + clear_text_button.click() + Report.result(Tests.search_string_cleared, search_box.text() == "") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + from utils import Report + + Report.start_test(NodePalette_ClearSelection) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/NodePalette_SelectNode.py b/AutomatedTesting/Gem/PythonTests/scripting/NodePalette_SelectNode.py new file mode
100644 index 0000000000..2ab76071a6 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/NodePalette_SelectNode.py @@ -0,0 +1,104 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + + +Test case ID: T92568940 +Test Case Title: Categories and Nodes can be selected +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92568940 +""" + + +# fmt: off +class Tests(): + category_selected = ("Category can be selected", "Category cannot be selected") + node_selected = ("Node can be selected", "Node cannot be selected") +# fmt: on + + +GENERAL_WAIT = 0.5 # seconds + + +def NodePalette_SelectNode(): + """ + Summary: + Categories and Nodes can be selected + + Expected Behavior: + When clicked on Node Palette, nodes and categories can be selected. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Get the SC window object + 3) Expand QTreeView + 4) Click on category and check if it is selected + 5) Click on node and check if it is selected + 6) Close Script Canvas window + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + CATEGORY = "AI" + NODE = "Find Path To Entity" + + from PySide2 import QtWidgets + import azlmbr.legacy.general as general + + import pyside_utils + from utils import TestHelper as helper + from utils import Report + + # 1) Open Script Canvas window (Tools > Script Canvas) + general.idle_enable(True) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + + # 2) Get the SC window object + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + if sc.findChild(QtWidgets.QDockWidget, "NodePalette") is None: + action = pyside_utils.find_child_by_pattern(sc, {"text": "Node Palette", "type": QtWidgets.QAction}) + action.trigger() + node_palette = sc.findChild(QtWidgets.QDockWidget, "NodePalette") + tree = node_palette.findChild(QtWidgets.QTreeView, "treeView") + + # 3) Expand QTreeView + tree.expandAll() + + # 4) Click on category and check if it is selected + category_index = pyside_utils.find_child_by_hierarchy(tree, CATEGORY) + tree.scrollTo(category_index) + pyside_utils.item_view_index_mouse_click(tree, category_index) + helper.wait_for_condition(lambda: tree.selectedIndexes() and tree.selectedIndexes()[0] == category_index, GENERAL_WAIT) + Report.result(Tests.category_selected, tree.selectedIndexes()[0] == category_index) + + # 5) Click on node and check if it is selected + node_index = pyside_utils.find_child_by_pattern(tree, NODE) + helper.wait_for_condition(lambda: tree.isExpanded(node_index), GENERAL_WAIT) + pyside_utils.item_view_index_mouse_click(tree, node_index) + helper.wait_for_condition(lambda: tree.selectedIndexes() and tree.selectedIndexes()[0] == node_index, GENERAL_WAIT) + Report.result(Tests.node_selected, tree.selectedIndexes()[0] == node_index) + + # 6) Close Script Canvas
window + general.close_pane("Script Canvas") + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(NodePalette_SelectNode) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/OnEntityActivatedDeactivated_PrintMessage.py b/AutomatedTesting/Gem/PythonTests/scripting/OnEntityActivatedDeactivated_PrintMessage.py new file mode 100644 index 0000000000..51b63c4268 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/OnEntityActivatedDeactivated_PrintMessage.py @@ -0,0 +1,184 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +Test case ID: T92569253 // T92569254 +Test Case Title: On Entity Activated // On Entity Deactivated +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92569253 // https://testrail.agscollab.com/index.php?/tests/view/92569254 +""" + + +# fmt: off +class Tests(): + level_created = ("Successfully created temp level", "Failed to create temp level") + controller_exists = ("Successfully found controller entity", "Failed to find controller entity") + activated_exists = ("Successfully found activated entity", "Failed to find activated entity") + deactivated_exists = ("Successfully found deactivated entity", "Failed to find deactivated entity") + start_states_correct = ("Start states set up successfully", "Start states set up incorrectly") + game_mode_entered = ("Successfully entered game mode", "Failed to enter game mode") + lines_found = ("Successfully found expected prints", "Failed to find expected prints") + game_mode_exited = ("Successfully exited game mode", "Failed to exit game mode") +# fmt: on + + +def OnEntityActivatedDeactivated_PrintMessage(): + """ + Summary: + Verify that the On Entity Activated and On Entity Deactivated nodes work as expected + + Expected Behavior: + Upon entering game mode, the Controller entity will wait 1 second and then activate the ActivationTest + entity. The script attached to ActivationTest will print out a message on activation. The Controller + will also deactivate the DeactivationTest entity, which should print a message. + + Test Steps: + 1) Create temp level + 2) Setup the level + 3) Validate the entities + 4) Start the Tracer + 5) Enter Game Mode + 6) Validate Print message + 7) Exit game mode + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results.
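The steps above lean on the print capture this change adds to Tracer: while the context manager is active, every OnPrintf message lands in section_tracer.prints. A minimal sketch of the shared wait-then-verify pattern, assuming the suite's utils module and a placeholder expected line (this only runs inside the Editor's Python interpreter):

from utils import Report
from utils import TestHelper as helper
from utils import Tracer

EXPECTED_LINES = ["some expected console line"]  # placeholder for illustration

def lines_printed(tracer, expected):
    # PrintInfo.message typically carries a trailing newline, so strip before comparing.
    found = [print_info.message.strip() for print_info in tracer.prints]
    return all(line in found for line in expected)

with Tracer() as tracer:
    # Poll instead of sleeping a fixed time; wait_for_condition returns False on timeout.
    helper.wait_for_condition(lambda: lines_printed(tracer, EXPECTED_LINES), 3.0)
    Report.result(("Expected prints found", "Expected prints not found"), lines_printed(tracer, EXPECTED_LINES))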
+ + :return: None + """ + import os + + from utils import TestHelper as helper + from editor_entity_utils import EditorEntity as Entity + from utils import Report + from utils import Tracer + + import azlmbr.asset + import azlmbr.bus + import azlmbr.globals + import azlmbr.math + import azlmbr.legacy.general as general + + EditorEntity = str # alias used only for readability in the type hints below + LEVEL_NAME = "tmp_level" + WAIT_TIME = 3.0 # SECONDS + EXPECTED_LINES = ["Activator Script: Activated", "Deactivator Script: Deactivated"] + controller_dict = { + "name": "Controller", + "status": "active", + "path": os.path.join("ScriptCanvas", "OnEntityActivatedScripts", "controller.scriptcanvas") + } + activated_dict = { + "name": "ActivationTest", + "status": "inactive", + "path": os.path.join("ScriptCanvas", "OnEntityActivatedScripts", "activator.scriptcanvas") + } + deactivated_dict = { + "name": "DeactivationTest", + "status": "active", + "path": os.path.join("ScriptCanvas", "OnEntityActivatedScripts", "deactivator.scriptcanvas") + } + + def get_asset(asset_path): + return azlmbr.asset.AssetCatalogRequestBus(azlmbr.bus.Broadcast, "GetAssetIdByPath", asset_path, azlmbr.math.Uuid(), False) + + def setup_level(): + def create_editor_entity(entity_dict: dict, entity_to_activate: EditorEntity = None, entity_to_deactivate: EditorEntity = None) -> EditorEntity: + entity = Entity.create_editor_entity(entity_dict["name"]) + entity.set_start_status(entity_dict["status"]) + sc_component = entity.add_component("Script Canvas") + sc_component.set_component_property_value("Script Canvas Asset|Script Canvas Asset", get_asset(entity_dict["path"])) + + if entity_dict["name"] == "Controller": + sc_component.get_property_tree() + sc_component.set_component_property_value("Properties|Variable Fields|Variables|[0]|Name,Value|Datum|Datum|EntityToActivate", entity_to_activate.id) + sc_component.set_component_property_value("Properties|Variable Fields|Variables|[1]|Name,Value|Datum|Datum|EntityToDeactivate", entity_to_deactivate.id) + return entity + + activated = create_editor_entity(activated_dict) + deactivated = create_editor_entity(deactivated_dict) + create_editor_entity(controller_dict, activated, deactivated) + + def validate_entity_exist(entity_name: str, test_tuple: tuple): + """ + Validate the entity with the given name exists in the level + :return: entity: editor entity object + """ + entity = Entity.find_editor_entity(entity_name) + Report.critical_result(test_tuple, entity.id.IsValid()) + return entity + + def validate_start_state(entity: EditorEntity, expected_state: str): + """ + Validate that the starting state of the entity is correct; if it isn't, attempt to rectify and recheck.
+ :return: bool: Whether state is set as expected + """ + state_options = { + "active": azlmbr.globals.property.EditorEntityStartStatus_StartActive, + "inactive": azlmbr.globals.property.EditorEntityStartStatus_StartInactive, + "editor": azlmbr.globals.property.EditorEntityStartStatus_EditorOnly, + } + if expected_state.lower() not in state_options.keys(): + raise ValueError(f"{expected_state} is an invalid option; valid options: active, inactive, or editor.") + + state = entity.get_start_status() + if state != state_options[expected_state]: + # If state fails to set, set_start_status will assert + entity.set_start_status(expected_state) + return True + + def validate_entities_in_level(): + controller = validate_entity_exist(controller_dict["name"], Tests.controller_exists) + state1_correct = validate_start_state(controller, controller_dict["status"]) + + act_tester = validate_entity_exist(activated_dict["name"], Tests.activated_exists) + state2_correct = validate_start_state(act_tester, activated_dict["status"]) + + deac_tester = validate_entity_exist(deactivated_dict["name"], Tests.deactivated_exists) + state3_correct = validate_start_state(deac_tester, deactivated_dict["status"]) + + all_states_correct = state1_correct and state2_correct and state3_correct + Report.critical_result(Tests.start_states_correct, all_states_correct) + + def locate_expected_lines(line_list: list): + found_lines = [printInfo.message.strip() for printInfo in section_tracer.prints] + return all(line in found_lines for line in line_list) + + # 1) Create temp level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Setup the level + setup_level() + + # 3) Validate the entities + validate_entities_in_level() + + # 4) Start the Tracer + with Tracer() as section_tracer: + + # 5) Enter Game Mode + helper.enter_game_mode(Tests.game_mode_entered) + + # 6) Validate Print message + helper.wait_for_condition(lambda: locate_expected_lines(EXPECTED_LINES), WAIT_TIME) + + Report.result(Tests.lines_found, locate_expected_lines(EXPECTED_LINES)) + + # 7) Exit game mode + helper.exit_game_mode(Tests.game_mode_exited) + + +if __name__ == "__main__": + import ImportPathHelper as imports + imports.init() + + from utils import Report + + Report.start_test(OnEntityActivatedDeactivated_PrintMessage) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Opening_Closing_Pane.py b/AutomatedTesting/Gem/PythonTests/scripting/Opening_Closing_Pane.py index 6100285092..666f052240 100755 --- a/AutomatedTesting/Gem/PythonTests/scripting/Opening_Closing_Pane.py +++ b/AutomatedTesting/Gem/PythonTests/scripting/Opening_Closing_Pane.py @@ -17,7 +17,6 @@ URLs of the test case: https://testrail.agscollab.com/index.php?/cases/view/1702 # fmt: off class Tests(): - open_sc_window = ("Script Canvas window is opened", "Failed to open Script Canvas window") default_visible = ("All the panes visible by default", "One or more panes do not visible by default") open_panes = ("All the Panes opened successfully", "Failed to open one or more panes") close_pane = ("All the Panes closed successfully", "Failed to close one or more panes") @@ -49,11 +48,6 @@ def Opening_Closing_Pane(): :return: None """ - # Helper imports - import ImportPathHelper as imports - - imports.init() - from editor_python_test_tools.utils import 
Report from editor_python_test_tools.utils import TestHelper as helper import editor_python_test_tools.pyside_utils as pyside_utils @@ -82,8 +76,7 @@ def Opening_Closing_Pane(): # 1) Open Script Canvas window (Tools > Script Canvas) general.open_pane("Script Canvas") - is_sc_visible = helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) - Report.result(Tests.open_sc_window, is_sc_visible) + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) # 2) Restore default layout editor_window = pyside_utils.get_editor_main_window() diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Pane_RetainOnSCRestart.py b/AutomatedTesting/Gem/PythonTests/scripting/Pane_RetainOnSCRestart.py new file mode 100644 index 0000000000..fa5e9e6068 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/Pane_RetainOnSCRestart.py @@ -0,0 +1,164 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +Test case ID: C1702821 // C1702832 +Test Case Title: Retain visibility, size and location upon Script Canvas restart +URLs of the test case: https://testrail.agscollab.com/index.php?/cases/view/1702821 and + https://testrail.agscollab.com/index.php?/cases/view/1702832 +""" + + +# fmt: off +class Tests(): + relaunch_sc = ("Script Canvas window is relaunched", "Failed to relaunch Script Canvas window") + test_panes_visible = ("All the test panes are opened", "Failed to open one or more test panes") + close_pane_1 = ("Test pane 1 is closed", "Failed to close test pane 1") + visibility_retained = ("Test pane retained its visibility on SC restart", "Failed to retain visibility of test pane on SC restart") + resize_pane_3 = ("Test pane 3 resized successfully", "Failed to resize Test pane 3") + size_retained = ("Test pane retained its size on SC restart", "Failed to retain size of test pane on SC restart") + location_changed = ("Location of test pane 2 changed successfully", "Failed to change location of test pane 2") + location_retained = ("Test pane retained its location on SC restart", "Failed to retain location of test pane on SC restart") +# fmt: on + + +def Pane_RetainOnSCRestart(): + """ + Summary: + The Script Canvas window is opened to verify that Script Canvas panes retain their visibility, size and location + upon Script Canvas restart. + + Expected Behavior: + The Script Canvas panes retain their visibility, size and location upon Script Canvas restart. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Make sure test panes are open and visible + 3) Close test pane 1 + 4) Change dock location of test pane 2 + 5) Resize test pane 3 + 6) Relaunch Script Canvas + 7) Verify if test pane 1 retains its visibility + 8) Verify if location of test pane 2 is retained + 9) Verify if size of test pane 3 is retained + 10) Restore default layout and close SC window + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file.
+ Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + # Helper imports + from utils import Report + from utils import TestHelper as helper + import pyside_utils + + # Open 3D Engine Imports + import azlmbr.legacy.general as general + + # Pyside imports + from PySide2 import QtCore, QtWidgets + from PySide2.QtCore import Qt + + # Constants + TEST_PANE_1 = "NodePalette" # test visibility + TEST_PANE_2 = "VariableManager" # test location + TEST_PANE_3 = "NodeInspector" # test size + SCALE_INT = 10 # Random resize scale integer + DOCKAREA = Qt.TopDockWidgetArea # Preferred top area since no widget is docked on top + + def click_menu_option(window, option_text): + action = pyside_utils.find_child_by_pattern(window, {"text": option_text, "type": QtWidgets.QAction}) + action.trigger() + + def find_pane(window, pane_name): + return window.findChild(QtWidgets.QDockWidget, pane_name) + + # Test starts here + general.idle_enable(True) + + # 1) Open Script Canvas window (Tools > Script Canvas) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 3.0) + + # 2) Make sure test panes are open and visible + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + click_menu_option(sc, "Restore Default Layout") + test_pane_1 = sc.findChild(QtWidgets.QDockWidget, TEST_PANE_1) + test_pane_2 = sc.findChild(QtWidgets.QDockWidget, TEST_PANE_2) + test_pane_3 = sc.findChild(QtWidgets.QDockWidget, TEST_PANE_3) + + Report.result( + Tests.test_panes_visible, test_pane_1.isVisible() and test_pane_2.isVisible() and test_pane_3.isVisible() + ) + + # Initiate try block here to restore default in finally block + try: + # 3) Close test pane 1 + test_pane_1.close() + Report.result(Tests.close_pane_1, not test_pane_1.isVisible()) + + # 4) Change dock location of test pane 2 + sc_main = sc.findChild(QtWidgets.QMainWindow) + sc_main.addDockWidget(DOCKAREA, find_pane(sc_main, TEST_PANE_2), QtCore.Qt.Vertical) + Report.result(Tests.location_changed, sc_main.dockWidgetArea(find_pane(sc_main, TEST_PANE_2)) == DOCKAREA) + + # 5) Resize test pane 3 + initial_size = test_pane_3.frameSize() + test_pane_3.resize(initial_size.width() + SCALE_INT, initial_size.height() + SCALE_INT) + new_size = test_pane_3.frameSize() + resize_success = ( + abs(initial_size.width() - new_size.width()) == abs(initial_size.height() - new_size.height()) == SCALE_INT + ) + Report.result(Tests.resize_pane_3, resize_success) + + # 6) Relaunch Script Canvas + general.close_pane("Script Canvas") + helper.wait_for_condition(lambda: not general.is_pane_visible("Script Canvas"), 2.0) + + general.open_pane("Script Canvas") + sc_visible = helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + Report.result(Tests.relaunch_sc, sc_visible) + + # 7) Verify if test pane 1 retains its visibility + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + Report.result(Tests.visibility_retained, not find_pane(sc, TEST_PANE_1).isVisible()) + + # 8) Verify if location of test pane 2 is retained + sc_main = sc.findChild(QtWidgets.QMainWindow) + Report.result(Tests.location_retained, sc_main.dockWidgetArea(find_pane(sc_main, TEST_PANE_2)) == DOCKAREA) + + # 9) Verify if size of test pane 3 is retained + test_pane_3 = sc.findChild(QtWidgets.QDockWidget, TEST_PANE_3) + retained_size =
test_pane_3.frameSize() + retain_success = retained_size != initial_size + Report.result(Tests.size_retained, retain_success) + + finally: + # 10) Restore default layout and close SC window + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + click_menu_option(sc, "Restore Default Layout") + sc.close() + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(Pane_RetainOnSCRestart) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Resizing_Pane.py b/AutomatedTesting/Gem/PythonTests/scripting/Resizing_Pane.py index 9262e76cb0..180f577953 100755 --- a/AutomatedTesting/Gem/PythonTests/scripting/Resizing_Pane.py +++ b/AutomatedTesting/Gem/PythonTests/scripting/Resizing_Pane.py @@ -16,7 +16,6 @@ URLs of the test case: https://testrail.agscollab.com/index.php?/cases/view/1702 # fmt: off class Tests(): - open_sc_window = ("Script Canvas window is opened", "Failed to open Script Canvas window") open_pane = ("Pane opened successfully", "Failed to open pane") resize_pane = ("Pane window resized successfully", "Failed to resize pane window") # fmt: on @@ -46,11 +45,6 @@ def Resizing_Pane(): :return: None """ - # Helper imports - import ImportPathHelper as imports - - imports.init() - from editor_python_test_tools.utils import Report from editor_python_test_tools.utils import TestHelper as helper import editor_python_test_tools.pyside_utils as pyside_utils @@ -76,8 +70,7 @@ def Resizing_Pane(): # 1) Open Script Canvas window (Tools > Script Canvas) general.open_pane("Script Canvas") - is_sc_visible = helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) - Report.result(Tests.open_sc_window, is_sc_visible) + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) # 2) Restore default layout editor_window = pyside_utils.get_editor_main_window() diff --git a/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_ChangingAssets.py b/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_ChangingAssets.py new file mode 100644 index 0000000000..38d60ad871 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_ChangingAssets.py @@ -0,0 +1,116 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ +Test case ID: T92562986 +Test Case Title: Changing the assigned Script Canvas Asset on an entity properly updates +level functionality +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92562986 +""" + + +# fmt: off +class Tests(): + level_created = ("New level created", "New level not created") + entity_created = ("Test Entity created", "Test Entity not created") + game_mode_entered = ("Game Mode successfully entered", "Game mode failed to enter") + game_mode_exited = ("Game Mode successfully exited", "Game mode failed to exit") + found_lines = ("Expected log lines were found", "Expected log lines were not found") +# fmt: on + + +def ScriptCanvas_ChangingAssets(): + """ + Summary: + Changing the assigned Script Canvas Asset on an entity properly updates level functionality + + Expected Behavior: + When game mode is entered, respective strings of assigned assets should be printed + + Test Steps: + 1) Create temp level + 2) Create new entity + 3) Start Tracer + 4) Set first script and evaluate + 5) Set second script and evaluate + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + import os + + from utils import Report + from utils import TestHelper as helper + from utils import Tracer + import hydra_editor_utils as hydra + import azlmbr.legacy.general as general + import azlmbr.math as math + import azlmbr.asset as asset + import azlmbr.bus as bus + + LEVEL_NAME = "tmp_level" + ASSET_1 = os.path.join("scriptcanvas", "ScriptCanvas_TwoComponents0.scriptcanvas") + ASSET_2 = os.path.join("scriptcanvas", "ScriptCanvas_TwoComponents1.scriptcanvas") + EXP_LINE_1 = "Greetings from the first script" + EXP_LINE_2 = "Greetings from the second script" + WAIT_TIME = 3.0 # SECONDS + + def get_asset(asset_path): + return asset.AssetCatalogRequestBus(bus.Broadcast, "GetAssetIdByPath", asset_path, math.Uuid(), False) + + def find_expected_line(expected_line): + found_lines = [printInfo.message.strip() for printInfo in section_tracer.prints] + return expected_line in found_lines + + def set_asset_evaluate(test_entity, ASSET_PATH, EXP_LINE): + # Set Script Canvas asset on the entity's component + test_entity.get_set_test(0, "Script Canvas Asset|Script Canvas Asset", get_asset(ASSET_PATH)) + + # Enter/exit game mode + helper.enter_game_mode(Tests.game_mode_entered) + helper.wait_for_condition(lambda: find_expected_line(EXP_LINE), WAIT_TIME) + Report.result(Tests.found_lines, find_expected_line(EXP_LINE)) + helper.exit_game_mode(Tests.game_mode_exited) + + + # 1) Create temp level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Create new entity + position = math.Vector3(512.0, 512.0, 32.0) + test_entity = hydra.Entity("test_entity") + test_entity.create_entity(position, ["Script Canvas"]) + + # 3) Start Tracer + with Tracer() as section_tracer: + + # 4) Set first script and evaluate + set_asset_evaluate(test_entity, ASSET_1, EXP_LINE_1) + + # 5) Set second script and evaluate + set_asset_evaluate(test_entity, ASSET_2, EXP_LINE_2) + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + from utils
import Report + + Report.start_test(ScriptCanvas_ChangingAssets) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_TwoComponents.py b/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_TwoComponents.py new file mode 100644 index 0000000000..896bee96e5 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_TwoComponents.py @@ -0,0 +1,118 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +Test case ID: T92563190 +Test Case Title: A single Entity with two Script Canvas components works properly +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92563190 +""" + + +# fmt: off +class Tests(): + level_created = ("New level created", "New level not created") + game_mode_entered = ("Game Mode successfully entered", "Game mode failed to enter") + game_mode_exited = ("Game Mode successfully exited", "Game mode failed to exit") + found_lines = ("Expected log lines were found", "Expected log lines were not found") +# fmt: on + + +class LogLines: + expected_lines = ["Greetings from the first script", "Greetings from the second script"] + + +def ScriptCanvas_TwoComponents(): + """ + Summary: + A test entity contains two Script Canvas components, each assigned its own unique Script Canvas file. + Each of these files will have a print node set to activate on graph start. + + Expected Behavior: + When game mode is entered, two unique strings should be printed out to the console + + Test Steps: + 1) Create level + 2) Create entity with SC components + 3) Start Tracer + 4) Enter game mode + 5) Wait for expected lines to be found + 6) Report if expected lines were found + 7) Exit game mode + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results.
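Since both components on the entity below are of the same type, they are addressed purely by component index. A short sketch of that addressing, assuming the hydra_editor_utils.Entity helper used in this suite; the asset paths here are placeholders:

import hydra_editor_utils as hydra
import azlmbr.asset as asset
import azlmbr.bus as bus
import azlmbr.math as math

def get_asset(path):
    # Resolve a source path to an AssetId through the asset catalog, as the tests below do.
    return asset.AssetCatalogRequestBus(bus.Broadcast, "GetAssetIdByPath", path, math.Uuid(), False)

entity = hydra.Entity("test_entity")
# Two components of the same type: index 0 is the first "Script Canvas", index 1 the second.
entity.create_entity(math.Vector3(512.0, 512.0, 32.0), ["Script Canvas", "Script Canvas"])
# get_set_test sets the property on the component at that index and verifies the value reads back.
entity.get_set_test(0, "Script Canvas Asset|Script Canvas Asset", get_asset("scriptcanvas/first.scriptcanvas"))   # placeholder path
entity.get_set_test(1, "Script Canvas Asset|Script Canvas Asset", get_asset("scriptcanvas/second.scriptcanvas"))  # placeholder path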
+ + :return: None + """ + import os + + from utils import TestHelper as helper + import hydra_editor_utils as hydra + from utils import Report + from utils import Tracer + import azlmbr.legacy.general as general + import azlmbr.math as math + import azlmbr.asset as asset + import azlmbr.bus as bus + + LEVEL_NAME = "tmp_level" + ASSET_1 = os.path.join("scriptcanvas", "ScriptCanvas_TwoComponents0.scriptcanvas") + ASSET_2 = os.path.join("scriptcanvas", "ScriptCanvas_TwoComponents1.scriptcanvas") + WAIT_TIME = 3.0 # SECONDS + + def get_asset(asset_path): + return asset.AssetCatalogRequestBus(bus.Broadcast, "GetAssetIdByPath", asset_path, math.Uuid(), False) + + def locate_expected_lines(): + found_lines = [] + for printInfo in section_tracer.prints: + found_lines.append(printInfo.message.strip()) + + return all(line in found_lines for line in LogLines.expected_lines) + + # 1) Create level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Create entity with SC components + position = math.Vector3(512.0, 512.0, 32.0) + test_entity = hydra.Entity("test_entity") + test_entity.create_entity(position, ["Script Canvas", "Script Canvas"]) + test_entity.get_set_test(0, "Script Canvas Asset|Script Canvas Asset", get_asset(ASSET_1)) + test_entity.get_set_test(1, "Script Canvas Asset|Script Canvas Asset", get_asset(ASSET_2)) + + # 3) Start Tracer + with Tracer() as section_tracer: + + # 4) Enter game mode + helper.enter_game_mode(Tests.game_mode_entered) + + # 5) Wait for expected lines to be found + helper.wait_for_condition(locate_expected_lines, WAIT_TIME) + + # 6) Report if expected lines were found + Report.result(Tests.found_lines, locate_expected_lines()) + + # 7) Exit game mode + helper.exit_game_mode(Tests.game_mode_exited) + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(ScriptCanvas_TwoComponents) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_TwoEntities.py b/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_TwoEntities.py new file mode 100644 index 0000000000..401e6c0271 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/ScriptCanvas_TwoEntities.py @@ -0,0 +1,107 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ +Test case ID: T92563191 +Test Case Title: Two Entities can use the same Graph asset successfully at RunTime +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92563191 +""" + + +# fmt: off +class Tests(): + level_created = ("New level created", "New level not created") + game_mode_entered = ("Game Mode successfully entered", "Game mode failed to enter") + game_mode_exited = ("Game Mode successfully exited", "Game mode failed to exit") + found_lines = ("Expected log lines were found", "Expected log lines were not found") +# fmt: on + + +def ScriptCanvas_TwoEntities(): + """ + Summary: + Two Entities can use the same Graph asset successfully at RunTime. The Script Canvas asset + attached to the entities will print the respective entity names. + + Expected Behavior: + When game mode is entered, respective strings of different entities should be printed. + + Test Steps: + 1) Create temp level + 2) Create two new entities with different names + 3) Set ScriptCanvas asset to both the entities + 4) Enter/Exit game mode and verify log lines + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + + import os + + from utils import Report + from utils import TestHelper as helper + from utils import Tracer + import hydra_editor_utils as hydra + import azlmbr.legacy.general as general + import azlmbr.math as math + import azlmbr.asset as asset + import azlmbr.bus as bus + + LEVEL_NAME = "tmp_level" + ASSET_PATH = os.path.join("scriptcanvas", "T92563191_test.scriptcanvas") + EXPECTED_LINES = ["Entity Name: test_entity_1", "Entity Name: test_entity_2"] + WAIT_TIME = 0.5 # SECONDS + + def get_asset(asset_path): + return asset.AssetCatalogRequestBus(bus.Broadcast, "GetAssetIdByPath", asset_path, math.Uuid(), False) + + # 1) Create temp level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Create two new entities with different names + position = math.Vector3(512.0, 512.0, 32.0) + test_entity_1 = hydra.Entity("test_entity_1") + test_entity_1.create_entity(position, ["Script Canvas"]) + + test_entity_2 = hydra.Entity("test_entity_2") + test_entity_2.create_entity(position, ["Script Canvas"]) + + # 3) Set ScriptCanvas asset to both the entities + test_entity_1.get_set_test(0, "Script Canvas Asset|Script Canvas Asset", get_asset(ASSET_PATH)) + test_entity_2.get_set_test(0, "Script Canvas Asset|Script Canvas Asset", get_asset(ASSET_PATH)) + + # 4) Enter/Exit game mode and verify log lines + with Tracer() as section_tracer: + + helper.enter_game_mode(Tests.game_mode_entered) + # wait for WAIT_TIME to let the script print strings + general.idle_wait(WAIT_TIME) + helper.exit_game_mode(Tests.game_mode_exited) + + found_lines = [printInfo.message.strip() for printInfo in section_tracer.prints] + result = all(line in found_lines for line in EXPECTED_LINES) + + Report.result(Tests.found_lines, result) + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + from utils import Report + + Report.start_test(ScriptCanvas_TwoEntities) diff --git
a/AutomatedTesting/Gem/PythonTests/scripting/ScriptEvents_SendReceiveAcrossMultiple.py b/AutomatedTesting/Gem/PythonTests/scripting/ScriptEvents_SendReceiveAcrossMultiple.py new file mode 100644 index 0000000000..d671229cdf --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/ScriptEvents_SendReceiveAcrossMultiple.py @@ -0,0 +1,120 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +Test case ID: T92567321 +Test Case Title: Script Events: Can send and receive a script event across multiple entities successfully +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92567321 +""" + + +# fmt: off +class Tests(): + level_created = ("Successfully created temporary level", "Failed to create temporary level") + entitya_created = ("Successfully created EntityA", "Failed to create EntityA") + entityb_created = ("Successfully created EntityB", "Failed to create EntityB") + enter_game_mode = ("Successfully entered game mode", "Failed to enter game mode") + lines_found = ("Successfully found expected message", "Failed to find expected message") + exit_game_mode = ("Successfully exited game mode", "Failed to exit game mode") +# fmt: on + + +def ScriptEvents_SendReceiveAcrossMultiple(): + """ + Summary: + EntityA and EntityB will be created in a level. Attached to both will be a Script Canvas component. The Script Event created for the test will be sent from EntityA to EntityB. + + Expected Behavior: + The output of the Script Event should be printed to the console + + Test Steps: + 1) Create test level + 2) Create EntityA/EntityB (add scriptcanvas files part of entity setup) + 3) Start Tracer + 4) Enter Game Mode + 5) Read for line + 6) Exit Game Mode + + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. 
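Step 2 below drives both entities through one helper and derives each Tests tuple from the entity name; a short sketch of that lookup, where getattr is an equivalent, slightly more conventional spelling of the Tests.__dict__ access used in the code:

class Tests:
    entitya_created = ("Successfully created EntityA", "Failed to create EntityA")
    entityb_created = ("Successfully created EntityB", "Failed to create EntityB")

def result_tuple_for(name):
    # "EntityA" -> Tests.entitya_created
    return getattr(Tests, name.lower() + "_created")

assert result_tuple_for("EntityA") is Tests.entitya_created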
+ + :return: None + """ + import os + + from editor_entity_utils import EditorEntity as Entity + from utils import Report + from utils import TestHelper as helper + from utils import Tracer + + import azlmbr.asset + import azlmbr.bus + import azlmbr.math + import azlmbr.legacy.general as general + + LEVEL_NAME = "tmp_level" + WAIT_TIME = 3.0 + ASSET_PREFIX = "T92567321" + asset_paths = { + "event": os.path.join("TestAssets", f"{ASSET_PREFIX}.scriptevents"), + "assetA": os.path.join("ScriptCanvas", f"{ASSET_PREFIX}A.scriptcanvas"), + "assetB": os.path.join("ScriptCanvas", f"{ASSET_PREFIX}B.scriptcanvas"), + } + sc_for_entities = { + "EntityA": asset_paths["assetA"], + "EntityB": asset_paths["assetB"] + } + EXPECTED_LINES = ["Incoming Message Received"] + + def get_asset(asset_path): + return azlmbr.asset.AssetCatalogRequestBus(azlmbr.bus.Broadcast, "GetAssetIdByPath", asset_path, azlmbr.math.Uuid(), False) + + def create_editor_entity(name, sc_asset): + entity = Entity.create_editor_entity(name) + sc_comp = entity.add_component("Script Canvas") + sc_comp.set_component_property_value("Script Canvas Asset|Script Canvas Asset", get_asset(sc_asset)) + Report.critical_result(Tests.__dict__[name.lower()+"_created"], entity.id.IsValid()) + + def locate_expected_lines(line_list: list): + found_lines = [printInfo.message.strip() for printInfo in section_tracer.prints] + + return all(line in found_lines for line in line_list) + + # 1) Create temp level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Create EntityA/EntityB + for name, sc_asset in sc_for_entities.items(): + create_editor_entity(name, sc_asset) + + # 3) Start Tracer + with Tracer() as section_tracer: + + # 4) Enter Game Mode + helper.enter_game_mode(Tests.enter_game_mode) + + # 5) Read for line + lines_located = helper.wait_for_condition(lambda: locate_expected_lines(EXPECTED_LINES), WAIT_TIME) + Report.result(Tests.lines_found, lines_located) + + # 6) Exit Game Mode + helper.exit_game_mode(Tests.exit_game_mode) + + +if __name__ == "__main__": + import ImportPathHelper as imports + imports.init() + + from utils import Report + + Report.start_test(ScriptEvents_SendReceiveAcrossMultiple) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/ScriptEvents_SendReceiveSuccessfully.py b/AutomatedTesting/Gem/PythonTests/scripting/ScriptEvents_SendReceiveSuccessfully.py new file mode 100644 index 0000000000..b5e26d14ae --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/ScriptEvents_SendReceiveSuccessfully.py @@ -0,0 +1,110 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ +Test case ID: T92567320 +Test Case Title: Script Events: Can send and receive a script event successfully +URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92567320 +""" + + +# fmt: off +class Tests(): + level_created = ("Successfully created temporary level", "Failed to create temporary level") + entity_created = ("Successfully created test entity", "Failed to create test entity") + enter_game_mode = ("Successfully entered game mode", "Failed to enter game mode") + lines_found = ("Successfully found expected message", "Failed to find expected message") + exit_game_mode = ("Successfully exited game mode", "Failed to exit game mode") +# fmt: on + + +def ScriptEvents_SendReceiveSuccessfully(): + """ + Summary: + An entity exists in the level that contains a Script Canvas component. In the graph is both a Send Event + and a Receive Event. + + Expected Behavior: + After entering game mode the graph on the entity should print an expected message to the console + + Test Steps: + 1) Create test level + 2) Create test entity + 3) Start Tracer + 4) Enter Game Mode + 5) Read for line + 6) Exit Game Mode + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. + + :return: None + """ + import os + from editor_entity_utils import EditorEntity as Entity + from utils import Report + from utils import TestHelper as helper + from utils import Tracer + + import azlmbr.legacy.general as general + import azlmbr.asset as asset + import azlmbr.math as math + import azlmbr.bus as bus + + LEVEL_NAME = "tmp_level" + WAIT_TIME = 3.0 # SECONDS + EXPECTED_LINES = ["T92567320: Message Received"] + SC_ASSET_PATH = os.path.join("ScriptCanvas", "T92567320.scriptcanvas") + + def create_editor_entity(name, sc_asset): + entity = Entity.create_editor_entity(name) + sc_comp = entity.add_component("Script Canvas") + asset_id = asset.AssetCatalogRequestBus(bus.Broadcast, "GetAssetIdByPath", sc_asset, math.Uuid(), False) + sc_comp.set_component_property_value("Script Canvas Asset|Script Canvas Asset", asset_id) + Report.critical_result(Tests.entity_created, entity.id.IsValid()) + + def locate_expected_lines(line_list: list): + found_lines = [printInfo.message.strip() for printInfo in section_tracer.prints] + + return all(line in found_lines for line in line_list) + + # 1) Create temp level + general.idle_enable(True) + result = general.create_level_no_prompt(LEVEL_NAME, 128, 1, 512, True) + Report.critical_result(Tests.level_created, result == 0) + helper.wait_for_condition(lambda: general.get_current_level_name() == LEVEL_NAME, WAIT_TIME) + general.close_pane("Error Report") + + # 2) Create test entity + create_editor_entity("TestEntity", SC_ASSET_PATH) + + # 3) Start Tracer + with Tracer() as section_tracer: + + # 4) Enter Game Mode + helper.enter_game_mode(Tests.enter_game_mode) + + # 5) Read for line + lines_located = helper.wait_for_condition(lambda: locate_expected_lines(EXPECTED_LINES), WAIT_TIME) + Report.result(Tests.lines_found, lines_located) + + # 6) Exit Game Mode + helper.exit_game_mode(Tests.exit_game_mode) + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(ScriptEvents_SendReceiveSuccessfully) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/TestSuite_Active.py
b/AutomatedTesting/Gem/PythonTests/scripting/TestSuite_Active.py index 8c34f29ebf..d87f2986bf 100755 --- a/AutomatedTesting/Gem/PythonTests/scripting/TestSuite_Active.py +++ b/AutomatedTesting/Gem/PythonTests/scripting/TestSuite_Active.py @@ -12,22 +12,243 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. import pytest import os import sys +sys.path.append(os.path.dirname(__file__)) +import ImportPathHelper as imports +imports.init() + +import hydra_test_utils as hydra +import ly_test_tools.environment.file_system as file_system from ly_test_tools import LAUNCHERS +from base import TestAutomationBase -sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../automatedtesting_shared') +TEST_DIRECTORY = os.path.dirname(__file__) -from base import TestAutomationBase @pytest.mark.SUITE_periodic @pytest.mark.parametrize("launcher_platform", ['windows_editor']) @pytest.mark.parametrize("project", ["AutomatedTesting"]) class TestAutomation(TestAutomationBase): - + @pytest.mark.test_case_id("C1702834", "C1702823") + def test_Opening_Closing_Pane(self, request, workspace, editor, launcher_platform): + from . import Opening_Closing_Pane as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("C1702824") def test_Docking_Pane(self, request, workspace, editor, launcher_platform): from . import Docking_Pane as test_module self._run_test(request, workspace, editor, test_module) + @pytest.mark.test_case_id("C1702829") def test_Resizing_Pane(self, request, workspace, editor, launcher_platform): from . import Resizing_Pane as test_module self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92563190") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_ScriptCanvas_TwoComponents(self, request, workspace, editor, launcher_platform, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . import ScriptCanvas_TwoComponents as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92562986") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_ScriptCanvas_ChangingAssets(self, request, workspace, editor, launcher_platform, project, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . import ScriptCanvas_ChangingAssets as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92569079", "T92569081") + def test_Graph_ZoomInZoomOut(self, request, workspace, editor, launcher_platform): + from . import Graph_ZoomInZoomOut as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92568940") + def test_NodePalette_SelectNode(self, request, workspace, editor, launcher_platform): + from . 
import NodePalette_SelectNode as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92569253") + @pytest.mark.test_case_id("T92569254") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_OnEntityActivatedDeactivated_PrintMessage(self, request, workspace, editor, launcher_platform, project, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . import OnEntityActivatedDeactivated_PrintMessage as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92562993") + def test_NodePalette_ClearSelection(self, request, workspace, editor, launcher_platform, project): + from . import NodePalette_ClearSelection as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92563191") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_ScriptCanvas_TwoEntities(self, request, workspace, editor, launcher_platform, project, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . import ScriptCanvas_TwoEntities as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92569013") + def test_AssetEditor_CreateScriptEventFile(self, request, workspace, editor, launcher_platform, project): + def teardown(): + file_system.delete( + [os.path.join(workspace.paths.project(), "ScriptCanvas", "test_file.scriptevent")], True, True + ) + request.addfinalizer(teardown) + file_system.delete( + [os.path.join(workspace.paths.project(), "ScriptCanvas", "test_file.scriptevent")], True, True + ) + from . import AssetEditor_CreateScriptEventFile as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92569165", "T92569167", "T92569168", "T92569170") + def test_Toggle_ScriptCanvasTools(self, request, workspace, editor, launcher_platform): + from . import Toggle_ScriptCanvasTools as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92568982") + def test_NodeInspector_RenameVariable(self, request, workspace, editor, launcher_platform, project): + from . import NodeInspector_RenameVariable as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92569137") + def test_Debugging_TargetMultipleGraphs(self, request, workspace, editor, launcher_platform, project): + from . import Debugging_TargetMultipleGraphs as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92568856") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_Debugging_TargetMultipleEntities(self, request, workspace, editor, launcher_platform, project, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . 
import Debugging_TargetMultipleEntities as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92569049", "T92569051") + def test_EditMenu_UndoRedo(self, request, workspace, editor, launcher_platform, project): + from . import EditMenu_UndoRedo as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("C1702825", "C1702831") + def test_UnDockedPane_CloseSCWindow(self, request, workspace, editor, launcher_platform): + from . import UnDockedPane_CloseSCWindow as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92562978") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_Entity_AddScriptCanvasComponent(self, request, workspace, editor, launcher_platform, project, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . import Entity_AddScriptCanvasComponent as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("C1702821", "C1702832") + def test_Pane_RetainOnSCRestart(self, request, workspace, editor, launcher_platform): + from . import Pane_RetainOnSCRestart as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92567321") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_ScriptEvents_SendReceiveAcrossMultiple(self, request, workspace, editor, launcher_platform, project, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . import ScriptEvents_SendReceiveAcrossMultiple as test_module + self._run_test(request, workspace, editor, test_module) + + @pytest.mark.test_case_id("T92567320") + @pytest.mark.parametrize("level", ["tmp_level"]) + def test_ScriptEvents_SendReceiveSuccessfully(self, request, workspace, editor, launcher_platform, project, level): + def teardown(): + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + request.addfinalizer(teardown) + file_system.delete([os.path.join(workspace.paths.project(), "Levels", level)], True, True) + from . import ScriptEvents_SendReceiveSuccessfully as test_module + self._run_test(request, workspace, editor, test_module) + +# NOTE: We had to use hydra_test_utils.py, as TestAutomationBase run_test method +# fails because of pyside_utils import +@pytest.mark.SUITE_periodic +@pytest.mark.parametrize("launcher_platform", ["windows_editor"]) +@pytest.mark.parametrize("project", ["AutomatedTesting"]) +class TestScriptCanvasTests(object): + """ + The following tests use hydra_test_utils.py to launch the editor and validate the results. 
+ """ + + @pytest.mark.test_case_id("T92569037", "T92569039") + def test_FileMenu_New_Open(self, request, editor, launcher_platform): + expected_lines = [ + "File->New action working as expected: True", + "File->Open action working as expected: True", + ] + hydra.launch_and_validate_results( + request, TEST_DIRECTORY, editor, "FileMenu_New_Open.py", expected_lines, auto_test_mode=False, timeout=60, + ) + + @pytest.mark.test_case_id("T92568942") + def test_AssetEditor_NewScriptEvent(self, request, editor, launcher_platform): + expected_lines = [ + "New Script event action found: True", + "Asset Editor opened: True", + "Asset Editor created with new asset: True", + "New Script event created in Asset Editor: True", + ] + hydra.launch_and_validate_results( + request, + TEST_DIRECTORY, + editor, + "AssetEditor_NewScriptEvent.py", + expected_lines, + auto_test_mode=False, + timeout=60, + ) + + @pytest.mark.test_case_id("T92563068", "T92563070") + def test_GraphClose_SavePrompt(self, request, editor, launcher_platform): + expected_lines = [ + "New graph created: True", + "Save prompt opened as expected: True", + "Close button worked as expected: True", + ] + hydra.launch_and_validate_results( + request, + TEST_DIRECTORY, + editor, + "GraphClose_SavePrompt.py", + expected_lines, + auto_test_mode=False, + timeout=60, + ) + + @pytest.mark.test_case_id("T92564789", "T92568873") + def test_VariableManager_CreateDeleteVars(self, request, editor, launcher_platform): + var_types = ["Boolean", "Color", "EntityID", "Number", "String", "Transform", "Vector2", "Vector3", "Vector4"] + expected_lines = [f"Success: {var_type} variable is created" for var_type in var_types] + expected_lines.extend([f"Success: {var_type} variable is deleted" for var_type in var_types]) + hydra.launch_and_validate_results( + request, + TEST_DIRECTORY, + editor, + "VariableManager_CreateDeleteVars.py", + expected_lines, + auto_test_mode=False, + timeout=60, + ) \ No newline at end of file diff --git a/AutomatedTesting/Gem/PythonTests/scripting/Toggle_ScriptCanvasTools.py b/AutomatedTesting/Gem/PythonTests/scripting/Toggle_ScriptCanvasTools.py new file mode 100644 index 0000000000..4024e28277 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/Toggle_ScriptCanvasTools.py @@ -0,0 +1,137 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. This file is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ +Test case ID: C92569165, C92569167, C92569168, C92569170 +Test Case Title: Tools > Node Palette toggles the Node Palette + Tools > Node Inspector toggles the Node Inspector + Tools > Bookmarks toggles the Bookmarks + Tools > Variable Manager toggles the Variable Manager + +URLs of the test case: https://testrail.agscollab.com/index.php?/cases/view/92569165 + https://testrail.agscollab.com/index.php?/cases/view/92569167 + https://testrail.agscollab.com/index.php?/cases/view/92569168 + https://testrail.agscollab.com/index.php?/cases/view/92569170 +""" + + +# fmt: off +class Tests(): + node_palette_opened = ("NodePalette is opened successfully", "Failed to open NodePalette") + node_inspector_opened = ("NodeInspector is opened successfully", "Failed to open NodeInspector") + bookmark_opened = ("Bookmarks is opened successfully", "Failed to open Bookmarks") + variable_manager_opened = ("VariableManager is opened successfully", "Failed to open VariableManager") + node_palette_closed_by_start = ("NodePalette is closed successfully", "Failed to close NodePalette") + node_inspector_closed_by_start = ("NodeInspector is closed successfully", "Failed to close NodeInspector") + bookmark_closed_by_start = ("Bookmarks is closed successfully", "Failed to close Bookmarks") + variable_manager_closed_by_start = ("VariableManager is closed successfully", "Failed to close VariableManager") + node_palette_closed_by_end = ("NodePalette is closed successfully", "Failed to close NodePalette") + node_inspector_closed_by_end = ("NodeInspector is closed successfully", "Failed to close NodeInspector") + bookmark_closed_by_end = ("Bookmarks is closed successfully", "Failed to close Bookmarks") + variable_manager_closed_by_end = ("VariableManager is closed successfully", "Failed to close VariableManager") +# fmt: on + + +def Toggle_ScriptCanvasTools(): + """ + Summary: + Toggle Node Palette, Node Inspector, Bookmarks and Variable Manager in Script Canvas. + Make sure each pane opens and closes successfully. + + Expected Behavior: + Each pane opens and closes successfully. + + Test Steps: + 1) Open Script Canvas window (Tools > Script Canvas) + 2) Make sure Node Palette, Node Inspector, Bookmarks and Variable Manager panes are closed in Script Canvas window + 3) Open Node Palette, Node Inspector, Bookmarks and Variable Manager in Script Canvas window + 4) Close Node Palette, Node Inspector, Bookmarks and Variable Manager in Script Canvas window + 5) Restore default layout + 6) Close Script Canvas window + + Note: + - This test file must be called from the Open 3D Engine Editor command terminal + - Any passed and failed tests are written to the Editor.log file. + Parsing the file or running a log_monitor are required to observe the test results. 
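Every open/close check in the test below reduces to two Qt lookups: the menu entry is a QAction and the pane is a QDockWidget, both found by text or object name. A minimal sketch of that pattern, assuming pyside_utils as used throughout this suite (Editor-only):

from PySide2 import QtWidgets
import pyside_utils

editor_window = pyside_utils.get_editor_main_window()
sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas")

# Menu entries are plain QActions; triggering one toggles the corresponding pane.
action = pyside_utils.find_child_by_pattern(sc, {"text": "Node Palette", "type": QtWidgets.QAction})
action.trigger()

# The pane itself is a QDockWidget looked up by its object name.
pane = sc.findChild(QtWidgets.QDockWidget, "NodePalette")
assert pane.isVisible()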
+ + :return: None + """ + + from utils import Report + from utils import TestHelper as helper + import pyside_utils + + # Open 3D Engine imports + import azlmbr.legacy.general as general + + # Pyside imports + from PySide2 import QtWidgets + + def click_menu_option(window, option_text): + action = pyside_utils.find_child_by_pattern(window, {"text": option_text, "type": QtWidgets.QAction}) + action.trigger() + + def find_pane(window, pane_name): + return window.findChild(QtWidgets.QDockWidget, pane_name) + + def close_tool(window, pane_widget, test_tuple): + pane = find_pane(window, pane_widget) + pane.close() + Report.result(test_tuple, not pane.isVisible()) + + def open_tool(window, pane_widget, tool, test_tuple): + pane = find_pane(window, pane_widget) + if not pane.isVisible(): + click_menu_option(window, tool) + pane = find_pane(window, pane_widget) + Report.result(test_tuple, pane.isVisible()) + + # Test starts here + general.idle_enable(True) + + # 1) Open Script Canvas window (Tools > Script Canvas) + general.open_pane("Script Canvas") + helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0) + + # 2) Make sure Node Palette, Node Inspector, Bookmarks and Variable Manager panes are closed in Script Canvas window + editor_window = pyside_utils.get_editor_main_window() + sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas") + close_tool(sc, "NodePalette", Tests.node_palette_closed_by_start) + close_tool(sc, "NodeInspector", Tests.node_inspector_closed_by_start) + close_tool(sc, "BookmarkDockWidget", Tests.bookmark_closed_by_start) + close_tool(sc, "VariableManager", Tests.variable_manager_closed_by_start) + + # 3) Open Node Palette, Node Inspector, Bookmarks and Variable Manager in Script Canvas window + open_tool(sc, "NodePalette", "Node Palette", Tests.node_palette_opened) + open_tool(sc, "NodeInspector", "Node Inspector", Tests.node_inspector_opened) + open_tool(sc, "BookmarkDockWidget", "Bookmarks", Tests.bookmark_opened) + open_tool(sc, "VariableManager", "Variable Manager", Tests.variable_manager_opened) + + # 4) Close Node Palette, Node Inspector, Bookmarks and Variable Manager in Script Canvas window + close_tool(sc, "NodePalette", Tests.node_palette_closed_by_end) + close_tool(sc, "NodeInspector", Tests.node_inspector_closed_by_end) + close_tool(sc, "BookmarkDockWidget", Tests.bookmark_closed_by_end) + close_tool(sc, "VariableManager", Tests.variable_manager_closed_by_end) + + # 5) Restore default layout + # Need this step to restore to default in case of test failure + click_menu_option(sc, "Restore Default Layout") + + # 6) Close Script Canvas window + sc.close() + + +if __name__ == "__main__": + import ImportPathHelper as imports + + imports.init() + + from utils import Report + + Report.start_test(Toggle_ScriptCanvasTools) diff --git a/AutomatedTesting/Gem/PythonTests/scripting/UnDockedPane_CloseSCWindow.py b/AutomatedTesting/Gem/PythonTests/scripting/UnDockedPane_CloseSCWindow.py new file mode 100644 index 0000000000..875f28ec95 --- /dev/null +++ b/AutomatedTesting/Gem/PythonTests/scripting/UnDockedPane_CloseSCWindow.py @@ -0,0 +1,128 @@ +""" +All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or +its licensors. + +For complete copyright and license terms please see the LICENSE at the root of this +distribution (the "License"). All use of this software is governed by the License, +or, if provided, by the license below or the license accompanying this file. Do not +remove or modify any license notices. 
diff --git a/AutomatedTesting/Gem/PythonTests/scripting/UnDockedPane_CloseSCWindow.py b/AutomatedTesting/Gem/PythonTests/scripting/UnDockedPane_CloseSCWindow.py
new file mode 100644
index 0000000000..875f28ec95
--- /dev/null
+++ b/AutomatedTesting/Gem/PythonTests/scripting/UnDockedPane_CloseSCWindow.py
@@ -0,0 +1,128 @@
+"""
+All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
+its licensors.
+
+For complete copyright and license terms please see the LICENSE at the root of this
+distribution (the "License"). All use of this software is governed by the License,
+or, if provided, by the license below or the license accompanying this file. Do not
+remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+
+Test case ID: C1702825, C1702831
+Test Case Title: Undocking a pane
+                 Closing Script Canvas with the pane floating
+URLs of the test case: https://testrail.agscollab.com/index.php?/cases/view/1702825
+                       https://testrail.agscollab.com/index.php?/cases/view/1702831
+"""
+
+
+# fmt: off
+class Tests():
+    undock_pane = ("Pane is undocked successfully", "Failed to undock pane")
+    close_sc_window = ("Script Canvas window is closed", "Failed to close Script Canvas window")
+    pane_closed = ("Pane is closed successfully", "Failed to close the pane")
+# fmt: on
+
+
+def UnDockedPane_CloseSCWindow():
+    """
+    Summary:
+    The Script Canvas window is opened with one of its panes undocked.
+    Verify that the undocked pane closes when the Script Canvas window is closed.
+
+    Expected Behavior:
+    The undocked pane closes when the Script Canvas window is closed.
+
+    Test Steps:
+    1) Open Script Canvas window (Tools > Script Canvas)
+    2) Undock Node Palette pane
+    3) Connect to the pane's visibility signal to verify the pane closed
+    4) Close Script Canvas window
+    5) Restore default layout
+
+    Note:
+    - This test file must be called from the Open 3D Engine Editor command terminal
+    - Any passed and failed tests are written to the Editor.log file.
+      Parsing the file or running a log_monitor is required to observe the test results.
+
+    :return: None
+    """
+
+    # Helper imports
+    from utils import Report
+    from utils import TestHelper as helper
+    import pyside_utils
+
+    # Open 3D Engine imports
+    import azlmbr.legacy.general as general
+
+    # Pyside imports
+    from PySide2 import QtWidgets
+
+    TEST_PANE = "NodePalette"  # Chosen as the most commonly used pane
+
+    def click_menu_option(window, option_text):
+        action = pyside_utils.find_child_by_pattern(window, {"text": option_text, "type": QtWidgets.QAction})
+        action.trigger()
+
+    def find_pane(window, pane_name):
+        return window.findChild(QtWidgets.QDockWidget, pane_name)
+
+    def on_top_level_changed():
+        # Reports True unconditionally: while this slot is connected, topLevelChanged
+        # only fires if the drag actually undocks the pane
+        Report.result(Tests.undock_pane, True)
+
+    def on_pane_closed():
+        # Reports True unconditionally: while this slot is connected, visibilityChanged
+        # only fires when the floating pane is torn down with the SC window
+        Report.result(Tests.pane_closed, True)
+
+    # Test starts here
+    general.idle_enable(True)
+
+    # 1) Open Script Canvas window (Tools > Script Canvas)
+    general.open_pane("Script Canvas")
+    helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0)
+
+    # 2) Undock Node Palette pane
+    # Make sure the Node Palette pane is opened
+    editor_window = pyside_utils.get_editor_main_window()
+    sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas")
+    pane = find_pane(sc, TEST_PANE)
+    if not pane.isVisible():
+        click_menu_option(sc, "Node Palette")
+        pane = find_pane(sc, TEST_PANE)  # New reference
+
+    # Drag/drop the pane over the graph; the graph does not accept docking, so the drop undocks the pane
+    try:
+        graph = find_pane(sc, "GraphCanvasEditorCentralWidget")
+        try:
+            pane.topLevelChanged.connect(on_top_level_changed)
+            pyside_utils.drag_and_drop(pane, graph)
+        finally:
+            pane.topLevelChanged.disconnect(on_top_level_changed)
+
+        # 3) Connect to the pane's visibility signal to verify the pane closed
+        # No need to disconnect this since the pane widget is deleted when the SC window closes
+        pane.visibilityChanged.connect(on_pane_closed)
+
+        # 4) Close Script Canvas window
+        sc.close()
+        # Wait for the Script Canvas pane to disappear; wait_for_condition
+        # returns True as soon as it is gone
+        is_sc_closed = helper.wait_for_condition(lambda: not general.is_pane_visible("Script Canvas"), 2.0)
+        Report.result(Tests.close_sc_window, is_sc_closed)
+
+    finally:
+        # 5) Restore default layout
+        # Reopen SC so the layout can be restored even if the test failed midway
+        general.open_pane("Script Canvas")
+        helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 5.0)
+        sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas")
+        click_menu_option(sc, "Restore Default Layout")
+        sc.close()
+
+
+if __name__ == "__main__":
+    import ImportPathHelper as imports
+
+    imports.init()
+
+    from utils import Report
+
+    Report.start_test(UnDockedPane_CloseSCWindow)
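
UnDockedPane_CloseSCWindow verifies state through Qt signals rather than polling: topLevelChanged proves the drag actually undocked the pane, and visibilityChanged proves the floating pane was torn down with the Script Canvas window. Below is a standalone PySide2 sketch of that signal pattern; QDockWidget.setFloating(True) stands in for pyside_utils.drag_and_drop, which is editor tooling rather than plain Qt, and the printed event list is illustrative.

import sys

from PySide2 import QtWidgets
from PySide2.QtCore import Qt

app = QtWidgets.QApplication(sys.argv)
window = QtWidgets.QMainWindow()
pane = QtWidgets.QDockWidget("Node Palette", window)
window.addDockWidget(Qt.LeftDockWidgetArea, pane)
window.show()

events = []

# topLevelChanged(bool) fires whenever the pane docks or undocks
pane.topLevelChanged.connect(lambda floating: events.append(("floating", floating)))
pane.setFloating(True)             # undock: emits topLevelChanged(True)
pane.topLevelChanged.disconnect()  # mirror the test: disconnect once verified

# visibilityChanged(bool) fires when the pane is shown or hidden
pane.visibilityChanged.connect(lambda visible: events.append(("visible", visible)))
window.close()                     # the floating pane goes down with its parent

print(events)  # expect something like [("floating", True), ("visible", False)]

Connecting a slot just before the action and disconnecting right after, as the test does around drag_and_drop, keeps stray emissions from polluting the result: every later dock or undock would also fire topLevelChanged.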
diff --git a/AutomatedTesting/Gem/PythonTests/scripting/VariableManager_CreateDeleteVars.py b/AutomatedTesting/Gem/PythonTests/scripting/VariableManager_CreateDeleteVars.py
new file mode 100644
index 0000000000..6facc324d4
--- /dev/null
+++ b/AutomatedTesting/Gem/PythonTests/scripting/VariableManager_CreateDeleteVars.py
@@ -0,0 +1,123 @@
+"""
+All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
+its licensors.
+
+For complete copyright and license terms please see the LICENSE at the root of this
+distribution (the "License"). All use of this software is governed by the License,
+or, if provided, by the license below or the license accompanying this file. Do not
+remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+
+Test case ID: T92564789
+Test Case Title: Each Variable type can be created
+URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92564789
+Test case ID: T92568873
+Test Case Title: Each Variable type can be deleted
+URL of the test case: https://testrail.agscollab.com/index.php?/tests/view/92568873
+"""
+
+
+def VariableManager_CreateDeleteVars():
+    """
+    Summary:
+    Each variable type can be created and deleted in the Variable Manager.
+
+    Expected Behavior:
+    Each variable type can be created and deleted in the Variable Manager.
+
+    Test Steps:
+    1) Open Script Canvas window (Tools > Script Canvas)
+    2) Get the SC window object
+    3) Open Variable Manager if not opened already
+    4) Create Graph
+    5) Create a variable of each type and verify it is created
+    6) Delete each variable and verify it is deleted
+    7) Close SC window
+
+    Note:
+    - This test file must be called from the Open 3D Engine Editor command terminal
+    - Any passed and failed tests are written to the Editor.log file.
+      Parsing the file or running a log_monitor is required to observe the test results.
+
+    :return: None
+    """
+
+    from PySide2 import QtWidgets, QtCore, QtTest
+    from PySide2.QtCore import Qt
+
+    from utils import Report
+    from utils import TestHelper as helper
+    import pyside_utils
+
+    # Open 3D Engine imports
+    import azlmbr.legacy.general as general
+
+    def generate_test_tuple(var_type, action):
+        return (f"{var_type} variable is {action}d", f"{var_type} variable is not {action}d")
+
+    # 1) Open Script Canvas window
+    general.idle_enable(True)
+    general.open_pane("Script Canvas")
+    helper.wait_for_condition(lambda: general.is_pane_visible("Script Canvas"), 10.0)
+
+    # 2) Get the SC window object
+    editor_window = pyside_utils.get_editor_main_window()
+    sc = editor_window.findChild(QtWidgets.QDockWidget, "Script Canvas")
+
+    # 3) Open Variable Manager if not opened already
+    if sc.findChild(QtWidgets.QDockWidget, "VariableManager") is None:
+        action = pyside_utils.find_child_by_pattern(sc, {"text": "Variable Manager", "type": QtWidgets.QAction})
+        action.trigger()
+    variable_manager = sc.findChild(QtWidgets.QDockWidget, "VariableManager")
+
+    # 4) Create Graph
+    action = pyside_utils.find_child_by_pattern(sc, {"objectName": "action_New_Script", "type": QtWidgets.QAction})
+    action.trigger()
+
+    graph_vars = variable_manager.findChild(QtWidgets.QTableView, "graphVariables")
+    var_types = ["Boolean", "Color", "EntityID", "Number", "String", "Transform", "Vector2", "Vector3", "Vector4"]
+
+    # 5) Create a variable of each type and verify it is created
+    for index, var_type in enumerate(var_types):
+        # Create a new variable
+        add_button = variable_manager.findChild(QtWidgets.QPushButton, "addButton")
+        add_button.click()  # Click on the Create Variable button
+        # Select the variable type
+        table_view = variable_manager.findChild(QtWidgets.QTableView, "variablePalette")
+        model_index = pyside_utils.find_child_by_pattern(table_view, var_type)
+        # Click on it to create the variable
+        pyside_utils.item_view_index_mouse_click(table_view, model_index)
+        # Verify the variable is created by checking:
+        # 1) the row count (adding one variable increases it by 1)
+        # 2) that a row named "Variable <n>" exists
+        # 3) the variable type, which sits in the column next to the variable name
+        result = graph_vars.model().rowCount(QtCore.QModelIndex()) == index + 1
+        var_mi = pyside_utils.find_child_by_pattern(graph_vars, f"Variable {index + 1}")
+        result = result and (var_mi is not None) and (var_mi.siblingAtColumn(1).data(Qt.DisplayRole) == var_type)
+        Report.result(generate_test_tuple(var_type, "create"), result)
+
+    # 6) Delete each variable and verify it is deleted
+    for index, var_type in enumerate(var_types):
+        # Deletion is verified through the row count
+        var_mi = pyside_utils.find_child_by_pattern(graph_vars, f"Variable {index + 1}")
+        pyside_utils.item_view_index_mouse_click(graph_vars, var_mi)
+        QtTest.QTest.keyClick(graph_vars, Qt.Key_Delete, Qt.NoModifier)
+        # Since one variable was deleted, the row count decreases by 1
+        result = graph_vars.model().rowCount(QtCore.QModelIndex()) == len(var_types) - (index + 1)
+        Report.result(generate_test_tuple(var_type, "delete"), result)
+
+    # 7) Close SC window
+    general.close_pane("Script Canvas")
+
+
+if __name__ == "__main__":
+    import ImportPathHelper as imports
+
+    imports.init()
+    from utils import Report
+
+    Report.start_test(VariableManager_CreateDeleteVars)
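
All three tests gate their checks on helper.wait_for_condition from TestHelper. A plausible plain-Python reading of that helper is a poll-with-deadline loop like the sketch below. This is an assumption about its shape, not the real implementation: inside the editor, the real helper must also yield to the editor's idle loop between polls rather than simply sleeping.

import time


def wait_for_condition(condition, timeout=1.0, interval=0.05):
    """Poll `condition` until it returns truthy or `timeout` seconds elapse.

    Returns the final truthiness of `condition`, matching how the tests use it:
    result = wait_for_condition(lambda: pane is not None, 3.0)
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if condition():
            return True
        time.sleep(interval)  # assumption: the editor helper would pump idle frames here instead
    return bool(condition())  # one last check so a slow condition can still pass at the deadline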
diff --git a/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/activator.scriptcanvas b/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/activator.scriptcanvas
new file mode 100644
index 0000000000..89d86f0d54
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/activator.scriptcanvas
@@ -0,0 +1,778 @@
+[778 lines of serialized Script Canvas graph XML; the payload was lost in extraction and is not reproduced here]
diff --git a/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/controller.scriptcanvas b/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/controller.scriptcanvas
new file mode 100644
index 0000000000..83a899798e
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/controller.scriptcanvas
@@ -0,0 +1,1865 @@
+[1865 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/deactivator.scriptcanvas b/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/deactivator.scriptcanvas
new file mode 100644
index 0000000000..ca98ee303a
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/OnEntityActivatedScripts/deactivator.scriptcanvas
@@ -0,0 +1,766 @@
+[766 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/ScriptCanvas/ScriptCanvas_TwoComponents0.scriptcanvas b/AutomatedTesting/ScriptCanvas/ScriptCanvas_TwoComponents0.scriptcanvas
new file mode 100644
index 0000000000..af3afead19
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/ScriptCanvas_TwoComponents0.scriptcanvas
@@ -0,0 +1,365 @@
+[365 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/ScriptCanvas/ScriptCanvas_TwoComponents1.scriptcanvas b/AutomatedTesting/ScriptCanvas/ScriptCanvas_TwoComponents1.scriptcanvas
new file mode 100644
index 0000000000..0a16717520
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/ScriptCanvas_TwoComponents1.scriptcanvas
@@ -0,0 +1,365 @@
+[365 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/ScriptCanvas/T92563191_test.scriptcanvas b/AutomatedTesting/ScriptCanvas/T92563191_test.scriptcanvas
new file mode 100644
index 0000000000..f0797739a5
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/T92563191_test.scriptcanvas
@@ -0,0 +1,750 @@
+[750 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/ScriptCanvas/T92567320.scriptcanvas b/AutomatedTesting/ScriptCanvas/T92567320.scriptcanvas
new file mode 100644
index 0000000000..d509442435
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/T92567320.scriptcanvas
@@ -0,0 +1,1266 @@
+[1266 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/ScriptCanvas/T92567321A.scriptcanvas b/AutomatedTesting/ScriptCanvas/T92567321A.scriptcanvas
new file mode 100644
index 0000000000..644a5daa17
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/T92567321A.scriptcanvas
@@ -0,0 +1,778 @@
+[778 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/ScriptCanvas/T92567321B.scriptcanvas b/AutomatedTesting/ScriptCanvas/T92567321B.scriptcanvas
new file mode 100644
index 0000000000..54c83c2933
--- /dev/null
+++ b/AutomatedTesting/ScriptCanvas/T92567321B.scriptcanvas
@@ -0,0 +1,880 @@
+[880 lines of serialized Script Canvas graph XML; payload lost in extraction]
diff --git a/AutomatedTesting/TestAssets/T92567320.scriptevents b/AutomatedTesting/TestAssets/T92567320.scriptevents
new file mode 100644
index 0000000000..63cf20f2d0
--- /dev/null
+++ b/AutomatedTesting/TestAssets/T92567320.scriptevents
@@ -0,0 +1,116 @@
+[116 lines of serialized Script Events XML; payload lost in extraction]
diff --git a/AutomatedTesting/TestAssets/T92567321.scriptevents b/AutomatedTesting/TestAssets/T92567321.scriptevents
new file mode 100644
index 0000000000..6f9308ad02
--- /dev/null
+++ b/AutomatedTesting/TestAssets/T92567321.scriptevents
@@ -0,0 +1,166 @@
+[166 lines of serialized Script Events XML; payload lost in extraction]
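
Each Note block in the tests above says results are written to Editor.log and must be parsed, or watched by a log_monitor, to be observed. The sketch below shows the parsing option. It assumes, and this is an assumption rather than a documented format, that the pass/fail strings from each Tests tuple appear verbatim in the log, the way Report.result emits them; the log path is also a hypothetical stand-in.

import sys


def check_log(log_path, expected_lines, unexpected_lines):
    """Return True if every expected line appears in the log and no unexpected line does."""
    with open(log_path, "r", encoding="utf-8", errors="replace") as f:
        log = f.read()
    missing = [line for line in expected_lines if line not in log]
    found = [line for line in unexpected_lines if line in log]
    for line in missing:
        print(f"MISSING:    {line}")
    for line in found:
        print(f"UNEXPECTED: {line}")
    return not missing and not found


if __name__ == "__main__":
    ok = check_log(
        "Editor.log",  # hypothetical path; adjust to wherever the editor writes its log
        expected_lines=["VariableManager is opened successfully"],
        unexpected_lines=["Failed to open VariableManager"],
    )
    sys.exit(0 if ok else 1)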