Implement TIAF sequence reporting for MARS
Merge pull request #3010 from aws-lumberyard-dev/TIF/Runtime_merge
commit 01ed06c7e1
@@ -0,0 +1,28 @@
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

#pragma once

#include <TestImpactFramework/TestImpactClientSequenceReport.h>

#include <AzCore/std/string/string.h>

namespace TestImpact
{
    //! Serializes a regular sequence report to JSON format.
    AZStd::string SerializeSequenceReport(const Client::RegularSequenceReport& sequenceReport);

    //! Serializes a seed sequence report to JSON format.
    AZStd::string SerializeSequenceReport(const Client::SeedSequenceReport& sequenceReport);

    //! Serializes an impact analysis sequence report to JSON format.
    AZStd::string SerializeSequenceReport(const Client::ImpactAnalysisSequenceReport& sequenceReport);

    //! Serializes a safe impact analysis sequence report to JSON format.
    AZStd::string SerializeSequenceReport(const Client::SafeImpactAnalysisSequenceReport& sequenceReport);
} // namespace TestImpact
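
For context, a minimal sketch of how these overloads are meant to be driven (the report object comes from a finished runtime sequence run; the persistence step is illustrative and not part of this change):

    // Serialize a finished regular sequence run to pretty-printed JSON
    const AZStd::string json = TestImpact::SerializeSequenceReport(regularSequenceReport);
    // ... persist `json` wherever the reporting pipeline expects it, e.g. for the
    // Python MARS driver introduced later in this change
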
@@ -0,0 +1,81 @@
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

#pragma once

#include <AzCore/base.h>

namespace TestImpact
{
    namespace Policy
    {
        //! Policy for handling test targets that fail to execute (e.g. due to the binary not being found).
        //! @note Test targets that fail to execute will be tagged such that their execution can be attempted at a later date. This is
        //! important as otherwise it would be erroneously assumed that they cover no sources due to having no entries in the dynamic
        //! dependency map.
        enum class ExecutionFailure : AZ::u8
        {
            Abort, //!< Abort the test sequence and report a failure.
            Continue, //!< Continue the test sequence but treat the execution failures as test failures after the run.
            Ignore //!< Continue the test sequence and ignore the execution failures.
        };

        //! Policy for handling the coverage data of failed test targets (both tests that failed to execute and tests that ran but failed).
        enum class FailedTestCoverage : AZ::u8
        {
            Discard, //!< Discard the coverage data produced by the failing tests, causing them to be drafted into future test runs.
            Keep //!< Keep any existing coverage data and update the coverage data for failed test targets that produce coverage.
        };

        //! Policy for prioritizing selected tests.
        enum class TestPrioritization : AZ::u8
        {
            None, //!< Do not attempt any test prioritization.
            DependencyLocality //!< Prioritize test targets according to the locality of the production targets they cover in the build
                               //!< dependency graph.
        };

        //! Policy for handling test targets that report failing tests.
        enum class TestFailure : AZ::u8
        {
            Abort, //!< Abort the test sequence and report the test failure.
            Continue //!< Continue the test sequence and report the test failures after the run.
        };

        //! Policy for handling integrity failures of the dynamic dependency map and the source to target mappings.
        enum class IntegrityFailure : AZ::u8
        {
            Abort, //!< Abort the test sequence and report the integrity failure.
            Continue //!< Continue the test sequence and report the integrity failures after the run.
        };

        //! Policy for updating the dynamic dependency map with the coverage data produced by test sequences.
        enum class DynamicDependencyMap : AZ::u8
        {
            Discard, //!< Discard the coverage data produced by test sequences.
            Update //!< Update the dynamic dependency map with the coverage data produced by test sequences.
        };

        //! Policy for sharding test targets that have been marked for test sharding.
        enum class TestSharding : AZ::u8
        {
            Never, //!< Do not shard any test targets.
            Always //!< Shard all test targets that have been marked for test sharding.
        };

        //! Standard output capture of test target runs.
        enum class TargetOutputCapture : AZ::u8
        {
            None, //!< Do not capture any output.
            StdOut, //!< Send captured output to standard output.
            File, //!< Write captured output to file.
            StdOutAndFile //!< Send captured output to standard output and write to file.
        };
    } // namespace Policy
} // namespace TestImpact
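
The serializer later in this change (see SerializePolicyStateBaseMembers) reads these enums out of aggregate policy-state structs via members such as m_executionFailurePolicy. A sketch of the shape that usage implies (the actual definitions live elsewhere in the framework and are not part of this diff):

    struct PolicyStateBase
    {
        Policy::ExecutionFailure m_executionFailurePolicy;
        Policy::FailedTestCoverage m_failedTestCoveragePolicy;
        Policy::TestFailure m_testFailurePolicy;
        Policy::IntegrityFailure m_integrityFailurePolicy;
        Policy::TestSharding m_testShardingPolicy;
        Policy::TargetOutputCapture m_targetOutputCapture;
    };
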
@@ -0,0 +1,22 @@
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

#pragma once

#include <TestImpactFramework/TestImpactException.h>

namespace TestImpact
{
    //! Exception for sequence report operations.
    class SequenceReportException
        : public Exception
    {
    public:
        using Exception::Exception;
    };
} // namespace TestImpact
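
Since the class pulls in its base constructors via `using Exception::Exception;`, call sites can throw it with a message directly, e.g. `throw SequenceReportException("unknown report type");` (a hypothetical call site; this diff itself only defines the type).
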
@@ -0,0 +1,606 @@
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

#include <TestImpactFramework/TestImpactClientSequenceReportSerializer.h>
#include <TestImpactFramework/TestImpactSequenceReportException.h>
#include <TestImpactFramework/TestImpactUtils.h>

#include <AzCore/JSON/document.h>
#include <AzCore/JSON/prettywriter.h>
#include <AzCore/JSON/rapidjson.h>
#include <AzCore/JSON/stringbuffer.h>

namespace TestImpact
{
    namespace
    {
        namespace SequenceReportFields
        {
            // Keys for pertinent JSON node and attribute names
            constexpr const char* Keys[] =
            {
                "name",
                "command_args",
                "start_time",
                "end_time",
                "duration",
                "result",
                "num_passing_tests",
                "num_failing_tests",
                "num_disabled_tests",
                "tests",
                "num_passing_test_runs",
                "num_failing_test_runs",
                "num_execution_failure_test_runs",
                "num_timed_out_test_runs",
                "num_unexecuted_test_runs",
                "passing_test_runs",
                "failing_test_runs",
                "execution_failure_test_runs",
                "timed_out_test_runs",
                "unexecuted_test_runs",
                "total_num_passing_tests",
                "total_num_failing_tests",
                "total_num_disabled_tests",
                "total_num_test_runs",
                "num_included_test_runs",
                "num_excluded_test_runs",
                "included_test_runs",
                "excluded_test_runs",
                "execution_failure",
                "coverage_failure",
                "test_failure",
                "integrity_failure",
                "test_sharding",
                "target_output_capture",
                "test_prioritization",
                "dynamic_dependency_map",
                "type",
                "test_target_timeout",
                "global_timeout",
                "max_concurrency",
                "policy",
                "suite",
                "selected_test_runs",
                "selected_test_run_report",
                "total_num_passing_test_runs",
                "total_num_failing_test_runs",
                "total_num_execution_failure_test_runs",
                "total_num_timed_out_test_runs",
                "total_num_unexecuted_test_runs",
                "drafted_test_runs",
                "drafted_test_run_report",
                "discarded_test_runs",
                "discarded_test_run_report"
            };

            // Indices into the Keys array above; the two must be kept in lockstep
            enum
            {
                Name,
                CommandArgs,
                StartTime,
                EndTime,
                Duration,
                Result,
                NumPassingTests,
                NumFailingTests,
                NumDisabledTests,
                Tests,
                NumPassingTestRuns,
                NumFailingTestRuns,
                NumExecutionFailureTestRuns,
                NumTimedOutTestRuns,
                NumUnexecutedTestRuns,
                PassingTestRuns,
                FailingTestRuns,
                ExecutionFailureTestRuns,
                TimedOutTestRuns,
                UnexecutedTestRuns,
                TotalNumPassingTests,
                TotalNumFailingTests,
                TotalNumDisabledTests,
                TotalNumTestRuns,
                NumIncludedTestRuns,
                NumExcludedTestRuns,
                IncludedTestRuns,
                ExcludedTestRuns,
                ExecutionFailure,
                CoverageFailure,
                TestFailure,
                IntegrityFailure,
                TestSharding,
                TargetOutputCapture,
                TestPrioritization,
                DynamicDependencyMap,
                Type,
                TestTargetTimeout,
                GlobalTimeout,
                MaxConcurrency,
                Policy,
                Suite,
                SelectedTestRuns,
                SelectedTestRunReport,
                TotalNumPassingTestRuns,
                TotalNumFailingTestRuns,
                TotalNumExecutionFailureTestRuns,
                TotalNumTimedOutTestRuns,
                TotalNumUnexecutedTestRuns,
                DraftedTestRuns,
                DraftedTestRunReport,
                DiscardedTestRuns,
                DiscardedTestRunReport
            };
        } // namespace SequenceReportFields

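        // The Keys array and the anonymous enum are parallel structures; if they drift apart the
        // serializer will silently emit wrong field names. A compile-time guard along these lines
        // (an editorial sketch, not part of the original change; assumes AZ_ARRAY_SIZE from
        // AzCore/base.h is visible here) would catch that:
        //
        //   static_assert(
        //       AZ_ARRAY_SIZE(SequenceReportFields::Keys) == SequenceReportFields::DiscardedTestRunReport + 1,
        //       "SequenceReportFields::Keys and its index enum are out of sync");
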
        AZ::u64 TimePointInMsAsInt64(AZStd::chrono::high_resolution_clock::time_point timePoint)
        {
            return AZStd::chrono::duration_cast<AZStd::chrono::milliseconds>(timePoint.time_since_epoch()).count();
        }

        void SerializeTestRunMembers(const Client::TestRunBase& testRun, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            // Name
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Name]);
            writer.String(testRun.GetTargetName().c_str());

            // Command string
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::CommandArgs]);
            writer.String(testRun.GetCommandString().c_str());

            // Start time
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::StartTime]);
            writer.Int64(TimePointInMsAsInt64(testRun.GetStartTime()));

            // End time
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::EndTime]);
            writer.Int64(TimePointInMsAsInt64(testRun.GetEndTime()));

            // Duration
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Duration]);
            writer.Uint64(testRun.GetDuration().count());

            // Result
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Result]);
            writer.String(TestRunResultAsString(testRun.GetResult()).c_str());
        }

        void SerializeTestRun(const Client::TestRunBase& testRun, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            writer.StartObject();
            {
                SerializeTestRunMembers(testRun, writer);
            }
            writer.EndObject();
        }

        void SerializeCompletedTestRun(const Client::CompletedTestRun& testRun, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            writer.StartObject();
            {
                SerializeTestRunMembers(testRun, writer);

                // Number of passing test cases
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumPassingTests]);
                writer.Uint64(testRun.GetTotalNumPassingTests());

                // Number of failing test cases
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumFailingTests]);
                writer.Uint64(testRun.GetTotalNumFailingTests());

                // Number of disabled test cases
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumDisabledTests]);
                writer.Uint64(testRun.GetTotalNumDisabledTests());

                // Tests
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::Tests]);
                writer.StartArray();

                for (const auto& test : testRun.GetTests())
                {
                    // Test
                    writer.StartObject();

                    // Name
                    writer.Key(SequenceReportFields::Keys[SequenceReportFields::Name]);
                    writer.String(test.GetName().c_str());

                    // Result
                    writer.Key(SequenceReportFields::Keys[SequenceReportFields::Result]);
                    writer.String(ClientTestResultAsString(test.GetResult()).c_str());

                    writer.EndObject(); // Test
                }

                writer.EndArray(); // Tests
            }
            writer.EndObject();
        }

        void SerializeTestRunReport(
            const Client::TestRunReport& testRunReport, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            writer.StartObject();
            {
                // Result
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::Result]);
                writer.String(TestSequenceResultAsString(testRunReport.GetResult()).c_str());

                // Start time
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::StartTime]);
                writer.Int64(TimePointInMsAsInt64(testRunReport.GetStartTime()));

                // End time
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::EndTime]);
                writer.Int64(TimePointInMsAsInt64(testRunReport.GetEndTime()));

                // Duration
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::Duration]);
                writer.Uint64(testRunReport.GetDuration().count());

                // Number of passing test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumPassingTestRuns]);
                writer.Uint64(testRunReport.GetNumPassingTestRuns());

                // Number of failing test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumFailingTestRuns]);
                writer.Uint64(testRunReport.GetNumFailingTestRuns());

                // Number of test runs that failed to execute
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumExecutionFailureTestRuns]);
                writer.Uint64(testRunReport.GetNumExecutionFailureTestRuns());

                // Number of timed out test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumTimedOutTestRuns]);
                writer.Uint64(testRunReport.GetNumTimedOutTestRuns());

                // Number of unexecuted test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumUnexecutedTestRuns]);
                writer.Uint64(testRunReport.GetNumUnexecutedTestRuns());

                // Passing test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::PassingTestRuns]);
                writer.StartArray();
                for (const auto& testRun : testRunReport.GetPassingTestRuns())
                {
                    SerializeCompletedTestRun(testRun, writer);
                }
                writer.EndArray(); // Passing test runs

                // Failing test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::FailingTestRuns]);
                writer.StartArray();
                for (const auto& testRun : testRunReport.GetFailingTestRuns())
                {
                    SerializeCompletedTestRun(testRun, writer);
                }
                writer.EndArray(); // Failing test runs

                // Execution failures
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::ExecutionFailureTestRuns]);
                writer.StartArray();
                for (const auto& testRun : testRunReport.GetExecutionFailureTestRuns())
                {
                    SerializeTestRun(testRun, writer);
                }
                writer.EndArray(); // Execution failures

                // Timed out test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::TimedOutTestRuns]);
                writer.StartArray();
                for (const auto& testRun : testRunReport.GetTimedOutTestRuns())
                {
                    SerializeTestRun(testRun, writer);
                }
                writer.EndArray(); // Timed out test runs

                // Unexecuted test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::UnexecutedTestRuns]);
                writer.StartArray();
                for (const auto& testRun : testRunReport.GetUnexecutedTestRuns())
                {
                    SerializeTestRun(testRun, writer);
                }
                writer.EndArray(); // Unexecuted test runs

                // Number of passing tests
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumPassingTests]);
                writer.Uint64(testRunReport.GetTotalNumPassingTests());

                // Number of failing tests
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumFailingTests]);
                writer.Uint64(testRunReport.GetTotalNumFailingTests());

                // Number of disabled tests
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumDisabledTests]);
                writer.Uint64(testRunReport.GetTotalNumDisabledTests());
            }
            writer.EndObject();
        }

        void SerializeTestSelection(
            const Client::TestRunSelection& testSelection, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            writer.StartObject();
            {
                // Total number of test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumTestRuns]);
                writer.Uint64(testSelection.GetTotalNumTests());

                // Number of included test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumIncludedTestRuns]);
                writer.Uint64(testSelection.GetNumIncludedTestRuns());

                // Number of excluded test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::NumExcludedTestRuns]);
                writer.Uint64(testSelection.GetNumExcludedTestRuns());

                // Included test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::IncludedTestRuns]);
                writer.StartArray();
                for (const auto& testRun : testSelection.GetIncludededTestRuns())
                {
                    writer.String(testRun.c_str());
                }
                writer.EndArray(); // Included test runs

                // Excluded test runs
                writer.Key(SequenceReportFields::Keys[SequenceReportFields::ExcludedTestRuns]);
                writer.StartArray();
                for (const auto& testRun : testSelection.GetExcludedTestRuns())
                {
                    writer.String(testRun.c_str());
                }
                writer.EndArray(); // Excluded test runs
            }
            writer.EndObject();
        }

        void SerializePolicyStateBaseMembers(const PolicyStateBase& policyState, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            // Execution failure
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::ExecutionFailure]);
            writer.String(ExecutionFailurePolicyAsString(policyState.m_executionFailurePolicy).c_str());

            // Failed test coverage
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::CoverageFailure]);
            writer.String(FailedTestCoveragePolicyAsString(policyState.m_failedTestCoveragePolicy).c_str());

            // Test failure
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TestFailure]);
            writer.String(TestFailurePolicyAsString(policyState.m_testFailurePolicy).c_str());

            // Integrity failure
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::IntegrityFailure]);
            writer.String(IntegrityFailurePolicyAsString(policyState.m_integrityFailurePolicy).c_str());

            // Test sharding
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TestSharding]);
            writer.String(TestShardingPolicyAsString(policyState.m_testShardingPolicy).c_str());

            // Target output capture
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TargetOutputCapture]);
            writer.String(TargetOutputCapturePolicyAsString(policyState.m_targetOutputCapture).c_str());
        }

        void SerializePolicyStateMembers(
            const SequencePolicyState& policyState, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            SerializePolicyStateBaseMembers(policyState.m_basePolicies, writer);
        }

        void SerializePolicyStateMembers(
            const SafeImpactAnalysisSequencePolicyState& policyState, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            SerializePolicyStateBaseMembers(policyState.m_basePolicies, writer);

            // Test prioritization
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TestPrioritization]);
            writer.String(TestPrioritizationPolicyAsString(policyState.m_testPrioritizationPolicy).c_str());
        }

        void SerializePolicyStateMembers(
            const ImpactAnalysisSequencePolicyState& policyState, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            SerializePolicyStateBaseMembers(policyState.m_basePolicies, writer);

            // Test prioritization
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TestPrioritization]);
            writer.String(TestPrioritizationPolicyAsString(policyState.m_testPrioritizationPolicy).c_str());

            // Dynamic dependency map
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::DynamicDependencyMap]);
            writer.String(DynamicDependencyMapPolicyAsString(policyState.m_dynamicDependencyMap).c_str());
        }

        template<typename PolicyStateType>
        void SerializeSequenceReportBaseMembers(
            const Client::SequenceReportBase<PolicyStateType>& sequenceReport, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            // Type
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Type]);
            writer.String(SequenceReportTypeAsString(sequenceReport.GetType()).c_str());

            // Test target timeout
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TestTargetTimeout]);
            writer.Uint64(sequenceReport.GetTestTargetTimeout().value_or(AZStd::chrono::milliseconds{ 0 }).count());

            // Global timeout
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::GlobalTimeout]);
            writer.Uint64(sequenceReport.GetGlobalTimeout().value_or(AZStd::chrono::milliseconds{ 0 }).count());

            // Maximum concurrency
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::MaxConcurrency]);
            writer.Uint64(sequenceReport.GetMaxConcurrency());

            // Policies
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Policy]);
            writer.StartObject();
            {
                SerializePolicyStateMembers(sequenceReport.GetPolicyState(), writer);
            }
            writer.EndObject(); // Policies

            // Suite
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Suite]);
            writer.String(SuiteTypeAsString(sequenceReport.GetSuite()).c_str());

            // Selected test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::SelectedTestRuns]);
            SerializeTestSelection(sequenceReport.GetSelectedTestRuns(), writer);

            // Selected test run report
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::SelectedTestRunReport]);
            SerializeTestRunReport(sequenceReport.GetSelectedTestRunReport(), writer);

            // Start time
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::StartTime]);
            writer.Int64(TimePointInMsAsInt64(sequenceReport.GetStartTime()));

            // End time
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::EndTime]);
            writer.Int64(TimePointInMsAsInt64(sequenceReport.GetEndTime()));

            // Duration
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Duration]);
            writer.Uint64(sequenceReport.GetDuration().count());

            // Result
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::Result]);
            writer.String(TestSequenceResultAsString(sequenceReport.GetResult()).c_str());

            // Total number of test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumTestRuns]);
            writer.Uint64(sequenceReport.GetTotalNumTestRuns());

            // Total number of passing test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumPassingTestRuns]);
            writer.Uint64(sequenceReport.GetTotalNumPassingTestRuns());

            // Total number of failing test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumFailingTestRuns]);
            writer.Uint64(sequenceReport.GetTotalNumFailingTestRuns());

            // Total number of test runs that failed to execute
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumExecutionFailureTestRuns]);
            writer.Uint64(sequenceReport.GetTotalNumExecutionFailureTestRuns());

            // Total number of timed out test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumTimedOutTestRuns]);
            writer.Uint64(sequenceReport.GetTotalNumTimedOutTestRuns());

            // Total number of unexecuted test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumUnexecutedTestRuns]);
            writer.Uint64(sequenceReport.GetTotalNumUnexecutedTestRuns());

            // Total number of passing tests
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumPassingTests]);
            writer.Uint64(sequenceReport.GetTotalNumPassingTests());

            // Total number of failing tests
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumFailingTests]);
            writer.Uint64(sequenceReport.GetTotalNumFailingTests());

            // Total number of disabled tests
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::TotalNumDisabledTests]);
            writer.Uint64(sequenceReport.GetTotalNumDisabledTests());
        }

        template<typename PolicyStateType>
        void SerializeDraftingSequenceReportMembers(
            const Client::DraftingSequenceReportBase<PolicyStateType>& sequenceReport, rapidjson::PrettyWriter<rapidjson::StringBuffer>& writer)
        {
            SerializeSequenceReportBaseMembers(sequenceReport, writer);

            // Drafted test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::DraftedTestRuns]);
            writer.StartArray();
            for (const auto& testRun : sequenceReport.GetDraftedTestRuns())
            {
                writer.String(testRun.c_str());
            }
            writer.EndArray(); // Drafted test runs

            // Drafted test run report
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::DraftedTestRunReport]);
            SerializeTestRunReport(sequenceReport.GetDraftedTestRunReport(), writer);
        }
    } // namespace

    AZStd::string SerializeSequenceReport(const Client::RegularSequenceReport& sequenceReport)
    {
        rapidjson::StringBuffer stringBuffer;
        rapidjson::PrettyWriter<rapidjson::StringBuffer> writer(stringBuffer);

        writer.StartObject();
        {
            SerializeSequenceReportBaseMembers(sequenceReport, writer);
        }
        writer.EndObject();

        return stringBuffer.GetString();
    }

    AZStd::string SerializeSequenceReport(const Client::SeedSequenceReport& sequenceReport)
    {
        rapidjson::StringBuffer stringBuffer;
        rapidjson::PrettyWriter<rapidjson::StringBuffer> writer(stringBuffer);

        writer.StartObject();
        {
            SerializeSequenceReportBaseMembers(sequenceReport, writer);
        }
        writer.EndObject();

        return stringBuffer.GetString();
    }

    AZStd::string SerializeSequenceReport(const Client::ImpactAnalysisSequenceReport& sequenceReport)
    {
        rapidjson::StringBuffer stringBuffer;
        rapidjson::PrettyWriter<rapidjson::StringBuffer> writer(stringBuffer);

        writer.StartObject();
        {
            SerializeDraftingSequenceReportMembers(sequenceReport, writer);

            // Discarded test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::DiscardedTestRuns]);
            writer.StartArray();
            for (const auto& testRun : sequenceReport.GetDiscardedTestRuns())
            {
                writer.String(testRun.c_str());
            }
            writer.EndArray(); // Discarded test runs
        }
        writer.EndObject();

        return stringBuffer.GetString();
    }

    AZStd::string SerializeSequenceReport(const Client::SafeImpactAnalysisSequenceReport& sequenceReport)
    {
        rapidjson::StringBuffer stringBuffer;
        rapidjson::PrettyWriter<rapidjson::StringBuffer> writer(stringBuffer);

        writer.StartObject();
        {
            SerializeDraftingSequenceReportMembers(sequenceReport, writer);

            // Discarded test runs
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::DiscardedTestRuns]);
            SerializeTestSelection(sequenceReport.GetDiscardedTestRuns(), writer);

            // Discarded test run report
            writer.Key(SequenceReportFields::Keys[SequenceReportFields::DiscardedTestRunReport]);
            SerializeTestRunReport(sequenceReport.GetDiscardedTestRunReport(), writer);
        }
        writer.EndObject();

        return stringBuffer.GetString();
    }
} // namespace TestImpact
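
For orientation, SerializeSequenceReport for a regular sequence produces JSON of roughly this shape (abridged, with illustrative values; times are milliseconds since epoch and nested reports follow the test-run structures serialized above):

{
    "type": "regular",
    "test_target_timeout": 300000,
    "global_timeout": 3600000,
    "max_concurrency": 8,
    "policy": {
        "execution_failure": "continue",
        "coverage_failure": "keep",
        "test_failure": "abort",
        "integrity_failure": "abort",
        "test_sharding": "never",
        "target_output_capture": "stdout"
    },
    "suite": "main",
    "selected_test_runs": {
        "total_num_test_runs": 42,
        "num_included_test_runs": 40,
        "num_excluded_test_runs": 2,
        "included_test_runs": ["TestTargetA"],
        "excluded_test_runs": ["TestTargetB"]
    },
    "selected_test_run_report": {
        "result": "success",
        "start_time": 1622547800000,
        "end_time": 1622547920000,
        "duration": 120000,
        "num_passing_test_runs": 40
    },
    "start_time": 1622547800000,
    "end_time": 1622547920000,
    "duration": 120000,
    "result": "success",
    "total_num_test_runs": 42,
    "total_num_passing_test_runs": 40
}
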
@@ -0,0 +1,244 @@
/*
 * Copyright (c) Contributors to the Open 3D Engine Project.
 * For complete copyright and license terms please see the LICENSE at the root of this distribution.
 *
 * SPDX-License-Identifier: Apache-2.0 OR MIT
 *
 */

#include <TestImpactFramework/TestImpactException.h>
#include <TestImpactFramework/TestImpactUtils.h>

#include <AzCore/std/functional.h>

namespace TestImpact
{
    //! Deletes the files that match the pattern from the specified directory.
    //! @param path The path to the directory to pattern match the files for deletion.
    //! @param pattern The pattern to match files for deletion.
    //! @return The number of files deleted.
    size_t DeleteFiles(const RepoPath& path, const AZStd::string& pattern)
    {
        size_t numFilesDeleted = 0;

        AZ::IO::SystemFile::FindFiles(
            AZStd::string::format("%s/%s", path.c_str(), pattern.c_str()).c_str(),
            [&path, &numFilesDeleted](const char* file, bool isFile)
            {
                if (isFile)
                {
                    AZ::IO::SystemFile::Delete(AZStd::string::format("%s/%s", path.c_str(), file).c_str());
                    numFilesDeleted++;
                }

                return true;
            });

        return numFilesDeleted;
    }

    //! Deletes the specified file.
    void DeleteFile(const RepoPath& file)
    {
        DeleteFiles(file.ParentPath(), file.Filename().Native());
    }

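    // Usage sketch (illustrative; `artifactDir` is a hypothetical RepoPath):
    //
    //   const size_t numDeleted = DeleteFiles(artifactDir, "*.json"); // wildcard pattern
    //   DeleteFile(artifactDir / "sequence_report.json");             // single named file
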
    //! User-friendly names for the test suite types.
    AZStd::string SuiteTypeAsString(SuiteType suiteType)
    {
        switch (suiteType)
        {
        case SuiteType::Main:
            return "main";
        case SuiteType::Periodic:
            return "periodic";
        case SuiteType::Sandbox:
            return "sandbox";
        default:
            throw(Exception("Unexpected suite type"));
        }
    }

    AZStd::string SequenceReportTypeAsString(Client::SequenceReportType type)
    {
        switch (type)
        {
        case Client::SequenceReportType::RegularSequence:
            return "regular";
        case Client::SequenceReportType::SeedSequence:
            return "seed";
        case Client::SequenceReportType::ImpactAnalysisSequence:
            return "impact_analysis";
        case Client::SequenceReportType::SafeImpactAnalysisSequence:
            return "safe_impact_analysis";
        default:
            throw(Exception(AZStd::string::format("Unexpected sequence report type: %u", aznumeric_cast<AZ::u32>(type))));
        }
    }

    AZStd::string TestSequenceResultAsString(TestSequenceResult result)
    {
        switch (result)
        {
        case TestSequenceResult::Failure:
            return "failure";
        case TestSequenceResult::Success:
            return "success";
        case TestSequenceResult::Timeout:
            return "timeout";
        default:
            throw(Exception(AZStd::string::format("Unexpected test sequence result: %u", aznumeric_cast<AZ::u32>(result))));
        }
    }

    AZStd::string TestRunResultAsString(Client::TestRunResult result)
    {
        switch (result)
        {
        case Client::TestRunResult::AllTestsPass:
            return "all_tests_pass";
        case Client::TestRunResult::FailedToExecute:
            return "failed_to_execute";
        case Client::TestRunResult::NotRun:
            return "not_run";
        case Client::TestRunResult::TestFailures:
            return "test_failures";
        case Client::TestRunResult::Timeout:
            return "timeout";
        default:
            throw(Exception(AZStd::string::format("Unexpected test run result: %u", aznumeric_cast<AZ::u32>(result))));
        }
    }

    AZStd::string ExecutionFailurePolicyAsString(Policy::ExecutionFailure executionFailurePolicy)
    {
        switch (executionFailurePolicy)
        {
        case Policy::ExecutionFailure::Abort:
            return "abort";
        case Policy::ExecutionFailure::Continue:
            return "continue";
        case Policy::ExecutionFailure::Ignore:
            return "ignore";
        default:
            throw(Exception(
                AZStd::string::format("Unexpected execution failure policy: %u", aznumeric_cast<AZ::u32>(executionFailurePolicy))));
        }
    }

    AZStd::string FailedTestCoveragePolicyAsString(Policy::FailedTestCoverage failedTestCoveragePolicy)
    {
        switch (failedTestCoveragePolicy)
        {
        case Policy::FailedTestCoverage::Discard:
            return "discard";
        case Policy::FailedTestCoverage::Keep:
            return "keep";
        default:
            throw(Exception(
                AZStd::string::format("Unexpected failed test coverage policy: %u", aznumeric_cast<AZ::u32>(failedTestCoveragePolicy))));
        }
    }

    AZStd::string TestPrioritizationPolicyAsString(Policy::TestPrioritization testPrioritizationPolicy)
    {
        switch (testPrioritizationPolicy)
        {
        case Policy::TestPrioritization::DependencyLocality:
            return "dependency_locality";
        case Policy::TestPrioritization::None:
            return "none";
        default:
            throw(Exception(
                AZStd::string::format("Unexpected test prioritization policy: %u", aznumeric_cast<AZ::u32>(testPrioritizationPolicy))));
        }
    }

    AZStd::string TestFailurePolicyAsString(Policy::TestFailure testFailurePolicy)
    {
        switch (testFailurePolicy)
        {
        case Policy::TestFailure::Abort:
            return "abort";
        case Policy::TestFailure::Continue:
            return "continue";
        default:
            throw(
                Exception(AZStd::string::format("Unexpected test failure policy: %u", aznumeric_cast<AZ::u32>(testFailurePolicy))));
        }
    }

    AZStd::string IntegrityFailurePolicyAsString(Policy::IntegrityFailure integrityFailurePolicy)
    {
        switch (integrityFailurePolicy)
        {
        case Policy::IntegrityFailure::Abort:
            return "abort";
        case Policy::IntegrityFailure::Continue:
            return "continue";
        default:
            throw(Exception(
                AZStd::string::format("Unexpected integrity failure policy: %u", aznumeric_cast<AZ::u32>(integrityFailurePolicy))));
        }
    }

    AZStd::string DynamicDependencyMapPolicyAsString(Policy::DynamicDependencyMap dynamicDependencyMapPolicy)
    {
        switch (dynamicDependencyMapPolicy)
        {
        case Policy::DynamicDependencyMap::Discard:
            return "discard";
        case Policy::DynamicDependencyMap::Update:
            return "update";
        default:
            throw(Exception(AZStd::string::format(
                "Unexpected dynamic dependency map policy: %u", aznumeric_cast<AZ::u32>(dynamicDependencyMapPolicy))));
        }
    }

    AZStd::string TestShardingPolicyAsString(Policy::TestSharding testShardingPolicy)
    {
        switch (testShardingPolicy)
        {
        case Policy::TestSharding::Always:
            return "always";
        case Policy::TestSharding::Never:
            return "never";
        default:
            throw(Exception(
                AZStd::string::format("Unexpected test sharding policy: %u", aznumeric_cast<AZ::u32>(testShardingPolicy))));
        }
    }

    AZStd::string TargetOutputCapturePolicyAsString(Policy::TargetOutputCapture targetOutputCapturePolicy)
    {
        switch (targetOutputCapturePolicy)
        {
        case Policy::TargetOutputCapture::File:
            return "file";
        case Policy::TargetOutputCapture::None:
            return "none";
        case Policy::TargetOutputCapture::StdOut:
            return "stdout";
        case Policy::TargetOutputCapture::StdOutAndFile:
            return "stdout_file";
        default:
            throw(Exception(
                AZStd::string::format("Unexpected target output capture policy: %u", aznumeric_cast<AZ::u32>(targetOutputCapturePolicy))));
        }
    }

    AZStd::string ClientTestResultAsString(Client::TestResult result)
    {
        switch (result)
        {
        case Client::TestResult::Failed:
            return "failed";
        case Client::TestResult::NotRun:
            return "not_run";
        case Client::TestResult::Passed:
            return "passed";
        default:
            throw(Exception(AZStd::string::format("Unexpected client test case result: %u", aznumeric_cast<AZ::u32>(result))));
        }
    }
} // namespace TestImpact
@@ -0,0 +1,452 @@
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#

import datetime
import json
import socket
from tiaf_logger import get_logger

logger = get_logger(__file__)

MARS_JOB_KEY = "job"
SRC_COMMIT_KEY = "src_commit"
DST_COMMIT_KEY = "dst_commit"
COMMIT_DISTANCE_KEY = "commit_distance"
SRC_BRANCH_KEY = "src_branch"
DST_BRANCH_KEY = "dst_branch"
SUITE_KEY = "suite"
SOURCE_OF_TRUTH_BRANCH_KEY = "source_of_truth_branch"
IS_SOURCE_OF_TRUTH_BRANCH_KEY = "is_source_of_truth_branch"
USE_TEST_IMPACT_ANALYSIS_KEY = "use_test_impact_analysis"
HAS_CHANGE_LIST_KEY = "has_change_list"
HAS_HISTORIC_DATA_KEY = "has_historic_data"
S3_BUCKET_KEY = "s3_bucket"
DRIVER_ARGS_KEY = "driver_args"
RUNTIME_ARGS_KEY = "runtime_args"
RUNTIME_RETURN_CODE_KEY = "return_code"
NAME_KEY = "name"
RESULT_KEY = "result"
NUM_PASSING_TESTS_KEY = "num_passing_tests"
NUM_FAILING_TESTS_KEY = "num_failing_tests"
NUM_DISABLED_TESTS_KEY = "num_disabled_tests"
COMMAND_ARGS_STRING = "command_args"
NUM_PASSING_TEST_RUNS_KEY = "num_passing_test_runs"
NUM_FAILING_TEST_RUNS_KEY = "num_failing_test_runs"
NUM_EXECUTION_FAILURE_TEST_RUNS_KEY = "num_execution_failure_test_runs"
NUM_TIMED_OUT_TEST_RUNS_KEY = "num_timed_out_test_runs"
NUM_UNEXECUTED_TEST_RUNS_KEY = "num_unexecuted_test_runs"
TOTAL_NUM_PASSING_TESTS_KEY = "total_num_passing_tests"
TOTAL_NUM_FAILING_TESTS_KEY = "total_num_failing_tests"
TOTAL_NUM_DISABLED_TESTS_KEY = "total_num_disabled_tests"
START_TIME_KEY = "start_time"
END_TIME_KEY = "end_time"
DURATION_KEY = "duration"
INCLUDED_TEST_RUNS_KEY = "included_test_runs"
EXCLUDED_TEST_RUNS_KEY = "excluded_test_runs"
NUM_INCLUDED_TEST_RUNS_KEY = "num_included_test_runs"
NUM_EXCLUDED_TEST_RUNS_KEY = "num_excluded_test_runs"
TOTAL_NUM_TEST_RUNS_KEY = "total_num_test_runs"
PASSING_TEST_RUNS_KEY = "passing_test_runs"
FAILING_TEST_RUNS_KEY = "failing_test_runs"
EXECUTION_FAILURE_TEST_RUNS_KEY = "execution_failure_test_runs"
TIMED_OUT_TEST_RUNS_KEY = "timed_out_test_runs"
UNEXECUTED_TEST_RUNS_KEY = "unexecuted_test_runs"
TOTAL_NUM_PASSING_TEST_RUNS_KEY = "total_num_passing_test_runs"
TOTAL_NUM_FAILING_TEST_RUNS_KEY = "total_num_failing_test_runs"
TOTAL_NUM_EXECUTION_FAILURE_TEST_RUNS_KEY = "total_num_execution_failure_test_runs"
TOTAL_NUM_TIMED_OUT_TEST_RUNS_KEY = "total_num_timed_out_test_runs"
TOTAL_NUM_UNEXECUTED_TEST_RUNS_KEY = "total_num_unexecuted_test_runs"
SEQUENCE_TYPE_KEY = "type"
IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY = "impact_analysis"
SAFE_IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY = "safe_impact_analysis"
SEED_SEQUENCE_TYPE_KEY = "seed"
TEST_TARGET_TIMEOUT_KEY = "test_target_timeout"
GLOBAL_TIMEOUT_KEY = "global_timeout"
MAX_CONCURRENCY_KEY = "max_concurrency"
SELECTED_KEY = "selected"
DRAFTED_KEY = "drafted"
DISCARDED_KEY = "discarded"
SELECTED_TEST_RUN_REPORT_KEY = "selected_test_run_report"
DISCARDED_TEST_RUN_REPORT_KEY = "discarded_test_run_report"
DRAFTED_TEST_RUN_REPORT_KEY = "drafted_test_run_report"
SELECTED_TEST_RUNS_KEY = "selected_test_runs"
DRAFTED_TEST_RUNS_KEY = "drafted_test_runs"
DISCARDED_TEST_RUNS_KEY = "discarded_test_runs"
INSTRUMENTATION_KEY = "instrumentation"
EFFICIENCY_KEY = "efficiency"
CONFIG_KEY = "config"
POLICY_KEY = "policy"
CHANGE_LIST_KEY = "change_list"
TEST_RUN_SELECTION_KEY = "test_run_selection"
DYNAMIC_DEPENDENCY_MAP_POLICY_KEY = "dynamic_dependency_map"
DYNAMIC_DEPENDENCY_MAP_POLICY_UPDATE_KEY = "update"
REPORT_KEY = "report"


class FilebeatExn(Exception):
    pass


class FilebeatClient(object):
    def __init__(self, host="127.0.0.1", port=9000, timeout=20):
        self._filebeat_host = host
        self._filebeat_port = port
        self._socket_timeout = timeout
        self._socket = None

        self._open_socket()

    def send_event(self, payload, index, timestamp=None, pipeline="filebeat"):
        if not timestamp:
            timestamp = datetime.datetime.utcnow().timestamp()

        event = {
            "index": index,
            "timestamp": timestamp,
            "pipeline": pipeline,
            "payload": json.dumps(payload)
        }

        # Serialise event, add new line and encode as UTF-8 before sending to Filebeat.
        data = json.dumps(event, sort_keys=True) + "\n"
        data = data.encode()

        self._send_data(data)

    def _open_socket(self):
        logger.info(f"Connecting to Filebeat on {self._filebeat_host}:{self._filebeat_port}")

        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.settimeout(self._socket_timeout)

        try:
            self._socket.connect((self._filebeat_host, self._filebeat_port))
        except (ConnectionError, socket.timeout):
            raise FilebeatExn("Failed to connect to Filebeat") from None

    def _send_data(self, data):
        total_sent = 0

        while total_sent < len(data):
            try:
                sent = self._socket.send(data[total_sent:])
            except BrokenPipeError:
                logger.error("Filebeat socket closed by peer")
                self._socket.close()
                self._open_socket()
                total_sent = 0
            else:
                total_sent = total_sent + sent

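# Usage sketch (illustrative, not part of this change): the client connects on construction
# and send_event() frames the payload as one newline-terminated JSON event for a local
# Filebeat TCP input:
#
#   filebeat = FilebeatClient(host="127.0.0.1", port=9000, timeout=20)
#   filebeat.send_event({"greeting": "hello"}, "my_prefix.tiaf.job")
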
def format_timestamp(timestamp: float):
    """
    Formats the given floating point timestamp into "yyyy-MM-dd'T'HH:mm:ss.SSSXX" format.

    @param timestamp: The timestamp to format.
    @return: The formatted timestamp.
    """
    return datetime.datetime.utcfromtimestamp(timestamp).strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"

def generate_mars_timestamp(t0_offset_milliseconds: int, t0_timestamp: float):
    """
    Generates a MARS timestamp in the format "yyyy-MM-dd'T'HH:mm:ss.SSSXX" by offsetting the T0 timestamp
    by the specified amount of milliseconds.

    @param t0_offset_milliseconds: The amount of time to offset from T0.
    @param t0_timestamp: The T0 timestamp that TIAF timings will be offset from.
    @return: The formatted timestamp offset from T0 by the specified amount of milliseconds.
    """

    t0_offset_seconds = get_duration_in_seconds(t0_offset_milliseconds)
    t0_offset_timestamp = t0_timestamp + t0_offset_seconds
    return format_timestamp(t0_offset_timestamp)

def get_duration_in_seconds(duration_in_milliseconds: int):
    """
    Gets the specified duration in milliseconds (as used by TIAF) in seconds (as used by MARS documents).

    @param duration_in_milliseconds: The millisecond duration to transform into seconds.
    @return: The duration in seconds.
    """

    return duration_in_milliseconds * 0.001

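# Worked example (illustrative): for t0_timestamp = 1600000000.0 (2020-09-13T12:26:40.000Z UTC),
# get_duration_in_seconds(1500) == 1.5, so generate_mars_timestamp(1500, 1600000000.0)
# returns "2020-09-13T12:26:41.500Z".
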
def generate_mars_job(tiaf_result, driver_args):
    """
    Generates a MARS job document using the job meta-data used to drive the TIAF sequence.

    @param tiaf_result: The result object generated by the TIAF script.
    @param driver_args: The arguments specified to the driver script.
    @return: The MARS job document with the job meta-data.
    """

    mars_job = {key: tiaf_result[key] for key in
    [
        SRC_COMMIT_KEY,
        DST_COMMIT_KEY,
        COMMIT_DISTANCE_KEY,
        SRC_BRANCH_KEY,
        DST_BRANCH_KEY,
        SUITE_KEY,
        SOURCE_OF_TRUTH_BRANCH_KEY,
        IS_SOURCE_OF_TRUTH_BRANCH_KEY,
        USE_TEST_IMPACT_ANALYSIS_KEY,
        HAS_CHANGE_LIST_KEY,
        HAS_HISTORIC_DATA_KEY,
        S3_BUCKET_KEY,
        RUNTIME_ARGS_KEY,
        RUNTIME_RETURN_CODE_KEY
    ]}

    mars_job[DRIVER_ARGS_KEY] = driver_args
    return mars_job

def generate_test_run_list(test_runs):
    """
    Generates a list of test run name strings from the list of TIAF test runs.

    @param test_runs: The list of TIAF test runs to generate the name strings from.
    @return: The list of test run name strings.
    """

    test_run_list = []
    for test_run in test_runs:
        test_run_list.append(test_run[NAME_KEY])
    return test_run_list

def generate_mars_test_run_selections(test_run_selection, test_run_report, t0_timestamp: float):
    """
    Generates a MARS test run selection from a TIAF test run selection and report.

    @param test_run_selection: The TIAF test run selection.
    @param test_run_report: The TIAF test run report.
    @param t0_timestamp: The T0 timestamp that TIAF timings will be offset from.
    @return: The MARS test run selection.
    """

    mars_test_run_selection = {key: test_run_report[key] for key in
    [
        RESULT_KEY,
        NUM_PASSING_TEST_RUNS_KEY,
        NUM_FAILING_TEST_RUNS_KEY,
        NUM_EXECUTION_FAILURE_TEST_RUNS_KEY,
        NUM_TIMED_OUT_TEST_RUNS_KEY,
        NUM_UNEXECUTED_TEST_RUNS_KEY,
        TOTAL_NUM_PASSING_TESTS_KEY,
        TOTAL_NUM_FAILING_TESTS_KEY,
        TOTAL_NUM_DISABLED_TESTS_KEY
    ]}

    mars_test_run_selection[START_TIME_KEY] = generate_mars_timestamp(test_run_report[START_TIME_KEY], t0_timestamp)
    mars_test_run_selection[END_TIME_KEY] = generate_mars_timestamp(test_run_report[END_TIME_KEY], t0_timestamp)
    mars_test_run_selection[DURATION_KEY] = get_duration_in_seconds(test_run_report[DURATION_KEY])

    mars_test_run_selection[INCLUDED_TEST_RUNS_KEY] = test_run_selection[INCLUDED_TEST_RUNS_KEY]
    mars_test_run_selection[EXCLUDED_TEST_RUNS_KEY] = test_run_selection[EXCLUDED_TEST_RUNS_KEY]
    mars_test_run_selection[NUM_INCLUDED_TEST_RUNS_KEY] = test_run_selection[NUM_INCLUDED_TEST_RUNS_KEY]
    mars_test_run_selection[NUM_EXCLUDED_TEST_RUNS_KEY] = test_run_selection[NUM_EXCLUDED_TEST_RUNS_KEY]
    mars_test_run_selection[TOTAL_NUM_TEST_RUNS_KEY] = test_run_selection[TOTAL_NUM_TEST_RUNS_KEY]

    mars_test_run_selection[PASSING_TEST_RUNS_KEY] = generate_test_run_list(test_run_report[PASSING_TEST_RUNS_KEY])
    mars_test_run_selection[FAILING_TEST_RUNS_KEY] = generate_test_run_list(test_run_report[FAILING_TEST_RUNS_KEY])
    mars_test_run_selection[EXECUTION_FAILURE_TEST_RUNS_KEY] = generate_test_run_list(test_run_report[EXECUTION_FAILURE_TEST_RUNS_KEY])
    mars_test_run_selection[TIMED_OUT_TEST_RUNS_KEY] = generate_test_run_list(test_run_report[TIMED_OUT_TEST_RUNS_KEY])
    mars_test_run_selection[UNEXECUTED_TEST_RUNS_KEY] = generate_test_run_list(test_run_report[UNEXECUTED_TEST_RUNS_KEY])

    return mars_test_run_selection

def generate_test_runs_from_list(test_run_list: list):
    """
    Generates a TIAF test run selection structure from a list of test target name strings.

    @param test_run_list: The list of test target names.
    @return: The test run selection for the given test targets (all included, none excluded).
    """

    test_run_selection = {
        TOTAL_NUM_TEST_RUNS_KEY: len(test_run_list),
        NUM_INCLUDED_TEST_RUNS_KEY: len(test_run_list),
        NUM_EXCLUDED_TEST_RUNS_KEY: 0,
        INCLUDED_TEST_RUNS_KEY: test_run_list,
        EXCLUDED_TEST_RUNS_KEY: []
    }

    return test_run_selection

def generate_mars_sequence(sequence_report: dict, mars_job: dict, change_list: dict, t0_timestamp: float):
    """
    Generates the MARS sequence document from the specified TIAF sequence report.

    @param sequence_report: The TIAF runtime sequence report.
    @param mars_job: The MARS job for this sequence.
    @param change_list: The change list for which the TIAF sequence was run.
    @param t0_timestamp: The T0 timestamp that TIAF timings will be offset from.
    @return: The MARS sequence document for the specified TIAF sequence report.
    """

    mars_sequence = {key: sequence_report[key] for key in
    [
        SEQUENCE_TYPE_KEY,
        RESULT_KEY,
        POLICY_KEY,
        TOTAL_NUM_TEST_RUNS_KEY,
        TOTAL_NUM_PASSING_TEST_RUNS_KEY,
        TOTAL_NUM_FAILING_TEST_RUNS_KEY,
        TOTAL_NUM_EXECUTION_FAILURE_TEST_RUNS_KEY,
        TOTAL_NUM_TIMED_OUT_TEST_RUNS_KEY,
        TOTAL_NUM_UNEXECUTED_TEST_RUNS_KEY,
        TOTAL_NUM_PASSING_TESTS_KEY,
        TOTAL_NUM_FAILING_TESTS_KEY,
        TOTAL_NUM_DISABLED_TESTS_KEY
    ]}

    mars_sequence[START_TIME_KEY] = generate_mars_timestamp(sequence_report[START_TIME_KEY], t0_timestamp)
    mars_sequence[END_TIME_KEY] = generate_mars_timestamp(sequence_report[END_TIME_KEY], t0_timestamp)
    mars_sequence[DURATION_KEY] = get_duration_in_seconds(sequence_report[DURATION_KEY])

    config = {key: sequence_report[key] for key in
    [
        TEST_TARGET_TIMEOUT_KEY,
        GLOBAL_TIMEOUT_KEY,
        MAX_CONCURRENCY_KEY
    ]}

    test_run_selection = {}
    test_run_selection[SELECTED_KEY] = generate_mars_test_run_selections(sequence_report[SELECTED_TEST_RUNS_KEY], sequence_report[SELECTED_TEST_RUN_REPORT_KEY], t0_timestamp)
    if sequence_report[SEQUENCE_TYPE_KEY] == IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY or sequence_report[SEQUENCE_TYPE_KEY] == SAFE_IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY:
        total_test_runs = sequence_report[TOTAL_NUM_TEST_RUNS_KEY]
        if total_test_runs > 0:
            # Efficiency: the percentage of the total test runs that impact analysis allowed to be skipped
            test_run_selection[SELECTED_KEY][EFFICIENCY_KEY] = (1.0 - (test_run_selection[SELECTED_KEY][TOTAL_NUM_TEST_RUNS_KEY] / total_test_runs)) * 100
        else:
            test_run_selection[SELECTED_KEY][EFFICIENCY_KEY] = 100
        test_run_selection[DRAFTED_KEY] = generate_mars_test_run_selections(generate_test_runs_from_list(sequence_report[DRAFTED_TEST_RUNS_KEY]), sequence_report[DRAFTED_TEST_RUN_REPORT_KEY], t0_timestamp)
        if sequence_report[SEQUENCE_TYPE_KEY] == SAFE_IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY:
            test_run_selection[DISCARDED_KEY] = generate_mars_test_run_selections(sequence_report[DISCARDED_TEST_RUNS_KEY], sequence_report[DISCARDED_TEST_RUN_REPORT_KEY], t0_timestamp)
    else:
        test_run_selection[SELECTED_KEY][EFFICIENCY_KEY] = 0

    mars_sequence[MARS_JOB_KEY] = mars_job
    mars_sequence[CONFIG_KEY] = config
    mars_sequence[TEST_RUN_SELECTION_KEY] = test_run_selection
    mars_sequence[CHANGE_LIST_KEY] = change_list

    return mars_sequence

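# Worked example for the efficiency figure above (illustrative): if the repository has 1000
# test runs in total and impact analysis selected 150 of them, efficiency is
# (1.0 - 150 / 1000) * 100 == 85.0, i.e. 85% of the test runs were skipped.
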
def extract_mars_test_target(test_run, instrumentation, mars_job, t0_timestamp: float):
|
||||
"""
|
||||
Extracts a MARS test target from the specified TIAF test run.
|
||||
|
||||
@param test_run: The TIAF test run.
|
||||
@param instrumentation: Flag specifying whether or not instrumentation was used for the test targets in this run.
|
||||
@param mars_job: The MARS job for this test target.
|
||||
@param t0_timestamp: The T0 timestamp that TIAF timings will be offst from.
|
||||
@return: The MARS test target documents for the specified TIAF test target.
|
||||
"""
|
||||
|
||||
mars_test_run = {key:test_run[key] for key in
|
||||
[
|
||||
NAME_KEY,
|
||||
RESULT_KEY,
|
||||
NUM_PASSING_TESTS_KEY,
|
||||
NUM_FAILING_TESTS_KEY,
|
||||
NUM_DISABLED_TESTS_KEY,
|
||||
COMMAND_ARGS_STRING
|
||||
]}
|
||||
|
||||
mars_test_run[START_TIME_KEY] = generate_mars_timestamp(test_run[START_TIME_KEY], t0_timestamp)
|
||||
mars_test_run[END_TIME_KEY] = generate_mars_timestamp(test_run[END_TIME_KEY], t0_timestamp)
|
||||
mars_test_run[DURATION_KEY] = get_duration_in_seconds(test_run[DURATION_KEY])
|
||||
|
||||
mars_test_run[MARS_JOB_KEY] = mars_job
|
||||
mars_test_run[INSTRUMENTATION_KEY] = instrumentation
|
||||
return mars_test_run
|
||||
|
||||
def extract_mars_test_targets_from_report(test_run_report, instrumentation, mars_job, t0_timestamp: float):
|
||||
"""
|
||||
Extracts the MARS test targets from the specified TIAF test run report.
|
||||
|
||||
@param test_run_report: The TIAF runtime test run report.
|
||||
@param instrumentation: Flag specifying whether or not instrumentation was used for the test targets in this run.
|
||||
@param mars_job: The MARS job for these test targets.
|
||||
@param t0_timestamp: The T0 timestamp that TIAF timings will be offst from.
|
||||
@return: The list of all MARS test target documents for the test targets in the TIAF test run report.
|
||||
"""
|
||||
|
||||
mars_test_targets = []
|
||||
|
||||
for test_run in test_run_report[PASSING_TEST_RUNS_KEY]:
|
||||
mars_test_targets.append(extract_mars_test_target(test_run, instrumentation, mars_job, t0_timestamp))
|
||||
for test_run in test_run_report[FAILING_TEST_RUNS_KEY]:
|
||||
mars_test_targets.append(extract_mars_test_target(test_run, instrumentation, mars_job, t0_timestamp))
|
||||
for test_run in test_run_report[EXECUTION_FAILURE_TEST_RUNS_KEY]:
|
||||
mars_test_targets.append(extract_mars_test_target(test_run, instrumentation, mars_job, t0_timestamp))
|
||||
for test_run in test_run_report[TIMED_OUT_TEST_RUNS_KEY]:
|
||||
mars_test_targets.append(extract_mars_test_target(test_run, instrumentation, mars_job, t0_timestamp))
|
||||
for test_run in test_run_report[UNEXECUTED_TEST_RUNS_KEY]:
|
||||
mars_test_targets.append(extract_mars_test_target(test_run, instrumentation, mars_job, t0_timestamp))
|
||||
|
||||
return mars_test_targets
|
||||
|
||||
def generate_mars_test_targets(sequence_report: dict, mars_job: dict, t0_timestamp: float):
    """
    Generates a MARS test target document for each test target in the TIAF sequence report.

    @param sequence_report: The TIAF runtime sequence report.
    @param mars_job: The MARS job for this sequence.
    @param t0_timestamp: The T0 timestamp that TIAF timings will be offset from.
    @return: The list of all MARS test target documents for the test targets in the TIAF sequence report.
    """

    mars_test_targets = []

    # Determine whether or not the test targets were executed with instrumentation
    if sequence_report[SEQUENCE_TYPE_KEY] == SEED_SEQUENCE_TYPE_KEY or sequence_report[SEQUENCE_TYPE_KEY] == SAFE_IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY or (sequence_report[SEQUENCE_TYPE_KEY] == IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY and sequence_report[POLICY_KEY][DYNAMIC_DEPENDENCY_MAP_POLICY_KEY] == DYNAMIC_DEPENDENCY_MAP_POLICY_UPDATE_KEY):
        instrumentation = True
    else:
        instrumentation = False

    # Extract the MARS test target documents from each of the test run reports
    mars_test_targets += extract_mars_test_targets_from_report(sequence_report[SELECTED_TEST_RUN_REPORT_KEY], instrumentation, mars_job, t0_timestamp)
    if sequence_report[SEQUENCE_TYPE_KEY] == IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY or sequence_report[SEQUENCE_TYPE_KEY] == SAFE_IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY:
        mars_test_targets += extract_mars_test_targets_from_report(sequence_report[DRAFTED_TEST_RUN_REPORT_KEY], instrumentation, mars_job, t0_timestamp)
    if sequence_report[SEQUENCE_TYPE_KEY] == SAFE_IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY:
        mars_test_targets += extract_mars_test_targets_from_report(sequence_report[DISCARDED_TEST_RUN_REPORT_KEY], instrumentation, mars_job, t0_timestamp)

    return mars_test_targets

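The instrumentation check above reads more easily as a single boolean expression; a sketch using only this module's own key constants:

    sequence_type = sequence_report[SEQUENCE_TYPE_KEY]
    instrumentation = (
        sequence_type in (SEED_SEQUENCE_TYPE_KEY, SAFE_IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY)
        # Impact analysis sequences are only instrumented when the dynamic dependency map policy is set to update
        or (sequence_type == IMPACT_ANALYSIS_SEQUENCE_TYPE_KEY
            and sequence_report[POLICY_KEY][DYNAMIC_DEPENDENCY_MAP_POLICY_KEY] == DYNAMIC_DEPENDENCY_MAP_POLICY_UPDATE_KEY)
    )
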
def transmit_report_to_mars(mars_index_prefix: str, tiaf_result: dict, driver_args: list):
    """
    Transforms the TIAF result into the appropriate MARS documents and transmits them to MARS.

    @param mars_index_prefix: The index prefix to be used for all MARS documents.
    @param tiaf_result: The result object from the TIAF script.
    @param driver_args: The arguments passed to the TIAF driver script.
    """

    try:
        filebeat = FilebeatClient("localhost", 9000, 60)

        # T0 is the current timestamp that the report timings will be offset from
        t0_timestamp = datetime.datetime.now().timestamp()

        # Generate and transmit the MARS job document
        mars_job = generate_mars_job(tiaf_result, driver_args)
        filebeat.send_event(mars_job, f"{mars_index_prefix}.tiaf.job")

        if tiaf_result[REPORT_KEY]:
            # Generate and transmit the MARS sequence document
            mars_sequence = generate_mars_sequence(tiaf_result[REPORT_KEY], mars_job, tiaf_result[CHANGE_LIST_KEY], t0_timestamp)
            filebeat.send_event(mars_sequence, f"{mars_index_prefix}.tiaf.sequence")

            # Generate and transmit the MARS test target documents
            mars_test_targets = generate_mars_test_targets(tiaf_result[REPORT_KEY], mars_job, t0_timestamp)
            for mars_test_target in mars_test_targets:
                filebeat.send_event(mars_test_target, f"{mars_index_prefix}.tiaf.test_target")
    except FilebeatExn as e:
        logger.error(e)
    except KeyError as e:
        logger.error(f"The report does not contain the key {str(e)}.")

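A minimal sketch of invoking this entry point; the index prefix and the result dict below are placeholder values, not part of this commit:

    # Hypothetical usage only: a result with no report transmits just the MARS job document
    tiaf_result = {REPORT_KEY: None, CHANGE_LIST_KEY: []}
    transmit_report_to_mars("o3de", tiaf_result, driver_args=[])
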
@ -0,0 +1,20 @@
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#

import logging
import sys

def get_logger(name: str):
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('[%(asctime)s][TIAF][%(levelname)s] %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    return logger

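Typical usage, mirroring how the other TIAF scripts in this change obtain their logger:

    from tiaf_logger import get_logger

    logger = get_logger(__file__)
    logger.info("Sequence complete.")  # prints: [<timestamp>][TIAF][INFO] Sequence complete.
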
@ -0,0 +1,118 @@
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#

import json
import pathlib
from abc import ABC, abstractmethod
from tiaf_logger import get_logger

logger = get_logger(__file__)

# Abstraction for the persistent storage required by TIAF to store and retrieve the branch coverage data and other meta-data
class PersistentStorage(ABC):
    def __init__(self, config: dict, suite: str):
        """
        Initializes the persistent storage into a state for which there is no historic data available.

        @param config: The runtime configuration to obtain the data file paths from.
        @param suite: The test suite for which the historic data will be obtained.
        """

        # Work on the assumption that there is no historic meta-data (a valid state to be in, should none exist)
        self._last_commit_hash = None
        self._has_historic_data = False

        try:
            # The runtime expects the coverage data to be in the location specified in the config file (unless overridden with
            # the --datafile command line argument, which the TIAF scripts do not do)
            self._active_workspace = pathlib.Path(config["workspace"]["active"]["root"])
            unpacked_coverage_data_file = config["workspace"]["active"]["relative_paths"]["test_impact_data_files"][suite]
        except KeyError as e:
            raise SystemError(f"The config does not contain the key {str(e)}.")

        self._unpacked_coverage_data_file = self._active_workspace.joinpath(unpacked_coverage_data_file)

    def _unpack_historic_data(self, historic_data_json: str):
        """
        Unpacks the historic data into the appropriate memory and disk locations.

        @param historic_data_json: The historic data in JSON format.
        """

        self._has_historic_data = False

        try:
            historic_data = json.loads(historic_data_json)
            self._last_commit_hash = historic_data["last_commit_hash"]

            # Create the active workspace directory where the coverage data file will be placed and unpack the coverage data so
            # it is accessible by the runtime
            self._active_workspace.mkdir(exist_ok=True)
            with open(self._unpacked_coverage_data_file, "w", newline='\n') as coverage_data:
                coverage_data.write(historic_data["coverage_data"])

            self._has_historic_data = True
        except json.JSONDecodeError:
            logger.error("The historic data does not contain valid JSON.")
        except KeyError as e:
            logger.error(f"The historic data does not contain the key {str(e)}.")
        except EnvironmentError as e:
            logger.error(f"There was a problem with the coverage data file '{self._unpacked_coverage_data_file}': '{e}'.")

    def _pack_historic_data(self, last_commit_hash: str):
        """
        Packs the current historic data into a JSON file for serializing.

        @param last_commit_hash: The commit hash to associate the coverage data (and any other meta-data) with.
        @return: The packed historic data in JSON format.
        """

        try:
            # Attempt to read the existing coverage data
            if self._unpacked_coverage_data_file.is_file():
                with open(self._unpacked_coverage_data_file, "r") as coverage_data:
                    historic_data = {"last_commit_hash": last_commit_hash, "coverage_data": coverage_data.read()}
                    return json.dumps(historic_data)
            else:
                logger.info(f"No coverage data exists at location '{self._unpacked_coverage_data_file}'.")
        except EnvironmentError as e:
            logger.error(f"There was a problem with the coverage data file '{self._unpacked_coverage_data_file}': '{e}'.")
        except TypeError:
            logger.error("The historic data could not be serialized to valid JSON.")

        return None

    @abstractmethod
    def _store_historic_data(self, historic_data_json: str):
        """
        Stores the historic data in the designated persistent storage location.

        @param historic_data_json: The historic data (in JSON format) to be stored in persistent storage.
        """
        pass

    def update_and_store_historic_data(self, last_commit_hash: str):
        """
        Updates the historic data and stores it in the designated persistent storage location.

        @param last_commit_hash: The commit hash to associate the coverage data (and any other meta-data) with.
        """

        historic_data_json = self._pack_historic_data(last_commit_hash)
        if historic_data_json:
            self._store_historic_data(historic_data_json)
        else:
            logger.info("The historic data could not be successfully stored.")

    @property
    def has_historic_data(self):
        return self._has_historic_data

    @property
    def last_commit_hash(self):
        return self._last_commit_hash

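A concrete backend only needs to supply _store_historic_data; a minimal hypothetical in-memory subclass (a test double, not part of this commit) could look like:

    class PersistentStorageInMemory(PersistentStorage):
        def __init__(self, config: dict, suite: str):
            super().__init__(config, suite)
            self._stored_json = None

        def _store_historic_data(self, historic_data_json: str):
            # No disk or network I/O: just retain the packed JSON for later inspection
            self._stored_json = historic_data_json
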
@ -0,0 +1,56 @@
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#

import pathlib
from tiaf_persistent_storage import PersistentStorage
from tiaf_logger import get_logger

logger = get_logger(__file__)

# Implementation of local persistent storage
class PersistentStorageLocal(PersistentStorage):
    def __init__(self, config: dict, suite: str):
        """
        Initializes the persistent storage with any local historic data available.

        @param config: The runtime configuration to obtain the data file paths from.
        @param suite: The test suite for which the historic data will be obtained.
        """

        super().__init__(config, suite)
        try:
            # Attempt to obtain the local persistent data location specified in the runtime config file
            self._historic_workspace = pathlib.Path(config["workspace"]["historic"]["root"])
            historic_data_file = pathlib.Path(config["workspace"]["historic"]["relative_paths"]["data"])

            # Attempt to unpack the local historic data file
            self._historic_data_file = self._historic_workspace.joinpath(historic_data_file)
            if self._historic_data_file.is_file():
                with open(self._historic_data_file, "r") as historic_data_raw:
                    historic_data_json = historic_data_raw.read()
                    self._unpack_historic_data(historic_data_json)

        except KeyError as e:
            raise SystemError(f"The config does not contain the key {str(e)}.")
        except EnvironmentError as e:
            raise SystemError(f"There was a problem with the historic data file '{self._historic_data_file}': '{e}'.")

    def _store_historic_data(self, historic_data_json: str):
        """
        Stores the historic data in the historic workspace location specified in the runtime config file.

        @param historic_data_json: The historic data (in JSON format) to be stored in persistent storage.
        """

        try:
            self._historic_workspace.mkdir(exist_ok=True)
            with open(self._historic_data_file, "w") as historic_data_file:
                historic_data_file.write(historic_data_json)
        except EnvironmentError as e:
            logger.error(f"There was a problem with the historic data file '{self._historic_data_file}': '{e}'.")

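A sketch of constructing the local backend; the paths and suite name below are hypothetical, shaped after the config keys read by the two classes above:

    config = {
        "workspace": {
            "active": {
                "root": "Temp/TestImpactFramework/active",
                "relative_paths": {"test_impact_data_files": {"main": "test_impact_data.main.json"}},
            },
            "historic": {
                "root": "Temp/TestImpactFramework/historic",
                "relative_paths": {"data": "historic_data.json"},
            },
        },
    }
    storage = PersistentStorageLocal(config, "main")
    if storage.has_historic_data:
        print(storage.last_commit_hash)
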
@ -0,0 +1,87 @@
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#

import boto3
import botocore.exceptions
import zlib
from io import BytesIO
from tiaf_persistent_storage import PersistentStorage
from tiaf_logger import get_logger

logger = get_logger(__file__)

# Implementation of s3 bucket persistent storage
class PersistentStorageS3(PersistentStorage):
    def __init__(self, config: dict, suite: str, s3_bucket: str, branch: str):
        """
        Initializes the persistent storage with the specified s3 bucket.

        @param config: The runtime configuration to obtain the data file paths from.
        @param suite: The test suite for which the historic data will be obtained.
        @param s3_bucket: The s3 bucket to use for storing and retrieving historic data.
        @param branch: The branch for which the historic data will be stored and retrieved.
        """

        super().__init__(config, suite)

        try:
            # We store the historic data as compressed JSON
            object_extension = "json.zip"

            # historic_data.json.zip is the file containing the coverage and meta-data of the last TIAF sequence run
            historic_data_file = f"historic_data.{object_extension}"

            # The location of the data is in the form <branch>/<config> so the build config of each branch gets its own historic data
            self._dir = f'{branch}/{config["meta"]["build_config"]}'
            self._historic_data_key = f'{self._dir}/{historic_data_file}'

            logger.info(f"Attempting to retrieve historic data for branch '{branch}' at location '{self._historic_data_key}' on bucket '{s3_bucket}'...")
            self._s3 = boto3.resource("s3")
            self._bucket = self._s3.Bucket(s3_bucket)

            # There is only one historic_data.json.zip in the specified location
            for historic_data_object in self._bucket.objects.filter(Prefix=self._historic_data_key):
                logger.info(f"Historic data found for branch '{branch}'.")

                # Decode the historic data object into raw bytes
                response = historic_data_object.get()
                file_stream = response['Body']

                # Decompress and unpack the zipped historic data JSON
                historic_data_json = zlib.decompress(file_stream.read()).decode('UTF-8')
                self._unpack_historic_data(historic_data_json)

                # Archive the existing object with the name of the last commit hash recovered from the unpacked data
                # (prior to unpacking, the last commit hash is not yet known)
                archive_key = f"{self._dir}/archive/{self._last_commit_hash}.{object_extension}"
                logger.info(f"Archiving existing historic data to {archive_key}...")
                self._bucket.copy({"Bucket": self._bucket.name, "Key": self._historic_data_key}, archive_key)

                return
        except KeyError as e:
            raise SystemError(f"The config does not contain the key {str(e)}.")
        except botocore.exceptions.BotoCoreError as e:
            raise SystemError(f"There was a problem with the s3 bucket: {e}")
        except botocore.exceptions.ClientError as e:
            raise SystemError(f"There was a problem with the s3 client: {e}")

    def _store_historic_data(self, historic_data_json: str):
        """
        Stores the historic data in the specified s3 bucket at the location <branch>/<build_config>/historic_data.json.zip.

        @param historic_data_json: The historic data (in JSON format) to be stored in persistent storage.
        """

        try:
            data = BytesIO(zlib.compress(bytes(historic_data_json, "UTF-8")))
            logger.info(f"Uploading historic data to location '{self._historic_data_key}'...")
            self._bucket.upload_fileobj(data, self._historic_data_key)
            logger.info("Upload complete.")
        except botocore.exceptions.BotoCoreError as e:
            logger.error(f"There was a problem with the s3 bucket: {e}")
        except botocore.exceptions.ClientError as e:
            logger.error(f"There was a problem with the s3 client: {e}")

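A sketch of the end-to-end flow with the s3 backend; the bucket and branch names are hypothetical, and the config dict is the same shape as in the local example plus the meta/build_config key read above:

    # Hypothetical values throughout; boto3 requires valid AWS credentials at runtime
    config["meta"] = {"build_config": "profile"}
    storage = PersistentStorageS3(config, "main", s3_bucket="my-tiaf-bucket", branch="development")
    # ... run the test sequence, then persist the refreshed coverage data:
    storage.update_and_store_historic_data("abc123")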