Motion Matching: Added class description and member comments

* Added description for Feature, FeatureMatrix, FeatureTrajectory classes.
* Changed default residual type to absolute as that results in better visual quality.
* Moved to //! doxygen comment style so that documentation does not interfere with /**/ blocks used to temporarily comment out surrounding code.

Signed-off-by: Benjamin Jillich <jillich@amazon.com>
monroegm-disable-blank-issue-2
Benjamin Jillich 4 years ago
parent c6a0d76843
commit f60b056b8a

@ -35,6 +35,15 @@ namespace EMotionFX::MotionMatching
class MotionMatchingInstance;
class TrajectoryQuery;
//! A feature is a property extracted from the animation data and is used by the motion matching algorithm to find the next best matching frame.
//! Examples of features are the position of the feet joints, the linear or angular velocity of the knee joints or the trajectory history and future
//! trajectory of the root joint. We can also encode environment sensations like obstacle positions and height, the location of the sword of an enemy
//! character or a football's position and velocity. Their purpose is to describe a frame of the animation by their key characteristics and sometimes
//! enhance the actual keyframe data (pos/rot/scale per joint) by e.g. taking the time domain into account and calculate the velocity or acceleration,
//! or a whole trajectory to describe where the given joint came from to reach the frame and the path it moves along in the near future.
//! @note Features are extracted and stored relative to a given joint, in most cases the motion extraction or root joint, and thus are in model-space.
//! This makes the search algorithm invariant to the character location and orientation and the extracted features, like e.g. a joint position or velocity,
//! translate and rotate along with the character.
class EMFX_API Feature
{
public:
@ -89,8 +98,8 @@ namespace EMotionFX::MotionMatching
};
virtual float CalculateFrameCost(size_t frameIndex, const FrameCostContext& context) const;
//! Specifies how the feature value differences (residuals), between the input query values
//! and the frames in the motion database that sum up the feature cost, are calculated.
/// Specifies how the feature value differences (residuals), between the input query values
/// and the frames in the motion database that sum up the feature cost, are calculated.
enum ResidualType
{
Absolute,
@ -138,16 +147,14 @@ namespace EMotionFX::MotionMatching
static void CalculateVelocity(const ActorInstance* actorInstance, size_t jointIndex, size_t relativeToJointIndex, const Frame& frame, AZ::Vector3& outVelocity);
protected:
/**
* Calculate a normalized direction vector difference between the two given vectors.
* A dot product of the two vectors is taken and the result in range [-1, 1] is scaled to [0, 1].
* @result Normalized, absolute difference between the vectors.
* Angle difference dot result cost
* 0.0 degrees 1.0 0.0
* 90.0 degrees 0.0 0.5
* 180.0 degrees -1.0 1.0
* 270.0 degrees 0.0 0.5
**/
//! Calculate a normalized direction vector difference between the two given vectors.
//! A dot product of the two vectors is taken and the result in range [-1, 1] is scaled to [0, 1].
//! @return Normalized, absolute difference between the vectors.
//! Angle difference dot result cost
//! 0.0 degrees 1.0 0.0
//! 90.0 degrees 0.0 0.5
//! 180.0 degrees -1.0 1.0
//! 270.0 degrees 0.0 0.5
float GetNormalizedDirectionDifference(const AZ::Vector2& directionA, const AZ::Vector2& directionB) const;
float GetNormalizedDirectionDifference(const AZ::Vector3& directionA, const AZ::Vector3& directionB) const;
@ -164,7 +171,7 @@ namespace EMotionFX::MotionMatching
AZ::Color m_debugColor = AZ::Colors::Green; //< Color used for debug visualizations to identify the feature.
bool m_debugDrawEnabled = false; //< Are debug visualizations enabled for this feature?
float m_costFactor = 1.0f; //< The cost factor for the feature is multiplied with the actual cost and can be used to change a feature's influence in the motion matching search.
ResidualType m_residualType = ResidualType::Squared; //< How do we calculate the differences (residuals) between the input query values and the frames in the motion database that sum up the feature cost.
ResidualType m_residualType = ResidualType::Absolute; //< How do we calculate the differences (residuals) between the input query values and the frames in the motion database that sum up the feature cost.
// Instance data (depends on the feature schema or actor instance).
FeatureMatrix::Index m_featureColumnOffset; //< Float/Value offset, starting column for where the feature should be places at.

@ -15,7 +15,9 @@
#include <AzCore/std/containers/vector.h>
#include <AzCore/std/string/string.h>
//#define O3DE_USE_EIGEN
//! Enable in case you want to use the Eigen SDK Eigen::Matrix as base for the feature matrix (https://eigen.tuxfamily.org/)
//! In case Eigen is disabled, a small simple NxM wrapper class is provided by default.
#define O3DE_USE_EIGEN
#define O3DE_MM_FLOATTYPE float
#ifdef O3DE_USE_EIGEN
@ -37,9 +39,7 @@ namespace EMotionFX::MotionMatching
// RowMajor: Store row components next to each other in memory for cache-optimized feature access for a given frame.
using FeatureMatrixType = Eigen::Matrix<O3DE_MM_FLOATTYPE, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>;
#else
/**
* Small wrapper for a 2D matrix similar to the Eigen::Matrix.
*/
//! Small wrapper for a 2D matrix similar to the Eigen::Matrix.
class FeatureMatrixType
{
public:
@ -87,6 +87,12 @@ namespace EMotionFX::MotionMatching
};
#endif
//! The feature matrix is a NxM matrix which stores the extracted feature values for all frames in our motion database based upon a given feature schema.
//! The feature schema defines the order of the columns and values and is used to identify values and find their location inside the matrix.
//! A 3D position feature storing XYZ values e.g. will use three columns in the feature matrix. Every component of a feature is linked to a column index,
//! so e.g. the left foot position Y value might be at column index 6. The group of values or columns that belong to a given feature is what we call a feature block.
//! The accumulated number of dimensions for all features in the schema, while the number of dimensions might vary per feature, form the number of columns of the feature matrix.
//! Each row represents the features of a single frame of the motion database. The number of rows of the feature matrix is defined by the number of frames in the motion database.
class FeatureMatrix
: public FeatureMatrixType
{

@ -29,12 +29,10 @@ namespace EMotionFX::MotionMatching
{
class FrameDatabase;
/**
* Matches the root joint past and future trajectory.
* For each frame in the motion database, the position and facing direction relative to the current frame of the joint will be evaluated for a past and future time window.
* The past and future samples together form the trajectory of the current frame within the time window. This basically describes where the character came from to reach the
* current frame and where it will go when continuing to play the animation.
**/
//! Matches the root joint past and future trajectory.
//! For each frame in the motion database, the position and facing direction relative to the current frame of the joint will be evaluated for a past and future time window.
//! The past and future samples together form the trajectory of the current frame within the time window. This basically describes where the character came from to reach the
//! current frame and where it will go when continuing to play the animation.
class EMFX_API FeatureTrajectory
: public Feature
{

@ -20,10 +20,8 @@ namespace EMotionFX
namespace MotionMatching
{
/**
* A motion matching frame.
* This holds information required in order to extract a given pose in a given motion.
*/
//! A motion matching frame.
//! This holds information required in order to extract a given pose in a given motion.
class EMFX_API Frame
{
public:
@ -53,10 +51,10 @@ namespace EMotionFX
void SetMirrored(bool enabled);
private:
size_t m_frameIndex = 0; /**< The motion frame index inside the data object. */
float m_sampleTime = 0.0f; /**< The time offset in the original motion. */
Motion* m_sourceMotion = nullptr; /**< The original motion that we sample from to restore the pose. */
bool m_mirrored = false; /**< Is this frame mirrored? */
size_t m_frameIndex = 0; //< The motion frame index inside the data object.
float m_sampleTime = 0.0f; //< The time offset in the original motion.
Motion* m_sourceMotion = nullptr; //< The original motion that we sample from to restore the pose.
bool m_mirrored = false; //< Is this frame mirrored?
};
} // namespace MotionMatching
} // namespace EMotionFX

@ -63,12 +63,12 @@ namespace EMotionFX::MotionMatching
protected:
bool ExtractFeatures(ActorInstance* actorInstance, FrameDatabase* frameDatabase, size_t maxKdTreeDepth=20, size_t minFramesPerKdTreeNode=2000);
FrameDatabase m_frameDatabase; /**< The animation database with all the keyframes and joint transform data. */
FrameDatabase m_frameDatabase; //< The animation database with all the keyframes and joint transform data.
const FeatureSchema& m_featureSchema;
FeatureMatrix m_featureMatrix;
AZStd::unique_ptr<KdTree> m_kdTree; /**< The acceleration structure to speed up the search for lowest cost frames. */
AZStd::unique_ptr<KdTree> m_kdTree; //< The acceleration structure to speed up the search for lowest cost frames.
AZStd::vector<Feature*> m_featuresInKdTree;
};
} // namespace EMotionFX::MotionMatching

Loading…
Cancel
Save