diff --git a/Gems/MotionMatching/Code/Source/FrameDatabase.cpp b/Gems/MotionMatching/Code/Source/FrameDatabase.cpp
index 7d38060dcc..307a45a974 100644
--- a/Gems/MotionMatching/Code/Source/FrameDatabase.cpp
+++ b/Gems/MotionMatching/Code/Source/FrameDatabase.cpp
@@ -53,7 +53,7 @@ namespace EMotionFX::MotionMatching
         m_usedMotions.shrink_to_fit();
     }
 
-    void FrameDatabase::ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<const EventData*>& activeEventDatas)
+    void FrameDatabase::ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<const EventData*>& activeEventDatas) const
     {
         activeEventDatas.clear();
 
@@ -84,9 +84,11 @@ namespace EMotionFX::MotionMatching
         }
     }
 
-    bool FrameDatabase::IsFrameDiscarded(const AZStd::vector<const EventData*>& activeEventDatas) const
+    bool FrameDatabase::IsFrameDiscarded(const Motion* motion, float frameTime, AZStd::vector<const EventData*>& activeEvents) const
     {
-        for (const EventData* eventData : activeEventDatas)
+        // Is frame discarded by a motion event?
+        ExtractActiveMotionEventDatas(motion, frameTime, activeEvents);
+        for (const EventData* eventData : activeEvents)
         {
             if (eventData->RTTI_GetType() == azrtti_typeid<DiscardFrameEventData>())
             {
@@ -126,11 +128,10 @@ namespace EMotionFX::MotionMatching
         double curTime = 0.0;
         while (curTime <= totalTime)
         {
-            const float floatTime = aznumeric_cast<float>(curTime);
-            ExtractActiveMotionEventDatas(motion, floatTime, activeEvents);
-            if (!IsFrameDiscarded(activeEvents))
+            const float frameTime = aznumeric_cast<float>(curTime);
+            if (!IsFrameDiscarded(motion, frameTime, activeEvents))
             {
-                ImportFrame(motion, floatTime, mirrored);
+                ImportFrame(motion, frameTime, mirrored);
                 numFramesImported++;
             }
             else
@@ -143,11 +144,10 @@ namespace EMotionFX::MotionMatching
         // Make sure we include the last frame, if we stepped over it.
         if (curTime - timeStep < totalTime - 0.000001)
         {
-            const float floatTime = aznumeric_cast<float>(totalTime);
-            ExtractActiveMotionEventDatas(motion, floatTime, activeEvents);
-            if (!IsFrameDiscarded(activeEvents))
+            const float frameTime = aznumeric_cast<float>(totalTime);
+            if (!IsFrameDiscarded(motion, frameTime, activeEvents))
             {
-                ImportFrame(motion, floatTime, mirrored);
+                ImportFrame(motion, frameTime, mirrored);
                 numFramesImported++;
             }
             else
diff --git a/Gems/MotionMatching/Code/Source/FrameDatabase.h b/Gems/MotionMatching/Code/Source/FrameDatabase.h
index c5258e1b39..45afe98825 100644
--- a/Gems/MotionMatching/Code/Source/FrameDatabase.h
+++ b/Gems/MotionMatching/Code/Source/FrameDatabase.h
@@ -29,29 +29,36 @@ namespace EMotionFX::MotionMatching
     class MotionMatchingInstance;
     class MotionMatchEventData;
 
-    // The motion matching data.
-    // This is basically a database of frames (which point to motion objects), together with meta data per frame.
-    // No actual pose data is stored directly inside this class, just references to the right sample times inside specific motions.
+    //! A set of frames from your animations sampled at a given sample rate is stored in the frame database. A frame object knows about its index in the frame database,
+    //! the animation it belongs to and the sample time in seconds. It does not hold the actual sampled pose for memory reasons as the `EMotionFX::Motion` already store the
+    //! transform keyframes.
+    //! The sample rate of the animation might differ from the sample rate used for the frame database. For example, your animations might be recorded with 60 Hz while we only want
+    //! to extract the features with a sample rate of 30 Hz. As the motion matching algorithm is blending between the frames in the motion database while playing the animation window
+    //! between the jumps/blends, it can make sense to have animations with a higher sample rate than we use to extract the features.
+    //! A frame of the motion database can be used to sample a pose from which we can extract the features. It also provides functionality to sample a pose with a time offset to that frame.
+    //! This can be handy in order to calculate joint velocities or trajectory samples.
+    //! When importing animations, frames that are within the range of a discard frame motion event are ignored and won't be added to the motion database. Discard motion events can be
+    //! used to cut out sections of the imported animations that are unwanted like a stretching part between two dance cards.
     class EMFX_API FrameDatabase
     {
    public:
        AZ_RTTI(FrameDatabase, "{3E5ED4F9-8975-41F2-B665-0086368F0DDA}")
        AZ_CLASS_ALLOCATOR_DECL
 
-        // The settings used when importing motions into the frame database.
-        // Used in combination with ImportFrames().
+        //! The settings used when importing motions into the frame database.
+        //! Used in combination with ImportFrames().
        struct EMFX_API FrameImportSettings
        {
-            size_t m_sampleRate = 30; /**< Sample at 30 frames per second on default. */
-            bool m_autoShrink = true; /**< Automatically shrink the internal frame arrays to their minimum size afterwards. */
+            size_t m_sampleRate = 30; //< Sample at 30 frames per second on default.
+            bool m_autoShrink = true; //< Automatically shrink the internal frame arrays to their minimum size afterwards.
        };
 
        FrameDatabase();
        virtual ~FrameDatabase();
 
        // Main functions.
-        AZStd::tuple<size_t, size_t> ImportFrames(Motion* motion, const FrameImportSettings& settings, bool mirrored); // Returns the number of imported frames and the number of discarded frames as second element.
-        void Clear(); // Clear the data, so you can re-initialize it with new data.
+        AZStd::tuple<size_t, size_t> ImportFrames(Motion* motion, const FrameImportSettings& settings, bool mirrored); //< Returns the number of imported frames and the number of discarded frames as second element.
+        void Clear(); //< Clear the data, so you can re-initialize it with new data.
 
        // Statistics.
        size_t GetNumFrames() const;
@@ -66,21 +73,19 @@
        const AZStd::vector<Motion*>& GetUsedMotions() const;
        size_t GetSampleRate() const { return m_sampleRate; }
 
-        /**
-         * Find the frame index for the given playtime and motion.
-         * NOTE: This is a slow operation and should not be used by the runtime without visual debugging.
-         */
+        //! Find the frame index for the given playtime and motion.
+        //! NOTE: This is a slow operation and should not be used by the runtime without visual debugging.
        size_t FindFrameIndex(Motion* motion, float playtime) const;
 
    private:
        void ImportFrame(Motion* motion, float timeValue, bool mirrored);
-        bool IsFrameDiscarded(const AZStd::vector<const EventData*>& activeEventDatas) const;
-        void ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<const EventData*>& activeEventDatas); // Vector will be cleared internally.
+        bool IsFrameDiscarded(const Motion* motion, float frameTime, AZStd::vector<const EventData*>& activeEvents) const;
+        void ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<const EventData*>& activeEventDatas) const; // Vector will be cleared internally.
 
    private:
-        AZStd::vector<Frame> m_frames; /**< The collection of frames. Keep in mind these don't hold a pose, but reference to a given frame/time value inside a given motion. */
+        AZStd::vector<Frame> m_frames; //< The collection of frames. Keep in mind these don't hold a pose, but reference to a given frame/time value inside a given motion.
        AZStd::unordered_map<Motion*, AZStd::vector<size_t>> m_frameIndexByMotion;
-        AZStd::vector<Motion*> m_usedMotions; /**< The list of used motions. */
+        AZStd::vector<Motion*> m_usedMotions; //< The list of used motions.
        size_t m_sampleRate = 0;
    };
} // namespace EMotionFX::MotionMatching