Motion Matching: Moved discard event check into consolidated IsFrameDiscarded() method

* Moved the motion event extraction call and the discard frame event check into the IsFrameDiscarded() function.
* Added a class description for the frame database and ported the comments to the //! and //< style.

Signed-off-by: Benjamin Jillich <jillich@amazon.com>
Benjamin Jillich 4 years ago
parent 83869d7064
commit 3eb661c4bc

@@ -53,7 +53,7 @@ namespace EMotionFX::MotionMatching
         m_usedMotions.shrink_to_fit();
     }
 
-    void FrameDatabase::ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<EventData*>& activeEventDatas)
+    void FrameDatabase::ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<EventData*>& activeEventDatas) const
     {
         activeEventDatas.clear();
@@ -84,9 +84,11 @@ namespace EMotionFX::MotionMatching
         }
     }
 
-    bool FrameDatabase::IsFrameDiscarded(const AZStd::vector<EventData*>& activeEventDatas) const
+    bool FrameDatabase::IsFrameDiscarded(const Motion* motion, float frameTime, AZStd::vector<EventData*>& activeEvents) const
     {
-        for (const EventData* eventData : activeEventDatas)
+        // Is frame discarded by a motion event?
+        ExtractActiveMotionEventDatas(motion, frameTime, activeEvents);
+        for (const EventData* eventData : activeEvents)
         {
             if (eventData->RTTI_GetType() == azrtti_typeid<DiscardFrameEventData>())
             {
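
For reference, here is how the consolidated method plausibly reads in full after this change. This is a sketch pieced together from the hunk above; the return paths are not visible in the diff and are assumed from the bool return type.

    // Sketch only: the return statements below are assumed, not shown in the hunk.
    bool FrameDatabase::IsFrameDiscarded(const Motion* motion, float frameTime, AZStd::vector<EventData*>& activeEvents) const
    {
        // Gather the motion events active at the given frame time (the vector is cleared internally).
        ExtractActiveMotionEventDatas(motion, frameTime, activeEvents);

        // Is frame discarded by a motion event?
        for (const EventData* eventData : activeEvents)
        {
            if (eventData->RTTI_GetType() == azrtti_typeid<DiscardFrameEventData>())
            {
                return true; // A discard frame event is active at this time, so skip the frame.
            }
        }

        return false;
    }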
@@ -126,11 +128,10 @@ namespace EMotionFX::MotionMatching
         double curTime = 0.0;
         while (curTime <= totalTime)
         {
-            const float floatTime = aznumeric_cast<float>(curTime);
-            ExtractActiveMotionEventDatas(motion, floatTime, activeEvents);
-            if (!IsFrameDiscarded(activeEvents))
+            const float frameTime = aznumeric_cast<float>(curTime);
+            if (!IsFrameDiscarded(motion, frameTime, activeEvents))
             {
-                ImportFrame(motion, floatTime, mirrored);
+                ImportFrame(motion, frameTime, mirrored);
                 numFramesImported++;
             }
             else
@@ -143,11 +144,10 @@ namespace EMotionFX::MotionMatching
         // Make sure we include the last frame, if we stepped over it.
         if (curTime - timeStep < totalTime - 0.000001)
         {
-            const float floatTime = aznumeric_cast<float>(totalTime);
-            ExtractActiveMotionEventDatas(motion, floatTime, activeEvents);
-            if (!IsFrameDiscarded(activeEvents))
+            const float frameTime = aznumeric_cast<float>(totalTime);
+            if (!IsFrameDiscarded(motion, frameTime, activeEvents))
             {
-                ImportFrame(motion, floatTime, mirrored);
+                ImportFrame(motion, frameTime, mirrored);
                 numFramesImported++;
             }
             else
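
The two hunks above share the same sampling pattern: step curTime at a fixed rate, skip any frame that IsFrameDiscarded() rejects, and append the final frame if the loop stepped over it. Below is a minimal, self-contained sketch of that pattern; the derivation of timeStep from the sample rate (1 / sampleRate) is an assumption, as it is not shown in this diff.

    #include <cstdio>
    #include <vector>

    int main()
    {
        const double totalTime = 1.05;  // Hypothetical motion duration in seconds.
        const size_t sampleRate = 30;   // Matches the FrameImportSettings::m_sampleRate default.
        const double timeStep = 1.0 / static_cast<double>(sampleRate); // Assumed derivation.

        std::vector<float> frameTimes;
        double curTime = 0.0;
        while (curTime <= totalTime)
        {
            frameTimes.push_back(static_cast<float>(curTime)); // Stand-in for the discard check + ImportFrame().
            curTime += timeStep;
        }

        // Make sure we include the last frame, if we stepped over it (same epsilon as the hunk above).
        if (curTime - timeStep < totalTime - 0.000001)
        {
            frameTimes.push_back(static_cast<float>(totalTime));
        }

        std::printf("Sampled %zu frame times.\n", frameTimes.size());
        return 0;
    }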

@@ -29,29 +29,36 @@ namespace EMotionFX::MotionMatching
     class MotionMatchingInstance;
     class MotionMatchEventData;
 
-    // The motion matching data.
-    // This is basically a database of frames (which point to motion objects), together with meta data per frame.
-    // No actual pose data is stored directly inside this class, just references to the right sample times inside specific motions.
+    //! A set of frames from your animations sampled at a given sample rate is stored in the frame database. A frame object knows about its index in the frame database,
+    //! the animation it belongs to, and the sample time in seconds. It does not hold the actual sampled pose for memory reasons, as the `EMotionFX::Motion` already stores the
+    //! transform keyframes.
+    //! The sample rate of the animation might differ from the sample rate used for the frame database. For example, your animations might be recorded at 60 Hz while we only want
+    //! to extract the features with a sample rate of 30 Hz. As the motion matching algorithm blends between the frames in the motion database while playing the animation window
+    //! between the jumps/blends, it can make sense to have animations with a higher sample rate than we use to extract the features.
+    //! A frame of the motion database can be used to sample a pose from which we can extract the features. It also provides functionality to sample a pose with a time offset to that frame.
+    //! This can be handy in order to calculate joint velocities or trajectory samples.
+    //! When importing animations, frames that are within the range of a discard frame motion event are ignored and won't be added to the motion database. Discard motion events can be
+    //! used to cut out sections of the imported animations that are unwanted, like a stretching part between two dance cards.
     class EMFX_API FrameDatabase
     {
     public:
         AZ_RTTI(FrameDatabase, "{3E5ED4F9-8975-41F2-B665-0086368F0DDA}")
         AZ_CLASS_ALLOCATOR_DECL
 
-        // The settings used when importing motions into the frame database.
-        // Used in combination with ImportFrames().
+        //! The settings used when importing motions into the frame database.
+        //! Used in combination with ImportFrames().
         struct EMFX_API FrameImportSettings
         {
-            size_t m_sampleRate = 30; /**< Sample at 30 frames per second on default. */
-            bool m_autoShrink = true; /**< Automatically shrink the internal frame arrays to their minimum size afterwards. */
+            size_t m_sampleRate = 30; //< Sample at 30 frames per second by default.
+            bool m_autoShrink = true; //< Automatically shrink the internal frame arrays to their minimum size afterwards.
         };
 
         FrameDatabase();
         virtual ~FrameDatabase();
 
         // Main functions.
-        AZStd::tuple<size_t, size_t> ImportFrames(Motion* motion, const FrameImportSettings& settings, bool mirrored); // Returns the number of imported frames and the number of discarded frames as second element.
-        void Clear(); // Clear the data, so you can re-initialize it with new data.
+        AZStd::tuple<size_t, size_t> ImportFrames(Motion* motion, const FrameImportSettings& settings, bool mirrored); //< Returns the number of imported frames and the number of discarded frames as the second element.
+        void Clear(); //< Clear the data, so you can re-initialize it with new data.
 
         // Statistics.
         size_t GetNumFrames() const;
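
The class description above notes that a frame stores only a reference (its database index, the source motion, and a sample time), never a pose. The real Frame class is not part of this diff; the struct below is a hypothetical stand-in just to illustrate the per-frame record the comment describes.

    // Illustrative only: hypothetical names, not the actual EMotionFX::MotionMatching::Frame class.
    struct FrameRecordSketch
    {
        size_t m_frameIndex = 0;                // Index of this frame inside the frame database.
        const Motion* m_sourceMotion = nullptr; // Motion this frame samples from; no pose data is stored here.
        float m_sampleTime = 0.0f;              // Sample time in seconds inside that motion.
        bool m_mirrored = false;                // Whether the frame refers to the mirrored motion (see ImportFrame's 'mirrored' flag).
    };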
@@ -66,21 +73,19 @@ namespace EMotionFX::MotionMatching
         const AZStd::vector<const Motion*>& GetUsedMotions() const;
         size_t GetSampleRate() const { return m_sampleRate; }
 
-        /**
-         * Find the frame index for the given playtime and motion.
-         * NOTE: This is a slow operation and should not be used by the runtime without visual debugging.
-         */
+        //! Find the frame index for the given playtime and motion.
+        //! NOTE: This is a slow operation and should not be used by the runtime without visual debugging.
         size_t FindFrameIndex(Motion* motion, float playtime) const;
 
     private:
         void ImportFrame(Motion* motion, float timeValue, bool mirrored);
-        bool IsFrameDiscarded(const AZStd::vector<EventData*>& activeEventDatas) const;
-        void ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<EventData*>& activeEventDatas); // Vector will be cleared internally.
+        bool IsFrameDiscarded(const Motion* motion, float frameTime, AZStd::vector<EventData*>& activeEvents) const;
+        void ExtractActiveMotionEventDatas(const Motion* motion, float time, AZStd::vector<EventData*>& activeEventDatas) const; // Vector will be cleared internally.
 
     private:
-        AZStd::vector<Frame> m_frames; /**< The collection of frames. Keep in mind these don't hold a pose, but reference to a given frame/time value inside a given motion. */
+        AZStd::vector<Frame> m_frames; //< The collection of frames. Keep in mind these don't hold a pose, but a reference to a given frame/time value inside a given motion.
         AZStd::unordered_map<Motion*, AZStd::vector<size_t>> m_frameIndexByMotion;
-        AZStd::vector<const Motion*> m_usedMotions; /**< The list of used motions. */
+        AZStd::vector<const Motion*> m_usedMotions; //< The list of used motions.
         size_t m_sampleRate = 0;
     };
 } // namespace EMotionFX::MotionMatching
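
Putting the public API from this header together, a hedged usage sketch follows; 'motion' is assumed to be an already-loaded EMotionFX::Motion*, which is outside the scope of this diff.

    using namespace EMotionFX::MotionMatching;

    FrameDatabase frameDatabase;

    FrameDatabase::FrameImportSettings settings;
    settings.m_sampleRate = 30;   // Sample the motion at 30 Hz when building the frame database.
    settings.m_autoShrink = true; // Shrink the internal frame arrays once importing is done.

    // Frames inside a discard frame motion event are skipped and counted as discarded.
    const AZStd::tuple<size_t, size_t> result = frameDatabase.ImportFrames(motion, settings, /*mirrored=*/false);
    const size_t numImported = AZStd::get<0>(result);
    const size_t numDiscarded = AZStd::get<1>(result);

    AZ_Printf("MotionMatching", "Imported %zu frames, discarded %zu frames (database sample rate: %zu Hz).\n",
        numImported, numDiscarded, frameDatabase.GetSampleRate());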
