// FeatureTrajectory.cpp
  1. /*
  2. * Copyright (c) Contributors to the Open 3D Engine Project.
  3. * For complete copyright and license terms please see the LICENSE at the root of this distribution.
  4. *
  5. * SPDX-License-Identifier: Apache-2.0 OR MIT
  6. *
  7. */
  8. #include <Allocators.h>
  9. #include <EMotionFX/Source/ActorInstance.h>
  10. #include <EMotionFX/Source/AnimGraphPose.h>
  11. #include <EMotionFX/Source/AnimGraphPosePool.h>
  12. #include <EMotionFX/Source/EventManager.h>
  13. #include <EMotionFX/Source/Pose.h>
  14. #include <EMotionFX/Source/Transform.h>
  15. #include <EMotionFX/Source/TransformData.h>
  16. #include <FeatureMatrixTransformer.h>
  17. #include <FeatureTrajectory.h>
  18. #include <FrameDatabase.h>
  19. #include <MotionMatchingInstance.h>
  20. #include <AzCore/Serialization/EditContext.h>
  21. #include <AzCore/Serialization/SerializeContext.h>
  22. namespace EMotionFX::MotionMatching
  23. {
    // Route all FeatureTrajectory allocations through the motion matching allocator.
    AZ_CLASS_ALLOCATOR_IMPL(FeatureTrajectory, MotionMatchAllocator)
  25. bool FeatureTrajectory::Init(const InitSettings& settings)
  26. {
  27. const bool result = Feature::Init(settings);
  28. UpdateFacingAxis();
  29. return result;
  30. }
  31. ///////////////////////////////////////////////////////////////////////////
  32. AZ::Vector2 FeatureTrajectory::CalculateFacingDirection(const Pose& pose, const Transform& invRootTransform) const
  33. {
  34. // Get the facing direction of the given joint for the given pose in animation world space.
  35. // The given pose is either sampled into the relative past or future based on the frame we want to extract the feature for.
  36. const AZ::Vector3 facingDirAnimationWorldSpace = pose.GetWorldSpaceTransform(m_jointIndex).TransformVector(m_facingAxisDir);
  37. // The invRootTransform is the inverse of the world space transform for the given joint at the frame we want to extract the feature
  38. // for. The result after this will be the facing direction relative to the frame we want to extract the feature for.
  39. const AZ::Vector3 facingDirection = invRootTransform.TransformVector(facingDirAnimationWorldSpace);
  40. // Project to the ground plane and make sure the direction is normalized.
  41. return AZ::Vector2(facingDirection).GetNormalizedSafe();
  42. }
  43. FeatureTrajectory::Sample FeatureTrajectory::GetSampleFromPose(const Pose& pose, const Transform& invRootTransform) const
  44. {
  45. // Position of the root joint in the model space relative to frame to extract.
  46. const AZ::Vector2 position = AZ::Vector2(invRootTransform.TransformPoint(pose.GetWorldSpaceTransform(m_jointIndex).m_position));
  47. // Calculate the facing direction.
  48. const AZ::Vector2 facingDirection = CalculateFacingDirection(pose, invRootTransform);
  49. return { position, facingDirection };
  50. }
  51. void FeatureTrajectory::ExtractFeatureValues(const ExtractFeatureContext& context)
  52. {
  53. const ActorInstance* actorInstance = context.m_actorInstance;
  54. AnimGraphPose* samplePose = context.m_posePool.RequestPose(actorInstance);
  55. AnimGraphPose* nextSamplePose = context.m_posePool.RequestPose(actorInstance);
  56. const size_t frameIndex = context.m_frameIndex;
  57. const Frame& currentFrame = context.m_frameDatabase->GetFrame(context.m_frameIndex);
  58. // Inverse of the root transform for the frame that we want to extract data from.
  59. const Transform invRootTransform = context.m_framePose->GetWorldSpaceTransform(m_relativeToNodeIndex).Inversed();
  60. const size_t midSampleIndex = CalcMidFrameIndex();
  61. const Sample midSample = GetSampleFromPose(*context.m_framePose, invRootTransform);
  62. SetFeatureData(context.m_featureMatrix, frameIndex, midSampleIndex, midSample);
  63. // Sample the past.
  64. const float pastFrameTimeDelta = m_pastTimeRange / static_cast<float>(m_numPastSamples - 1);
  65. currentFrame.SamplePose(&samplePose->GetPose());
  66. for (size_t i = 0; i < m_numPastSamples; ++i)
  67. {
  68. // Increase the sample index by one as the zeroth past/future sample actually needs one time delta time difference to the
  69. // current frame.
  70. const float sampleTimeOffset = (i + 1) * pastFrameTimeDelta * (-1.0f);
  71. currentFrame.SamplePose(&nextSamplePose->GetPose(), sampleTimeOffset);
  72. const Sample sample = GetSampleFromPose(samplePose->GetPose(), invRootTransform);
  73. const size_t sampleIndex = CalcPastFrameIndex(i);
  74. SetFeatureData(context.m_featureMatrix, frameIndex, sampleIndex, sample);
  75. *samplePose = *nextSamplePose;
  76. }
  77. // Sample into the future.
  78. const float futureFrameTimeDelta = m_futureTimeRange / (float)(m_numFutureSamples - 1);
  79. currentFrame.SamplePose(&samplePose->GetPose());
  80. for (size_t i = 0; i < m_numFutureSamples; ++i)
  81. {
  82. // Sample the value at the future sample point.
  83. const float sampleTimeOffset = (i + 1) * futureFrameTimeDelta;
  84. currentFrame.SamplePose(&nextSamplePose->GetPose(), sampleTimeOffset);
  85. const Sample sample = GetSampleFromPose(samplePose->GetPose(), invRootTransform);
  86. const size_t sampleIndex = CalcFutureFrameIndex(i);
  87. SetFeatureData(context.m_featureMatrix, frameIndex, sampleIndex, sample);
  88. *samplePose = *nextSamplePose;
  89. }
  90. context.m_posePool.FreePose(samplePose);
  91. context.m_posePool.FreePose(nextSamplePose);
  92. }
  93. ///////////////////////////////////////////////////////////////////////////
  94. void FeatureTrajectory::FillQueryVector(QueryVector& queryVector, const QueryVectorContext& context)
  95. {
  96. const Transform invRootTransform = context.m_currentPose.GetWorldSpaceTransform(m_relativeToNodeIndex).Inversed();
  97. auto FillControlPoints =
  98. [this, &queryVector, &invRootTransform](
  99. const AZStd::vector<TrajectoryQuery::ControlPoint>& controlPoints, const AZStd::function<size_t(size_t)>& CalcFrameIndex)
  100. {
  101. const size_t numControlPoints = controlPoints.size();
  102. for (size_t i = 0; i < numControlPoints; ++i)
  103. {
  104. TrajectoryQuery::ControlPoint controlPoint = controlPoints[i];
  105. controlPoint.m_position =
  106. invRootTransform.TransformPoint(controlPoint.m_position); // Convert so it is relative to where we are and pointing to.
  107. controlPoint.m_facingDirection = invRootTransform.TransformVector(controlPoint.m_facingDirection);
  108. const size_t sampleIndex = CalcFrameIndex(i);
  109. const size_t sampleColumnStart = m_featureColumnOffset + sampleIndex * Sample::s_componentsPerSample;
  110. queryVector.SetVector2(AZ::Vector2(controlPoint.m_position), sampleColumnStart + 0); // m_position
  111. queryVector.SetVector2(AZ::Vector2(controlPoint.m_facingDirection), sampleColumnStart + 2); // m_facingDirection
  112. }
  113. };
  114. AZ_Assert(
  115. context.m_trajectoryQuery.GetFutureControlPoints().size() == m_numFutureSamples,
  116. "Number of future control points from the trajectory query does not match the one from the trajectory feature.");
  117. AZ_Assert(
  118. context.m_trajectoryQuery.GetPastControlPoints().size() == m_numPastSamples,
  119. "Number of past control points from the trajectory query does not match the one from the trajectory feature");
  120. FillControlPoints(
  121. context.m_trajectoryQuery.GetPastControlPoints(),
  122. [this](size_t frameIndex)
  123. {
  124. return CalcPastFrameIndex(frameIndex);
  125. });
  126. FillControlPoints(
  127. context.m_trajectoryQuery.GetFutureControlPoints(),
  128. [this](size_t frameIndex)
  129. {
  130. return CalcFutureFrameIndex(frameIndex);
  131. });
  132. }
  133. ///////////////////////////////////////////////////////////////////////////
    // Accumulates the trajectory cost between the samples stored in the feature
    // matrix at the given frame and the control points stored in the query vector.
    // splineToFeatureMatrixIndex maps a control point index [0, numControlPoints)
    // onto the sample index inside this feature's column block.
    float FeatureTrajectory::CalculateCost(
        const FeatureMatrix& featureMatrix,
        size_t frameIndex,
        size_t numControlPoints,
        const SplineToFeatureMatrixIndex& splineToFeatureMatrixIndex,
        const FrameCostContext& context) const
    {
        float cost = 0.0f;
        // Intentionally left uninitialized; only read once the first iteration has
        // assigned them (guarded by the i != 0 check below).
        AZ::Vector2 lastControlPoint, lastSamplePos;
        for (size_t i = 0; i < numControlPoints; ++i)
        {
            // Sample stored in the feature matrix for this control point.
            const Sample sample = GetFeatureData(featureMatrix, frameIndex, splineToFeatureMatrixIndex(i));
            const AZ::Vector2& samplePos = sample.m_position;
            // Matching control point data previously written into the query vector.
            const size_t sampleColumnStart = m_featureColumnOffset + splineToFeatureMatrixIndex(i) * Sample::s_componentsPerSample;
            const AZ::Vector2 controlPointPos = context.m_queryVector.GetVector2(sampleColumnStart + 0);
            const AZ::Vector2 controlPointFacingDirRelativeSpace = context.m_queryVector.GetVector2(sampleColumnStart + 2);
            if (i != 0)
            {
                // Compare both the absolute positions and the deltas between
                // neighboring points so shape and placement both contribute.
                const AZ::Vector2 controlPointDelta = controlPointPos - lastControlPoint;
                const AZ::Vector2 sampleDelta = samplePos - lastSamplePos;
                const float posDistance = (samplePos - controlPointPos).GetLength();
                const float posDeltaDistance = (controlPointDelta - sampleDelta).GetLength();
                // The facing direction from the control point (trajectory query) is in world space while the facing direction from the
                // sample of this trajectory feature is in relative-to-frame-root-joint space.
                const float facingDirectionCost =
                    GetNormalizedDirectionDifference(sample.m_facingDirection, controlPointFacingDirRelativeSpace);
                // As we got two different costs for the position, double the cost of the facing direction to equal out the influence.
                cost += CalcResidual(posDistance) + CalcResidual(posDeltaDistance) + CalcResidual(facingDirectionCost) * 2.0f;
            }
            lastControlPoint = controlPointPos;
            lastSamplePos = samplePos;
        }
        return cost;
    }
  168. float FeatureTrajectory::CalculateFutureFrameCost(size_t frameIndex, const FrameCostContext& context) const
  169. {
  170. return CalculateCost(
  171. context.m_featureMatrix, frameIndex, m_numFutureSamples,
  172. [this](size_t frameIndex)
  173. {
  174. return CalcFutureFrameIndex(frameIndex);
  175. },
  176. context);
  177. }
  178. float FeatureTrajectory::CalculatePastFrameCost(size_t frameIndex, const FrameCostContext& context) const
  179. {
  180. return CalculateCost(
  181. context.m_featureMatrix, frameIndex, m_numPastSamples,
  182. [this](size_t frameIndex)
  183. {
  184. return CalcPastFrameIndex(frameIndex);
  185. },
  186. context);
  187. }
  188. ///////////////////////////////////////////////////////////////////////////
  189. size_t FeatureTrajectory::CalcNumSamplesPerFrame() const
  190. {
  191. return m_numPastSamples + 1 + m_numFutureSamples;
  192. }
    // The mid (current frame) sample is stored directly after all past samples.
    size_t FeatureTrajectory::CalcMidFrameIndex() const
    {
        return m_numPastSamples;
    }
  197. size_t FeatureTrajectory::CalcPastFrameIndex(size_t historyFrameIndex) const
  198. {
  199. AZ_Assert(historyFrameIndex < m_numPastSamples, "The history frame index is out of range");
  200. return m_numPastSamples - historyFrameIndex - 1;
  201. }
  202. size_t FeatureTrajectory::CalcFutureFrameIndex(size_t futureFrameIndex) const
  203. {
  204. AZ_Assert(futureFrameIndex < m_numFutureSamples, "The future frame index is out of range");
  205. return CalcMidFrameIndex() + 1 + futureFrameIndex;
  206. }
  207. ///////////////////////////////////////////////////////////////////////////
    // Sets which joint-transform axis is considered to face forward and refreshes
    // the cached direction vector used during feature extraction.
    void FeatureTrajectory::SetFacingAxis(const Axis axis)
    {
        m_facingAxis = axis;
        UpdateFacingAxis();
    }
  213. void FeatureTrajectory::UpdateFacingAxis()
  214. {
  215. switch (m_facingAxis)
  216. {
  217. case Axis::X:
  218. {
  219. m_facingAxisDir = AZ::Vector3::CreateAxisX();
  220. break;
  221. }
  222. case Axis::Y:
  223. {
  224. m_facingAxisDir = AZ::Vector3::CreateAxisY();
  225. break;
  226. }
  227. case Axis::X_NEGATIVE:
  228. {
  229. m_facingAxisDir = -AZ::Vector3::CreateAxisX();
  230. break;
  231. }
  232. case Axis::Y_NEGATIVE:
  233. {
  234. m_facingAxisDir = -AZ::Vector3::CreateAxisY();
  235. break;
  236. }
  237. default:
  238. {
  239. AZ_Assert(false, "Facing direction axis unknown.");
  240. }
  241. }
  242. }
    // Sets the time window (in seconds) over which the past samples are distributed.
    void FeatureTrajectory::SetPastTimeRange(float timeInSeconds)
    {
        m_pastTimeRange = timeInSeconds;
    }
    // Sets the time window (in seconds) over which the future samples are distributed.
    void FeatureTrajectory::SetFutureTimeRange(float timeInSeconds)
    {
        m_futureTimeRange = timeInSeconds;
    }
    // Sets the number of past (history) samples stored per frame.
    void FeatureTrajectory::SetNumPastSamplesPerFrame(size_t numHistorySamples)
    {
        m_numPastSamples = numHistorySamples;
    }
    // Sets the number of future samples stored per frame.
    void FeatureTrajectory::SetNumFutureSamplesPerFrame(size_t numFutureSamples)
    {
        m_numFutureSamples = numFutureSamples;
    }
  259. void FeatureTrajectory::DebugDrawFacingDirection(
  260. AzFramework::DebugDisplayRequests& debugDisplay,
  261. const AZ::Vector3& positionWorldSpace,
  262. const AZ::Vector3& facingDirectionWorldSpace)
  263. {
  264. const float length = 0.2f;
  265. const float radius = 0.01f;
  266. const AZ::Vector3 facingDirectionTarget = positionWorldSpace + facingDirectionWorldSpace * length;
  267. debugDisplay.DrawSolidCylinder(
  268. /*center=*/(facingDirectionTarget + positionWorldSpace) * 0.5f,
  269. /*direction=*/facingDirectionWorldSpace, radius,
  270. /*height=*/length,
  271. /*drawShaded=*/false);
  272. }
  273. void FeatureTrajectory::DebugDrawFacingDirection(
  274. AzFramework::DebugDisplayRequests& debugDisplay,
  275. const Transform& worldSpaceTransform,
  276. const Sample& sample,
  277. const AZ::Vector3& samplePosWorldSpace) const
  278. {
  279. const AZ::Vector3 facingDirectionWorldSpace =
  280. worldSpaceTransform.TransformVector(AZ::Vector3(sample.m_facingDirection)).GetNormalizedSafe();
  281. DebugDrawFacingDirection(debugDisplay, samplePosWorldSpace, facingDirectionWorldSpace);
  282. }
  283. void FeatureTrajectory::DebugDrawTrajectory(
  284. AzFramework::DebugDisplayRequests& debugDisplay,
  285. const FeatureMatrix& featureMatrix,
  286. const FeatureMatrixTransformer* featureTransformer,
  287. size_t frameIndex,
  288. const Transform& worldSpaceTransform,
  289. const AZ::Color& color,
  290. size_t numSamples,
  291. const SplineToFeatureMatrixIndex& splineToFeatureMatrixIndex) const
  292. {
  293. if (frameIndex == InvalidIndex)
  294. {
  295. return;
  296. }
  297. constexpr float markerSize = 0.02f;
  298. debugDisplay.DepthTestOff();
  299. debugDisplay.SetColor(color);
  300. Sample nextSample;
  301. AZ::Vector3 nextSamplePos;
  302. for (size_t i = 0; i < numSamples - 1; ++i)
  303. {
  304. Sample currentSample =
  305. GetFeatureDataInverseTransformed(featureMatrix, featureTransformer, frameIndex, splineToFeatureMatrixIndex(i));
  306. nextSample = GetFeatureDataInverseTransformed(featureMatrix, featureTransformer, frameIndex, splineToFeatureMatrixIndex(i + 1));
  307. const AZ::Vector3 currentSamplePos = worldSpaceTransform.TransformPoint(AZ::Vector3(currentSample.m_position));
  308. nextSamplePos = worldSpaceTransform.TransformPoint(AZ::Vector3(nextSample.m_position));
  309. // Line between current and next sample.
  310. debugDisplay.DrawSolidCylinder(
  311. /*center=*/(nextSamplePos + currentSamplePos) * 0.5f,
  312. /*direction=*/(nextSamplePos - currentSamplePos).GetNormalizedSafe(),
  313. /*radius=*/0.0025f,
  314. /*height=*/(nextSamplePos - currentSamplePos).GetLength(),
  315. /*drawShaded=*/false);
  316. // Sphere at the sample position and a cylinder to indicate the facing direction.
  317. debugDisplay.DrawBall(currentSamplePos, markerSize, /*drawShaded=*/false);
  318. DebugDrawFacingDirection(debugDisplay, worldSpaceTransform, currentSample, currentSamplePos);
  319. }
  320. debugDisplay.DrawBall(nextSamplePos, markerSize, /*drawShaded=*/false);
  321. DebugDrawFacingDirection(debugDisplay, worldSpaceTransform, nextSample, nextSamplePos);
  322. }
  323. void FeatureTrajectory::DebugDraw(
  324. AzFramework::DebugDisplayRequests& debugDisplay,
  325. const Pose& currentPose,
  326. const FeatureMatrix& featureMatrix,
  327. const FeatureMatrixTransformer* featureTransformer,
  328. size_t frameIndex)
  329. {
  330. const Transform transform = currentPose.GetWorldSpaceTransform(m_jointIndex);
  331. DebugDrawTrajectory(
  332. debugDisplay, featureMatrix, featureTransformer, frameIndex, transform, m_debugColor, m_numPastSamples,
  333. AZStd::bind(&FeatureTrajectory::CalcPastFrameIndex, this, AZStd::placeholders::_1));
  334. DebugDrawTrajectory(
  335. debugDisplay, featureMatrix, featureTransformer, frameIndex, transform, m_debugColor, m_numFutureSamples,
  336. AZStd::bind(&FeatureTrajectory::CalcFutureFrameIndex, this, AZStd::placeholders::_1));
  337. }
    // Hide the generic cost factor property in the editor UI; this feature exposes
    // dedicated past and future cost factors instead (see Reflect).
    AZ::Crc32 FeatureTrajectory::GetCostFactorVisibility() const
    {
        return AZ::Edit::PropertyVisibility::Hide;
    }
    // Registers the serialized fields (version 2) and, when an edit context is
    // available, the editor UI for tweaking the trajectory feature settings.
    void FeatureTrajectory::Reflect(AZ::ReflectContext* context)
    {
        AZ::SerializeContext* serializeContext = azrtti_cast<AZ::SerializeContext*>(context);
        if (!serializeContext)
        {
            return;
        }
        // Serialized data fields.
        serializeContext->Class<FeatureTrajectory, Feature>()
            ->Version(2)
            ->Field("pastTimeRange", &FeatureTrajectory::m_pastTimeRange)
            ->Field("numPastSamples", &FeatureTrajectory::m_numPastSamples)
            ->Field("pastCostFactor", &FeatureTrajectory::m_pastCostFactor)
            ->Field("futureTimeRange", &FeatureTrajectory::m_futureTimeRange)
            ->Field("numFutureSamples", &FeatureTrajectory::m_numFutureSamples)
            ->Field("futureCostFactor", &FeatureTrajectory::m_futureCostFactor)
            ->Field("facingAxis", &FeatureTrajectory::m_facingAxis);
        AZ::EditContext* editContext = serializeContext->GetEditContext();
        if (!editContext)
        {
            return;
        }
        // Editor UI metadata for the fields above.
        editContext->Class<FeatureTrajectory>("FeatureTrajectory", "Matches the joint past and future trajectory.")
            ->ClassElement(AZ::Edit::ClassElements::EditorData, "")
            ->Attribute(AZ::Edit::Attributes::AutoExpand, "")
            ->DataElement(
                AZ::Edit::UIHandlers::Default, &FeatureTrajectory::m_numPastSamples, "Past Samples",
                "The number of samples stored per frame for the past trajectory. [Default = 4 samples to represent the trajectory history]")
            ->Attribute(AZ::Edit::Attributes::Min, 1)
            ->Attribute(AZ::Edit::Attributes::Max, 100)
            ->Attribute(AZ::Edit::Attributes::Step, 1)
            ->DataElement(
                AZ::Edit::UIHandlers::Default, &FeatureTrajectory::m_pastTimeRange, "Past Time Range",
                "The time window the samples are distributed along for the trajectory history. [Default = 0.7 seconds]")
            ->Attribute(AZ::Edit::Attributes::Min, 0.01f)
            ->Attribute(AZ::Edit::Attributes::Max, 10.0f)
            ->Attribute(AZ::Edit::Attributes::Step, 0.1f)
            ->DataElement(
                AZ::Edit::UIHandlers::Default, &FeatureTrajectory::m_pastCostFactor, "Past Cost Factor",
                "The cost factor is multiplied with the cost from the trajectory history and can be used to change the influence of the "
                "trajectory history match in the motion matching search.")
            ->Attribute(AZ::Edit::Attributes::Min, 0.0f)
            ->Attribute(AZ::Edit::Attributes::Max, 100.0f)
            ->Attribute(AZ::Edit::Attributes::Step, 0.1f)
            ->DataElement(
                AZ::Edit::UIHandlers::Default, &FeatureTrajectory::m_numFutureSamples, "Future Samples",
                "The number of samples stored per frame for the future trajectory. [Default = 6 samples to represent the future "
                "trajectory]")
            ->Attribute(AZ::Edit::Attributes::Min, 1)
            ->Attribute(AZ::Edit::Attributes::Max, 100)
            ->Attribute(AZ::Edit::Attributes::Step, 1)
            ->DataElement(
                AZ::Edit::UIHandlers::Default, &FeatureTrajectory::m_futureTimeRange, "Future Time Range",
                "The time window the samples are distributed along for the future trajectory. [Default = 1.2 seconds]")
            ->Attribute(AZ::Edit::Attributes::Min, 0.01f)
            ->Attribute(AZ::Edit::Attributes::Max, 10.0f)
            ->Attribute(AZ::Edit::Attributes::Step, 0.1f)
            ->DataElement(
                AZ::Edit::UIHandlers::Default, &FeatureTrajectory::m_futureCostFactor, "Future Cost Factor",
                "The cost factor is multiplied with the cost from the future trajectory and can be used to change the influence of the "
                "future trajectory match in the motion matching search.")
            ->Attribute(AZ::Edit::Attributes::Min, 0.0f)
            ->Attribute(AZ::Edit::Attributes::Max, 100.0f)
            ->Attribute(AZ::Edit::Attributes::Step, 0.1f)
            ->DataElement(
                AZ::Edit::UIHandlers::ComboBox, &FeatureTrajectory::m_facingAxis, "Facing Axis",
                "The facing direction of the character. Which axis of the joint transform is facing forward? [Default = Looking into "
                "Y-axis direction]")
            // Re-cache the direction vector whenever the axis changes in the editor.
            ->Attribute(AZ::Edit::Attributes::ChangeNotify, &FeatureTrajectory::UpdateFacingAxis)
            ->EnumAttribute(Axis::X, "X")
            ->EnumAttribute(Axis::X_NEGATIVE, "-X")
            ->EnumAttribute(Axis::Y, "Y")
            ->EnumAttribute(Axis::Y_NEGATIVE, "-Y");
    }
  415. size_t FeatureTrajectory::GetNumDimensions() const
  416. {
  417. return CalcNumSamplesPerFrame() * Sample::s_componentsPerSample;
  418. }
  419. AZStd::string FeatureTrajectory::GetDimensionName(size_t index) const
  420. {
  421. AZStd::string result = "Trajectory";
  422. const int sampleIndex = aznumeric_cast<int>(index) / aznumeric_cast<int>(Sample::s_componentsPerSample);
  423. const int componentIndex = index % Sample::s_componentsPerSample;
  424. const int midSampleIndex = aznumeric_cast<int>(CalcMidFrameIndex());
  425. if (sampleIndex == midSampleIndex)
  426. {
  427. result += ".Current.";
  428. }
  429. else if (sampleIndex < midSampleIndex)
  430. {
  431. result += AZStd::string::format(".Past%i.", sampleIndex - static_cast<int>(m_numPastSamples));
  432. }
  433. else
  434. {
  435. result += AZStd::string::format(".Future%i.", sampleIndex - static_cast<int>(m_numPastSamples));
  436. }
  437. switch (componentIndex)
  438. {
  439. case 0:
  440. {
  441. result += "PosX";
  442. break;
  443. }
  444. case 1:
  445. {
  446. result += "PosY";
  447. break;
  448. }
  449. case 2:
  450. {
  451. result += "FacingDirX";
  452. break;
  453. }
  454. case 3:
  455. {
  456. result += "FacingDirY";
  457. break;
  458. }
  459. default:
  460. {
  461. result += Feature::GetDimensionName(index);
  462. }
  463. }
  464. return result;
  465. }
  466. FeatureTrajectory::Sample FeatureTrajectory::GetFeatureData(
  467. const FeatureMatrix& featureMatrix, size_t frameIndex, size_t sampleIndex) const
  468. {
  469. const size_t columnOffset = m_featureColumnOffset + sampleIndex * Sample::s_componentsPerSample;
  470. return {
  471. /*.m_position =*/featureMatrix.GetVector2(frameIndex, columnOffset + 0),
  472. /*.m_facingDirection =*/featureMatrix.GetVector2(frameIndex, columnOffset + 2),
  473. };
  474. }
  475. FeatureTrajectory::Sample FeatureTrajectory::GetFeatureDataInverseTransformed(
  476. const FeatureMatrix& featureMatrix, const FeatureMatrixTransformer* featureTransformer, size_t frameIndex, size_t sampleIndex) const
  477. {
  478. Sample sample = GetFeatureData(featureMatrix, frameIndex, sampleIndex);
  479. if (featureTransformer)
  480. {
  481. const size_t columnOffset = m_featureColumnOffset + sampleIndex * Sample::s_componentsPerSample;
  482. sample.m_position = featureTransformer->InverseTransform(sample.m_position, columnOffset + 0);
  483. sample.m_facingDirection = featureTransformer->InverseTransform(sample.m_facingDirection, columnOffset + 2);
  484. }
  485. return sample;
  486. }
  487. void FeatureTrajectory::SetFeatureData(FeatureMatrix& featureMatrix, size_t frameIndex, size_t sampleIndex, const Sample& sample)
  488. {
  489. const size_t columnOffset = m_featureColumnOffset + sampleIndex * Sample::s_componentsPerSample;
  490. featureMatrix.SetVector2(frameIndex, columnOffset + 0, sample.m_position);
  491. featureMatrix.SetVector2(frameIndex, columnOffset + 2, sample.m_facingDirection);
  492. }
  493. } // namespace EMotionFX::MotionMatching