// TrajectoryQuery.cpp
  1. /*
  2. * Copyright (c) Contributors to the Open 3D Engine Project.
  3. * For complete copyright and license terms please see the LICENSE at the root of this distribution.
  4. *
  5. * SPDX-License-Identifier: Apache-2.0 OR MIT
  6. *
  7. */
  8. #include <EMotionFX/Source/ActorInstance.h>
  9. #include <TrajectoryQuery.h>
  10. #include <FeatureTrajectory.h>
  11. namespace EMotionFX::MotionMatching
  12. {
  13. AZ::Vector3 SampleFunction(float offset, float radius, float phase)
  14. {
  15. phase += 10.7;
  16. AZ::Vector3 displacement = AZ::Vector3::CreateZero();
  17. displacement.SetX(radius * sinf(phase * 0.7f + offset) + radius * 0.75f * cosf(phase * 2.0f + offset * 2.0f));
  18. displacement.SetY(radius * cosf(phase * 0.4f + offset));
  19. return displacement;
  20. }
    //! Predict the future trajectory control points by steering the character from its
    //! current world transform towards the given target position and facing direction.
    //! Fills m_futureControlPoints; the vector is expected to already be sized to
    //! trajectoryFeature->GetNumFutureSamples() (Update() resizes it before calling this).
    //! @param actorInstance Actor whose world transform is the starting point of the prediction.
    //! @param trajectoryFeature Provides the future sample count, the future time range and the facing axis.
    //! @param targetPos Target position in world space to steer towards.
    //! @param targetFacingDir Desired facing direction at the target.
    //! @param useTargetFacingDir When true, every predicted sample faces targetFacingDir;
    //!        otherwise the facing direction is interpolated towards the movement direction.
    void TrajectoryQuery::PredictFutureTrajectory(const ActorInstance& actorInstance,
    const FeatureTrajectory* trajectoryFeature,
    const AZ::Vector3& targetPos,
    const AZ::Vector3& targetFacingDir,
    bool useTargetFacingDir)
    {
    const AZ::Vector3 actorInstanceWorldPosition = actorInstance.GetWorldSpaceTransform().m_position;
    const AZ::Quaternion actorInstanceWorldRotation = actorInstance.GetWorldSpaceTransform().m_rotation;
    // Vector from the character to the target; its length is the distance to cover.
    const AZ::Vector3 actorInstanceToTarget = (targetPos - actorInstanceWorldPosition);
    const size_t numFutureSamples = trajectoryFeature->GetNumFutureSamples();
    // Number of segments between consecutive future samples.
    // NOTE(review): assumes numFutureSamples > 1; with a single sample this is 0 and the
    // divisions below would divide by zero — confirm the feature guarantees at least two samples.
    const float numSections = aznumeric_cast<float>(numFutureSamples-1);
    float linearDisplacementPerSample = 0.0f;
    // Default to the character's current rotation; overwritten by one of the branches below.
    AZ::Quaternion targetFacingDirQuat = actorInstanceWorldRotation;
    if (!actorInstanceToTarget.IsClose(AZ::Vector3::CreateZero(), m_deadZone))
    {
    // Calculate the desired linear velocity from the current position to the target position based on the trajectory future time range.
    AZ_Assert(trajectoryFeature->GetFutureTimeRange() > AZ::Constants::FloatEpsilon, "Trajectory feature future time range is too small.");
    const float velocity = actorInstanceToTarget.GetLength() / trajectoryFeature->GetFutureTimeRange();
    linearDisplacementPerSample = (velocity / numSections);
    }
    else
    {
    // Force using the target facing direction in the dead zone as the samples of the future trajectory will be all at the same
    // location. (linearDisplacementPerSample stays 0, so the samples do not move.)
    useTargetFacingDir = true;
    }
    if (useTargetFacingDir)
    {
    // Use the given target facing direction and convert the direction vector to a quaternion.
    targetFacingDirQuat = AZ::Quaternion::CreateShortestArc(trajectoryFeature->GetFacingAxisDir(), targetFacingDir);
    }
    else
    {
    // Use the direction from the current actor instance position to the target as the target facing direction
    // and convert the direction vector to a quaternion.
    // NOTE(review): actorInstanceToTarget is not normalized here — verify CreateShortestArc accepts non-unit vectors.
    targetFacingDirQuat = AZ::Quaternion::CreateShortestArc(trajectoryFeature->GetFacingAxisDir(), actorInstanceToTarget);
    }
    // Set the first control point to the current position and facing direction.
    m_futureControlPoints[0].m_position = actorInstanceWorldPosition;
    m_futureControlPoints[0].m_facingDirection = actorInstanceWorldRotation.TransformVector(trajectoryFeature->GetFacingAxisDir());
    if (useTargetFacingDir)
    {
    // Simple case: linearly interpolate the position to the target and apply the requested
    // facing direction to every sample, then bail out early.
    for (size_t i = 0; i < numFutureSamples; ++i)
    {
    const float sampleTime = static_cast<float>(i) / (numFutureSamples - 1);
    m_futureControlPoints[i].m_position = actorInstanceWorldPosition.Lerp(targetPos, sampleTime);
    m_futureControlPoints[i].m_facingDirection = targetFacingDir;
    }
    return;
    }
    // General case: each sample builds on the previous one (index starts at 1; sample 0 was set above).
    for (size_t i = 1; i < numFutureSamples; ++i)
    {
    // Normalized progress along the future trajectory in [1/numSections .. 1].
    const float t = aznumeric_cast<float>(i) / numSections;
    // Position
    {
    const AZ::Vector3 prevFacingDir = m_futureControlPoints[i - 1].m_facingDirection;
    // Interpolate between the linear direction to target and the facing direction from the previous sample.
    // This will make sure the facing direction close to the current time matches the current facing direction and
    // the facing direction in the most far future matches the desired target facing direction.
    // m_positionBias shapes how quickly the blend favors the direct-to-target direction.
    const float weight = 1.0f - AZStd::pow(1.0f - t, m_positionBias);
    const AZ::Vector3 interpolatedPosDelta = prevFacingDir.Lerp(actorInstanceToTarget.GetNormalized(), weight);
    // Scale it by the desired velocity.
    const AZ::Vector3 scaledPosDelta = interpolatedPosDelta * linearDisplacementPerSample;
    m_futureControlPoints[i].m_position = m_futureControlPoints[i - 1].m_position + scaledPosDelta;
    }
    // Facing direction
    {
    // Interpolate facing direction from current character facing direction (first sample) to the target facing direction (most far future sample).
    const float weight = 1.0f - AZStd::pow(1.0f - t, m_rotationBias);
    const AZ::Quaternion interpolatedRotation = actorInstanceWorldRotation.Slerp(targetFacingDirQuat, weight);
    // Convert the interpolated rotation result back to a facing direction vector.
    const AZ::Vector3 interpolatedFacingDir = interpolatedRotation.TransformVector(trajectoryFeature->GetFacingAxisDir());
    m_futureControlPoints[i].m_facingDirection = interpolatedFacingDir.GetNormalizedSafe();
    }
    }
    }
  97. void TrajectoryQuery::Update(const ActorInstance& actorInstance,
  98. const FeatureTrajectory* trajectoryFeature,
  99. const TrajectoryHistory& trajectoryHistory,
  100. EMode mode,
  101. const AZ::Vector3& targetPos,
  102. const AZ::Vector3& targetFacingDir,
  103. bool useTargetFacingDir,
  104. float timeDelta,
  105. float pathRadius,
  106. float pathSpeed)
  107. {
  108. AZ_PROFILE_SCOPE(Animation, "TrajectoryQuery::Update");
  109. // Build the past trajectory control points.
  110. const size_t numPastSamples = trajectoryFeature->GetNumPastSamples();
  111. m_pastControlPoints.resize(numPastSamples);
  112. const float pastTimeRange = trajectoryFeature->GetPastTimeRange();
  113. for (size_t i = 0; i < numPastSamples; ++i)
  114. {
  115. const float sampleTimeNormalized = i / aznumeric_cast<float>(numPastSamples - 1);
  116. const TrajectoryHistory::Sample sample = trajectoryHistory.Evaluate(sampleTimeNormalized * pastTimeRange);
  117. m_pastControlPoints[i] = { sample.m_position, sample.m_facingDirection };
  118. }
  119. // Build the future trajectory control points.
  120. const size_t numFutureSamples = trajectoryFeature->GetNumFutureSamples();
  121. m_futureControlPoints.resize(numFutureSamples);
  122. if (mode == MODE_TARGETDRIVEN)
  123. {
  124. PredictFutureTrajectory(actorInstance, trajectoryFeature, targetPos, targetFacingDir, useTargetFacingDir);
  125. }
  126. else
  127. {
  128. m_automaticModePhase += timeDelta * pathSpeed;
  129. AZ::Vector3 base = SampleFunction(0.0f, pathRadius, m_automaticModePhase);
  130. for (size_t i = 0; i < numFutureSamples; ++i)
  131. {
  132. const float offset = i * 0.1f;
  133. const AZ::Vector3 curSample = SampleFunction(offset, pathRadius, m_automaticModePhase);
  134. AZ::Vector3 displacement = curSample - base;
  135. m_futureControlPoints[i].m_position = actorInstance.GetWorldSpaceTransform().m_position + displacement;
  136. // Evaluate a control point slightly further into the future than the actual
  137. // one and use the position difference as the facing direction.
  138. const AZ::Vector3 deltaSample = SampleFunction(offset + 0.01f, pathRadius, m_automaticModePhase);
  139. const AZ::Vector3 dir = deltaSample - curSample;
  140. m_futureControlPoints[i].m_facingDirection = dir.GetNormalizedSafe();
  141. }
  142. }
  143. }
  144. void TrajectoryQuery::DebugDraw(AzFramework::DebugDisplayRequests& debugDisplay, const AZ::Color& color) const
  145. {
  146. DebugDrawControlPoints(debugDisplay, m_pastControlPoints, color);
  147. DebugDrawControlPoints(debugDisplay, m_futureControlPoints, color);
  148. }
  149. void TrajectoryQuery::DebugDrawControlPoints(AzFramework::DebugDisplayRequests& debugDisplay,
  150. const AZStd::vector<ControlPoint>& controlPoints,
  151. const AZ::Color& color)
  152. {
  153. const float markerSize = 0.02f;
  154. const size_t numControlPoints = controlPoints.size();
  155. if (numControlPoints > 1)
  156. {
  157. debugDisplay.DepthTestOff();
  158. debugDisplay.SetColor(color);
  159. for (size_t i = 0; i < numControlPoints - 1; ++i)
  160. {
  161. const ControlPoint& current = controlPoints[i];
  162. const AZ::Vector3& posA = current.m_position;
  163. const AZ::Vector3& posB = controlPoints[i + 1].m_position;
  164. const AZ::Vector3 diff = posB - posA;
  165. debugDisplay.DrawSolidCylinder(/*center=*/(posB + posA) * 0.5f,
  166. /*direction=*/diff.GetNormalizedSafe(),
  167. /*radius=*/0.0025f,
  168. /*height=*/diff.GetLength(),
  169. /*drawShaded=*/false);
  170. FeatureTrajectory::DebugDrawFacingDirection(debugDisplay, current.m_position, current.m_facingDirection);
  171. }
  172. for (const ControlPoint& controlPoint : controlPoints)
  173. {
  174. debugDisplay.DrawBall(controlPoint.m_position, markerSize, /*drawShaded=*/false);
  175. FeatureTrajectory::DebugDrawFacingDirection(debugDisplay, controlPoint.m_position, controlPoint.m_facingDirection);
  176. }
  177. }
  178. }
  179. } // namespace EMotionFX::MotionMatching