btDbvt.h
/*
Bullet Continuous Collision Detection and Physics Library
Copyright (c) 2003-2007 Erwin Coumans https://bulletphysics.org
This software is provided 'as-is', without any express or implied warranty.
In no event will the authors be held liable for any damages arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it freely,
subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
///btDbvt implementation by Nathanael Presson
#ifndef BT_DYNAMIC_BOUNDING_VOLUME_TREE_H
#define BT_DYNAMIC_BOUNDING_VOLUME_TREE_H
#include "LinearMath/btAlignedObjectArray.h"
#include "LinearMath/btVector3.h"
#include "LinearMath/btTransform.h"
#include "LinearMath/btAabbUtil2.h"
//
// Compile time configuration
//
// Implementation profiles
#define DBVT_IMPL_GENERIC 0 // Generic implementation
#define DBVT_IMPL_SSE 1     // SSE
// Template implementation of ICollide
#ifdef _WIN32
#if (defined(_MSC_VER) && _MSC_VER >= 1400)
#define DBVT_USE_TEMPLATE 1
#else
#define DBVT_USE_TEMPLATE 0
#endif
#else
#define DBVT_USE_TEMPLATE 0
#endif
// Use only intrinsics instead of inline asm
#define DBVT_USE_INTRINSIC_SSE 1
// Use memmove for collideOCL
#define DBVT_USE_MEMMOVE 1
// Enable benchmarking code
#define DBVT_ENABLE_BENCHMARK 0
// Inlining
#define DBVT_INLINE SIMD_FORCE_INLINE
// Specific methods implementation
//SSE gives errors on MSVC 7.1
#if defined(BT_USE_SSE) //&& defined (_WIN32)
#define DBVT_SELECT_IMPL DBVT_IMPL_SSE
#define DBVT_MERGE_IMPL DBVT_IMPL_SSE
#define DBVT_INT0_IMPL DBVT_IMPL_SSE
#else
#define DBVT_SELECT_IMPL DBVT_IMPL_GENERIC
#define DBVT_MERGE_IMPL DBVT_IMPL_GENERIC
#define DBVT_INT0_IMPL DBVT_IMPL_GENERIC
#endif
#if (DBVT_SELECT_IMPL == DBVT_IMPL_SSE) || \
	(DBVT_MERGE_IMPL == DBVT_IMPL_SSE) ||  \
	(DBVT_INT0_IMPL == DBVT_IMPL_SSE)
#include <emmintrin.h>
#endif
//
// Auto config and checks
//
#if DBVT_USE_TEMPLATE
#define DBVT_VIRTUAL
#define DBVT_VIRTUAL_DTOR(a)
#define DBVT_PREFIX template <typename T>
#define DBVT_IPOLICY T& policy
#define DBVT_CHECKTYPE                           \
	static const ICollide& typechecker = *(T*)1; \
	(void)typechecker;
#else
#define DBVT_VIRTUAL_DTOR(a) \
	virtual ~a() {}
#define DBVT_VIRTUAL virtual
#define DBVT_PREFIX
#define DBVT_IPOLICY ICollide& policy
#define DBVT_CHECKTYPE
#endif
#if DBVT_USE_MEMMOVE
#if !defined(__CELLOS_LV2__) && !defined(__MWERKS__)
#include <memory.h>
#endif
#include <string.h>
#endif
#ifndef DBVT_USE_TEMPLATE
#error "DBVT_USE_TEMPLATE undefined"
#endif
#ifndef DBVT_USE_MEMMOVE
#error "DBVT_USE_MEMMOVE undefined"
#endif
#ifndef DBVT_ENABLE_BENCHMARK
#error "DBVT_ENABLE_BENCHMARK undefined"
#endif
#ifndef DBVT_SELECT_IMPL
#error "DBVT_SELECT_IMPL undefined"
#endif
#ifndef DBVT_MERGE_IMPL
#error "DBVT_MERGE_IMPL undefined"
#endif
#ifndef DBVT_INT0_IMPL
#error "DBVT_INT0_IMPL undefined"
#endif
//
// Default volumes
//
/* btDbvtAabbMm */
struct btDbvtAabbMm
{
	DBVT_INLINE btDbvtAabbMm() {}
	DBVT_INLINE btVector3 Center() const { return ((mi + mx) / 2); }
	DBVT_INLINE btVector3 Lengths() const { return (mx - mi); }
	DBVT_INLINE btVector3 Extents() const { return ((mx - mi) / 2); }
	DBVT_INLINE const btVector3& Mins() const { return (mi); }
	DBVT_INLINE const btVector3& Maxs() const { return (mx); }
	static inline btDbvtAabbMm FromCE(const btVector3& c, const btVector3& e);
	static inline btDbvtAabbMm FromCR(const btVector3& c, btScalar r);
	static inline btDbvtAabbMm FromMM(const btVector3& mi, const btVector3& mx);
	static inline btDbvtAabbMm FromPoints(const btVector3* pts, int n);
	static inline btDbvtAabbMm FromPoints(const btVector3** ppts, int n);
	DBVT_INLINE void Expand(const btVector3& e);
	DBVT_INLINE void SignedExpand(const btVector3& e);
	DBVT_INLINE bool Contain(const btDbvtAabbMm& a) const;
	DBVT_INLINE int Classify(const btVector3& n, btScalar o, int s) const;
	DBVT_INLINE btScalar ProjectMinimum(const btVector3& v, unsigned signs) const;
	DBVT_INLINE friend bool Intersect(const btDbvtAabbMm& a,
		const btDbvtAabbMm& b);
	DBVT_INLINE friend bool Intersect(const btDbvtAabbMm& a,
		const btVector3& b);
	DBVT_INLINE friend btScalar Proximity(const btDbvtAabbMm& a,
		const btDbvtAabbMm& b);
	DBVT_INLINE friend int Select(const btDbvtAabbMm& o,
		const btDbvtAabbMm& a,
		const btDbvtAabbMm& b);
	DBVT_INLINE friend void Merge(const btDbvtAabbMm& a,
		const btDbvtAabbMm& b,
		btDbvtAabbMm& r);
	DBVT_INLINE friend bool NotEqual(const btDbvtAabbMm& a,
		const btDbvtAabbMm& b);
	DBVT_INLINE btVector3& tMins() { return (mi); }
	DBVT_INLINE btVector3& tMaxs() { return (mx); }
private:
	DBVT_INLINE void AddSpan(const btVector3& d, btScalar& smi, btScalar& smx) const;
private:
	btVector3 mi, mx;
};
// Types
typedef btDbvtAabbMm btDbvtVolume;
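// Illustrative sketch (not part of the original header, disabled with the
// file's own #if 0 convention): building btDbvtVolume boxes with the factory
// helpers declared above and testing them with the friend functions
// Intersect()/Merge() defined later in this file. The function name
// btDbvtVolumeExample is hypothetical.
#if 0
static void btDbvtVolumeExample()
{
	// Box from explicit min/max corners, and box from center + half-extents.
	const btDbvtVolume a = btDbvtVolume::FromMM(btVector3(-1, -1, -1), btVector3(1, 1, 1));
	const btDbvtVolume b = btDbvtVolume::FromCE(btVector3(btScalar(0.5), 0, 0), btVector3(1, 1, 1));
	if (Intersect(a, b))
	{
		btDbvtVolume merged;
		Merge(a, b, merged);  // merged now encloses both a and b
	}
}
#endif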
/* btDbvtNode */
struct btDbvtNode
{
	btDbvtVolume volume;
	btDbvtNode* parent;
	DBVT_INLINE bool isleaf() const { return (childs[1] == 0); }
	DBVT_INLINE bool isinternal() const { return (!isleaf()); }
	union {
		btDbvtNode* childs[2];
		void* data;
		int dataAsInt;
	};
};
/* btDbv(normal)tNode */
struct btDbvntNode
{
	btDbvtVolume volume;
	btVector3 normal;
	btScalar angle;
	DBVT_INLINE bool isleaf() const { return (childs[1] == 0); }
	DBVT_INLINE bool isinternal() const { return (!isleaf()); }
	btDbvntNode* childs[2];
	void* data;
	btDbvntNode(const btDbvtNode* n)
		: volume(n->volume)
		, normal(0, 0, 0)
		, angle(0)
		, data(n->data)
	{
		childs[0] = 0;
		childs[1] = 0;
	}
	~btDbvntNode()
	{
		if (childs[0])
			delete childs[0];
		if (childs[1])
			delete childs[1];
	}
};
typedef btAlignedObjectArray<const btDbvtNode*> btNodeStack;
///The btDbvt class implements a fast dynamic bounding volume tree based on axis aligned bounding boxes (aabb tree).
///This btDbvt is used for soft body collision detection and for the btDbvtBroadphase. It provides fast insertion, removal and update of nodes.
///Unlike the btQuantizedBvh, nodes can be dynamically moved around, which allows for changes in the topology of the underlying data structure.
struct btDbvt
{
	/* Stack element */
	struct sStkNN
	{
		const btDbvtNode* a;
		const btDbvtNode* b;
		sStkNN() {}
		sStkNN(const btDbvtNode* na, const btDbvtNode* nb) : a(na), b(nb) {}
	};
	struct sStkNP
	{
		const btDbvtNode* node;
		int mask;
		sStkNP(const btDbvtNode* n, unsigned m) : node(n), mask(m) {}
	};
	struct sStkNPS
	{
		const btDbvtNode* node;
		int mask;
		btScalar value;
		sStkNPS() {}
		sStkNPS(const btDbvtNode* n, unsigned m, btScalar v) : node(n), mask(m), value(v) {}
	};
	struct sStkCLN
	{
		const btDbvtNode* node;
		btDbvtNode* parent;
		sStkCLN(const btDbvtNode* n, btDbvtNode* p) : node(n), parent(p) {}
	};
	struct sStknNN
	{
		const btDbvntNode* a;
		const btDbvntNode* b;
		sStknNN() {}
		sStknNN(const btDbvntNode* na, const btDbvntNode* nb) : a(na), b(nb) {}
	};
	// Policies/Interfaces
	/* ICollide */
	struct ICollide
	{
		DBVT_VIRTUAL_DTOR(ICollide)
		DBVT_VIRTUAL void Process(const btDbvtNode*, const btDbvtNode*) {}
		DBVT_VIRTUAL void Process(const btDbvtNode*) {}
		DBVT_VIRTUAL void Process(const btDbvtNode* n, btScalar) { Process(n); }
		DBVT_VIRTUAL void Process(const btDbvntNode*, const btDbvntNode*) {}
		DBVT_VIRTUAL bool Descent(const btDbvtNode*) { return (true); }
		DBVT_VIRTUAL bool AllLeaves(const btDbvtNode*) { return (true); }
	};
	/* IWriter */
	struct IWriter
	{
		virtual ~IWriter() {}
		virtual void Prepare(const btDbvtNode* root, int numnodes) = 0;
		virtual void WriteNode(const btDbvtNode*, int index, int parent, int child0, int child1) = 0;
		virtual void WriteLeaf(const btDbvtNode*, int index, int parent) = 0;
	};
	/* IClone */
	struct IClone
	{
		virtual ~IClone() {}
		virtual void CloneLeaf(btDbvtNode*) {}
	};
	// Constants
	enum
	{
		SIMPLE_STACKSIZE = 64,
		DOUBLE_STACKSIZE = SIMPLE_STACKSIZE * 2
	};
	// Fields
	btDbvtNode* m_root;
	btDbvtNode* m_free;
	int m_lkhd;
	int m_leaves;
	unsigned m_opath;
	btAlignedObjectArray<sStkNN> m_stkStack;
	// Methods
	btDbvt();
	~btDbvt();
	void clear();
	bool empty() const { return (0 == m_root); }
	void optimizeBottomUp();
	void optimizeTopDown(int bu_treshold = 128);
	void optimizeIncremental(int passes);
	btDbvtNode* insert(const btDbvtVolume& box, void* data);
	void update(btDbvtNode* leaf, int lookahead = -1);
	void update(btDbvtNode* leaf, btDbvtVolume& volume);
	bool update(btDbvtNode* leaf, btDbvtVolume& volume, const btVector3& velocity, btScalar margin);
	bool update(btDbvtNode* leaf, btDbvtVolume& volume, const btVector3& velocity);
	bool update(btDbvtNode* leaf, btDbvtVolume& volume, btScalar margin);
	void remove(btDbvtNode* leaf);
	void write(IWriter* iwriter) const;
	void clone(btDbvt& dest, IClone* iclone = 0) const;
	static int maxdepth(const btDbvtNode* node);
	static int countLeaves(const btDbvtNode* node);
	static void extractLeaves(const btDbvtNode* node, btAlignedObjectArray<const btDbvtNode*>& leaves);
#if DBVT_ENABLE_BENCHMARK
	static void benchmark();
#else
	static void benchmark()
	{
	}
#endif
	// DBVT_IPOLICY must support ICollide policy/interface
	DBVT_PREFIX
	static void enumNodes(const btDbvtNode* root,
		DBVT_IPOLICY);
	DBVT_PREFIX
	static void enumLeaves(const btDbvtNode* root,
		DBVT_IPOLICY);
	DBVT_PREFIX
	void collideTT(const btDbvtNode* root0,
		const btDbvtNode* root1,
		DBVT_IPOLICY);
	DBVT_PREFIX
	void selfCollideT(const btDbvntNode* root,
		DBVT_IPOLICY);
	DBVT_PREFIX
	void selfCollideTT(const btDbvtNode* root,
		DBVT_IPOLICY);
	DBVT_PREFIX
	void collideTTpersistentStack(const btDbvtNode* root0,
		const btDbvtNode* root1,
		DBVT_IPOLICY);
#if 0
	DBVT_PREFIX
	void collideTT(const btDbvtNode* root0,
		const btDbvtNode* root1,
		const btTransform& xform,
		DBVT_IPOLICY);
	DBVT_PREFIX
	void collideTT(const btDbvtNode* root0,
		const btTransform& xform0,
		const btDbvtNode* root1,
		const btTransform& xform1,
		DBVT_IPOLICY);
#endif
	DBVT_PREFIX
	void collideTV(const btDbvtNode* root,
		const btDbvtVolume& volume,
		DBVT_IPOLICY) const;
	DBVT_PREFIX
	void collideTVNoStackAlloc(const btDbvtNode* root,
		const btDbvtVolume& volume,
		btNodeStack& stack,
		DBVT_IPOLICY) const;
	///rayTest is a re-entrant ray test, and can be called in parallel as long as the btAlignedAlloc is thread-safe (uses locking etc)
	///rayTest is slower than rayTestInternal, because it builds a local stack, using memory allocations, and it recomputes signs/rayDirectionInverses each time
	DBVT_PREFIX
	static void rayTest(const btDbvtNode* root,
		const btVector3& rayFrom,
		const btVector3& rayTo,
		DBVT_IPOLICY);
	///rayTestInternal is faster than rayTest, because it uses a persistent stack (to reduce dynamic memory allocations to a minimum) and it uses precomputed signs/rayInverseDirections
	///rayTestInternal is used by btDbvtBroadphase to accelerate world ray casts
	DBVT_PREFIX
	void rayTestInternal(const btDbvtNode* root,
		const btVector3& rayFrom,
		const btVector3& rayTo,
		const btVector3& rayDirectionInverse,
		unsigned int signs[3],
		btScalar lambda_max,
		const btVector3& aabbMin,
		const btVector3& aabbMax,
		btAlignedObjectArray<const btDbvtNode*>& stack,
		DBVT_IPOLICY) const;
	DBVT_PREFIX
	static void collideKDOP(const btDbvtNode* root,
		const btVector3* normals,
		const btScalar* offsets,
		int count,
		DBVT_IPOLICY);
	DBVT_PREFIX
	static void collideOCL(const btDbvtNode* root,
		const btVector3* normals,
		const btScalar* offsets,
		const btVector3& sortaxis,
		int count,
		DBVT_IPOLICY,
		bool fullsort = true);
	DBVT_PREFIX
	static void collideTU(const btDbvtNode* root,
		DBVT_IPOLICY);
	// Helpers
	static DBVT_INLINE int nearest(const int* i, const btDbvt::sStkNPS* a, btScalar v, int l, int h)
	{
		int m = 0;
		while (l < h)
		{
			m = (l + h) >> 1;
			if (a[i[m]].value >= v)
				l = m + 1;
			else
				h = m;
		}
		return (h);
	}
	static DBVT_INLINE int allocate(btAlignedObjectArray<int>& ifree,
		btAlignedObjectArray<sStkNPS>& stock,
		const sStkNPS& value)
	{
		int i;
		if (ifree.size() > 0)
		{
			i = ifree[ifree.size() - 1];
			ifree.pop_back();
			stock[i] = value;
		}
		else
		{
			i = stock.size();
			stock.push_back(value);
		}
		return (i);
	}
	//
private:
	btDbvt(const btDbvt&) {}
};
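// Illustrative usage sketch (not part of the original header, disabled with
// the file's own #if 0 convention): a minimal ICollide policy plus an
// insert/update/query/remove round trip. The names btDbvtExampleCollector,
// btDbvtExampleQuery and the int payload are hypothetical; any type providing
// the Process()/Descent()/AllLeaves() signatures declared above can be passed
// where DBVT_IPOLICY is expected.
#if 0
struct btDbvtExampleCollector : btDbvt::ICollide
{
	btAlignedObjectArray<void*> m_hits;
	void Process(const btDbvtNode* leaf) { m_hits.push_back(leaf->data); }
};

static void btDbvtExampleQuery()
{
	btDbvt tree;
	int payload = 42;
	// insert() stores the volume and the user pointer in a new leaf.
	btDbvtNode* leaf = tree.insert(btDbvtVolume::FromCR(btVector3(0, 0, 0), btScalar(1)), &payload);
	// Refit the leaf after the underlying object has moved.
	btDbvtVolume moved = btDbvtVolume::FromCR(btVector3(2, 0, 0), btScalar(1));
	tree.update(leaf, moved);
	// Report every leaf whose volume overlaps the query box.
	btDbvtExampleCollector collector;
	tree.collideTV(tree.m_root, btDbvtVolume::FromMM(btVector3(1, -1, -1), btVector3(3, 1, 1)), collector);
	// Remove the leaf when the object goes away.
	tree.remove(leaf);
}
#endif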
//
// Inlines
//
//
inline btDbvtAabbMm btDbvtAabbMm::FromCE(const btVector3& c, const btVector3& e)
{
	btDbvtAabbMm box;
	box.mi = c - e;
	box.mx = c + e;
	return (box);
}
//
inline btDbvtAabbMm btDbvtAabbMm::FromCR(const btVector3& c, btScalar r)
{
	return (FromCE(c, btVector3(r, r, r)));
}
//
inline btDbvtAabbMm btDbvtAabbMm::FromMM(const btVector3& mi, const btVector3& mx)
{
	btDbvtAabbMm box;
	box.mi = mi;
	box.mx = mx;
	return (box);
}
//
inline btDbvtAabbMm btDbvtAabbMm::FromPoints(const btVector3* pts, int n)
{
	btDbvtAabbMm box;
	box.mi = box.mx = pts[0];
	for (int i = 1; i < n; ++i)
	{
		box.mi.setMin(pts[i]);
		box.mx.setMax(pts[i]);
	}
	return (box);
}
//
inline btDbvtAabbMm btDbvtAabbMm::FromPoints(const btVector3** ppts, int n)
{
	btDbvtAabbMm box;
	box.mi = box.mx = *ppts[0];
	for (int i = 1; i < n; ++i)
	{
		box.mi.setMin(*ppts[i]);
		box.mx.setMax(*ppts[i]);
	}
	return (box);
}
//
DBVT_INLINE void btDbvtAabbMm::Expand(const btVector3& e)
{
	mi -= e;
	mx += e;
}
//
DBVT_INLINE void btDbvtAabbMm::SignedExpand(const btVector3& e)
{
	if (e.x() > 0)
		mx.setX(mx.x() + e[0]);
	else
		mi.setX(mi.x() + e[0]);
	if (e.y() > 0)
		mx.setY(mx.y() + e[1]);
	else
		mi.setY(mi.y() + e[1]);
	if (e.z() > 0)
		mx.setZ(mx.z() + e[2]);
	else
		mi.setZ(mi.z() + e[2]);
}
//
DBVT_INLINE bool btDbvtAabbMm::Contain(const btDbvtAabbMm& a) const
{
	return ((mi.x() <= a.mi.x()) &&
		(mi.y() <= a.mi.y()) &&
		(mi.z() <= a.mi.z()) &&
		(mx.x() >= a.mx.x()) &&
		(mx.y() >= a.mx.y()) &&
		(mx.z() >= a.mx.z()));
}
//
DBVT_INLINE int btDbvtAabbMm::Classify(const btVector3& n, btScalar o, int s) const
{
	btVector3 pi, px;
	switch (s)
	{
		case (0 + 0 + 0):
			px = btVector3(mi.x(), mi.y(), mi.z());
			pi = btVector3(mx.x(), mx.y(), mx.z());
			break;
		case (1 + 0 + 0):
			px = btVector3(mx.x(), mi.y(), mi.z());
			pi = btVector3(mi.x(), mx.y(), mx.z());
			break;
		case (0 + 2 + 0):
			px = btVector3(mi.x(), mx.y(), mi.z());
			pi = btVector3(mx.x(), mi.y(), mx.z());
			break;
		case (1 + 2 + 0):
			px = btVector3(mx.x(), mx.y(), mi.z());
			pi = btVector3(mi.x(), mi.y(), mx.z());
			break;
		case (0 + 0 + 4):
			px = btVector3(mi.x(), mi.y(), mx.z());
			pi = btVector3(mx.x(), mx.y(), mi.z());
			break;
		case (1 + 0 + 4):
			px = btVector3(mx.x(), mi.y(), mx.z());
			pi = btVector3(mi.x(), mx.y(), mi.z());
			break;
		case (0 + 2 + 4):
			px = btVector3(mi.x(), mx.y(), mx.z());
			pi = btVector3(mx.x(), mi.y(), mi.z());
			break;
		case (1 + 2 + 4):
			px = btVector3(mx.x(), mx.y(), mx.z());
			pi = btVector3(mi.x(), mi.y(), mi.z());
			break;
	}
	if ((btDot(n, px) + o) < 0) return (-1);
	if ((btDot(n, pi) + o) >= 0) return (+1);
	return (0);
}
//
DBVT_INLINE btScalar btDbvtAabbMm::ProjectMinimum(const btVector3& v, unsigned signs) const
{
	const btVector3* b[] = {&mx, &mi};
	const btVector3 p(b[(signs >> 0) & 1]->x(),
		b[(signs >> 1) & 1]->y(),
		b[(signs >> 2) & 1]->z());
	return (btDot(p, v));
}
//
DBVT_INLINE void btDbvtAabbMm::AddSpan(const btVector3& d, btScalar& smi, btScalar& smx) const
{
	for (int i = 0; i < 3; ++i)
	{
		if (d[i] < 0)
		{
			smi += mx[i] * d[i];
			smx += mi[i] * d[i];
		}
		else
		{
			smi += mi[i] * d[i];
			smx += mx[i] * d[i];
		}
	}
}
//
DBVT_INLINE bool Intersect(const btDbvtAabbMm& a,
	const btDbvtAabbMm& b)
{
#if DBVT_INT0_IMPL == DBVT_IMPL_SSE
	const __m128 rt(_mm_or_ps(_mm_cmplt_ps(_mm_load_ps(b.mx), _mm_load_ps(a.mi)),
		_mm_cmplt_ps(_mm_load_ps(a.mx), _mm_load_ps(b.mi))));
#if defined(_WIN32)
	const __int32* pu((const __int32*)&rt);
#else
	const int* pu((const int*)&rt);
#endif
	return ((pu[0] | pu[1] | pu[2]) == 0);
#else
	return ((a.mi.x() <= b.mx.x()) &&
		(a.mx.x() >= b.mi.x()) &&
		(a.mi.y() <= b.mx.y()) &&
		(a.mx.y() >= b.mi.y()) &&
		(a.mi.z() <= b.mx.z()) &&
		(a.mx.z() >= b.mi.z()));
#endif
}
//
DBVT_INLINE bool Intersect(const btDbvtAabbMm& a,
	const btVector3& b)
{
	return ((b.x() >= a.mi.x()) &&
		(b.y() >= a.mi.y()) &&
		(b.z() >= a.mi.z()) &&
		(b.x() <= a.mx.x()) &&
		(b.y() <= a.mx.y()) &&
		(b.z() <= a.mx.z()));
}
//////////////////////////////////////
//
DBVT_INLINE btScalar Proximity(const btDbvtAabbMm& a,
	const btDbvtAabbMm& b)
{
	const btVector3 d = (a.mi + a.mx) - (b.mi + b.mx);
	return (btFabs(d.x()) + btFabs(d.y()) + btFabs(d.z()));
}
//
DBVT_INLINE int Select(const btDbvtAabbMm& o,
	const btDbvtAabbMm& a,
	const btDbvtAabbMm& b)
{
#if DBVT_SELECT_IMPL == DBVT_IMPL_SSE
#if defined(_WIN32)
	static ATTRIBUTE_ALIGNED16(const unsigned __int32) mask[] = {0x7fffffff, 0x7fffffff, 0x7fffffff, 0x7fffffff};
#else
	static ATTRIBUTE_ALIGNED16(const unsigned int) mask[] = {0x7fffffff, 0x7fffffff, 0x7fffffff, 0x00000000 /*0x7fffffff*/};
#endif
	///@todo: the intrinsic version is 11% slower
#if DBVT_USE_INTRINSIC_SSE
	union btSSEUnion  ///NOTE: if we use more intrinsics, move btSSEUnion into the LinearMath directory
	{
		__m128 ssereg;
		float floats[4];
		int ints[4];
	};
	__m128 omi(_mm_load_ps(o.mi));
	omi = _mm_add_ps(omi, _mm_load_ps(o.mx));
	__m128 ami(_mm_load_ps(a.mi));
	ami = _mm_add_ps(ami, _mm_load_ps(a.mx));
	ami = _mm_sub_ps(ami, omi);
	ami = _mm_and_ps(ami, _mm_load_ps((const float*)mask));
	__m128 bmi(_mm_load_ps(b.mi));
	bmi = _mm_add_ps(bmi, _mm_load_ps(b.mx));
	bmi = _mm_sub_ps(bmi, omi);
	bmi = _mm_and_ps(bmi, _mm_load_ps((const float*)mask));
	__m128 t0(_mm_movehl_ps(ami, ami));
	ami = _mm_add_ps(ami, t0);
	ami = _mm_add_ss(ami, _mm_shuffle_ps(ami, ami, 1));
	__m128 t1(_mm_movehl_ps(bmi, bmi));
	bmi = _mm_add_ps(bmi, t1);
	bmi = _mm_add_ss(bmi, _mm_shuffle_ps(bmi, bmi, 1));
	btSSEUnion tmp;
	tmp.ssereg = _mm_cmple_ss(bmi, ami);
	return tmp.ints[0] & 1;
#else
	ATTRIBUTE_ALIGNED16(__int32 r[1]);
	__asm
	{
		mov eax,o
		mov ecx,a
		mov edx,b
		movaps xmm0,[eax]
		movaps xmm5,mask
		addps xmm0,[eax+16]
		movaps xmm1,[ecx]
		movaps xmm2,[edx]
		addps xmm1,[ecx+16]
		addps xmm2,[edx+16]
		subps xmm1,xmm0
		subps xmm2,xmm0
		andps xmm1,xmm5
		andps xmm2,xmm5
		movhlps xmm3,xmm1
		movhlps xmm4,xmm2
		addps xmm1,xmm3
		addps xmm2,xmm4
		pshufd xmm3,xmm1,1
		pshufd xmm4,xmm2,1
		addss xmm1,xmm3
		addss xmm2,xmm4
		cmpless xmm2,xmm1
		movss r,xmm2
	}
	return (r[0] & 1);
#endif
#else
	return (Proximity(o, a) < Proximity(o, b) ? 0 : 1);
#endif
}
//
DBVT_INLINE void Merge(const btDbvtAabbMm& a,
	const btDbvtAabbMm& b,
	btDbvtAabbMm& r)
{
#if DBVT_MERGE_IMPL == DBVT_IMPL_SSE
	__m128 ami(_mm_load_ps(a.mi));
	__m128 amx(_mm_load_ps(a.mx));
	__m128 bmi(_mm_load_ps(b.mi));
	__m128 bmx(_mm_load_ps(b.mx));
	ami = _mm_min_ps(ami, bmi);
	amx = _mm_max_ps(amx, bmx);
	_mm_store_ps(r.mi, ami);
	_mm_store_ps(r.mx, amx);
#else
	for (int i = 0; i < 3; ++i)
	{
		if (a.mi[i] < b.mi[i])
			r.mi[i] = a.mi[i];
		else
			r.mi[i] = b.mi[i];
		if (a.mx[i] > b.mx[i])
			r.mx[i] = a.mx[i];
		else
			r.mx[i] = b.mx[i];
	}
#endif
}
//
DBVT_INLINE bool NotEqual(const btDbvtAabbMm& a,
	const btDbvtAabbMm& b)
{
	return ((a.mi.x() != b.mi.x()) ||
		(a.mi.y() != b.mi.y()) ||
		(a.mi.z() != b.mi.z()) ||
		(a.mx.x() != b.mx.x()) ||
		(a.mx.y() != b.mx.y()) ||
		(a.mx.z() != b.mx.z()));
}
//
// Inlines
//
//
DBVT_PREFIX
inline void btDbvt::enumNodes(const btDbvtNode* root,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	policy.Process(root);
	if (root->isinternal())
	{
		enumNodes(root->childs[0], policy);
		enumNodes(root->childs[1], policy);
	}
}
//
DBVT_PREFIX
inline void btDbvt::enumLeaves(const btDbvtNode* root,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root->isinternal())
	{
		enumLeaves(root->childs[0], policy);
		enumLeaves(root->childs[1], policy);
	}
	else
	{
		policy.Process(root);
	}
}
//
DBVT_PREFIX
inline void btDbvt::collideTT(const btDbvtNode* root0,
	const btDbvtNode* root1,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root0 && root1)
	{
		int depth = 1;
		int treshold = DOUBLE_STACKSIZE - 4;
		btAlignedObjectArray<sStkNN> stkStack;
		stkStack.resize(DOUBLE_STACKSIZE);
		stkStack[0] = sStkNN(root0, root1);
		do
		{
			sStkNN p = stkStack[--depth];
			if (depth > treshold)
			{
				stkStack.resize(stkStack.size() * 2);
				treshold = stkStack.size() - 4;
			}
			if (p.a == p.b)
			{
				if (p.a->isinternal())
				{
					stkStack[depth++] = sStkNN(p.a->childs[0], p.a->childs[0]);
					stkStack[depth++] = sStkNN(p.a->childs[1], p.a->childs[1]);
					stkStack[depth++] = sStkNN(p.a->childs[0], p.a->childs[1]);
				}
			}
			else if (Intersect(p.a->volume, p.b->volume))
			{
				if (p.a->isinternal())
				{
					if (p.b->isinternal())
					{
						stkStack[depth++] = sStkNN(p.a->childs[0], p.b->childs[0]);
						stkStack[depth++] = sStkNN(p.a->childs[1], p.b->childs[0]);
						stkStack[depth++] = sStkNN(p.a->childs[0], p.b->childs[1]);
						stkStack[depth++] = sStkNN(p.a->childs[1], p.b->childs[1]);
					}
					else
					{
						stkStack[depth++] = sStkNN(p.a->childs[0], p.b);
						stkStack[depth++] = sStkNN(p.a->childs[1], p.b);
					}
				}
				else
				{
					if (p.b->isinternal())
					{
						stkStack[depth++] = sStkNN(p.a, p.b->childs[0]);
						stkStack[depth++] = sStkNN(p.a, p.b->childs[1]);
					}
					else
					{
						policy.Process(p.a, p.b);
					}
				}
			}
		} while (depth);
	}
}
//
DBVT_PREFIX
inline void btDbvt::selfCollideT(const btDbvntNode* root,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root)
	{
		int depth = 1;
		int treshold = DOUBLE_STACKSIZE - 4;
		btAlignedObjectArray<sStknNN> stkStack;
		stkStack.resize(DOUBLE_STACKSIZE);
		stkStack[0] = sStknNN(root, root);
		do
		{
			sStknNN p = stkStack[--depth];
			if (depth > treshold)
			{
				stkStack.resize(stkStack.size() * 2);
				treshold = stkStack.size() - 4;
			}
			if (p.a == p.b)
			{
				if (p.a->isinternal() && p.a->angle > SIMD_PI)
				{
					stkStack[depth++] = sStknNN(p.a->childs[0], p.a->childs[0]);
					stkStack[depth++] = sStknNN(p.a->childs[1], p.a->childs[1]);
					stkStack[depth++] = sStknNN(p.a->childs[0], p.a->childs[1]);
				}
			}
			else if (Intersect(p.a->volume, p.b->volume))
			{
				if (p.a->isinternal())
				{
					if (p.b->isinternal())
					{
						stkStack[depth++] = sStknNN(p.a->childs[0], p.b->childs[0]);
						stkStack[depth++] = sStknNN(p.a->childs[1], p.b->childs[0]);
						stkStack[depth++] = sStknNN(p.a->childs[0], p.b->childs[1]);
						stkStack[depth++] = sStknNN(p.a->childs[1], p.b->childs[1]);
					}
					else
					{
						stkStack[depth++] = sStknNN(p.a->childs[0], p.b);
						stkStack[depth++] = sStknNN(p.a->childs[1], p.b);
					}
				}
				else
				{
					if (p.b->isinternal())
					{
						stkStack[depth++] = sStknNN(p.a, p.b->childs[0]);
						stkStack[depth++] = sStknNN(p.a, p.b->childs[1]);
					}
					else
					{
						policy.Process(p.a, p.b);
					}
				}
			}
		} while (depth);
	}
}
//
DBVT_PREFIX
inline void btDbvt::selfCollideTT(const btDbvtNode* root,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root)
	{
		int depth = 1;
		int treshold = DOUBLE_STACKSIZE - 4;
		btAlignedObjectArray<sStkNN> stkStack;
		stkStack.resize(DOUBLE_STACKSIZE);
		stkStack[0] = sStkNN(root, root);
		do
		{
			sStkNN p = stkStack[--depth];
			if (depth > treshold)
			{
				stkStack.resize(stkStack.size() * 2);
				treshold = stkStack.size() - 4;
			}
			if (p.a == p.b)
			{
				if (p.a->isinternal())
				{
					stkStack[depth++] = sStkNN(p.a->childs[0], p.a->childs[0]);
					stkStack[depth++] = sStkNN(p.a->childs[1], p.a->childs[1]);
					stkStack[depth++] = sStkNN(p.a->childs[0], p.a->childs[1]);
				}
			}
			else if (Intersect(p.a->volume, p.b->volume))
			{
				if (p.a->isinternal())
				{
					if (p.b->isinternal())
					{
						stkStack[depth++] = sStkNN(p.a->childs[0], p.b->childs[0]);
						stkStack[depth++] = sStkNN(p.a->childs[1], p.b->childs[0]);
						stkStack[depth++] = sStkNN(p.a->childs[0], p.b->childs[1]);
						stkStack[depth++] = sStkNN(p.a->childs[1], p.b->childs[1]);
					}
					else
					{
						stkStack[depth++] = sStkNN(p.a->childs[0], p.b);
						stkStack[depth++] = sStkNN(p.a->childs[1], p.b);
					}
				}
				else
				{
					if (p.b->isinternal())
					{
						stkStack[depth++] = sStkNN(p.a, p.b->childs[0]);
						stkStack[depth++] = sStkNN(p.a, p.b->childs[1]);
					}
					else
					{
						policy.Process(p.a, p.b);
					}
				}
			}
		} while (depth);
	}
}
DBVT_PREFIX
inline void btDbvt::collideTTpersistentStack(const btDbvtNode* root0,
	const btDbvtNode* root1,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root0 && root1)
	{
		int depth = 1;
		int treshold = DOUBLE_STACKSIZE - 4;
		m_stkStack.resize(DOUBLE_STACKSIZE);
		m_stkStack[0] = sStkNN(root0, root1);
		do
		{
			sStkNN p = m_stkStack[--depth];
			if (depth > treshold)
			{
				m_stkStack.resize(m_stkStack.size() * 2);
				treshold = m_stkStack.size() - 4;
			}
			if (p.a == p.b)
			{
				if (p.a->isinternal())
				{
					m_stkStack[depth++] = sStkNN(p.a->childs[0], p.a->childs[0]);
					m_stkStack[depth++] = sStkNN(p.a->childs[1], p.a->childs[1]);
					m_stkStack[depth++] = sStkNN(p.a->childs[0], p.a->childs[1]);
				}
			}
			else if (Intersect(p.a->volume, p.b->volume))
			{
				if (p.a->isinternal())
				{
					if (p.b->isinternal())
					{
						m_stkStack[depth++] = sStkNN(p.a->childs[0], p.b->childs[0]);
						m_stkStack[depth++] = sStkNN(p.a->childs[1], p.b->childs[0]);
						m_stkStack[depth++] = sStkNN(p.a->childs[0], p.b->childs[1]);
						m_stkStack[depth++] = sStkNN(p.a->childs[1], p.b->childs[1]);
					}
					else
					{
						m_stkStack[depth++] = sStkNN(p.a->childs[0], p.b);
						m_stkStack[depth++] = sStkNN(p.a->childs[1], p.b);
					}
				}
				else
				{
					if (p.b->isinternal())
					{
						m_stkStack[depth++] = sStkNN(p.a, p.b->childs[0]);
						m_stkStack[depth++] = sStkNN(p.a, p.b->childs[1]);
					}
					else
					{
						policy.Process(p.a, p.b);
					}
				}
			}
		} while (depth);
	}
}
#if 0
//
DBVT_PREFIX
inline void btDbvt::collideTT(const btDbvtNode* root0,
	const btDbvtNode* root1,
	const btTransform& xform,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if(root0&&root1)
	{
		int depth=1;
		int treshold=DOUBLE_STACKSIZE-4;
		btAlignedObjectArray<sStkNN> stkStack;
		stkStack.resize(DOUBLE_STACKSIZE);
		stkStack[0]=sStkNN(root0,root1);
		do {
			sStkNN p=stkStack[--depth];
			if(Intersect(p.a->volume,p.b->volume,xform))
			{
				if(depth>treshold)
				{
					stkStack.resize(stkStack.size()*2);
					treshold=stkStack.size()-4;
				}
				if(p.a->isinternal())
				{
					if(p.b->isinternal())
					{
						stkStack[depth++]=sStkNN(p.a->childs[0],p.b->childs[0]);
						stkStack[depth++]=sStkNN(p.a->childs[1],p.b->childs[0]);
						stkStack[depth++]=sStkNN(p.a->childs[0],p.b->childs[1]);
						stkStack[depth++]=sStkNN(p.a->childs[1],p.b->childs[1]);
					}
					else
					{
						stkStack[depth++]=sStkNN(p.a->childs[0],p.b);
						stkStack[depth++]=sStkNN(p.a->childs[1],p.b);
					}
				}
				else
				{
					if(p.b->isinternal())
					{
						stkStack[depth++]=sStkNN(p.a,p.b->childs[0]);
						stkStack[depth++]=sStkNN(p.a,p.b->childs[1]);
					}
					else
					{
						policy.Process(p.a,p.b);
					}
				}
			}
		} while(depth);
	}
}
//
DBVT_PREFIX
inline void btDbvt::collideTT(const btDbvtNode* root0,
	const btTransform& xform0,
	const btDbvtNode* root1,
	const btTransform& xform1,
	DBVT_IPOLICY)
{
	const btTransform xform=xform0.inverse()*xform1;
	collideTT(root0,root1,xform,policy);
}
#endif
DBVT_PREFIX
inline void btDbvt::collideTV(const btDbvtNode* root,
	const btDbvtVolume& vol,
	DBVT_IPOLICY) const
{
	DBVT_CHECKTYPE
	if (root)
	{
		ATTRIBUTE_ALIGNED16(btDbvtVolume)
		volume(vol);
		btAlignedObjectArray<const btDbvtNode*> stack;
		stack.resize(0);
#ifndef BT_DISABLE_STACK_TEMP_MEMORY
		char tempmemory[SIMPLE_STACKSIZE * sizeof(const btDbvtNode*)];
		stack.initializeFromBuffer(tempmemory, 0, SIMPLE_STACKSIZE);
#else
		stack.reserve(SIMPLE_STACKSIZE);
#endif  //BT_DISABLE_STACK_TEMP_MEMORY
		stack.push_back(root);
		do
		{
			const btDbvtNode* n = stack[stack.size() - 1];
			stack.pop_back();
			if (Intersect(n->volume, volume))
			{
				if (n->isinternal())
				{
					stack.push_back(n->childs[0]);
					stack.push_back(n->childs[1]);
				}
				else
				{
					policy.Process(n);
				}
			}
		} while (stack.size() > 0);
	}
}
//
DBVT_PREFIX
inline void btDbvt::collideTVNoStackAlloc(const btDbvtNode* root,
	const btDbvtVolume& vol,
	btNodeStack& stack,
	DBVT_IPOLICY) const
{
	DBVT_CHECKTYPE
	if (root)
	{
		ATTRIBUTE_ALIGNED16(btDbvtVolume)
		volume(vol);
		stack.resize(0);
		stack.reserve(SIMPLE_STACKSIZE);
		stack.push_back(root);
		do
		{
			const btDbvtNode* n = stack[stack.size() - 1];
			stack.pop_back();
			if (Intersect(n->volume, volume))
			{
				if (n->isinternal())
				{
					stack.push_back(n->childs[0]);
					stack.push_back(n->childs[1]);
				}
				else
				{
					policy.Process(n);
				}
			}
		} while (stack.size() > 0);
	}
}
DBVT_PREFIX
inline void btDbvt::rayTestInternal(const btDbvtNode* root,
	const btVector3& rayFrom,
	const btVector3& rayTo,
	const btVector3& rayDirectionInverse,
	unsigned int signs[3],
	btScalar lambda_max,
	const btVector3& aabbMin,
	const btVector3& aabbMax,
	btAlignedObjectArray<const btDbvtNode*>& stack,
	DBVT_IPOLICY) const
{
	(void)rayTo;
	DBVT_CHECKTYPE
	if (root)
	{
		btVector3 resultNormal;
		int depth = 1;
		int treshold = DOUBLE_STACKSIZE - 2;
		stack.resize(DOUBLE_STACKSIZE);
		stack[0] = root;
		btVector3 bounds[2];
		do
		{
			const btDbvtNode* node = stack[--depth];
			bounds[0] = node->volume.Mins() - aabbMax;
			bounds[1] = node->volume.Maxs() - aabbMin;
			btScalar tmin = 1.f, lambda_min = 0.f;
			unsigned int result1 = false;
			result1 = btRayAabb2(rayFrom, rayDirectionInverse, signs, bounds, tmin, lambda_min, lambda_max);
			if (result1)
			{
				if (node->isinternal())
				{
					if (depth > treshold)
					{
						stack.resize(stack.size() * 2);
						treshold = stack.size() - 2;
					}
					stack[depth++] = node->childs[0];
					stack[depth++] = node->childs[1];
				}
				else
				{
					policy.Process(node);
				}
			}
		} while (depth);
	}
}
//
DBVT_PREFIX
inline void btDbvt::rayTest(const btDbvtNode* root,
	const btVector3& rayFrom,
	const btVector3& rayTo,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root)
	{
		btVector3 rayDir = (rayTo - rayFrom);
		rayDir.normalize();
		///what about division by zero? --> just set rayDirection[i] to INF/BT_LARGE_FLOAT
		btVector3 rayDirectionInverse;
		rayDirectionInverse[0] = rayDir[0] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[0];
		rayDirectionInverse[1] = rayDir[1] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[1];
		rayDirectionInverse[2] = rayDir[2] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[2];
		unsigned int signs[3] = {rayDirectionInverse[0] < 0.0, rayDirectionInverse[1] < 0.0, rayDirectionInverse[2] < 0.0};
		btScalar lambda_max = rayDir.dot(rayTo - rayFrom);
		btVector3 resultNormal;
		btAlignedObjectArray<const btDbvtNode*> stack;
		int depth = 1;
		int treshold = DOUBLE_STACKSIZE - 2;
		char tempmemory[DOUBLE_STACKSIZE * sizeof(const btDbvtNode*)];
#ifndef BT_DISABLE_STACK_TEMP_MEMORY
		stack.initializeFromBuffer(tempmemory, DOUBLE_STACKSIZE, DOUBLE_STACKSIZE);
#else   //BT_DISABLE_STACK_TEMP_MEMORY
		stack.resize(DOUBLE_STACKSIZE);
#endif  //BT_DISABLE_STACK_TEMP_MEMORY
		stack[0] = root;
		btVector3 bounds[2];
		do
		{
			const btDbvtNode* node = stack[--depth];
			bounds[0] = node->volume.Mins();
			bounds[1] = node->volume.Maxs();
			btScalar tmin = 1.f, lambda_min = 0.f;
			unsigned int result1 = btRayAabb2(rayFrom, rayDirectionInverse, signs, bounds, tmin, lambda_min, lambda_max);
#ifdef COMPARE_BTRAY_AABB2
			btScalar param = 1.f;
			bool result2 = btRayAabb(rayFrom, rayTo, node->volume.Mins(), node->volume.Maxs(), param, resultNormal);
			btAssert(result1 == result2);
#endif  //COMPARE_BTRAY_AABB2
			if (result1)
			{
				if (node->isinternal())
				{
					if (depth > treshold)
					{
						stack.resize(stack.size() * 2);
						treshold = stack.size() - 2;
					}
					stack[depth++] = node->childs[0];
					stack[depth++] = node->childs[1];
				}
				else
				{
					policy.Process(node);
				}
			}
		} while (depth);
	}
}
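// Illustrative sketch (not part of the original header, disabled with the
// file's own #if 0 convention): driving the two ray queries above. rayTest()
// recomputes the inverse direction and signs on every call, while
// rayTestInternal() expects them precomputed and reuses a caller-owned stack,
// which is how btDbvtBroadphase drives it. The helper name and the zero
// aabbMin/aabbMax expansion are assumptions made for this example only.
#if 0
static void btDbvtExampleRayTest(const btDbvt& tree,
	const btVector3& rayFrom,
	const btVector3& rayTo,
	btDbvt::ICollide& policy)
{
	// Simple, re-entrant form: builds its own traversal stack internally.
	btDbvt::rayTest(tree.m_root, rayFrom, rayTo, policy);
	// Faster form: precompute direction data once and reuse a persistent stack.
	btVector3 rayDir = (rayTo - rayFrom).normalized();
	btVector3 rayDirectionInverse;
	rayDirectionInverse[0] = rayDir[0] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[0];
	rayDirectionInverse[1] = rayDir[1] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[1];
	rayDirectionInverse[2] = rayDir[2] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[2];
	unsigned int signs[3] = {rayDirectionInverse[0] < 0.0, rayDirectionInverse[1] < 0.0, rayDirectionInverse[2] < 0.0};
	btScalar lambda_max = rayDir.dot(rayTo - rayFrom);
	btAlignedObjectArray<const btDbvtNode*> stack;
	tree.rayTestInternal(tree.m_root, rayFrom, rayTo, rayDirectionInverse, signs,
		lambda_max, btVector3(0, 0, 0), btVector3(0, 0, 0), stack, policy);
}
#endif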
//
DBVT_PREFIX
inline void btDbvt::collideKDOP(const btDbvtNode* root,
	const btVector3* normals,
	const btScalar* offsets,
	int count,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root)
	{
		const int inside = (1 << count) - 1;
		btAlignedObjectArray<sStkNP> stack;
		int signs[sizeof(unsigned) * 8];
		btAssert(count < int(sizeof(signs) / sizeof(signs[0])));
		for (int i = 0; i < count; ++i)
		{
			signs[i] = ((normals[i].x() >= 0) ? 1 : 0) +
				((normals[i].y() >= 0) ? 2 : 0) +
				((normals[i].z() >= 0) ? 4 : 0);
		}
		stack.reserve(SIMPLE_STACKSIZE);
		stack.push_back(sStkNP(root, 0));
		do
		{
			sStkNP se = stack[stack.size() - 1];
			bool out = false;
			stack.pop_back();
			for (int i = 0, j = 1; (!out) && (i < count); ++i, j <<= 1)
			{
				if (0 == (se.mask & j))
				{
					const int side = se.node->volume.Classify(normals[i], offsets[i], signs[i]);
					switch (side)
					{
						case -1:
							out = true;
							break;
						case +1:
							se.mask |= j;
							break;
					}
				}
			}
			if (!out)
			{
				if ((se.mask != inside) && (se.node->isinternal()))
				{
					stack.push_back(sStkNP(se.node->childs[0], se.mask));
					stack.push_back(sStkNP(se.node->childs[1], se.mask));
				}
				else
				{
					if (policy.AllLeaves(se.node)) enumLeaves(se.node, policy);
				}
			}
		} while (stack.size());
	}
}
//
DBVT_PREFIX
inline void btDbvt::collideOCL(const btDbvtNode* root,
	const btVector3* normals,
	const btScalar* offsets,
	const btVector3& sortaxis,
	int count,
	DBVT_IPOLICY,
	bool fsort)
{
	DBVT_CHECKTYPE
	if (root)
	{
		const unsigned srtsgns = (sortaxis[0] >= 0 ? 1 : 0) +
			(sortaxis[1] >= 0 ? 2 : 0) +
			(sortaxis[2] >= 0 ? 4 : 0);
		const int inside = (1 << count) - 1;
		btAlignedObjectArray<sStkNPS> stock;
		btAlignedObjectArray<int> ifree;
		btAlignedObjectArray<int> stack;
		int signs[sizeof(unsigned) * 8];
		btAssert(count < int(sizeof(signs) / sizeof(signs[0])));
		for (int i = 0; i < count; ++i)
		{
			signs[i] = ((normals[i].x() >= 0) ? 1 : 0) +
				((normals[i].y() >= 0) ? 2 : 0) +
				((normals[i].z() >= 0) ? 4 : 0);
		}
		stock.reserve(SIMPLE_STACKSIZE);
		stack.reserve(SIMPLE_STACKSIZE);
		ifree.reserve(SIMPLE_STACKSIZE);
		stack.push_back(allocate(ifree, stock, sStkNPS(root, 0, root->volume.ProjectMinimum(sortaxis, srtsgns))));
		do
		{
			const int id = stack[stack.size() - 1];
			sStkNPS se = stock[id];
			stack.pop_back();
			ifree.push_back(id);
			if (se.mask != inside)
			{
				bool out = false;
				for (int i = 0, j = 1; (!out) && (i < count); ++i, j <<= 1)
				{
					if (0 == (se.mask & j))
					{
						const int side = se.node->volume.Classify(normals[i], offsets[i], signs[i]);
						switch (side)
						{
							case -1:
								out = true;
								break;
							case +1:
								se.mask |= j;
								break;
						}
					}
				}
				if (out) continue;
			}
			if (policy.Descent(se.node))
			{
				if (se.node->isinternal())
				{
					const btDbvtNode* pns[] = {se.node->childs[0], se.node->childs[1]};
					sStkNPS nes[] = {sStkNPS(pns[0], se.mask, pns[0]->volume.ProjectMinimum(sortaxis, srtsgns)),
						sStkNPS(pns[1], se.mask, pns[1]->volume.ProjectMinimum(sortaxis, srtsgns))};
					const int q = nes[0].value < nes[1].value ? 1 : 0;
					int j = stack.size();
					if (fsort && (j > 0))
					{
						/* Insert 0 */
						j = nearest(&stack[0], &stock[0], nes[q].value, 0, stack.size());
						stack.push_back(0);
						//void * memmove ( void * destination, const void * source, size_t num );
#if DBVT_USE_MEMMOVE
						{
							int num_items_to_move = stack.size() - 1 - j;
							if (num_items_to_move > 0)
								memmove(&stack[j + 1], &stack[j], sizeof(int) * num_items_to_move);
						}
#else
						for (int k = stack.size() - 1; k > j; --k)
						{
							stack[k] = stack[k - 1];
						}
#endif
						stack[j] = allocate(ifree, stock, nes[q]);
						/* Insert 1 */
						j = nearest(&stack[0], &stock[0], nes[1 - q].value, j, stack.size());
						stack.push_back(0);
#if DBVT_USE_MEMMOVE
						{
							int num_items_to_move = stack.size() - 1 - j;
							if (num_items_to_move > 0)
								memmove(&stack[j + 1], &stack[j], sizeof(int) * num_items_to_move);
						}
#else
						for (int k = stack.size() - 1; k > j; --k)
						{
							stack[k] = stack[k - 1];
						}
#endif
						stack[j] = allocate(ifree, stock, nes[1 - q]);
					}
					else
					{
						stack.push_back(allocate(ifree, stock, nes[q]));
						stack.push_back(allocate(ifree, stock, nes[1 - q]));
					}
				}
				else
				{
					policy.Process(se.node, se.value);
				}
			}
		} while (stack.size());
	}
}
//
DBVT_PREFIX
inline void btDbvt::collideTU(const btDbvtNode* root,
	DBVT_IPOLICY)
{
	DBVT_CHECKTYPE
	if (root)
	{
		btAlignedObjectArray<const btDbvtNode*> stack;
		stack.reserve(SIMPLE_STACKSIZE);
		stack.push_back(root);
		do
		{
			const btDbvtNode* n = stack[stack.size() - 1];
			stack.pop_back();
			if (policy.Descent(n))
			{
				if (n->isinternal())
				{
					stack.push_back(n->childs[0]);
					stack.push_back(n->childs[1]);
				}
				else
				{
					policy.Process(n);
				}
			}
		} while (stack.size() > 0);
	}
}
//
// PP Cleanup
//
#undef DBVT_USE_MEMMOVE
#undef DBVT_USE_TEMPLATE
#undef DBVT_VIRTUAL_DTOR
#undef DBVT_VIRTUAL
#undef DBVT_PREFIX
#undef DBVT_IPOLICY
#undef DBVT_CHECKTYPE
#undef DBVT_IMPL_GENERIC
#undef DBVT_IMPL_SSE
#undef DBVT_USE_INTRINSIC_SSE
#undef DBVT_SELECT_IMPL
#undef DBVT_MERGE_IMPL
#undef DBVT_INT0_IMPL
#endif