/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

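/*
 * Illustrative sketch (added for clarity, not part of the original header;
 * the names example_lock/example_unlock and flag are hypothetical): how the
 * ordering suffixes are typically paired.  A successful cmpxchg_acquire()
 * orders the critical section after the lock acquisition, and set_release()
 * makes the section's stores visible before the lock is seen as free.
 *
 *      static atomic_t flag = ATOMIC_INIT(0);
 *
 *      static void example_lock(void)
 *      {
 *              while (atomic_cmpxchg_acquire(&flag, 0, 1) != 0)
 *                      cpu_relax();
 *      }
 *
 *      static void example_unlock(void)
 *      {
 *              atomic_set_release(&flag, 0);
 *      }
 */
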
#ifndef atomic_read_acquire
#define atomic_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Also, if an arch has a special barrier for acquire/release, it can
 * implement its own __atomic_op_* variants and use the same framework
 * for building the others.
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...) \
({ \
        typeof(op##_relaxed(args)) __ret = op##_relaxed(args); \
        smp_mb__after_atomic(); \
        __ret; \
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...) \
({ \
        smp_mb__before_atomic(); \
        op##_relaxed(args); \
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...) \
({ \
        typeof(op##_relaxed(args)) __ret; \
        smp_mb__before_atomic(); \
        __ret = op##_relaxed(args); \
        smp_mb__after_atomic(); \
        __ret; \
})
#endif

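/*
 * Illustrative sketch (added for clarity, not part of the original header):
 * on an architecture that provides only atomic_add_return_relaxed(), the
 * wrappers above supply the other orderings, so a call such as
 * atomic_add_return(i, v) effectively expands to the following, with
 * typeof(atomic_add_return_relaxed(i, v)) resolving to int:
 *
 *      ({
 *              int __ret;
 *              smp_mb__before_atomic();
 *              __ret = atomic_add_return_relaxed(i, v);
 *              smp_mb__after_atomic();
 *              __ret;
 *      })
 */
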
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed atomic_add_return
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#else /* atomic_add_return_relaxed */
#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...) \
        __atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return_release
#define atomic_add_return_release(...) \
        __atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif
#ifndef atomic_add_return
#define atomic_add_return(...) \
        __atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#else /* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...) \
        __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...) \
        __atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif
#ifndef atomic_inc_return
#define atomic_inc_return(...) \
        __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed atomic_sub_return
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#else /* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...) \
        __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...) \
        __atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif
#ifndef atomic_sub_return
#define atomic_sub_return(...) \
        __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#else /* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...) \
        __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...) \
        __atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif
#ifndef atomic_dec_return
#define atomic_dec_return(...) \
        __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed atomic_fetch_add
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#else /* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...) \
        __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...) \
        __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic_fetch_add
#define atomic_fetch_add(...) \
        __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#endif /* atomic_fetch_inc */
#else /* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...) \
        __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...) \
        __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...) \
        __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...) \
        __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...) \
        __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...) \
        __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#endif /* atomic_fetch_dec */
#else /* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...) \
        __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...) \
        __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...) \
        __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#else /* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...) \
        __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...) \
        __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic_fetch_or
#define atomic_fetch_or(...) \
        __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed atomic_fetch_and
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#else /* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...) \
        __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...) \
        __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic_fetch_and
#define atomic_fetch_and(...) \
        __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#else /* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...) \
        __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...) \
        __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...) \
        __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...) \
        __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...) \
        __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...) \
        __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed atomic_xchg
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#else /* atomic_xchg_relaxed */
#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...) \
        __atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg_release
#define atomic_xchg_release(...) \
        __atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif
#ifndef atomic_xchg
#define atomic_xchg(...) \
        __atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...) \
        __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...) \
        __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...) \
        __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */
#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
        __atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg_release
#define cmpxchg_release(...) \
        __atomic_op_release(cmpxchg, __VA_ARGS__)
#endif
#ifndef cmpxchg
#define cmpxchg(...) \
        __atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */
#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
        __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
        __atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif
#ifndef cmpxchg64
#define cmpxchg64(...) \
        __atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */
#ifndef xchg_acquire
#define xchg_acquire(...) __atomic_op_acquire(xchg, __VA_ARGS__)
#endif
#ifndef xchg_release
#define xchg_release(...) __atomic_op_release(xchg, __VA_ARGS__)
#endif
#ifndef xchg
#define xchg(...) __atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
#endif

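/*
 * Illustrative sketch (added for clarity, not part of the original header):
 * atomic_inc_not_zero() is the usual building block for "take a reference
 * only if the object is still live" patterns.  A failure means the count has
 * already dropped to zero, so a dying object is never revived.  The struct
 * and function names below are hypothetical.
 *
 *      struct obj {
 *              atomic_t refcount;
 *      };
 *
 *      static struct obj *obj_tryget(struct obj *o)
 *      {
 *              if (!atomic_inc_not_zero(&o->refcount))
 *                      return NULL;
 *              return o;
 *      }
 */
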
#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
        atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
        return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
        return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
        return atomic_fetch_and_release(~i, v);
}
#endif

/**
 * atomic_inc_not_zero_hint - increment if not zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() gives a hint of the probable
 * value of the atomic. This helps the processor avoid reading the memory
 * location before doing the atomic read/modify/write cycle, lowering the
 * number of bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
        int val, c = hint;

        /* sanity test, should be removed by compiler if hint is a constant */
        if (!hint)
                return atomic_inc_not_zero(v);

        do {
                val = atomic_cmpxchg(v, c, c + 1);
                if (val == c)
                        return 1;
                c = val;
        } while (c);

        return 0;
}
#endif

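/*
 * Illustrative sketch (added for clarity, not part of the original header):
 * when most objects are expected to hold exactly one reference, passing that
 * expectation as the hint lets the first cmpxchg attempt proceed without an
 * initial read of the counter.  obj and refcount are hypothetical names.
 *
 *      if (!atomic_inc_not_zero_hint(&obj->refcount, 1))
 *              return NULL;
 */
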
#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
        int v, v1;

        for (v = 0; v >= 0; v = v1) {
                v1 = atomic_cmpxchg(p, v, v + 1);
                if (likely(v1 == v))
                        return 1;
        }
        return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
        int v, v1;

        for (v = 0; v <= 0; v = v1) {
                v1 = atomic_cmpxchg(p, v, v - 1);
                if (likely(v1 == v))
                        return 1;
        }
        return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
        int c, old, dec;

        c = atomic_read(v);
        for (;;) {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
                old = atomic_cmpxchg((v), c, dec);
                if (likely(old == c))
                        break;
                c = old;
        }
        return dec;
}
#endif

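/*
 * Illustrative sketch (added for clarity, not part of the original header):
 * atomic_dec_if_positive() suits semaphore-like "take a unit only if one is
 * available" logic.  Because the return value is the old value minus 1, a
 * negative result means the counter was already zero and was left unchanged.
 * sem_count is a hypothetical variable.
 *
 *      if (atomic_dec_if_positive(&sem_count) < 0)
 *              return -EAGAIN;
 */
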
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed atomic64_add_return
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#else /* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
        __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
        __atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif
#ifndef atomic64_add_return
#define atomic64_add_return(...) \
        __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#else /* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
        __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
        __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif
#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
        __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#else /* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
        __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
        __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif
#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
        __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#else /* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
        __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
        __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif
#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
        __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...) \
        __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...) \
        __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#endif /* atomic64_fetch_inc */
#else /* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...) \
        __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...) \
        __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...) \
        __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...) \
        __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#endif /* atomic64_fetch_dec */
#else /* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...) \
        __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...) \
        __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...) \
        __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...) \
        __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...) \
        __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...) \
        __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#else /* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
        __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
        __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
        __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif
#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
        __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#else /* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
        __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
        __atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif
#ifndef atomic64_xchg
#define atomic64_xchg(...) \
        __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
        __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
        __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif
#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
        __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
        atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
        return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_release(~i, v);
}
#endif

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */