atomic-instrumented.h

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file then provides atomic_read(), which forwards
 * to arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), then it needs to
 * use the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to
 * avoid double instrumentation.
 */
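
/*
 * Illustrative sketch (an assumption for documentation only, not part of
 * this header): an arch's atomic.h would typically define every arch_
 * operation and only then pull this header in, roughly as in the
 * hypothetical fragment below.  The READ_ONCE()-based body is just one
 * possible implementation; each arch supplies its own.
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *
 *	// ... all remaining arch_atomic*() and arch_*xchg*() definitions ...
 *
 *	#include <asm-generic/atomic-instrumented.h>
 */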
#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

static __always_inline int atomic_read(const atomic_t *v)
{
        kasan_check_read(v, sizeof(*v));
        return arch_atomic_read(v);
}

static __always_inline s64 atomic64_read(const atomic64_t *v)
{
        kasan_check_read(v, sizeof(*v));
        return arch_atomic64_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_set(v, i);
}

static __always_inline void atomic64_set(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_set(v, i);
}

static __always_inline int atomic_xchg(atomic_t *v, int i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_xchg(v, i);
}

static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_xchg(v, i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg(v, old, new);
}
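
/*
 * try_cmpxchg() is optional: it is only wrapped when the arch provides
 * arch_atomic_try_cmpxchg()/arch_atomic64_try_cmpxchg().  Unlike the other
 * wrappers, these also check the memory behind the 'old' pointer, which is
 * accessed as part of the operation.
 */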
#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_read(old, sizeof(*old));
        return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
        kasan_check_write(v, sizeof(*v));
        kasan_check_read(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif

static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        kasan_check_write(v, sizeof(*v));
        return __arch_atomic_add_unless(v, a, u);
}

static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_unless(v, a, u);
}

static __always_inline void atomic_inc(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_inc(v);
}

static __always_inline void atomic64_inc(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_inc(v);
}

static __always_inline void atomic_dec(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_dec(v);
}

static __always_inline void atomic64_dec(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_dec(v);
}

static __always_inline void atomic_add(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        arch_atomic64_xor(i, v);
}

static __always_inline int atomic_inc_return(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_return(v);
}

static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_return(v);
}

static __always_inline int atomic_dec_return(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_return(v);
}

static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_return(v);
}

static __always_inline s64 atomic64_inc_not_zero(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_not_zero(v);
}

static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_if_positive(v);
}

static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_dec_and_test(v);
}

static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_dec_and_test(v);
}

static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_inc_and_test(v);
}

static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_inc_and_test(v);
}

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_return(i, v);
}

static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_sub_return(i, v);
}

static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_sub_return(i, v);
}

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_add(i, v);
}

static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_sub(i, v);
}

static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_and(i, v);
}

static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_or(i, v);
}

static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_fetch_xor(i, v);
}

static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor(i, v);
}

static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic_add_negative(i, v);
}

static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
        kasan_check_write(v, sizeof(*v));
        return arch_atomic64_add_negative(i, v);
}
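
/*
 * The generic exchange macros below evaluate 'ptr' exactly once into
 * __ai_ptr, run the KASAN check on that location and then defer to the
 * corresponding arch_ variant.  cmpxchg_double() operates on two adjacent
 * words, hence the 2 * sizeof(*__ai_p1) check.
 */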
#define xchg(ptr, new)                                                  \
({                                                                      \
        typeof(ptr) __ai_ptr = (ptr);                                   \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));                 \
        arch_xchg(__ai_ptr, (new));                                     \
})

#define cmpxchg(ptr, old, new)                                          \
({                                                                      \
        typeof(ptr) __ai_ptr = (ptr);                                   \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));                 \
        arch_cmpxchg(__ai_ptr, (old), (new));                           \
})

#define sync_cmpxchg(ptr, old, new)                                     \
({                                                                      \
        typeof(ptr) __ai_ptr = (ptr);                                   \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));                 \
        arch_sync_cmpxchg(__ai_ptr, (old), (new));                      \
})

#define cmpxchg_local(ptr, old, new)                                    \
({                                                                      \
        typeof(ptr) __ai_ptr = (ptr);                                   \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));                 \
        arch_cmpxchg_local(__ai_ptr, (old), (new));                     \
})

#define cmpxchg64(ptr, old, new)                                        \
({                                                                      \
        typeof(ptr) __ai_ptr = (ptr);                                   \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));                 \
        arch_cmpxchg64(__ai_ptr, (old), (new));                         \
})

#define cmpxchg64_local(ptr, old, new)                                  \
({                                                                      \
        typeof(ptr) __ai_ptr = (ptr);                                   \
        kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));                 \
        arch_cmpxchg64_local(__ai_ptr, (old), (new));                   \
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2)                          \
({                                                                      \
        typeof(p1) __ai_p1 = (p1);                                      \
        kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));               \
        arch_cmpxchg_double(__ai_p1, (p2), (o1), (o2), (n1), (n2));     \
})

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2)                    \
({                                                                      \
        typeof(p1) __ai_p1 = (p1);                                      \
        kasan_check_write(__ai_p1, 2 * sizeof(*__ai_p1));               \
        arch_cmpxchg_double_local(__ai_p1, (p2), (o1), (o2), (n1), (n2)); \
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */