percpu.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
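
/*
 * Illustrative sketch (not part of this header): with the array scheme
 * above, the instance of a percpu variable for a given CPU is reached
 * by plain pointer arithmetic, roughly:
 *
 *	DEFINE_PER_CPU(int, my_counter);	// hypothetical variable
 *	int *p = per_cpu_ptr(&my_counter, 3);
 *	// p == (int *)((char *)&my_counter + __per_cpu_offset[3])
 */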

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
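
/*
 * Illustrative arch override (a simplified sketch modeled on arm64, not
 * part of this header): the current CPU's offset is kept in a per-CPU
 * system register, so obtaining it needs neither an array lookup nor a
 * call to smp_processor_id():
 *
 *	static inline unsigned long __my_cpu_offset(void)
 *	{
 *		unsigned long off;
 *
 *		asm("mrs %0, tpidr_el1" : "=r" (off));
 *		return off;
 *	}
 *	#define __my_cpu_offset __my_cpu_offset()
 */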

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
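
/*
 * Usage sketch (illustrative): raw_cpu_ptr() resolves a percpu address
 * for the current CPU without preemption checks, so the caller must
 * already have preemption disabled (or must tolerate migration):
 *
 *	DEFINE_PER_CPU(struct my_stats, stats);	// hypothetical variable
 *	struct my_stats *s = raw_cpu_ptr(&stats);
 *	s->events++;
 */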

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})
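
/*
 * Illustrative sketch: the generic ops above back the raw_cpu_*()
 * fallbacks with plain (non-atomic, non-IRQ-safe) memory accesses, e.g.:
 *
 *	DEFINE_PER_CPU(unsigned long, nr_calls);	// hypothetical
 *	raw_cpu_add(nr_calls, 2);	// *raw_cpu_ptr(&nr_calls) += 2
 *	unsigned long n = raw_cpu_add_return(nr_calls, 1);
 *	// n holds the post-increment value for this CPU
 */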

#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})
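
/*
 * Usage sketch (illustrative): cmpxchg on a percpu variable follows the
 * usual compare-and-swap pattern; the raw_ variant performs no locking
 * or IRQ masking itself, since the value is private to this CPU:
 *
 *	DEFINE_PER_CPU(int, state);	// hypothetical
 *	int old = raw_cpu_read(state);
 *	if (raw_cpu_cmpxchg(state, old, old + 1) == old)
 *		;	// update applied
 */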

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})
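
/*
 * Illustrative note: unlike the single-variable cmpxchg, the _double
 * variant compares and updates a pair of percpu words as one unit and
 * returns 1 on success, 0 on failure, e.g. (hypothetical variables):
 *
 *	if (this_cpu_cmpxchg_double(head, gen, old_head, old_gen,
 *				    new_head, old_gen + 1))
 *		;	// both words updated together
 */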

#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) __ret;						\
	preempt_disable_notrace();					\
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	__ret;								\
})

#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
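
/*
 * Illustrative note: a native-word-sized read is single-copy atomic, so
 * disabling preemption (to pin the task to this CPU) plus READ_ONCE()
 * is enough; a larger object could be torn by an interrupt arriving
 * mid-copy, so the noirq path masks interrupts around the read instead.
 */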

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
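
/*
 * Usage sketch (illustrative): the this_cpu_*() generic ops wrap the
 * raw ops in raw_local_irq_save()/restore(), making each op safe
 * against both preemption and interrupts on this CPU, so it can be
 * used from any context without explicit locking:
 *
 *	DEFINE_PER_CPU(unsigned long, rx_bytes);	// hypothetical
 *	this_cpu_add(rx_bytes, len);	// IRQ-safe on this CPU
 */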

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
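
/*
 * Illustrative note: the public raw_cpu_*() and this_cpu_*() macros in
 * <linux/percpu-defs.h> select among these size-suffixed fallbacks by
 * sizeof(), roughly:
 *
 *	#define raw_cpu_read(pcp) \
 *		__pcpu_size_call_return(raw_cpu_read_, pcp)
 *	// expands to raw_cpu_read_1/2/4/8 based on sizeof(pcp)
 */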

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */