preempt.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_PREEMPT_H
#define __ASM_PREEMPT_H

#include <linux/thread_info.h>

/* A preempt_count of zero means preemption is enabled. */
#define PREEMPT_ENABLED	(0)

static __always_inline int preempt_count(void)
{
	return READ_ONCE(current_thread_info()->preempt_count);
}

static __always_inline volatile int *preempt_count_ptr(void)
{
	return &current_thread_info()->preempt_count;
}

static __always_inline void preempt_count_set(int pc)
{
	*preempt_count_ptr() = pc;
}

/*
 * must be macros to avoid header recursion hell
 */
#define init_task_preempt_count(p) do { \
	task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
} while (0)

#define init_idle_preempt_count(p, cpu) do { \
	task_thread_info(p)->preempt_count = PREEMPT_ENABLED; \
} while (0)

/*
 * The generic implementation does not fold TIF_NEED_RESCHED into the
 * preempt_count, so these helpers are no-ops and the flag is tested
 * explicitly via tif_need_resched() below.
 */
static __always_inline void set_preempt_need_resched(void)
{
}

static __always_inline void clear_preempt_need_resched(void)
{
}

static __always_inline bool test_preempt_need_resched(void)
{
	return false;
}
/*
 * The various preempt_count add/sub methods
 */

static __always_inline void __preempt_count_add(int val)
{
	*preempt_count_ptr() += val;
}

static __always_inline void __preempt_count_sub(int val)
{
	*preempt_count_ptr() -= val;
}

static __always_inline bool __preempt_count_dec_and_test(void)
{
	/*
	 * Because load-store architectures cannot do per-cpu atomic
	 * operations, we cannot use PREEMPT_NEED_RESCHED here: it might get
	 * lost.
	 */
	return !--*preempt_count_ptr() && tif_need_resched();
}
/*
 * Returns true when we need to resched and can (barring IRQ state).
 */
static __always_inline bool should_resched(int preempt_offset)
{
	return unlikely(preempt_count() == preempt_offset &&
			tif_need_resched());
}

#ifdef CONFIG_PREEMPT
extern asmlinkage void preempt_schedule(void);
#define __preempt_schedule() preempt_schedule()
extern asmlinkage void preempt_schedule_notrace(void);
#define __preempt_schedule_notrace() preempt_schedule_notrace()
#endif /* CONFIG_PREEMPT */

#endif /* __ASM_PREEMPT_H */
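
For context, a minimal sketch of how the hooks above are typically consumed: linux/preempt.h builds preempt_disable()/preempt_enable() on top of __preempt_count_add(), __preempt_count_dec_and_test() and __preempt_schedule(). The sketch below is simplified and assumes a CONFIG_PREEMPT kernel; the real macros also carry tracing hooks and CONFIG_DEBUG_PREEMPT bookkeeping.

	/*
	 * Simplified illustration (not part of this header) of the
	 * preempt_disable()/preempt_enable() pairing built on the generic
	 * preempt_count hooks.
	 */
	#define preempt_disable() \
	do { \
		__preempt_count_add(1);	/* enter a non-preemptible section */ \
		barrier();		/* keep the critical section after the increment */ \
	} while (0)

	#define preempt_enable() \
	do { \
		barrier(); \
		/* Drop the count; if it reaches zero and TIF_NEED_RESCHED is set, */ \
		/* __preempt_count_dec_and_test() returns true and we reschedule.  */ \
		if (unlikely(__preempt_count_dec_and_test())) \
			__preempt_schedule(); \
	} while (0)

should_resched() serves the same purpose for callers that only want to ask whether a reschedule is both needed (tif_need_resched()) and possible (preempt_count() at the expected offset) without dropping the count themselves.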