stub_32.h

/*
 * Copyright (C) 2004 Jeff Dike (jdike@addtoit.com)
 * Licensed under the GPL
 */

#ifndef __SYSDEP_STUB_H
#define __SYSDEP_STUB_H

#include <asm/ptrace.h>

#define STUB_SYSCALL_RET EAX
#define STUB_MMAP_NR __NR_mmap2
#define MMAP_OFFSET(o) ((o) >> UM_KERN_PAGE_SHIFT)

/*
 * Inline wrappers for the 32-bit x86 "int $0x80" system call ABI:
 * the syscall number is passed in %eax and up to five arguments in
 * %ebx, %ecx, %edx, %esi and %edi; the return value comes back in %eax.
 */
static inline long stub_syscall0(long syscall)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall));

	return ret;
}

static inline long stub_syscall1(long syscall, long arg1)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1));

	return ret;
}

static inline long stub_syscall2(long syscall, long arg1, long arg2)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2));

	return ret;
}

static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3));

	return ret;
}

static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
				 long arg4)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3), "S" (arg4));

	return ret;
}

static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
				 long arg4, long arg5)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5));

	return ret;
}

/* Raise SIGTRAP so the tracing process regains control. */
static inline void trap_myself(void)
{
	__asm("int3");
}

/*
 * Remap the stub data page: the page offset arrives in %eax and is moved
 * into %ebp (the sixth mmap2() argument), STUB_MMAP_NR is loaded into %eax
 * and the syscall is issued; the return value is then stored in the 'err'
 * field of the struct stub_data located at STUB_DATA.
 */
static inline void remap_stack(int fd, unsigned long offset)
{
	__asm__ volatile ("movl %%eax,%%ebp ; movl %0,%%eax ; int $0x80 ;"
			  "movl %7, %%ebx ; movl %%eax, (%%ebx)"
			  : : "g" (STUB_MMAP_NR), "b" (STUB_DATA),
			      "c" (UM_KERN_PAGE_SIZE),
			      "d" (PROT_READ | PROT_WRITE),
			      "S" (MAP_FIXED | MAP_SHARED), "D" (fd),
			      "a" (offset),
			      "i" (&((struct stub_data *) STUB_DATA)->err)
			  : "memory");
}

#endif
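
For context, a minimal sketch of how these wrappers might be invoked. This is not part of the header: stub_print() is a hypothetical helper, and the example assumes an i386 build where <asm/unistd.h> supplies __NR_write. In UML itself these helpers are used by the small stub routines that run in the traced child without a C library.

/*
 * Hypothetical illustration only: issue write(2) through stub_syscall3().
 * Assumes <asm/unistd.h> provides __NR_write and that fd 1 is stdout.
 */
#include <asm/unistd.h>

static inline void stub_print(const char *msg, long len)
{
	/* %eax = __NR_write, %ebx = fd, %ecx = buffer, %edx = length */
	stub_syscall3(__NR_write, 1, (long) msg, len);
}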