stub.h

/*
 * Copyright (C) 2004 Jeff Dike (jdike@addtoit.com)
 * Licensed under the GPL
 */

#ifndef __SYSDEP_STUB_H
#define __SYSDEP_STUB_H

#include <sys/mman.h>
#include <asm/unistd.h>
#include <sysdep/ptrace_user.h>
#include "as-layout.h"
#include "stub-data.h"

extern void stub_segv_handler(int sig);
extern void stub_clone_handler(void);

#define STUB_SYSCALL_RET PT_INDEX(RAX)
#define STUB_MMAP_NR __NR_mmap
#define MMAP_OFFSET(o) (o)

#define __syscall_clobber "r11","rcx","memory"
#define __syscall "syscall"
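
/*
 * The wrappers below follow the x86_64 syscall calling convention: the
 * syscall number goes in %rax, the arguments in %rdi, %rsi, %rdx, %r10,
 * %r8 and %r9, and the return value comes back in %rax.  The "syscall"
 * instruction itself clobbers %rcx and %r11, which is why they appear in
 * __syscall_clobber.
 */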
static inline long stub_syscall0(long syscall)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall) : __syscall_clobber );

	return ret;
}
static inline long stub_syscall2(long syscall, long arg1, long arg2)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2) : __syscall_clobber );

	return ret;
}
static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3)
		: __syscall_clobber );

	return ret;
}
static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
				 long arg4)
{
	long ret;

	__asm__ volatile ("movq %5,%%r10 ; " __syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
		  "g" (arg4)
		: __syscall_clobber, "r10" );

	return ret;
}
static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
				 long arg4, long arg5)
{
	long ret;

	__asm__ volatile ("movq %5,%%r10 ; movq %6,%%r8 ; " __syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
		  "g" (arg4), "g" (arg5)
		: __syscall_clobber, "r10", "r8" );

	return ret;
}
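
/*
 * Illustrative sketch only, not part of this header: how a stub routine
 * might call one of the wrappers above.  The helper name and the choice of
 * __NR_mprotect are assumptions made for the example; the return value
 * follows the raw syscall convention of a negative errno on failure.
 */
static inline long stub_example_mprotect(unsigned long addr)
{
	/* mprotect(addr, UM_KERN_PAGE_SIZE, PROT_READ | PROT_WRITE) */
	return stub_syscall3(__NR_mprotect, addr, UM_KERN_PAGE_SIZE,
			     PROT_READ | PROT_WRITE);
}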
/* Raise SIGTRAP so the tracing host process regains control of the stub. */
static inline void trap_myself(void)
{
	__asm("int3");
}
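
/*
 * Map the stub data page at STUB_DATA from the given fd/offset with a raw
 * mmap and store the result in stub_data->err; the stub cannot rely on
 * libc or errno, so the return value is written out through %rbx by hand.
 */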
static inline void remap_stack(long fd, unsigned long offset)
{
	__asm__ volatile ("movq %4,%%r10 ; movq %5,%%r8 ; "
		"movq %6, %%r9; " __syscall "; movq %7, %%rbx ; "
		"movq %%rax, (%%rbx)":
		: "a" (STUB_MMAP_NR), "D" (STUB_DATA),
		  "S" (UM_KERN_PAGE_SIZE),
		  "d" (PROT_READ | PROT_WRITE),
		  "g" (MAP_FIXED | MAP_SHARED), "g" (fd),
		  "g" (offset),
		  "i" (&((struct stub_data *) STUB_DATA)->err)
		: __syscall_clobber, "r10", "r8", "r9" );
}

#endif