/* Syscall argument marshalling, presumably for the Linux x32 ABI
 * (ILP32 userspace, syscall arguments in full 64-bit registers) -- TODO confirm. */
#define __SYSCALL_LL_E(x) (x)
#define __SYSCALL_LL_O(x) (x)

/* Widen a syscall argument to 64 bits.  `1?(X):0ULL` has the promoted
 * common type of X and unsigned long long: integer arguments promote to
 * 8 bytes and go through (long long) (sign-extended), while anything
 * whose result size stays below 8 (e.g. pointers on an ILP32 target) is
 * zero-extended through (unsigned long).
 * Fix: the whole expansion is now parenthesized so the ternary cannot
 * rebind against neighboring operators at the use site
 * (e.g. `1 + __scc(x)` previously folded the `+` into the false arm). */
#define __scc(X) (sizeof(1?(X):0ULL) < 8 ? (unsigned long) (X) : (long long) (X))
#define syscall_arg_t long long

/* Userspace timespec: 64-bit tv_sec but only `long` tv_nsec. */
struct __timespec { long long tv_sec; long tv_nsec; };
/* Kernel-side layout: both fields are 64-bit. */
struct __timespec_kernel { long long tv_sec; long long tv_nsec; };
/* Reinterpret a syscall argument as a userspace timespec pointer; the
 * (unsigned long) round-trip narrows/zero-extends to pointer width. */
#define __tsc(X) ((struct __timespec*)(unsigned long)(X))
/* Zero-argument syscall: number in rax, result back in rax ("=a").
 * rcx/r11 are clobbered by the `syscall` instruction itself; "memory"
 * keeps the compiler from caching values across the kernel entry.
 * NOTE(review): the function's braces, `ret` declaration and `return`
 * are elided from this view of the file. */
static __inline long __syscall0(long long n)
__asm__ __volatile__ ("syscall" : "=a"(ret) : "a"(n) : "rcx", "r11", "memory");
/* One-argument syscall: arg1 in rdi (constraint "D"), number in rax.
 * NOTE(review): braces, `ret` declaration and `return` are elided here. */
static __inline long __syscall1(long long n, long long a1)
__asm__ __volatile__ ("syscall" : "=a"(ret) : "a"(n), "D"(a1) : "rcx", "r11", "memory");
/* Two-argument syscall (rdi, rsi).  For time-related syscall numbers,
 * copies the userspace __timespec (narrow tv_nsec, see struct above)
 * into an on-stack __timespec_kernel compound literal with both fields
 * widened to 64 bits, and passes that address to the kernel instead.
 * NOTE(review): the `switch(n)` header, the case label governing the
 * a1 rewrite, `break`s and the epilogue are elided from this view. */
static __inline long __syscall2(long long n, long long a1, long long a2)
struct __timespec *ts2 = 0; /* declared but unused in the visible lines */
/* Rewrite a1 only when non-null: NULL timespec pointers pass through. */
if(a1) a1 = (unsigned long) (&(struct __timespec_kernel) {
	.tv_sec = __tsc(a1)->tv_sec, .tv_nsec = __tsc(a1)->tv_nsec});
case SYS_clock_settime:
/* clock_settime's new-time argument is a2; same widening copy. */
if(a2) a2 = (unsigned long) (&(struct __timespec_kernel) {
	.tv_sec = __tsc(a2)->tv_sec, .tv_nsec = __tsc(a2)->tv_nsec});
__asm__ __volatile__ ("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2)
	: "rcx", "r11", "memory");
/* Three-argument syscall: rdi, rsi, rdx ("D", "S", "d").
 * NOTE(review): braces, `ret` declaration and `return` are elided here. */
static __inline long __syscall3(long long n, long long a1, long long a2, long long a3)
__asm__ __volatile__ ("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2),
	"d"(a3) : "rcx", "r11", "memory");
/* Four-argument syscall.  arg4 must travel in r10 (not rcx, which the
 * `syscall` instruction clobbers), hence the explicit register binding.
 * Widens userspace timespec arguments to kernel layout as in __syscall2.
 * NOTE(review): the signature continuation, switch header, some case
 * labels, `break`s and the epilogue are elided from this view. */
static __inline long __syscall4(long long n, long long a1, long long a2, long long a3,
register long long r10 __asm__("r10") = a4;
/* Masking out bit 7 (FUTEX_PRIVATE_FLAG) leaves the base futex op;
 * 0 is FUTEX_WAIT, whose 4th argument is an optional timeout. */
if((a2 & (~128 /* FUTEX_PRIVATE_FLAG */)) == 0 /* FUTEX_WAIT */) {
	if(r10) r10 = (unsigned long) (&(struct __timespec_kernel) {
		.tv_sec = __tsc(r10)->tv_sec, .tv_nsec = __tsc(r10)->tv_nsec});
/* Here a3 points at a pair of timespecs (atime/mtime style); both
 * elements are widened into a two-element compound-literal array.
 * NOTE(review): the governing case label is elided -- confirm which
 * syscall this serves. */
if(a3) a3 = (unsigned long) ((struct __timespec_kernel[2]) {
	[0] = {.tv_sec = __tsc(a3)[0].tv_sec, .tv_nsec = __tsc(a3)[0].tv_nsec},
	[1] = {.tv_sec = __tsc(a3)[1].tv_sec, .tv_nsec = __tsc(a3)[1].tv_nsec},
case SYS_clock_nanosleep:
case SYS_rt_sigtimedwait: case SYS_ppoll:
/* These take a single optional timespec as arg3; widen if non-null. */
if(a3) a3 = (unsigned long) (&(struct __timespec_kernel) {
	.tv_sec = __tsc(a3)->tv_sec, .tv_nsec = __tsc(a3)->tv_nsec});
__asm__ __volatile__ ("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2),
	"d"(a3), "r"(r10): "rcx", "r11", "memory");
/* Five-argument syscall: arg4 in r10, arg5 in r8 (explicit register
 * bindings, since the C calling convention differs from the kernel's).
 * NOTE(review): switch header, `break`s and epilogue are elided here. */
static __inline long __syscall5(long long n, long long a1, long long a2, long long a3,
	long long a4, long long a5)
register long long r10 __asm__("r10") = a4;
register long long r8 __asm__("r8") = a5;
/* FUTEX_WAIT (base op 0 after stripping FUTEX_PRIVATE_FLAG): widen the
 * optional timeout in arg4, as in __syscall4. */
if((a2 & (~128 /* FUTEX_PRIVATE_FLAG */)) == 0 /* FUTEX_WAIT */) {
	if(r10) r10 = (unsigned long) (&(struct __timespec_kernel) {
		.tv_sec = __tsc(r10)->tv_sec, .tv_nsec = __tsc(r10)->tv_nsec});
case SYS_mq_timedsend: case SYS_mq_timedreceive:
/* mq_timedsend/mq_timedreceive carry their abs_timeout as arg5. */
if(r8) r8 = (unsigned long) (&(struct __timespec_kernel) {
	.tv_sec = __tsc(r8)->tv_sec, .tv_nsec = __tsc(r8)->tv_nsec});
__asm__ __volatile__ ("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2),
	"d"(a3), "r"(r10), "r"(r8) : "rcx", "r11", "memory");
/* Six-argument syscall: arg4 in r10, arg5 in r8, arg6 in r9.
 * NOTE(review): switch header, the case label governing the r8 rewrite,
 * `break`s and the epilogue are elided from this view. */
static __inline long __syscall6(long long n, long long a1, long long a2, long long a3,
	long long a4, long long a5, long long a6)
register long long r10 __asm__("r10") = a4;
register long long r8 __asm__("r8") = a5;
register long long r9 __asm__("r9") = a6;
/* FUTEX_WAIT timeout widening, same pattern as __syscall4/__syscall5. */
if((a2 & (~128 /* FUTEX_PRIVATE_FLAG */)) == 0 /* FUTEX_WAIT */) {
	if(r10) r10 = (unsigned long) (&(struct __timespec_kernel) {
		.tv_sec = __tsc(r10)->tv_sec, .tv_nsec = __tsc(r10)->tv_nsec});
/* Widen an optional timespec passed as arg5 for some elided case label
 * -- confirm which syscall this serves. */
if(r8) r8 = (unsigned long) (&(struct __timespec_kernel) {
	.tv_sec = __tsc(r8)->tv_sec, .tv_nsec = __tsc(r8)->tv_nsec});
__asm__ __volatile__ ("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2),
	"d"(a3), "r"(r10), "r"(r8), "r"(r9) : "rcx", "r11", "memory");