From a6ab25ff9885a810c8ceba42b9b1ea3badaae879 Mon Sep 17 00:00:00 2001
From: Rich Felker
Date: Wed, 11 Mar 2020 18:43:11 -0400
Subject: [PATCH 1/4] revert previous changes to mips64/n32 syscall asm
 constraints

effectively revert commit ddc7c4f936c7a90781072f10dbaa122007e939d0,
which was wrong; it caused a major regression on Linux versions prior
to 2.6.36. old kernels did not properly preserve r2 across syscall
restart, and instead restarted with the instruction right before
syscall, imposing a contract that the previous instruction must load
r2 from an immediate or a register (or memory) not clobbered by the
syscall.

because other changes were made in the meantime, this is not a direct
revert, only a functional one. the improved style and removal of
redundant constraints are kept.
---
 arch/mips64/syscall_arch.h  | 56 +++++++++++++++++-----------------
 arch/mipsn32/syscall_arch.h | 61 ++++++++++++++++++++-----------------
 2 files changed, 61 insertions(+), 56 deletions(-)
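Reviewer note, not part of the commit (text here, before the first diff
header, is ignored when the patch is applied): the restart contract
described above is what the restored asm enforces; the instruction
immediately before syscall reloads $2 itself. A minimal annotated
sketch of the restored pattern, shown for the one-argument case, with
SYSCALL_CLOBBERLIST the clobber-list macro defined earlier in each
syscall_arch.h:

static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	/* "daddu $2,$0,%2" reloads the syscall number from the "ir"(n)
	 * operand: an immediate (the assembler accepts the macro form,
	 * substituting the immediate variant of the instruction) or a
	 * register the syscall does not clobber. If an old kernel
	 * restarts the syscall by backing up one instruction, $2 is
	 * re-materialized correctly. The reverted form instead bound
	 * r2 = n before the asm with a "+&r"(r2) constraint, letting
	 * the compiler load $2 arbitrarily early, so on pre-2.6.36
	 * kernels a restart re-issued the syscall with $2 already
	 * clobbered. */
	__asm__ __volatile__ (
		"daddu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "r"(r4)
		: SYSCALL_CLOBBERLIST);
	return r7 ? -r2 : r2;
}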
diff --git a/arch/mips64/syscall_arch.h b/arch/mips64/syscall_arch.h
index 69c429b8..8f6758d3 100644
--- a/arch/mips64/syscall_arch.h
+++ b/arch/mips64/syscall_arch.h
@@ -16,11 +16,11 @@
 static inline long __syscall0(long n)
 {
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		:
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -29,11 +29,11 @@ static inline long __syscall1(long n, long a)
 {
 	register long r4 __asm__("$4") = a;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "r"(r4)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -43,12 +43,12 @@ static inline long __syscall2(long n, long a, long b)
 	register long r4 __asm__("$4") = a;
 	register long r5 __asm__("$5") = b;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -59,12 +59,12 @@ static inline long __syscall3(long n, long a, long b, long c)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -75,12 +75,12 @@ static inline long __syscall4(long n, long a, long b, long c, long d)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -92,12 +92,12 @@ static inline long __syscall5(long n, long a, long b, long c, long d, long e)
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6), "r"(r8)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -110,12 +110,12 @@ static inline long __syscall6(long n, long a, long b, long c, long d, long e, lo
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
 	register long r9 __asm__("$9") = f;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
diff --git a/arch/mipsn32/syscall_arch.h b/arch/mipsn32/syscall_arch.h
index c1a4b7da..bc29e318 100644
--- a/arch/mipsn32/syscall_arch.h
+++ b/arch/mipsn32/syscall_arch.h
@@ -16,11 +16,11 @@
 static inline long __syscall0(long n)
 {
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		:
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -29,11 +29,11 @@ static inline long __syscall1(long n, long a)
 {
 	register long r4 __asm__("$4") = a;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "r"(r4)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -43,11 +43,12 @@ static inline long __syscall2(long n, long a, long b)
 	register long r4 __asm__("$4") = a;
 	register long r5 __asm__("$5") = b;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -58,11 +59,12 @@ static inline long __syscall3(long n, long a, long b, long c)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -73,11 +75,12 @@ static inline long __syscall4(long n, long a, long b, long c, long d)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -89,11 +92,12 @@ static inline long __syscall5(long n, long a, long b, long c, long d, long e)
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6), "r"(r8)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -106,11 +110,12 @@ static inline long __syscall6(long n, long a, long b, long c, long d, long e, lo
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
 	register long r9 __asm__("$9") = f;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
-- 
2.21.0