author     Rich Felker <dalias@aerifal.cx>    2020-03-11 18:50:21 -0400
committer  Rich Felker <dalias@aerifal.cx>    2020-03-14 21:22:50 -0400
commit     972b5fde9a2d1b5517c15803fb437195e6663085 (patch)
tree       9ac85d013ecaf30c7802d6a4cbbb043bcdeb7571 /arch
parent     5053fd2644c188ac6531e5fe6a52a1a6d0df2fc2 (diff)
download   musl-972b5fde9a2d1b5517c15803fb437195e6663085.tar.gz
revert mips (32-bit, o32) syscall asm clean-up due to regressions
exactly revert commit 604f8d3d8b08ee4f548de193050ef93a7753c2e0 which was wrong; it caused a major regression on Linux versions prior to 2.6.36. old kernels did not properly preserve r2 across syscall restart, and instead restarted with the instruction right before syscall, imposing a contract that the previous instruction must load r2 from an immediate or a register (or memory) not clobbered by the syscall.
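For illustration only (not part of this commit): a minimal sketch contrasting the two forms of __syscall0 touched by this revert, with the restart contract spelled out in comments. It assumes a mips o32 target and the SYSCALL_CLOBBERLIST macro defined earlier in arch/mips/syscall_arch.h; the function names are hypothetical.

/* Reverted form (broken on kernels before 2.6.36): the compiler is free to
 * schedule the load of n into $2 anywhere before "syscall". An old kernel
 * does not preserve $2 across a syscall restart; it re-executes the
 * instruction immediately preceding "syscall", and if that instruction is
 * not the load of $2, the restarted syscall runs with a clobbered number. */
static inline long __syscall0_clean(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2") = n;
	__asm__ __volatile__ (
		"syscall"
		: "+r"(r2), "=r"(r7)
		:
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 ? -r2 : r2;
}

/* Restored form: "addu $2,$0,%2" is guaranteed to be the instruction right
 * before "syscall", and its "ir" operand is an immediate or a register the
 * syscall does not clobber, so re-executing it on restart reloads the
 * correct syscall number into $2. */
static inline long __syscall0_restored(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 ? -r2 : r2;
}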
Diffstat (limited to 'arch')
-rw-r--r--   arch/mips/syscall_arch.h | 63
1 file changed, 31 insertions, 32 deletions
diff --git a/arch/mips/syscall_arch.h b/arch/mips/syscall_arch.h
index f821e73f..a3812188 100644
--- a/arch/mips/syscall_arch.h
+++ b/arch/mips/syscall_arch.h
@@ -18,11 +18,10 @@
static inline long __syscall0(long n)
{
register long r7 __asm__("$7");
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
- "syscall"
- : "+r"(r2), "=r"(r7)
- :
+ "addu $2,$0,%2 ; syscall"
+ : "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
return r7 ? -r2 : r2;
}
@@ -31,11 +30,11 @@ static inline long __syscall1(long n, long a)
{
register long r4 __asm__("$4") = a;
register long r7 __asm__("$7");
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
- "syscall"
- : "+r"(r2), "=r"(r7)
- : "r"(r4)
+ "addu $2,$0,%2 ; syscall"
+ : "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+ "r"(r4)
: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
return r7 ? -r2 : r2;
}
@@ -45,11 +44,11 @@ static inline long __syscall2(long n, long a, long b)
register long r4 __asm__("$4") = a;
register long r5 __asm__("$5") = b;
register long r7 __asm__("$7");
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
- "syscall"
- : "+r"(r2), "=r"(r7)
- : "r"(r4), "r"(r5)
+ "addu $2,$0,%2 ; syscall"
+ : "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+ "r"(r4), "r"(r5)
: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
return r7 ? -r2 : r2;
}
@@ -60,11 +59,11 @@ static inline long __syscall3(long n, long a, long b, long c)
register long r5 __asm__("$5") = b;
register long r6 __asm__("$6") = c;
register long r7 __asm__("$7");
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
- "syscall"
- : "+r"(r2), "=r"(r7)
- : "r"(r4), "r"(r5), "r"(r6)
+ "addu $2,$0,%2 ; syscall"
+ : "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+ "r"(r4), "r"(r5), "r"(r6)
: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
return r7 ? -r2 : r2;
}
@@ -75,11 +74,11 @@ static inline long __syscall4(long n, long a, long b, long c, long d)
register long r5 __asm__("$5") = b;
register long r6 __asm__("$6") = c;
register long r7 __asm__("$7") = d;
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
- "syscall"
- : "+r"(r2), "+r"(r7)
- : "r"(r4), "r"(r5), "r"(r6)
+ "addu $2,$0,%2 ; syscall"
+ : "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
+ "r"(r4), "r"(r5), "r"(r6)
: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
return r7 ? -r2 : r2;
}
@@ -91,13 +90,13 @@ static inline long __syscall5(long n, long a, long b, long c, long d, long e)
register long r6 __asm__("$6") = c;
register long r7 __asm__("$7") = d;
register long r8 __asm__("$8") = e;
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
"subu $sp,$sp,32 ; sw $8,16($sp) ; "
- "syscall ;"
+ "addu $2,$0,%3 ; syscall ;"
"addu $sp,$sp,32"
- : "+r"(r2), "+r"(r7), "+r"(r8)
- : "r"(r4), "r"(r5), "r"(r6)
+ : "=&r"(r2), "=r"(r7), "+r"(r8)
+ : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
: SYSCALL_CLOBBERLIST, "$9", "$10");
return r7 ? -r2 : r2;
}
@@ -110,13 +109,13 @@ static inline long __syscall6(long n, long a, long b, long c, long d, long e, lo
register long r7 __asm__("$7") = d;
register long r8 __asm__("$8") = e;
register long r9 __asm__("$9") = f;
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; "
- "syscall ;"
+ "addu $2,$0,%4 ; syscall ;"
"addu $sp,$sp,32"
- : "+r"(r2), "+r"(r7), "+r"(r8), "+r"(r9)
- : "r"(r4), "r"(r5), "r"(r6)
+ : "=&r"(r2), "=r"(r7), "+r"(r8), "+r"(r9)
+ : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
: SYSCALL_CLOBBERLIST, "$10");
return r7 ? -r2 : r2;
}
@@ -130,13 +129,13 @@ static inline long __syscall7(long n, long a, long b, long c, long d, long e, lo
register long r8 __asm__("$8") = e;
register long r9 __asm__("$9") = f;
register long r10 __asm__("$10") = g;
- register long r2 __asm__("$2") = n;
+ register long r2 __asm__("$2");
__asm__ __volatile__ (
"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; sw $10,24($sp) ; "
- "syscall ;"
+ "addu $2,$0,%5 ; syscall ;"
"addu $sp,$sp,32"
- : "+r"(r2), "+r"(r7), "+r"(r8), "+r"(r9), "+r"(r10)
- : "r"(r4), "r"(r5), "r"(r6)
+ : "=&r"(r2), "=r"(r7), "+r"(r8), "+r"(r9), "+r"(r10)
+ : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
: SYSCALL_CLOBBERLIST);
return r7 ? -r2 : r2;
}