revert mips64/n32 syscall asm clean-up due to regressions

effectively revert commit ddc7c4f936
which was wrong; it caused a major regression on Linux versions prior
to 2.6.36. old kernels did not properly preserve r2 across syscall
restart, and instead restarted with the instruction right before
syscall, imposing a contract that the previous instruction must load
r2 from an immediate or a register (or memory) not clobbered by the
syscall.
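
concretely, the restored form (fragments here are from the __syscall0
case in the diff below) pins the load of r2 into the asm template
itself, immediately before the syscall instruction, so a restart that
backs up one instruction just re-executes a harmless reload:

	/* restored: %2 is "ir", an immediate or a register the kernel
	 * does not clobber, so re-executing the daddu is safe. */
	__asm__ __volatile__ (
		"daddu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2)
		: SYSCALL_CLOBBERLIST);

whereas the reverted form left the compiler free to schedule anything
(or nothing) directly before syscall:

	/* reverted: nothing constrains the instruction preceding
	 * syscall; on old kernels the restart re-executes that
	 * instruction in place of a reload of $2. */
	__asm__ __volatile__ (
		"syscall"
		: "+&r"(r2), "=r"(r7)
		:
		: SYSCALL_CLOBBERLIST);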

since other changes were made in the meantime, including removal of the
struct stat conversion that was replaced by a separate struct kstat,
this is not a direct revert, only a functional one.

the "0"(r2) input constraint added back seems useless/erroneous, but
without it most gcc versions (seems to be all prior to 9.x) fail to
honor the output register binding for r2. this seems to be a variant
of gcc bug #87733. further changes should be made later if a better
workaround is found, but this one has been working since 2012. it
seems this issue was encountered but misidentified then, when it
inspired commit 4221f154ff.
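
for reference, the operand in question is the matching constraint in
each wrapper (shown here from __syscall0): "0" ties the input to output
operand 0, making gcc treat $2 as both read and written, which is what
coaxes pre-9.x gcc into honoring the register binding:

	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"daddu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)	/* operand 0: r2 bound to $2 */
		: "ir"(n), "0"(r2)	/* "0"(r2): the value read is dead,
					 * but the tie is load-bearing */
		: SYSCALL_CLOBBERLIST);
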
Rich Felker 2020-03-11 18:43:11 -04:00
parent 2f2348c958
commit 5053fd2644
2 changed files with 61 additions and 56 deletions

arch/mips64/syscall_arch.h

@@ -16,11 +16,11 @@
 static inline long __syscall0(long n)
 {
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		:
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -29,11 +29,11 @@ static inline long __syscall1(long n, long a)
 {
 	register long r4 __asm__("$4") = a;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -43,12 +43,12 @@ static inline long __syscall2(long n, long a, long b)
 	register long r4 __asm__("$4") = a;
 	register long r5 __asm__("$5") = b;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -59,12 +59,12 @@ static inline long __syscall3(long n, long a, long b, long c)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -75,12 +75,12 @@ static inline long __syscall4(long n, long a, long b, long c, long d)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -92,12 +92,12 @@ static inline long __syscall5(long n, long a, long b, long c, long d, long e)
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6), "r"(r8)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -110,12 +110,12 @@ static inline long __syscall6(long n, long a, long b, long c, long d, long e, lo
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
 	register long r9 __asm__("$9") = f;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }

arch/mipsn32/syscall_arch.h

@@ -16,11 +16,11 @@
 static inline long __syscall0(long n)
 {
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		:
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -29,11 +29,11 @@ static inline long __syscall1(long n, long a)
 {
 	register long r4 __asm__("$4") = a;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -43,11 +43,12 @@ static inline long __syscall2(long n, long a, long b)
 	register long r4 __asm__("$4") = a;
 	register long r5 __asm__("$5") = b;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -58,11 +59,12 @@ static inline long __syscall3(long n, long a, long b, long c)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7");
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "=r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "=r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -73,11 +75,12 @@ static inline long __syscall4(long n, long a, long b, long c, long d)
 	register long r5 __asm__("$5") = b;
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -89,11 +92,12 @@ static inline long __syscall5(long n, long a, long b, long c, long d, long e)
 	register long r6 __asm__("$6") = c;
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6), "r"(r8)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
@@ -106,11 +110,12 @@ static inline long __syscall6(long n, long a, long b, long c, long d, long e, lo
 	register long r7 __asm__("$7") = d;
 	register long r8 __asm__("$8") = e;
 	register long r9 __asm__("$9") = f;
-	register long r2 __asm__("$2") = n;
+	register long r2 __asm__("$2");
+
 	__asm__ __volatile__ (
-		"syscall"
-		: "+&r"(r2), "+r"(r7)
-		: "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
+		"daddu $2,$0,%2 ; syscall"
+		: "=&r"(r2), "+r"(r7)
+		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6), "r"(r8), "r"(r9)
 		: SYSCALL_CLOBBERLIST);
 	return r7 ? -r2 : r2;
 }
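
each wrapper ends with "return r7 ? -r2 : r2" because the mips kernel
abi reports errors out of band: on failure $7 (a3) comes back nonzero
and $2 (v0) holds a positive error code, which the wrapper negates into
the usual negative-errno convention. a hypothetical caller sketch, with
SYS_close and fd purely illustrative:

	long r = __syscall1(SYS_close, fd);
	if (r < 0) {
		int err = -r;	/* positive errno, e.g. EBADF */
	}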