author    Rich Felker <dalias@aerifal.cx>  2018-10-16 14:08:01 -0400
committer Rich Felker <dalias@aerifal.cx>  2018-10-16 14:11:46 -0400
commit    a4a3e4dbc086eb58e5cf6118480ef4825788e231 (patch)
tree      25d6ffb2cedf301bf69306e87b36aa5b3a68dbcb /arch/arm
parent    7f01a734feddaabf366bc644c926e675656cab62 (diff)
download  musl-a4a3e4dbc086eb58e5cf6118480ef4825788e231.tar.gz
make thread-pointer-loading asm non-volatile
this will allow the compiler to cache and reuse the result, meaning we no longer have to take care not to load it more than once for the sake of archs where the load may be expensive.

depends on commit 1c84c99913bf1cd47b866ed31e665848a0da84a2 for correctness, since otherwise the compiler could hoist loads during stage 3 of dynamic linking, before the initial thread-pointer setup.
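As an illustration (not part of the patch), here is a minimal sketch of what dropping __volatile__ permits: with no inputs and no side effects declared, the asm becomes a pure value computation, so the compiler may cache the loaded thread pointer and reuse it across calls instead of emitting one mrc per call. The simplified struct pthread, hypothetical_pthread_self, and hypothetical_caller below are assumptions for illustration only; they compile only for an ARM target.

/* minimal sketch, assuming an ARM target; not code from the patch */
struct pthread { int tid; /* the real struct has many more members */ };

static inline struct pthread *hypothetical_pthread_self(void)
{
	char *p;
	/* no __volatile__: identical asm statements with the same (empty)
	 * inputs may be merged, so the load can be done just once */
	__asm__ ( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
	return (struct pthread *)(p - sizeof(struct pthread));
}

int hypothetical_caller(void)
{
	/* both uses below can share a single mrc; with __volatile__ the
	 * compiler would have to emit the load twice */
	return hypothetical_pthread_self()->tid
	     + hypothetical_pthread_self()->tid;
}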
Diffstat (limited to 'arch/arm')
-rw-r--r--  arch/arm/pthread_arch.h  |  4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/arch/arm/pthread_arch.h b/arch/arm/pthread_arch.h
index 5c6aff28..e689ea21 100644
--- a/arch/arm/pthread_arch.h
+++ b/arch/arm/pthread_arch.h
@@ -4,7 +4,7 @@
 static inline pthread_t __pthread_self()
 {
 	char *p;
-	__asm__ __volatile__ ( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
+	__asm__ ( "mrc p15,0,%0,c13,c0,3" : "=r"(p) );
 	return (void *)(p-sizeof(struct pthread));
 }
@@ -20,7 +20,7 @@ static inline pthread_t __pthread_self()
 {
 	extern hidden uintptr_t __a_gettp_ptr;
 	register uintptr_t p __asm__("r0");
-	__asm__ __volatile__ ( BLX " %1" : "=r"(p) : "r"(__a_gettp_ptr) : "cc", "lr" );
+	__asm__ ( BLX " %1" : "=r"(p) : "r"(__a_gettp_ptr) : "cc", "lr" );
 	return (void *)(p-sizeof(struct pthread));
 }