
unbreak support for ARM no MMU case

As suggested on the uClibc mailing list:
http://lists.uclibc.org/pipermail/uclibc/2014-November/048702.html
http://lists.uclibc.org/pipermail/uclibc/2014-November/048703.html
http://lists.uclibc.org/pipermail/uclibc/2014-November/048704.html
Waldemar Brodkorb, 10 years ago
commit 322fdd37d3

extra/Configs/Config.arm (+8 -0)

@@ -28,6 +28,7 @@ config CONFIG_ARM_EABI
 config COMPILE_IN_THUMB_MODE
 	bool "Build using Thumb mode"
 	select USE_BX
+	select USE_LDREXSTREX
 	help
 	  Say 'y' here to force building uClibc in thumb mode.
 	  Say 'n' to use your compiler's default mode.
@@ -38,3 +39,10 @@ config USE_BX
 	  Say 'y' to use BX to return from functions on your thumb-aware
 	  processor. Say 'y' if you need to use interworking. Say 'n' if not.
 	  It is safe to say 'y' even if you're not doing interworking.
+
+config USE_LDREXSTREX
+	bool "Use load-store exclusive ASM ops (not supported in SmartFusion)"
+	depends on COMPILE_IN_THUMB_MODE
+	default n
+	help
+	  Say 'y' to use LDREX/STREX ASM ops.
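
For context, the new option gates use of the ARM exclusive-access primitives. Below is a minimal sketch of the pattern, assuming an ARMv6+/Thumb-2 target (the helper name is illustrative, not uClibc code): STREX writes 0 to its status register on success and 1 if exclusivity was lost, so the sequence must loop until the store takes effect.

static inline int atomic_swap_exclusive(int newval, int *addr)
{
	int old, fail;
	do {
		__asm__ __volatile__(
			"ldrex	%0, [%2]\n\t"
			"strex	%1, %3, [%2]"
			/* early-clobber outputs must not alias the inputs */
			: "=&r"(old), "=&r"(fail)
			: "r"(addr), "r"(newval)
			: "memory");
	} while (fail);
	return old;	/* previous value at *addr */
}

Cores without these instructions (the SmartFusion case named in the option text) cannot run this path, which is why the option defaults to 'n'.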

extra/Configs/Config.in (+0 -1)

@@ -524,7 +524,6 @@ config LINUXTHREADS_OLD
 	# linuxthreads and linuxthreads.old need nanosleep()
 	select UCLIBC_HAS_REALTIME
 	depends on !TARGET_arc && \
-		   !TARGET_arm && \
 		   !TARGET_i386 && \
 		   !TARGET_metag && \
 		   !TARGET_mips && \

libc/sysdeps/linux/arm/clone.S (+8 -6)

@@ -26,7 +26,10 @@
 #include <sys/syscall.h>
 #include <bits/arm_asm.h>
 #include <bits/arm_bx.h>
+
+#if defined __UCLIBC_HAS_THREADS__ && !defined __LINUXTHREADS_OLD__
 #include <sysdep-cancel.h>
+#endif
 
 #define CLONE_VM      0x00000100
 #define CLONE_THREAD  0x00010000
@@ -57,23 +60,22 @@ __clone:
 	@ get flags
 	mov	r0, r2
 	@ new sp is already in r1
-	@ load remaining arguments off the stack
-	stmfd	sp!, {r4}
-	ldr	r2, [sp, #4]
-	ldr	r3, [sp, #8]
-	ldr	r4, [sp, #12]
 	DO_CALL (clone)
 	movs	a1, a1
 	blt	__error
-	ldmnefd sp!, {r4}
 	beq	1f
 	bx	lr
 1:
 
 	@ pick the function arg and call address off the stack and execute
 	ldr	r0, [sp, #4]
+#if defined(__USE_BX__)
 	ldr	r1, [sp]
 	bl	2f	@ blx r1
+#else
+	mov     lr, pc
+	ldr     pc, [sp]
+#endif
 
 	@ and we are done, passing the return value through r0
 	bl	HIDDEN_JUMPTARGET(_exit)
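
To make the assembly's job concrete, here is a hypothetical caller of clone() on a noMMU target (child_fn and the stack size are invented for illustration). CLONE_VM is effectively mandatory without an MMU, since the kernel cannot duplicate an address space.

#define _GNU_SOURCE
#include <sched.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/wait.h>

static int child_fn(void *arg)
{
	printf("child says: %s\n", (const char *)arg);
	return 0;
}

int main(void)
{
	size_t stack_size = 16 * 1024;
	char *stack = malloc(stack_size);
	if (stack == NULL)
		return 1;
	/* ARM stacks grow downward, so pass the top of the allocation. */
	int pid = clone(child_fn, stack + stack_size,
			CLONE_VM | SIGCHLD, "hello");
	if (pid < 0)
		return 1;
	waitpid(pid, NULL, 0);	/* SIGCHLD in flags lets waitpid() reap the child */
	free(stack);
	return 0;
}

The new #else branch ("mov lr, pc; ldr pc, [sp]") is the classic pre-interworking ARM call sequence: it saves the return address by hand and jumps, which is safe when the callee is known to use the same instruction set.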

libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h (+49 -7)

@@ -21,12 +21,50 @@
 #ifndef _PT_MACHINE_H
 #define _PT_MACHINE_H   1
 
-#include <features.h>
+#include <sys/syscall.h>
+#include <unistd.h>
 
 #ifndef PT_EI
 # define PT_EI __extern_always_inline
 #endif
 
+#if defined(__thumb__)
+#if defined(__USE_LDREXSTREX__)
+PT_EI long int ldrex(int *spinlock)
+{
+	long int ret;
+	__asm__ __volatile__(
+		"ldrex %0, [%1]\n"
+		: "=r"(ret)
+		: "r"(spinlock) : "memory");
+	return ret;
+}
+
+PT_EI long int strex(int val, int *spinlock)
+{
+	long int ret;
+	__asm__ __volatile__(
+		"strex %0, %1, [%2]\n"
+		: "=r"(ret)
+		: "r" (val), "r"(spinlock) : "memory");
+	return ret;
+}
+
+/* Spinlock implementation; required.  */
+PT_EI long int
+testandset (int *spinlock)
+{
+  register unsigned int ret;
+
+  do {
+	  ret = ldrex(spinlock);
+  } while (strex(1, spinlock));
+
+  return ret;
+}
+
+#else /* __USE_LDREXSTREX__ */
+
 /* This will not work on ARM1 or ARM2 because SWP is lacking on those
    machines.  Unfortunately we have no way to detect this at compile
    time; let's hope nobody tries to use one.  */
@@ -36,8 +74,6 @@ PT_EI long int testandset (int *spinlock);
 PT_EI long int testandset (int *spinlock)
 {
   register unsigned int ret;
-
-#if defined(__thumb__)
   void *pc;
   __asm__ __volatile__(
 	".align 0\n"
@@ -50,15 +86,21 @@ PT_EI long int testandset (int *spinlock)
 	"\t.force_thumb"
 	: "=r"(ret), "=r"(pc)
 	: "0"(1), "r"(spinlock));
-#else
+  return ret;
+}
+#endif
+#else /* __thumb__ */
+
+PT_EI long int testandset (int *spinlock);
+PT_EI long int testandset (int *spinlock)
+{
+  register unsigned int ret;
   __asm__ __volatile__("swp %0, %1, [%2]"
 		       : "=r"(ret)
 		       : "0"(1), "r"(spinlock));
-#endif
-
   return ret;
 }
-
+#endif
 
 /* Get some notion of the current stack.  Need not be exactly the top
    of the stack, just something somewhere in the current frame.  */
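
For reference, linuxthreads-style code consumes testandset() roughly as in the sketch below (the helper names are hypothetical): acquire spins until the previous value was 0, and release is a plain store, which suffices on the uniprocessor noMMU parts this path serves.

static void spin_acquire(int *spinlock)
{
	/* testandset() returns the old value: nonzero means it was held */
	while (testandset(spinlock) != 0)
		continue;
}

static void spin_release(int *spinlock)
{
	*spinlock = 0;	/* plain store; no barrier needed on a single core */
}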