Diffstat (limited to 'target/linux/generic/patches-3.8/002-ARM-7668-1-fix-memset-related-crashes-caused-by-rece.patch')
-rw-r--r--  target/linux/generic/patches-3.8/002-ARM-7668-1-fix-memset-related-crashes-caused-by-rece.patch | 83
1 file changed, 83 insertions(+), 0 deletions(-)
diff --git a/target/linux/generic/patches-3.8/002-ARM-7668-1-fix-memset-related-crashes-caused-by-rece.patch b/target/linux/generic/patches-3.8/002-ARM-7668-1-fix-memset-related-crashes-caused-by-rece.patch
new file mode 100644
index 0000000000..e3a0d7baee
--- /dev/null
+++ b/target/linux/generic/patches-3.8/002-ARM-7668-1-fix-memset-related-crashes-caused-by-rece.patch
@@ -0,0 +1,83 @@
+From 418df63adac56841ef6b0f1fcf435bc64d4ed177 Mon Sep 17 00:00:00 2001
+From: Nicolas Pitre <nicolas.pitre@linaro.org>
+Date: Tue, 12 Mar 2013 13:00:42 +0100
+Subject: [PATCH] ARM: 7670/1: fix the memset fix
+
+Commit 455bd4c430b0 ("ARM: 7668/1: fix memset-related crashes caused by
+recent GCC (4.7.2) optimizations") attempted to fix a compliance issue
+with the memset return value. However, the memset itself became broken
+by that patch for misaligned pointers.
+
+This fixes the above by branching over the entry code from the
+misaligned fixup code to avoid reloading the original pointer.
+
+Also, because the function entry alignment is wrong in the Thumb mode
+compilation, that fixup code is moved to the end.
+
+While at it, the entry instructions are slightly reworked to help dual
+issue pipelines.
+
+Signed-off-by: Nicolas Pitre <nico@linaro.org>
+Tested-by: Alexander Holler <holler@ahsoftware.de>
+Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
+---
+ arch/arm/lib/memset.S | 33 +++++++++++++--------------------
+ 1 file changed, 13 insertions(+), 20 deletions(-)
+
+diff --git a/arch/arm/lib/memset.S b/arch/arm/lib/memset.S
+index d912e7397ecc..94b0650ea98f 100644
+--- a/arch/arm/lib/memset.S
++++ b/arch/arm/lib/memset.S
+@@ -14,31 +14,15 @@
+
+ .text
+ .align 5
+- .word 0
+-
+-1: subs r2, r2, #4 @ 1 do we have enough
+- blt 5f @ 1 bytes to align with?
+- cmp r3, #2 @ 1
+- strltb r1, [ip], #1 @ 1
+- strleb r1, [ip], #1 @ 1
+- strb r1, [ip], #1 @ 1
+- add r2, r2, r3 @ 1 (r2 = r2 - (4 - r3))
+-/*
+- * The pointer is now aligned and the length is adjusted. Try doing the
+- * memset again.
+- */
+
+ ENTRY(memset)
+-/*
+- * Preserve the contents of r0 for the return value.
+- */
+- mov ip, r0
+- ands r3, ip, #3 @ 1 unaligned?
+- bne 1b @ 1
++ ands r3, r0, #3 @ 1 unaligned?
++ mov ip, r0 @ preserve r0 as return value
++ bne 6f @ 1
+ /*
+ * we know that the pointer in ip is aligned to a word boundary.
+ */
+- orr r1, r1, r1, lsl #8
++1: orr r1, r1, r1, lsl #8
+ orr r1, r1, r1, lsl #16
+ mov r3, r1
+ cmp r2, #16
+@@ -127,4 +111,13 @@ ENTRY(memset)
+ tst r2, #1
+ strneb r1, [ip], #1
+ mov pc, lr
++
++6: subs r2, r2, #4 @ 1 do we have enough
++ blt 5b @ 1 bytes to align with?
++ cmp r3, #2 @ 1
++ strltb r1, [ip], #1 @ 1
++ strleb r1, [ip], #1 @ 1
++ strb r1, [ip], #1 @ 1
++ add r2, r2, r3 @ 1 (r2 = r2 - (4 - r3))
++ b 1b
+ ENDPROC(memset)
+--
+1.8.3.2
+
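For context on the "compliance issue" mentioned in the commit message: GCC 4.7.2 began assuming that memset() returns its first argument and may reuse the value that comes back in r0 for subsequent accesses to the destination. A minimal sketch of the code pattern that crashed before 7668/1 preserved r0 (the struct and function here are hypothetical, for illustration only):

#include <string.h>

struct item {			/* hypothetical type, not from the kernel */
	int a;
	int b;
};

void init_item(struct item *it)
{
	/*
	 * The compiler may lower this pair so that the store to it->b
	 * uses the pointer memset() returned in r0 instead of the
	 * original argument.  If the assembly clobbers r0, the store
	 * hits a bogus address.
	 */
	memset(it, 0, sizeof(*it));
	it->b = 1;
}
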
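The regression this follow-up repairs only appears when the destination is not word-aligned: in the 7668/1 version the alignment fixup sat before ENTRY(memset) and fell through into it, so the "mov ip, r0" there reloaded the original unaligned pointer and threw away the fixup's work. A small userspace test sketch (assumed names, not part of the patch) that exercises both contracts, the return value and the misaligned fill:

#include <assert.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
	unsigned char buf[64];

	/* Try every possible word misalignment of the destination. */
	for (int offset = 0; offset < 4; offset++) {
		memset(buf, 0xAA, sizeof(buf));	/* poison the buffer */

		unsigned char *dst = buf + offset;
		void *ret = memset(dst, 0x55, 32);

		/* Return-value contract relied on by GCC 4.7.2+. */
		assert(ret == dst);

		/* Exactly 32 bytes starting at dst must be filled... */
		for (int i = 0; i < 32; i++)
			assert(dst[i] == 0x55);

		/* ...and the surrounding bytes must be untouched. */
		for (int i = 0; i < offset; i++)
			assert(buf[i] == 0xAA);
		for (int i = offset + 32; i < (int)sizeof(buf); i++)
			assert(buf[i] == 0xAA);
	}
	puts("memset return-value and alignment checks passed");
	return 0;
}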