author:    Markos Chandras <markos.chandras@imgtec.com>  2014-01-03 10:11:45 +0000
committer: Ralf Baechle <ralf@linux-mips.org>  2014-03-26 23:09:15 +0100
commit:    fd9720e96e856160f94907db06b707841cbafb0d
tree:      4586a2d89749ee8a729a1a7f9b05bfac66a30879 /arch
parent:    6d5155c2a618207c6154be2e172ba92676dd82ca
MIPS: lib: memset: Add EVA support for the __bzero function.
Build the __bzero function using the EVA load/store instructions
when operating in EVA mode. This function is only used for
accessing user memory, so there is no need to build two distinct
symbols for user and kernel operations.

Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
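
For context: EVA (Enhanced Virtual Addressing) is a MIPS feature that provides
kernel mode with dedicated instructions (lbe, lhe, lwe, sbe, she, swe, ...) for
reaching the user address space. The patch below makes the EX() fault-handling
macro emit either the normal store or its EVA counterpart, selected by a new
mode argument. As a rough sketch only (assuming a 32-bit kernel, where LONG_S
assembles to sw), the two expansions of EX(LONG_S, v0, 0(a0), handler) would be:

	/* mode == LEGACY_MODE: ordinary store, kernel addresses */
	9:	sw	v0, 0(a0)
		.section __ex_table, "a"
		PTR	9b, handler	/* a fault at 9b jumps to handler */
		.previous

	/* mode == EVA_MODE: EVA store, user address space */
	9:	swe	v0, 0(a0)
		.section __ex_table, "a"
		PTR	9b, handler
		.previous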
Diffstat (limited to 'arch')
-rw-r--r--  arch/mips/lib/memset.S | 27
1 file changed, 23 insertions(+), 4 deletions(-)
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index 05fac199cc0c..7b0e5462ca51 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -37,13 +37,24 @@
 #define LEGACY_MODE 1
 #define EVA_MODE    2
 
+/*
+ * No need to protect it with EVA #ifdefery. The generated block of code
+ * will never be assembled if EVA is not enabled.
+ */
+#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
+#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)
+
 #define EX(insn,reg,addr,handler)			\
-9:	insn	reg, addr;				\
+	.if \mode == LEGACY_MODE;			\
+9:		insn	reg, addr;			\
+	.else;						\
+9:		___BUILD_EVA_INSN(insn, reg, addr);	\
+	.endif;						\
 	.section __ex_table,"a";			\
 	PTR	9b, handler;				\
 	.previous
 
-	.macro	f_fill64 dst, offset, val, fixup
+	.macro	f_fill64 dst, offset, val, fixup, mode
 	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
 	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
 	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
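
The three-level macro chain above is a standard C preprocessor idiom: __EVAFY
pastes tokens with ##, which suppresses expansion of its insn argument, so the
extra ___BUILD_EVA_INSN level exists only to force an alias such as LONG_S to
expand to the real opcode before the "e" suffix is pasted on. Tracing the
EVA_MODE branch of EX() on a 32-bit kernel (LONG_S is sw there; __BUILD_EVA_INSN
itself is assumed to be supplied by asm/asm-eva.h elsewhere in this series):

	EX(LONG_S, \val, 0(a0), \fixup)
	-> ___BUILD_EVA_INSN(LONG_S, \val, 0(a0))	/* EVA branch of EX()   */
	-> __EVAFY(sw, \val, 0(a0))			/* LONG_S expands first */
	-> __BUILD_EVA_INSN(swe, \val, 0(a0))		/* "e" suffix pasted on */
	-> swe	\val, 0(a0)				/* the EVA store        */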
@@ -119,7 +130,7 @@
 	.set		reorder
 1:	PTR_ADDIU	a0, 64
 	R10KCBARRIER(0(ra))
-	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@
+	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
 	bne		t1, a0, 1b
 	.set		noreorder
 
@@ -144,7 +155,7 @@
 	.set		noreorder
 	.set		nomacro
 	/* ... but first do longs ... */
-	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@
+	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
 2:	.set		pop
 	andi		a2, STORMASK		/* At most one long to go */
 
@@ -225,5 +236,13 @@ LEAF(memset)
 #endif
 	or		a1, t1
 1:
+#ifndef CONFIG_EVA
 FEXPORT(__bzero)
+#endif
 	__BUILD_BZERO LEGACY_MODE
+
+#ifdef CONFIG_EVA
+LEAF(__bzero)
+	__BUILD_BZERO EVA_MODE
+END(__bzero)
+#endif
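
Net effect on the generated symbols: without CONFIG_EVA, __bzero remains an
FEXPORT alias into the single legacy body it has always shared with memset;
with CONFIG_EVA, memset keeps the legacy body for kernel addresses and __bzero
becomes a separate LEAF function built from EVA stores, which suffices because
__bzero is only ever applied to user memory. Roughly (illustrative layout, not
the exact assembly):

	!CONFIG_EVA:
		memset:
			...
		__bzero:			/* FEXPORT alias into memset */
		1:	__BUILD_BZERO LEGACY_MODE

	CONFIG_EVA:
		memset:
			...
		1:	__BUILD_BZERO LEGACY_MODE	/* kernel addresses */

		__bzero:				/* separate function */
			__BUILD_BZERO EVA_MODE		/* user addresses   */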