MIPS: Move definitions for 32/64-bit agnostic inline assembler to new file.

Inspired by Markos Chandras' patch.  I just didn't want to pull bitops.h
into pgtable.h.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
References: https://patchwork.linux-mips.org/patch/11052/
Author: Ralf Baechle
Commit: 05490626d5

3 changed files with 31 additions and 25 deletions:

  arch/mips/include/asm/bitops.h   (+1, -16)
  arch/mips/include/asm/llsc.h     (+28, -0)
  arch/mips/include/asm/pgtable.h  (+2, -9)
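
For orientation (this note and the sketch below are not part of the commit): SZLONG_LOG is log2 of the number of bits in a long, and SZLONG_MASK is that bit count minus one, so a bit number nr splits into a word index (nr >> SZLONG_LOG) and an in-word offset (nr & SZLONG_MASK). A hypothetical compile-time check of that invariant, assuming a kernel build environment, could look like:

	#include <linux/bug.h>
	#include <asm/llsc.h>

	/* Hypothetical sanity check, not in the patch: the derived
	 * constants must agree with the real width of long under
	 * either ABI. */
	static inline void llsc_sanity_check(void)
	{
		BUILD_BUG_ON((1UL << SZLONG_LOG) != 8 * sizeof(long));	/* 32 or 64 */
		BUILD_BUG_ON(SZLONG_MASK != 8 * sizeof(long) - 1);	/* 31 or 63 */
	}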

--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -19,25 +19,10 @@
 #include <asm/byteorder.h>		/* sigh ... */
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
+#include <asm/llsc.h>
 #include <asm/sgidefs.h>
 #include <asm/war.h>
 
-#if _MIPS_SZLONG == 32
-#define SZLONG_LOG 5
-#define SZLONG_MASK 31UL
-#define __LL		"ll	"
-#define __SC		"sc	"
-#define __INS		"ins	"
-#define __EXT		"ext	"
-#elif _MIPS_SZLONG == 64
-#define SZLONG_LOG 6
-#define SZLONG_MASK 63UL
-#define __LL		"lld	"
-#define __SC		"scd	"
-#define __INS		"dins	 "
-#define __EXT		"dext	 "
-#endif
-
 /*
  * These are the "slower" versions of the functions and are in bitops.c.
  * These functions call raw_local_irq_{save,restore}().
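
With the macros in place, a bit operation can be written once as an LL/SC retry loop that assembles to ll/sc on 32-bit kernels and lld/scd on 64-bit kernels. A minimal sketch of the pattern (a hypothetical helper modeled on the kernel's set_bit, not code from this patch):

	#include <asm/llsc.h>

	/* Word-size-agnostic atomic bit set: load-linked the containing
	 * word, OR in the bit, store-conditional, and retry if another
	 * CPU raced us. */
	static inline void sketch_set_bit(unsigned long nr,
					  volatile unsigned long *addr)
	{
		unsigned long *m = ((unsigned long *)addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"1:	" __LL "%0, %1		# load-linked		\n"
		"	or	%0, %2		# set the bit		\n"
		"	" __SC "%0, %1		# store-conditional	\n"
		"	beqz	%0, 1b		# retry on failed SC	\n"
		: "=&r" (temp), "+m" (*m)
		: "ir" (1UL << (nr & SZLONG_MASK)));
	}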

--- /dev/null
+++ b/arch/mips/include/asm/llsc.h
@@ -0,0 +1,28 @@
+/*
+ * This file is subject to the terms and conditions of the GNU General Public
+ * License.  See the file "COPYING" in the main directory of this archive
+ * for more details.
+ *
+ * Macros for 32/64-bit neutral inline assembler
+ */
+
+#ifndef __ASM_LLSC_H
+#define __ASM_LLSC_H
+
+#if _MIPS_SZLONG == 32
+#define SZLONG_LOG 5
+#define SZLONG_MASK 31UL
+#define __LL		"ll	"
+#define __SC		"sc	"
+#define __INS		"ins	"
+#define __EXT		"ext	"
+#elif _MIPS_SZLONG == 64
+#define SZLONG_LOG 6
+#define SZLONG_MASK 63UL
+#define __LL		"lld	"
+#define __SC		"scd	"
+#define __INS		"dins	"
+#define __EXT		"dext	"
+#endif
+
+#endif /* __ASM_LLSC_H  */
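
__INS and __EXT similarly pick between the MIPS R2 bitfield instructions ins/ext and their doubleword forms dins/dext. A hedged sketch of a field extract (hypothetical function and field layout, assuming a MIPS32r2/MIPS64r2 or later ISA):

	#include <asm/llsc.h>

	/* Extract a 6-bit field starting at bit 3 of a native word,
	 * i.e. (word >> 3) & 0x3f, via ext on 32-bit or dext on 64-bit. */
	static inline unsigned long sketch_extract_field(unsigned long word)
	{
		unsigned long field;

		__asm__("	" __EXT "%0, %1, 3, 6\n"
			: "=r" (field)
			: "r" (word));
		return field;
	}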

--- a/arch/mips/include/asm/pgtable.h
+++ b/arch/mips/include/asm/pgtable.h
@@ -187,23 +187,16 @@ static inline void set_pte(pte_t *ptep, pte_t pteval)
 		 * For SMP, multiple CPUs can race, so we need to do
 		 * this atomically.
 		 */
-#ifdef CONFIG_64BIT
-#define LL_INSN "lld"
-#define SC_INSN "scd"
-#else /* CONFIG_32BIT */
-#define LL_INSN "ll"
-#define SC_INSN "sc"
-#endif
 		unsigned long page_global = _PAGE_GLOBAL;
 		unsigned long tmp;
 
 		__asm__ __volatile__ (
 			"	.set	push\n"
 			"	.set	noreorder\n"
-			"1:	" LL_INSN "	%[tmp], %[buddy]\n"
+			"1:	" __LL "	%[tmp], %[buddy]\n"
 			"	bnez	%[tmp], 2f\n"
 			"	 or	%[tmp], %[tmp], %[global]\n"
-			"	" SC_INSN "	%[tmp], %[buddy]\n"
+			"	" __SC "	%[tmp], %[buddy]\n"
 			"	beqz	%[tmp], 1b\n"
 			"	 nop\n"
 			"2:\n"