author      Ralf Baechle <ralf@linux-mips.org>   2016-04-15 10:25:33 +0200
committer   Ralf Baechle <ralf@linux-mips.org>   2016-05-09 12:00:05 +0200
commit      05490626d5f535baaaec60cdd37167f1a8031a9b (patch)
tree        d1141f397454ffc247aceb1df65f668471009488 /arch/mips
parent      92e9953c5865b47281d74d43f08d7cecde11e58a (diff)
MIPS: Move definitions for 32/64-bit agnostic inline assembler to new file.
Inspired by Markos Chandras' patch. I just didn't want to pull bitops.h into pgtable.h.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
References: https://patchwork.linux-mips.org/patch/11052/
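For illustration only (not part of this patch): with __LL and __SC from the new <asm/llsc.h>, a caller can write one LL/SC retry loop that assembles to ll/sc on 32-bit kernels and lld/scd on 64-bit kernels, mirroring the pgtable.h hunk below. The function name here is invented for the sketch, and it assumes GCC_OFF_SMALL_ASM() from <asm/compiler.h> for the memory constraint.

#include <asm/compiler.h>	/* GCC_OFF_SMALL_ASM() */
#include <asm/llsc.h>		/* __LL, __SC */

/* Hypothetical example: atomically OR 'bits' into *p via LL/SC. */
static inline void example_atomic_or(volatile unsigned long *p,
				     unsigned long bits)
{
	unsigned long tmp;

	__asm__ __volatile__(
	"	.set	push				\n"
	"	.set	noreorder			\n"
	"1:	" __LL "%[tmp], %[mem]			\n" /* load-linked (ll or lld) */
	"	or	%[tmp], %[tmp], %[bits]		\n"
	"	" __SC "%[tmp], %[mem]			\n" /* store-conditional (sc or scd) */
	"	beqz	%[tmp], 1b			\n" /* retry if the store failed */
	"	 nop					\n"
	"	.set	pop				\n"
	: [tmp] "=&r" (tmp), [mem] "+" GCC_OFF_SMALL_ASM() (*p)
	: [bits] "r" (bits));
}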
Diffstat (limited to 'arch/mips')
-rw-r--r--	arch/mips/include/asm/bitops.h	17
-rw-r--r--	arch/mips/include/asm/llsc.h	28
-rw-r--r--	arch/mips/include/asm/pgtable.h	11
3 files changed, 31 insertions, 25 deletions
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index ce9666cf1499..fa57cef12a46 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -19,25 +19,10 @@
#include <asm/byteorder.h> /* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
+#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

-#if _MIPS_SZLONG == 32
-#define SZLONG_LOG 5
-#define SZLONG_MASK 31UL
-#define __LL "ll "
-#define __SC "sc "
-#define __INS "ins "
-#define __EXT "ext "
-#elif _MIPS_SZLONG == 64
-#define SZLONG_LOG 6
-#define SZLONG_MASK 63UL
-#define __LL "lld "
-#define __SC "scd "
-#define __INS "dins "
-#define __EXT "dext "
-#endif
-
/*
* These are the "slower" versions of the functions and are in bitops.c.
* These functions call raw_local_irq_{save,restore}().
diff --git a/arch/mips/include/asm/llsc.h b/arch/mips/include/asm/llsc.h
new file mode 100644
index 000000000000..c6d17d171147
--- /dev/null
+++ b/arch/mips/include/asm/llsc.h
@@ -0,0 +1,28 @@
+/*
+ * This file is subject to the terms and conditions of the GNU General Public
+ * License. See the file "COPYING" in the main directory of this archive
+ * for more details.
+ *
+ * Macros for 32/64-bit neutral inline assembler
+ */
+
+#ifndef __ASM_LLSC_H
+#define __ASM_LLSC_H
+
+#if _MIPS_SZLONG == 32
+#define SZLONG_LOG 5
+#define SZLONG_MASK 31UL
+#define __LL "ll "
+#define __SC "sc "
+#define __INS "ins "
+#define __EXT "ext "
+#elif _MIPS_SZLONG == 64
+#define SZLONG_LOG 6
+#define SZLONG_MASK 63UL
+#define __LL "lld "
+#define __SC "scd "
+#define __INS "dins "
+#define __EXT "dext "
+#endif
+
+#endif /* __ASM_LLSC_H */
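A hypothetical usage sketch (not part of this patch) for the remaining two macros: SZLONG_LOG and SZLONG_MASK let bitmap code find the containing word and the bit offset for a bit number without caring whether an unsigned long is 32 or 64 bits wide, mirroring how the arch/mips bitops code indexes bitmaps. The function name below is made up for illustration.

#include <asm/llsc.h>	/* SZLONG_LOG, SZLONG_MASK */

/* Hypothetical example: test bit 'nr' in a bitmap of unsigned longs. */
static inline int example_test_bit(unsigned long nr,
				   const volatile unsigned long *addr)
{
	/* word holding the bit: nr / BITS_PER_LONG, i.e. nr >> SZLONG_LOG */
	const volatile unsigned long *word = addr + (nr >> SZLONG_LOG);
	/* offset inside that word: nr % BITS_PER_LONG, i.e. nr & SZLONG_MASK */
	unsigned long bit = nr & SZLONG_MASK;

	return (*word >> bit) & 1UL;
}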
diff --git a/arch/mips/include/asm/pgtable.h b/arch/mips/include/asm/pgtable.h
index 9a4fe0133ff1..25b1c1c08cbc 100644
--- a/arch/mips/include/asm/pgtable.h
+++ b/arch/mips/include/asm/pgtable.h
@@ -187,23 +187,16 @@ static inline void set_pte(pte_t *ptep, pte_t pteval)
* For SMP, multiple CPUs can race, so we need to do
* this atomically.
*/
-#ifdef CONFIG_64BIT
-#define LL_INSN "lld"
-#define SC_INSN "scd"
-#else /* CONFIG_32BIT */
-#define LL_INSN "ll"
-#define SC_INSN "sc"
-#endif
unsigned long page_global = _PAGE_GLOBAL;
unsigned long tmp;
__asm__ __volatile__ (
" .set push\n"
" .set noreorder\n"
- "1: " LL_INSN " %[tmp], %[buddy]\n"
+ "1: " __LL " %[tmp], %[buddy]\n"
" bnez %[tmp], 2f\n"
" or %[tmp], %[tmp], %[global]\n"
- " " SC_INSN " %[tmp], %[buddy]\n"
+ " " __SC " %[tmp], %[buddy]\n"
" beqz %[tmp], 1b\n"
" nop\n"
"2:\n"