Diffstat (limited to 'arch/m68k/include/asm/uaccess_mm.h')
-rw-r--r--  arch/m68k/include/asm/uaccess_mm.h | 42 +++++++++++++++++++++++++++++-------------
1 file changed, 29 insertions(+), 13 deletions(-)
diff --git a/arch/m68k/include/asm/uaccess_mm.h b/arch/m68k/include/asm/uaccess_mm.h
index 7107f3fbdbb..9c80cd515b2 100644
--- a/arch/m68k/include/asm/uaccess_mm.h
+++ b/arch/m68k/include/asm/uaccess_mm.h
@@ -21,6 +21,22 @@ static inline int access_ok(int type, const void __user *addr,
}
/*
+ * Not all variants of the 68k family support the notion of address spaces.
+ * The traditional 680x0 parts do, and they use the sfc/dfc registers and
+ * the "moves" instruction to access user space from kernel space. Other
+ * family members, like ColdFire, don't support this; they have a single
+ * address space and use the usual "move" instruction for user space access.
+ *
+ * Outside of this difference the user space access functions are the same,
+ * so let's keep the code simple and just define what we need to use.
+ */
+#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
+#define MOVES "moves"
+#else
+#define MOVES "move"
+#endif
+
+/*
* The exception table consists of pairs of addresses: the first is the
* address of an instruction that is allowed to fault, and the second is
* the address at which the program should continue. No registers are
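
The MOVES define added above leans on plain C string-literal concatenation: the preprocessor pastes "moves" or "move" into the asm templates in the hunks below, and the compiler merges the adjacent literals before the assembler ever sees them. A minimal stand-alone sketch, assuming a GCC-style m68k toolchain (demo_load_long() is hypothetical and omits the fault fixup machinery the real macros carry):

	#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
	#define MOVES "moves"
	#else
	#define MOVES "move"
	#endif

	/* Hypothetical demo: "1: " MOVES ".l (%1),%0" collapses into the
	 * single literal "1: moves.l (%1),%0" (or "1: move.l (%1),%0")
	 * at compile time, so selecting the instruction costs nothing
	 * at run time. */
	static inline unsigned long demo_load_long(const void *src)
	{
		unsigned long val;

		asm volatile ("1: " MOVES ".l (%1),%0"
			      : "=d" (val)
			      : "a" (src)
			      : "memory");
		return val;
	}
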
@@ -43,7 +59,7 @@ extern int __get_user_bad(void);
#define __put_user_asm(res, x, ptr, bwl, reg, err) \
asm volatile ("\n" \
- "1: moves."#bwl" %2,%1\n" \
+ "1: "MOVES"."#bwl" %2,%1\n" \
"2:\n" \
" .section .fixup,\"ax\"\n" \
" .even\n" \
@@ -83,8 +99,8 @@ asm volatile ("\n" \
{ \
const void __user *__pu_ptr = (ptr); \
asm volatile ("\n" \
- "1: moves.l %2,(%1)+\n" \
- "2: moves.l %R2,(%1)\n" \
+ "1: "MOVES".l %2,(%1)+\n" \
+ "2: "MOVES".l %R2,(%1)\n" \
"3:\n" \
" .section .fixup,\"ax\"\n" \
" .even\n" \
@@ -115,12 +131,12 @@ asm volatile ("\n" \
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({ \
type __gu_val; \
asm volatile ("\n" \
- "1: moves."#bwl" %2,%1\n" \
+ "1: "MOVES"."#bwl" %2,%1\n" \
"2:\n" \
" .section .fixup,\"ax\"\n" \
" .even\n" \
"10: move.l %3,%0\n" \
- " sub."#bwl" %1,%1\n" \
+ " sub.l %1,%1\n" \
" jra 2b\n" \
" .previous\n" \
"\n" \
@@ -152,8 +168,8 @@ asm volatile ("\n" \
const void *__gu_ptr = (ptr); \
u64 __gu_val; \
asm volatile ("\n" \
- "1: moves.l (%2)+,%1\n" \
- "2: moves.l (%2),%R1\n" \
+ "1: "MOVES".l (%2)+,%1\n" \
+ "2: "MOVES".l (%2),%R1\n" \
"3:\n" \
" .section .fixup,\"ax\"\n" \
" .even\n" \
@@ -188,12 +204,12 @@ unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned
#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
asm volatile ("\n" \
- "1: moves."#s1" (%2)+,%3\n" \
+ "1: "MOVES"."#s1" (%2)+,%3\n" \
" move."#s1" %3,(%1)+\n" \
- "2: moves."#s2" (%2)+,%3\n" \
+ "2: "MOVES"."#s2" (%2)+,%3\n" \
" move."#s2" %3,(%1)+\n" \
" .ifnc \""#s3"\",\"\"\n" \
- "3: moves."#s3" (%2)+,%3\n" \
+ "3: "MOVES"."#s3" (%2)+,%3\n" \
" move."#s3" %3,(%1)+\n" \
" .endif\n" \
"4:\n" \
@@ -269,13 +285,13 @@ __constant_copy_from_user(void *to, const void __user *from, unsigned long n)
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3) \
asm volatile ("\n" \
" move."#s1" (%2)+,%3\n" \
- "11: moves."#s1" %3,(%1)+\n" \
+ "11: "MOVES"."#s1" %3,(%1)+\n" \
"12: move."#s2" (%2)+,%3\n" \
- "21: moves."#s2" %3,(%1)+\n" \
+ "21: "MOVES"."#s2" %3,(%1)+\n" \
"22:\n" \
" .ifnc \""#s3"\",\"\"\n" \
" move."#s3" (%2)+,%3\n" \
- "31: moves."#s3" %3,(%1)+\n" \
+ "31: "MOVES"."#s3" %3,(%1)+\n" \
"32:\n" \
" .endif\n" \
"4:\n" \