arm64: uaccess: Mask __user pointers for __arch_{clear, copy_*}_user

Like we've done for get_user and put_user, ensure that user pointers
are masked before invoking the underlying __arch_{clear,copy_*}_user
operations.

Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
This commit is contained in:
Will Deacon 2018-02-05 15:34:23 +00:00 committed by Catalin Marinas
parent 84624087dd
commit f71c2ffcb2
4 changed files with 30 additions and 14 deletions

View File: arch/arm64/include/asm/uaccess.h

@@ -403,20 +403,35 @@ do {									\
 #define put_user	__put_user
 
 extern unsigned long __must_check __arch_copy_from_user(void *to, const void __user *from, unsigned long n);
-#define raw_copy_from_user __arch_copy_from_user
+#define raw_copy_from_user(to, from, n)					\
+({									\
+	__arch_copy_from_user((to), __uaccess_mask_ptr(from), (n));	\
+})
+
 extern unsigned long __must_check __arch_copy_to_user(void __user *to, const void *from, unsigned long n);
-#define raw_copy_to_user __arch_copy_to_user
-extern unsigned long __must_check raw_copy_in_user(void __user *to, const void __user *from, unsigned long n);
-extern unsigned long __must_check __clear_user(void __user *addr, unsigned long n);
+#define raw_copy_to_user(to, from, n)					\
+({									\
+	__arch_copy_to_user(__uaccess_mask_ptr(to), (from), (n));	\
+})
+
+extern unsigned long __must_check __arch_copy_in_user(void __user *to, const void __user *from, unsigned long n);
+#define raw_copy_in_user(to, from, n)					\
+({									\
+	__arch_copy_in_user(__uaccess_mask_ptr(to),			\
+			    __uaccess_mask_ptr(from), (n));		\
+})
+
 #define INLINE_COPY_TO_USER
 #define INLINE_COPY_FROM_USER
 
-static inline unsigned long __must_check clear_user(void __user *to, unsigned long n)
+extern unsigned long __must_check __arch_clear_user(void __user *to, unsigned long n);
+static inline unsigned long __must_check __clear_user(void __user *to, unsigned long n)
 {
 	if (access_ok(VERIFY_WRITE, to, n))
-		n = __clear_user(__uaccess_mask_ptr(to), n);
+		n = __arch_clear_user(__uaccess_mask_ptr(to), n);
 	return n;
 }
+#define clear_user	__clear_user
 
 extern long strncpy_from_user(char *dest, const char __user *src, long count);
@@ -430,7 +445,7 @@ extern unsigned long __must_check __copy_user_flushcache(void *to, const void __
 static inline int __copy_from_user_flushcache(void *dst, const void __user *src, unsigned size)
 {
 	kasan_check_write(dst, size);
-	return __copy_user_flushcache(dst, src, size);
+	return __copy_user_flushcache(dst, __uaccess_mask_ptr(src), size);
 }
 #endif

View File: arch/arm64/kernel/arm64ksyms.c

@@ -37,8 +37,8 @@ EXPORT_SYMBOL(clear_page);
 /* user mem (segment) */
 EXPORT_SYMBOL(__arch_copy_from_user);
 EXPORT_SYMBOL(__arch_copy_to_user);
-EXPORT_SYMBOL(__clear_user);
-EXPORT_SYMBOL(raw_copy_in_user);
+EXPORT_SYMBOL(__arch_clear_user);
+EXPORT_SYMBOL(__arch_copy_in_user);
 
 /* physical memory */
 EXPORT_SYMBOL(memstart_addr);

View File: arch/arm64/lib/clear_user.S

@@ -21,7 +21,7 @@
 
 	.text
 
-/* Prototype: int __clear_user(void *addr, size_t sz)
+/* Prototype: int __arch_clear_user(void *addr, size_t sz)
  * Purpose  : clear some user memory
  * Params   : addr - user memory address to clear
  *          : sz   - number of bytes to clear
@@ -29,7 +29,7 @@
  *
  * Alignment fixed up by hardware.
  */
-ENTRY(__clear_user)
+ENTRY(__arch_clear_user)
	uaccess_enable_not_uao x2, x3, x4
	mov	x2, x1			// save the size for fixup return
	subs	x1, x1, #8
@@ -52,7 +52,7 @@ uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
 5:	mov	x0, #0
	uaccess_disable_not_uao x2, x3
	ret
-ENDPROC(__clear_user)
+ENDPROC(__arch_clear_user)
 
	.section .fixup,"ax"
	.align	2

View File: arch/arm64/lib/copy_in_user.S

@@ -64,14 +64,15 @@
 	.endm
 
 end	.req	x5
-ENTRY(raw_copy_in_user)
+
+ENTRY(__arch_copy_in_user)
 	uaccess_enable_not_uao x3, x4, x5
 	add	end, x0, x2
 #include "copy_template.S"
 	uaccess_disable_not_uao x3, x4
 	mov	x0, #0
 	ret
-ENDPROC(raw_copy_in_user)
+ENDPROC(__arch_copy_in_user)
 
 	.section .fixup,"ax"
 	.align	2