x86/uaccess: Enable hardened usercopy
Enables CONFIG_HARDENED_USERCOPY checks on x86. This is done both in copy_*_user() and __copy_*_user() because copy_*_user() actually calls down to _copy_*_user() and not __copy_*_user().

Based on code from PaX and grsecurity.

Signed-off-by: Kees Cook <keescook@chromium.org>
Tested-by: Valdis Kletnieks <valdis.kletnieks@vt.edu>
This commit is contained in:
parent
f5509cc18d
commit
5b710f34e1
|
@ -80,6 +80,7 @@ config X86
|
||||||
select HAVE_ALIGNED_STRUCT_PAGE if SLUB
|
select HAVE_ALIGNED_STRUCT_PAGE if SLUB
|
||||||
select HAVE_AOUT if X86_32
|
select HAVE_AOUT if X86_32
|
||||||
select HAVE_ARCH_AUDITSYSCALL
|
select HAVE_ARCH_AUDITSYSCALL
|
||||||
|
select HAVE_ARCH_HARDENED_USERCOPY
|
||||||
select HAVE_ARCH_HUGE_VMAP if X86_64 || X86_PAE
|
select HAVE_ARCH_HUGE_VMAP if X86_64 || X86_PAE
|
||||||
select HAVE_ARCH_JUMP_LABEL
|
select HAVE_ARCH_JUMP_LABEL
|
||||||
select HAVE_ARCH_KASAN if X86_64 && SPARSEMEM_VMEMMAP
|
select HAVE_ARCH_KASAN if X86_64 && SPARSEMEM_VMEMMAP
|
||||||
|
|
|
@ -742,9 +742,10 @@ copy_from_user(void *to, const void __user *from, unsigned long n)
|
||||||
* case, and do only runtime checking for non-constant sizes.
|
* case, and do only runtime checking for non-constant sizes.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
if (likely(sz < 0 || sz >= n))
|
if (likely(sz < 0 || sz >= n)) {
|
||||||
|
check_object_size(to, n, false);
|
||||||
n = _copy_from_user(to, from, n);
|
n = _copy_from_user(to, from, n);
|
||||||
else if(__builtin_constant_p(n))
|
} else if (__builtin_constant_p(n))
|
||||||
copy_from_user_overflow();
|
copy_from_user_overflow();
|
||||||
else
|
else
|
||||||
__copy_from_user_overflow(sz, n);
|
__copy_from_user_overflow(sz, n);
|
||||||
|
@ -762,9 +763,10 @@ copy_to_user(void __user *to, const void *from, unsigned long n)
|
||||||
might_fault();
|
might_fault();
|
||||||
|
|
||||||
/* See the comment in copy_from_user() above. */
|
/* See the comment in copy_from_user() above. */
|
||||||
if (likely(sz < 0 || sz >= n))
|
if (likely(sz < 0 || sz >= n)) {
|
||||||
|
check_object_size(from, n, true);
|
||||||
n = _copy_to_user(to, from, n);
|
n = _copy_to_user(to, from, n);
|
||||||
else if(__builtin_constant_p(n))
|
} else if (__builtin_constant_p(n))
|
||||||
copy_to_user_overflow();
|
copy_to_user_overflow();
|
||||||
else
|
else
|
||||||
__copy_to_user_overflow(sz, n);
|
__copy_to_user_overflow(sz, n);
|
||||||
|
|
|
@ -37,6 +37,7 @@ unsigned long __must_check __copy_from_user_ll_nocache_nozero
|
||||||
static __always_inline unsigned long __must_check
|
static __always_inline unsigned long __must_check
|
||||||
__copy_to_user_inatomic(void __user *to, const void *from, unsigned long n)
|
__copy_to_user_inatomic(void __user *to, const void *from, unsigned long n)
|
||||||
{
|
{
|
||||||
|
check_object_size(from, n, true);
|
||||||
return __copy_to_user_ll(to, from, n);
|
return __copy_to_user_ll(to, from, n);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -95,6 +96,7 @@ static __always_inline unsigned long
|
||||||
__copy_from_user(void *to, const void __user *from, unsigned long n)
|
__copy_from_user(void *to, const void __user *from, unsigned long n)
|
||||||
{
|
{
|
||||||
might_fault();
|
might_fault();
|
||||||
|
check_object_size(to, n, false);
|
||||||
if (__builtin_constant_p(n)) {
|
if (__builtin_constant_p(n)) {
|
||||||
unsigned long ret;
|
unsigned long ret;
|
||||||
|
|
||||||
|
|
|
@ -54,6 +54,7 @@ int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size)
|
||||||
{
|
{
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
|
|
||||||
|
check_object_size(dst, size, false);
|
||||||
if (!__builtin_constant_p(size))
|
if (!__builtin_constant_p(size))
|
||||||
return copy_user_generic(dst, (__force void *)src, size);
|
return copy_user_generic(dst, (__force void *)src, size);
|
||||||
switch (size) {
|
switch (size) {
|
||||||
|
@ -119,6 +120,7 @@ int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size)
|
||||||
{
|
{
|
||||||
int ret = 0;
|
int ret = 0;
|
||||||
|
|
||||||
|
check_object_size(src, size, true);
|
||||||
if (!__builtin_constant_p(size))
|
if (!__builtin_constant_p(size))
|
||||||
return copy_user_generic((__force void *)dst, src, size);
|
return copy_user_generic((__force void *)dst, src, size);
|
||||||
switch (size) {
|
switch (size) {
|
||||||
|
|
Loading…
Reference in New Issue