saner calling conventions for csum_and_copy_..._user()
All callers of these primitives will
  * discard anything we might have copied in case of error
  * ignore the csum value in case of error
  * always pass 0xffffffff as the initial sum, so the resulting
    csum value (in case of success, that is) will never be 0.

That suggests the following calling conventions:
  * don't pass err_ptr - just return 0 on error.
  * don't bother with zeroing the destination, etc. in case of error.
  * don't pass the initial sum - just use 0xffffffff.

This commit does the minimal conversion in the instances of csum_and_copy_...(); the changes to the actual asm code behind them are done later in the series. Note that this asm code is often shared with csum_partial_copy_nocheck(); the difference is that csum_partial_copy_nocheck() passes 0 for the initial sum while the csum_and_copy_..._user() helpers pass 0xffffffff. Fortunately, we are free to pass 0xffffffff in all cases, and subsequent patches will use that freedom without any special comments.

A part that could be split off: parisc and uml/i386 claimed to have csum_and_copy_to_user() instances of their own, but those were identical to the generic one, so we simply drop them. Not sure if it's worth a separate commit...

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent 99a2c96d52
commit c693cc4676
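To make the convention change concrete, here is a small self-contained userspace sketch (illustration only: old_style() and new_style() are hypothetical stand-ins, not the kernel primitives) contrasting an old-style caller, which must supply an initial sum and check a separate error pointer, with a new-style caller, which starts from 0xffffffff internally and treats a return value of 0 as the error indication:

/*
 * Illustration only: hypothetical stand-ins for the old and new
 * csum_and_copy_..._user() calling conventions, not kernel code.
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

typedef uint32_t wsum;			/* stand-in for __wsum */

/* old convention: caller supplies the initial sum and an error pointer */
static wsum old_style(const void *src, void *dst, int len, wsum sum, int *err)
{
	if (!src) {			/* simulate a faulting source pointer */
		*err = -14;		/* -EFAULT */
		return sum;
	}
	memcpy(dst, src, len);
	/* ...the copied data would be folded into sum here... */
	return sum;
}

/* new convention: always start from 0xffffffff, return 0 on error */
static wsum new_style(const void *src, void *dst, int len)
{
	wsum sum = ~0U;

	if (!src)
		return 0;		/* 0 can never be a valid result */
	memcpy(dst, src, len);
	/* ...the copied data would be folded into sum here... */
	return sum;
}

int main(void)
{
	char buf[16];
	int err = 0;

	/* old-style caller: extra argument in, extra variable to check */
	wsum s1 = old_style("payload", buf, 8, ~0U, &err);
	if (err)
		s1 = 0;

	/* new-style caller: the return value alone carries the error */
	wsum s2 = new_style("payload", buf, 8);

	printf("old: %u, new: %u\n", (unsigned)s1, (unsigned)s2);
	return 0;
}

The non-zero-result guarantee is what lets the lib/iov_iter.c callers at the end of this diff drop their local err variables and test the returned csum directly.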
@@ -43,7 +43,7 @@ extern __wsum csum_partial(const void *buff, int len, __wsum sum);
  */
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 #define _HAVE_ARCH_CSUM_AND_COPY
-__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len, __wsum sum, int *errp);
+__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len);
 
 __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len);
 
@@ -325,30 +325,27 @@ csum_partial_cfu_unaligned(const unsigned long __user * src,
 }
 
 __wsum
-csum_and_copy_from_user(const void __user *src, void *dst, int len,
-			__wsum sum, int *errp)
+csum_and_copy_from_user(const void __user *src, void *dst, int len)
 {
-	unsigned long checksum = (__force u32) sum;
+	unsigned long checksum = ~0U;
 	unsigned long soff = 7 & (unsigned long) src;
 	unsigned long doff = 7 & (unsigned long) dst;
+	int err = 0;
 
 	if (len) {
-		if (!access_ok(src, len)) {
-			if (errp) *errp = -EFAULT;
-			memset(dst, 0, len);
-			return sum;
-		}
+		if (!access_ok(src, len))
+			return 0;
 		if (!doff) {
 			if (!soff)
 				checksum = csum_partial_cfu_aligned(
 					(const unsigned long __user *) src,
 					(unsigned long *) dst,
-					len-8, checksum, errp);
+					len-8, checksum, &err);
 			else
 				checksum = csum_partial_cfu_dest_aligned(
 					(const unsigned long __user *) src,
 					(unsigned long *) dst,
-					soff, len-8, checksum, errp);
+					soff, len-8, checksum, &err);
 		} else {
 			unsigned long partial_dest;
 			ldq_u(partial_dest, dst);
@@ -357,15 +354,15 @@ csum_and_copy_from_user(const void __user *src, void *dst, int len,
 					(const unsigned long __user *) src,
 					(unsigned long *) dst,
 					doff, len-8, checksum,
-					partial_dest, errp);
+					partial_dest, &err);
 			else
 				checksum = csum_partial_cfu_unaligned(
 					(const unsigned long __user *) src,
 					(unsigned long *) dst,
 					soff, doff, len-8, checksum,
-					partial_dest, errp);
+					partial_dest, &err);
 		}
-		checksum = from64to16 (checksum);
+		checksum = err ? 0 : from64to16 (checksum);
 	}
 	return (__force __wsum)checksum;
 }
 
@@ -378,7 +375,7 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len)
 	mm_segment_t oldfs = get_fs();
 	set_fs(KERNEL_DS);
 	checksum = csum_and_copy_from_user((__force const void __user *)src,
-						dst, len, 0, NULL);
+						dst, len);
 	set_fs(oldfs);
 	return checksum;
 }
 
@@ -43,16 +43,15 @@ csum_partial_copy_from_user(const void __user *src, void *dst, int len, __wsum s
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 #define _HAVE_ARCH_CSUM_AND_COPY
 static inline
-__wsum csum_and_copy_from_user (const void __user *src, void *dst,
-				int len, __wsum sum, int *err_ptr)
+__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len)
 {
-	if (access_ok(src, len))
-		return csum_partial_copy_from_user(src, dst, len, sum, err_ptr);
+	__wsum sum;
+	int err = 0;
 
-	if (len)
-		*err_ptr = -EFAULT;
+	if (!access_ok(src, len))
+		return 0;
 
-	return sum;
+	sum = csum_partial_copy_from_user(src, dst, len, ~0U, &err);
+	return err ? 0 : sum;
 }
 
 /*
 
@@ -34,8 +34,7 @@ __wsum csum_partial(const void *buff, int len, __wsum sum);
 #define _HAVE_ARCH_CSUM_AND_COPY
 extern __wsum csum_and_copy_from_user(const void __user *src,
 						void *dst,
-						int len, __wsum sum,
-						int *csum_err);
+						int len);
 
 extern __wsum csum_partial_copy_nocheck(const void *src,
 						void *dst, int len);
 
@@ -129,8 +129,7 @@ EXPORT_SYMBOL(csum_partial);
  */
 
 __wsum
-csum_and_copy_from_user(const void __user *src, void *dst,
-			int len, __wsum sum, int *csum_err)
+csum_and_copy_from_user(const void __user *src, void *dst, int len)
 {
 	/*
 	 * GCC doesn't like more than 10 operands for the asm
@@ -138,6 +137,7 @@ csum_and_copy_from_user(const void __user *src, void *dst,
 	 * code.
 	 */
 	unsigned long tmp1, tmp2;
+	__wsum sum = ~0U;
 
 	__asm__("movel %2,%4\n\t"
 		"btst #1,%4\n\t" /* Check alignment */
@@ -311,9 +311,7 @@ csum_and_copy_from_user(const void __user *src, void *dst,
 	    : "0" (sum), "1" (len), "2" (src), "3" (dst)
 	    );
 
-	*csum_err = tmp2;
-
-	return(sum);
+	return tmp2 ? 0 : sum;
 }
 
 EXPORT_SYMBOL(csum_and_copy_from_user);
 
@@ -60,16 +60,15 @@ __wsum csum_partial_copy_from_user(const void __user *src, void *dst, int len,
 
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 static inline
-__wsum csum_and_copy_from_user(const void __user *src, void *dst,
-			       int len, __wsum sum, int *err_ptr)
+__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len)
 {
-	if (access_ok(src, len))
-		return csum_partial_copy_from_user(src, dst, len, sum,
-						   err_ptr);
-	if (len)
-		*err_ptr = -EFAULT;
+	__wsum sum = ~0U;
+	int err = 0;
 
-	return sum;
+	if (!access_ok(src, len))
+		return 0;
+	sum = csum_partial_copy_from_user(src, dst, len, sum, &err);
+	return err ? 0 : sum;
 }
 
 /*
@@ -77,24 +76,23 @@ __wsum csum_and_copy_from_user(const void __user *src, void *dst,
  */
 #define HAVE_CSUM_COPY_USER
 static inline
-__wsum csum_and_copy_to_user(const void *src, void __user *dst, int len,
-			     __wsum sum, int *err_ptr)
+__wsum csum_and_copy_to_user(const void *src, void __user *dst, int len)
 {
-	might_fault();
-	if (access_ok(dst, len)) {
-		if (uaccess_kernel())
-			return __csum_partial_copy_kernel(src,
-							  (__force void *)dst,
-							  len, sum, err_ptr);
-		else
-			return __csum_partial_copy_to_user(src,
-							   (__force void *)dst,
-							   len, sum, err_ptr);
-	}
-	if (len)
-		*err_ptr = -EFAULT;
+	int err = 0;
+	__wsum sum = ~0U;
 
-	return (__force __wsum)-1; /* invalid checksum */
+	might_fault();
+	if (!access_ok(dst, len))
+		return 0;
+	if (uaccess_kernel())
+		sum = __csum_partial_copy_kernel(src,
+						 (__force void *)dst,
+						 len, sum, &err);
+	else
+		sum = __csum_partial_copy_to_user(src,
+						  (__force void *)dst,
+						  len, sum, &err);
+	return err ? 0 : sum;
 }
 
 /*
 
@@ -173,25 +173,5 @@ static __inline__ __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
 	return csum_fold(sum);
 }
 
-/*
- *	Copy and checksum to user
- */
-#define HAVE_CSUM_COPY_USER
-static __inline__ __wsum csum_and_copy_to_user(const void *src,
-					       void __user *dst,
-					       int len, __wsum sum,
-					       int *err_ptr)
-{
-	/* code stolen from include/asm-mips64 */
-	sum = csum_partial(src, len, sum);
-
-	if (copy_to_user(dst, src, len)) {
-		*err_ptr = -EFAULT;
-		return (__force __wsum)-1;
-	}
-
-	return sum;
-}
-
 #endif
 
@@ -24,10 +24,10 @@ extern __wsum csum_partial_copy_generic(const void *src, void *dst,
 
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 extern __wsum csum_and_copy_from_user(const void __user *src, void *dst,
-				      int len, __wsum sum, int *err_ptr);
+				      int len);
 #define HAVE_CSUM_COPY_USER
 extern __wsum csum_and_copy_to_user(const void *src, void __user *dst,
-				    int len, __wsum sum, int *err_ptr);
+				    int len);
 
 #define _HAVE_ARCH_CSUM_AND_COPY
 #define csum_partial_copy_nocheck(src, dst, len)   \
 
@@ -12,82 +12,56 @@
 #include <linux/uaccess.h>
 
 __wsum csum_and_copy_from_user(const void __user *src, void *dst,
-			       int len, __wsum sum, int *err_ptr)
+			       int len)
 {
 	unsigned int csum;
+	int err = 0;
 
 	might_sleep();
+
+	if (unlikely(!access_ok(src, len)))
+		return 0;
+
 	allow_read_from_user(src, len);
 
-	*err_ptr = 0;
-
-	if (!len) {
-		csum = 0;
-		goto out;
-	}
-
-	if (unlikely((len < 0) || !access_ok(src, len))) {
-		*err_ptr = -EFAULT;
-		csum = (__force unsigned int)sum;
-		goto out;
-	}
-
 	csum = csum_partial_copy_generic((void __force *)src, dst,
-					 len, sum, err_ptr, NULL);
+					 len, ~0U, &err, NULL);
 
-	if (unlikely(*err_ptr)) {
+	if (unlikely(err)) {
 		int missing = __copy_from_user(dst, src, len);
 
-		if (missing) {
-			memset(dst + len - missing, 0, missing);
-			*err_ptr = -EFAULT;
-		} else {
-			*err_ptr = 0;
-		}
-
-		csum = csum_partial(dst, len, sum);
+		if (missing)
+			csum = 0;
+		else
+			csum = csum_partial(dst, len, ~0U);
 	}
 
-out:
 	prevent_read_from_user(src, len);
 	return (__force __wsum)csum;
 }
 EXPORT_SYMBOL(csum_and_copy_from_user);
 
-__wsum csum_and_copy_to_user(const void *src, void __user *dst, int len,
-			     __wsum sum, int *err_ptr)
+__wsum csum_and_copy_to_user(const void *src, void __user *dst, int len)
 {
 	unsigned int csum;
+	int err = 0;
 
 	might_sleep();
+	if (unlikely(!access_ok(dst, len)))
+		return 0;
+
 	allow_write_to_user(dst, len);
 
-	*err_ptr = 0;
-
-	if (!len) {
-		csum = 0;
-		goto out;
-	}
-
-	if (unlikely((len < 0) || !access_ok(dst, len))) {
-		*err_ptr = -EFAULT;
-		csum = -1; /* invalid checksum */
-		goto out;
-	}
-
 	csum = csum_partial_copy_generic(src, (void __force *)dst,
-					 len, sum, NULL, err_ptr);
+					 len, ~0U, NULL, &err);
 
-	if (unlikely(*err_ptr)) {
-		csum = csum_partial(src, len, sum);
+	if (unlikely(err)) {
+		csum = csum_partial(src, len, ~0U);
 
-		if (copy_to_user(dst, src, len)) {
-			*err_ptr = -EFAULT;
-			csum = -1; /* invalid checksum */
-		}
+		if (copy_to_user(dst, src, len))
+			csum = 0;
 	}
 
-out:
 	prevent_write_to_user(dst, len);
 	return (__force __wsum)csum;
 }
 
@@ -50,15 +50,16 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len)
 
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 static inline
-__wsum csum_and_copy_from_user(const void __user *src, void *dst,
-			       int len, __wsum sum, int *err_ptr)
+__wsum csum_and_copy_from_user(const void __user *src, void *dst, int len)
 {
-	if (access_ok(src, len))
-		return csum_partial_copy_generic((__force const void *)src, dst,
-						 len, sum, err_ptr, NULL);
-	if (len)
-		*err_ptr = -EFAULT;
-	return sum;
+	int err = 0;
+	__wsum sum = ~0U;
+
+	if (!access_ok(src, len))
+		return 0;
+	sum = csum_partial_copy_generic((__force const void *)src, dst,
+					len, sum, &err, NULL);
+	return err ? 0 : sum;
 }
 
 /*
@@ -199,16 +200,15 @@ static inline __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
 #define HAVE_CSUM_COPY_USER
 static inline __wsum csum_and_copy_to_user(const void *src,
 					   void __user *dst,
-					   int len, __wsum sum,
-					   int *err_ptr)
+					   int len)
 {
-	if (access_ok(dst, len))
-		return csum_partial_copy_generic((__force const void *)src,
-						 dst, len, sum, NULL, err_ptr);
+	int err = 0;
+	__wsum sum = ~0U;
 
-	if (len)
-		*err_ptr = -EFAULT;
-
-	return (__force __wsum)-1; /* invalid checksum */
+	if (!access_ok(dst, len))
+		return 0;
+	sum = csum_partial_copy_generic((__force const void *)src,
+					dst, len, sum, NULL, &err);
+	return err ? 0 : sum;
 }
 #endif /* __ASM_SH_CHECKSUM_H */
 
@@ -60,19 +60,16 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len)
 }
 
 static inline __wsum
-csum_and_copy_from_user(const void __user *src, void *dst, int len,
-			__wsum sum, int *err)
+csum_and_copy_from_user(const void __user *src, void *dst, int len)
 {
 	register unsigned long ret asm("o0") = (unsigned long)src;
 	register char *d asm("o1") = dst;
 	register int l asm("g1") = len;
-	register __wsum s asm("g7") = sum;
+	register __wsum s asm("g7") = ~0U;
+	int err = 0;
 
-	if (unlikely(!access_ok(src, len))) {
-		if (len)
-			*err = -EFAULT;
-		return sum;
-	}
+	if (unlikely(!access_ok(src, len)))
+		return 0;
 
 	__asm__ __volatile__ (
 	".section __ex_table,#alloc\n\t"
@@ -83,26 +80,25 @@ csum_and_copy_from_user(const void __user *src, void *dst, int len,
 	"call __csum_partial_copy_sparc_generic\n\t"
 	" st %8, [%%sp + 64]\n"
 	: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
-	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err)
+	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err)
 	: "o2", "o3", "o4", "o5", "o7", "g2", "g3", "g4", "g5",
 	  "cc", "memory");
-	return (__force __wsum)ret;
+	return err ? 0 : (__force __wsum)ret;
 }
 
 #define HAVE_CSUM_COPY_USER
 
 static inline __wsum
-csum_and_copy_to_user(const void *src, void __user *dst, int len,
-		      __wsum sum, int *err)
+csum_and_copy_to_user(const void *src, void __user *dst, int len)
 {
-	if (!access_ok(dst, len)) {
-		*err = -EFAULT;
-		return sum;
-	} else {
 	register unsigned long ret asm("o0") = (unsigned long)src;
 	register char __user *d asm("o1") = dst;
 	register int l asm("g1") = len;
-	register __wsum s asm("g7") = sum;
+	register __wsum s asm("g7") = ~0U;
+	int err = 0;
+
+	if (!access_ok(dst, len))
+		return 0;
 
 	__asm__ __volatile__ (
 	".section __ex_table,#alloc\n\t"
@@ -113,12 +109,11 @@ csum_and_copy_to_user(const void *src, void __user *dst, int len,
 	"call __csum_partial_copy_sparc_generic\n\t"
 	" st %8, [%%sp + 64]\n"
 	: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
-	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err)
+	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err)
 	: "o2", "o3", "o4", "o5", "o7",
 	  "g2", "g3", "g4", "g5",
 	  "cc", "memory");
-	return (__force __wsum)ret;
-	}
+	return err ? 0 : (__force __wsum)ret;
 }
 
 /* ihl is always 5 or greater, almost always is 5, and iph is word aligned
 
@@ -51,12 +51,11 @@ long __csum_partial_copy_from_user(const void __user *src,
 
 static inline __wsum
 csum_and_copy_from_user(const void __user *src,
-			void *dst, int len,
-			__wsum sum, int *err)
+			void *dst, int len)
 {
-	long ret = __csum_partial_copy_from_user(src, dst, len, sum);
+	long ret = __csum_partial_copy_from_user(src, dst, len, ~0U);
 	if (ret < 0)
-		*err = -EFAULT;
+		return 0;
 	return (__force __wsum) ret;
 }
 
@@ -70,12 +69,11 @@ long __csum_partial_copy_to_user(const void *src,
 
 static inline __wsum
 csum_and_copy_to_user(const void *src,
-		      void __user *dst, int len,
-		      __wsum sum, int *err)
+		      void __user *dst, int len)
 {
-	long ret = __csum_partial_copy_to_user(src, dst, len, sum);
+	long ret = __csum_partial_copy_to_user(src, dst, len, ~0U);
 	if (ret < 0)
-		*err = -EFAULT;
+		return 0;
 	return (__force __wsum) ret;
 }
 
@@ -44,22 +44,19 @@ static inline __wsum csum_partial_copy_nocheck(const void *src, void *dst, int l
 }
 
 static inline __wsum csum_and_copy_from_user(const void __user *src,
-					     void *dst, int len,
-					     __wsum sum, int *err_ptr)
+					     void *dst, int len)
 {
 	__wsum ret;
+	int err = 0;
 
 	might_sleep();
-	if (!user_access_begin(src, len)) {
-		if (len)
-			*err_ptr = -EFAULT;
-		return sum;
-	}
+	if (!user_access_begin(src, len))
+		return 0;
 	ret = csum_partial_copy_generic((__force void *)src, dst,
-					len, sum, err_ptr, NULL);
+					len, ~0U, &err, NULL);
 	user_access_end();
 
-	return ret;
+	return err ? 0 : ret;
 }
 
 /*
@@ -177,23 +174,19 @@ static inline __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
  */
 static inline __wsum csum_and_copy_to_user(const void *src,
 					    void __user *dst,
-					    int len, __wsum sum,
-					    int *err_ptr)
+					    int len)
 {
 	__wsum ret;
+	int err = 0;
 
 	might_sleep();
-	if (user_access_begin(dst, len)) {
+	if (!user_access_begin(dst, len))
+		return 0;
+
 	ret = csum_partial_copy_generic(src, (__force void *)dst,
-					len, sum, NULL, err_ptr);
+					len, ~0U, NULL, &err);
 	user_access_end();
-	return ret;
-	}
-
-	if (len)
-		*err_ptr = -EFAULT;
-
-	return (__force __wsum)-1; /* invalid checksum */
+	return err ? 0 : ret;
 }
 
 #endif /* _ASM_X86_CHECKSUM_32_H */
 
@@ -135,10 +135,8 @@ extern __visible __wsum csum_partial_copy_generic(const void *src, const void *d
 						  int *src_err_ptr, int *dst_err_ptr);
 
 
-extern __wsum csum_and_copy_from_user(const void __user *src, void *dst,
-				      int len, __wsum isum, int *errp);
-extern __wsum csum_and_copy_to_user(const void *src, void __user *dst,
-				    int len, __wsum isum, int *errp);
+extern __wsum csum_and_copy_from_user(const void __user *src, void *dst, int len);
+extern __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len);
 extern __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len);
 
 /**
 
@@ -22,13 +22,15 @@
  */
 __wsum
 csum_and_copy_from_user(const void __user *src, void *dst,
-			int len, __wsum isum, int *errp)
+			int len)
 {
+	int err = 0;
+	__wsum isum = ~0U;
+
 	might_sleep();
-	*errp = 0;
 
 	if (!user_access_begin(src, len))
-		goto out_err;
+		return 0;
 
 	/*
 	 * Why 6, not 7? To handle odd addresses aligned we
@@ -53,20 +55,15 @@ csum_and_copy_from_user(const void __user *src, void *dst,
 		}
 	}
 	isum = csum_partial_copy_generic((__force const void *)src,
-				dst, len, isum, errp, NULL);
+				dst, len, isum, &err, NULL);
 	user_access_end();
-	if (unlikely(*errp))
-		goto out_err;
-
+	if (unlikely(err))
+		isum = 0;
 	return isum;
 
 out:
 	user_access_end();
-out_err:
-	*errp = -EFAULT;
-	memset(dst, 0, len);
-
-	return isum;
+	return 0;
 }
 EXPORT_SYMBOL(csum_and_copy_from_user);
 
@@ -83,16 +80,15 @@ EXPORT_SYMBOL(csum_and_copy_from_user);
  */
 __wsum
 csum_and_copy_to_user(const void *src, void __user *dst,
-		      int len, __wsum isum, int *errp)
+		      int len)
 {
-	__wsum ret;
+	__wsum ret, isum = ~0U;
+	int err = 0;
 
 	might_sleep();
 
-	if (!user_access_begin(dst, len)) {
-		*errp = -EFAULT;
+	if (!user_access_begin(dst, len))
 		return 0;
-	}
 
 	if (unlikely((unsigned long)dst & 6)) {
 		while (((unsigned long)dst & 6) && len >= 2) {
@@ -107,15 +103,13 @@ csum_and_copy_to_user(const void *src, void __user *dst,
 		}
 	}
 
-	*errp = 0;
 	ret = csum_partial_copy_generic(src, (void __force *)dst,
-					len, isum, NULL, errp);
+					len, isum, NULL, &err);
 	user_access_end();
-	return ret;
+	return err ? 0 : ret;
 out:
 	user_access_end();
-	*errp = -EFAULT;
-	return isum;
+	return 0;
 }
 EXPORT_SYMBOL(csum_and_copy_to_user);
 
@@ -35,27 +35,4 @@ static __inline__ __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
 	return csum_fold(sum);
 }
 
-/*
- * Copy and checksum to user
- */
-#define HAVE_CSUM_COPY_USER
-static __inline__ __wsum csum_and_copy_to_user(const void *src,
-					       void __user *dst,
-					       int len, __wsum sum, int *err_ptr)
-{
-	if (access_ok(dst, len)) {
-		if (copy_to_user(dst, src, len)) {
-			*err_ptr = -EFAULT;
-			return (__force __wsum)-1;
-		}
-
-		return csum_partial(src, len, sum);
-	}
-
-	if (len)
-		*err_ptr = -EFAULT;
-
-	return (__force __wsum)-1; /* invalid checksum */
-}
-
 #endif
 
@@ -55,14 +55,16 @@ __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len)
 #define _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 static inline
 __wsum csum_and_copy_from_user(const void __user *src, void *dst,
-				   int len, __wsum sum, int *err_ptr)
+				   int len)
 {
-	if (access_ok(src, len))
-		return csum_partial_copy_generic((__force const void *)src, dst,
-					len, sum, err_ptr, NULL);
-	if (len)
-		*err_ptr = -EFAULT;
-	return sum;
+	__wsum sum;
+	int err = 0;
+
+	if (!access_ok(src, len))
+		return 0;
+
+	sum = csum_partial_copy_generic((__force const void *)src, dst,
+					len, ~0U, &err, NULL);
+	return err ? 0 : sum;
 }
 
 /*
@@ -243,15 +245,15 @@ static __inline__ __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
  */
 #define HAVE_CSUM_COPY_USER
 static __inline__ __wsum csum_and_copy_to_user(const void *src,
-						void __user *dst, int len,
-						__wsum sum, int *err_ptr)
+						void __user *dst, int len)
 {
-	if (access_ok(dst, len))
-		return csum_partial_copy_generic(src,dst,len,sum,NULL,err_ptr);
+	int err = 0;
+	__wsum sum = ~0U;
 
-	if (len)
-		*err_ptr = -EFAULT;
+	if (!access_ok(dst, len))
+		return 0;
 
-	return (__force __wsum)-1; /* invalid checksum */
+	sum = csum_partial_copy_generic(src,dst,len,sum,NULL,&err);
+	return err ? 0 : sum;
 }
 #endif
 
@@ -24,26 +24,23 @@
 #ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER
 static inline
 __wsum csum_and_copy_from_user (const void __user *src, void *dst,
-				      int len, __wsum sum, int *err_ptr)
+				      int len)
 {
 	if (copy_from_user(dst, src, len))
-		*err_ptr = -EFAULT;
-	return csum_partial(dst, len, sum);
+		return 0;
+	return csum_partial(dst, len, ~0U);
 }
 #endif
 
 #ifndef HAVE_CSUM_COPY_USER
 static __inline__ __wsum csum_and_copy_to_user
-(const void *src, void __user *dst, int len, __wsum sum, int *err_ptr)
+(const void *src, void __user *dst, int len)
 {
-	sum = csum_partial(src, len, sum);
+	__wsum sum = csum_partial(src, len, ~0U);
 
 	if (copy_to_user(dst, src, len) == 0)
 		return sum;
-	if (len)
-		*err_ptr = -EFAULT;
-
-	return (__force __wsum)-1; /* invalid checksum */
+	return 0;
 }
 #endif
 
@@ -1449,15 +1449,14 @@ size_t csum_and_copy_from_iter(void *addr, size_t bytes, __wsum *csum,
 		return 0;
 	}
 	iterate_and_advance(i, bytes, v, ({
-		int err = 0;
 		next = csum_and_copy_from_user(v.iov_base,
 					       (to += v.iov_len) - v.iov_len,
-					       v.iov_len, ~0U, &err);
-		if (!err) {
+					       v.iov_len);
+		if (next) {
 			sum = csum_block_add(sum, next, off);
 			off += v.iov_len;
 		}
-		err ? v.iov_len : 0;
+		next ? 0 : v.iov_len;
 	}), ({
 		char *p = kmap_atomic(v.bv_page);
 		sum = csum_and_memcpy((to += v.bv_len) - v.bv_len,
@@ -1491,11 +1490,10 @@ bool csum_and_copy_from_iter_full(void *addr, size_t bytes, __wsum *csum,
 	if (unlikely(i->count < bytes))
 		return false;
 	iterate_all_kinds(i, bytes, v, ({
-		int err = 0;
 		next = csum_and_copy_from_user(v.iov_base,
 					       (to += v.iov_len) - v.iov_len,
-					       v.iov_len, ~0U, &err);
-		if (err)
+					       v.iov_len);
+		if (!next)
 			return false;
 		sum = csum_block_add(sum, next, off);
 		off += v.iov_len;
@@ -1537,15 +1535,14 @@ size_t csum_and_copy_to_iter(const void *addr, size_t bytes, void *csump,
 		return 0;
 	}
 	iterate_and_advance(i, bytes, v, ({
-		int err = 0;
 		next = csum_and_copy_to_user((from += v.iov_len) - v.iov_len,
 					     v.iov_base,
-					     v.iov_len, ~0U, &err);
-		if (!err) {
+					     v.iov_len);
+		if (next) {
 			sum = csum_block_add(sum, next, off);
 			off += v.iov_len;
 		}
-		err ? v.iov_len : 0;
+		next ? 0 : v.iov_len;
 	}), ({
 		char *p = kmap_atomic(v.bv_page);
 		sum = csum_and_memcpy(p + v.bv_offset,