RISC-V: Make our port sparse-clean
This patch set contains a handful of fixes that clean up the sparse results for the RISC-V port. These patches shouldn't have any functional difference. The patches:

* Use NULL instead of 0.
* Clean up __user annotations.
* Split __copy_user into two functions, to make the __user annotations valid.

Signed-off-by: Palmer Dabbelt <palmer@dabbelt.com>
This commit is contained in:
commit
e0e0c87c02
|
@ -47,7 +47,7 @@ static inline void flush_dcache_page(struct page *page)
|
||||||
|
|
||||||
#else /* CONFIG_SMP */
|
#else /* CONFIG_SMP */
|
||||||
|
|
||||||
#define flush_icache_all() sbi_remote_fence_i(0)
|
#define flush_icache_all() sbi_remote_fence_i(NULL)
|
||||||
void flush_icache_mm(struct mm_struct *mm, bool local);
|
void flush_icache_mm(struct mm_struct *mm, bool local);
|
||||||
|
|
||||||
#endif /* CONFIG_SMP */
|
#endif /* CONFIG_SMP */
|
||||||
|
|
|
@ -49,7 +49,7 @@ static inline void flush_tlb_range(struct vm_area_struct *vma,
|
||||||
|
|
||||||
#include <asm/sbi.h>
|
#include <asm/sbi.h>
|
||||||
|
|
||||||
#define flush_tlb_all() sbi_remote_sfence_vma(0, 0, -1)
|
#define flush_tlb_all() sbi_remote_sfence_vma(NULL, 0, -1)
|
||||||
#define flush_tlb_page(vma, addr) flush_tlb_range(vma, addr, 0)
|
#define flush_tlb_page(vma, addr) flush_tlb_range(vma, addr, 0)
|
||||||
#define flush_tlb_range(vma, start, end) \
|
#define flush_tlb_range(vma, start, end) \
|
||||||
sbi_remote_sfence_vma(mm_cpumask((vma)->vm_mm)->bits, \
|
sbi_remote_sfence_vma(mm_cpumask((vma)->vm_mm)->bits, \
|
||||||
|
|
|
@ -392,19 +392,21 @@ do { \
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
extern unsigned long __must_check __copy_user(void __user *to,
|
extern unsigned long __must_check __asm_copy_to_user(void __user *to,
|
||||||
|
const void *from, unsigned long n);
|
||||||
|
extern unsigned long __must_check __asm_copy_from_user(void *to,
|
||||||
const void __user *from, unsigned long n);
|
const void __user *from, unsigned long n);
|
||||||
|
|
||||||
static inline unsigned long
|
static inline unsigned long
|
||||||
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
|
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
|
||||||
{
|
{
|
||||||
return __copy_user(to, from, n);
|
return __asm_copy_to_user(to, from, n);
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline unsigned long
|
static inline unsigned long
|
||||||
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
|
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
|
||||||
{
|
{
|
||||||
return __copy_user(to, from, n);
|
return __asm_copy_from_user(to, from, n);
|
||||||
}
|
}
|
||||||
|
|
||||||
extern long strncpy_from_user(char *dest, const char __user *src, long count);
|
extern long strncpy_from_user(char *dest, const char __user *src, long count);
|
||||||
|
|
|
@ -13,6 +13,7 @@
|
||||||
* Assembly functions that may be used (directly or indirectly) by modules
|
* Assembly functions that may be used (directly or indirectly) by modules
|
||||||
*/
|
*/
|
||||||
EXPORT_SYMBOL(__clear_user);
|
EXPORT_SYMBOL(__clear_user);
|
||||||
EXPORT_SYMBOL(__copy_user);
|
EXPORT_SYMBOL(__asm_copy_to_user);
|
||||||
|
EXPORT_SYMBOL(__asm_copy_from_user);
|
||||||
EXPORT_SYMBOL(memset);
|
EXPORT_SYMBOL(memset);
|
||||||
EXPORT_SYMBOL(memcpy);
|
EXPORT_SYMBOL(memcpy);
|
||||||
|
|
|
@ -160,7 +160,7 @@ int is_valid_bugaddr(unsigned long pc)
|
||||||
|
|
||||||
if (pc < PAGE_OFFSET)
|
if (pc < PAGE_OFFSET)
|
||||||
return 0;
|
return 0;
|
||||||
if (probe_kernel_address((bug_insn_t __user *)pc, insn))
|
if (probe_kernel_address((bug_insn_t *)pc, insn))
|
||||||
return 0;
|
return 0;
|
||||||
return (insn == __BUG_INSN);
|
return (insn == __BUG_INSN);
|
||||||
}
|
}
|
||||||
|
|
|
@ -13,7 +13,8 @@ _epc:
|
||||||
.previous
|
.previous
|
||||||
.endm
|
.endm
|
||||||
|
|
||||||
ENTRY(__copy_user)
|
ENTRY(__asm_copy_to_user)
|
||||||
|
ENTRY(__asm_copy_from_user)
|
||||||
|
|
||||||
/* Enable access to user memory */
|
/* Enable access to user memory */
|
||||||
li t6, SR_SUM
|
li t6, SR_SUM
|
||||||
|
@ -63,7 +64,8 @@ ENTRY(__copy_user)
|
||||||
addi a0, a0, 1
|
addi a0, a0, 1
|
||||||
bltu a1, a3, 5b
|
bltu a1, a3, 5b
|
||||||
j 3b
|
j 3b
|
||||||
ENDPROC(__copy_user)
|
ENDPROC(__asm_copy_to_user)
|
||||||
|
ENDPROC(__asm_copy_from_user)
|
||||||
|
|
||||||
|
|
||||||
ENTRY(__clear_user)
|
ENTRY(__clear_user)
|
||||||
|
|
Loading…
Reference in New Issue