bitmap: optimise bitmap_set and bitmap_clear of a single bit
We have eight users calling bitmap_clear for a single bit and seventeen
calling bitmap_set for a single bit.  Rather than fix all of them to
call __clear_bit or __set_bit, turn bitmap_clear and bitmap_set into
inline functions and make this special case efficient.

Link: http://lkml.kernel.org/r/20170628153221.11322-3-willy@infradead.org
Signed-off-by: Matthew Wilcox <mawilcox@microsoft.com>
Acked-by: Rasmus Villemoes <linux@rasmusvillemoes.dk>
Cc: Martin Schwidefsky <schwidefsky@de.ibm.com>
Cc: Matthew Wilcox <willy@infradead.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
commit e5af323c9b
parent 3cc78125a0
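To see the effect at a call site, here is a minimal userspace sketch of the special-casing technique the patch applies. All names here (set_single_bit, set_bit_range, bitmap_set_demo, the local BITS_PER_LONG) are stand-ins for illustration, not the kernel's code:

/* Sketch: when nbits is the compile-time constant 1, the branch in
 * the inline wrapper folds away and only the cheap single-bit
 * read-modify-write remains; otherwise the out-of-line range path
 * is called.
 */
#include <stdio.h>
#include <limits.h>

#define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

/* Cheap single-bit path: one RMW on one word, no loop. */
static inline void set_single_bit(unsigned long *map, unsigned int start)
{
        map[start / BITS_PER_LONG] |= 1UL << (start % BITS_PER_LONG);
}

/* General multi-bit path: loops bit by bit (simplified). */
static void set_bit_range(unsigned long *map, unsigned int start,
                          unsigned int nbits)
{
        for (unsigned int i = start; i < start + nbits; i++)
                map[i / BITS_PER_LONG] |= 1UL << (i % BITS_PER_LONG);
}

/* The pattern from the patch. */
static inline void bitmap_set_demo(unsigned long *map, unsigned int start,
                                   unsigned int nbits)
{
        if (__builtin_constant_p(nbits) && nbits == 1)
                set_single_bit(start ? map : map, start), (void)0;
        else
                set_bit_range(map, start, nbits);
}

int main(void)
{
        unsigned long map[4] = { 0 };

        bitmap_set_demo(map, 5, 1);     /* constant 1: single-bit path */
        bitmap_set_demo(map, 70, 3);    /* range path */
        printf("%lx %lx %lx %lx\n", map[0], map[1], map[2], map[3]);
        return 0;
}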
diff --git a/include/linux/bitmap.h b/include/linux/bitmap.h
@@ -112,9 +112,8 @@ extern int __bitmap_intersects(const unsigned long *bitmap1,
 extern int __bitmap_subset(const unsigned long *bitmap1,
                        const unsigned long *bitmap2, unsigned int nbits);
 extern int __bitmap_weight(const unsigned long *bitmap, unsigned int nbits);
-
-extern void bitmap_set(unsigned long *map, unsigned int start, int len);
-extern void bitmap_clear(unsigned long *map, unsigned int start, int len);
+extern void __bitmap_set(unsigned long *map, unsigned int start, int len);
+extern void __bitmap_clear(unsigned long *map, unsigned int start, int len);
 
 extern unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
                                                     unsigned long size,
@@ -315,6 +314,24 @@ static __always_inline int bitmap_weight(const unsigned long *src, unsigned int
        return __bitmap_weight(src, nbits);
 }
 
+static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
+               unsigned int nbits)
+{
+       if (__builtin_constant_p(nbits) && nbits == 1)
+               __set_bit(start, map);
+       else
+               __bitmap_set(map, start, nbits);
+}
+
+static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
+               unsigned int nbits)
+{
+       if (__builtin_constant_p(nbits) && nbits == 1)
+               __clear_bit(start, map);
+       else
+               __bitmap_clear(map, start, nbits);
+}
+
 static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                                unsigned int shift, int nbits)
 {
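A note on why __always_inline matters here (an illustration, not part of the patch): __builtin_constant_p can only evaluate to true once the wrapper body has been inlined into a call site where the argument is a literal. A standalone sketch, with a hypothetical helper name:

/* Sketch: __builtin_constant_p() sees through the argument only
 * after inlining, which is why the wrappers above are
 * __always_inline rather than plain inline.
 */
#include <stdio.h>

static inline __attribute__((always_inline))
int nbits_is_constant_one(unsigned int nbits)
{
        return __builtin_constant_p(nbits) && nbits == 1;
}

int main(void)
{
        unsigned int n = 1;

        /* At -O2 the literal survives inlining and this prints 1. */
        printf("literal 1: %d\n", nbits_is_constant_one(1));

        /* Whether a local variable folds to a constant depends on the
         * optimisation level; at -O0 this typically prints 0. */
        printf("variable n: %d\n", nbits_is_constant_one(n));
        return 0;
}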
diff --git a/lib/bitmap.c b/lib/bitmap.c
@@ -251,7 +251,7 @@ int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
 }
 EXPORT_SYMBOL(__bitmap_weight);
 
-void bitmap_set(unsigned long *map, unsigned int start, int len)
+void __bitmap_set(unsigned long *map, unsigned int start, int len)
 {
        unsigned long *p = map + BIT_WORD(start);
        const unsigned int size = start + len;
@@ -270,9 +270,9 @@ void bitmap_set(unsigned long *map, unsigned int start, int len)
                *p |= mask_to_set;
        }
 }
-EXPORT_SYMBOL(bitmap_set);
+EXPORT_SYMBOL(__bitmap_set);
 
-void bitmap_clear(unsigned long *map, unsigned int start, int len)
+void __bitmap_clear(unsigned long *map, unsigned int start, int len)
 {
        unsigned long *p = map + BIT_WORD(start);
        const unsigned int size = start + len;
@@ -291,7 +291,7 @@ void bitmap_clear(unsigned long *map, unsigned int start, int len)
                *p &= ~mask_to_clear;
        }
 }
-EXPORT_SYMBOL(bitmap_clear);
+EXPORT_SYMBOL(__bitmap_clear);
 
 /**
  * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
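The renamed __bitmap_set keeps its word-at-a-time implementation; only the name changes, since the diff context above shows it still builds per-word masks (*p |= mask_to_set). A simplified, runnable userspace sketch of that masking approach, with local stand-ins for the kernel's BIT_WORD and BITMAP_*_WORD_MASK macros:

/* Simplified sketch of a word-at-a-time range set in the spirit of
 * __bitmap_set().  Macros are local stand-ins, not the kernel's.
 */
#include <limits.h>

#define BITS_PER_LONG   (sizeof(unsigned long) * CHAR_BIT)
#define BIT_WORD(nr)    ((nr) / BITS_PER_LONG)
/* All bits from position (start % BITS_PER_LONG) upward. */
#define FIRST_WORD_MASK(start)  (~0UL << ((start) % BITS_PER_LONG))
/* All bits strictly below position (nbits % BITS_PER_LONG), or the
 * whole word when nbits is word-aligned. */
#define LAST_WORD_MASK(nbits)   (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))

static void range_set(unsigned long *map, unsigned int start, int len)
{
        unsigned long *p = map + BIT_WORD(start);
        const unsigned int size = start + len;
        int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
        unsigned long mask_to_set = FIRST_WORD_MASK(start);

        /* Fill whole words, widening the mask after the first one. */
        while (len - bits_to_set >= 0) {
                *p |= mask_to_set;
                len -= bits_to_set;
                bits_to_set = BITS_PER_LONG;
                mask_to_set = ~0UL;
                p++;
        }
        /* Partial final word. */
        if (len) {
                mask_to_set &= LAST_WORD_MASK(size);
                *p |= mask_to_set;
        }
}

int main(void)
{
        unsigned long map[2] = { 0, 0 };

        /* Set a run that crosses the word boundary. */
        range_set(map, BITS_PER_LONG - 4, 8);
        return (map[0] == (~0UL << (BITS_PER_LONG - 4)) &&
                map[1] == 0xfUL) ? 0 : 1;
}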
diff --git a/lib/test_bitmap.c b/lib/test_bitmap.c
@@ -333,9 +333,6 @@ static void __init test_bitmap_u32_array_conversions(void)
        }
 }
 
-#define __bitmap_set(a, b, c)  bitmap_set(a, b, c)
-#define __bitmap_clear(a, b, c)        bitmap_clear(a, b, c)
-
 static void noinline __init test_mem_optimisations(void)
 {
        DECLARE_BITMAP(bmap1, 1024);