| // SPDX-License-Identifier: GPL-2.0+ |
| #ifndef __LINUX_BITMAP_H |
| #define __LINUX_BITMAP_H |
| |
| #include <asm/types.h> |
| #include <linux/types.h> |
| #include <linux/bitops.h> |
| #include <string.h> |
| |
/*
 * Search @map (of @size bits) for a run of @nr clear bits starting no
 * earlier than bit @start, with the run's position constrained by
 * @align_mask and shifted by @align_offset before alignment is applied.
 * NOTE(review): presumably returns the bit index of the run found, or a
 * value >= @size when no such run exists — confirm against the definition.
 */
extern unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
					      unsigned long size,
					      unsigned long start,
					      unsigned int nr,
					      unsigned long align_mask,
					      unsigned long align_offset);

/* Out-of-line helpers: set/clear @len consecutive bits starting at @start. */
extern void __bitmap_set(unsigned long *map, unsigned int start, int len);
extern void __bitmap_clear(unsigned long *map, unsigned int start, int len);
/**
 * bitmap_find_next_zero_area - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 *
 * Convenience wrapper around bitmap_find_next_zero_area_off() with no
 * extra offset applied.  @align_mask must be one less than a power of
 * two; every zero area found then starts at a multiple of that power of
 * two.  Pass 0 to impose no alignment constraint.
 */
static inline unsigned long
bitmap_find_next_zero_area(unsigned long *map,
			   unsigned long size,
			   unsigned long start,
			   unsigned int nr,
			   unsigned long align_mask)
{
	const unsigned long align_offset = 0;

	return bitmap_find_next_zero_area_off(map, size, start, nr,
					      align_mask, align_offset);
}
| |
/* Mask selecting the bits of the first word of a range starting at bit @start. */
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
/* Mask selecting the valid bits of the last word of an @nbits-bit bitmap. */
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))
| |
/*
 * True when @nbits is a compile-time constant that is non-zero and fits
 * in a single unsigned long, allowing callers to take a one-word fast
 * path.
 *
 * The (nbits) > 0 test keeps a constant nbits of 0 off the fast path:
 * without it, bitmap_zero() would store a full word (*dst = 0UL) even
 * though the caller asked for no bits to be touched — an out-of-bounds
 * write if the bitmap is zero-length.  (Matches the upstream Linux fix
 * to this macro.)
 */
#define small_const_nbits(nbits) \
	(__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG && (nbits) > 0)
| |
/*
 * bitmap_zero - clear all @nbits bits of the bitmap at @dst.
 *
 * A compile-time-constant size that fits one word is cleared with a
 * single store; anything else falls back to memset() over the whole
 * array of longs backing the bitmap.
 */
static inline void bitmap_zero(unsigned long *dst, int nbits)
{
	if (small_const_nbits(nbits)) {
		*dst = 0UL;
		return;
	}

	memset(dst, 0, BITS_TO_LONGS(nbits) * sizeof(unsigned long));
}
| |
/*
 * Granularity at which bitmap_set()/bitmap_clear() may fall back to a
 * byte-wise memset(): 8 bits (one byte) on little-endian, a whole long
 * on big-endian — presumably because only on little-endian does bitmap
 * bit order match byte order in memory; confirm against __bitmap_set().
 */
#ifdef __LITTLE_ENDIAN
#define BITMAP_MEM_ALIGNMENT 8
#else
#define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long))
#endif
/* Low-bit mask used to test start/nbits alignment against the above. */
#define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)
| |
/*
 * bitmap_set - set @nbits consecutive bits in @map starting at bit @start.
 *
 * The dispatch is designed to fold at compile time: a constant single
 * bit becomes __set_bit(); a range whose start and length are constants
 * aligned to BITMAP_MEM_ALIGNMENT becomes a memset(0xff) over whole
 * bytes; everything else takes the generic __bitmap_set() path.
 */
static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
		unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__set_bit(start, map);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		/* Aligned constant-sized run: fill whole bytes directly. */
		memset((char *)map + start / 8, 0xff, nbits / 8);
	else
		__bitmap_set(map, start, nbits);
}
| |
/*
 * bitmap_clear - clear @nbits consecutive bits in @map starting at bit
 * @start.  Mirror of bitmap_set(): constant single bit -> __clear_bit();
 * constant range aligned to BITMAP_MEM_ALIGNMENT -> memset(0) over whole
 * bytes; otherwise the generic __bitmap_clear() path.
 */
static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
		unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__clear_bit(start, map);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		/* Aligned constant-sized run: zero whole bytes directly. */
		memset((char *)map + start / 8, 0, nbits / 8);
	else
		__bitmap_clear(map, start, nbits);
}
| |
| #endif /* __LINUX_BITMAP_H */ |