	bitmap: Switch from inline to __always_inline
The 'inline' keyword is only a recommendation to the compiler. If it decides
not to inline the bitmap functions, the whole small_const_nbits() machinery
doesn't work.
This is what a standard GCC 11.3.0 does for my x86_64 build now. This patch
replaces 'inline' with the unconditional '__always_inline' to make sure that
there's always a chance for compile-time optimization. It doesn't change the
size of the kernel image, according to bloat-o-meter.
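As an illustration of why the inlining matters, the user-space sketch below
mimics the pattern with simplified stand-ins (MY_BITS_PER_LONG,
my_small_const_nbits() and my_bitmap_zero() are made-up names, not the kernel
definitions):

    #include <string.h>

    #define MY_BITS_PER_LONG  (8 * sizeof(unsigned long))

    /* True only for a compile-time-constant nbits that fits in one word */
    #define my_small_const_nbits(nbits) \
            (__builtin_constant_p(nbits) && (nbits) > 0 && \
             (nbits) <= MY_BITS_PER_LONG)

    static inline __attribute__((__always_inline__))
    void my_bitmap_zero(unsigned long *dst, unsigned int nbits)
    {
            if (my_small_const_nbits(nbits))
                    *dst = 0UL;     /* single store when nbits is constant */
            else
                    memset(dst, 0, ((nbits + MY_BITS_PER_LONG - 1) /
                                    MY_BITS_PER_LONG) * sizeof(unsigned long));
    }

With a plain 'inline', the compiler may emit my_bitmap_zero() out of line;
inside that copy nbits is a runtime parameter, __builtin_constant_p(nbits)
evaluates to false, and even a call like my_bitmap_zero(map, 64) goes through
memset(). With '__always_inline' the body is expanded at every call site, the
constant 64 stays visible, and the call folds to a single word store.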
[[ Brian: split out from:
      Subject: [PATCH 1/3] bitmap: switch from inline to __always_inline
      https://lore.kernel.org/all/20221027043810.350460-2-yury.norov@gmail.com/
   But rewritten, as there were too many conflicts. ]]
Co-developed-by: Brian Norris <briannorris@chromium.org>
Signed-off-by: Brian Norris <briannorris@chromium.org>
Reviewed-by: Kees Cook <kees@kernel.org>
Reviewed-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Yury Norov <yury.norov@gmail.com>
			
			
parent fda1dd3c54
commit ed8cd2b3bd

1 changed file with 76 additions and 64 deletions
@@ -203,12 +203,12 @@ unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
  * the bit offset of all zero areas this function finds is multiples of that
  * power of 2. A @align_mask of 0 means no alignment is required.
  */
-static inline unsigned long
-bitmap_find_next_zero_area(unsigned long *map,
-			   unsigned long size,
-			   unsigned long start,
-			   unsigned int nr,
-			   unsigned long align_mask)
+static __always_inline
+unsigned long bitmap_find_next_zero_area(unsigned long *map,
+					 unsigned long size,
+					 unsigned long start,
+					 unsigned int nr,
+					 unsigned long align_mask)
 {
 	return bitmap_find_next_zero_area_off(map, size, start, nr,
 					      align_mask, 0);
@@ -228,7 +228,7 @@ void bitmap_fold(unsigned long *dst, const unsigned long *orig,
 
 #define bitmap_size(nbits)	(ALIGN(nbits, BITS_PER_LONG) / BITS_PER_BYTE)
 
-static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
+static __always_inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
 {
 	unsigned int len = bitmap_size(nbits);
 
@@ -238,7 +238,7 @@ static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
 		memset(dst, 0, len);
 }
 
-static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
+static __always_inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
 {
 	unsigned int len = bitmap_size(nbits);
 
@@ -248,8 +248,8 @@ static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
 		memset(dst, 0xff, len);
 }
 
-static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
-			unsigned int nbits)
+static __always_inline
+void bitmap_copy(unsigned long *dst, const unsigned long *src, unsigned int nbits)
 {
 	unsigned int len = bitmap_size(nbits);
 
@@ -262,8 +262,8 @@ static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
 /*
  * Copy bitmap and clear tail bits in last word.
  */
-static inline void bitmap_copy_clear_tail(unsigned long *dst,
-		const unsigned long *src, unsigned int nbits)
+static __always_inline
+void bitmap_copy_clear_tail(unsigned long *dst, const unsigned long *src, unsigned int nbits)
 {
 	bitmap_copy(dst, src, nbits);
 	if (nbits % BITS_PER_LONG)
@@ -306,16 +306,18 @@ void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits);
 	bitmap_copy_clear_tail((unsigned long *)(buf), (const unsigned long *)(bitmap), (nbits))
 #endif
 
-static inline bool bitmap_and(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_and(unsigned long *dst, const unsigned long *src1,
+		const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0;
 	return __bitmap_and(dst, src1, src2, nbits);
 }
 
-static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+void bitmap_or(unsigned long *dst, const unsigned long *src1,
+	       const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = *src1 | *src2;
@@ -323,8 +325,9 @@ static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
 		__bitmap_or(dst, src1, src2, nbits);
 }
 
-static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+void bitmap_xor(unsigned long *dst, const unsigned long *src1,
+		const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = *src1 ^ *src2;
@@ -332,16 +335,17 @@ static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
 		__bitmap_xor(dst, src1, src2, nbits);
 }
 
-static inline bool bitmap_andnot(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_andnot(unsigned long *dst, const unsigned long *src1,
+		   const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
 	return __bitmap_andnot(dst, src1, src2, nbits);
 }
 
-static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
-			unsigned int nbits)
+static __always_inline
+void bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = ~(*src);
@@ -356,8 +360,8 @@ static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
 #endif
 #define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)
 
-static inline bool bitmap_equal(const unsigned long *src1,
-				const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_equal(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
@@ -376,10 +380,9 @@ static inline bool bitmap_equal(const unsigned long *src1,
  *
  * Returns: True if (*@src1 | *@src2) == *@src3, false otherwise
  */
-static inline bool bitmap_or_equal(const unsigned long *src1,
-				   const unsigned long *src2,
-				   const unsigned long *src3,
-				   unsigned int nbits)
+static __always_inline
+bool bitmap_or_equal(const unsigned long *src1, const unsigned long *src2,
+		     const unsigned long *src3, unsigned int nbits)
 {
 	if (!small_const_nbits(nbits))
 		return __bitmap_or_equal(src1, src2, src3, nbits);
@@ -387,9 +390,8 @@ static inline bool bitmap_or_equal(const unsigned long *src1,
 	return !(((*src1 | *src2) ^ *src3) & BITMAP_LAST_WORD_MASK(nbits));
 }
 
-static inline bool bitmap_intersects(const unsigned long *src1,
-				     const unsigned long *src2,
-				     unsigned int nbits)
+static __always_inline
+bool bitmap_intersects(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
@@ -397,8 +399,8 @@ static inline bool bitmap_intersects(const unsigned long *src1,
 		return __bitmap_intersects(src1, src2, nbits);
 }
 
-static inline bool bitmap_subset(const unsigned long *src1,
-				 const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_subset(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return ! ((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
@@ -406,7 +408,8 @@ static inline bool bitmap_subset(const unsigned long *src1,
 		return __bitmap_subset(src1, src2, nbits);
 }
 
-static inline bool bitmap_empty(const unsigned long *src, unsigned nbits)
+static __always_inline
+bool bitmap_empty(const unsigned long *src, unsigned nbits)
 {
 	if (small_const_nbits(nbits))
 		return ! (*src & BITMAP_LAST_WORD_MASK(nbits));
@@ -414,7 +417,8 @@ static inline bool bitmap_empty(const unsigned long *src, unsigned nbits)
 	return find_first_bit(src, nbits) == nbits;
 }
 
-static inline bool bitmap_full(const unsigned long *src, unsigned int nbits)
+static __always_inline
+bool bitmap_full(const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits));
@@ -448,8 +452,8 @@ unsigned long bitmap_weight_andnot(const unsigned long *src1,
 	return __bitmap_weight_andnot(src1, src2, nbits);
 }
 
-static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
-		unsigned int nbits)
+static __always_inline
+void bitmap_set(unsigned long *map, unsigned int start, unsigned int nbits)
 {
 	if (__builtin_constant_p(nbits) && nbits == 1)
 		__set_bit(start, map);
@@ -464,8 +468,8 @@ static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
 		__bitmap_set(map, start, nbits);
 }
 
-static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
-		unsigned int nbits)
+static __always_inline
+void bitmap_clear(unsigned long *map, unsigned int start, unsigned int nbits)
 {
 	if (__builtin_constant_p(nbits) && nbits == 1)
 		__clear_bit(start, map);
@@ -480,8 +484,9 @@ static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
 		__bitmap_clear(map, start, nbits);
 }
 
-static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
-				unsigned int shift, unsigned int nbits)
+static __always_inline
+void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
+			unsigned int shift, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift;
@@ -489,8 +494,9 @@ static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
 		__bitmap_shift_right(dst, src, shift, nbits);
 }
 
-static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
-				unsigned int shift, unsigned int nbits)
+static __always_inline
+void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
+		       unsigned int shift, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits);
@@ -498,11 +504,12 @@ static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
 		__bitmap_shift_left(dst, src, shift, nbits);
 }
 
-static inline void bitmap_replace(unsigned long *dst,
-				  const unsigned long *old,
-				  const unsigned long *new,
-				  const unsigned long *mask,
-				  unsigned int nbits)
+static __always_inline
+void bitmap_replace(unsigned long *dst,
+		    const unsigned long *old,
+		    const unsigned long *new,
+		    const unsigned long *mask,
+		    unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = (*old & ~(*mask)) | (*new & *mask);
@@ -545,8 +552,9 @@ static inline void bitmap_replace(unsigned long *dst,
  * bitmap_gather() can be seen as the 'reverse' bitmap_scatter() operation.
  * See bitmap_scatter() for details related to this relationship.
  */
-static inline void bitmap_scatter(unsigned long *dst, const unsigned long *src,
-				  const unsigned long *mask, unsigned int nbits)
+static __always_inline
+void bitmap_scatter(unsigned long *dst, const unsigned long *src,
+		    const unsigned long *mask, unsigned int nbits)
 {
 	unsigned int n = 0;
 	unsigned int bit;
@@ -599,8 +607,9 @@ static inline void bitmap_scatter(unsigned long *dst, const unsigned long *src,
 * bitmap_scatter(res, src, mask, n) and a call to
 * bitmap_scatter(res, result, mask, n) will lead to the same res value.
 */
-static inline void bitmap_gather(unsigned long *dst, const unsigned long *src,
-				 const unsigned long *mask, unsigned int nbits)
+static __always_inline
+void bitmap_gather(unsigned long *dst, const unsigned long *src,
+		   const unsigned long *mask, unsigned int nbits)
 {
 	unsigned int n = 0;
 	unsigned int bit;
@@ -611,9 +620,9 @@ static inline void bitmap_gather(unsigned long *dst, const unsigned long *src,
 		__assign_bit(n++, dst, test_bit(bit, src));
 }
 
-static inline void bitmap_next_set_region(unsigned long *bitmap,
-					  unsigned int *rs, unsigned int *re,
-					  unsigned int end)
+static __always_inline
+void bitmap_next_set_region(unsigned long *bitmap, unsigned int *rs,
+			    unsigned int *re, unsigned int end)
 {
 	*rs = find_next_bit(bitmap, end, *rs);
 	*re = find_next_zero_bit(bitmap, end, *rs + 1);
@@ -628,7 +637,8 @@ static inline void bitmap_next_set_region(unsigned long *bitmap,
  * This is the complement to __bitmap_find_free_region() and releases
  * the found region (by clearing it in the bitmap).
  */
-static inline void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
+static __always_inline
+void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
 {
 	bitmap_clear(bitmap, pos, BIT(order));
 }
@@ -644,7 +654,8 @@ static inline void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
  * Returns: 0 on success, or %-EBUSY if specified region wasn't
  * free (not all bits were zero).
  */
-static inline int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
+static __always_inline
+int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
 {
 	unsigned int len = BIT(order);
 
@@ -668,7 +679,8 @@ static inline int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
  * Returns: the bit offset in bitmap of the allocated region,
  * or -errno on failure.
  */
-static inline int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
+static __always_inline
+int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
 {
 	unsigned int pos, end;		/* scans bitmap by regions of size order */
 
@@ -722,7 +734,7 @@ static inline int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
  * That is ``(u32 *)(&val)[0]`` gets the upper 32 bits,
  * but we expect the lower 32-bits of u64.
  */
-static inline void bitmap_from_u64(unsigned long *dst, u64 mask)
+static __always_inline void bitmap_from_u64(unsigned long *dst, u64 mask)
 {
 	bitmap_from_arr64(dst, &mask, 64);
 }
@@ -737,9 +749,8 @@ static inline void bitmap_from_u64(unsigned long *dst, u64 mask)
 * @map memory region. For @nbits = 0 and @nbits > BITS_PER_LONG the return
 * value is undefined.
 */
-static inline unsigned long bitmap_read(const unsigned long *map,
-					unsigned long start,
-					unsigned long nbits)
+static __always_inline
+unsigned long bitmap_read(const unsigned long *map, unsigned long start, unsigned long nbits)
 {
 	size_t index = BIT_WORD(start);
 	unsigned long offset = start % BITS_PER_LONG;
@@ -772,8 +783,9 @@ static inline unsigned long bitmap_read(const unsigned long *map,
 *
 * For @nbits == 0 and @nbits > BITS_PER_LONG no writes are performed.
 */
-static inline void bitmap_write(unsigned long *map, unsigned long value,
-				unsigned long start, unsigned long nbits)
+static __always_inline
+void bitmap_write(unsigned long *map, unsigned long value,
+		  unsigned long start, unsigned long nbits)
 {
 	size_t index;
 	unsigned long offset;