forked from mirrors/linux
		
	locking/refcount: Provide __refcount API to obtain the old value
David requested means to obtain the old/previous value from the refcount API for tracing purposes. Duplicate (most of) the API as __refcount*() with an additional 'int *' argument into which, if !NULL, the old value will be stored. Requested-by: David Howells <dhowells@redhat.com> Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Reviewed-by: Kees Cook <keescook@chromium.org> Link: https://lkml.kernel.org/r/20200729111120.GA2638@hirez.programming.kicks-ass.net
This commit is contained in:
		
							parent
							
								
									6eb6d05958
								
							
						
					
					
						commit
						a435b9a143
					
				
					 1 changed file with 57 additions and 8 deletions
				
			
		| 
						 | 
				
			
			@ -165,7 +165,7 @@ static inline unsigned int refcount_read(const refcount_t *r)
 | 
			
		|||
 *
 | 
			
		||||
 * Return: false if the passed refcount is 0, true otherwise
 | 
			
		||||
 */
 | 
			
		||||
static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
 | 
			
		||||
static inline __must_check bool __refcount_add_not_zero(int i, refcount_t *r, int *oldp)
 | 
			
		||||
{
 | 
			
		||||
	int old = refcount_read(r);
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -174,12 +174,20 @@ static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
 | 
			
		|||
			break;
 | 
			
		||||
	} while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i));
 | 
			
		||||
 | 
			
		||||
	if (oldp)
 | 
			
		||||
		*oldp = old;
 | 
			
		||||
 | 
			
		||||
	if (unlikely(old < 0 || old + i < 0))
 | 
			
		||||
		refcount_warn_saturate(r, REFCOUNT_ADD_NOT_ZERO_OVF);
 | 
			
		||||
 | 
			
		||||
	return old;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
/*
 * Convenience wrapper around __refcount_add_not_zero() for callers that
 * do not care about the previous counter value (passes a NULL @oldp).
 */
static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
{
	return __refcount_add_not_zero(i, r, NULL);
}
 | 
			
		||||
 | 
			
		||||
/**
 | 
			
		||||
 * refcount_add - add a value to a refcount
 | 
			
		||||
 * @i: the value to add to the refcount
 | 
			
		||||
| 
						 | 
				
			
			@ -196,16 +204,24 @@ static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
 | 
			
		|||
 * cases, refcount_inc(), or one of its variants, should instead be used to
 | 
			
		||||
 * increment a reference count.
 | 
			
		||||
 */
 | 
			
		||||
static inline void refcount_add(int i, refcount_t *r)
 | 
			
		||||
/*
 * Unconditionally add @i to @r with relaxed ordering; when @oldp is
 * non-NULL the pre-addition value is stored through it.  Warns on a
 * possible use-after-free (counter was 0) or on overflow into the
 * saturation range.
 */
static inline void __refcount_add(int i, refcount_t *r, int *oldp)
{
	int prev = atomic_fetch_add_relaxed(i, &r->refs);

	if (oldp)
		*oldp = prev;

	if (unlikely(prev == 0))
		refcount_warn_saturate(r, REFCOUNT_ADD_UAF);
	else if (unlikely(prev < 0 || prev + i < 0))
		refcount_warn_saturate(r, REFCOUNT_ADD_OVF);
}
 | 
			
		||||
 | 
			
		||||
/*
 * Convenience wrapper around __refcount_add() that discards the previous
 * counter value (passes a NULL @oldp).
 */
static inline void refcount_add(int i, refcount_t *r)
{
	__refcount_add(i, r, NULL);
}
 | 
			
		||||
 | 
			
		||||
/**
 | 
			
		||||
 * refcount_inc_not_zero - increment a refcount unless it is 0
 | 
			
		||||
 * @r: the refcount to increment
 | 
			
		||||
| 
						 | 
				
			
			@ -219,9 +235,14 @@ static inline void refcount_add(int i, refcount_t *r)
 | 
			
		|||
 *
 | 
			
		||||
 * Return: true if the increment was successful, false otherwise
 | 
			
		||||
 */
 | 
			
		||||
/*
 * Increment @r by one unless it is 0; when @oldp is non-NULL the previous
 * value is reported through it.  Thin wrapper over
 * __refcount_add_not_zero() with i == 1.
 */
static inline __must_check bool __refcount_inc_not_zero(refcount_t *r, int *oldp)
{
	return __refcount_add_not_zero(1, r, oldp);
}
 | 
			
		||||
 | 
			
		||||
/*
 * Convenience wrapper around __refcount_inc_not_zero() for callers that
 * do not need the previous counter value (passes a NULL @oldp).
 */
static inline __must_check bool refcount_inc_not_zero(refcount_t *r)
{
	return __refcount_inc_not_zero(r, NULL);
}
 | 
			
		||||
 | 
			
		||||
/**
 | 
			
		||||
| 
						 | 
				
			
			@ -236,9 +257,14 @@ static inline __must_check bool refcount_inc_not_zero(refcount_t *r)
 | 
			
		|||
 * Will WARN if the refcount is 0, as this represents a possible use-after-free
 | 
			
		||||
 * condition.
 | 
			
		||||
 */
 | 
			
		||||
/*
 * Increment @r by one; when @oldp is non-NULL the previous value is
 * stored through it.  Thin wrapper over __refcount_add() with i == 1.
 */
static inline void __refcount_inc(refcount_t *r, int *oldp)
{
	__refcount_add(1, r, oldp);
}
 | 
			
		||||
 | 
			
		||||
/*
 * Convenience wrapper around __refcount_inc() that discards the previous
 * counter value (passes a NULL @oldp).
 */
static inline void refcount_inc(refcount_t *r)
{
	__refcount_inc(r, NULL);
}
 | 
			
		||||
 | 
			
		||||
/**
 | 
			
		||||
| 
						 | 
				
			
			@ -261,10 +287,13 @@ static inline void refcount_inc(refcount_t *r)
 | 
			
		|||
 *
 | 
			
		||||
 * Return: true if the resulting refcount is 0, false otherwise
 | 
			
		||||
 */
 | 
			
		||||
static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
 | 
			
		||||
static inline __must_check bool __refcount_sub_and_test(int i, refcount_t *r, int *oldp)
 | 
			
		||||
{
 | 
			
		||||
	int old = atomic_fetch_sub_release(i, &r->refs);
 | 
			
		||||
 | 
			
		||||
	if (oldp)
 | 
			
		||||
		*oldp = old;
 | 
			
		||||
 | 
			
		||||
	if (old == i) {
 | 
			
		||||
		smp_acquire__after_ctrl_dep();
 | 
			
		||||
		return true;
 | 
			
		||||
| 
						 | 
				
			
			@ -276,6 +305,11 @@ static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
 | 
			
		|||
	return false;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
/*
 * Convenience wrapper around __refcount_sub_and_test() for callers that
 * do not need the previous counter value (passes a NULL @oldp).
 */
static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
{
	return __refcount_sub_and_test(i, r, NULL);
}
 | 
			
		||||
 | 
			
		||||
/**
 | 
			
		||||
 * refcount_dec_and_test - decrement a refcount and test if it is 0
 | 
			
		||||
 * @r: the refcount
 | 
			
		||||
| 
						 | 
				
			
			@ -289,9 +323,14 @@ static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
 | 
			
		|||
 *
 | 
			
		||||
 * Return: true if the resulting refcount is 0, false otherwise
 | 
			
		||||
 */
 | 
			
		||||
/*
 * Decrement @r by one and test for 0; when @oldp is non-NULL the previous
 * value is reported through it.  Thin wrapper over
 * __refcount_sub_and_test() with i == 1.
 */
static inline __must_check bool __refcount_dec_and_test(refcount_t *r, int *oldp)
{
	return __refcount_sub_and_test(1, r, oldp);
}
 | 
			
		||||
 | 
			
		||||
/*
 * Convenience wrapper around __refcount_dec_and_test() that discards the
 * previous counter value (passes a NULL @oldp).
 */
static inline __must_check bool refcount_dec_and_test(refcount_t *r)
{
	return __refcount_dec_and_test(r, NULL);
}
 | 
			
		||||
 | 
			
		||||
/**
 | 
			
		||||
| 
						 | 
				
			
			@ -304,10 +343,20 @@ static inline __must_check bool refcount_dec_and_test(refcount_t *r)
 | 
			
		|||
 * Provides release memory ordering, such that prior loads and stores are done
 | 
			
		||||
 * before.
 | 
			
		||||
 */
 | 
			
		||||
/*
 * Decrement @r by one with release ordering; when @oldp is non-NULL the
 * pre-decrement value is stored through it.  Warns when the previous
 * value was <= 1, i.e. the count just hit zero (possible leak of the
 * final put) or was already in the saturated/negative range.
 */
static inline void __refcount_dec(refcount_t *r, int *oldp)
{
	int prev = atomic_fetch_sub_release(1, &r->refs);

	if (oldp)
		*oldp = prev;

	if (unlikely(prev <= 1))
		refcount_warn_saturate(r, REFCOUNT_DEC_LEAK);
}
 | 
			
		||||
 | 
			
		||||
/*
 * Convenience wrapper around __refcount_dec() that discards the previous
 * counter value (passes a NULL @oldp).
 */
static inline void refcount_dec(refcount_t *r)
{
	__refcount_dec(r, NULL);
}
 | 
			
		||||
 | 
			
		||||
extern __must_check bool refcount_dec_if_one(refcount_t *r);
 | 
			
		||||
| 
						 | 
				
			
			
 | 
			
		|||
		Loading…
	
		Reference in a new issue