Mirror of https://github.com/torvalds/linux.git, synced 2025-11-04 02:30:34 +02:00
Add barriers for aarch64 for cross compiling; most are taken from the Linux kernel.

Signed-off-by: Peng Fan <peng.fan@nxp.com>
Link: https://lore.kernel.org/r/20201209084205.24062-4-peng.fan@oss.nxp.com
Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
		
			
				
	
	
		
32 lines | 1.1 KiB | C
/* SPDX-License-Identifier: GPL-2.0 */
#include <stdlib.h>
#if defined(__i386__) || defined(__x86_64__)
#define barrier() asm volatile("" ::: "memory")
#define virt_mb() __sync_synchronize()
#define virt_rmb() barrier()
#define virt_wmb() barrier()
/* Atomic store should be enough, but gcc generates worse code in that case. */
#define virt_store_mb(var, value)  do { \
	typeof(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0);
/* Weak barriers should be used. If not - it's a bug */
# define mb() abort()
# define dma_rmb() abort()
# define dma_wmb() abort()
#elif defined(__aarch64__)
#define dmb(opt) asm volatile("dmb " #opt : : : "memory")
#define virt_mb() __sync_synchronize()
#define virt_rmb() dmb(ishld)
#define virt_wmb() dmb(ishst)
#define virt_store_mb(var, value)  do { WRITE_ONCE(var, value); dmb(ish); } while (0)
/* Weak barriers should be used. If not - it's a bug */
# define mb() abort()
# define dma_rmb() abort()
# define dma_wmb() abort()
#else
#error Please fill in barrier macros
#endif
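The virt_*() barriers above are meant to be used in pairs: a writer orders its payload stores before publishing a flag with virt_wmb(), and a reader orders its flag check before the payload loads with virt_rmb(). The sketch below is only an illustration, not part of the kernel header: it assumes the macros defined above are already in scope (the header's path is not shown on this page), and the shared_data/shared_ready names are made up for the example.

/*
 * Usage sketch (illustration only): minimal producer/consumer pairing
 * of virt_wmb()/virt_rmb(). Assumes the barrier macros above are in
 * scope; shared_data and shared_ready are hypothetical names.
 */
#include <stdio.h>

static int shared_data;
static volatile int shared_ready;

static void producer(void)
{
	shared_data = 42;	/* fill the payload                             */
	virt_wmb();		/* order payload store before the flag store:   */
				/* compiler barrier on x86, dmb ishst on arm64  */
	shared_ready = 1;	/* publish                                      */
}

static void consumer(void)
{
	while (!shared_ready)	/* wait until the flag is visible               */
		;
	virt_rmb();		/* order flag load before the payload load:     */
				/* compiler barrier on x86, dmb ishld on arm64  */
	printf("%d\n", shared_data);
}

int main(void)
{
	/* Single-threaded driver, only so the sketch compiles and runs. */
	producer();
	consumer();
	return 0;
}

On x86, virt_rmb()/virt_wmb() reduce to pure compiler barriers because the hardware already keeps loads ordered with loads and stores ordered with stores; aarch64 has a weaker memory model, so the inner-shareable dmb variants are required, which is what this header adds for cross compiling.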