xref: /openbmc/linux/arch/arc/include/asm/barrier.h (revision 4c79e98b)
/*
 * Copyright (C) 2014-15 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifdef CONFIG_ISA_ARCV2

/*
 * ARCv2 based HS38 cores are in-order issue, but still weakly ordered
 * due to micro-arch buffering/queuing of loads/stores, cache hit vs. miss etc.
 *
 * An explicit barrier is provided by the DMB instruction:
 *  - Its operand selects fine grained load/store/load+store semantics
 *  - It ensures that memory operations of the selected type issued before it
 *    complete before any subsequent memory operation of the same type
 *  - DMB guarantees SMP as well as local (UP) barrier semantics
 *    (asm-generic/barrier.h provides sane smp_*mb if not defined here, i.e.
 *    UP: barrier(), SMP: smp_*mb == *mb)
 *  - DSYNC provides DMB plus completion of cache/BPU maintenance ops, hence it
 *    is not needed in the general case. Also, it only provides a full barrier.
 */

#define mb()	asm volatile("dmb 3\n" : : : "memory")
#define rmb()	asm volatile("dmb 1\n" : : : "memory")
#define wmb()	asm volatile("dmb 2\n" : : : "memory")

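/*
 * A minimal usage sketch (assuming the smp_*mb mapping described above;
 * 'data', 'ready', compute() and use() are placeholders, not real kernel
 * symbols):
 *
 *	producer (CPU0):
 *		data = compute();
 *		smp_wmb();		// wmb(), i.e. "dmb 2", on SMP
 *		WRITE_ONCE(ready, 1);
 *
 *	consumer (CPU1):
 *		while (!READ_ONCE(ready))
 *			cpu_relax();
 *		smp_rmb();		// rmb(), i.e. "dmb 1", on SMP
 *		use(data);
 *
 * smp_wmb() orders the store to 'data' before the store to 'ready', and
 * smp_rmb() orders the load of 'ready' before the subsequent load of 'data'.
 */
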
#elif !defined(CONFIG_ARC_PLAT_EZNPS)  /* CONFIG_ISA_ARCOMPACT */

/*
 * ARCompact based cores (ARC700) only have the SYNC instruction, which is
 * super heavyweight as it also flushes the pipeline.
 * There are no real SMP implementations of such cores.
 */

#define mb()	asm volatile("sync\n" : : : "memory")

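/*
 * Only mb() is defined here; the remaining primitives come from
 * <asm-generic/barrier.h>. A sketch of its fallbacks (the authoritative
 * definitions live in that header, but the gist is):
 *
 *	#ifndef rmb
 *	#define rmb()	mb()
 *	#endif
 *	#ifndef wmb
 *	#define wmb()	mb()
 *	#endif
 */
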
#else	/* CONFIG_ARC_PLAT_EZNPS */

#include <plat/ctop.h>

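/*
 * EZchip NPS platform: barriers map onto platform specific CTOP "SCHD"
 * (presumably "schedule") instructions whose encodings come from
 * <plat/ctop.h>, emitted as raw .word values. Only mb() and rmb() are
 * overridden here; wmb() falls back to the asm-generic definition.
 */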
#define mb()	asm volatile (".word %0" : : "i"(CTOP_INST_SCHD_RW) : "memory")
#define rmb()	asm volatile (".word %0" : : "i"(CTOP_INST_SCHD_RD) : "memory")

#endif

#include <asm-generic/barrier.h>
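
/*
 * A sketch of the smp_*mb mapping the generic header then provides (per the
 * UP/SMP note above; see <asm-generic/barrier.h> for the exact definitions):
 *
 *	UP:	smp_mb()/smp_rmb()/smp_wmb()  ->  barrier()
 *	SMP:	smp_mb()/smp_rmb()/smp_wmb()  ->  mb()/rmb()/wmb()
 */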

#endif