/*
 * Constants for memory operations
 *
 * Authors:
 *  Richard Henderson <rth@twiddle.net>
 *
 * This work is licensed under the terms of the GNU GPL, version 2 or later.
 * See the COPYING file in the top-level directory.
 *
 */

#ifndef MEMOP_H
#define MEMOP_H

#include "qemu/host-utils.h"

typedef enum MemOp {
    MO_8     = 0,
    MO_16    = 1,
    MO_32    = 2,
    MO_64    = 3,
    MO_128   = 4,
    MO_256   = 5,
    MO_512   = 6,
    MO_1024  = 7,
    MO_SIZE  = 0x07,   /* Mask for the above.  */

    MO_SIGN  = 0x08,   /* Sign-extended, otherwise zero-extended.  */

    MO_BSWAP = 0x10,   /* Host reverse endian.  */
#if HOST_BIG_ENDIAN
    MO_LE    = MO_BSWAP,
    MO_BE    = 0,
#else
    MO_LE    = 0,
    MO_BE    = MO_BSWAP,
#endif
#ifdef COMPILING_PER_TARGET
#if TARGET_BIG_ENDIAN
    MO_TE    = MO_BE,
#else
    MO_TE    = MO_LE,
#endif
#endif

    /*
     * MO_UNALN accesses are never checked for alignment.
     * MO_ALIGN accesses will result in a call to the CPU's
     * do_unaligned_access hook if the guest address is not aligned.
     *
     * Some architectures (e.g. ARMv8) need the address to be aligned
     * to a size larger than the size of the memory access.
     * Some architectures (e.g. SPARCv9) need an address which is aligned,
     * but less strictly than the natural alignment.
     *
     * MO_ALIGN assumes the alignment size is the size of the memory access.
     *
     * There are three options:
     * - unaligned access permitted (MO_UNALN);
     * - alignment to the size of the access (MO_ALIGN);
     * - alignment to a specified size, which may be more or less than
     *   the access size (MO_ALIGN_x, where 'x' is a size in bytes).
     */
    MO_ASHIFT = 5,
    MO_AMASK = 0x7 << MO_ASHIFT,
    MO_UNALN    = 0,
    MO_ALIGN_2  = 1 << MO_ASHIFT,
    MO_ALIGN_4  = 2 << MO_ASHIFT,
    MO_ALIGN_8  = 3 << MO_ASHIFT,
    MO_ALIGN_16 = 4 << MO_ASHIFT,
    MO_ALIGN_32 = 5 << MO_ASHIFT,
    MO_ALIGN_64 = 6 << MO_ASHIFT,
    MO_ALIGN    = MO_AMASK,
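    /*
     * For example, MO_UQ | MO_ALIGN_4 describes an 8-byte access that
     * only requires 4-byte alignment, while MO_UQ | MO_ALIGN requires
     * the full natural 8-byte alignment.
     */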

    /*
     * MO_ATOM_* describes the atomicity requirements of the operation:
     * MO_ATOM_IFALIGN: the operation must be single-copy atomic if it
     *    is aligned; if unaligned there is no atomicity.
     * MO_ATOM_IFALIGN_PAIR: the entire operation may be considered to
     *    be a pair of half-sized operations which are packed together
     *    for convenience, with single-copy atomicity on each half if
     *    the half is aligned.
     *    This is the atomicity e.g. of Arm pre-FEAT_LSE2 LDP.
     * MO_ATOM_WITHIN16: the operation is single-copy atomic, even if it
     *    is unaligned, so long as it does not cross a 16-byte boundary;
     *    if it crosses a 16-byte boundary there is no atomicity.
     *    This is the atomicity e.g. of Arm FEAT_LSE2 LDR.
     * MO_ATOM_WITHIN16_PAIR: the entire operation is single-copy atomic
     *    if it happens to lie within a 16-byte boundary; otherwise it
     *    devolves to a pair of half-sized MO_ATOM_WITHIN16 operations.
     *    Depending on alignment, one or both will be single-copy atomic.
     *    This is the atomicity e.g. of Arm FEAT_LSE2 LDP.
     * MO_ATOM_SUBALIGN: the operation is single-copy atomic in parts,
     *    as determined by the alignment.  E.g. if the address is 0 mod 4,
     *    then each 4-byte subobject is single-copy atomic.
     *    This is the atomicity e.g. of IBM Power.
     * MO_ATOM_NONE: the operation has no atomicity requirements.
     *
     * Note the default (i.e. 0) value is single-copy atomic to the
     * size of the operation, if aligned.  This retains the behaviour
     * from before this field was introduced.
     */
    MO_ATOM_SHIFT         = 8,
    MO_ATOM_IFALIGN       = 0 << MO_ATOM_SHIFT,
    MO_ATOM_IFALIGN_PAIR  = 1 << MO_ATOM_SHIFT,
    MO_ATOM_WITHIN16      = 2 << MO_ATOM_SHIFT,
    MO_ATOM_WITHIN16_PAIR = 3 << MO_ATOM_SHIFT,
    MO_ATOM_SUBALIGN      = 4 << MO_ATOM_SHIFT,
    MO_ATOM_NONE          = 5 << MO_ATOM_SHIFT,
    MO_ATOM_MASK          = 7 << MO_ATOM_SHIFT,
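    /*
     * For example, MO_128 | MO_ATOM_IFALIGN_PAIR (the pre-FEAT_LSE2 LDP
     * case above) only guarantees single-copy atomicity for each aligned
     * 8-byte half of the 16-byte access.
     */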

    /* Combinations of the above, for ease of use.  */
    MO_UB    = MO_8,
    MO_UW    = MO_16,
    MO_UL    = MO_32,
    MO_UQ    = MO_64,
    MO_UO    = MO_128,
    MO_SB    = MO_SIGN | MO_8,
    MO_SW    = MO_SIGN | MO_16,
    MO_SL    = MO_SIGN | MO_32,
    MO_SQ    = MO_SIGN | MO_64,
    MO_SO    = MO_SIGN | MO_128,

    MO_LEUW  = MO_LE | MO_UW,
    MO_LEUL  = MO_LE | MO_UL,
    MO_LEUQ  = MO_LE | MO_UQ,
    MO_LESW  = MO_LE | MO_SW,
    MO_LESL  = MO_LE | MO_SL,
    MO_LESQ  = MO_LE | MO_SQ,

    MO_BEUW  = MO_BE | MO_UW,
    MO_BEUL  = MO_BE | MO_UL,
    MO_BEUQ  = MO_BE | MO_UQ,
    MO_BESW  = MO_BE | MO_SW,
    MO_BESL  = MO_BE | MO_SL,
    MO_BESQ  = MO_BE | MO_SQ,

#ifdef COMPILING_PER_TARGET
    MO_TEUW  = MO_TE | MO_UW,
    MO_TEUL  = MO_TE | MO_UL,
    MO_TEUQ  = MO_TE | MO_UQ,
    MO_TEUO  = MO_TE | MO_UO,
    MO_TESW  = MO_TE | MO_SW,
    MO_TESL  = MO_TE | MO_SL,
    MO_TESQ  = MO_TE | MO_SQ,
#endif

    MO_SSIZE = MO_SIZE | MO_SIGN,
} MemOp;
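
/*
 * A complete MemOp is built by combining one value from each group,
 * e.g. MO_LEUL | MO_ALIGN | MO_ATOM_IFALIGN describes a little-endian
 * 4-byte access that must be naturally aligned and is single-copy atomic.
 */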

/* MemOp to size in bytes.  */
static inline unsigned memop_size(MemOp op)
{
    return 1 << (op & MO_SIZE);
}
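
/* For example, memop_size(MO_UW) == 2 and memop_size(MO_128) == 16. */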

/* Size in bytes to MemOp.  */
static inline MemOp size_memop(unsigned size)
{
#ifdef CONFIG_DEBUG_TCG
    /* Power of 2 up to 8.  */
    assert((size & (size - 1)) == 0 && size >= 1 && size <= 8);
#endif
    return (MemOp)ctz32(size);
}
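
/* For example, size_memop(1) == MO_8 and size_memop(4) == MO_32. */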

/**
 * memop_alignment_bits:
 * @memop: MemOp value
 *
 * Extract the alignment requirement from the memop, as log2 of the
 * required alignment in bytes.
 */
static inline unsigned memop_alignment_bits(MemOp memop)
{
    unsigned a = memop & MO_AMASK;

    if (a == MO_UNALN) {
        /* No alignment required.  */
        a = 0;
    } else if (a == MO_ALIGN) {
        /* A natural alignment requirement.  */
        a = memop & MO_SIZE;
    } else {
        /* A specific alignment requirement.  */
        a = a >> MO_ASHIFT;
    }
    return a;
}
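
/*
 * For example, memop_alignment_bits(MO_UL | MO_ALIGN) == 2 (4-byte
 * alignment) and memop_alignment_bits(MO_UQ | MO_ALIGN_2) == 1.
 */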

/**
 * memop_atomicity_bits:
 * @memop: MemOp value
 *
 * Extract the atomicity size from the memop, as log2 of the size in
 * bytes of the unit that must be single-copy atomic.
 */
static inline unsigned memop_atomicity_bits(MemOp memop)
{
    unsigned size = memop & MO_SIZE;

    switch (memop & MO_ATOM_MASK) {
    case MO_ATOM_NONE:
        size = MO_8;
        break;
    case MO_ATOM_IFALIGN_PAIR:
    case MO_ATOM_WITHIN16_PAIR:
        size = size ? size - 1 : 0;
        break;
    default:
        break;
    }
    return size;
}
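
/*
 * For example, memop_atomicity_bits(MO_128 | MO_ATOM_IFALIGN_PAIR) == MO_64,
 * i.e. each 8-byte half of the 16-byte access must be single-copy atomic.
 */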

#endif