| /openbmc/qemu/tests/tcg/aarch64/ |
| H A D | mte-1.c
|    12  int *p0, *p1, *p2;    in main() local
|    16  p0 = alloc_mte_mem(sizeof(*p0));    in main()
|    18  asm("irg %0,%1,%2" : "=r"(p1) : "r"(p0), "r"(1l));    in main()
|    19  assert(p1 != p0);    in main()
|    20  asm("subp %0,%1,%2" : "=r"(c) : "r"(p0), "r"(p1));    in main()
|    24  asm("ldg %0, [%1]" : "=r"(p2) : "r"(p0), "0"(p0));    in main()
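The mte-1.c hits above exercise the MTE tag-manipulation instructions straight from C: irg inserts a random allocation tag into a pointer, subp subtracts two pointers while ignoring their tag bits, and ldg reads the allocation tag stored in memory back into a pointer. Below is a minimal standalone sketch of the same pattern, not the QEMU test itself; it assumes an MTE-capable target (or qemu-aarch64), a toolchain accepting -march=armv8.5-a+memtag, and a hypothetical alloc_mte_mem() stand-in for the test harness's allocator (the real tests also enable tag-check faults via prctl(), which is omitted here).

#include <assert.h>
#include <stdio.h>
#include <sys/mman.h>

#ifndef PROT_MTE
#define PROT_MTE 0x20   /* aarch64 Linux: allocation tags enabled on this mapping */
#endif

/* Hypothetical stand-in for the test harness's alloc_mte_mem(). */
static void *alloc_mte_mem(size_t size)
{
    return mmap(NULL, size, PROT_READ | PROT_WRITE | PROT_MTE,
                MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
}

int main(void)
{
    long c;
    int *p0, *p1, *p2;

    p0 = alloc_mte_mem(sizeof(*p0));
    assert(p0 != MAP_FAILED);

    /* IRG: insert a random tag into p0; the mask value 1 excludes tag 0,
     * so the tagged pointer cannot equal the untagged one. */
    asm("irg %0,%1,%2" : "=r"(p1) : "r"(p0), "r"(1l));
    assert(p1 != p0);

    /* SUBP: pointer difference with the tag bits stripped, so it is zero. */
    asm("subp %0,%1,%2" : "=r"(c) : "r"(p0), "r"(p1));
    assert(c == 0);

    /* LDG: read the allocation tag of p0's granule back into a pointer. */
    asm("ldg %0, [%1]" : "=r"(p2) : "r"(p0), "0"(p0));

    printf("p0=%p p1=%p p2=%p\n", (void *)p0, (void *)p1, (void *)p2);
    return 0;
}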
|
| H A D | mte-5.c
|    19  void *p0, *p1, *p2;    in main() local
|    23  p0 = alloc_mte_mem(sizeof(*p0));    in main()
|    26  asm("irg %0,%1,%2" : "=r"(p1) : "r"(p0), "r"(excl));    in main()
|    29  asm("irg %0,%1,%2" : "=r"(p2) : "r"(p0), "r"(excl));    in main()
|    42  asm volatile("ldr %0, [%1]" : "=r"(p0) : "r"(p1 + 12));    in main()
|
| H A D | mte-3.c
|    19  long *p0, *p1, *p2;    in main() local
|    23  p0 = alloc_mte_mem(sizeof(*p0));    in main()
|    26  asm("irg %0,%1,%2" : "=r"(p1) : "r"(p0), "r"(excl));    in main()
|    29  asm("irg %0,%1,%2" : "=r"(p2) : "r"(p0), "r"(excl));    in main()
|
| H A D | mte-2.c
|    19  int *p0, *p1, *p2;    in main() local
|    23  p0 = alloc_mte_mem(sizeof(*p0));    in main()
|    26  asm("irg %0,%1,%2" : "=r"(p1) : "r"(p0), "r"(excl));    in main()
|    29  asm("irg %0,%1,%2" : "=r"(p2) : "r"(p0), "r"(excl));    in main()
|
| H A D | pauth-5.c
|     8  int *p0 = &x, *p1, *p2, *p3;    in main() local
|    27  asm("pacda %0, %1" : "=r"(p1) : "r"(salt), "0"(p0));    in main()
|    28  } while (p0 == p1);    in main()
|    41  assert(p3 != p0);    in main()
|
| H A D | pauth-1.c
|    15  void *p0 = &x, *p1, *p2;    in main() local
|    19  asm volatile("pacdza %0" : "=r"(p1) : "0"(p0));    in main()
|    21  asm volatile("pacdza %0" : "=r"(p2) : "0"(p0));    in main()
|    23  if (p1 != p0) {    in main()
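pauth-1.c above signs the same data pointer twice with pacdza (PAC, data key A, zero modifier) and then checks whether a PAC was actually inserted, since on configurations with few spare address bits the signed pointer can equal the original. Below is a hedged standalone sketch of that pattern, assuming an ARMv8.3 pointer-authentication-capable target (or qemu-aarch64) and a toolchain accepting -march=armv8.3-a; the xpacd strip at the end is an illustrative addition, not part of the QEMU test.

#include <stdio.h>

int main(void)
{
    int x = 0;
    void *p0 = &x, *p1, *p2, *stripped;

    /* Sign the data pointer with the DA key and a zero modifier. */
    asm volatile("pacdza %0" : "=r"(p1) : "0"(p0));
    /* Signing the identical value again must produce the identical result. */
    asm volatile("pacdza %0" : "=r"(p2) : "0"(p0));

    /* XPACD strips the PAC again, recovering the original address. */
    stripped = p1;
    asm volatile("xpacd %0" : "+r"(stripped));

    printf("plain=%p signed=%p signed-again=%s stripped=%p\n",
           p0, p1, p1 == p2 ? "same" : "different", stripped);
    return 0;
}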
|
| H A D | mte-4.c
|    33  int *p0, *p1;    in main() local
|    36  p0 = alloc_mte_mem(size);    in main()
|    39  asm("irg %0,%1,%2" : "=r"(p1) : "r"(p0), "r"(excl));    in main()
|
| /openbmc/qemu/tests/tcg/hexagon/ |
| H A D | hvx_misc.c
|    30  void *p0 = buffer0;    in test_load_tmp() local
|    49  : : "r"(p0), "r"(p1), "r"(pout)    in test_load_tmp()
|    51  p0 += sizeof(MMVector);    in test_load_tmp()
|    95  void *p0 = buffer0;    in test_load_cur() local
|   103  : : "r"(p0), "r"(pout) : "v2", "memory");    in test_load_cur()
|   104  p0 += sizeof(MMVector);    in test_load_cur()
|   118  void *p0 = buffer0;    in test_load_aligned() local
|   122  p0 += offset;    /* Create an unaligned address */    in test_load_aligned()
|   125  : : "r"(p0), "r"(pout) : "v2", "memory");    in test_load_aligned()
|   134  void *p0 = buffer0;    in test_load_unaligned() local
|   [all …]
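Every hvx_misc.c hit follows the same shape: a void * cursor named p0 walks across an input buffer while short inline-asm blocks issue HVX vmem loads and stores, advancing by sizeof(MMVector) per iteration. Below is a reduced sketch of that idiom, not taken from the test file: it assumes the Hexagon toolchain with HVX enabled (e.g. -mhvx) and 128-byte vectors, and declares a local MMVector stand-in for the one in hvx_misc.h.

#include <stdio.h>
#include <string.h>

/* Local stand-in for the 128-byte HVX vector type used by the QEMU tests. */
typedef union {
    unsigned char ub[128];
    int w[32];
} __attribute__((aligned(128))) MMVector;

#define N 4

static MMVector buffer0[N], output[N];

static void copy_vectors(void)
{
    void *p0 = buffer0;
    void *pout = output;

    for (int i = 0; i < N; i++) {
        /* Aligned HVX load into v2, then an aligned store to the output. */
        asm("v2 = vmem(%0 + #0)\n\t"
            "vmem(%1 + #0) = v2\n\t"
            : : "r"(p0), "r"(pout) : "v2", "memory");
        p0 += sizeof(MMVector);      /* advance the cursors, as in the tests */
        pout += sizeof(MMVector);
    }
}

int main(void)
{
    for (int i = 0; i < N; i++) {
        for (int j = 0; j < 32; j++) {
            buffer0[i].w[j] = i * 32 + j;
        }
    }
    copy_vectors();
    printf("%s\n", memcmp(buffer0, output, sizeof(buffer0)) ? "FAIL" : "PASS");
    return 0;
}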
|
| H A D | v69_hvx.c
|    43  void *p0 = buffer0;    in test_vasrvuhubrndsat() local
|    56  : : "r"(p0), "r"(p1), "r"(pout)    in test_vasrvuhubrndsat()
|    58  p0 += sizeof(MMVector) * 2;    in test_vasrvuhubrndsat()
|    80  void *p0 = buffer0;    in test_vasrvuhubsat() local
|    93  : : "r"(p0), "r"(p1), "r"(pout)    in test_vasrvuhubsat()
|    95  p0 += sizeof(MMVector) * 2;    in test_vasrvuhubsat()
|   117  void *p0 = buffer0;    in test_vasrvwuhrndsat() local
|   130  : : "r"(p0), "r"(p1), "r"(pout)    in test_vasrvwuhrndsat()
|   132  p0 += sizeof(MMVector) * 2;    in test_vasrvwuhrndsat()
|   154  void *p0 = buffer0;    in test_vasrvwuhsat() local
|   [all …]
|
| H A D | test_fibonacci.S
|    11  p0 = cmp.gt(r2, #0); if (!p0.new) jump:nt .LBB0_3    define
|    22  p0 = cmp.gt(r2, r5); if (p0.new) jump:nt .LBB0_2    define
|    28  p0 = cmp.eq(r3, #144); if (p0.new) jump:t pass    define
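All of these Hexagon assembly tests rely on the pattern visible in the hits: a packet writes p0 with a compare and consumes it in the same packet through p0.new, either to take a conditional jump or to predicate another instruction. Below is a small hedged C sketch of the dot-new idiom, modelled on the style of QEMU's Hexagon inline-asm tests rather than copied from any of them; it predicates a register move instead of a jump, which is easier to express in inline asm, and the helper name is made up for illustration.

#include <stdio.h>

/* Returns 1 if x == 144 (the value test_fibonacci.S checks for at its
 * line 28), else 0, using a compare and a dot-new predicated move
 * inside a single packet. */
static int is_expected_fib(int x)
{
    int ret;
    asm("r5 = #0\n\t"
        "{\n\t"
        "    p0 = cmp.eq(%1, #144)\n\t"
        "    if (p0.new) r5 = #1\n\t"
        "}\n\t"
        "%0 = r5\n\t"
        : "=r"(ret)
        : "r"(x)
        : "p0", "r5");
    return ret;
}

int main(void)
{
    printf("%d %d\n", is_expected_fib(144), is_expected_fib(143));   /* expect: 1 0 */
    return 0;
}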
|
| H A D | test_bitcnt.S
|    20  p0 = cmp.eq(r2, #23); if (p0.new) jump:t test2    define
|    29  p0 = cmp.eq(r2, #55); if (p0.new) jump:t test3    define
|    38  p0 = cmp.eq(r2, #1); if (p0.new) jump:t pass    define
|
| H A D | test_reorder.S
|    20  if (p0.new) jump:nt skip
|    21  p0 = r4;    define
|    31  p0 = cmp.eq(r2, #-559038737); if (p0.new) jump:t pass    define
|
| H A D | test_bitsplit.S
|    14  p0 = cmp.eq(r2, #3); if (p0.new) jump:t test2    define
|    20  p0 = cmp.eq(r3, #23); if (p0.new) jump:t pass    define
|
| H A D | test_vpmpyh.S
|    20  p0 = cmp.eq(r0, #184945412); if (p0.new) jump:t test2    define
|    26  p0 = cmp.eq(r1, #262150); if (p0.new) jump:t pass    define
|
| H A D | test_round.S
|    18  p0 = cmp.eq(r2, #13); if (p0.new) jump:t test2    define
|    27  p0 = cmp.eq(r2, #12); if (p0.new) jump:t pass    define
|
| H A D | test_cmp.S
|    18  p0 = cmp.lt(r0, r1); if (p0.new) jump:t unsigned    define
|    29  p0 = cmp.gtu(r0, r1); if (p0.new) jump:t pass    define
|
| H A D | test_vminh.S
|    27  p0 = cmp.eq(r0, #65541); if (p0.new) jump:t test2    define
|    33  p0 = cmp.eq(r1, #65538); if (p0.new) jump:t pass    define
|
| H A D | test_vmaxh.S
|    27  p0 = cmp.eq(r0, #131079); if (p0.new) jump:t test2    define
|    33  p0 = cmp.eq(r1, #196611); if (p0.new) jump:t pass    define
|
| H A D | test_vspliceb.S
|    23  p0 = cmp.eq(r4, #-1); if (p0.new) jump:t test2    define
|    29  p0 = cmp.eq(r5, #255); if (p0.new) jump:t pass    define
|
| H A D | test_vavgw.S
|    23  p0 = cmp.eq(r0, #2); if (p0.new) jump:t test2    define
|    29  p0 = cmp.eq(r1, #2); if (p0.new) jump:t pass    define
|
| H A D | test_lsr.S
|    18  p0 = cmp.eq(r0, #0x28); if (p0.new) jump:t test2    define
|    34  p0 = cmp.eq(r0, #0x5); if (p0.new) jump:t pass    define
|
| H A D | hvx_misc.h
|   118  void *p0 = buffer0;                              \
|   122  VEC_OP2(ASM, EL, p0, p1, pout);                  \
|   123  p0 += sizeof(MMVector);                          \
|   154  void *p0 = buffer0;                              \
|   159  PRED_OP2(ASM, p0, p1, pout, INV);                \
|   160  p0 += sizeof(MMVector);                          \
|   166  bool p0 = (buffer0[i].b[j] > THRESHOLD);         \
|   169  expect[i].b[j] = (p0 OP !p1) ? 0xff : 0x00;      \
|   171  expect[i].b[j] = (p0 OP p1) ? 0xff : 0x00;       \
|
| H A D | test_clobber.S
|    26  p0 = cmp.eq(r16, #47)    define
|    27  p0 = cmp.eq(r17, #155); if (p0.new) jump:t pass    define
|
| H A D | test_packet.S
|    26  p0 = cmp.eq(r3, #4)    define
|    27  p0 = cmp.eq(r0, #10); if (p0.new) jump:t pass    define
|
| /openbmc/u-boot/arch/nds32/cpu/n1213/ |
| H A D | start.S
|   321  andi $p0, $t0, ICAC_MEM_KBF_ISZ
|   323  ! if $p0=0, then no I CAC existed
|   324  beqz $p0, end_flush_icache
|   326  ! get $p0 the index of I$ block
|   327  srli $p0, $p0, 6
|   330  addi $t1, $p0, 2
|   356  andi $p0, $t0, DCAC_MEM_KBF_DSZ
|   358  ! if $p0=0, then no D CAC existed
|   359  beqz $p0, end_flush_dcache
|   361  ! get $p0 the index of D$ block
|   [all …]
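For reference, the start.S hits are the N1213 cache-flush loops: they mask a size field out of the cache configuration register, bail out if it is zero (no cache present), and otherwise derive loop parameters from it. Below is a rough C rendering of just the I-cache field extraction; the field position (bits 8:6, mask 0x7 << 6) and the meaning of the +2 adjustment are assumptions read off the andi/srli/addi sequence shown above, not taken from the u-boot headers.

#include <stdio.h>

/* Assumed layout of the ISZ field in the I-cache config register. */
#define ICAC_MEM_KBF_ISZ   (0x7U << 6)

/* Mirrors: andi $p0, $t0, ICAC_MEM_KBF_ISZ ; beqz ... ; srli $p0, $p0, 6 ; addi $t1, $p0, 2 */
static unsigned icache_line_shift(unsigned icm_cfg)
{
    unsigned isz = (icm_cfg & ICAC_MEM_KBF_ISZ) >> 6;   /* andi + srli */
    if (isz == 0) {
        return 0;               /* no I-cache present: skip the flush */
    }
    return isz + 2;             /* addi: presumably log2 of the cache line size */
}

int main(void)
{
    /* Example: a config value whose ISZ field holds 3. */
    printf("line shift = %u\n", icache_line_shift(3u << 6));
    return 0;
}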
|