#define _GNU_SOURCE 1

#include <assert.h>
#include <stdlib.h>
#include <signal.h>
#include <endian.h>

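/*
 * Test data: 32 recognizable byte values, 16-byte aligned so that
 * any offset from x has a known alignment.
 */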
char x[32] __attribute__((aligned(16))) = {
    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
    0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
    0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
    0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
};
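/*
 * p is 15 bytes past a 16-byte boundary, so any load wider than one
 * byte through it is misaligned.  volatile keeps the compiler from
 * reasoning about, or folding away, the access.
 */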
void * volatile p = (void *)&x + 15;

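/*
 * SIGBUS handler: verify the fault is an alignment fault at exactly
 * the address we accessed, then report success.
 */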
void sigbus(int sig, siginfo_t *info, void *uc)
{
    assert(sig == SIGBUS);
    assert(info->si_signo == SIGBUS);
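    /* BUS_ADRALN (alignment fault) is not defined on every platform. */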
#ifdef BUS_ADRALN
    assert(info->si_code == BUS_ADRALN);
#endif
    assert(info->si_addr == p);
    exit(EXIT_SUCCESS);
}

int main()
{
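    /*
     * Install the handler with SA_SIGINFO so that si_code and si_addr
     * are filled in for the checks above.
     */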
    struct sigaction sa = {
        .sa_sigaction = sigbus,
        .sa_flags = SA_SIGINFO
    };
    int allow_fail = 0;
    int tmp;

    tmp = sigaction(SIGBUS, &sa, NULL);
    assert(tmp == 0);

    /*
     * Select an operation that's likely to enforce alignment.
     * On many guests that support unaligned accesses by default,
     * this is often an atomic operation.
     */
#if defined(__aarch64__)
    asm volatile("ldxr %w0,[%1]" : "=r"(tmp) : "r"(p) : "memory");
#elif defined(__alpha__)
    asm volatile("ldl_l %0,0(%1)" : "=r"(tmp) : "r"(p) : "memory");
#elif defined(__arm__)
    asm volatile("ldrex %0,[%1]" : "=r"(tmp) : "r"(p) : "memory");
#elif defined(__powerpc__)
    asm volatile("lwarx %0,0,%1" : "=r"(tmp) : "r"(p) : "memory");
#elif defined(__riscv_atomic)
    asm volatile("lr.w %0,(%1)" : "=r"(tmp) : "r"(p) : "memory");
#else
    /* No insn known to fault unaligned -- try for a straight load. */
    allow_fail = 1;
    tmp = *(volatile int *)p;
#endif

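    /*
     * An alignment-enforcing insn above should have raised SIGBUS and
     * never returned here; only the plain-load fallback is permitted
     * to fall through.
     */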
    assert(allow_fail);

    /*
     * We didn't see a signal.
     * We might as well validate the unaligned load worked.
     */
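    /* p == &x[15], so the load reads bytes 0x10, 0x11, 0x12, 0x13. */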
    if (BYTE_ORDER == LITTLE_ENDIAN) {
        assert(tmp == 0x13121110);
    } else {
        assert(tmp == 0x10111213);
    }
    return EXIT_SUCCESS;
}