#ifndef ASM_EDAC_H
#define ASM_EDAC_H

#include <asm/compiler.h>

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static inline void atomic_scrub(void *va, u32 size)
{
	unsigned long *virt_addr = va;
	unsigned long temp;
	u32 i;

	for (i = 0; i < size / sizeof(unsigned long); i++) {
		/*
		 * Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 *
		 * Intel: asm("lock; addl $0, %0"::"m"(*virt_addr));
		 */

		__asm__ __volatile__ (
		"	.set	mips2					\n"
		"1:	ll	%0, %1		# atomic_scrub		\n"
		"	addu	%0, $0					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=" GCC_OFF12_ASM() (*virt_addr)
		: GCC_OFF12_ASM() (*virt_addr));

		virt_addr++;
	}
}

#endif
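
/*
 * Usage sketch (not part of the original header; kept out of the build with
 * "#if 0"): an EDAC driver that is told about a correctable error would
 * typically map the affected page and pass the region to atomic_scrub(), so
 * every word is re-written in place and the memory controller regenerates
 * its ECC check bits.  The helper name example_scrub_page() and its
 * parameters below are hypothetical, chosen only for illustration; the EDAC
 * core's edac_mc_scrub_block() uses a similar kmap_atomic()/atomic_scrub()
 * sequence.
 */
#if 0
#include <linux/highmem.h>
#include <linux/mm.h>

static void example_scrub_page(struct page *page, u32 offset, u32 size)
{
	void *va = kmap_atomic(page);		/* temporarily map the page */

	/* Atomically read back and re-write each word in the region. */
	atomic_scrub(va + offset, size);

	kunmap_atomic(va);
}
#endif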