/*
 * PPC EDAC common defs
 *
 * Author: Dave Jiang <djiang@mvista.com>
 *
 * 2007 (c) MontaVista Software, Inc. This file is licensed under
 * the terms of the GNU General Public License version 2. This program
 * is licensed "as is" without any warranty of any kind, whether express
 * or implied.
 */
#ifndef ASM_EDAC_H
#define ASM_EDAC_H
/*
 * ECC atomic, DMA, SMP and interrupt safe scrub function.
 * Implements the per arch edac_atomic_scrub() that EDAC use for software
 * ECC scrubbing. It reads memory and then writes back the original
 * value, allowing the hardware to detect and correct memory errors.
 */
/*
 * Scrub @size bytes starting at @va, one 32-bit word at a time.
 *
 * Each word is loaded with lwarx and immediately stored back with
 * stwcx., retrying until the reservation holds.  Rewriting the value
 * through an atomic reservation pair keeps the scrub safe against
 * interrupts, DMA and other CPUs while giving the memory controller
 * a chance to correct any single-bit ECC error in the word.
 *
 * NOTE(review): assumes @va is 32-bit aligned and @size is a multiple
 * of 4 (any tail bytes beyond the last full word are skipped).
 */
static __inline__ void edac_atomic_scrub(void *va, u32 size)
{
	unsigned int *word = va;
	unsigned int *end = word + size / sizeof(*word);
	unsigned int scratch;

	while (word < end) {
		/* Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 */
		__asm__ __volatile__ ("\n\
					1: lwarx	%0,0,%1\n\
					   stwcx.	%0,0,%1\n\
					   bne-	1b\n\
					   isync"
					: "=&r"(scratch)
					: "r"(word)
					: "cr0", "memory");
		word++;
	}
}

#endif