author	Doug Thompson <dougthompson@xmission.com>	2007-07-26 10:41:15 -0700
committer	Linus Torvalds <torvalds@woody.linux-foundation.org>	2007-07-26 11:35:18 -0700
commit	39c29657fcf6060d71e04f1e52e5bb4b2999644f (patch)
tree	c8845c9bdea19716b28c9439ad39d5e69dea72b1	/include/asm-mips/edac.h
parent	d4c1465b7de9686c4c5aa533b15c09ab014aab3a (diff)
include/asm-mips: add missing edac.h file
EDAC has a foundation to perform software memory scrubbing, but it requires a per-architecture function, atomic_scrub(), to perform the atomic update operation. On x86 this is done with a "lock; add [addr], 0" in the file asm-x86/edac.h. This patch provides the MIPS arch with that atomic function, atomic_scrub(), in asm-mips/edac.h.

Cc: Alan Cox <alan@lxorguk.ukuu.org.uk>
Cc: Ralf Baechle <ralf@linux-mips.org>
Signed-off-by: Doug Thompson <dougthompson@xmission.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
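For context, the x86 helper the message refers to scrubs by issuing a locked "add $0" on each word of the buffer. Below is a minimal sketch in that style (an illustration only, not a verbatim copy of the asm-x86/edac.h of that era; it assumes the kernel's u32 type from <linux/types.h>):

static inline void atomic_scrub(void *va, u32 size)
{
	u32 i, *virt_addr = va;

	/*
	 * A locked "addl $0" performs an atomic read-modify-write of each
	 * word without changing its value, so corrected (scrubbed) data is
	 * written back to memory.
	 */
	for (i = 0; i < size / 4; i++, virt_addr++)
		asm volatile("lock; addl $0, %0" : "+m" (*virt_addr));
}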
Diffstat (limited to 'include/asm-mips/edac.h')
-rw-r--r--	include/asm-mips/edac.h	35
1 file changed, 35 insertions(+), 0 deletions(-)
diff --git a/include/asm-mips/edac.h b/include/asm-mips/edac.h
new file mode 100644
index 0000000..83719ee
--- /dev/null
+++ b/include/asm-mips/edac.h
@@ -0,0 +1,35 @@
+#ifndef ASM_EDAC_H
+#define ASM_EDAC_H
+
+/* ECC atomic, DMA, SMP and interrupt safe scrub function */
+
+static inline void atomic_scrub(void *va, u32 size)
+{
+ unsigned long *virt_addr = va;
+ unsigned long temp;
+ u32 i;
+
+ for (i = 0; i < size / sizeof(unsigned long); i++, virt_addr++) {
+
+ /*
+ * Very carefully read and write to memory atomically
+ * so we are interrupt, DMA and SMP safe.
+ *
+ * Intel: asm("lock; addl $0, %0"::"m"(*virt_addr));
+ */
+
+ __asm__ __volatile__ (
+ " .set mips3 \n"
+ "1: ll %0, %1 # atomic_add \n"
+ " ll %0, %1 # atomic_add \n"
+ " addu %0, $0 \n"
+ " sc %0, %1 \n"
+ " beqz %0, 1b \n"
+ " .set mips0 \n"
+ : "=&r" (temp), "=m" (*virt_addr)
+ : "m" (*virt_addr));
+
+ }
+}
+
+#endif
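The EDAC core calls this per-architecture hook when it software-scrubs a page that reported a correctable error. A rough sketch of such a caller follows (simplified and illustrative: the function name scrub_error_page is hypothetical, and it assumes the km_type-based kmap_atomic()/kunmap_atomic() interface plus <linux/mm.h> and <linux/highmem.h> as found in kernels of that era):

static void scrub_error_page(unsigned long pfn, unsigned long offset, u32 size)
{
	void *virt;

	/* The reported frame may not be RAM we manage; ignore it then. */
	if (!pfn_valid(pfn))
		return;

	/* Map the page and rewrite the affected words in place; the atomic
	 * read-modify-write in atomic_scrub() makes the memory controller
	 * store back corrected data. */
	virt = kmap_atomic(pfn_to_page(pfn), KM_BOUNCE_READ);
	atomic_scrub(virt + offset, size);
	kunmap_atomic(virt, KM_BOUNCE_READ);
}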