mirror of
https://github.com/followmsi/android_kernel_google_msm.git
synced 2024-11-06 23:17:41 +00:00
x86: unify include/asm/edac_32/64.h
Same file, except for whitespace, comment formatting, and the pointer type: the 32-bit version used `unsigned long *virt_addr = va;` while the 64-bit version used `unsigned int *virt_addr = va;`. Both can be safely replaced by `u32 i, *virt_addr = va;`. Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
This commit is contained in:
parent
327c21bc3d
commit
1f7afb08a5
3 changed files with 17 additions and 40 deletions
|
@ -1,5 +1,18 @@
|
||||||
#ifdef CONFIG_X86_32
|
#ifndef _ASM_X86_EDAC_H
|
||||||
# include "edac_32.h"
|
#define _ASM_X86_EDAC_H
|
||||||
#else
|
|
||||||
# include "edac_64.h"
|
/* ECC atomic, DMA, SMP and interrupt safe scrub function */
|
||||||
|
|
||||||
|
static __inline__ void atomic_scrub(void *va, u32 size)
|
||||||
|
{
|
||||||
|
u32 i, *virt_addr = va;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Very carefully read and write to memory atomically so we
|
||||||
|
* are interrupt, DMA and SMP safe.
|
||||||
|
*/
|
||||||
|
for (i = 0; i < size / 4; i++, virt_addr++)
|
||||||
|
__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
|
||||||
|
}
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
|
@ -1,18 +0,0 @@
|
||||||
#ifndef ASM_EDAC_H
|
|
||||||
#define ASM_EDAC_H
|
|
||||||
|
|
||||||
/* ECC atomic, DMA, SMP and interrupt safe scrub function */
|
|
||||||
|
|
||||||
static __inline__ void atomic_scrub(void *va, u32 size)
|
|
||||||
{
|
|
||||||
unsigned long *virt_addr = va;
|
|
||||||
u32 i;
|
|
||||||
|
|
||||||
for (i = 0; i < size / 4; i++, virt_addr++)
|
|
||||||
/* Very carefully read and write to memory atomically
|
|
||||||
* so we are interrupt, DMA and SMP safe.
|
|
||||||
*/
|
|
||||||
__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
|
|
||||||
}
|
|
||||||
|
|
||||||
#endif
|
|
|
@ -1,18 +0,0 @@
|
||||||
#ifndef ASM_EDAC_H
|
|
||||||
#define ASM_EDAC_H
|
|
||||||
|
|
||||||
/* ECC atomic, DMA, SMP and interrupt safe scrub function */
|
|
||||||
|
|
||||||
static __inline__ void atomic_scrub(void *va, u32 size)
|
|
||||||
{
|
|
||||||
unsigned int *virt_addr = va;
|
|
||||||
u32 i;
|
|
||||||
|
|
||||||
for (i = 0; i < size / 4; i++, virt_addr++)
|
|
||||||
/* Very carefully read and write to memory atomically
|
|
||||||
* so we are interrupt, DMA and SMP safe.
|
|
||||||
*/
|
|
||||||
__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
|
|
||||||
}
|
|
||||||
|
|
||||||
#endif
|
|
Loading…
Reference in a new issue