/*
 * arch/arm/include/asm/cache.h
 */
#ifndef __ASMARM_CACHE_H
#define __ASMARM_CACHE_H

#define L1_CACHE_SHIFT CONFIG_ARM_L1_CACHE_SHIFT
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
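
/*
 * Worked example (editorial illustration, not part of the original
 * header): CONFIG_ARM_L1_CACHE_SHIFT comes from Kconfig and is
 * commonly 6, in which case L1_CACHE_BYTES evaluates to
 * (1 << 6) == 64, i.e. a 64-byte cache line.
 */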

/*
 * Memory returned by kmalloc() may be used for DMA, so we must make
 * sure that all such allocations are cache aligned. Otherwise,
 * unrelated code may cause parts of the buffer to be read into the
 * cache before the transfer is done, causing old data to be seen by
 * the CPU.
 */
#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
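
/*
 * Illustrative sketch, assuming a hypothetical driver (the names below
 * are not from this header): kmalloc() honours ARCH_DMA_MINALIGN, so a
 * heap-allocated buffer never shares a cache line with unrelated data.
 * A buffer embedded inside a struct needs the same care by hand:
 *
 *	struct example_dev {
 *		spinlock_t lock;
 *		u8 rx_buf[64] __aligned(ARCH_DMA_MINALIGN);
 *	};
 *
 * Without the __aligned() attribute, a CPU access to "lock" could pull
 * part of rx_buf into the cache mid-transfer, exactly the stale-data
 * hazard the comment above describes.
 */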

/*
 * With EABI on ARMv5 and above we must have 64-bit aligned slab pointers.
 */
#if defined(CONFIG_AEABI) && (__LINUX_ARM_ARCH__ >= 5)
#define ARCH_SLAB_MINALIGN 8
#endif
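
/*
 * Editorial example (an assumption, not original text): the ARM EABI
 * gives 64-bit types an 8-byte alignment requirement, so an object
 * such as
 *
 *	struct stats {
 *		u64 packets;
 *	};
 *	struct stats *s = kmalloc(sizeof(*s), GFP_KERNEL);
 *
 * must come back 8-byte aligned for "s->packets" to be accessed safely
 * (e.g. via ldrd/strd on ARMv5TE+); ARCH_SLAB_MINALIGN == 8 guarantees
 * that.
 */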

#endif