/*
 * Generic cache management functions. Everything is arch-specific,
 * but this header exists to make sure the defines/functions can be
 * used in a generic way.
 *
 * 2000-11-13 Arjan van de Ven <arjan@fenrus.demon.nl>
 *
 */
| 9 |
|
|---|
| 10 | #ifndef _LINUX_PREFETCH_H
|
|---|
| 11 | #define _LINUX_PREFETCH_H
|
|---|
| 12 |
|
|---|
| 13 | #include <linux/types.h>
|
|---|
| 14 | #include <asm/processor.h>
|
|---|
| 15 | #include <asm/cache.h>
|
|---|
| 16 |
|
|---|
/*
	prefetch(x) attempts to pre-emptively get the memory pointed to
	by address "x" into the CPU L1 cache.
	prefetch(x) should not cause any kind of exception, prefetch(0) is
	specifically ok.

	prefetch() should be defined by the architecture, if not, the
	#define below provides a no-op define.

	There are 3 prefetch() macros:

	prefetch(x)  	- prefetches the cacheline at "x" for read
	prefetchw(x)	- prefetches the cacheline at "x" for write
	spin_lock_prefetch(x) - prefetches the spinlock *x for taking

	there is also PREFETCH_STRIDE which is the architecture-preferred
	"lookahead" size for prefetching streamed operations.

*/
| 36 |
|
|---|
/*
 * These cannot be do{}while(0) macros. See the mental gymnastics in
 * the loop macro.
 */
| 41 |
|
|---|
#ifndef ARCH_HAS_PREFETCH
/* Architecture provides no read prefetch; fall back to a no-op. */
static inline void prefetch(const void *x)
{
}
#endif
|
|---|
| 45 |
|
|---|
#ifndef ARCH_HAS_PREFETCHW
/* Architecture provides no write prefetch; fall back to a no-op. */
static inline void prefetchw(const void *x)
{
}
#endif
|
|---|
| 49 |
|
|---|
#ifndef ARCH_HAS_SPINLOCK_PREFETCH
/*
 * Default spinlock prefetch: prefetch the lock for write, since
 * taking the lock will store to it.
 */
#define spin_lock_prefetch(x) prefetchw(x)
#endif
|
|---|
| 53 |
|
|---|
#ifndef PREFETCH_STRIDE
/*
 * Default lookahead distance for streaming prefetch loops: four L1
 * cache lines ahead (L1_CACHE_BYTES is provided by <asm/cache.h>).
 */
#define PREFETCH_STRIDE (4*L1_CACHE_BYTES)
#endif
|
|---|
| 57 |
|
|---|
/**
 * prefetch_range - prefetch a range of memory into the CPU cache
 * @addr: start of the range
 * @len:  length of the range in bytes
 *
 * Issues a read prefetch for every PREFETCH_STRIDE-sized step of the
 * range.  Compiles to a no-op on architectures that do not define
 * ARCH_HAS_PREFETCH.  len == 0 is safe (the loop body never runs).
 */
static inline void prefetch_range(void *addr, size_t len)
{
#ifdef ARCH_HAS_PREFETCH
	char *cp;
	/*
	 * Cast before the arithmetic: pointer arithmetic on void * is a
	 * GNU extension, not ISO C.
	 */
	char *end = (char *)addr + len;

	for (cp = addr; cp < end; cp += PREFETCH_STRIDE)
		prefetch(cp);
#endif
}
|
|---|
| 68 |
|
|---|
| 69 | #endif
|
|---|