Loading...
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

/*
 * L1 cache geometry and cache-maintenance helpers for powerpc.
 *
 * The cache-line shift is selected at build time from the platform
 * Kconfig symbol; everything below derives from L1_CACHE_SHIFT.
 */

/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx)
#define L1_CACHE_SHIFT		4	/* 16-byte lines */
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6	/* 64-byte lines */
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7	/* 128-byte lines */
#else
#define L1_CACHE_SHIFT		5	/* 32-byte lines */
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7	/* 128-byte lines */
#define IFETCH_ALIGN_SHIFT	4	/* POWER8,9 */
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES		L1_CACHE_BYTES
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)

#ifdef CONFIG_NOT_COHERENT_CACHE
/* DMA buffers must be cache-line aligned when the cache is not coherent */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
#endif

#if !defined(__ASSEMBLY__)
#ifdef CONFIG_PPC64

/*
 * Geometry of one cache level, as discovered at boot.
 * block_size is the cache-block (maintenance-instruction) granule;
 * line_size may differ from it on some levels.
 */
struct ppc_cache_info {
	u32 size;
	u32 line_size;
	u32 block_size;	/* L1 only */
	u32 log_block_size;
	u32 blocks_per_page;
	u32 sets;
	u32 assoc;
};

/* Per-level cache descriptions; filled in elsewhere (defined in arch code) */
struct ppc64_caches {
	struct ppc_cache_info l1d;
	struct ppc_cache_info l1i;
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

extern struct ppc64_caches ppc64_caches;

/*
 * On ppc64 the L1 block geometry is read from the runtime-probed
 * ppc64_caches table rather than the compile-time constants above.
 */
static inline u32 l1_dcache_shift(void)
{
	return ppc64_caches.l1d.log_block_size;
}

static inline u32 l1_dcache_bytes(void)
{
	return ppc64_caches.l1d.block_size;
}

static inline u32 l1_icache_shift(void)
{
	return ppc64_caches.l1i.log_block_size;
}

static inline u32 l1_icache_bytes(void)
{
	return ppc64_caches.l1i.block_size;
}
#else
/* 32-bit: L1 geometry is fixed at build time by the Kconfig selection */
static inline u32 l1_dcache_shift(void)
{
	return L1_CACHE_SHIFT;
}

static inline u32 l1_dcache_bytes(void)
{
	return L1_CACHE_BYTES;
}

static inline u32 l1_icache_shift(void)
{
	return L1_CACHE_SHIFT;
}

static inline u32 l1_icache_bytes(void)
{
	return L1_CACHE_BYTES;
}
#endif

/* Place rarely-written data in its own section to limit cache-line bouncing */
#define __read_mostly __section(".data..read_mostly")

#ifdef CONFIG_PPC_BOOK3S_32
/* L2CR/L3CR control-register accessors, implemented in assembly elsewhere */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* No L2CR/L3CR on these platforms: reads yield 0, writes are no-ops */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif

/*
 * Single-instruction cache-block operations. Each acts on the cache
 * block containing @addr (the CPU ignores the low-order address bits).
 * The "memory" clobber orders them against surrounding memory accesses.
 */

/* dcbz: zero the data cache block containing addr */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}

/* dcbi: invalidate the data cache block (contents discarded) */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}

/* dcbf: flush (write back and invalidate) the data cache block */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}

/* dcbst: store (write back without invalidating) the data cache block */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}

/* icbi: invalidate the instruction cache block */
static inline void icbi(void *addr)
{
	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
}

/* iccci: instruction cache invalidate (4xx-style; operand mostly unused) */
static inline void iccci(void *addr)
{
	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
}

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */