/* SPDX-License-Identifier: GPL-2.0 */
/* spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 */

#ifndef _SPARC64_SPITFIRE_H
#define _SPARC64_SPITFIRE_H

#ifdef CONFIG_SPARC64

#include <asm/asi.h>

/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is, there is a distinct and unique copy of
 * each of these registers for each TLB.
 */
#define TSB_TAG_TARGET		0x0000000000000000 /* All chips			*/
#define TLB_SFSR		0x0000000000000018 /* All chips			*/
#define TSB_REG			0x0000000000000028 /* All chips			*/
#define TLB_TAG_ACCESS		0x0000000000000030 /* All chips			*/
#define VIRT_WATCHPOINT		0x0000000000000038 /* All chips			*/
#define PHYS_WATCHPOINT		0x0000000000000040 /* All chips			*/
#define TSB_EXTENSION_P		0x0000000000000048 /* Ultra-III and later		*/
#define TSB_EXTENSION_S		0x0000000000000050 /* Ultra-III and later, D-TLB only	*/
#define TSB_EXTENSION_N		0x0000000000000058 /* Ultra-III and later		*/
#define TLB_TAG_ACCESS_EXT	0x0000000000000060 /* Ultra-III+ and later		*/

/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 */
#define PRIMARY_CONTEXT		0x0000000000000008
#define SECONDARY_CONTEXT	0x0000000000000010
#define DMMU_SFAR		0x0000000000000020
#define VIRT_WATCHPOINT		0x0000000000000038
#define PHYS_WATCHPOINT		0x0000000000000040

#define SPITFIRE_HIGHEST_LOCKED_TLBENT	(64 - 1)
#define CHEETAH_HIGHEST_LOCKED_TLBENT	(16 - 1)

#define L1DCACHE_SIZE		0x4000

#define SUN4V_CHIP_INVALID	0x00
#define SUN4V_CHIP_NIAGARA1	0x01
#define SUN4V_CHIP_NIAGARA2	0x02
#define SUN4V_CHIP_NIAGARA3	0x03
#define SUN4V_CHIP_NIAGARA4	0x04
#define SUN4V_CHIP_NIAGARA5	0x05
#define SUN4V_CHIP_SPARC_M6	0x06
#define SUN4V_CHIP_SPARC_M7	0x07
#define SUN4V_CHIP_SPARC_M8	0x08
#define SUN4V_CHIP_SPARC64X	0x8a
#define SUN4V_CHIP_SPARC_SN	0x8b
#define SUN4V_CHIP_UNKNOWN	0xff

/*
 * The following CPU_ID_xxx constants are used
 * to identify the CPU type in the setup phase
 * (see head_64.S)
 */
#define CPU_ID_NIAGARA1		('1')
#define CPU_ID_NIAGARA2		('2')
#define CPU_ID_NIAGARA3		('3')
#define CPU_ID_NIAGARA4		('4')
#define CPU_ID_NIAGARA5		('5')
#define CPU_ID_M6		('6')
#define CPU_ID_M7		('7')
#define CPU_ID_M8		('8')
#define CPU_ID_SONOMA1		('N')

#ifndef __ASSEMBLY__

enum ultra_tlb_layout {
	spitfire = 0,
	cheetah = 1,
	cheetah_plus = 2,
	hypervisor = 3,
};

extern enum ultra_tlb_layout tlb_type;

extern int sun4v_chip_type;

extern int cheetah_pcache_forced_on;
void cheetah_enable_pcache(void);

#define sparc64_highest_locked_tlbent()	\
	(tlb_type == spitfire ? \
	 SPITFIRE_HIGHEST_LOCKED_TLBENT : \
	 CHEETAH_HIGHEST_LOCKED_TLBENT)

extern int num_kernel_image_mappings;

/* The data cache is write-through, so this just invalidates the
 * specified line.
 */
static inline void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
}
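/* Illustrative sketch (not part of the original header): because the D-cache
 * is write-through, the whole L1 D-cache can be invalidated simply by
 * clearing every tag.  The helper name and the 32-byte line size used for
 * stepping are assumptions made for this example only.
 */
static inline void example_spitfire_invalidate_l1d(void)
{
	unsigned long line;

	/* Walk every line of the 16K L1 D-cache and clear its tag. */
	for (line = 0; line < L1DCACHE_SIZE; line += 32)
		spitfire_put_dcache_tag(line, 0x0UL);
}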
/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline.  It is possible for a line to
 * get flushed but stale instructions to still be in the pipeline;
 * a flush instruction (to any address) is sufficient to handle
 * this issue after the line is invalidated.
 */
static inline void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
}

static inline unsigned long spitfire_get_dtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}

static inline unsigned long spitfire_get_dtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
	return tag;
}

static inline void spitfire_put_dtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_DTLB_DATA_ACCESS));
}

static inline unsigned long spitfire_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}

static inline unsigned long spitfire_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
	return tag;
}

static inline void spitfire_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

static inline void spitfire_flush_dtlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
}

static inline void spitfire_flush_itlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
}

/* Cheetah has "all non-locked" tlb flushes. */
static inline void cheetah_flush_dtlb_all(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_DMMU_DEMAP));
}

static inline void cheetah_flush_itlb_all(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_IMMU_DEMAP));
}

/* Cheetah has a 4-tlb layout so direct access is a bit different.
 * The first two TLBs are fully associative, hold 16 entries, and are
 * used only for locked and >8K sized translations.  One exists for
 * data accesses and one for instruction accesses.
 *
 * The third TLB is for data accesses to 8K non-locked translations, is
 * 2 way associative, and holds 512 entries.  The fourth TLB is for
 * instruction accesses to 8K non-locked translations, is 2 way
 * associative, and holds 128 entries.
 *
 * Cheetah has some bug where bogus data can be returned from
 * ASI_{D,I}TLB_DATA_ACCESS loads; doing the load twice fixes
 * the problem for me.
 *				-DaveM
 */
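/* Illustrative sketch (hypothetical helper, not part of the original header):
 * the Cheetah diagnostic accessors below select a TLB and an entry by
 * composing the ASI virtual address as (tlb << 16) | (entry << 3), where
 * "tlb" picks one of the TLBs within the D-MMU or I-MMU and "entry" is the
 * entry index within that TLB.
 */
static inline unsigned long example_cheetah_tlb_diag_addr(int tlb, int entry)
{
	/* TLB select above bit 16, 8-byte-scaled entry index in the low bits. */
	return ((unsigned long) tlb << 16) | ((unsigned long) entry << 3);
}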
static inline unsigned long cheetah_get_ldtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_litlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_ldtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_TAG_READ));

	return tag;
}

static inline unsigned long cheetah_get_litlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_TAG_READ));

	return tag;
}

static inline void cheetah_put_ldtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}

static inline void cheetah_put_litlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

static inline unsigned long cheetah_get_dtlb_data(int entry, int tlb)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
	return tag;
}

static inline void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((tlb << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}

static inline unsigned long cheetah_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
	return tag;
}

static inline void cheetah_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

#endif /* !(__ASSEMBLY__) */
#endif /* CONFIG_SPARC64 */
#endif /* !(_SPARC64_SPITFIRE_H) */
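/* Usage note (illustrative, not part of the original header): kernel code
 * typically dispatches on tlb_type before using the diagnostic accessors
 * above, along the lines of:
 *
 *	if (tlb_type == spitfire)
 *		data = spitfire_get_dtlb_data(entry);
 *	else if (tlb_type == cheetah || tlb_type == cheetah_plus)
 *		data = cheetah_get_dtlb_data(entry, tlb);
 *
 * On hypervisor (sun4v) chips the TLBs are managed through hypervisor calls
 * rather than these direct ASI accesses.
 */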