cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

cpu_setup_6xx.S (11096B)


/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level CPU setup functions.
 *    Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 */

#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/feature-fixups.h>

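/*
 * Note on the entry convention: judging from the code below, each
 * __setup_cpu_* routine is entered with r3 holding the caller's address
 * offset (used by __init_fpu_registers to reach empty_zero_page) and r4
 * pointing at the current cpu_spec (see CPU_SPEC_FEATURES(r4) in
 * setup_750cx and setup_745x_specifics); the link register is saved in
 * r5 across the helper calls.
 */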
_GLOBAL(__setup_cpu_603)
	mflr	r5
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10		/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)

BEGIN_FTR_SECTION
	bl	__init_fpu_registers
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_604)
	mflr	r5
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750cx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750fx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7400)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7410)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_745x)
	mflr	r5
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr

/* Enable caches for 603's, 604, 750 & 7400 */
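/* HID0 is written twice on purpose: first with the flash-invalidate bits
 * set (ICFI always, DCI only if the D-cache was not already enabled),
 * then again without them so the invalidate bits are not left set while
 * the caches are running.
 */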
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches */
	sync
	isync
	blr

/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 7400 revisions <= 2.7 and 7410 rev 1.0 suffer from some
 * errata that we work around here.
 * Moto MPC710CE.pdf describes them; these are errata
 * #3, #4 and #5.
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple firmware only works
 * around #3, and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode, in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it is
 * not set by Apple's firmware at least).
 */
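/* Both entry points below mask the PVR down to its revision field:
 * setup_7400_workarounds branches to the shared MSSSR0 fixups at label 1
 * for revisions <= 2.7, while setup_7410_workarounds returns early unless
 * the revision is exactly 1.0 and otherwise falls through to the same
 * fixups.
 */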
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr

/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * and Dynamic Power Management (DPM); clear the speculative
 * access disable (SPD) and Instruction Cache Throttling (ICTC).
 */
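/* BTIC and DPM are set unconditionally first; the feature sections below
 * then toggle them back off (xori/xoris flips a bit that was just set) on
 * CPUs flagged with CPU_FTR_NO_BTIC or CPU_FTR_NO_DPM.
 */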
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 750cx specific
 * Looks like we have to disable the NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
setup_750cx:
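	/* The top nibble of HID1 (the PLL configuration readback) is rotated
	 * into the low bits; unless it is one of the suspect settings (7, 9
	 * or 11) we return, otherwise CPU_FTR_CAN_NAP is cleared in the
	 * cpu_spec feature word.
	 */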
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr
	lwz	r6,CPU_SPEC_FEATURES(r4)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
	blr

/* 750fx specific
 */
setup_750fx:
	blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * and Dynamic Power Management (DPM); clear the speculative
 * access disable (SPD).
 * Ensure our data cache instructions really operate.
 * The timebase has to be running or we wouldn't have made it here;
 * just ensure we don't disable it.
 * Clear Instruction Cache Throttling (ICTC).
 * Enable L2 HW prefetch.
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache set up by
	 * the firmware. If there is one, we disable the NAP capability,
	 * as it's known to be bogus on rev 2.1 and earlier.
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andis.	r0,r6,CPU_FTR_L3_DISABLE_NAP@h
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set...
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear...
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0

	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr

/*
 * Initialize the FPU registers. This is needed to work around an erratum
 * in some 750 CPUs where using a not-yet-initialized FPU register after
 * power-on reset may hang the CPU.
 */
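/*
 * Note: MSR[FP] is enabled just long enough to load all 32 FPRs from
 * empty_zero_page; r3 (apparently the address offset supplied by the
 * caller) is added to the address so this also works before the final
 * address translation is set up.
 */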
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10
	isync
	blr
_ASM_NOKPROBE_SYMBOL(__init_fpu_registers)


/* Definitions for the table used to save CPU state */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text

/* Called in normal context to back up CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, so we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_PPC_BOOK3S_32 cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f
	/* Back up 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Back up 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f
	/* Back up 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, back up HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr

/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings.
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, so we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f
	/* Restore 750FX specific registers:
	 * restore HID2 on rev 2.x and the PLL config, and switch
	 * to PLL 0 on all revisions.
	 */
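	/* The HID1 dance below first writes a copy with what looks like the
	 * PLL-select bit (bit 15) cleared, matching the "switch to PLL 0"
	 * note above, spins on the timebase for ~10000 ticks so the PLLs can
	 * stabilize, and only then writes back the saved HID1 value.
	 */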
	/* If rev 2.x, restore HID2 with the low-voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmplwi	cr0,r6,10000
	ble	3b
	/* Set up the final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr
_ASM_NOKPROBE_SYMBOL(__restore_cpu_setup)
