cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

hibernate_asm_64.S (3828B)


/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Hibernation support for x86-64
 *
 * Copyright 2007 Rafael J. Wysocki <rjw@sisk.pl>
 * Copyright 2005 Andi Kleen <ak@suse.de>
 * Copyright 2004 Pavel Machek <pavel@suse.cz>
 *
 * swsusp_arch_resume must not use any stack or any nonlocal variables while
 * copying pages:
 *
 * It's rewriting one kernel image with another. What is a stack page in the
 * "old" image could very well be a data page in the "new" image, and
 * overwriting your own stack from under you is a bad idea.
 */

	.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/page_types.h>
#include <asm/asm-offsets.h>
#include <asm/processor-flags.h>
#include <asm/frame.h>
#include <asm/nospec-branch.h>

	 /* code below belongs to the image kernel */
	.align PAGE_SIZE
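/*
 * restore_registers - final stage of resume, already running image-kernel
 * code. Entered from core_restore_code via restore_jump_address, with %r9
 * holding the image kernel's saved CR3; restores the register state that
 * swsusp_arch_suspend() captured in saved_context.
 */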
SYM_FUNC_START(restore_registers)
	/* go back to the original page tables */
	movq    %r9, %cr3

	/* Flush TLB, including "global" things (vmalloc) */
	movq	mmu_cr4_features(%rip), %rax
	movq	%rax, %rdx
	andq	$~(X86_CR4_PGE), %rdx
	movq	%rdx, %cr4;  # turn off PGE
	movq	%cr3, %rcx;  # flush TLB
	movq	%rcx, %cr3
	movq	%rax, %cr4;  # turn PGE back on

	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
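	/* reload the general-purpose registers and flags saved by swsusp_arch_suspend() */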
     43	movq	pt_regs_sp(%rax), %rsp
     44	movq	pt_regs_bp(%rax), %rbp
     45	movq	pt_regs_si(%rax), %rsi
     46	movq	pt_regs_di(%rax), %rdi
     47	movq	pt_regs_bx(%rax), %rbx
     48	movq	pt_regs_cx(%rax), %rcx
     49	movq	pt_regs_dx(%rax), %rdx
     50	movq	pt_regs_r8(%rax), %r8
     51	movq	pt_regs_r9(%rax), %r9
     52	movq	pt_regs_r10(%rax), %r10
     53	movq	pt_regs_r11(%rax), %r11
     54	movq	pt_regs_r12(%rax), %r12
     55	movq	pt_regs_r13(%rax), %r13
     56	movq	pt_regs_r14(%rax), %r14
     57	movq	pt_regs_r15(%rax), %r15
     58	pushq	pt_regs_flags(%rax)
     59	popfq
     60
     61	/* Saved in save_processor_state. */
     62	lgdt	saved_context_gdt_desc(%rax)
     63
     64	xorl	%eax, %eax
     65
     66	/* tell the hibernation core that we've just restored the memory */
     67	movq	%rax, in_suspend(%rip)
     68
     69	RET
     70SYM_FUNC_END(restore_registers)
     71
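/*
 * swsusp_arch_suspend - save the current register state and CR3, then call
 * swsusp_save() to create the in-memory snapshot. Returns swsusp_save()'s
 * result.
 */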
SYM_FUNC_START(swsusp_arch_suspend)
	movq	$saved_context, %rax
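	/* store the general-purpose registers and flags into saved_context */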
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)

	/* save cr3 */
	movq	%cr3, %rax
	movq	%rax, restore_cr3(%rip)
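	/* restore_cr3 is read back by restore_image during resume */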

	FRAME_BEGIN
	call swsusp_save
	FRAME_END
	RET
SYM_FUNC_END(swsusp_arch_suspend)

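/*
 * restore_image - load the values that core_restore_code and
 * restore_registers expect into registers, then jump to the relocated copy
 * of core_restore_code (the original copy may itself be overwritten while
 * the image pages are restored).
 */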
SYM_FUNC_START(restore_image)
	/* prepare to jump to the image kernel */
	movq	restore_jump_address(%rip), %r8
	movq	restore_cr3(%rip), %r9

	/* prepare to switch to temporary page tables */
	movq	temp_pgt(%rip), %rax
	movq	mmu_cr4_features(%rip), %rbx

	/* prepare to copy image data to their original locations */
	movq	restore_pblist(%rip), %rdx

	/* jump to relocated restore code */
	movq	relocated_restore_code(%rip), %rcx
	ANNOTATE_RETPOLINE_SAFE
	jmpq	*%rcx
SYM_FUNC_END(restore_image)

	/* code below has been relocated to a safe page */
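/*
 * core_restore_code - runs from the safe page. Register interface set up by
 * restore_image: %rax = temporary page tables, %rbx = mmu_cr4_features,
 * %rdx = restore_pblist, %r8 = restore_jump_address, %r9 = restore_cr3.
 */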
SYM_FUNC_START(core_restore_code)
	/* switch to temporary page tables */
	movq	%rax, %cr3
	/* flush TLB */
	movq	%rbx, %rcx
	andq	$~(X86_CR4_PGE), %rcx
	movq	%rcx, %cr4;  # turn off PGE
	movq	%cr3, %rcx;  # flush TLB
	movq	%rcx, %cr3;
	movq	%rbx, %cr4;  # turn PGE back on
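
	/* walk restore_pblist, copying each saved page back to its original location */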
.Lloop:
	testq	%rdx, %rdx
	jz	.Ldone

	/* get addresses from the pbe and copy the page */
	movq	pbe_address(%rdx), %rsi
	movq	pbe_orig_address(%rdx), %rdi
	movq	$(PAGE_SIZE >> 3), %rcx
	rep
	movsq

	/* progress to the next pbe */
	movq	pbe_next(%rdx), %rdx
	jmp	.Lloop

.Ldone:
	/* jump to the restore_registers address from the image header */
	ANNOTATE_RETPOLINE_SAFE
	jmpq	*%r8
SYM_FUNC_END(core_restore_code)
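
The pbe_address, pbe_orig_address and pbe_next offsets used in the copy loop
are emitted by asm-offsets from the kernel's page backup entry structure. As
a point of reference (not part of this file), struct pbe from
include/linux/suspend.h is roughly:

struct pbe {
	void *address;		/* address of the copy */
	void *orig_address;	/* original address of a page */
	struct pbe *next;
};

restore_pblist is the head of a NULL-terminated singly linked list of these
entries, which is what the testq/jz termination check in .Lloop relies on.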