Arthur Heymans has uploaded this change for review. ( https://review.coreboot.org/c/coreboot/+/79261?usp=email )
Change subject: [NOTREADY]cpu/x86/64bit: Turn jumping to long mode into a macro ......................................................................
[NOTREADY]cpu/x86/64bit: Turn jumping to long mode into a macro
This makes it easier to reuse, e.g. if you want to do it twice in one assembly file. Also having the .code64 explicit instead of inside the included file provides more clarity.
Change-Id: Ida861338004187e4e714be41e17c8447fa4cf935 Signed-off-by: Arthur Heymans <arthur@aheymans.xyz> --- M src/cpu/intel/car/non-evict/cache_as_ram.S M src/cpu/intel/car/p4-netburst/cache_as_ram.S M src/cpu/qemu-x86/cache_as_ram_bootblock.S M src/cpu/x86/64bit/entry64.inc M src/cpu/x86/64bit/mode_switch.S M src/cpu/x86/sipi_vector.S M src/cpu/x86/smm/smm_stub.S M src/soc/intel/common/block/cpu/car/cache_as_ram.S 8 files changed, 34 insertions(+), 19 deletions(-)
git pull ssh://review.coreboot.org:29418/coreboot refs/changes/61/79261/1
diff --git a/src/cpu/intel/car/non-evict/cache_as_ram.S b/src/cpu/intel/car/non-evict/cache_as_ram.S index 76986ff..bf96c1a 100644 --- a/src/cpu/intel/car/non-evict/cache_as_ram.S +++ b/src/cpu/intel/car/non-evict/cache_as_ram.S @@ -4,6 +4,7 @@ #include <cpu/x86/mtrr.h> #include <cpu/x86/cache.h> #include <cpu/x86/post_code.h> +#include <cpu/x86/64bit/entry64.inc>
#define NoEvictMod_MSR 0x2e0 #define BBL_CR_CTL3_MSR 0x11e @@ -213,9 +214,9 @@ andl $0xfffffff0, %esp
#if ENV_X86_64 - - #include <cpu/x86/64bit/entry64.inc> - + setup_longmode 1f +.code64 +1: movd %mm2, %rdi shlq $32, %rdi movd %mm1, %rsi diff --git a/src/cpu/intel/car/p4-netburst/cache_as_ram.S b/src/cpu/intel/car/p4-netburst/cache_as_ram.S index f7c023b..8f47b4d 100644 --- a/src/cpu/intel/car/p4-netburst/cache_as_ram.S +++ b/src/cpu/intel/car/p4-netburst/cache_as_ram.S @@ -13,6 +13,7 @@ .global bootblock_pre_c_entry
#include <cpu/intel/car/cache_as_ram_symbols.inc> +#include <cpu/x86/64bit/entry64.inc>
.code32 _cache_as_ram_setup: @@ -362,8 +363,9 @@ subl $4, %esp
#if ENV_X86_64 - #include <cpu/x86/64bit/entry64.inc> - + setup_longmode 1f +.code64 +1: movd %mm2, %rdi shlq $32, %rdi /* BIST */ movd %mm1, %rsi diff --git a/src/cpu/qemu-x86/cache_as_ram_bootblock.S b/src/cpu/qemu-x86/cache_as_ram_bootblock.S index 0943e35..ad19a42 100644 --- a/src/cpu/qemu-x86/cache_as_ram_bootblock.S +++ b/src/cpu/qemu-x86/cache_as_ram_bootblock.S @@ -1,6 +1,7 @@ /* SPDX-License-Identifier: GPL-2.0-only */
#include <cpu/x86/post_code.h> +#include <cpu/x86/64bit/entry64.inc>
#define CBFS_FILE_MAGIC 0 #define CBFS_FILE_LEN (CBFS_FILE_MAGIC + 8) @@ -77,11 +78,12 @@ /* Align the stack and keep aligned for call to bootblock_c_entry() */ and $0xfffffff0, %esp
- /* entry64.inc preserves ebx. */ -#include <cpu/x86/64bit/entry64.inc> - /* Restore the BIST result and timestamps. */ + movl $(CONFIG_ARCH_X86_64_PGTBL_LOC), %ebx #if ENV_X86_64 + setup_longmode 1f %ebx +.code64 +1: movd %mm2, %rdi shlq $32, %rdi movd %mm1, %rsi diff --git a/src/cpu/x86/64bit/entry64.inc b/src/cpu/x86/64bit/entry64.inc index 7da68b4..6295bdd 100644 --- a/src/cpu/x86/64bit/entry64.inc +++ b/src/cpu/x86/64bit/entry64.inc @@ -22,10 +22,9 @@ #include <arch/rom_segs.h> #endif
- -setup_longmode: +.macro setup_longmode jmp_addr page_table /* Get page table address */ - movl $(CONFIG_ARCH_X86_64_PGTBL_LOC), %eax + movl \page_table, %eax
/* load identity mapped page tables */ movl %eax, %cr3 @@ -48,12 +47,11 @@
/* use long jump to switch to 64-bit code segment */ #if defined(__RAMSTAGE__) - ljmp $RAM_CODE_SEG64, $__longmode_start + ljmp $RAM_CODE_SEG64, $\jmp_addr #else - ljmp $ROM_CODE_SEG64, $__longmode_start + ljmp $ROM_CODE_SEG64, $\jmp_addr
#endif -.code64 -__longmode_start: +.endm
#endif diff --git a/src/cpu/x86/64bit/mode_switch.S b/src/cpu/x86/64bit/mode_switch.S index c27f540..45a62f0 100644 --- a/src/cpu/x86/64bit/mode_switch.S +++ b/src/cpu/x86/64bit/mode_switch.S @@ -1,4 +1,5 @@ /* SPDX-License-Identifier: GPL-2.0-only */ +#include <cpu/x86/64bit/entry64.inc>
.text .code64 @@ -55,7 +56,9 @@ movl %eax, %ebx
/* Preserves ebx */ - #include <cpu/x86/64bit/entry64.inc> + setup_longmode continue $(CONFIG_ARCH_X86_64_PGTBL_LOC) +.code64 +continue:
/* Place return value in rax */ movl %ebx, %eax diff --git a/src/cpu/x86/sipi_vector.S b/src/cpu/x86/sipi_vector.S index 39973db..f83b22b 100644 --- a/src/cpu/x86/sipi_vector.S +++ b/src/cpu/x86/sipi_vector.S @@ -6,6 +6,7 @@ #include <arch/ram_segs.h>
#define __RAMSTAGE__ +#include <cpu/x86/64bit/entry64.inc>
/* The SIPI vector is responsible for initializing the APs in the system. It * loads microcode, sets up MSRs, and enables caching before calling into @@ -223,7 +224,9 @@
#if ENV_X86_64 /* entry64.inc preserves ebx, esi, edi, ebp */ -#include <cpu/x86/64bit/entry64.inc> + setup_longmode 1f $(CONFIG_ARCH_X86_64_PGTBL_LOC) +.code64 +1: movabs c_handler, %eax call *%rax #else diff --git a/src/cpu/x86/smm/smm_stub.S b/src/cpu/x86/smm/smm_stub.S index f97ab59..100f182 100644 --- a/src/cpu/x86/smm/smm_stub.S +++ b/src/cpu/x86/smm/smm_stub.S @@ -12,6 +12,7 @@ #include <cpu/x86/cr.h> #include <cpu/x86/msr.h> #include <cpu/x86/lapic_def.h> +#include <cpu/x86/64bit/entry64.inc>
.code32 .section ".module_parameters", "aw", @progbits @@ -195,7 +196,9 @@ #if ENV_X86_64 mov %ecx, %edi /* entry64.inc preserves ebx, esi, edi, ebp */ -#include <cpu/x86/64bit/entry64.inc> + setup_longmode 1f +.code64 +1: mov %edi, %ecx
diff --git a/src/soc/intel/common/block/cpu/car/cache_as_ram.S b/src/soc/intel/common/block/cpu/car/cache_as_ram.S index 61cbe307..656306d 100644 --- a/src/soc/intel/common/block/cpu/car/cache_as_ram.S +++ b/src/soc/intel/common/block/cpu/car/cache_as_ram.S @@ -9,6 +9,7 @@ #include <cpu/x86/post_code.h> #include <intelblocks/msr.h> #include <intelblocks/post_codes.h> +#include <cpu/x86/64bit/entry64.inc>
.section .init, "ax", @progbits
@@ -279,7 +280,9 @@ andl $0xfffffff0, %esp
#if ENV_X86_64 - #include <cpu/x86/64bit/entry64.inc> + setup_longmode 1f +.code64 +1: movd %mm2, %rdi shlq $32, %rdi movd %mm1, %rsi