Andrey Petrov (andrey.petrov@intel.com) just uploaded a new patch set to gerrit, which you can find at https://review.coreboot.org/13882
-gerrit
commit 161ac6e9a1df33c1797b65d1da862898ecfdafde Author: Aaron Durbin adurbin@chromium.org Date: Wed Mar 2 15:26:10 2016 -0600
arch/x86: always use _start as entry symbol for all stages
Instead of keeping track of all the combinations of entry points depending on the stage and other options just use _start. That way, there's no need to update the arch/header.ld for complicated cases as _start is always the entry point for a stage.
Change-Id: I7795a5ee1caba92ab533bdb8c3ad80294901a48b Signed-off-by: Aaron Durbin adurbin@chromium.org --- src/arch/x86/include/arch/header.ld | 23 ----------------------- src/cpu/x86/32bit/entry32.inc | 12 ++++++++++-- 2 files changed, 10 insertions(+), 25 deletions(-)
diff --git a/src/arch/x86/include/arch/header.ld b/src/arch/x86/include/arch/header.ld index d7fbf07..77eb187 100644 --- a/src/arch/x86/include/arch/header.ld +++ b/src/arch/x86/include/arch/header.ld @@ -20,27 +20,4 @@ PHDRS to_load PT_LOAD; }
-/* - * For CONFIG_SEPARATE_VERSTAGE romstage doesn't have the cache-as-ram setup. - * It only contains the teardown code. The verstage has the cache-as-ram setup - * code. Therefore, it needs the protected_start symbol as its entry point. - * The romstage entry will be named _start for consistency, but it's likely - * to be implemented in the chipset code in order to control the logic flow. - */ -#if IS_ENABLED(CONFIG_SEPARATE_VERSTAGE) - #if ENV_RAMSTAGE || ENV_RMODULE || ENV_ROMSTAGE - ENTRY(_start) - #elif ENV_VERSTAGE - ENTRY(protected_start) - #endif -#else - #if ENV_RAMSTAGE || ENV_RMODULE - ENTRY(_start) - #elif ENV_ROMSTAGE - ENTRY(protected_start) - #endif -#endif - -#if IS_ENABLED(CONFIG_C_ENVIRONMENT_BOOTBLOCK) && ENV_BOOTBLOCK ENTRY(_start) -#endif diff --git a/src/cpu/x86/32bit/entry32.inc b/src/cpu/x86/32bit/entry32.inc index 9ef3bc1..8c39008 100644 --- a/src/cpu/x86/32bit/entry32.inc +++ b/src/cpu/x86/32bit/entry32.inc @@ -2,6 +2,7 @@
#include <arch/rom_segs.h> #include <cpu/x86/post_code.h> +#include <rules.h>
.code32
@@ -44,10 +45,17 @@ gdt_end: * * NOTE aligned to 4 so that we are sure that the prefetch * cache will be reloaded. + * + * In the bootblock there is already a ljmp to __protected_start and + * the reset vector jumps to symbol _start16bit in entry16.inc from + * the reset vector's symbol which is _start. Therefore, don't + * expose the _start symbol for bootblock. */ .align 4 -.globl protected_start -protected_start: +#if !ENV_BOOTBLOCK +.globl _start +_start: +#endif
lgdt %cs:gdtptr ljmp $ROM_CODE_SEG, $__protected_start