/*
 * arch/xtensa/kernel/vmlinux.lds.S
 *
 * Xtensa linker script
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2008 Tensilica Inc.
 *
 * Chris Zankel <chris@zankel.net>
 * Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
 * Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
 */

#define RO_EXCEPTION_TABLE_ALIGN	16

#include <asm-generic/vmlinux.lds.h>
#include <asm/page.h>
#include <asm/thread_info.h>

#include <asm/core.h>
#include <asm/vectors.h>

OUTPUT_ARCH(xtensa)
ENTRY(_start)

/*
 * "jiffies" aliases the low 32 bits of jiffies_64; on a big-endian
 * Xtensa the low word lives at byte offset 4 of the 64-bit counter.
 */
#ifdef __XTENSA_EB__
jiffies = jiffies_64 + 4;
#else
jiffies = jiffies_64;
#endif

/* Note: In the following macros, it would be nice to specify only the
   vector name and section kind and construct "sym" and "section" using
   CPP concatenation, but that does not work reliably. Concatenating a
   string with "." produces an invalid token. CPP will not print a
   warning because it thinks this is an assembly file, but it leaves
   them as multiple tokens and there may or may not be whitespace
   between them. */

/* Macro for a relocation entry: emits {start VA, end VA, load address}
   triples that head.S walks at boot to copy code into place. */

#define RELOCATE_ENTRY(sym, section)		\
	LONG(sym ## _start);			\
	LONG(sym ## _end);			\
	LONG(LOADADDR(section))

#if !defined(CONFIG_VECTORS_ADDR) && XCHAL_HAVE_VECBASE
#define MERGED_VECTORS 1
#else
#define MERGED_VECTORS 0
#endif

/*
 * Macro to define a section for a vector. When MERGED_VECTORS is 0
 * code for every vector is located with other init data. At startup
 * time head.S copies code for every vector to its final position according
 * to description recorded in the corresponding RELOCATE_ENTRY.
 *
 * The AT() expression places each vector's load address immediately
 * after the previous section (prevsec), rounded up to a 4-byte boundary.
 */

#define SECTION_VECTOR4(sym, section, addr, prevsec)			    \
  section addr : AT(((LOADADDR(prevsec) + SIZEOF(prevsec)) + 3) & ~ 3)	    \
  {									    \
    . = ALIGN(4);							    \
    sym ## _start = ABSOLUTE(.);					    \
    *(section)								    \
    sym ## _end = ABSOLUTE(.);						    \
  }

#define SECTION_VECTOR2(section, addr)					    \
  . = addr;								    \
  *(section)

/*
 * Mapping of input sections to output sections when linking.
 */

SECTIONS
{
  . = KERNELOFFSET;
  /* .text section */

  _text = .;
  _stext = .;

  .text :
  {
    /* The HEAD_TEXT section must be the first section! */
    HEAD_TEXT

#if MERGED_VECTORS
    /* With VECBASE the vectors live directly in .text at fixed offsets
       from _vecbase; no boot-time relocation is needed. */
    . = ALIGN(PAGE_SIZE);
    _vecbase = .;

    SECTION_VECTOR2 (.WindowVectors.text, WINDOW_VECTORS_VADDR)
#if XCHAL_EXCM_LEVEL >= 2
    SECTION_VECTOR2 (.Level2InterruptVector.text, INTLEVEL2_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 3
    SECTION_VECTOR2 (.Level3InterruptVector.text, INTLEVEL3_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 4
    SECTION_VECTOR2 (.Level4InterruptVector.text, INTLEVEL4_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 5
    SECTION_VECTOR2 (.Level5InterruptVector.text, INTLEVEL5_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 6
    SECTION_VECTOR2 (.Level6InterruptVector.text, INTLEVEL6_VECTOR_VADDR)
#endif
    SECTION_VECTOR2 (.DebugInterruptVector.text, DEBUG_VECTOR_VADDR)
    SECTION_VECTOR2 (.KernelExceptionVector.text, KERNEL_VECTOR_VADDR)
    SECTION_VECTOR2 (.UserExceptionVector.text, USER_VECTOR_VADDR)
    SECTION_VECTOR2 (.DoubleExceptionVector.text, DOUBLEEXC_VECTOR_VADDR)

    *(.exception.text)
#endif

    IRQENTRY_TEXT
    SOFTIRQENTRY_TEXT
    ENTRY_TEXT
    TEXT_TEXT
    SCHED_TEXT
    CPUIDLE_TEXT
    LOCK_TEXT
    *(.fixup)
  }
  _etext = .;
  PROVIDE (etext = .);

  . = ALIGN(16);

  RO_DATA(4096)

  /* Data section */

#ifdef CONFIG_XIP_KERNEL
  /* XIP: RW data is emitted later at CONFIG_XIP_DATA_ADDR (see below);
     only init text goes here in the execute-in-place image. */
  INIT_TEXT_SECTION(PAGE_SIZE)
#else
  _sdata = .;
  RW_DATA(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
  _edata = .;

  /* Initialization code and data: */

  . = ALIGN(PAGE_SIZE);
  __init_begin = .;
  INIT_TEXT_SECTION(PAGE_SIZE)

  .init.data :
  {
    INIT_DATA
  }
#endif

  .init.rodata :
  {
    . = ALIGN(0x4);
    __tagtable_begin = .;
    *(.taglist)
    __tagtable_end = .;

    . = ALIGN(16);
    /* Boot-time relocation table consumed by head.S: one
       {start, end, load address} triple per section to move. */
    __boot_reloc_table_start = ABSOLUTE(.);

#if !MERGED_VECTORS
    RELOCATE_ENTRY(_WindowVectors_text,
		   .WindowVectors.text);
#if XCHAL_EXCM_LEVEL >= 2
    RELOCATE_ENTRY(_Level2InterruptVector_text,
		   .Level2InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 3
    RELOCATE_ENTRY(_Level3InterruptVector_text,
		   .Level3InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 4
    RELOCATE_ENTRY(_Level4InterruptVector_text,
		   .Level4InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 5
    RELOCATE_ENTRY(_Level5InterruptVector_text,
		   .Level5InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 6
    RELOCATE_ENTRY(_Level6InterruptVector_text,
		   .Level6InterruptVector.text);
#endif
    RELOCATE_ENTRY(_KernelExceptionVector_text,
		   .KernelExceptionVector.text);
    RELOCATE_ENTRY(_UserExceptionVector_text,
		   .UserExceptionVector.text);
    RELOCATE_ENTRY(_DoubleExceptionVector_text,
		   .DoubleExceptionVector.text);
    RELOCATE_ENTRY(_DebugInterruptVector_text,
		   .DebugInterruptVector.text);
    RELOCATE_ENTRY(_exception_text,
		   .exception.text);
#endif
#ifdef CONFIG_XIP_KERNEL
    RELOCATE_ENTRY(_xip_data, .data);
    RELOCATE_ENTRY(_xip_init_data, .init.data);
#endif
#if defined(CONFIG_SMP)
    RELOCATE_ENTRY(_SecondaryResetVector_text,
		   .SecondaryResetVector.text);
#endif

    __boot_reloc_table_end = ABSOLUTE(.) ;

    INIT_SETUP(XCHAL_ICACHE_LINESIZE)
    INIT_CALLS
    CON_INITCALL
    INIT_RAM_FS
  }

  PERCPU_SECTION(XCHAL_ICACHE_LINESIZE)

  /* We need this dummy segment here: it anchors the LOADADDR()/SIZEOF()
     chain that SECTION_VECTOR4 uses to pack the vector sections. */

  . = ALIGN(4);
  .dummy : { LONG(0) }

/* LAST always names the most recently emitted vector section, so each
   SECTION_VECTOR4 can chain its load address off the previous one even
   when intermediate levels are compiled out. */
#undef LAST
#define LAST	.dummy

#if !MERGED_VECTORS
  /* The vectors are relocated to the real position at startup time */

  SECTION_VECTOR4 (_WindowVectors_text,
		   .WindowVectors.text,
		   WINDOW_VECTORS_VADDR,
		   .dummy)
  SECTION_VECTOR4 (_DebugInterruptVector_text,
		   .DebugInterruptVector.text,
		   DEBUG_VECTOR_VADDR,
		   .WindowVectors.text)
#undef LAST
#define LAST	.DebugInterruptVector.text
#if XCHAL_EXCM_LEVEL >= 2
  SECTION_VECTOR4 (_Level2InterruptVector_text,
		   .Level2InterruptVector.text,
		   INTLEVEL2_VECTOR_VADDR,
		   LAST)
# undef LAST
# define LAST	.Level2InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 3
  SECTION_VECTOR4 (_Level3InterruptVector_text,
		   .Level3InterruptVector.text,
		   INTLEVEL3_VECTOR_VADDR,
		   LAST)
# undef LAST
# define LAST	.Level3InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 4
  SECTION_VECTOR4 (_Level4InterruptVector_text,
		   .Level4InterruptVector.text,
		   INTLEVEL4_VECTOR_VADDR,
		   LAST)
# undef LAST
# define LAST	.Level4InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 5
  SECTION_VECTOR4 (_Level5InterruptVector_text,
		   .Level5InterruptVector.text,
		   INTLEVEL5_VECTOR_VADDR,
		   LAST)
# undef LAST
# define LAST	.Level5InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 6
  SECTION_VECTOR4 (_Level6InterruptVector_text,
		   .Level6InterruptVector.text,
		   INTLEVEL6_VECTOR_VADDR,
		   LAST)
# undef LAST
# define LAST	.Level6InterruptVector.text
#endif
  SECTION_VECTOR4 (_KernelExceptionVector_text,
		   .KernelExceptionVector.text,
		   KERNEL_VECTOR_VADDR,
		   LAST)
#undef LAST
  SECTION_VECTOR4 (_UserExceptionVector_text,
		   .UserExceptionVector.text,
		   USER_VECTOR_VADDR,
		   .KernelExceptionVector.text)
  SECTION_VECTOR4 (_DoubleExceptionVector_text,
		   .DoubleExceptionVector.text,
		   DOUBLEEXC_VECTOR_VADDR,
		   .UserExceptionVector.text)
#define LAST	.DoubleExceptionVector.text

#endif
#if defined(CONFIG_SMP)

  SECTION_VECTOR4 (_SecondaryResetVector_text,
		   .SecondaryResetVector.text,
		   RESET_VECTOR1_VADDR,
		   LAST)
#undef LAST
#define LAST	.SecondaryResetVector.text

#endif
#if !MERGED_VECTORS
  /* Note: the addr argument is intentionally empty here, so the section
     is placed at the current location counter rather than a fixed VADDR. */
  SECTION_VECTOR4 (_exception_text,
		   .exception.text,
		   ,
		   LAST)
#undef LAST
#define LAST	.exception.text

#endif
  /* Advance past the load image of the last vector section, rounded
     up to a 4-byte boundary, before placing anything else. */
  . = (LOADADDR(LAST) + SIZEOF(LAST) + 3) & ~ 3;

  .dummy1 : AT(ADDR(.dummy1)) { LONG(0) }
  . = ALIGN(PAGE_SIZE);

#ifndef CONFIG_XIP_KERNEL
  __init_end = .;

  BSS_SECTION(0, 8192, 0)
#endif

  _end = .;

#ifdef CONFIG_XIP_KERNEL
  /* XIP layout: data sections are linked at CONFIG_XIP_DATA_ADDR but
     loaded right after .dummy1 in the flash image; head.S copies them
     to RAM via the _xip_data/_xip_init_data RELOCATE_ENTRYs above. */
  . = CONFIG_XIP_DATA_ADDR;

  _xip_start = .;

#undef LOAD_OFFSET
#define LOAD_OFFSET \
	(CONFIG_XIP_DATA_ADDR - (LOADADDR(.dummy1) + SIZEOF(.dummy1) + 3) & ~ 3)

  _xip_data_start = .;
  _sdata = .;
  RW_DATA(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
  _edata = .;
  _xip_data_end = .;

  /* Initialization data: */

  STRUCT_ALIGN();

  _xip_init_data_start = .;
  __init_begin = .;
  .init.data :
  {
    INIT_DATA
  }
  _xip_init_data_end = .;
  __init_end = .;
  BSS_SECTION(0, 8192, 0)

  _xip_end = .;

#undef LOAD_OFFSET
#endif

  DWARF_DEBUG

  /* Xtensa-specific property/literal sections kept for debuggers and
     binutils tooling; linked at address 0 (non-allocated). */
  .xt.prop 0 : { KEEP(*(.xt.prop .xt.prop.* .gnu.linkonce.prop.*)) }
  .xt.insn 0 : { KEEP(*(.xt.insn .xt.insn.* .gnu.linkonce.x*)) }
  .xt.lit 0 : { KEEP(*(.xt.lit .xt.lit.* .gnu.linkonce.p*)) }

  /* Sections to be discarded */
  DISCARDS
}