/*
 * arch/xtensa/kernel/vmlinux.lds.S
 *
 * Xtensa linker script
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2008 Tensilica Inc.
 *
 * Chris Zankel <chris@zankel.net>
 * Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
 * Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
 */

#define RO_EXCEPTION_TABLE_ALIGN	16

#include <asm-generic/vmlinux.lds.h>
#include <asm/page.h>
#include <asm/thread_info.h>

#include <asm/core.h>
#include <asm/vectors.h>

OUTPUT_ARCH(xtensa)
ENTRY(_start)

#ifdef __XTENSA_EB__
jiffies = jiffies_64 + 4;
#else
jiffies = jiffies_64;
#endif

/* Note: In the following macros, it would be nice to specify only the
   vector name and section kind and construct "sym" and "section" using
   CPP concatenation, but that does not work reliably. Concatenating a
   string with "." produces an invalid token. CPP will not print a
   warning because it thinks this is an assembly file, but it leaves
   them as multiple tokens and there may or may not be whitespace
   between them. */

/* Macro for a relocation entry */

#define RELOCATE_ENTRY(sym, section)					\
	LONG(sym ## _start);						\
	LONG(sym ## _end);						\
	LONG(LOADADDR(section))

/*
 * Macro to define a section for a vector. When CONFIG_VECTORS_OFFSET is
 * defined, code for every vector is located with the other init data. At
 * startup time head.S copies the code for every vector to its final position
 * according to the description recorded in the corresponding RELOCATE_ENTRY.
 */

#ifdef CONFIG_VECTORS_OFFSET
#define SECTION_VECTOR(sym, section, addr, prevsec)			\
	section addr : AT(((LOADADDR(prevsec) + SIZEOF(prevsec)) + 3) & ~ 3) \
	{								\
		. = ALIGN(4);						\
		sym ## _start = ABSOLUTE(.);				\
		*(section)						\
		sym ## _end = ABSOLUTE(.);				\
	}
#else
#define SECTION_VECTOR(section, addr)					\
	. = addr;							\
	*(section)
#endif

/*
 * Mapping of input sections to output sections when linking.
 */

SECTIONS
{
	. = KERNELOFFSET;
	/* .text section */

	_text = .;
	_stext = .;

	.text :
	{
		/* The HEAD_TEXT section must be the first section! */
		HEAD_TEXT

#ifndef CONFIG_VECTORS_OFFSET
		. = ALIGN(PAGE_SIZE);
		_vecbase = .;

		SECTION_VECTOR (.WindowVectors.text, WINDOW_VECTORS_VADDR)
#if XCHAL_EXCM_LEVEL >= 2
		SECTION_VECTOR (.Level2InterruptVector.text, INTLEVEL2_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 3
		SECTION_VECTOR (.Level3InterruptVector.text, INTLEVEL3_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 4
		SECTION_VECTOR (.Level4InterruptVector.text, INTLEVEL4_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 5
		SECTION_VECTOR (.Level5InterruptVector.text, INTLEVEL5_VECTOR_VADDR)
#endif
#if XCHAL_EXCM_LEVEL >= 6
		SECTION_VECTOR (.Level6InterruptVector.text, INTLEVEL6_VECTOR_VADDR)
#endif
		SECTION_VECTOR (.DebugInterruptVector.text, DEBUG_VECTOR_VADDR)
		SECTION_VECTOR (.KernelExceptionVector.text, KERNEL_VECTOR_VADDR)
		SECTION_VECTOR (.UserExceptionVector.text, USER_VECTOR_VADDR)
		SECTION_VECTOR (.DoubleExceptionVector.text, DOUBLEEXC_VECTOR_VADDR)
#endif

		IRQENTRY_TEXT
		SOFTIRQENTRY_TEXT
		ENTRY_TEXT
		TEXT_TEXT
		SCHED_TEXT
		CPUIDLE_TEXT
		LOCK_TEXT
		*(.fixup)
	}
	_etext = .;
	PROVIDE (etext = .);

	. = ALIGN(16);
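
	/*
	 * Read-only data. RO_EXCEPTION_TABLE_ALIGN is defined above so that
	 * the generic RO_DATA macro from asm-generic/vmlinux.lds.h also
	 * emits the exception table into this read-only region.
	 */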

	RO_DATA(4096)

	/* Data section */

#ifdef CONFIG_XIP_KERNEL
	INIT_TEXT_SECTION(PAGE_SIZE)
#else
	_sdata = .;
	RW_DATA(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
	_edata = .;

	/* Initialization code and data: */

	. = ALIGN(PAGE_SIZE);
	__init_begin = .;
	INIT_TEXT_SECTION(PAGE_SIZE)

	.init.data :
	{
		INIT_DATA
	}
#endif

	.init.rodata :
	{
		. = ALIGN(0x4);
		__tagtable_begin = .;
		*(.taglist)
		__tagtable_end = .;

		. = ALIGN(16);
		__boot_reloc_table_start = ABSOLUTE(.);

#ifdef CONFIG_VECTORS_OFFSET
		RELOCATE_ENTRY(_WindowVectors_text,
			       .WindowVectors.text);
#if XCHAL_EXCM_LEVEL >= 2
		RELOCATE_ENTRY(_Level2InterruptVector_text,
			       .Level2InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 3
		RELOCATE_ENTRY(_Level3InterruptVector_text,
			       .Level3InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 4
		RELOCATE_ENTRY(_Level4InterruptVector_text,
			       .Level4InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 5
		RELOCATE_ENTRY(_Level5InterruptVector_text,
			       .Level5InterruptVector.text);
#endif
#if XCHAL_EXCM_LEVEL >= 6
		RELOCATE_ENTRY(_Level6InterruptVector_text,
			       .Level6InterruptVector.text);
#endif
		RELOCATE_ENTRY(_KernelExceptionVector_text,
			       .KernelExceptionVector.text);
		RELOCATE_ENTRY(_UserExceptionVector_text,
			       .UserExceptionVector.text);
		RELOCATE_ENTRY(_DoubleExceptionVector_text,
			       .DoubleExceptionVector.text);
		RELOCATE_ENTRY(_DebugInterruptVector_text,
			       .DebugInterruptVector.text);
#endif
#ifdef CONFIG_XIP_KERNEL
		RELOCATE_ENTRY(_xip_data, .data);
		RELOCATE_ENTRY(_xip_init_data, .init.data);
#else
#if defined(CONFIG_SMP)
		RELOCATE_ENTRY(_SecondaryResetVector_text,
			       .SecondaryResetVector.text);
#endif
#endif

		__boot_reloc_table_end = ABSOLUTE(.);

		INIT_SETUP(XCHAL_ICACHE_LINESIZE)
		INIT_CALLS
		CON_INITCALL
		INIT_RAM_FS
	}

	PERCPU_SECTION(XCHAL_ICACHE_LINESIZE)

	/* We need this dummy segment here */

	. = ALIGN(4);
	.dummy : { LONG(0) }
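
	/*
	 * With CONFIG_VECTORS_OFFSET, each SECTION_VECTOR below passes the
	 * previously emitted section as "prevsec", so the vector code is
	 * packed contiguously in the load image starting right after .dummy;
	 * the LAST macro tracks the most recently emitted vector section
	 * across the optional interrupt-level vectors.
	 */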

#ifdef CONFIG_VECTORS_OFFSET
	/* The vectors are relocated to the real position at startup time */

	SECTION_VECTOR (_WindowVectors_text,
			.WindowVectors.text,
			WINDOW_VECTORS_VADDR,
			.dummy)
	SECTION_VECTOR (_DebugInterruptVector_text,
			.DebugInterruptVector.text,
			DEBUG_VECTOR_VADDR,
			.WindowVectors.text)
#undef LAST
#define LAST	.DebugInterruptVector.text
#if XCHAL_EXCM_LEVEL >= 2
	SECTION_VECTOR (_Level2InterruptVector_text,
			.Level2InterruptVector.text,
			INTLEVEL2_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level2InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 3
	SECTION_VECTOR (_Level3InterruptVector_text,
			.Level3InterruptVector.text,
			INTLEVEL3_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level3InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 4
	SECTION_VECTOR (_Level4InterruptVector_text,
			.Level4InterruptVector.text,
			INTLEVEL4_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level4InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 5
	SECTION_VECTOR (_Level5InterruptVector_text,
			.Level5InterruptVector.text,
			INTLEVEL5_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level5InterruptVector.text
#endif
#if XCHAL_EXCM_LEVEL >= 6
	SECTION_VECTOR (_Level6InterruptVector_text,
			.Level6InterruptVector.text,
			INTLEVEL6_VECTOR_VADDR,
			LAST)
# undef LAST
# define LAST	.Level6InterruptVector.text
#endif
	SECTION_VECTOR (_KernelExceptionVector_text,
			.KernelExceptionVector.text,
			KERNEL_VECTOR_VADDR,
			LAST)
#undef LAST
	SECTION_VECTOR (_UserExceptionVector_text,
			.UserExceptionVector.text,
			USER_VECTOR_VADDR,
			.KernelExceptionVector.text)
	SECTION_VECTOR (_DoubleExceptionVector_text,
			.DoubleExceptionVector.text,
			DOUBLEEXC_VECTOR_VADDR,
			.UserExceptionVector.text)

	. = (LOADADDR( .DoubleExceptionVector.text ) + SIZEOF( .DoubleExceptionVector.text ) + 3) & ~ 3;

#endif
#if !defined(CONFIG_XIP_KERNEL) && defined(CONFIG_SMP)

	SECTION_VECTOR (_SecondaryResetVector_text,
			.SecondaryResetVector.text,
			RESET_VECTOR1_VADDR,
			.DoubleExceptionVector.text)

	. = LOADADDR(.SecondaryResetVector.text) + SIZEOF(.SecondaryResetVector.text);

#endif

	. = ALIGN(PAGE_SIZE);

#ifndef CONFIG_XIP_KERNEL
	__init_end = .;

	BSS_SECTION(0, 8192, 0)
#endif

	_end = .;

#ifdef CONFIG_XIP_KERNEL
	. = CONFIG_XIP_DATA_ADDR;

	_xip_start = .;

#undef LOAD_OFFSET
#define LOAD_OFFSET \
	(CONFIG_XIP_DATA_ADDR - (LOADADDR(.dummy) + SIZEOF(.dummy) + 3) & ~ 3)

	_xip_data_start = .;
	_sdata = .;
	RW_DATA(XCHAL_ICACHE_LINESIZE, PAGE_SIZE, THREAD_SIZE)
	_edata = .;
	_xip_data_end = .;

	/* Initialization data: */

	STRUCT_ALIGN();

	_xip_init_data_start = .;
	__init_begin = .;
	.init.data :
	{
		INIT_DATA
	}
	_xip_init_data_end = .;
	__init_end = .;
	BSS_SECTION(0, 8192, 0)

	_xip_end = .;

#undef LOAD_OFFSET
#endif

	DWARF_DEBUG

	.xt.prop 0 : { KEEP(*(.xt.prop .xt.prop.* .gnu.linkonce.prop.*)) }
	.xt.insn 0 : { KEEP(*(.xt.insn .xt.insn.* .gnu.linkonce.x*)) }
	.xt.lit  0 : { KEEP(*(.xt.lit .xt.lit.* .gnu.linkonce.p*)) }

	/* Sections to be discarded */
	DISCARDS
}