/*
 *  linux/arch/arm/mm/arm946.S: utility functions for ARM946E-S
 *
 *  Copyright (C) 2004-2006 Hyok S. Choi (hyok.choi@samsung.com)
 *
 *  (Many of cache codes are from proc-arm926.S)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/ptrace.h>
#include "proc-macros.S"

/*
 * ARM946E-S is synthesizable to have 0KB to 1MB sized D-Cache,
 * comprising 256 lines of 32 bytes (8 words).
 */
#define CACHE_DSIZE	(CONFIG_CPU_DCACHE_SIZE) /* typically 8KB. */
#define CACHE_DLINESIZE	32			/* fixed */
#define CACHE_DSEGMENTS	4			/* fixed */
#define CACHE_DENTRIES	(CACHE_DSIZE / CACHE_DSEGMENTS / CACHE_DLINESIZE)
#define CACHE_DLIMIT	(CACHE_DSIZE * 4)	/* benchmark needed */

	.text
/*
 * cpu_arm946_proc_init()
 * cpu_arm946_switch_mm()
 *
 * These are not required.
 */
ENTRY(cpu_arm946_proc_init)
ENTRY(cpu_arm946_switch_mm)
	mov	pc, lr

/*
 * cpu_arm946_proc_fin()
 */
ENTRY(cpu_arm946_proc_fin)
	stmfd	sp!, {lr}
	mov	ip, #PSR_F_BIT | PSR_I_BIT | SVC_MODE
	msr	cpsr_c, ip
	bl	arm946_flush_kern_cache_all
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x00001000		@ i-cache
	bic	r0, r0, #0x00000004		@ d-cache
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ldmfd	sp!, {pc}

/*
 * cpu_arm946_reset(loc)
 * Params  : r0 = address to jump to
 * Notes   : This sets up everything for a reset
 */
ENTRY(cpu_arm946_reset)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mrc	p15, 0, ip, c1, c0, 0		@ ctrl register
	bic	ip, ip, #0x00000005		@ .............c.p
	bic	ip, ip, #0x00001000		@ i-cache
	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
	mov	pc, r0

/*
 * cpu_arm946_do_idle()
 */
	.align	5
ENTRY(cpu_arm946_do_idle)
	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
	mov	pc, lr

/*
 * flush_user_cache_all()
 */
ENTRY(arm946_flush_user_cache_all)
	/* FALLTHROUGH */

/*
 * flush_kern_cache_all()
 *
 * Clean and invalidate the entire cache.
 */
ENTRY(arm946_flush_kern_cache_all)
	mov	r2, #VM_EXEC
	mov	ip, #0
__flush_whole_cache:
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ flush D cache
#else
	mov	r1, #(CACHE_DSEGMENTS - 1) << 29 @ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 4 @ n entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean/flush D index
	subs	r3, r3, #1 << 4
	bcs	2b				@ entries n to 0
	subs	r1, r1, #1 << 29
	bcs	1b				@ segments 3 to 0
#endif
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c5, 0		@ flush I cache
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	mov	pc, lr
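/*
 * The whole-cache path above visits every segment/line-index pair of
 * the D-cache, issuing a clean-and-invalidate by index (c7, c14, 2)
 * for each line: the segment number is held from bit 29 upwards and
 * the line index from bit 4 upwards, as CACHE_DSEGMENTS and
 * CACHE_DENTRIES describe.  Ranges below CACHE_DLIMIT are handled line
 * by line instead (see flush_user_cache_range below); larger ranges
 * fall back to __flush_whole_cache on the assumption that a full clean
 * is cheaper than walking the whole range.
 */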
/*
 * flush_user_cache_range(start, end, flags)
 *
 * Clean and invalidate a range of cache entries in the
 * specified address range.
 *
 * - start	- start address (inclusive)
 * - end	- end address (exclusive)
 * - flags	- vm_flags describing address space
 * (same as arm926)
 */
ENTRY(arm946_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ calculate total size
	cmp	r3, #CACHE_DLIMIT
	bhs	__flush_whole_cache

1:	tst	r2, #VM_EXEC
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#else
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	mcr	p15, 0, r0, c7, c14, 1		@ clean and invalidate D entry
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
#endif
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * coherent_kern_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  If you have non-snooping
 * Harvard caches, you need to implement this function.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 */
ENTRY(arm946_coherent_kern_range)
	/* FALLTHROUGH */

/*
 * coherent_user_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  If you have non-snooping
 * Harvard caches, you need to implement this function.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 * (same as arm926)
 */
ENTRY(arm946_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * flush_kern_dcache_page(void *page)
 *
 * Ensure no D cache aliasing occurs, either with itself or
 * the I cache
 *
 * - addr	- page aligned address
 * (same as arm926)
 */
ENTRY(arm946_flush_kern_dcache_page)
	add	r1, r0, #PAGE_SZ
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * dma_inv_range(start, end)
 *
 * Invalidate (discard) the specified virtual address range.
 * May not write back any entries.  If 'start' or 'end'
 * are not cache line aligned, those lines must be written
 * back.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 * (same as arm926)
 */
ENTRY(arm946_dma_inv_range)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	tst	r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
	tst	r1, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r1, c7, c10, 1		@ clean D entry
#endif
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr
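/*
 * Note that dma_inv_range above first cleans any partially covered
 * line at either end of the range, so dirty data sharing a cache line
 * with the DMA buffer is not thrown away.  Broadly, dma_inv_range is
 * used when the device writes to memory (DMA_FROM_DEVICE),
 * dma_clean_range below when the CPU has written data the device will
 * read (DMA_TO_DEVICE), and dma_flush_range for bidirectional buffers.
 */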
/*
 * dma_clean_range(start, end)
 *
 * Clean the specified virtual address range.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 *
 * (same as arm926)
 */
ENTRY(arm946_dma_clean_range)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * dma_flush_range(start, end)
 *
 * Clean and invalidate the specified virtual address range.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 *
 * (same as arm926)
 */
ENTRY(arm946_dma_flush_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
#else
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
#endif
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

ENTRY(arm946_cache_fns)
	.long	arm946_flush_kern_cache_all
	.long	arm946_flush_user_cache_all
	.long	arm946_flush_user_cache_range
	.long	arm946_coherent_kern_range
	.long	arm946_coherent_user_range
	.long	arm946_flush_kern_dcache_page
	.long	arm946_dma_inv_range
	.long	arm946_dma_clean_range
	.long	arm946_dma_flush_range


ENTRY(cpu_arm946_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE
	bhi	1b
#endif
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

	__INIT

	.type	__arm946_setup, #function
__arm946_setup:
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c6, 0		@ invalidate D cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB

	mcr	p15, 0, r0, c6, c3, 0		@ disable memory region 3~7
	mcr	p15, 0, r0, c6, c4, 0
	mcr	p15, 0, r0, c6, c5, 0
	mcr	p15, 0, r0, c6, c6, 0
	mcr	p15, 0, r0, c6, c7, 0

	mov	r0, #0x0000003F			@ base = 0, size = 4GB
	mcr	p15, 0, r0, c6, c0, 0		@ set region 0, default

	ldr	r0, =(CONFIG_DRAM_BASE & 0xFFFFF000) @ base[31:12] of RAM
	ldr	r1, =(CONFIG_DRAM_SIZE >> 12)	@ size of RAM (must be >= 4KB)
	mov	r2, #10				@ 11 is the minimum (4KB)
1:	add	r2, r2, #1			@ area size *= 2
	movs	r1, r1, lsr #1
	bne	1b				@ count not zero r-shift
	orr	r0, r0, r2, lsl #1		@ the region register value
	orr	r0, r0, #1			@ set enable bit
	mcr	p15, 0, r0, c6, c1, 0		@ set region 1, RAM

	ldr	r0, =(CONFIG_FLASH_MEM_BASE & 0xFFFFF000) @ base[31:12] of FLASH
	ldr	r1, =(CONFIG_FLASH_SIZE >> 12)	@ size of FLASH (must be >= 4KB)
	mov	r2, #10				@ 11 is the minimum (4KB)
1:	add	r2, r2, #1			@ area size *= 2
	movs	r1, r1, lsr #1
	bne	1b				@ count not zero r-shift
	orr	r0, r0, r2, lsl #1		@ the region register value
	orr	r0, r0, #1			@ set enable bit
	mcr	p15, 0, r0, c6, c2, 0		@ set region 2, ROM/FLASH

	mov	r0, #0x06
	mcr	p15, 0, r0, c2, c0, 0		@ region 1,2 d-cacheable
	mcr	p15, 0, r0, c2, c0, 1		@ region 1,2 i-cacheable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mov	r0, #0x00			@ disable whole write buffer
#else
	mov	r0, #0x02			@ region 1 write buffered
#endif
	mcr	p15, 0, r0, c3, c0, 0
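/*
 * The c2 (cacheable) and c3 (write bufferable) control registers
 * programmed above carry one bit per protection region, bit n for
 * region n: 0x06 selects regions 1 and 2, 0x02 selects region 1 only.
 * So RAM and FLASH are cached for both instructions and data, while
 * only RAM is write buffered (or the write buffer is left disabled
 * entirely when the D-cache is configured write-through).
 */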
/*
 *  Access Permission Settings for future permission control by PU.
 *
 *				priv.	user
 *	region 0 (whole)	rw	--	: b0001
 *	region 1 (RAM)		rw	rw	: b0011
 *	region 2 (FLASH)	rw	r-	: b0010
 *	region 3~7 (none)	--	--	: b0000
 */
	mov	r0, #0x00000031
	orr	r0, r0, #0x00000200
	mcr	p15, 0, r0, c5, c0, 2		@ set data access permission
	mcr	p15, 0, r0, c5, c0, 3		@ set inst. access permission

	mrc	p15, 0, r0, c1, c0		@ get control register
	orr	r0, r0, #0x00001000		@ I-cache
	orr	r0, r0, #0x00000005		@ MPU/D-cache
#ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
	orr	r0, r0, #0x00004000		@ .1.. .... .... ....
#endif
	mov	pc, lr

	.size	__arm946_setup, . - __arm946_setup

	__INITDATA

/*
 * Purpose : Function pointers used to access above functions - all calls
 *	     come through these
 */
	.type	arm946_processor_functions, #object
ENTRY(arm946_processor_functions)
	.word	nommu_early_abort
	.word	legacy_pabort
	.word	cpu_arm946_proc_init
	.word	cpu_arm946_proc_fin
	.word	cpu_arm946_reset
	.word	cpu_arm946_do_idle

	.word	cpu_arm946_dcache_clean_area
	.word	cpu_arm946_switch_mm
	.word	0		@ cpu_*_set_pte
	.size	arm946_processor_functions, . - arm946_processor_functions

	.section ".rodata"

	.type	cpu_arch_name, #object
cpu_arch_name:
	.asciz	"armv5te"
	.size	cpu_arch_name, . - cpu_arch_name

	.type	cpu_elf_name, #object
cpu_elf_name:
	.asciz	"v5t"
	.size	cpu_elf_name, . - cpu_elf_name

	.type	cpu_arm946_name, #object
cpu_arm946_name:
	.asciz	"ARM946E-S"
	.size	cpu_arm946_name, . - cpu_arm946_name

	.align

	.section ".proc.info.init", #alloc, #execinstr
	.type	__arm946_proc_info,#object
__arm946_proc_info:
	.long	0x41009460			@ CPU ID value
	.long	0xff00fff0			@ CPU ID mask
	.long	0
	b	__arm946_setup
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB
	.long	cpu_arm946_name
	.long	arm946_processor_functions
	.long	0
	.long	0
	.long	arm946_cache_fns
	.size	__arm946_proc_info, . - __arm946_proc_info