/* SPDX-License-Identifier: GPL-2.0+ */

#include <config.h>
#include <linux/linkage.h>
#include <linux/sizes.h>
#include <asm/system.h>

#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
#define ARM(x...)
#define THUMB(x...)	x
#else
#define ARM(x...)	x
#define THUMB(x...)
#endif

/*
 * v7_flush_dcache_all()
 *
 * Flush the whole D-cache.
 *
 * Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
 *
 * Note: copied from arch/arm/mm/cache-v7.S of Linux 4.4
 */
ENTRY(__v7_flush_dcache_all)
	dmb					@ ensure ordering with previous memory accesses
	mrc	p15, 1, r0, c0, c0, 1		@ read clidr
	mov	r3, r0, lsr #23			@ move LoC into position
	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
	beq	finished			@ if loc is 0, then no need to clean
start_flush_levels:
	mov	r10, #0				@ start clean at cache level 0
flush_levels:
	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
	and	r1, r1, #7			@ mask off the bits for current cache only
	cmp	r1, #2				@ see what cache we have at this level
	blt	skip				@ skip if no cache, or just i-cache
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	isb					@ isb to sync the new cssr & csidr
	mrc	p15, 1, r1, c0, c0, 0		@ read the new csidr
	and	r2, r1, #7			@ extract the length of the cache lines
	add	r2, r2, #4			@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3		@ find maximum way number (associativity - 1)
	clz	r5, r4				@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13		@ extract maximum set/index number
loop1:
	mov	r9, r7				@ create working copy of max index
loop2:
	ARM(	orr	r11, r10, r4, lsl r5	)	@ factor way and cache number into r11
	THUMB(	lsl	r6, r4, r5		)
	THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
	ARM(	orr	r11, r11, r9, lsl r2	)	@ factor index number into r11
	THUMB(	lsl	r6, r9, r2		)
	THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
	mcr	p15, 0, r11, c7, c14, 2		@ clean & invalidate by set/way
	subs	r9, r9, #1			@ decrement the index
	bge	loop2
	subs	r4, r4, #1			@ decrement the way
	bge	loop1
skip:
	add	r10, r10, #2			@ increment cache number
	cmp	r3, r10
	bgt	flush_levels
finished:
	mov	r10, #0				@ switch back to cache level 0
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	dsb	st
	isb
	bx	lr
ENDPROC(__v7_flush_dcache_all)

ENTRY(v7_flush_dcache_all)
	ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
	bl	__v7_flush_dcache_all
	ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
	bx	lr
ENDPROC(v7_flush_dcache_all)
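/*
 * For reference, the set/way operand assembled into r11 by loop2 above (and
 * by inval_loop2 below) follows the DCCISW/DCISW encoding from the ARMv7-A
 * Architecture Reference Manual:
 *
 *	way number    in bits [31:32-A], A = log2(associativity) (r5 = 32 - A)
 *	set number    shifted left by L = log2(line length in bytes) (held in r2)
 *	cache level   in bits [3:1] (level << 1, held in r10)
 */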
/*
 * v7_invalidate_dcache_all()
 *
 * Invalidate the whole D-cache.
 *
 * Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
 *
 * Note: copied from __v7_flush_dcache_all above with
 *	mcr	p15, 0, r11, c7, c14, 2
 * Replaced with:
 *	mcr	p15, 0, r11, c7, c6, 2
 */
ENTRY(__v7_invalidate_dcache_all)
	dmb					@ ensure ordering with previous memory accesses
	mrc	p15, 1, r0, c0, c0, 1		@ read clidr
	mov	r3, r0, lsr #23			@ move LoC into position
	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
	beq	inval_finished			@ if loc is 0, then no need to invalidate
	mov	r10, #0				@ start invalidate at cache level 0
inval_levels:
	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
	and	r1, r1, #7			@ mask off the bits for current cache only
	cmp	r1, #2				@ see what cache we have at this level
	blt	inval_skip			@ skip if no cache, or just i-cache
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	isb					@ isb to sync the new cssr & csidr
	mrc	p15, 1, r1, c0, c0, 0		@ read the new csidr
	and	r2, r1, #7			@ extract the length of the cache lines
	add	r2, r2, #4			@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3		@ find maximum way number (associativity - 1)
	clz	r5, r4				@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13		@ extract maximum set/index number
inval_loop1:
	mov	r9, r7				@ create working copy of max index
inval_loop2:
	ARM(	orr	r11, r10, r4, lsl r5	)	@ factor way and cache number into r11
	THUMB(	lsl	r6, r4, r5		)
	THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
	ARM(	orr	r11, r11, r9, lsl r2	)	@ factor index number into r11
	THUMB(	lsl	r6, r9, r2		)
	THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
	mcr	p15, 0, r11, c7, c6, 2		@ invalidate by set/way
	subs	r9, r9, #1			@ decrement the index
	bge	inval_loop2
	subs	r4, r4, #1			@ decrement the way
	bge	inval_loop1
inval_skip:
	add	r10, r10, #2			@ increment cache number
	cmp	r3, r10
	bgt	inval_levels
inval_finished:
	mov	r10, #0				@ switch back to cache level 0
	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
	dsb	st
	isb
	bx	lr
ENDPROC(__v7_invalidate_dcache_all)

ENTRY(v7_invalidate_dcache_all)
	ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
	bl	__v7_invalidate_dcache_all
	ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
	THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
	bx	lr
ENDPROC(v7_invalidate_dcache_all)
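/*
 * The two public entry points above follow the AAPCS: they take no
 * arguments, return nothing and preserve the callee-saved registers they
 * clobber, so they can be called directly from C. A minimal sketch of the
 * declarations a C caller would use (the actual header and call sites are
 * not part of this file and may differ):
 *
 *	void v7_flush_dcache_all(void);
 *	void v7_invalidate_dcache_all(void);
 */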