/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Cache maintenance
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>
#include <asm/asm-uaccess.h>

/*
 * caches_clean_inval_pou_macro(start,end) [fixup]
 *
 * Ensure that the I and D caches are coherent within specified region.
 * This is typically used when code has been written to a memory region,
 * and will be executed.
 *
 * - start   - virtual start address of region (in x0)
 * - end     - virtual end address of region (in x1)
 * - fixup   - optional label to branch to on user fault
 *
 * When the CPU advertises CTR_EL0.IDC (ARM64_HAS_CACHE_IDC), the D-side
 * clean to PoU is unnecessary and is replaced at patch time by a dsb;
 * likewise CTR_EL0.DIC (ARM64_HAS_CACHE_DIC) makes the I-side
 * invalidation unnecessary, needing only an isb.
 *
 * NOTE(review): x2-x5 are handed to dcache_by_line_op as scratch, and
 * x2/x3 to invalidate_icache_by_line — presumably all are clobbered;
 * the macros are defined in <asm/assembler.h>, confirm there.
 */
.macro	caches_clean_inval_pou_macro, fixup
alternative_if ARM64_HAS_CACHE_IDC
	dsb	ishst				// IDC: D-cache clean not required
	b	.Ldc_skip_\@
alternative_else_nop_endif
	mov	x2, x0				// copy args so x0/x1 survive for the I-side pass
	mov	x3, x1
	dcache_by_line_op cvau, ish, x2, x3, x4, x5, \fixup
.Ldc_skip_\@:
alternative_if ARM64_HAS_CACHE_DIC
	isb					// DIC: I-cache invalidate not required
	b	.Lic_skip_\@
alternative_else_nop_endif
	invalidate_icache_by_line x0, x1, x2, x3, \fixup
.Lic_skip_\@:
.endm

/*
 * caches_clean_inval_pou(start,end)
 *
 * Ensure that the I and D caches are coherent within specified region.
 * This is typically used when code has been written to a memory region,
 * and will be executed.
 *
 * - start   - virtual start address of region (x0)
 * - end     - virtual end address of region (x1)
 */
SYM_FUNC_START(caches_clean_inval_pou)
	caches_clean_inval_pou_macro		// no fixup: kernel addresses, faults not expected
	ret
SYM_FUNC_END(caches_clean_inval_pou)

/*
 * caches_clean_inval_user_pou(start,end)
 *
 * As caches_clean_inval_pou(), but for user-space addresses: the region
 * is accessed with TTBR0 user access temporarily enabled, and a fault
 * on the user mapping is caught via the "2f" fixup label.
 *
 * - start   - virtual start address of region (x0)
 * - end     - virtual end address of region (x1)
 *
 * Returns in x0: 0 on success, -EFAULT if a user access faulted.
 */
SYM_FUNC_START(caches_clean_inval_user_pou)
	uaccess_ttbr0_enable x2, x3, x4

	caches_clean_inval_pou_macro 2f
	mov	x0, xzr				// success
1:
	uaccess_ttbr0_disable x1, x2		// x1/x2 as scratch so x0 (return value) is preserved
	ret
2:
	mov	x0, #-EFAULT			// fault on user address
	b	1b
SYM_FUNC_END(caches_clean_inval_user_pou)

/*
 * icache_inval_pou(start,end)
 *
 * Ensure that the I cache is invalid within specified region.
 *
 * - start   - virtual start address of region (x0)
 * - end     - virtual end address of region (x1)
 *
 * With CTR_EL0.DIC (ARM64_HAS_CACHE_DIC) the invalidation is not
 * required and the patched-in path is just an isb.
 */
SYM_FUNC_START(icache_inval_pou)
alternative_if ARM64_HAS_CACHE_DIC
	isb
	ret
alternative_else_nop_endif

	invalidate_icache_by_line x0, x1, x2, x3
	ret
SYM_FUNC_END(icache_inval_pou)

/*
 * dcache_clean_inval_poc(start, end)
 *
 * Ensure that any D-cache lines for the interval [start, end)
 * are cleaned and invalidated to the PoC.
 *
 * - start   - virtual start address of region (x0)
 * - end     - virtual end address of region (x1)
 */
SYM_FUNC_START_PI(dcache_clean_inval_poc)
	dcache_by_line_op civac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(dcache_clean_inval_poc)

/*
 * dcache_clean_pou(start, end)
 *
 * Ensure that any D-cache lines for the interval [start, end)
 * are cleaned to the PoU.
 *
 * - start   - virtual start address of region (x0)
 * - end     - virtual end address of region (x1)
 *
 * With CTR_EL0.IDC (ARM64_HAS_CACHE_IDC) no clean is needed; the
 * patched-in path is only a store barrier.
 */
SYM_FUNC_START(dcache_clean_pou)
alternative_if ARM64_HAS_CACHE_IDC
	dsb	ishst
	ret
alternative_else_nop_endif
	dcache_by_line_op cvau, ish, x0, x1, x2, x3
	ret
SYM_FUNC_END(dcache_clean_pou)

/*
 * dcache_inval_poc(start, end)
 *
 * Ensure that any D-cache lines for the interval [start, end)
 * are invalidated. Any partial lines at the ends of the interval are
 * also cleaned to PoC to prevent data loss.
 *
 * - start   - kernel start address of region (x0)
 * - end     - kernel end address of region (x1)
 *
 * Partial lines at either edge share data with bytes outside the
 * interval, so a pure invalidate (dc ivac) there would discard that
 * neighbouring data; those edge lines get clean+invalidate (dc civac)
 * instead. Fully-covered interior lines are invalidated only.
 */
SYM_FUNC_START_PI(dcache_inval_poc)
	dcache_line_size x2, x3			// x2 = line size, x3 clobbered
	sub	x3, x2, #1			// x3 = line-size mask
	tst	x1, x3				// end cache line aligned?
	bic	x1, x1, x3			// round end down to line boundary
	b.eq	1f
	dc	civac, x1			// clean & invalidate D / U line (partial end line)
1:	tst	x0, x3				// start cache line aligned?
	bic	x0, x0, x3			// round start down to line boundary
	b.eq	2f
	dc	civac, x0			// clean & invalidate D / U line (partial start line)
	b	3f				// skip the plain invalidate for this line
2:	dc	ivac, x0			// invalidate D / U line (fully covered)
3:	add	x0, x0, x2			// advance one line
	cmp	x0, x1
	b.lo	2b
	dsb	sy				// complete maintenance before returning
	ret
SYM_FUNC_END_PI(dcache_inval_poc)

/*
 * dcache_clean_poc(start, end)
 *
 * Ensure that any D-cache lines for the interval [start, end)
 * are cleaned to the PoC.
 *
 * - start   - virtual start address of region (x0)
 * - end     - virtual end address of region (x1)
 */
SYM_FUNC_START_PI(dcache_clean_poc)
	dcache_by_line_op cvac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(dcache_clean_poc)

/*
 * dcache_clean_pop(start, end)
 *
 * Ensure that any D-cache lines for the interval [start, end)
 * are cleaned to the PoP.
 *
 * - start   - virtual start address of region (x0)
 * - end     - virtual end address of region (x1)
 *
 * Without the DC CVAP instruction (ARM64_HAS_DCPOP, ARMv8.2-DCPoP)
 * this falls back to cleaning to the PoC instead.
 */
SYM_FUNC_START_PI(dcache_clean_pop)
	alternative_if_not ARM64_HAS_DCPOP
	b	dcache_clean_poc		// no CVAP: tail-call the PoC variant
	alternative_else_nop_endif
	dcache_by_line_op cvap, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(dcache_clean_pop)

/*
 * __dma_flush_area(start, size)
 *
 * clean & invalidate D / U line
 *
 * - start   - virtual start address of region (x0)
 * - size    - size in question (x1)
 */
SYM_FUNC_START_PI(__dma_flush_area)
	add	x1, x0, x1			// convert (start, size) to (start, end)
	dcache_by_line_op civac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END_PI(__dma_flush_area)

/*
 * __dma_map_area(start, size, dir)
 * - start   - kernel virtual start address (x0)
 * - size    - size of region (x1)
 * - dir     - DMA direction (w2)
 *
 * DMA_FROM_DEVICE: the device will write, so invalidate to PoC;
 * any other direction: clean to PoC so the device sees our data.
 */
SYM_FUNC_START_PI(__dma_map_area)
	add	x1, x0, x1			// convert (start, size) to (start, end)
	cmp	w2, #DMA_FROM_DEVICE
	b.eq	__pi_dcache_inval_poc
	b	__pi_dcache_clean_poc
SYM_FUNC_END_PI(__dma_map_area)

/*
 * __dma_unmap_area(start, size, dir)
 * - start   - kernel virtual start address (x0)
 * - size    - size of region (x1)
 * - dir     - DMA direction (w2)
 *
 * After DMA: unless the device was only reading (DMA_TO_DEVICE,
 * nothing to do), invalidate so the CPU refetches device-written data.
 */
SYM_FUNC_START_PI(__dma_unmap_area)
	add	x1, x0, x1			// convert (start, size) to (start, end)
	cmp	w2, #DMA_TO_DEVICE
	b.ne	__pi_dcache_inval_poc
	ret
SYM_FUNC_END_PI(__dma_unmap_area)