xref: /openbmc/linux/arch/powerpc/kernel/vdso/cacheflush.S (revision fd1feade75fb1a9275c39d76c5ccdbbbe6b37aa3)
1*fd1feadeSChristophe Leroy/* SPDX-License-Identifier: GPL-2.0-or-later */
2*fd1feadeSChristophe Leroy/*
3*fd1feadeSChristophe Leroy * vDSO provided cache flush routines
4*fd1feadeSChristophe Leroy *
 5*fd1feadeSChristophe Leroy * Copyright (C) 2004 Benjamin Herrenschmidt (benh@kernel.crashing.org),
6*fd1feadeSChristophe Leroy *                    IBM Corp.
7*fd1feadeSChristophe Leroy */
8*fd1feadeSChristophe Leroy#include <asm/processor.h>
9*fd1feadeSChristophe Leroy#include <asm/ppc_asm.h>
10*fd1feadeSChristophe Leroy#include <asm/vdso.h>
11*fd1feadeSChristophe Leroy#include <asm/vdso_datapage.h>
12*fd1feadeSChristophe Leroy#include <asm/asm-offsets.h>
13*fd1feadeSChristophe Leroy#include <asm/cache.h>
14*fd1feadeSChristophe Leroy
15*fd1feadeSChristophe Leroy	.text
16*fd1feadeSChristophe Leroy
17*fd1feadeSChristophe Leroy/*
18*fd1feadeSChristophe Leroy * Default "generic" version of __kernel_sync_dicache.
19*fd1feadeSChristophe Leroy *
20*fd1feadeSChristophe Leroy * void __kernel_sync_dicache(unsigned long start, unsigned long end)
21*fd1feadeSChristophe Leroy *
22*fd1feadeSChristophe Leroy * Flushes the data cache & invalidate the instruction cache for the
23*fd1feadeSChristophe Leroy * provided range [start, end[
24*fd1feadeSChristophe Leroy */
V_FUNCTION_BEGIN(__kernel_sync_dicache)
  .cfi_startproc
	/*
	 * Runtime-patched feature section: on CPUs whose instruction
	 * cache is coherent with the data cache (CPU_FTR_COHERENT_ICACHE)
	 * the per-line flush loops are unnecessary, so branch straight to
	 * the short sync/isync path at label 3.
	 */
BEGIN_FTR_SECTION
	b	3f
END_FTR_SECTION_IFSET(CPU_FTR_COHERENT_ICACHE)
#ifdef CONFIG_PPC64
	/*
	 * PPC64 reads the cache geometry from the vDSO datapage.
	 * get_datapage clobbers the link register, so preserve it in r12
	 * around the call (with matching CFI annotations for unwinders).
	 */
	mflr	r12
  .cfi_register lr,r12
	get_datapage	r10
	mtlr	r12
  .cfi_restore	lr
#endif

	/*
	 * Compute the dcache line mask: PPC64 loads the block size from
	 * the datapage; PPC32 uses the compile-time L1_CACHE_BYTES.
	 * r5 = block_size - 1 (alignment mask).
	 */
#ifdef CONFIG_PPC64
	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
#else
	li	r5, L1_CACHE_BYTES - 1
#endif
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
#ifdef CONFIG_PPC64
	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
	PPC_SRL.	r8,r8,r9		/* compute line count */
#else
	srwi.	r8, r8, L1_CACHE_SHIFT
	mr	r7, r6			/* save rounded start for the icbi loop below */
#endif
	crclr	cr0*4+so		/* clear SO: report success, syscall-style */
	beqlr				/* nothing to do? */
	mtctr	r8
	/* Flush each dcache line to memory: r6 = current addr, ctr = lines left */
1:	dcbst	0,r6
#ifdef CONFIG_PPC64
	add	r6,r6,r7		/* advance by runtime dcache block size */
#else
	addi	r6, r6, L1_CACHE_BYTES
#endif
	bdnz	1b
	sync				/* order dcbst stores before the icbi pass */

/* Now invalidate the instruction cache */

#ifdef CONFIG_PPC64
	/*
	 * PPC64 recomputes start/count from the icache geometry, which may
	 * differ from the dcache's. PPC32 reuses r8 (line count) from the
	 * dcache pass — both caches use L1_CACHE_BYTES lines there — and
	 * walks from the rounded start saved in r7 above.
	 */
	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5
	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
	PPC_SRL.	r8,r8,r9		/* compute line count */
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
#endif
	mtctr	r8
#ifdef CONFIG_PPC64
2:	icbi	0,r6
	add	r6,r6,r7
#else
2:	icbi	0, r7
	addi	r7, r7, L1_CACHE_BYTES
#endif
	bdnz	2b
	isync				/* discard prefetched insns after invalidation */
	li	r3,0			/* return 0 (success) */
	blr
	/* Coherent-icache fast path: barriers only, no per-line maintenance */
3:
	crclr	cr0*4+so
	sync
	isync
	li	r3,0
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache)
99