/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * vDSO provided cache flush routines
 *
 * Copyright (C) 2004 Benjamin Herrenschmidt (benh@kernel.crashing.org),
 *                    IBM Corp.
 */
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/vdso.h>
#include <asm/vdso_datapage.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>

	.text

/*
 * Default "generic" version of __kernel_sync_dicache.
 *
 * void __kernel_sync_dicache(unsigned long start, unsigned long end)
 *
 * Flushes the data cache & invalidates the instruction cache for the
 * provided range [start, end)
 */
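/*
 * Illustrative userspace call sequence (a sketch, not part of this file):
 * callers normally resolve the symbol from the vDSO image exposed via
 * AT_SYSINFO_EHDR instead of linking against it. vdso_sym() below is a
 * hypothetical lookup helper, and "LINUX_2.6.15" is the version node the
 * powerpc vDSO is expected to export.
 *
 *	void (*sync_dicache)(unsigned long start, unsigned long end) =
 *		vdso_sym("LINUX_2.6.15", "__kernel_sync_dicache");
 *	if (sync_dicache)
 *		sync_dicache((unsigned long)code, (unsigned long)code + len);
 */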
V_FUNCTION_BEGIN(__kernel_sync_dicache)
  .cfi_startproc
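/* CPUs with a coherent icache can use the short path at label 3 below */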
BEGIN_FTR_SECTION
	b	3f
END_FTR_SECTION_IFSET(CPU_FTR_COHERENT_ICACHE)
#ifdef CONFIG_PPC64
	mflr	r12
  .cfi_register lr,r12
	get_realdatapage	r10, r11
	mtlr	r12
  .cfi_restore	lr
#endif

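/*
 * Flush the data cache: ppc64 reads the block size from the vDSO data
 * page, ppc32 uses the compile-time L1_CACHE_BYTES/L1_CACHE_SHIFT.
 */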
#ifdef CONFIG_PPC64
	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
#else
	li	r5, L1_CACHE_BYTES - 1
#endif
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
#ifdef CONFIG_PPC64
	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
	PPC_SRL.	r8,r8,r9		/* compute line count */
#else
	srwi.	r8, r8, L1_CACHE_SHIFT
	mr	r7, r6
#endif
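	/* clear cr0.SO: report success in the syscall-style error flag */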
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
	mtctr	r8
1:	dcbst	0,r6
#ifdef CONFIG_PPC64
	add	r6,r6,r7
#else
	addi	r6, r6, L1_CACHE_BYTES
#endif
	bdnz	1b
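	/* ensure the flushed data is visible before invalidating the icache */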
	sync

/* Now invalidate the instruction cache */

#ifdef CONFIG_PPC64
	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5
	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
	PPC_SRL.	r8,r8,r9		/* compute line count */
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
#endif
	mtctr	r8
#ifdef CONFIG_PPC64
2:	icbi	0,r6
	add	r6,r6,r7
#else
2:	icbi	0, r7
	addi	r7, r7, L1_CACHE_BYTES
#endif
	bdnz	2b
	isync
	li	r3,0
	blr
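/*
 * Fast path for coherent icache CPUs: no per-line loop, a single
 * sync/icbi/isync sequence is sufficient.
 */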
3:
	crclr	cr0*4+so
	sync
	icbi	0,r1
	isync
	li	r3,0
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache)