/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License, version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 *
 * Copyright SUSE Linux Products GmbH 2009
 *
 * Authors: Alexander Graf <agraf@suse.de>
 */

#define SHADOW_SLB_ENTRY_LEN	0x10
#define OFFSET_ESID(x)		(SHADOW_SLB_ENTRY_LEN * x)
#define OFFSET_VSID(x)		((SHADOW_SLB_ENTRY_LEN * x) + 8)
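
/*
 * Each shadow SLB entry is 16 bytes: the ESID doubleword at offset 0 and
 * the VSID doubleword at offset 8.  A rough C view of the layout these
 * offsets assume (struct and field names here are illustrative only):
 *
 *	struct shadow_slbe {
 *		u64 esid;	OFFSET_ESID(i) -> byte 0 of entry i
 *		u64 vsid;	OFFSET_VSID(i) -> byte 8 of entry i
 *	};
 */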

/******************************************************************************
 *                                                                            *
 *                               Entry code                                   *
 *                                                                            *
 *****************************************************************************/

.macro LOAD_GUEST_SEGMENTS

	/* Required state:
	 *
	 * MSR = ~IR|DR (instruction and data relocation off)
	 * R13 = PACA
	 * R1 = host R1
	 * R2 = host R2
	 * R3 = shadow vcpu
	 * all other volatile GPRs = free, except R4 and R6
	 * SVCPU[CR]  = guest CR
	 * SVCPU[XER] = guest XER
	 * SVCPU[CTR] = guest CTR
	 * SVCPU[LR]  = guest LR
	 */

BEGIN_FW_FTR_SECTION

	/* Declare SLB shadow as 0 entries big */

	ld	r11, PACA_SLBSHADOWPTR(r13)
	li	r8, 0
	stb	r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)
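
	/*
	 * On an LPAR the hypervisor restores SLB entries from this shadow
	 * buffer, so the byte store above writes 0 into the low byte of the
	 * big-endian 32-bit entry count at the start of the buffer, which is
	 * enough to declare it empty.  With the count at 0, a hypervisor
	 * preemption will not push the host's bolted entries back on top of
	 * the guest SLB loaded below.  Roughly, in C (names as in the host's
	 * struct slb_shadow, used loosely here):
	 *
	 *	get_paca()->slb_shadow_ptr->persistent = cpu_to_be32(0);
	 */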

	/* Flush SLB */

	li	r10, 0
	slbmte	r10, r10
	slbia
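
	/*
	 * On the ISA versions this code targets, slbia leaves SLB entry 0
	 * alone, so the slbmte with zeroed registers above explicitly
	 * invalidates entry 0 (index 0, V bit clear) before slbia sweeps
	 * the remaining entries.
	 */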

	/* Fill SLB with our shadow */

	lbz	r12, SVCPU_SLB_MAX(r3)
	mulli	r12, r12, 16
	addi	r12, r12, SVCPU_SLB
	add	r12, r12, r3
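
	/*
	 * r12 now points one past the last used shadow SLB entry, roughly:
	 *
	 *	end = (void *)svcpu + SVCPU_SLB + svcpu->slb_max * 16;
	 *
	 * (svcpu->slb_max stands in for the field behind SVCPU_SLB_MAX.)
	 */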

	/* for (r11 = kvm_slb; r11 < kvm_slb + kvm_slb_size; r11+=slb_entry) */
	li	r11, SVCPU_SLB
	add	r11, r11, r3

slb_loop_enter:

	ld	r10, 0(r11)

	andis.	r9, r10, SLB_ESID_V@h
	beq	slb_loop_enter_skip

	ld	r9, 8(r11)
	slbmte	r9, r10

slb_loop_enter_skip:
	addi	r11, r11, 16
	cmpd	cr0, r11, r12
	blt	slb_loop_enter
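
	/*
	 * The loop above, sketched in C (structure names are illustrative;
	 * the real layout is whatever the SVCPU_* asm-offsets describe):
	 *
	 *	struct slbe { u64 esid; u64 vsid; };
	 *	struct slbe *e   = (void *)svcpu + SVCPU_SLB;
	 *	struct slbe *end = (void *)svcpu + SVCPU_SLB + svcpu->slb_max * 16;
	 *
	 *	for (; e < end; e++)
	 *		if (e->esid & SLB_ESID_V)
	 *			asm volatile("slbmte %0,%1" : :
	 *				     "r" (e->vsid), "r" (e->esid));
	 */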

slb_do_enter:

.endm

/******************************************************************************
 *                                                                            *
 *                               Exit code                                    *
 *                                                                            *
 *****************************************************************************/

.macro LOAD_HOST_SEGMENTS

	/* Register usage at this point:
	 *
	 * R1         = host R1
	 * R2         = host R2
	 * R12        = exit handler id
	 * R13        = shadow vcpu - SHADOW_VCPU_OFF [=PACA on PPC64]
	 * SVCPU.*    = guest *
	 * SVCPU[CR]  = guest CR
	 * SVCPU[XER] = guest XER
	 * SVCPU[CTR] = guest CTR
	 * SVCPU[LR]  = guest LR
	 *
	 */

	/* Remove all SLB entries that are in use. */

	li	r0, 0
	slbmte	r0, r0
	slbia
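
	/*
	 * Same flush pattern as on entry: clear SLB entry 0 by hand (slbia
	 * leaves it alone), then let slbia drop the remaining guest entries
	 * before the host's bolted entries are reloaded below.
	 */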

	/* Restore bolted entries from the shadow */

	ld	r11, PACA_SLBSHADOWPTR(r13)

BEGIN_FW_FTR_SECTION

	/* Declare SLB shadow as SLB_NUM_BOLTED entries big */

	li	r8, SLB_NUM_BOLTED
	stb	r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)
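
	/*
	 * The store above puts SLB_NUM_BOLTED back into the low byte of the
	 * shadow buffer's big-endian entry count, so a later hypervisor
	 * preemption will again restore the host's bolted entries from the
	 * shadow.
	 */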

	/* Manually load all entries from shadow SLB */

	li	r8, SLBSHADOW_SAVEAREA
	li	r7, SLBSHADOW_SAVEAREA + 8

	.rept	SLB_NUM_BOLTED
	LDX_BE	r10, r11, r8
	cmpdi	r10, 0
	beq	1f
	LDX_BE	r9, r11, r7
	slbmte	r9, r10
1:	addi	r7, r7, SHADOW_SLB_ENTRY_LEN
	addi	r8, r8, SHADOW_SLB_ENTRY_LEN
	.endr
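
	/*
	 * The .rept block is an unrolled reload of the host's bolted
	 * entries, roughly (shadow/save_area stand in for the structure
	 * behind SLBSHADOW_SAVEAREA; LDX_BE byte-swaps on little-endian
	 * hosts since the shadow buffer is big-endian):
	 *
	 *	for (i = 0; i < SLB_NUM_BOLTED; i++) {
	 *		u64 esid = be64_to_cpu(shadow->save_area[i].esid);
	 *		u64 vsid = be64_to_cpu(shadow->save_area[i].vsid);
	 *
	 *		if (esid)
	 *			asm volatile("slbmte %0,%1" : :
	 *				     "r" (vsid), "r" (esid));
	 *	}
	 */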

	isync
	sync
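
	/*
	 * slbmte is not context synchronizing, so synchronize here to make
	 * sure the rebuilt host SLB is in effect before the exit path goes
	 * on to run with translation re-enabled.
	 */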

slb_do_exit:

.endm