/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * kexec for arm64
 *
 * Copyright (C) Linaro.
 * Copyright (C) Huawei Futurewei Technologies.
 */

#include <linux/kexec.h>
#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/kexec.h>
#include <asm/page.h>
#include <asm/sysreg.h>
/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
 *
 * The memory that the old kernel occupies may be overwritten when copying the
 * new image to its final location.  To ensure that the
 * arm64_relocate_new_kernel routine, which does that copy, is not overwritten,
 * all code and data needed by arm64_relocate_new_kernel must lie between the
 * symbol arm64_relocate_new_kernel and the local label .Lcopy_end.  The
 * machine_kexec() routine will copy arm64_relocate_new_kernel to the kexec
 * safe memory that has been set up to be preserved during the copy operation.
 */
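/*
 * For orientation, the relocation loop below is roughly equivalent to
 * the following C sketch (illustrative only; the IND_* flags and the
 * entry-list layout are defined in include/linux/kexec.h, and the
 * cache maintenance is omitted):
 *
 *	entry = kimage_head;
 *	while (!(entry & IND_DONE)) {
 *		addr = entry & PAGE_MASK;
 *		if (entry & IND_SOURCE) {
 *			copy_page(dest, addr);
 *			dest += PAGE_SIZE;
 *		} else if (entry & IND_INDIRECTION)
 *			ptr = (u64 *)addr;
 *		else if (entry & IND_DESTINATION)
 *			dest = addr;
 *		entry = *ptr++;
 *	}
 */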
SYM_CODE_START(arm64_relocate_new_kernel)
	/* Setup the list loop variables. */
	mov	x18, x2				/* x18 = dtb address */
	mov	x17, x1				/* x17 = kimage_start */
	mov	x16, x0				/* x16 = kimage_head */
	mov	x14, xzr			/* x14 = entry ptr */
	mov	x13, xzr			/* x13 = copy dest */
	raw_dcache_line_size x15, x1		/* x15 = dcache line size */
	/* Check if the new image needs relocation. */
	tbnz	x16, IND_DONE_BIT, .Ldone
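	/*
	 * Each entry in the kimage list is a page-aligned physical
	 * address with an IND_* flag encoded in its low bits, so a
	 * single register (x16) carries both the address and the flags
	 * tested below.
	 */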
.Lloop:
	and	x12, x16, PAGE_MASK		/* x12 = addr */

	/* Test the entry flags. */
.Ltest_source:
	tbz	x16, IND_SOURCE_BIT, .Ltest_indirection

	/* Invalidate dest page to PoC. */
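	/*
	 * Stale, possibly dirty, cache lines covering the destination
	 * page could be evicted after the copy and corrupt the new
	 * image; discard them to the Point of Coherency first.
	 */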
	mov     x2, x13				/* x2 = start of dest page */
	add     x20, x2, #PAGE_SIZE		/* x20 = end of dest page */
	sub     x1, x15, #1			/* x1 = dcache line mask */
	bic     x2, x2, x1			/* round down to line start */
2:	dc      ivac, x2			/* invalidate line to PoC */
	add     x2, x2, x15
	cmp     x2, x20
	b.lo    2b
	dsb     sy

	/* copy_page advances x13 (dest) and x12 (src) past the page. */
	copy_page x13, x12, x1, x2, x3, x4, x5, x6, x7, x8
	b	.Lnext
.Ltest_indirection:
	tbz	x16, IND_INDIRECTION_BIT, .Ltest_destination
	mov	x14, x12			/* ptr = addr */
	b	.Lnext
.Ltest_destination:
	tbz	x16, IND_DESTINATION_BIT, .Lnext
	mov	x13, x12			/* dest = addr */
.Lnext:
	ldr	x16, [x14], #8			/* entry = *ptr++ */
	tbz	x16, IND_DONE_BIT, .Lloop	/* while (!(entry & DONE)) */
.Ldone:
	/* wait for writes from copy_page to finish */
	dsb	nsh
	/* the copied pages contain code: discard stale instructions */
	ic	iallu
	dsb	nsh
	isb

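	/*
	 * Enter the new image with the register state the arm64 boot
	 * protocol expects: x0 holds the physical address of the device
	 * tree blob and x1-x3 are zero (reserved for future use).
	 */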
	/* Start new image. */
	mov	x0, x18
	mov	x1, xzr
	mov	x2, xzr
	mov	x3, xzr
	br	x17

SYM_CODE_END(arm64_relocate_new_kernel)

.align 3	/* To keep the 64-bit values below naturally aligned. */

.Lcopy_end:
.org	KEXEC_CONTROL_PAGE_SIZE
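/*
 * The .org above doubles as a build-time size check: assembly fails
 * if the relocation code ever grows past KEXEC_CONTROL_PAGE_SIZE.
 */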

/*
 * arm64_relocate_new_kernel_size - Number of bytes to copy to the
 * control_code_page.
 */
.globl arm64_relocate_new_kernel_size
arm64_relocate_new_kernel_size:
	.quad	.Lcopy_end - arm64_relocate_new_kernel