/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * kexec for arm64
 *
 * Copyright (C) Linaro.
 * Copyright (C) Huawei Futurewei Technologies.
 */

#include <linux/kexec.h>
#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/kexec.h>
#include <asm/page.h>
#include <asm/sysreg.h>

/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
 *
 * The memory that the old kernel occupies may be overwritten when copying the
 * new image to its final location. To ensure that the
 * arm64_relocate_new_kernel routine which does that copy is not overwritten,
 * all code and data needed by arm64_relocate_new_kernel must lie between the
 * symbol arm64_relocate_new_kernel and the local label .Lcopy_end. The
 * machine_kexec() routine will copy arm64_relocate_new_kernel to the kexec
 * control_code_page, a special page which has been set up to be preserved
 * during the copy operation.
 */
SYM_CODE_START(arm64_relocate_new_kernel)

	/* Set up the list loop variables. */
	mov	x18, x2				/* x18 = dtb address */
	mov	x17, x1				/* x17 = kimage_start */
	mov	x16, x0				/* x16 = kimage_head */
	raw_dcache_line_size x15, x0		/* x15 = dcache line size */
	mov	x14, xzr			/* x14 = entry ptr */
	mov	x13, xzr			/* x13 = copy dest */

	/* Check if the new image needs relocation. */
	tbnz	x16, IND_DONE_BIT, .Ldone

.Lloop:
	and	x12, x16, PAGE_MASK		/* x12 = addr */

	/* Test the entry flags. */
.Ltest_source:
	tbz	x16, IND_SOURCE_BIT, .Ltest_indirection

	/*
	 * Invalidate the dest page to PoC: the copy below runs with the
	 * MMU and caches off, so stale lines must not mask the new data.
	 */
	mov	x0, x13
	add	x20, x0, #PAGE_SIZE
	sub	x1, x15, #1			/* x1 = dcache line mask */
	bic	x0, x0, x1			/* align down to a line */
2:	dc	ivac, x0			/* invalidate line to PoC */
	add	x0, x0, x15
	cmp	x0, x20
	b.lo	2b
	dsb	sy				/* complete the invalidate */

	/* Copy one page from source to dest; x0-x7 are scratch. */
	mov	x20, x13
	mov	x21, x12
	copy_page x20, x21, x0, x1, x2, x3, x4, x5, x6, x7

	/* dest += PAGE_SIZE */
	add	x13, x13, PAGE_SIZE
	b	.Lnext

.Ltest_indirection:
	tbz	x16, IND_INDIRECTION_BIT, .Ltest_destination

	/* ptr = addr */
	mov	x14, x12
	b	.Lnext

.Ltest_destination:
	tbz	x16, IND_DESTINATION_BIT, .Lnext

	/* dest = addr */
	mov	x13, x12

.Lnext:
	/* entry = *ptr++ */
	ldr	x16, [x14], #8

	/* while (!(entry & DONE)) */
	tbz	x16, IND_DONE_BIT, .Lloop

.Ldone:
	/* Wait for writes from copy_page to finish. */
	dsb	nsh
	ic	iallu				/* invalidate the I-cache */
	dsb	nsh
	isb

	/* Start new image: x0 = dtb, x1-x3 = 0, per the arm64 boot protocol. */
	mov	x0, x18
	mov	x1, xzr
	mov	x2, xzr
	mov	x3, xzr
	br	x17

SYM_CODE_END(arm64_relocate_new_kernel)

.align 3	/* To keep the 64-bit values below naturally aligned. */

.Lcopy_end:
.org	KEXEC_CONTROL_PAGE_SIZE

/*
 * arm64_relocate_new_kernel_size - Number of bytes to copy to the
 * control_code_page.
 */
.globl arm64_relocate_new_kernel_size
arm64_relocate_new_kernel_size:
	.quad	.Lcopy_end - arm64_relocate_new_kernel
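
/*
 * For reference only: a minimal C sketch of the list walk that the .Lloop
 * code above implements, assuming the IND_* encoding from <linux/kexec.h>
 * (IND_DESTINATION_BIT = 0, IND_INDIRECTION_BIT = 1, IND_DONE_BIT = 2,
 * IND_SOURCE_BIT = 3). It is not assembled into the control page, and
 * copy_one_page() is a hypothetical stand-in for the copy_page macro:
 *
 *	static void walk_kimage_list(unsigned long head)
 *	{
 *		unsigned long entry = head;
 *		unsigned long *ptr = NULL;
 *		unsigned long dest = 0;
 *
 *		while (!(entry & IND_DONE)) {
 *			unsigned long addr = entry & PAGE_MASK;
 *
 *			if (entry & IND_SOURCE) {
 *				copy_one_page(dest, addr);
 *				dest += PAGE_SIZE;
 *			} else if (entry & IND_INDIRECTION) {
 *				ptr = (unsigned long *)addr;
 *			} else if (entry & IND_DESTINATION) {
 *				dest = addr;
 *			}
 *			entry = *ptr++;
 *		}
 *	}
 */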
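
/*
 * Usage sketch, loosely following arm64's machine_kexec(): everything up to
 * .Lcopy_end is copied to the control page before relocation starts.
 * reboot_code_buffer is an assumed name for
 * page_address(kimage->control_code_page):
 *
 *	memcpy(reboot_code_buffer, arm64_relocate_new_kernel,
 *	       arm64_relocate_new_kernel_size);
 */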