/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * linux/arch/arm64/crypto/aes-ce.S - AES cipher for ARMv8 with
 * Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

#define AES_ENTRY(func)		ENTRY(ce_ ## func)
#define AES_ENDPROC(func)	ENDPROC(ce_ ## func)

	.arch		armv8-a+crypto

	xtsmask		.req	v16
	cbciv		.req	v16
	vctr		.req	v16

	.macro		xts_reload_mask, tmp
	.endm

	/* preload all round keys */
	.macro		load_round_keys, rounds, rk
	cmp		\rounds, #12
	blo		2222f		/* 128 bits */
	beq		1111f		/* 192 bits */
	ld1		{v17.4s-v18.4s}, [\rk], #32
1111:	ld1		{v19.4s-v20.4s}, [\rk], #32
2222:	ld1		{v21.4s-v24.4s}, [\rk], #64
	ld1		{v25.4s-v28.4s}, [\rk], #64
	ld1		{v29.4s-v31.4s}, [\rk]
	.endm

	/* prepare for encryption with key in rk[] */
	.macro		enc_prepare, rounds, rk, temp
	mov		\temp, \rk
	load_round_keys	\rounds, \temp
	.endm

	/* prepare for encryption (again) but with new key in rk[] */
	.macro		enc_switch_key, rounds, rk, temp
	mov		\temp, \rk
	load_round_keys	\rounds, \temp
	.endm

	/* prepare for decryption with key in rk[] */
	.macro		dec_prepare, rounds, rk, temp
	mov		\temp, \rk
	load_round_keys	\rounds, \temp
	.endm
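
	/*
	 * Notes on the scheme above, inferred from the code:
	 *
	 * - The expanded key schedule lives in v17-v31. \rounds is the
	 *   round count (10, 12 or 14 for 128/192/256-bit keys) and a
	 *   schedule holds \rounds + 1 round keys, so shorter schedules
	 *   skip the leading loads (128-bit keys start at v21, 192-bit
	 *   keys at v19) and all sizes fall through to the common tail.
	 *
	 * - xtsmask, cbciv and vctr can all alias v16 because the XTS,
	 *   CBC and CTR paths are mutually exclusive, and xts_reload_mask
	 *   can be a no-op because nothing here clobbers v16 between
	 *   blocks.
	 *
	 * - The macros below interleave up to five blocks so that no AES
	 *   instruction consumes the result of the instruction directly
	 *   before it, hiding instruction latency; many cores also fuse
	 *   adjacent aese/aesmc (and aesd/aesimc) pairs. The final AES
	 *   round has no MixColumns step, which is why fin_round_Nx ends
	 *   each block with a bare aese/aesd followed by an eor with the
	 *   last round key.
	 */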

	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	aes\mc		\i0\().16b, \i0\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	aes\mc		\i1\().16b, \i1\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\mc		\i2\().16b, \i2\().16b
	aes\de		\i3\().16b, \k\().16b
	aes\mc		\i3\().16b, \i3\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	aes\mc		\i4\().16b, \i4\().16b
	.endif
	.endif
	.endif
	.endm

	/* up to 5 interleaved encryption rounds with the same round key */
	.macro		round_Nx, enc, k, i0, i1, i2, i3, i4
	.ifc		\enc, e
	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3, \i4
	.else
	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3, \i4
	.endif
	.endm

	/* up to 5 interleaved final rounds */
	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\de		\i3\().16b, \k\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	.endif
	.endif
	.endif
	eor		\i0\().16b, \i0\().16b, \k2\().16b
	.ifnb		\i1
	eor		\i1\().16b, \i1\().16b, \k2\().16b
	.ifnb		\i3
	eor		\i2\().16b, \i2\().16b, \k2\().16b
	eor		\i3\().16b, \i3\().16b, \k2\().16b
	.ifnb		\i4
	eor		\i4\().16b, \i4\().16b, \k2\().16b
	.endif
	.endif
	.endif
	.endm

	/* up to 5 interleaved blocks */
	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3, i4
	cmp		\rounds, #12
	blo		2222f		/* 128 bits */
	beq		1111f		/* 192 bits */
	round_Nx	\enc, v17, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v18, \i0, \i1, \i2, \i3, \i4
1111:	round_Nx	\enc, v19, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v20, \i0, \i1, \i2, \i3, \i4
2222:	.irp		key, v21, v22, v23, v24, v25, v26, v27, v28, v29
	round_Nx	\enc, \key, \i0, \i1, \i2, \i3, \i4
	.endr
	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3, \i4
	.endm

	.macro		encrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \in
	.endm

	.macro		encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3
	.endm

	.macro		encrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm

	.macro		decrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \in
	.endm

	.macro		decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3
	.endm

	.macro		decrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm

#define MAX_STRIDE	5

#include "aes-modes.S"
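
/*
 * aes-modes.S expands the actual ECB/CBC/CTR/XTS entry points in terms
 * of the macros above, named with the "ce_" prefix via AES_ENTRY();
 * with MAX_STRIDE set to 5 it emits the five-block interleaved code
 * paths. The unused t0-t2 operands of the encrypt/decrypt macros appear
 * to exist to keep them call-compatible with the plain NEON
 * implementation (aes-neon.S), which shares aes-modes.S and does need
 * temporaries.
 */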