/* xref: /openbmc/linux/arch/arm64/crypto/aes-ce.S (revision 7f904d7e1f3ec7c2de47c024a5a5c30988b54703) */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * linux/arch/arm64/crypto/aes-ce.S - AES cipher for ARMv8 with
 *                                    Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

/*
 * The shared mode code in aes-modes.S (included at the bottom of this
 * file) emits its functions through AES_ENTRY/AES_ENDPROC; prefixing
 * the names with ce_ gives this Crypto Extensions build its own set of
 * exported symbols.
 */
#define AES_ENTRY(func)		ENTRY(ce_ ## func)
#define AES_ENDPROC(func)	ENDPROC(ce_ ## func)

	.arch		armv8-a+crypto

	/*
	 * Register alias: v16 carries the XTS tweak mask for the mode
	 * code in aes-modes.S (NOTE(review): its exact use is defined
	 * there — confirm against that file).
	 */
	xtsmask		.req	v16

19	.macro		xts_reload_mask, tmp
20	.endm
21
22	/* preload all round keys */
23	.macro		load_round_keys, rounds, rk
24	cmp		\rounds, #12
25	blo		2222f		/* 128 bits */
26	beq		1111f		/* 192 bits */
27	ld1		{v17.4s-v18.4s}, [\rk], #32
281111:	ld1		{v19.4s-v20.4s}, [\rk], #32
292222:	ld1		{v21.4s-v24.4s}, [\rk], #64
30	ld1		{v25.4s-v28.4s}, [\rk], #64
31	ld1		{v29.4s-v31.4s}, [\rk]
32	.endm
33
34	/* prepare for encryption with key in rk[] */
35	.macro		enc_prepare, rounds, rk, temp
36	mov		\temp, \rk
37	load_round_keys	\rounds, \temp
38	.endm
39
40	/* prepare for encryption (again) but with new key in rk[] */
41	.macro		enc_switch_key, rounds, rk, temp
42	mov		\temp, \rk
43	load_round_keys	\rounds, \temp
44	.endm
45
46	/* prepare for decryption with key in rk[] */
47	.macro		dec_prepare, rounds, rk, temp
48	mov		\temp, \rk
49	load_round_keys	\rounds, \temp
50	.endm
51
52	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3
53	aes\de		\i0\().16b, \k\().16b
54	aes\mc		\i0\().16b, \i0\().16b
55	.ifnb		\i1
56	aes\de		\i1\().16b, \k\().16b
57	aes\mc		\i1\().16b, \i1\().16b
58	.ifnb		\i3
59	aes\de		\i2\().16b, \k\().16b
60	aes\mc		\i2\().16b, \i2\().16b
61	aes\de		\i3\().16b, \k\().16b
62	aes\mc		\i3\().16b, \i3\().16b
63	.endif
64	.endif
65	.endm
66
67	/* up to 4 interleaved encryption rounds with the same round key */
68	.macro		round_Nx, enc, k, i0, i1, i2, i3
69	.ifc		\enc, e
70	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3
71	.else
72	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3
73	.endif
74	.endm
75
76	/* up to 4 interleaved final rounds */
77	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3
78	aes\de		\i0\().16b, \k\().16b
79	.ifnb		\i1
80	aes\de		\i1\().16b, \k\().16b
81	.ifnb		\i3
82	aes\de		\i2\().16b, \k\().16b
83	aes\de		\i3\().16b, \k\().16b
84	.endif
85	.endif
86	eor		\i0\().16b, \i0\().16b, \k2\().16b
87	.ifnb		\i1
88	eor		\i1\().16b, \i1\().16b, \k2\().16b
89	.ifnb		\i3
90	eor		\i2\().16b, \i2\().16b, \k2\().16b
91	eor		\i3\().16b, \i3\().16b, \k2\().16b
92	.endif
93	.endif
94	.endm
95
96	/* up to 4 interleaved blocks */
97	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3
98	cmp		\rounds, #12
99	blo		2222f		/* 128 bits */
100	beq		1111f		/* 192 bits */
101	round_Nx	\enc, v17, \i0, \i1, \i2, \i3
102	round_Nx	\enc, v18, \i0, \i1, \i2, \i3
1031111:	round_Nx	\enc, v19, \i0, \i1, \i2, \i3
104	round_Nx	\enc, v20, \i0, \i1, \i2, \i3
1052222:	.irp		key, v21, v22, v23, v24, v25, v26, v27, v28, v29
106	round_Nx	\enc, \key, \i0, \i1, \i2, \i3
107	.endr
108	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3
109	.endm
110
111	.macro		encrypt_block, in, rounds, t0, t1, t2
112	do_block_Nx	e, \rounds, \in
113	.endm
114
115	.macro		encrypt_block2x, i0, i1, rounds, t0, t1, t2
116	do_block_Nx	e, \rounds, \i0, \i1
117	.endm
118
119	.macro		encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
120	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3
121	.endm
122
123	.macro		decrypt_block, in, rounds, t0, t1, t2
124	do_block_Nx	d, \rounds, \in
125	.endm
126
127	.macro		decrypt_block2x, i0, i1, rounds, t0, t1, t2
128	do_block_Nx	d, \rounds, \i0, \i1
129	.endm
130
131	.macro		decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
132	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3
133	.endm
134
135#include "aes-modes.S"
136