/* SPDX-License-Identifier: GPL-2.0 */
/* arch/sparc/crypto/aes_asm.S */
#include <linux/linkage.h>
#include <asm/visasm.h>

#include "opcodes.h"

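/* AES_EROUND*, AES_DROUND* and AES_KEXPAND* come from "opcodes.h",
 * which hand-assembles the SPARC T4 AES instructions as .word
 * directives so the file does not require an AES-aware assembler.
 * All operands are even %f register numbers: a 128-bit AES state
 * occupies two double-precision registers, and each 128-bit round
 * key two consecutive doubles (KEY_BASE and KEY_BASE + 2).
 */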
#define ENCRYPT_TWO_ROUNDS(KEY_BASE, I0, I1, T0, T1) \
	AES_EROUND01(KEY_BASE +  0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE +  2, I0, I1, T1) \
	AES_EROUND01(KEY_BASE +  4, T0, T1, I0) \
	AES_EROUND23(KEY_BASE +  6, T0, T1, I1)

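/* The _2 variants interleave the same rounds over two independent
 * 16-byte blocks (I0/I1 and I2/I3) so that the second block's
 * instructions can issue while the first block's results are still
 * in flight.
 */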
#define ENCRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	AES_EROUND01(KEY_BASE +  0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE +  2, I0, I1, T1) \
	AES_EROUND01(KEY_BASE +  0, I2, I3, T2) \
	AES_EROUND23(KEY_BASE +  2, I2, I3, T3) \
	AES_EROUND01(KEY_BASE +  4, T0, T1, I0) \
	AES_EROUND23(KEY_BASE +  6, T0, T1, I1) \
	AES_EROUND01(KEY_BASE +  4, T2, T3, I2) \
	AES_EROUND23(KEY_BASE +  6, T2, T3, I3)

#define ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE, I0, I1, T0, T1) \
	AES_EROUND01(KEY_BASE +  0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE +  2, I0, I1, T1) \
	AES_EROUND01_L(KEY_BASE +  4, T0, T1, I0) \
	AES_EROUND23_L(KEY_BASE +  6, T0, T1, I1)

#define ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	AES_EROUND01(KEY_BASE +  0, I0, I1, T0) \
	AES_EROUND23(KEY_BASE +  2, I0, I1, T1) \
	AES_EROUND01(KEY_BASE +  0, I2, I3, T2) \
	AES_EROUND23(KEY_BASE +  2, I2, I3, T3) \
	AES_EROUND01_L(KEY_BASE +  4, T0, T1, I0) \
	AES_EROUND23_L(KEY_BASE +  6, T0, T1, I1) \
	AES_EROUND01_L(KEY_BASE +  4, T2, T3, I2) \
	AES_EROUND23_L(KEY_BASE +  6, T2, T3, I3)

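/* Full encryptions are chains of the double-round macros above,
 * ending in a *_LAST variant: 10, 12 or 14 rounds for AES-128/192/256.
 * The AES_EROUND*_L opcodes are the last-round forms, which omit
 * MixColumns.
 */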
	/* 10 rounds */
#define ENCRYPT_128(KEY_BASE, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE +  0, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE +  8, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 32, I0, I1, T0, T1)

#define ENCRYPT_128_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE +  0, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE +  8, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3)

	/* 12 rounds */
#define ENCRYPT_192(KEY_BASE, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE +  0, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE +  8, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 40, I0, I1, T0, T1)

#define ENCRYPT_192_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE +  0, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE +  8, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3) \
	ENCRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 40, I0, I1, I2, I3, T0, T1, T2, T3)

	/* 14 rounds */
#define ENCRYPT_256(KEY_BASE, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE +  0, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE +  8, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS(KEY_BASE + 40, I0, I1, T0, T1) \
	ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE + 48, I0, I1, T0, T1)

#define ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, TMP_BASE) \
	ENCRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, \
			     TMP_BASE + 0, TMP_BASE + 2, TMP_BASE + 4, TMP_BASE + 6)

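/* For two parallel AES-256 streams the full key schedule plus two
 * blocks of state and temporaries do not all fit in the FPU register
 * file, so ENCRYPT_256_2 borrows key registers as temporaries and
 * reloads the clobbered round keys from the schedule as it goes.  It
 * therefore assumes %o0 still points at the key schedule.
 */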
#define ENCRYPT_256_2(KEY_BASE, I0, I1, I2, I3) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE +  0, I0, I1, I2, I3, KEY_BASE + 48) \
	ldd	[%o0 + 0xd0], %f56; \
	ldd	[%o0 + 0xd8], %f58; \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE +  8, I0, I1, I2, I3, KEY_BASE +  0) \
	ldd	[%o0 + 0xe0], %f60; \
	ldd	[%o0 + 0xe8], %f62; \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, KEY_BASE +  0) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, KEY_BASE +  0) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, KEY_BASE +  0) \
	ENCRYPT_256_TWO_ROUNDS_2(KEY_BASE + 40, I0, I1, I2, I3, KEY_BASE +  0) \
	AES_EROUND01(KEY_BASE +  48, I0, I1, KEY_BASE + 0) \
	AES_EROUND23(KEY_BASE +  50, I0, I1, KEY_BASE + 2) \
	AES_EROUND01(KEY_BASE +  48, I2, I3, KEY_BASE + 4) \
	AES_EROUND23(KEY_BASE +  50, I2, I3, KEY_BASE + 6) \
	AES_EROUND01_L(KEY_BASE +  52, KEY_BASE + 0, KEY_BASE + 2, I0) \
	AES_EROUND23_L(KEY_BASE +  54, KEY_BASE + 0, KEY_BASE + 2, I1) \
	ldd	[%o0 + 0x10], %f8; \
	ldd	[%o0 + 0x18], %f10; \
	AES_EROUND01_L(KEY_BASE +  52, KEY_BASE + 4, KEY_BASE + 6, I2) \
	AES_EROUND23_L(KEY_BASE +  54, KEY_BASE + 4, KEY_BASE + 6, I3) \
	ldd	[%o0 + 0x20], %f12; \
	ldd	[%o0 + 0x28], %f14;

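/* Decryption mirrors the encryption macros, with AES_DROUND23/
 * AES_DROUND01 pairs undoing one round each and the *_L forms
 * handling the final round.  The decrypt entry points below load the
 * expanded key into registers in reverse order (note the descending
 * ldd offsets), so no separate decryption schedule is stored.
 */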
#define DECRYPT_TWO_ROUNDS(KEY_BASE, I0, I1, T0, T1) \
	AES_DROUND23(KEY_BASE +  0, I0, I1, T1) \
	AES_DROUND01(KEY_BASE +  2, I0, I1, T0) \
	AES_DROUND23(KEY_BASE +  4, T0, T1, I1) \
	AES_DROUND01(KEY_BASE +  6, T0, T1, I0)

#define DECRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	AES_DROUND23(KEY_BASE +  0, I0, I1, T1) \
	AES_DROUND01(KEY_BASE +  2, I0, I1, T0) \
	AES_DROUND23(KEY_BASE +  0, I2, I3, T3) \
	AES_DROUND01(KEY_BASE +  2, I2, I3, T2) \
	AES_DROUND23(KEY_BASE +  4, T0, T1, I1) \
	AES_DROUND01(KEY_BASE +  6, T0, T1, I0) \
	AES_DROUND23(KEY_BASE +  4, T2, T3, I3) \
	AES_DROUND01(KEY_BASE +  6, T2, T3, I2)

#define DECRYPT_TWO_ROUNDS_LAST(KEY_BASE, I0, I1, T0, T1) \
	AES_DROUND23(KEY_BASE +  0, I0, I1, T1) \
	AES_DROUND01(KEY_BASE +  2, I0, I1, T0) \
	AES_DROUND23_L(KEY_BASE +  4, T0, T1, I1) \
	AES_DROUND01_L(KEY_BASE +  6, T0, T1, I0)

#define DECRYPT_TWO_ROUNDS_LAST_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	AES_DROUND23(KEY_BASE +  0, I0, I1, T1) \
	AES_DROUND01(KEY_BASE +  2, I0, I1, T0) \
	AES_DROUND23(KEY_BASE +  0, I2, I3, T3) \
	AES_DROUND01(KEY_BASE +  2, I2, I3, T2) \
	AES_DROUND23_L(KEY_BASE +  4, T0, T1, I1) \
	AES_DROUND01_L(KEY_BASE +  6, T0, T1, I0) \
	AES_DROUND23_L(KEY_BASE +  4, T2, T3, I3) \
	AES_DROUND01_L(KEY_BASE +  6, T2, T3, I2)

	/* 10 rounds */
#define DECRYPT_128(KEY_BASE, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE +  0, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE +  8, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 32, I0, I1, T0, T1)

#define DECRYPT_128_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE +  0, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE +  8, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3)

	/* 12 rounds */
#define DECRYPT_192(KEY_BASE, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE +  0, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE +  8, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 40, I0, I1, T0, T1)

#define DECRYPT_192_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE +  0, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE +  8, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, T0, T1, T2, T3) \
	DECRYPT_TWO_ROUNDS_LAST_2(KEY_BASE + 40, I0, I1, I2, I3, T0, T1, T2, T3)

	/* 14 rounds */
#define DECRYPT_256(KEY_BASE, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE +  0, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE +  8, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 16, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 24, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 32, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS(KEY_BASE + 40, I0, I1, T0, T1) \
	DECRYPT_TWO_ROUNDS_LAST(KEY_BASE + 48, I0, I1, T0, T1)

#define DECRYPT_256_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, TMP_BASE) \
	DECRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, \
			     TMP_BASE + 0, TMP_BASE + 2, TMP_BASE + 4, TMP_BASE + 6)

#define DECRYPT_256_2(KEY_BASE, I0, I1, I2, I3) \
	DECRYPT_256_TWO_ROUNDS_2(KEY_BASE +  0, I0, I1, I2, I3, KEY_BASE + 48) \
	ldd	[%o0 + 0x18], %f56; \
	ldd	[%o0 + 0x10], %f58; \
	DECRYPT_256_TWO_ROUNDS_2(KEY_BASE +  8, I0, I1, I2, I3, KEY_BASE +  0) \
	ldd	[%o0 + 0x08], %f60; \
	ldd	[%o0 + 0x00], %f62; \
	DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 16, I0, I1, I2, I3, KEY_BASE +  0) \
	DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 24, I0, I1, I2, I3, KEY_BASE +  0) \
	DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 32, I0, I1, I2, I3, KEY_BASE +  0) \
	DECRYPT_256_TWO_ROUNDS_2(KEY_BASE + 40, I0, I1, I2, I3, KEY_BASE +  0) \
	AES_DROUND23(KEY_BASE +  48, I0, I1, KEY_BASE + 2) \
	AES_DROUND01(KEY_BASE +  50, I0, I1, KEY_BASE + 0) \
	AES_DROUND23(KEY_BASE +  48, I2, I3, KEY_BASE + 6) \
	AES_DROUND01(KEY_BASE +  50, I2, I3, KEY_BASE + 4) \
	AES_DROUND23_L(KEY_BASE +  52, KEY_BASE + 0, KEY_BASE + 2, I1) \
	AES_DROUND01_L(KEY_BASE +  54, KEY_BASE + 0, KEY_BASE + 2, I0) \
	ldd	[%o0 + 0xd8], %f8; \
	ldd	[%o0 + 0xd0], %f10; \
	AES_DROUND23_L(KEY_BASE +  52, KEY_BASE + 4, KEY_BASE + 6, I3) \
	AES_DROUND01_L(KEY_BASE +  54, KEY_BASE + 4, KEY_BASE + 6, I2) \
	ldd	[%o0 + 0xc8], %f12; \
	ldd	[%o0 + 0xc0], %f14;

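/* aes_sparc64_key_expand expands the user-supplied key into the
 * round-key schedule consumed by everything below.  The glue code
 * (aes_glue.c) declares it along these lines:
 *
 *	void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
 *				    unsigned int key_len);
 *
 * with key_len in bytes (16, 24 or 32) selecting one of the three
 * expansion paths.
 */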
	.align	32
ENTRY(aes_sparc64_key_expand)
	/* %o0=input_key, %o1=output_key, %o2=key_len */
	VISEntry
	ld	[%o0 + 0x00], %f0
	ld	[%o0 + 0x04], %f1
	ld	[%o0 + 0x08], %f2
	ld	[%o0 + 0x0c], %f3

	std	%f0, [%o1 + 0x00]
	std	%f2, [%o1 + 0x08]
	add	%o1, 0x10, %o1

	cmp	%o2, 24
	bl	2f
	 nop

	be	1f
	 nop

	/* 256-bit key expansion */
	ld	[%o0 + 0x10], %f4
	ld	[%o0 + 0x14], %f5
	ld	[%o0 + 0x18], %f6
	ld	[%o0 + 0x1c], %f7

	std	%f4, [%o1 + 0x00]
	std	%f6, [%o1 + 0x08]
	add	%o1, 0x10, %o1

	AES_KEXPAND1(0, 6, 0x0, 8)
	AES_KEXPAND2(2, 8, 10)
	AES_KEXPAND0(4, 10, 12)
	AES_KEXPAND2(6, 12, 14)
	AES_KEXPAND1(8, 14, 0x1, 16)
	AES_KEXPAND2(10, 16, 18)
	AES_KEXPAND0(12, 18, 20)
	AES_KEXPAND2(14, 20, 22)
	AES_KEXPAND1(16, 22, 0x2, 24)
	AES_KEXPAND2(18, 24, 26)
	AES_KEXPAND0(20, 26, 28)
	AES_KEXPAND2(22, 28, 30)
	AES_KEXPAND1(24, 30, 0x3, 32)
	AES_KEXPAND2(26, 32, 34)
	AES_KEXPAND0(28, 34, 36)
	AES_KEXPAND2(30, 36, 38)
	AES_KEXPAND1(32, 38, 0x4, 40)
	AES_KEXPAND2(34, 40, 42)
	AES_KEXPAND0(36, 42, 44)
	AES_KEXPAND2(38, 44, 46)
	AES_KEXPAND1(40, 46, 0x5, 48)
	AES_KEXPAND2(42, 48, 50)
	AES_KEXPAND0(44, 50, 52)
	AES_KEXPAND2(46, 52, 54)
	AES_KEXPAND1(48, 54, 0x6, 56)
	AES_KEXPAND2(50, 56, 58)

	std	%f8, [%o1 + 0x00]
	std	%f10, [%o1 + 0x08]
	std	%f12, [%o1 + 0x10]
	std	%f14, [%o1 + 0x18]
	std	%f16, [%o1 + 0x20]
	std	%f18, [%o1 + 0x28]
	std	%f20, [%o1 + 0x30]
	std	%f22, [%o1 + 0x38]
	std	%f24, [%o1 + 0x40]
	std	%f26, [%o1 + 0x48]
	std	%f28, [%o1 + 0x50]
	std	%f30, [%o1 + 0x58]
	std	%f32, [%o1 + 0x60]
	std	%f34, [%o1 + 0x68]
	std	%f36, [%o1 + 0x70]
	std	%f38, [%o1 + 0x78]
	std	%f40, [%o1 + 0x80]
	std	%f42, [%o1 + 0x88]
	std	%f44, [%o1 + 0x90]
	std	%f46, [%o1 + 0x98]
	std	%f48, [%o1 + 0xa0]
	std	%f50, [%o1 + 0xa8]
	std	%f52, [%o1 + 0xb0]
	std	%f54, [%o1 + 0xb8]
	std	%f56, [%o1 + 0xc0]
	ba,pt	%xcc, 80f
	 std	%f58, [%o1 + 0xc8]

1:
	/* 192-bit key expansion */
	ld	[%o0 + 0x10], %f4
	ld	[%o0 + 0x14], %f5

	std	%f4, [%o1 + 0x00]
	add	%o1, 0x08, %o1

	AES_KEXPAND1(0, 4, 0x0, 6)
	AES_KEXPAND2(2, 6, 8)
	AES_KEXPAND2(4, 8, 10)
	AES_KEXPAND1(6, 10, 0x1, 12)
	AES_KEXPAND2(8, 12, 14)
	AES_KEXPAND2(10, 14, 16)
	AES_KEXPAND1(12, 16, 0x2, 18)
	AES_KEXPAND2(14, 18, 20)
	AES_KEXPAND2(16, 20, 22)
	AES_KEXPAND1(18, 22, 0x3, 24)
	AES_KEXPAND2(20, 24, 26)
	AES_KEXPAND2(22, 26, 28)
	AES_KEXPAND1(24, 28, 0x4, 30)
	AES_KEXPAND2(26, 30, 32)
	AES_KEXPAND2(28, 32, 34)
	AES_KEXPAND1(30, 34, 0x5, 36)
	AES_KEXPAND2(32, 36, 38)
	AES_KEXPAND2(34, 38, 40)
	AES_KEXPAND1(36, 40, 0x6, 42)
	AES_KEXPAND2(38, 42, 44)
	AES_KEXPAND2(40, 44, 46)
	AES_KEXPAND1(42, 46, 0x7, 48)
	AES_KEXPAND2(44, 48, 50)

	std	%f6, [%o1 + 0x00]
	std	%f8, [%o1 + 0x08]
	std	%f10, [%o1 + 0x10]
	std	%f12, [%o1 + 0x18]
	std	%f14, [%o1 + 0x20]
	std	%f16, [%o1 + 0x28]
	std	%f18, [%o1 + 0x30]
	std	%f20, [%o1 + 0x38]
	std	%f22, [%o1 + 0x40]
	std	%f24, [%o1 + 0x48]
	std	%f26, [%o1 + 0x50]
	std	%f28, [%o1 + 0x58]
	std	%f30, [%o1 + 0x60]
	std	%f32, [%o1 + 0x68]
	std	%f34, [%o1 + 0x70]
	std	%f36, [%o1 + 0x78]
	std	%f38, [%o1 + 0x80]
	std	%f40, [%o1 + 0x88]
	std	%f42, [%o1 + 0x90]
	std	%f44, [%o1 + 0x98]
	std	%f46, [%o1 + 0xa0]
	std	%f48, [%o1 + 0xa8]
	ba,pt	%xcc, 80f
	 std	%f50, [%o1 + 0xb0]

2:
	/* 128-bit key expansion */
	AES_KEXPAND1(0, 2, 0x0, 4)
	AES_KEXPAND2(2, 4, 6)
	AES_KEXPAND1(4, 6, 0x1, 8)
	AES_KEXPAND2(6, 8, 10)
	AES_KEXPAND1(8, 10, 0x2, 12)
	AES_KEXPAND2(10, 12, 14)
	AES_KEXPAND1(12, 14, 0x3, 16)
	AES_KEXPAND2(14, 16, 18)
	AES_KEXPAND1(16, 18, 0x4, 20)
	AES_KEXPAND2(18, 20, 22)
	AES_KEXPAND1(20, 22, 0x5, 24)
	AES_KEXPAND2(22, 24, 26)
	AES_KEXPAND1(24, 26, 0x6, 28)
	AES_KEXPAND2(26, 28, 30)
	AES_KEXPAND1(28, 30, 0x7, 32)
	AES_KEXPAND2(30, 32, 34)
	AES_KEXPAND1(32, 34, 0x8, 36)
	AES_KEXPAND2(34, 36, 38)
	AES_KEXPAND1(36, 38, 0x9, 40)
	AES_KEXPAND2(38, 40, 42)

	std	%f4, [%o1 + 0x00]
	std	%f6, [%o1 + 0x08]
	std	%f8, [%o1 + 0x10]
	std	%f10, [%o1 + 0x18]
	std	%f12, [%o1 + 0x20]
	std	%f14, [%o1 + 0x28]
	std	%f16, [%o1 + 0x30]
	std	%f18, [%o1 + 0x38]
	std	%f20, [%o1 + 0x40]
	std	%f22, [%o1 + 0x48]
	std	%f24, [%o1 + 0x50]
	std	%f26, [%o1 + 0x58]
	std	%f28, [%o1 + 0x60]
	std	%f30, [%o1 + 0x68]
	std	%f32, [%o1 + 0x70]
	std	%f34, [%o1 + 0x78]
	std	%f36, [%o1 + 0x80]
	std	%f38, [%o1 + 0x88]
	std	%f40, [%o1 + 0x90]
	std	%f42, [%o1 + 0x98]
80:
	retl
	 VISExit
ENDPROC(aes_sparc64_key_expand)

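/* Single-block encrypt entry points.  Round key 0 is xored into the
 * plaintext via %f8/%f10, the remaining round keys sit in %f12-%f50,
 * and the rounds then run entirely out of registers.  The 192- and
 * 256-bit variants burn off their extra rounds first, then fall into
 * the same ten-round pattern.
 */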
	.align		32
ENTRY(aes_sparc64_encrypt_128)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld		[%o1 + 0x00], %f4
	ld		[%o1 + 0x04], %f5
	ld		[%o1 + 0x08], %f6
	ld		[%o1 + 0x0c], %f7
	ldd		[%o0 + 0x00], %f8
	ldd		[%o0 + 0x08], %f10
	ldd		[%o0 + 0x10], %f12
	ldd		[%o0 + 0x18], %f14
	ldd		[%o0 + 0x20], %f16
	ldd		[%o0 + 0x28], %f18
	ldd		[%o0 + 0x30], %f20
	ldd		[%o0 + 0x38], %f22
	ldd		[%o0 + 0x40], %f24
	ldd		[%o0 + 0x48], %f26
	ldd		[%o0 + 0x50], %f28
	ldd		[%o0 + 0x58], %f30
	ldd		[%o0 + 0x60], %f32
	ldd		[%o0 + 0x68], %f34
	ldd		[%o0 + 0x70], %f36
	ldd		[%o0 + 0x78], %f38
	ldd		[%o0 + 0x80], %f40
	ldd		[%o0 + 0x88], %f42
	ldd		[%o0 + 0x90], %f44
	ldd		[%o0 + 0x98], %f46
	ldd		[%o0 + 0xa0], %f48
	ldd		[%o0 + 0xa8], %f50
	fxor		%f8, %f4, %f4
	fxor		%f10, %f6, %f6
	ENCRYPT_128(12, 4, 6, 0, 2)
	st		%f4, [%o2 + 0x00]
	st		%f5, [%o2 + 0x04]
	st		%f6, [%o2 + 0x08]
	st		%f7, [%o2 + 0x0c]
	retl
	 VISExit
ENDPROC(aes_sparc64_encrypt_128)

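/* AES-192: run the two extra rounds with round keys 1 and 2, slide
 * the key pointer forward 0x20 bytes, and finish with the common
 * ten-round ENCRYPT_128 sequence.
 */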
	.align		32
ENTRY(aes_sparc64_encrypt_192)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld		[%o1 + 0x00], %f4
	ld		[%o1 + 0x04], %f5
	ld		[%o1 + 0x08], %f6
	ld		[%o1 + 0x0c], %f7

	ldd		[%o0 + 0x00], %f8
	ldd		[%o0 + 0x08], %f10

	fxor		%f8, %f4, %f4
	fxor		%f10, %f6, %f6

	ldd		[%o0 + 0x10], %f8
	ldd		[%o0 + 0x18], %f10
	ldd		[%o0 + 0x20], %f12
	ldd		[%o0 + 0x28], %f14
	add		%o0, 0x20, %o0

	ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)

	ldd		[%o0 + 0x10], %f12
	ldd		[%o0 + 0x18], %f14
	ldd		[%o0 + 0x20], %f16
	ldd		[%o0 + 0x28], %f18
	ldd		[%o0 + 0x30], %f20
	ldd		[%o0 + 0x38], %f22
	ldd		[%o0 + 0x40], %f24
	ldd		[%o0 + 0x48], %f26
	ldd		[%o0 + 0x50], %f28
	ldd		[%o0 + 0x58], %f30
	ldd		[%o0 + 0x60], %f32
	ldd		[%o0 + 0x68], %f34
	ldd		[%o0 + 0x70], %f36
	ldd		[%o0 + 0x78], %f38
	ldd		[%o0 + 0x80], %f40
	ldd		[%o0 + 0x88], %f42
	ldd		[%o0 + 0x90], %f44
	ldd		[%o0 + 0x98], %f46
	ldd		[%o0 + 0xa0], %f48
	ldd		[%o0 + 0xa8], %f50

	ENCRYPT_128(12, 4, 6, 0, 2)

	st		%f4, [%o2 + 0x00]
	st		%f5, [%o2 + 0x04]
	st		%f6, [%o2 + 0x08]
	st		%f7, [%o2 + 0x0c]

	retl
	 VISExit
ENDPROC(aes_sparc64_encrypt_192)

	.align		32
ENTRY(aes_sparc64_encrypt_256)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld		[%o1 + 0x00], %f4
	ld		[%o1 + 0x04], %f5
	ld		[%o1 + 0x08], %f6
	ld		[%o1 + 0x0c], %f7

	ldd		[%o0 + 0x00], %f8
	ldd		[%o0 + 0x08], %f10

	fxor		%f8, %f4, %f4
	fxor		%f10, %f6, %f6

	ldd		[%o0 + 0x10], %f8
	ldd		[%o0 + 0x18], %f10
	ldd		[%o0 + 0x20], %f12
	ldd		[%o0 + 0x28], %f14
	add		%o0, 0x20, %o0

	ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)

	ldd		[%o0 + 0x10], %f8
	ldd		[%o0 + 0x18], %f10
	ldd		[%o0 + 0x20], %f12
	ldd		[%o0 + 0x28], %f14
	add		%o0, 0x20, %o0

	ENCRYPT_TWO_ROUNDS(8, 4, 6, 0, 2)

	ldd		[%o0 + 0x10], %f12
	ldd		[%o0 + 0x18], %f14
	ldd		[%o0 + 0x20], %f16
	ldd		[%o0 + 0x28], %f18
	ldd		[%o0 + 0x30], %f20
	ldd		[%o0 + 0x38], %f22
	ldd		[%o0 + 0x40], %f24
	ldd		[%o0 + 0x48], %f26
	ldd		[%o0 + 0x50], %f28
	ldd		[%o0 + 0x58], %f30
	ldd		[%o0 + 0x60], %f32
	ldd		[%o0 + 0x68], %f34
	ldd		[%o0 + 0x70], %f36
	ldd		[%o0 + 0x78], %f38
	ldd		[%o0 + 0x80], %f40
	ldd		[%o0 + 0x88], %f42
	ldd		[%o0 + 0x90], %f44
	ldd		[%o0 + 0x98], %f46
	ldd		[%o0 + 0xa0], %f48
	ldd		[%o0 + 0xa8], %f50

	ENCRYPT_128(12, 4, 6, 0, 2)

	st		%f4, [%o2 + 0x00]
	st		%f5, [%o2 + 0x04]
	st		%f6, [%o2 + 0x08]
	st		%f7, [%o2 + 0x0c]

	retl
	 VISExit
ENDPROC(aes_sparc64_encrypt_256)

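/* Single-block decrypt entry points.  The loads walk the expanded
 * key from the end back to the start, so the final expanded round
 * key is xored in first and the rounds see the schedule in reverse.
 */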
	.align		32
ENTRY(aes_sparc64_decrypt_128)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld		[%o1 + 0x00], %f4
	ld		[%o1 + 0x04], %f5
	ld		[%o1 + 0x08], %f6
	ld		[%o1 + 0x0c], %f7
	ldd		[%o0 + 0xa0], %f8
	ldd		[%o0 + 0xa8], %f10
	ldd		[%o0 + 0x98], %f12
	ldd		[%o0 + 0x90], %f14
	ldd		[%o0 + 0x88], %f16
	ldd		[%o0 + 0x80], %f18
	ldd		[%o0 + 0x78], %f20
	ldd		[%o0 + 0x70], %f22
	ldd		[%o0 + 0x68], %f24
	ldd		[%o0 + 0x60], %f26
	ldd		[%o0 + 0x58], %f28
	ldd		[%o0 + 0x50], %f30
	ldd		[%o0 + 0x48], %f32
	ldd		[%o0 + 0x40], %f34
	ldd		[%o0 + 0x38], %f36
	ldd		[%o0 + 0x30], %f38
	ldd		[%o0 + 0x28], %f40
	ldd		[%o0 + 0x20], %f42
	ldd		[%o0 + 0x18], %f44
	ldd		[%o0 + 0x10], %f46
	ldd		[%o0 + 0x08], %f48
	ldd		[%o0 + 0x00], %f50
	fxor		%f8, %f4, %f4
	fxor		%f10, %f6, %f6
	DECRYPT_128(12, 4, 6, 0, 2)
	st		%f4, [%o2 + 0x00]
	st		%f5, [%o2 + 0x04]
	st		%f6, [%o2 + 0x08]
	st		%f7, [%o2 + 0x0c]
	retl
	 VISExit
ENDPROC(aes_sparc64_decrypt_128)

	.align		32
ENTRY(aes_sparc64_decrypt_192)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld		[%o1 + 0x00], %f4
	ld		[%o1 + 0x04], %f5
	ld		[%o1 + 0x08], %f6
	ld		[%o1 + 0x0c], %f7
	ldd		[%o0 + 0xc0], %f8
	ldd		[%o0 + 0xc8], %f10
	ldd		[%o0 + 0xb8], %f12
	ldd		[%o0 + 0xb0], %f14
	ldd		[%o0 + 0xa8], %f16
	ldd		[%o0 + 0xa0], %f18
	fxor		%f8, %f4, %f4
	fxor		%f10, %f6, %f6
	ldd		[%o0 + 0x98], %f20
	ldd		[%o0 + 0x90], %f22
	ldd		[%o0 + 0x88], %f24
	ldd		[%o0 + 0x80], %f26
	DECRYPT_TWO_ROUNDS(12, 4, 6, 0, 2)
	ldd		[%o0 + 0x78], %f28
	ldd		[%o0 + 0x70], %f30
	ldd		[%o0 + 0x68], %f32
	ldd		[%o0 + 0x60], %f34
	ldd		[%o0 + 0x58], %f36
	ldd		[%o0 + 0x50], %f38
	ldd		[%o0 + 0x48], %f40
	ldd		[%o0 + 0x40], %f42
	ldd		[%o0 + 0x38], %f44
	ldd		[%o0 + 0x30], %f46
	ldd		[%o0 + 0x28], %f48
	ldd		[%o0 + 0x20], %f50
	ldd		[%o0 + 0x18], %f52
	ldd		[%o0 + 0x10], %f54
	ldd		[%o0 + 0x08], %f56
	ldd		[%o0 + 0x00], %f58
	DECRYPT_128(20, 4, 6, 0, 2)
	st		%f4, [%o2 + 0x00]
	st		%f5, [%o2 + 0x04]
	st		%f6, [%o2 + 0x08]
	st		%f7, [%o2 + 0x0c]
	retl
	 VISExit
ENDPROC(aes_sparc64_decrypt_192)

	.align		32
ENTRY(aes_sparc64_decrypt_256)
	/* %o0=key, %o1=input, %o2=output */
	VISEntry
	ld		[%o1 + 0x00], %f4
	ld		[%o1 + 0x04], %f5
	ld		[%o1 + 0x08], %f6
	ld		[%o1 + 0x0c], %f7
	ldd		[%o0 + 0xe0], %f8
	ldd		[%o0 + 0xe8], %f10
	ldd		[%o0 + 0xd8], %f12
	ldd		[%o0 + 0xd0], %f14
	ldd		[%o0 + 0xc8], %f16
	fxor		%f8, %f4, %f4
	ldd		[%o0 + 0xc0], %f18
	fxor		%f10, %f6, %f6
	ldd		[%o0 + 0xb8], %f20
	AES_DROUND23(12, 4, 6, 2)
	ldd		[%o0 + 0xb0], %f22
	AES_DROUND01(14, 4, 6, 0)
	ldd		[%o0 + 0xa8], %f24
	AES_DROUND23(16, 0, 2, 6)
	ldd		[%o0 + 0xa0], %f26
	AES_DROUND01(18, 0, 2, 4)
	ldd		[%o0 + 0x98], %f12
	AES_DROUND23(20, 4, 6, 2)
	ldd		[%o0 + 0x90], %f14
	AES_DROUND01(22, 4, 6, 0)
	ldd		[%o0 + 0x88], %f16
	AES_DROUND23(24, 0, 2, 6)
	ldd		[%o0 + 0x80], %f18
	AES_DROUND01(26, 0, 2, 4)
	ldd		[%o0 + 0x78], %f20
	AES_DROUND23(12, 4, 6, 2)
	ldd		[%o0 + 0x70], %f22
	AES_DROUND01(14, 4, 6, 0)
	ldd		[%o0 + 0x68], %f24
	AES_DROUND23(16, 0, 2, 6)
	ldd		[%o0 + 0x60], %f26
	AES_DROUND01(18, 0, 2, 4)
	ldd		[%o0 + 0x58], %f28
	AES_DROUND23(20, 4, 6, 2)
	ldd		[%o0 + 0x50], %f30
	AES_DROUND01(22, 4, 6, 0)
	ldd		[%o0 + 0x48], %f32
	AES_DROUND23(24, 0, 2, 6)
	ldd		[%o0 + 0x40], %f34
	AES_DROUND01(26, 0, 2, 4)
	ldd		[%o0 + 0x38], %f36
	AES_DROUND23(28, 4, 6, 2)
	ldd		[%o0 + 0x30], %f38
	AES_DROUND01(30, 4, 6, 0)
	ldd		[%o0 + 0x28], %f40
	AES_DROUND23(32, 0, 2, 6)
	ldd		[%o0 + 0x20], %f42
	AES_DROUND01(34, 0, 2, 4)
	ldd		[%o0 + 0x18], %f44
	AES_DROUND23(36, 4, 6, 2)
	ldd		[%o0 + 0x10], %f46
	AES_DROUND01(38, 4, 6, 0)
	ldd		[%o0 + 0x08], %f48
	AES_DROUND23(40, 0, 2, 6)
	ldd		[%o0 + 0x00], %f50
	AES_DROUND01(42, 0, 2, 4)
	AES_DROUND23(44, 4, 6, 2)
	AES_DROUND01(46, 4, 6, 0)
	AES_DROUND23_L(48, 0, 2, 6)
	AES_DROUND01_L(50, 0, 2, 4)
	st		%f4, [%o2 + 0x00]
	st		%f5, [%o2 + 0x04]
	st		%f6, [%o2 + 0x08]
	st		%f7, [%o2 + 0x0c]
	retl
	 VISExit
ENDPROC(aes_sparc64_decrypt_256)

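/* The load_*_keys helpers preload the round keys into the FPU for
 * the bulk loops that follow.  They do VISEntry but deliberately no
 * VISExit: the loop routines below run with the keys already
 * resident, and the glue code is expected to drop the FPU state once
 * the whole operation is finished.
 */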
	.align		32
ENTRY(aes_sparc64_load_encrypt_keys_128)
	/* %o0=key */
	VISEntry
	ldd		[%o0 + 0x10], %f8
	ldd		[%o0 + 0x18], %f10
	ldd		[%o0 + 0x20], %f12
	ldd		[%o0 + 0x28], %f14
	ldd		[%o0 + 0x30], %f16
	ldd		[%o0 + 0x38], %f18
	ldd		[%o0 + 0x40], %f20
	ldd		[%o0 + 0x48], %f22
	ldd		[%o0 + 0x50], %f24
	ldd		[%o0 + 0x58], %f26
	ldd		[%o0 + 0x60], %f28
	ldd		[%o0 + 0x68], %f30
	ldd		[%o0 + 0x70], %f32
	ldd		[%o0 + 0x78], %f34
	ldd		[%o0 + 0x80], %f36
	ldd		[%o0 + 0x88], %f38
	ldd		[%o0 + 0x90], %f40
	ldd		[%o0 + 0x98], %f42
	ldd		[%o0 + 0xa0], %f44
	retl
	 ldd		[%o0 + 0xa8], %f46
ENDPROC(aes_sparc64_load_encrypt_keys_128)

	.align		32
ENTRY(aes_sparc64_load_encrypt_keys_192)
	/* %o0=key */
	VISEntry
	ldd		[%o0 + 0x10], %f8
	ldd		[%o0 + 0x18], %f10
	ldd		[%o0 + 0x20], %f12
	ldd		[%o0 + 0x28], %f14
	ldd		[%o0 + 0x30], %f16
	ldd		[%o0 + 0x38], %f18
	ldd		[%o0 + 0x40], %f20
	ldd		[%o0 + 0x48], %f22
	ldd		[%o0 + 0x50], %f24
	ldd		[%o0 + 0x58], %f26
	ldd		[%o0 + 0x60], %f28
	ldd		[%o0 + 0x68], %f30
	ldd		[%o0 + 0x70], %f32
	ldd		[%o0 + 0x78], %f34
	ldd		[%o0 + 0x80], %f36
	ldd		[%o0 + 0x88], %f38
	ldd		[%o0 + 0x90], %f40
	ldd		[%o0 + 0x98], %f42
	ldd		[%o0 + 0xa0], %f44
	ldd		[%o0 + 0xa8], %f46
	ldd		[%o0 + 0xb0], %f48
	ldd		[%o0 + 0xb8], %f50
	ldd		[%o0 + 0xc0], %f52
	retl
	 ldd		[%o0 + 0xc8], %f54
ENDPROC(aes_sparc64_load_encrypt_keys_192)

	.align		32
ENTRY(aes_sparc64_load_encrypt_keys_256)
	/* %o0=key */
	VISEntry
	ldd		[%o0 + 0x10], %f8
	ldd		[%o0 + 0x18], %f10
	ldd		[%o0 + 0x20], %f12
	ldd		[%o0 + 0x28], %f14
	ldd		[%o0 + 0x30], %f16
	ldd		[%o0 + 0x38], %f18
	ldd		[%o0 + 0x40], %f20
	ldd		[%o0 + 0x48], %f22
	ldd		[%o0 + 0x50], %f24
	ldd		[%o0 + 0x58], %f26
	ldd		[%o0 + 0x60], %f28
	ldd		[%o0 + 0x68], %f30
	ldd		[%o0 + 0x70], %f32
	ldd		[%o0 + 0x78], %f34
	ldd		[%o0 + 0x80], %f36
	ldd		[%o0 + 0x88], %f38
	ldd		[%o0 + 0x90], %f40
	ldd		[%o0 + 0x98], %f42
	ldd		[%o0 + 0xa0], %f44
	ldd		[%o0 + 0xa8], %f46
	ldd		[%o0 + 0xb0], %f48
	ldd		[%o0 + 0xb8], %f50
	ldd		[%o0 + 0xc0], %f52
	ldd		[%o0 + 0xc8], %f54
	ldd		[%o0 + 0xd0], %f56
	ldd		[%o0 + 0xd8], %f58
	ldd		[%o0 + 0xe0], %f60
	retl
	 ldd		[%o0 + 0xe8], %f62
ENDPROC(aes_sparc64_load_encrypt_keys_256)

	.align		32
ENTRY(aes_sparc64_load_decrypt_keys_128)
	/* %o0=key */
	VISEntry
	ldd		[%o0 + 0x98], %f8
	ldd		[%o0 + 0x90], %f10
	ldd		[%o0 + 0x88], %f12
	ldd		[%o0 + 0x80], %f14
	ldd		[%o0 + 0x78], %f16
	ldd		[%o0 + 0x70], %f18
	ldd		[%o0 + 0x68], %f20
	ldd		[%o0 + 0x60], %f22
	ldd		[%o0 + 0x58], %f24
	ldd		[%o0 + 0x50], %f26
	ldd		[%o0 + 0x48], %f28
	ldd		[%o0 + 0x40], %f30
	ldd		[%o0 + 0x38], %f32
	ldd		[%o0 + 0x30], %f34
	ldd		[%o0 + 0x28], %f36
	ldd		[%o0 + 0x20], %f38
	ldd		[%o0 + 0x18], %f40
	ldd		[%o0 + 0x10], %f42
	ldd		[%o0 + 0x08], %f44
	retl
	 ldd		[%o0 + 0x00], %f46
ENDPROC(aes_sparc64_load_decrypt_keys_128)

	.align		32
ENTRY(aes_sparc64_load_decrypt_keys_192)
	/* %o0=key */
	VISEntry
	ldd		[%o0 + 0xb8], %f8
	ldd		[%o0 + 0xb0], %f10
	ldd		[%o0 + 0xa8], %f12
	ldd		[%o0 + 0xa0], %f14
	ldd		[%o0 + 0x98], %f16
	ldd		[%o0 + 0x90], %f18
	ldd		[%o0 + 0x88], %f20
	ldd		[%o0 + 0x80], %f22
	ldd		[%o0 + 0x78], %f24
	ldd		[%o0 + 0x70], %f26
	ldd		[%o0 + 0x68], %f28
	ldd		[%o0 + 0x60], %f30
	ldd		[%o0 + 0x58], %f32
	ldd		[%o0 + 0x50], %f34
	ldd		[%o0 + 0x48], %f36
	ldd		[%o0 + 0x40], %f38
	ldd		[%o0 + 0x38], %f40
	ldd		[%o0 + 0x30], %f42
	ldd		[%o0 + 0x28], %f44
	ldd		[%o0 + 0x20], %f46
	ldd		[%o0 + 0x18], %f48
	ldd		[%o0 + 0x10], %f50
	ldd		[%o0 + 0x08], %f52
	retl
	 ldd		[%o0 + 0x00], %f54
ENDPROC(aes_sparc64_load_decrypt_keys_192)

	.align		32
ENTRY(aes_sparc64_load_decrypt_keys_256)
	/* %o0=key */
	VISEntry
	ldd		[%o0 + 0xd8], %f8
	ldd		[%o0 + 0xd0], %f10
	ldd		[%o0 + 0xc8], %f12
	ldd		[%o0 + 0xc0], %f14
	ldd		[%o0 + 0xb8], %f16
	ldd		[%o0 + 0xb0], %f18
	ldd		[%o0 + 0xa8], %f20
	ldd		[%o0 + 0xa0], %f22
	ldd		[%o0 + 0x98], %f24
	ldd		[%o0 + 0x90], %f26
	ldd		[%o0 + 0x88], %f28
	ldd		[%o0 + 0x80], %f30
	ldd		[%o0 + 0x78], %f32
	ldd		[%o0 + 0x70], %f34
	ldd		[%o0 + 0x68], %f36
	ldd		[%o0 + 0x60], %f38
	ldd		[%o0 + 0x58], %f40
	ldd		[%o0 + 0x50], %f42
	ldd		[%o0 + 0x48], %f44
	ldd		[%o0 + 0x40], %f46
	ldd		[%o0 + 0x38], %f48
	ldd		[%o0 + 0x30], %f50
	ldd		[%o0 + 0x28], %f52
	ldd		[%o0 + 0x20], %f54
	ldd		[%o0 + 0x18], %f56
	ldd		[%o0 + 0x10], %f58
	ldd		[%o0 + 0x08], %f60
	retl
	 ldd		[%o0 + 0x00], %f62
ENDPROC(aes_sparc64_load_decrypt_keys_256)

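/* ECB loops.  len is a multiple of the 16-byte block size.  The main
 * loop at 1: handles 32 bytes per iteration with the two-block
 * macros, label 10: picks up a trailing single block, and 11: is the
 * shared return; note the loads and adds tucked into branch delay
 * slots throughout.
 */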
	.align		32
ENTRY(aes_sparc64_ecb_encrypt_128)
	/* %o0=key, %o1=input, %o2=output, %o3=len */
	ldx		[%o0 + 0x00], %g1
	subcc		%o3, 0x10, %o3
	be		10f
	 ldx		[%o0 + 0x08], %g2
1:	ldx		[%o1 + 0x00], %g3
	ldx		[%o1 + 0x08], %g7
	ldx		[%o1 + 0x10], %o4
	ldx		[%o1 + 0x18], %o5
	xor		%g1, %g3, %g3
	xor		%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	xor		%g1, %o4, %g3
	xor		%g2, %o5, %g7
	MOVXTOD_G3_F60
	MOVXTOD_G7_F62
	ENCRYPT_128_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
	std		%f4, [%o2 + 0x00]
	std		%f6, [%o2 + 0x08]
	std		%f60, [%o2 + 0x10]
	std		%f62, [%o2 + 0x18]
	sub		%o3, 0x20, %o3
	add		%o1, 0x20, %o1
	brgz		%o3, 1b
	 add		%o2, 0x20, %o2
	brlz,pt		%o3, 11f
	 nop
10:	ldx		[%o1 + 0x00], %g3
	ldx		[%o1 + 0x08], %g7
	xor		%g1, %g3, %g3
	xor		%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	ENCRYPT_128(8, 4, 6, 0, 2)
	std		%f4, [%o2 + 0x00]
	std		%f6, [%o2 + 0x08]
11:	retl
	 nop
ENDPROC(aes_sparc64_ecb_encrypt_128)

	.align		32
ENTRY(aes_sparc64_ecb_encrypt_192)
	/* %o0=key, %o1=input, %o2=output, %o3=len */
	ldx		[%o0 + 0x00], %g1
	subcc		%o3, 0x10, %o3
	be		10f
	 ldx		[%o0 + 0x08], %g2
1:	ldx		[%o1 + 0x00], %g3
	ldx		[%o1 + 0x08], %g7
	ldx		[%o1 + 0x10], %o4
	ldx		[%o1 + 0x18], %o5
	xor		%g1, %g3, %g3
	xor		%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	xor		%g1, %o4, %g3
	xor		%g2, %o5, %g7
	MOVXTOD_G3_F60
	MOVXTOD_G7_F62
	ENCRYPT_192_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
	std		%f4, [%o2 + 0x00]
	std		%f6, [%o2 + 0x08]
	std		%f60, [%o2 + 0x10]
	std		%f62, [%o2 + 0x18]
	sub		%o3, 0x20, %o3
	add		%o1, 0x20, %o1
	brgz		%o3, 1b
	 add		%o2, 0x20, %o2
	brlz,pt		%o3, 11f
	 nop
10:	ldx		[%o1 + 0x00], %g3
	ldx		[%o1 + 0x08], %g7
	xor		%g1, %g3, %g3
	xor		%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	ENCRYPT_192(8, 4, 6, 0, 2)
	std		%f4, [%o2 + 0x00]
	std		%f6, [%o2 + 0x08]
11:	retl
	 nop
ENDPROC(aes_sparc64_ecb_encrypt_192)

	.align		32
ENTRY(aes_sparc64_ecb_encrypt_256)
	/* %o0=key, %o1=input, %o2=output, %o3=len */
	ldx		[%o0 + 0x00], %g1
	subcc		%o3, 0x10, %o3
	be		10f
	 ldx		[%o0 + 0x08], %g2
1:	ldx		[%o1 + 0x00], %g3
	ldx		[%o1 + 0x08], %g7
	ldx		[%o1 + 0x10], %o4
	ldx		[%o1 + 0x18], %o5
	xor		%g1, %g3, %g3
	xor		%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	xor		%g1, %o4, %g3
	xor		%g2, %o5, %g7
	MOVXTOD_G3_F0
	MOVXTOD_G7_F2
	ENCRYPT_256_2(8, 4, 6, 0, 2)
	std		%f4, [%o2 + 0x00]
	std		%f6, [%o2 + 0x08]
	std		%f0, [%o2 + 0x10]
	std		%f2, [%o2 + 0x18]
	sub		%o3, 0x20, %o3
	add		%o1, 0x20, %o1
	brgz		%o3, 1b
	 add		%o2, 0x20, %o2
	brlz,pt		%o3, 11f
	 nop
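	/* Single-block tail: the two-block macro above uses %f56-%f62
	 * as scratch, so make sure the last four round keys are back
	 * in place before running the plain ENCRYPT_256 sequence.
	 */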
10:	ldd		[%o0 + 0xd0], %f56
	ldd		[%o0 + 0xd8], %f58
	ldd		[%o0 + 0xe0], %f60
	ldd		[%o0 + 0xe8], %f62
	ldx		[%o1 + 0x00], %g3
	ldx		[%o1 + 0x08], %g7
	xor		%g1, %g3, %g3
	xor		%g2, %g7, %g7
	MOVXTOD_G3_F4
	MOVXTOD_G7_F6
	ENCRYPT_256(8, 4, 6, 0, 2)
	std		%f4, [%o2 + 0x00]
	std		%f6, [%o2 + 0x08]
11:	retl
	 nop
ENDPROC(aes_sparc64_ecb_encrypt_256)

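/* ECB decrypt loops.  %o0 arrives pointing at the end of the key
 * schedule (&key[key_len]), so the initial xor key (the final
 * expanded round key) is fetched with negative offsets; otherwise
 * the structure matches the encrypt loops above.
 */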
10450bdcaf74SDavid S. Miller	.align		32
10460bdcaf74SDavid S. MillerENTRY(aes_sparc64_ecb_decrypt_128)
10470bdcaf74SDavid S. Miller	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
10489bf4852dSDavid S. Miller	ldx		[%o0 - 0x10], %g1
104930101315SDavid S. Miller	subcc		%o3, 0x10, %o3
105030101315SDavid S. Miller	be		10f
10519bf4852dSDavid S. Miller	 ldx		[%o0 - 0x08], %g2
10520bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
10530bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
105430101315SDavid S. Miller	ldx		[%o1 + 0x10], %o4
105530101315SDavid S. Miller	ldx		[%o1 + 0x18], %o5
105630101315SDavid S. Miller	xor		%g1, %g3, %g3
105730101315SDavid S. Miller	xor		%g2, %g7, %g7
105830101315SDavid S. Miller	MOVXTOD_G3_F4
105930101315SDavid S. Miller	MOVXTOD_G7_F6
106030101315SDavid S. Miller	xor		%g1, %o4, %g3
106130101315SDavid S. Miller	xor		%g2, %o5, %g7
106230101315SDavid S. Miller	MOVXTOD_G3_F60
106330101315SDavid S. Miller	MOVXTOD_G7_F62
106430101315SDavid S. Miller	DECRYPT_128_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
106530101315SDavid S. Miller	std		%f4, [%o2 + 0x00]
106630101315SDavid S. Miller	std		%f6, [%o2 + 0x08]
106730101315SDavid S. Miller	std		%f60, [%o2 + 0x10]
106830101315SDavid S. Miller	std		%f62, [%o2 + 0x18]
106930101315SDavid S. Miller	sub		%o3, 0x20, %o3
107030101315SDavid S. Miller	add		%o1, 0x20, %o1
107130101315SDavid S. Miller	brgz,pt		%o3, 1b
107230101315SDavid S. Miller	 add		%o2, 0x20, %o2
107330101315SDavid S. Miller	brlz,pt		%o3, 11f
107430101315SDavid S. Miller	 nop
107530101315SDavid S. Miller10:	ldx		[%o1 + 0x00], %g3
107630101315SDavid S. Miller	ldx		[%o1 + 0x08], %g7
10779bf4852dSDavid S. Miller	xor		%g1, %g3, %g3
10789bf4852dSDavid S. Miller	xor		%g2, %g7, %g7
10799bf4852dSDavid S. Miller	MOVXTOD_G3_F4
10809bf4852dSDavid S. Miller	MOVXTOD_G7_F6
10810bdcaf74SDavid S. Miller	DECRYPT_128(8, 4, 6, 0, 2)
10820bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
10830bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
108430101315SDavid S. Miller11:	retl
10859bf4852dSDavid S. Miller	 nop
10860bdcaf74SDavid S. MillerENDPROC(aes_sparc64_ecb_decrypt_128)
10879bf4852dSDavid S. Miller
10880bdcaf74SDavid S. Miller	.align		32
10890bdcaf74SDavid S. MillerENTRY(aes_sparc64_ecb_decrypt_192)
10900bdcaf74SDavid S. Miller	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
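	/* Same structure as aes_sparc64_ecb_decrypt_128, but with the
	 * 12-round DECRYPT_192/DECRYPT_192_2 macros.
	 */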
10910bdcaf74SDavid S. Miller	ldx		[%o0 - 0x10], %g1
109230101315SDavid S. Miller	subcc		%o3, 0x10, %o3
109330101315SDavid S. Miller	be		10f
10940bdcaf74SDavid S. Miller	 ldx		[%o0 - 0x08], %g2
10950bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
10960bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
109730101315SDavid S. Miller	ldx		[%o1 + 0x10], %o4
109830101315SDavid S. Miller	ldx		[%o1 + 0x18], %o5
109930101315SDavid S. Miller	xor		%g1, %g3, %g3
110030101315SDavid S. Miller	xor		%g2, %g7, %g7
110130101315SDavid S. Miller	MOVXTOD_G3_F4
110230101315SDavid S. Miller	MOVXTOD_G7_F6
110330101315SDavid S. Miller	xor		%g1, %o4, %g3
110430101315SDavid S. Miller	xor		%g2, %o5, %g7
110530101315SDavid S. Miller	MOVXTOD_G3_F60
110630101315SDavid S. Miller	MOVXTOD_G7_F62
110730101315SDavid S. Miller	DECRYPT_192_2(8, 4, 6, 60, 62, 0, 2, 56, 58)
110830101315SDavid S. Miller	std		%f4, [%o2 + 0x00]
110930101315SDavid S. Miller	std		%f6, [%o2 + 0x08]
111030101315SDavid S. Miller	std		%f60, [%o2 + 0x10]
111130101315SDavid S. Miller	std		%f62, [%o2 + 0x18]
111230101315SDavid S. Miller	sub		%o3, 0x20, %o3
111330101315SDavid S. Miller	add		%o1, 0x20, %o1
111430101315SDavid S. Miller	brgz,pt		%o3, 1b
111530101315SDavid S. Miller	 add		%o2, 0x20, %o2
111630101315SDavid S. Miller	brlz,pt		%o3, 11f
111730101315SDavid S. Miller	 nop
111830101315SDavid S. Miller10:	ldx		[%o1 + 0x00], %g3
111930101315SDavid S. Miller	ldx		[%o1 + 0x08], %g7
11209bf4852dSDavid S. Miller	xor		%g1, %g3, %g3
11219bf4852dSDavid S. Miller	xor		%g2, %g7, %g7
11229bf4852dSDavid S. Miller	MOVXTOD_G3_F4
11239bf4852dSDavid S. Miller	MOVXTOD_G7_F6
11240bdcaf74SDavid S. Miller	DECRYPT_192(8, 4, 6, 0, 2)
11250bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
11260bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
112730101315SDavid S. Miller11:	retl
11289bf4852dSDavid S. Miller	 nop
11290bdcaf74SDavid S. MillerENDPROC(aes_sparc64_ecb_decrypt_192)
11300bdcaf74SDavid S. Miller
11310bdcaf74SDavid S. Miller	.align		32
11320bdcaf74SDavid S. MillerENTRY(aes_sparc64_ecb_decrypt_256)
11330bdcaf74SDavid S. Miller	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len */
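	/* The delay-slot "sub %o0, 0xf0, %o0" below rewinds %o0 from
	 * &key[key_len] to the start of the expanded key (0xf0 bytes for
	 * AES-256), so the one-block tail at 10: can reload %f56-%f62
	 * from offsets 0x18 down to 0x00 after the two-block path has
	 * used those registers as scratch.
	 */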
11340bdcaf74SDavid S. Miller	ldx		[%o0 - 0x10], %g1
113530101315SDavid S. Miller	subcc		%o3, 0x10, %o3
11360bdcaf74SDavid S. Miller	ldx		[%o0 - 0x08], %g2
11379f28ffc0SDavid S. Miller	be		10f
113830101315SDavid S. Miller	 sub		%o0, 0xf0, %o0
11390bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
11400bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
114130101315SDavid S. Miller	ldx		[%o1 + 0x10], %o4
114230101315SDavid S. Miller	ldx		[%o1 + 0x18], %o5
114330101315SDavid S. Miller	xor		%g1, %g3, %g3
114430101315SDavid S. Miller	xor		%g2, %g7, %g7
114530101315SDavid S. Miller	MOVXTOD_G3_F4
114630101315SDavid S. Miller	MOVXTOD_G7_F6
114730101315SDavid S. Miller	xor		%g1, %o4, %g3
114830101315SDavid S. Miller	xor		%g2, %o5, %g7
114930101315SDavid S. Miller	MOVXTOD_G3_F0
115030101315SDavid S. Miller	MOVXTOD_G7_F2
115130101315SDavid S. Miller	DECRYPT_256_2(8, 4, 6, 0, 2)
115230101315SDavid S. Miller	std		%f4, [%o2 + 0x00]
115330101315SDavid S. Miller	std		%f6, [%o2 + 0x08]
1154699871bcSDavid S. Miller	std		%f0, [%o2 + 0x10]
1155699871bcSDavid S. Miller	std		%f2, [%o2 + 0x18]
115630101315SDavid S. Miller	sub		%o3, 0x20, %o3
115730101315SDavid S. Miller	add		%o1, 0x20, %o1
115830101315SDavid S. Miller	brgz,pt		%o3, 1b
115930101315SDavid S. Miller	 add		%o2, 0x20, %o2
116030101315SDavid S. Miller	brlz,pt		%o3, 11f
116130101315SDavid S. Miller	 nop
11629f28ffc0SDavid S. Miller10:	ldd		[%o0 + 0x18], %f56
11639f28ffc0SDavid S. Miller	ldd		[%o0 + 0x10], %f58
11649f28ffc0SDavid S. Miller	ldd		[%o0 + 0x08], %f60
11659f28ffc0SDavid S. Miller	ldd		[%o0 + 0x00], %f62
11669f28ffc0SDavid S. Miller	ldx		[%o1 + 0x00], %g3
116730101315SDavid S. Miller	ldx		[%o1 + 0x08], %g7
11680bdcaf74SDavid S. Miller	xor		%g1, %g3, %g3
11690bdcaf74SDavid S. Miller	xor		%g2, %g7, %g7
11700bdcaf74SDavid S. Miller	MOVXTOD_G3_F4
11710bdcaf74SDavid S. Miller	MOVXTOD_G7_F6
11720bdcaf74SDavid S. Miller	DECRYPT_256(8, 4, 6, 0, 2)
11730bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
11740bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
117530101315SDavid S. Miller11:	retl
11760bdcaf74SDavid S. Miller	 nop
11770bdcaf74SDavid S. MillerENDPROC(aes_sparc64_ecb_decrypt_256)
11780bdcaf74SDavid S. Miller
11790bdcaf74SDavid S. Miller	.align		32
11800bdcaf74SDavid S. MillerENTRY(aes_sparc64_cbc_encrypt_128)
11810bdcaf74SDavid S. Miller	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
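	/* CBC chaining: %f4/%f6 hold the IV and then each ciphertext
	 * block.  Every plaintext block is XORed with the first two key
	 * words in integer registers, moved into %f0/%f2, fxor'd with
	 * the chaining value, and encrypted; the final ciphertext block
	 * is written back through %o4 as the next IV.  Presumed C
	 * declaration (an assumption):
	 *   void aes_sparc64_cbc_encrypt_128(const u64 *key,
	 *                                    const u64 *in, u64 *out,
	 *                                    unsigned int len, u64 *iv);
	 */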
11820bdcaf74SDavid S. Miller	ldd		[%o4 + 0x00], %f4
11830bdcaf74SDavid S. Miller	ldd		[%o4 + 0x08], %f6
11840bdcaf74SDavid S. Miller	ldx		[%o0 + 0x00], %g1
11850bdcaf74SDavid S. Miller	ldx		[%o0 + 0x08], %g2
11860bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
11870bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
11880bdcaf74SDavid S. Miller	add		%o1, 0x10, %o1
11890bdcaf74SDavid S. Miller	xor		%g1, %g3, %g3
11900bdcaf74SDavid S. Miller	xor		%g2, %g7, %g7
11910bdcaf74SDavid S. Miller	MOVXTOD_G3_F0
11920bdcaf74SDavid S. Miller	MOVXTOD_G7_F2
11930bdcaf74SDavid S. Miller	fxor		%f4, %f0, %f4
11940bdcaf74SDavid S. Miller	fxor		%f6, %f2, %f6
11950bdcaf74SDavid S. Miller	ENCRYPT_128(8, 4, 6, 0, 2)
11960bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
11970bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
11980bdcaf74SDavid S. Miller	subcc		%o3, 0x10, %o3
11990bdcaf74SDavid S. Miller	bne,pt		%xcc, 1b
12000bdcaf74SDavid S. Miller	 add		%o2, 0x10, %o2
12010bdcaf74SDavid S. Miller	std		%f4, [%o4 + 0x00]
12020bdcaf74SDavid S. Miller	std		%f6, [%o4 + 0x08]
12030bdcaf74SDavid S. Miller	retl
12040bdcaf74SDavid S. Miller	 nop
12050bdcaf74SDavid S. MillerENDPROC(aes_sparc64_cbc_encrypt_128)
12060bdcaf74SDavid S. Miller
12070bdcaf74SDavid S. Miller	.align		32
12080bdcaf74SDavid S. MillerENTRY(aes_sparc64_cbc_encrypt_192)
12090bdcaf74SDavid S. Miller	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
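	/* Identical to aes_sparc64_cbc_encrypt_128 except for the
	 * 12-round ENCRYPT_192.
	 */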
12100bdcaf74SDavid S. Miller	ldd		[%o4 + 0x00], %f4
12110bdcaf74SDavid S. Miller	ldd		[%o4 + 0x08], %f6
12120bdcaf74SDavid S. Miller	ldx		[%o0 + 0x00], %g1
12130bdcaf74SDavid S. Miller	ldx		[%o0 + 0x08], %g2
12140bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
12150bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
12160bdcaf74SDavid S. Miller	add		%o1, 0x10, %o1
12170bdcaf74SDavid S. Miller	xor		%g1, %g3, %g3
12180bdcaf74SDavid S. Miller	xor		%g2, %g7, %g7
12190bdcaf74SDavid S. Miller	MOVXTOD_G3_F0
12200bdcaf74SDavid S. Miller	MOVXTOD_G7_F2
12210bdcaf74SDavid S. Miller	fxor		%f4, %f0, %f4
12220bdcaf74SDavid S. Miller	fxor		%f6, %f2, %f6
12230bdcaf74SDavid S. Miller	ENCRYPT_192(8, 4, 6, 0, 2)
12240bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
12250bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
12260bdcaf74SDavid S. Miller	subcc		%o3, 0x10, %o3
12270bdcaf74SDavid S. Miller	bne,pt		%xcc, 1b
12280bdcaf74SDavid S. Miller	 add		%o2, 0x10, %o2
12290bdcaf74SDavid S. Miller	std		%f4, [%o4 + 0x00]
12300bdcaf74SDavid S. Miller	std		%f6, [%o4 + 0x08]
12310bdcaf74SDavid S. Miller	retl
12320bdcaf74SDavid S. Miller	 nop
12330bdcaf74SDavid S. MillerENDPROC(aes_sparc64_cbc_encrypt_192)
12340bdcaf74SDavid S. Miller
12350bdcaf74SDavid S. Miller	.align		32
12360bdcaf74SDavid S. MillerENTRY(aes_sparc64_cbc_encrypt_256)
12370bdcaf74SDavid S. Miller	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
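	/* Identical to aes_sparc64_cbc_encrypt_128 except for the
	 * 14-round ENCRYPT_256.
	 */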
12380bdcaf74SDavid S. Miller	ldd		[%o4 + 0x00], %f4
12390bdcaf74SDavid S. Miller	ldd		[%o4 + 0x08], %f6
12400bdcaf74SDavid S. Miller	ldx		[%o0 + 0x00], %g1
12410bdcaf74SDavid S. Miller	ldx		[%o0 + 0x08], %g2
12420bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
12430bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
12440bdcaf74SDavid S. Miller	add		%o1, 0x10, %o1
12450bdcaf74SDavid S. Miller	xor		%g1, %g3, %g3
12460bdcaf74SDavid S. Miller	xor		%g2, %g7, %g7
12470bdcaf74SDavid S. Miller	MOVXTOD_G3_F0
12480bdcaf74SDavid S. Miller	MOVXTOD_G7_F2
12490bdcaf74SDavid S. Miller	fxor		%f4, %f0, %f4
12500bdcaf74SDavid S. Miller	fxor		%f6, %f2, %f6
12510bdcaf74SDavid S. Miller	ENCRYPT_256(8, 4, 6, 0, 2)
12520bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
12530bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
12540bdcaf74SDavid S. Miller	subcc		%o3, 0x10, %o3
12550bdcaf74SDavid S. Miller	bne,pt		%xcc, 1b
12560bdcaf74SDavid S. Miller	 add		%o2, 0x10, %o2
12570bdcaf74SDavid S. Miller	std		%f4, [%o4 + 0x00]
12580bdcaf74SDavid S. Miller	std		%f6, [%o4 + 0x08]
12590bdcaf74SDavid S. Miller	retl
12600bdcaf74SDavid S. Miller	 nop
12610bdcaf74SDavid S. MillerENDPROC(aes_sparc64_cbc_encrypt_256)
12620bdcaf74SDavid S. Miller
12630bdcaf74SDavid S. Miller	.align		32
12640bdcaf74SDavid S. MillerENTRY(aes_sparc64_cbc_decrypt_128)
12650bdcaf74SDavid S. Miller	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=IV */
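	/* The IV is read into %o0/%o5, so %o0 stops being a key pointer
	 * once %g1/%g2 are latched.  Each iteration decrypts a block,
	 * fxors the result with the previous ciphertext from %o0/%o5,
	 * then rebuilds the current ciphertext words (%g3 ^ %g1 and
	 * %g7 ^ %g2 undo the initial key XOR) as the next chaining
	 * value, which is stored back through %o4 on exit.
	 */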
12660bdcaf74SDavid S. Miller	ldx		[%o0 - 0x10], %g1
12670bdcaf74SDavid S. Miller	ldx		[%o0 - 0x08], %g2
12680bdcaf74SDavid S. Miller	ldx		[%o4 + 0x00], %o0
12690bdcaf74SDavid S. Miller	ldx		[%o4 + 0x08], %o5
12700bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
12710bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
12720bdcaf74SDavid S. Miller	add		%o1, 0x10, %o1
12730bdcaf74SDavid S. Miller	xor		%g1, %g3, %g3
12740bdcaf74SDavid S. Miller	xor		%g2, %g7, %g7
12750bdcaf74SDavid S. Miller	MOVXTOD_G3_F4
12760bdcaf74SDavid S. Miller	MOVXTOD_G7_F6
12770bdcaf74SDavid S. Miller	DECRYPT_128(8, 4, 6, 0, 2)
12780bdcaf74SDavid S. Miller	MOVXTOD_O0_F0
12790bdcaf74SDavid S. Miller	MOVXTOD_O5_F2
12800bdcaf74SDavid S. Miller	xor		%g1, %g3, %o0
12810bdcaf74SDavid S. Miller	xor		%g2, %g7, %o5
12820bdcaf74SDavid S. Miller	fxor		%f4, %f0, %f4
12830bdcaf74SDavid S. Miller	fxor		%f6, %f2, %f6
12840bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
12850bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
12860bdcaf74SDavid S. Miller	subcc		%o3, 0x10, %o3
12870bdcaf74SDavid S. Miller	bne,pt		%xcc, 1b
12880bdcaf74SDavid S. Miller	 add		%o2, 0x10, %o2
12890bdcaf74SDavid S. Miller	stx		%o0, [%o4 + 0x00]
12900bdcaf74SDavid S. Miller	stx		%o5, [%o4 + 0x08]
12910bdcaf74SDavid S. Miller	retl
12920bdcaf74SDavid S. Miller	 nop
12930bdcaf74SDavid S. MillerENDPROC(aes_sparc64_cbc_decrypt_128)
12940bdcaf74SDavid S. Miller
12950bdcaf74SDavid S. Miller	.align		32
12960bdcaf74SDavid S. MillerENTRY(aes_sparc64_cbc_decrypt_192)
12970bdcaf74SDavid S. Miller	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=IV */
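	/* Same chaining scheme as aes_sparc64_cbc_decrypt_128, with the
	 * 12-round DECRYPT_192.
	 */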
12980bdcaf74SDavid S. Miller	ldx		[%o0 - 0x10], %g1
12990bdcaf74SDavid S. Miller	ldx		[%o0 - 0x08], %g2
13000bdcaf74SDavid S. Miller	ldx		[%o4 + 0x00], %o0
13010bdcaf74SDavid S. Miller	ldx		[%o4 + 0x08], %o5
13020bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
13030bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
13040bdcaf74SDavid S. Miller	add		%o1, 0x10, %o1
13050bdcaf74SDavid S. Miller	xor		%g1, %g3, %g3
13060bdcaf74SDavid S. Miller	xor		%g2, %g7, %g7
13070bdcaf74SDavid S. Miller	MOVXTOD_G3_F4
13080bdcaf74SDavid S. Miller	MOVXTOD_G7_F6
13090bdcaf74SDavid S. Miller	DECRYPT_192(8, 4, 6, 0, 2)
13100bdcaf74SDavid S. Miller	MOVXTOD_O0_F0
13110bdcaf74SDavid S. Miller	MOVXTOD_O5_F2
13120bdcaf74SDavid S. Miller	xor		%g1, %g3, %o0
13130bdcaf74SDavid S. Miller	xor		%g2, %g7, %o5
13140bdcaf74SDavid S. Miller	fxor		%f4, %f0, %f4
13150bdcaf74SDavid S. Miller	fxor		%f6, %f2, %f6
13160bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
13170bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
13180bdcaf74SDavid S. Miller	subcc		%o3, 0x10, %o3
13190bdcaf74SDavid S. Miller	bne,pt		%xcc, 1b
13200bdcaf74SDavid S. Miller	 add		%o2, 0x10, %o2
13210bdcaf74SDavid S. Miller	stx		%o0, [%o4 + 0x00]
13220bdcaf74SDavid S. Miller	stx		%o5, [%o4 + 0x08]
13230bdcaf74SDavid S. Miller	retl
13240bdcaf74SDavid S. Miller	 nop
13250bdcaf74SDavid S. MillerENDPROC(aes_sparc64_cbc_decrypt_192)
13260bdcaf74SDavid S. Miller
13270bdcaf74SDavid S. Miller	.align		32
13280bdcaf74SDavid S. MillerENTRY(aes_sparc64_cbc_decrypt_256)
13290bdcaf74SDavid S. Miller	/* %o0=&key[key_len], %o1=input, %o2=output, %o3=len, %o4=IV */
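	/* Same chaining scheme as aes_sparc64_cbc_decrypt_128, with the
	 * 14-round DECRYPT_256.
	 */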
13300bdcaf74SDavid S. Miller	ldx		[%o0 - 0x10], %g1
13310bdcaf74SDavid S. Miller	ldx		[%o0 - 0x08], %g2
13320bdcaf74SDavid S. Miller	ldx		[%o4 + 0x00], %o0
13330bdcaf74SDavid S. Miller	ldx		[%o4 + 0x08], %o5
13340bdcaf74SDavid S. Miller1:	ldx		[%o1 + 0x00], %g3
13350bdcaf74SDavid S. Miller	ldx		[%o1 + 0x08], %g7
13360bdcaf74SDavid S. Miller	add		%o1, 0x10, %o1
13370bdcaf74SDavid S. Miller	xor		%g1, %g3, %g3
13380bdcaf74SDavid S. Miller	xor		%g2, %g7, %g7
13390bdcaf74SDavid S. Miller	MOVXTOD_G3_F4
13400bdcaf74SDavid S. Miller	MOVXTOD_G7_F6
13410bdcaf74SDavid S. Miller	DECRYPT_256(8, 4, 6, 0, 2)
13420bdcaf74SDavid S. Miller	MOVXTOD_O0_F0
13430bdcaf74SDavid S. Miller	MOVXTOD_O5_F2
13440bdcaf74SDavid S. Miller	xor		%g1, %g3, %o0
13450bdcaf74SDavid S. Miller	xor		%g2, %g7, %o5
13460bdcaf74SDavid S. Miller	fxor		%f4, %f0, %f4
13470bdcaf74SDavid S. Miller	fxor		%f6, %f2, %f6
13480bdcaf74SDavid S. Miller	std		%f4, [%o2 + 0x00]
13490bdcaf74SDavid S. Miller	std		%f6, [%o2 + 0x08]
13500bdcaf74SDavid S. Miller	subcc		%o3, 0x10, %o3
13510bdcaf74SDavid S. Miller	bne,pt		%xcc, 1b
13520bdcaf74SDavid S. Miller	 add		%o2, 0x10, %o2
13530bdcaf74SDavid S. Miller	stx		%o0, [%o4 + 0x00]
13540bdcaf74SDavid S. Miller	stx		%o5, [%o4 + 0x08]
13550bdcaf74SDavid S. Miller	retl
13560bdcaf74SDavid S. Miller	 nop
13570bdcaf74SDavid S. MillerENDPROC(aes_sparc64_cbc_decrypt_256)
13589fd130ecSDavid S. Miller
13599fd130ecSDavid S. Miller	.align		32
13609fd130ecSDavid S. MillerENTRY(aes_sparc64_ctr_crypt_128)
13619fd130ecSDavid S. Miller	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
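	/* The 128-bit big-endian counter is kept in %g3 (upper half) and
	 * %g7 (lower half).  Each increment adds 1 to %g7 and uses movrz
	 * to move %g3 + 1 into %g3 only when %g7 has wrapped to zero,
	 * i.e. carry propagation.  Two counter blocks are encrypted per
	 * loop iteration and fxor'd into the input stream; a lone
	 * trailing block goes through 10:, and the updated counter is
	 * stored back through %o4 at 11:.
	 */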
13629fd130ecSDavid S. Miller	ldx		[%o4 + 0x00], %g3
13639fd130ecSDavid S. Miller	ldx		[%o4 + 0x08], %g7
13644e71bb49SDavid S. Miller	subcc		%o3, 0x10, %o3
13659fd130ecSDavid S. Miller	ldx		[%o0 + 0x00], %g1
13664e71bb49SDavid S. Miller	be		10f
13679fd130ecSDavid S. Miller	 ldx		[%o0 + 0x08], %g2
13689fd130ecSDavid S. Miller1:	xor		%g1, %g3, %o5
13699fd130ecSDavid S. Miller	MOVXTOD_O5_F0
13709fd130ecSDavid S. Miller	xor		%g2, %g7, %o5
13719fd130ecSDavid S. Miller	MOVXTOD_O5_F2
13729fd130ecSDavid S. Miller	add		%g7, 1, %g7
13739fd130ecSDavid S. Miller	add		%g3, 1, %o5
13749fd130ecSDavid S. Miller	movrz		%g7, %o5, %g3
13754e71bb49SDavid S. Miller	xor		%g1, %g3, %o5
13764e71bb49SDavid S. Miller	MOVXTOD_O5_F4
13774e71bb49SDavid S. Miller	xor		%g2, %g7, %o5
13784e71bb49SDavid S. Miller	MOVXTOD_O5_F6
13794e71bb49SDavid S. Miller	add		%g7, 1, %g7
13804e71bb49SDavid S. Miller	add		%g3, 1, %o5
13814e71bb49SDavid S. Miller	movrz		%g7, %o5, %g3
13824e71bb49SDavid S. Miller	ENCRYPT_128_2(8, 0, 2, 4, 6, 56, 58, 60, 62)
13834e71bb49SDavid S. Miller	ldd		[%o1 + 0x00], %f56
13844e71bb49SDavid S. Miller	ldd		[%o1 + 0x08], %f58
13854e71bb49SDavid S. Miller	ldd		[%o1 + 0x10], %f60
13864e71bb49SDavid S. Miller	ldd		[%o1 + 0x18], %f62
13874e71bb49SDavid S. Miller	fxor		%f56, %f0, %f56
13884e71bb49SDavid S. Miller	fxor		%f58, %f2, %f58
13894e71bb49SDavid S. Miller	fxor		%f60, %f4, %f60
13904e71bb49SDavid S. Miller	fxor		%f62, %f6, %f62
13914e71bb49SDavid S. Miller	std		%f56, [%o2 + 0x00]
13924e71bb49SDavid S. Miller	std		%f58, [%o2 + 0x08]
13934e71bb49SDavid S. Miller	std		%f60, [%o2 + 0x10]
13944e71bb49SDavid S. Miller	std		%f62, [%o2 + 0x18]
13954e71bb49SDavid S. Miller	subcc		%o3, 0x20, %o3
13964e71bb49SDavid S. Miller	add		%o1, 0x20, %o1
13974e71bb49SDavid S. Miller	brgz		%o3, 1b
13984e71bb49SDavid S. Miller	 add		%o2, 0x20, %o2
13994e71bb49SDavid S. Miller	brlz,pt		%o3, 11f
14004e71bb49SDavid S. Miller	 nop
14014e71bb49SDavid S. Miller10:	xor		%g1, %g3, %o5
14024e71bb49SDavid S. Miller	MOVXTOD_O5_F0
14034e71bb49SDavid S. Miller	xor		%g2, %g7, %o5
14044e71bb49SDavid S. Miller	MOVXTOD_O5_F2
14054e71bb49SDavid S. Miller	add		%g7, 1, %g7
14064e71bb49SDavid S. Miller	add		%g3, 1, %o5
14074e71bb49SDavid S. Miller	movrz		%g7, %o5, %g3
14089fd130ecSDavid S. Miller	ENCRYPT_128(8, 0, 2, 4, 6)
14099fd130ecSDavid S. Miller	ldd		[%o1 + 0x00], %f4
14109fd130ecSDavid S. Miller	ldd		[%o1 + 0x08], %f6
14119fd130ecSDavid S. Miller	fxor		%f4, %f0, %f4
14129fd130ecSDavid S. Miller	fxor		%f6, %f2, %f6
14139fd130ecSDavid S. Miller	std		%f4, [%o2 + 0x00]
14149fd130ecSDavid S. Miller	std		%f6, [%o2 + 0x08]
14154e71bb49SDavid S. Miller11:	stx		%g3, [%o4 + 0x00]
14169fd130ecSDavid S. Miller	retl
14174e71bb49SDavid S. Miller	 stx		%g7, [%o4 + 0x08]
14189fd130ecSDavid S. MillerENDPROC(aes_sparc64_ctr_crypt_128)
14199fd130ecSDavid S. Miller
14209fd130ecSDavid S. Miller	.align		32
14219fd130ecSDavid S. MillerENTRY(aes_sparc64_ctr_crypt_192)
14229fd130ecSDavid S. Miller	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
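	/* Same counter handling as aes_sparc64_ctr_crypt_128, with the
	 * 12-round ENCRYPT_192/ENCRYPT_192_2.
	 */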
14239fd130ecSDavid S. Miller	ldx		[%o4 + 0x00], %g3
14249fd130ecSDavid S. Miller	ldx		[%o4 + 0x08], %g7
14254e71bb49SDavid S. Miller	subcc		%o3, 0x10, %o3
14269fd130ecSDavid S. Miller	ldx		[%o0 + 0x00], %g1
14274e71bb49SDavid S. Miller	be		10f
14289fd130ecSDavid S. Miller	 ldx		[%o0 + 0x08], %g2
14299fd130ecSDavid S. Miller1:	xor		%g1, %g3, %o5
14309fd130ecSDavid S. Miller	MOVXTOD_O5_F0
14319fd130ecSDavid S. Miller	xor		%g2, %g7, %o5
14329fd130ecSDavid S. Miller	MOVXTOD_O5_F2
14339fd130ecSDavid S. Miller	add		%g7, 1, %g7
14349fd130ecSDavid S. Miller	add		%g3, 1, %o5
14359fd130ecSDavid S. Miller	movrz		%g7, %o5, %g3
14364e71bb49SDavid S. Miller	xor		%g1, %g3, %o5
14374e71bb49SDavid S. Miller	MOVXTOD_O5_F4
14384e71bb49SDavid S. Miller	xor		%g2, %g7, %o5
14394e71bb49SDavid S. Miller	MOVXTOD_O5_F6
14404e71bb49SDavid S. Miller	add		%g7, 1, %g7
14414e71bb49SDavid S. Miller	add		%g3, 1, %o5
14424e71bb49SDavid S. Miller	movrz		%g7, %o5, %g3
14434e71bb49SDavid S. Miller	ENCRYPT_192_2(8, 0, 2, 4, 6, 56, 58, 60, 62)
14444e71bb49SDavid S. Miller	ldd		[%o1 + 0x00], %f56
14454e71bb49SDavid S. Miller	ldd		[%o1 + 0x08], %f58
14464e71bb49SDavid S. Miller	ldd		[%o1 + 0x10], %f60
14474e71bb49SDavid S. Miller	ldd		[%o1 + 0x18], %f62
14484e71bb49SDavid S. Miller	fxor		%f56, %f0, %f56
14494e71bb49SDavid S. Miller	fxor		%f58, %f2, %f58
14504e71bb49SDavid S. Miller	fxor		%f60, %f4, %f60
14514e71bb49SDavid S. Miller	fxor		%f62, %f6, %f62
14524e71bb49SDavid S. Miller	std		%f56, [%o2 + 0x00]
14534e71bb49SDavid S. Miller	std		%f58, [%o2 + 0x08]
14544e71bb49SDavid S. Miller	std		%f60, [%o2 + 0x10]
14554e71bb49SDavid S. Miller	std		%f62, [%o2 + 0x18]
14564e71bb49SDavid S. Miller	subcc		%o3, 0x20, %o3
14574e71bb49SDavid S. Miller	add		%o1, 0x20, %o1
14584e71bb49SDavid S. Miller	brgz		%o3, 1b
14594e71bb49SDavid S. Miller	 add		%o2, 0x20, %o2
14604e71bb49SDavid S. Miller	brlz,pt		%o3, 11f
14614e71bb49SDavid S. Miller	 nop
14624e71bb49SDavid S. Miller10:	xor		%g1, %g3, %o5
14634e71bb49SDavid S. Miller	MOVXTOD_O5_F0
14644e71bb49SDavid S. Miller	xor		%g2, %g7, %o5
14654e71bb49SDavid S. Miller	MOVXTOD_O5_F2
14664e71bb49SDavid S. Miller	add		%g7, 1, %g7
14674e71bb49SDavid S. Miller	add		%g3, 1, %o5
14684e71bb49SDavid S. Miller	movrz		%g7, %o5, %g3
14699fd130ecSDavid S. Miller	ENCRYPT_192(8, 0, 2, 4, 6)
14709fd130ecSDavid S. Miller	ldd		[%o1 + 0x00], %f4
14719fd130ecSDavid S. Miller	ldd		[%o1 + 0x08], %f6
14729fd130ecSDavid S. Miller	fxor		%f4, %f0, %f4
14739fd130ecSDavid S. Miller	fxor		%f6, %f2, %f6
14749fd130ecSDavid S. Miller	std		%f4, [%o2 + 0x00]
14759fd130ecSDavid S. Miller	std		%f6, [%o2 + 0x08]
14764e71bb49SDavid S. Miller11:	stx		%g3, [%o4 + 0x00]
14779fd130ecSDavid S. Miller	retl
14784e71bb49SDavid S. Miller	 stx		%g7, [%o4 + 0x08]
14799fd130ecSDavid S. MillerENDPROC(aes_sparc64_ctr_crypt_192)
14809fd130ecSDavid S. Miller
14819fd130ecSDavid S. Miller	.align		32
14829fd130ecSDavid S. MillerENTRY(aes_sparc64_ctr_crypt_256)
14839fd130ecSDavid S. Miller	/* %o0=key, %o1=input, %o2=output, %o3=len, %o4=IV */
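	/* Same counter handling as aes_sparc64_ctr_crypt_128, except the
	 * single-block tail at 10: first reloads %f56-%f62 from key
	 * offsets 0xd0-0xe8, which the two-block ENCRYPT_256_2 path uses
	 * as scratch.
	 */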
14849fd130ecSDavid S. Miller	ldx		[%o4 + 0x00], %g3
14859fd130ecSDavid S. Miller	ldx		[%o4 + 0x08], %g7
14864e71bb49SDavid S. Miller	subcc		%o3, 0x10, %o3
14879fd130ecSDavid S. Miller	ldx		[%o0 + 0x00], %g1
14884e71bb49SDavid S. Miller	be		10f
14899fd130ecSDavid S. Miller	 ldx		[%o0 + 0x08], %g2
14909fd130ecSDavid S. Miller1:	xor		%g1, %g3, %o5
14919fd130ecSDavid S. Miller	MOVXTOD_O5_F0
14929fd130ecSDavid S. Miller	xor		%g2, %g7, %o5
14939fd130ecSDavid S. Miller	MOVXTOD_O5_F2
14949fd130ecSDavid S. Miller	add		%g7, 1, %g7
14959fd130ecSDavid S. Miller	add		%g3, 1, %o5
14969fd130ecSDavid S. Miller	movrz		%g7, %o5, %g3
14974e71bb49SDavid S. Miller	xor		%g1, %g3, %o5
14984e71bb49SDavid S. Miller	MOVXTOD_O5_F4
14994e71bb49SDavid S. Miller	xor		%g2, %g7, %o5
15004e71bb49SDavid S. Miller	MOVXTOD_O5_F6
15014e71bb49SDavid S. Miller	add		%g7, 1, %g7
15024e71bb49SDavid S. Miller	add		%g3, 1, %o5
15034e71bb49SDavid S. Miller	movrz		%g7, %o5, %g3
15044e71bb49SDavid S. Miller	ENCRYPT_256_2(8, 0, 2, 4, 6)
15054e71bb49SDavid S. Miller	ldd		[%o1 + 0x00], %f56
15064e71bb49SDavid S. Miller	ldd		[%o1 + 0x08], %f58
15074e71bb49SDavid S. Miller	ldd		[%o1 + 0x10], %f60
15084e71bb49SDavid S. Miller	ldd		[%o1 + 0x18], %f62
15094e71bb49SDavid S. Miller	fxor		%f56, %f0, %f56
15104e71bb49SDavid S. Miller	fxor		%f58, %f2, %f58
15114e71bb49SDavid S. Miller	fxor		%f60, %f4, %f60
15124e71bb49SDavid S. Miller	fxor		%f62, %f6, %f62
15134e71bb49SDavid S. Miller	std		%f56, [%o2 + 0x00]
15144e71bb49SDavid S. Miller	std		%f58, [%o2 + 0x08]
15154e71bb49SDavid S. Miller	std		%f60, [%o2 + 0x10]
15164e71bb49SDavid S. Miller	std		%f62, [%o2 + 0x18]
15174e71bb49SDavid S. Miller	subcc		%o3, 0x20, %o3
15184e71bb49SDavid S. Miller	add		%o1, 0x20, %o1
15194e71bb49SDavid S. Miller	brgz		%o3, 1b
15204e71bb49SDavid S. Miller	 add		%o2, 0x20, %o2
15214e71bb49SDavid S. Miller	brlz,pt		%o3, 11f
15224e71bb49SDavid S. Miller	 nop
15239f28ffc0SDavid S. Miller10:	ldd		[%o0 + 0xd0], %f56
1524699871bcSDavid S. Miller	ldd		[%o0 + 0xd8], %f58
1525699871bcSDavid S. Miller	ldd		[%o0 + 0xe0], %f60
1526699871bcSDavid S. Miller	ldd		[%o0 + 0xe8], %f62
15279f28ffc0SDavid S. Miller	xor		%g1, %g3, %o5
15284e71bb49SDavid S. Miller	MOVXTOD_O5_F0
15294e71bb49SDavid S. Miller	xor		%g2, %g7, %o5
15304e71bb49SDavid S. Miller	MOVXTOD_O5_F2
15314e71bb49SDavid S. Miller	add		%g7, 1, %g7
15324e71bb49SDavid S. Miller	add		%g3, 1, %o5
15334e71bb49SDavid S. Miller	movrz		%g7, %o5, %g3
15349fd130ecSDavid S. Miller	ENCRYPT_256(8, 0, 2, 4, 6)
15359fd130ecSDavid S. Miller	ldd		[%o1 + 0x00], %f4
15369fd130ecSDavid S. Miller	ldd		[%o1 + 0x08], %f6
15379fd130ecSDavid S. Miller	fxor		%f4, %f0, %f4
15389fd130ecSDavid S. Miller	fxor		%f6, %f2, %f6
15399fd130ecSDavid S. Miller	std		%f4, [%o2 + 0x00]
15409fd130ecSDavid S. Miller	std		%f6, [%o2 + 0x08]
15414e71bb49SDavid S. Miller11:	stx		%g3, [%o4 + 0x00]
15429fd130ecSDavid S. Miller	retl
15434e71bb49SDavid S. Miller	 stx		%g7, [%o4 + 0x08]
15449fd130ecSDavid S. MillerENDPROC(aes_sparc64_ctr_crypt_256)