/*
 * Carry-less multiply operations.
 * SPDX-License-Identifier: GPL-2.0-or-later
 *
 * Copyright (C) 2023 Linaro, Ltd.
 */

#include "qemu/osdep.h"
#include "crypto/clmul.h"

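/*
 * Perform eight 8x8->8 carry-less multiplies: multiply each byte of
 * @n by the corresponding byte of @m, retaining only the low 8 bits
 * of each 15-bit product.
 */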
uint64_t clmul_8x8_low(uint64_t n, uint64_t m)
{
    uint64_t r = 0;

    for (int i = 0; i < 8; ++i) {
        /* Expand bit 0 of each byte of n into a full byte mask. */
        uint64_t mask = (n & 0x0101010101010101ull) * 0xff;
        r ^= m & mask;
        /* Shift each byte of m left by one, without crossing byte lanes. */
        m = (m << 1) & 0xfefefefefefefefeull;
        n >>= 1;
    }
    return r;
}

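/*
 * Perform four 8x8->16 carry-less multiplies.
 * The caller must have cleared the odd bytes of both inputs.
 */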
static uint64_t clmul_8x4_even_int(uint64_t n, uint64_t m)
{
    uint64_t r = 0;

    for (int i = 0; i < 8; ++i) {
        /* Expand bit 0 of each 16-bit lane of n into a full lane mask. */
        uint64_t mask = (n & 0x0001000100010001ull) * 0xffff;
        r ^= m & mask;
        n >>= 1;
        /* With the odd bytes of m clear, the shift cannot cross lanes. */
        m <<= 1;
    }
    return r;
}

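/*
 * Perform four 8x8->16 carry-less multiplies;
 * the odd bytes of the inputs are ignored.
 */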
uint64_t clmul_8x4_even(uint64_t n, uint64_t m)
{
    /* Clear the odd bytes so that they do not contribute. */
    n &= 0x00ff00ff00ff00ffull;
    m &= 0x00ff00ff00ff00ffull;
    return clmul_8x4_even_int(n, m);
}

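/*
 * Perform four 8x8->16 carry-less multiplies;
 * the even bytes of the inputs are ignored.
 */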
uint64_t clmul_8x4_odd(uint64_t n, uint64_t m)
{
    /* Shift the odd bytes down to the even byte positions. */
    return clmul_8x4_even(n >> 8, m >> 8);
}

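/*
 * Spread the four bytes of a 32-bit value into the even bytes
 * of four 16-bit lanes.
 */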
static uint64_t unpack_8_to_16(uint64_t x)
{
    return  (x & 0x000000ff)
         | ((x & 0x0000ff00) << 8)
         | ((x & 0x00ff0000) << 16)
         | ((x & 0xff000000) << 24);
}

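/*
 * Perform four 8x8->16 carry-less multiplies on the four
 * packed bytes of each 32-bit input.
 */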
uint64_t clmul_8x4_packed(uint32_t n, uint32_t m)
{
    return clmul_8x4_even_int(unpack_8_to_16(n), unpack_8_to_16(m));
}