/* FCrypt encryption algorithm
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 * Based on code:
 *
 * Copyright (c) 1995 - 2000 Kungliga Tekniska Högskolan
 * (Royal Institute of Technology, Stockholm, Sweden).
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the Institute nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <asm/byteorder.h>
#include <linux/bitops.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>

#define ROUNDS 16

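/* The key schedule: one 32-bit subkey per round, kept in big-endian form */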
struct fcrypt_ctx {
	__be32 sched[ROUNDS];
};

/* Rotate right a 56-bit number split across two words: hi holds the upper
 * 24 bits, lo the lower 32
 */
#define ror56(hi, lo, n)					\
do {								\
	u32 t = lo & ((1 << n) - 1);				\
	lo = (lo >> n) | ((hi & ((1 << n) - 1)) << (32 - n));	\
	hi = (hi >> n) | (t << (24 - n));			\
} while(0)

/* Rotate right a 56-bit number held in the low bits of one 64-bit word */
#define ror56_64(k, n)						\
do {								\
	k = (k >> n) | ((k & ((1 << n) - 1)) << (56 - n));	\
} while(0)
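/*
 * For example, ror56_64(k, 11) moves bit 0 of k to bit 45 (56 - 11): a 56-bit
 * value of 0x00000000000001 becomes 0x00200000000000.
 */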

/*
 * Sboxes for Feistel network derived from
 * /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h
 */
#undef Z
#define Z(x) __constant_cpu_to_be32(x << 3)
static const __be32 sbox0[256] = {
	Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11),
	Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06),
	Z(0x0e), Z(0x06), Z(0xd2), Z(0x65), Z(0x73), Z(0xc5), Z(0x28), Z(0x60),
	Z(0xf2), Z(0x20), Z(0xb5), Z(0x38), Z(0x7e), Z(0xda), Z(0x9f), Z(0xe3),
	Z(0xd2), Z(0xcf), Z(0xc4), Z(0x3c), Z(0x61), Z(0xff), Z(0x4a), Z(0x4a),
	Z(0x35), Z(0xac), Z(0xaa), Z(0x5f), Z(0x2b), Z(0xbb), Z(0xbc), Z(0x53),
	Z(0x4e), Z(0x9d), Z(0x78), Z(0xa3), Z(0xdc), Z(0x09), Z(0x32), Z(0x10),
	Z(0xc6), Z(0x6f), Z(0x66), Z(0xd6), Z(0xab), Z(0xa9), Z(0xaf), Z(0xfd),
	Z(0x3b), Z(0x95), Z(0xe8), Z(0x34), Z(0x9a), Z(0x81), Z(0x72), Z(0x80),
	Z(0x9c), Z(0xf3), Z(0xec), Z(0xda), Z(0x9f), Z(0x26), Z(0x76), Z(0x15),
	Z(0x3e), Z(0x55), Z(0x4d), Z(0xde), Z(0x84), Z(0xee), Z(0xad), Z(0xc7),
	Z(0xf1), Z(0x6b), Z(0x3d), Z(0xd3), Z(0x04), Z(0x49), Z(0xaa), Z(0x24),
	Z(0x0b), Z(0x8a), Z(0x83), Z(0xba), Z(0xfa), Z(0x85), Z(0xa0), Z(0xa8),
	Z(0xb1), Z(0xd4), Z(0x01), Z(0xd8), Z(0x70), Z(0x64), Z(0xf0), Z(0x51),
	Z(0xd2), Z(0xc3), Z(0xa7), Z(0x75), Z(0x8c), Z(0xa5), Z(0x64), Z(0xef),
	Z(0x10), Z(0x4e), Z(0xb7), Z(0xc6), Z(0x61), Z(0x03), Z(0xeb), Z(0x44),
	Z(0x3d), Z(0xe5), Z(0xb3), Z(0x5b), Z(0xae), Z(0xd5), Z(0xad), Z(0x1d),
	Z(0xfa), Z(0x5a), Z(0x1e), Z(0x33), Z(0xab), Z(0x93), Z(0xa2), Z(0xb7),
	Z(0xe7), Z(0xa8), Z(0x45), Z(0xa4), Z(0xcd), Z(0x29), Z(0x63), Z(0x44),
	Z(0xb6), Z(0x69), Z(0x7e), Z(0x2e), Z(0x62), Z(0x03), Z(0xc8), Z(0xe0),
	Z(0x17), Z(0xbb), Z(0xc7), Z(0xf3), Z(0x3f), Z(0x36), Z(0xba), Z(0x71),
	Z(0x8e), Z(0x97), Z(0x65), Z(0x60), Z(0x69), Z(0xb6), Z(0xf6), Z(0xe6),
	Z(0x6e), Z(0xe0), Z(0x81), Z(0x59), Z(0xe8), Z(0xaf), Z(0xdd), Z(0x95),
	Z(0x22), Z(0x99), Z(0xfd), Z(0x63), Z(0x19), Z(0x74), Z(0x61), Z(0xb1),
	Z(0xb6), Z(0x5b), Z(0xae), Z(0x54), Z(0xb3), Z(0x70), Z(0xff), Z(0xc6),
	Z(0x3b), Z(0x3e), Z(0xc1), Z(0xd7), Z(0xe1), Z(0x0e), Z(0x76), Z(0xe5),
	Z(0x36), Z(0x4f), Z(0x59), Z(0xc7), Z(0x08), Z(0x6e), Z(0x82), Z(0xa6),
	Z(0x93), Z(0xc4), Z(0xaa), Z(0x26), Z(0x49), Z(0xe0), Z(0x21), Z(0x64),
	Z(0x07), Z(0x9f), Z(0x64), Z(0x81), Z(0x9c), Z(0xbf), Z(0xf9), Z(0xd1),
	Z(0x43), Z(0xf8), Z(0xb6), Z(0xb9), Z(0xf1), Z(0x24), Z(0x75), Z(0x03),
	Z(0xe4), Z(0xb0), Z(0x99), Z(0x46), Z(0x3d), Z(0xf5), Z(0xd1), Z(0x39),
	Z(0x72), Z(0x12), Z(0xf6), Z(0xba), Z(0x0c), Z(0x0d), Z(0x42), Z(0x2e)
};

#undef Z
#define Z(x) __constant_cpu_to_be32((x << 27) | (x >> 5))
static const __be32 sbox1[256] = {
	Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e),
	Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85),
	Z(0x6c), Z(0x7b), Z(0x67), Z(0xc6), Z(0x23), Z(0xe3), Z(0xf2), Z(0x89),
	Z(0x50), Z(0x9c), Z(0x03), Z(0xb7), Z(0x73), Z(0xe6), Z(0xe1), Z(0x39),
	Z(0x31), Z(0x2c), Z(0x27), Z(0x9f), Z(0xa5), Z(0x69), Z(0x44), Z(0xd6),
	Z(0x23), Z(0x83), Z(0x98), Z(0x7d), Z(0x3c), Z(0xb4), Z(0x2d), Z(0x99),
	Z(0x1c), Z(0x1f), Z(0x8c), Z(0x20), Z(0x03), Z(0x7c), Z(0x5f), Z(0xad),
	Z(0xf4), Z(0xfa), Z(0x95), Z(0xca), Z(0x76), Z(0x44), Z(0xcd), Z(0xb6),
	Z(0xb8), Z(0xa1), Z(0xa1), Z(0xbe), Z(0x9e), Z(0x54), Z(0x8f), Z(0x0b),
	Z(0x16), Z(0x74), Z(0x31), Z(0x8a), Z(0x23), Z(0x17), Z(0x04), Z(0xfa),
	Z(0x79), Z(0x84), Z(0xb1), Z(0xf5), Z(0x13), Z(0xab), Z(0xb5), Z(0x2e),
	Z(0xaa), Z(0x0c), Z(0x60), Z(0x6b), Z(0x5b), Z(0xc4), Z(0x4b), Z(0xbc),
	Z(0xe2), Z(0xaf), Z(0x45), Z(0x73), Z(0xfa), Z(0xc9), Z(0x49), Z(0xcd),
	Z(0x00), Z(0x92), Z(0x7d), Z(0x97), Z(0x7a), Z(0x18), Z(0x60), Z(0x3d),
	Z(0xcf), Z(0x5b), Z(0xde), Z(0xc6), Z(0xe2), Z(0xe6), Z(0xbb), Z(0x8b),
	Z(0x06), Z(0xda), Z(0x08), Z(0x15), Z(0x1b), Z(0x88), Z(0x6a), Z(0x17),
	Z(0x89), Z(0xd0), Z(0xa9), Z(0xc1), Z(0xc9), Z(0x70), Z(0x6b), Z(0xe5),
	Z(0x43), Z(0xf4), Z(0x68), Z(0xc8), Z(0xd3), Z(0x84), Z(0x28), Z(0x0a),
	Z(0x52), Z(0x66), Z(0xa3), Z(0xca), Z(0xf2), Z(0xe3), Z(0x7f), Z(0x7a),
	Z(0x31), Z(0xf7), Z(0x88), Z(0x94), Z(0x5e), Z(0x9c), Z(0x63), Z(0xd5),
	Z(0x24), Z(0x66), Z(0xfc), Z(0xb3), Z(0x57), Z(0x25), Z(0xbe), Z(0x89),
	Z(0x44), Z(0xc4), Z(0xe0), Z(0x8f), Z(0x23), Z(0x3c), Z(0x12), Z(0x52),
	Z(0xf5), Z(0x1e), Z(0xf4), Z(0xcb), Z(0x18), Z(0x33), Z(0x1f), Z(0xf8),
	Z(0x69), Z(0x10), Z(0x9d), Z(0xd3), Z(0xf7), Z(0x28), Z(0xf8), Z(0x30),
	Z(0x05), Z(0x5e), Z(0x32), Z(0xc0), Z(0xd5), Z(0x19), Z(0xbd), Z(0x45),
	Z(0x8b), Z(0x5b), Z(0xfd), Z(0xbc), Z(0xe2), Z(0x5c), Z(0xa9), Z(0x96),
	Z(0xef), Z(0x70), Z(0xcf), Z(0xc2), Z(0x2a), Z(0xb3), Z(0x61), Z(0xad),
	Z(0x80), Z(0x48), Z(0x81), Z(0xb7), Z(0x1d), Z(0x43), Z(0xd9), Z(0xd7),
	Z(0x45), Z(0xf0), Z(0xd8), Z(0x8a), Z(0x59), Z(0x7c), Z(0x57), Z(0xc1),
	Z(0x79), Z(0xc7), Z(0x34), Z(0xd6), Z(0x43), Z(0xdf), Z(0xe4), Z(0x78),
	Z(0x16), Z(0x06), Z(0xda), Z(0x92), Z(0x76), Z(0x51), Z(0xe1), Z(0xd4),
	Z(0x70), Z(0x03), Z(0xe0), Z(0x2f), Z(0x96), Z(0x91), Z(0x82), Z(0x80)
};

#undef Z
#define Z(x) __constant_cpu_to_be32(x << 11)
static const __be32 sbox2[256] = {
	Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86),
	Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d),
	Z(0xbf), Z(0x80), Z(0x87), Z(0x27), Z(0x95), Z(0xe2), Z(0xc5), Z(0x5d),
	Z(0xf9), Z(0x6f), Z(0xdb), Z(0xb4), Z(0x65), Z(0x6e), Z(0xe7), Z(0x24),
	Z(0xc8), Z(0x1a), Z(0xbb), Z(0x49), Z(0xb5), Z(0x0a), Z(0x7d), Z(0xb9),
	Z(0xe8), Z(0xdc), Z(0xb7), Z(0xd9), Z(0x45), Z(0x20), Z(0x1b), Z(0xce),
	Z(0x59), Z(0x9d), Z(0x6b), Z(0xbd), Z(0x0e), Z(0x8f), Z(0xa3), Z(0xa9),
	Z(0xbc), Z(0x74), Z(0xa6), Z(0xf6), Z(0x7f), Z(0x5f), Z(0xb1), Z(0x68),
	Z(0x84), Z(0xbc), Z(0xa9), Z(0xfd), Z(0x55), Z(0x50), Z(0xe9), Z(0xb6),
	Z(0x13), Z(0x5e), Z(0x07), Z(0xb8), Z(0x95), Z(0x02), Z(0xc0), Z(0xd0),
	Z(0x6a), Z(0x1a), Z(0x85), Z(0xbd), Z(0xb6), Z(0xfd), Z(0xfe), Z(0x17),
	Z(0x3f), Z(0x09), Z(0xa3), Z(0x8d), Z(0xfb), Z(0xed), Z(0xda), Z(0x1d),
	Z(0x6d), Z(0x1c), Z(0x6c), Z(0x01), Z(0x5a), Z(0xe5), Z(0x71), Z(0x3e),
	Z(0x8b), Z(0x6b), Z(0xbe), Z(0x29), Z(0xeb), Z(0x12), Z(0x19), Z(0x34),
	Z(0xcd), Z(0xb3), Z(0xbd), Z(0x35), Z(0xea), Z(0x4b), Z(0xd5), Z(0xae),
	Z(0x2a), Z(0x79), Z(0x5a), Z(0xa5), Z(0x32), Z(0x12), Z(0x7b), Z(0xdc),
	Z(0x2c), Z(0xd0), Z(0x22), Z(0x4b), Z(0xb1), Z(0x85), Z(0x59), Z(0x80),
	Z(0xc0), Z(0x30), Z(0x9f), Z(0x73), Z(0xd3), Z(0x14), Z(0x48), Z(0x40),
	Z(0x07), Z(0x2d), Z(0x8f), Z(0x80), Z(0x0f), Z(0xce), Z(0x0b), Z(0x5e),
	Z(0xb7), Z(0x5e), Z(0xac), Z(0x24), Z(0x94), Z(0x4a), Z(0x18), Z(0x15),
	Z(0x05), Z(0xe8), Z(0x02), Z(0x77), Z(0xa9), Z(0xc7), Z(0x40), Z(0x45),
	Z(0x89), Z(0xd1), Z(0xea), Z(0xde), Z(0x0c), Z(0x79), Z(0x2a), Z(0x99),
	Z(0x6c), Z(0x3e), Z(0x95), Z(0xdd), Z(0x8c), Z(0x7d), Z(0xad), Z(0x6f),
	Z(0xdc), Z(0xff), Z(0xfd), Z(0x62), Z(0x47), Z(0xb3), Z(0x21), Z(0x8a),
	Z(0xec), Z(0x8e), Z(0x19), Z(0x18), Z(0xb4), Z(0x6e), Z(0x3d), Z(0xfd),
	Z(0x74), Z(0x54), Z(0x1e), Z(0x04), Z(0x85), Z(0xd8), Z(0xbc), Z(0x1f),
	Z(0x56), Z(0xe7), Z(0x3a), Z(0x56), Z(0x67), Z(0xd6), Z(0xc8), Z(0xa5),
	Z(0xf3), Z(0x8e), Z(0xde), Z(0xae), Z(0x37), Z(0x49), Z(0xb7), Z(0xfa),
	Z(0xc8), Z(0xf4), Z(0x1f), Z(0xe0), Z(0x2a), Z(0x9b), Z(0x15), Z(0xd1),
	Z(0x34), Z(0x0e), Z(0xb5), Z(0xe0), Z(0x44), Z(0x78), Z(0x84), Z(0x59),
	Z(0x56), Z(0x68), Z(0x77), Z(0xa5), Z(0x14), Z(0x06), Z(0xf5), Z(0x2f),
	Z(0x8c), Z(0x8a), Z(0x73), Z(0x80), Z(0x76), Z(0xb4), Z(0x10), Z(0x86)
};

#undef Z
#define Z(x) __constant_cpu_to_be32(x << 19)
static const __be32 sbox3[256] = {
	Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2),
	Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12),
	Z(0x44), Z(0x48), Z(0x6d), Z(0x28), Z(0xaa), Z(0x20), Z(0x6d), Z(0x57),
	Z(0xd6), Z(0x6b), Z(0x5d), Z(0x72), Z(0xf0), Z(0x92), Z(0x5a), Z(0x1b),
	Z(0x53), Z(0x80), Z(0x24), Z(0x70), Z(0x9a), Z(0xcc), Z(0xa7), Z(0x66),
	Z(0xa1), Z(0x01), Z(0xa5), Z(0x41), Z(0x97), Z(0x41), Z(0x31), Z(0x82),
	Z(0xf1), Z(0x14), Z(0xcf), Z(0x53), Z(0x0d), Z(0xa0), Z(0x10), Z(0xcc),
	Z(0x2a), Z(0x7d), Z(0xd2), Z(0xbf), Z(0x4b), Z(0x1a), Z(0xdb), Z(0x16),
	Z(0x47), Z(0xf6), Z(0x51), Z(0x36), Z(0xed), Z(0xf3), Z(0xb9), Z(0x1a),
	Z(0xa7), Z(0xdf), Z(0x29), Z(0x43), Z(0x01), Z(0x54), Z(0x70), Z(0xa4),
	Z(0xbf), Z(0xd4), Z(0x0b), Z(0x53), Z(0x44), Z(0x60), Z(0x9e), Z(0x23),
	Z(0xa1), Z(0x18), Z(0x68), Z(0x4f), Z(0xf0), Z(0x2f), Z(0x82), Z(0xc2),
	Z(0x2a), Z(0x41), Z(0xb2), Z(0x42), Z(0x0c), Z(0xed), Z(0x0c), Z(0x1d),
	Z(0x13), Z(0x3a), Z(0x3c), Z(0x6e), Z(0x35), Z(0xdc), Z(0x60), Z(0x65),
	Z(0x85), Z(0xe9), Z(0x64), Z(0x02), Z(0x9a), Z(0x3f), Z(0x9f), Z(0x87),
	Z(0x96), Z(0xdf), Z(0xbe), Z(0xf2), Z(0xcb), Z(0xe5), Z(0x6c), Z(0xd4),
	Z(0x5a), Z(0x83), Z(0xbf), Z(0x92), Z(0x1b), Z(0x94), Z(0x00), Z(0x42),
	Z(0xcf), Z(0x4b), Z(0x00), Z(0x75), Z(0xba), Z(0x8f), Z(0x76), Z(0x5f),
	Z(0x5d), Z(0x3a), Z(0x4d), Z(0x09), Z(0x12), Z(0x08), Z(0x38), Z(0x95),
	Z(0x17), Z(0xe4), Z(0x01), Z(0x1d), Z(0x4c), Z(0xa9), Z(0xcc), Z(0x85),
	Z(0x82), Z(0x4c), Z(0x9d), Z(0x2f), Z(0x3b), Z(0x66), Z(0xa1), Z(0x34),
	Z(0x10), Z(0xcd), Z(0x59), Z(0x89), Z(0xa5), Z(0x31), Z(0xcf), Z(0x05),
	Z(0xc8), Z(0x84), Z(0xfa), Z(0xc7), Z(0xba), Z(0x4e), Z(0x8b), Z(0x1a),
	Z(0x19), Z(0xf1), Z(0xa1), Z(0x3b), Z(0x18), Z(0x12), Z(0x17), Z(0xb0),
	Z(0x98), Z(0x8d), Z(0x0b), Z(0x23), Z(0xc3), Z(0x3a), Z(0x2d), Z(0x20),
	Z(0xdf), Z(0x13), Z(0xa0), Z(0xa8), Z(0x4c), Z(0x0d), Z(0x6c), Z(0x2f),
	Z(0x47), Z(0x13), Z(0x13), Z(0x52), Z(0x1f), Z(0x2d), Z(0xf5), Z(0x79),
	Z(0x3d), Z(0xa2), Z(0x54), Z(0xbd), Z(0x69), Z(0xc8), Z(0x6b), Z(0xf3),
	Z(0x05), Z(0x28), Z(0xf1), Z(0x16), Z(0x46), Z(0x40), Z(0xb0), Z(0x11),
	Z(0xd3), Z(0xb7), Z(0x95), Z(0x49), Z(0xcf), Z(0xc3), Z(0x1d), Z(0x8f),
	Z(0xd8), Z(0xe1), Z(0x73), Z(0xdb), Z(0xad), Z(0xc8), Z(0xc9), Z(0xa9),
	Z(0xa1), Z(0xc2), Z(0xc5), Z(0xe3), Z(0xba), Z(0xfc), Z(0x0e), Z(0x25)
};

/*
 * One round of the 16-round Feistel network: F_ENCRYPT XORs the round subkey
 * into R, uses each byte of the result to index one S-box and XORs the
 * combined S-box outputs into L.
 */
#define F_ENCRYPT(R, L, sched)						\
do {									\
	union lc4 { __be32 l; u8 c[4]; } u;				\
	u.l = sched ^ R;						\
	L ^= sbox0[u.c[0]] ^ sbox1[u.c[1]] ^ sbox2[u.c[2]] ^ sbox3[u.c[3]]; \
} while(0)
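/*
 * The Z() macros above pre-shift each S-box output into its position in the
 * 32-bit round result and store it as __be32, so the lookups can be XORed
 * straight into the big-endian half-block without byte swapping at run time.
 */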

/*
 * Encrypt one 64-bit block.
 */
static void fcrypt_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm);
	struct {
		__be32 l, r;
	} X;

	memcpy(&X, src, sizeof(X));

	F_ENCRYPT(X.r, X.l, ctx->sched[0x0]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x1]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x2]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x3]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x4]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x5]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x6]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x7]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x8]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x9]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0xa]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0xb]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0xc]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0xd]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0xe]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0xf]);

	memcpy(dst, &X, sizeof(X));
}

/*
 * Decrypt one 64-bit block: the same rounds, with the key schedule reversed.
 */
static void fcrypt_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm);
	struct {
		__be32 l, r;
	} X;

	memcpy(&X, src, sizeof(X));

	F_ENCRYPT(X.l, X.r, ctx->sched[0xf]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0xe]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0xd]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0xc]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0xb]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0xa]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x9]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x8]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x7]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x6]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x5]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x4]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x3]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x2]);
	F_ENCRYPT(X.l, X.r, ctx->sched[0x1]);
	F_ENCRYPT(X.r, X.l, ctx->sched[0x0]);

	memcpy(dst, &X, sizeof(X));
}

/*
 * Generate a key schedule from the key. The least significant bit in each key
 * byte is parity and is ignored, leaving 56 significant bits to scatter over
 * the 16 round subkeys. For each subkey the low-order 32 bits of the 56-bit
 * value are used, and the value is then rotated right by 11 bits.
 */
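/*
 * keylen is not validated here: the crypto core enforces the
 * cia_min_keysize/cia_max_keysize bounds declared below, which fix it at 8.
 */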
static int fcrypt_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
	struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm);

#if BITS_PER_LONG == 64  /* the 64-bit version can also be used for 32-bit
			  * kernels - it seems to be faster but the code is
			  * larger */

	u64 k;	/* k holds all 56 non-parity bits */

	/* discard the parity bits */
	k = (*key++) >> 1;
	k <<= 7;
	k |= (*key++) >> 1;
	k <<= 7;
	k |= (*key++) >> 1;
	k <<= 7;
	k |= (*key++) >> 1;
	k <<= 7;
	k |= (*key++) >> 1;
	k <<= 7;
	k |= (*key++) >> 1;
	k <<= 7;
	k |= (*key++) >> 1;
	k <<= 7;
	k |= (*key) >> 1;

	/* Use lower 32 bits for schedule, rotate by 11 each round (16 times) */
	ctx->sched[0x0] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x1] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x2] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x3] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x4] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x5] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x6] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x7] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x8] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0x9] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0xa] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0xb] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0xc] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0xd] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0xe] = cpu_to_be32(k); ror56_64(k, 11);
	ctx->sched[0xf] = cpu_to_be32(k);

	return 0;
#else
	u32 hi, lo;		/* hi is upper 24 bits and lo lower 32, total 56 */

	/* discard the parity bits */
	lo = (*key++) >> 1;
	lo <<= 7;
	lo |= (*key++) >> 1;
	lo <<= 7;
	lo |= (*key++) >> 1;
	lo <<= 7;
	lo |= (*key++) >> 1;
	hi = lo >> 4;
	lo &= 0xf;
	lo <<= 7;
	lo |= (*key++) >> 1;
	lo <<= 7;
	lo |= (*key++) >> 1;
	lo <<= 7;
	lo |= (*key++) >> 1;
	lo <<= 7;
	lo |= (*key) >> 1;

	/* Use lower 32 bits for schedule, rotate by 11 each round (16 times) */
	ctx->sched[0x0] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x1] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x2] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x3] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x4] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x5] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x6] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x7] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x8] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0x9] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0xa] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0xb] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0xc] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0xd] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0xe] = cpu_to_be32(lo); ror56(hi, lo, 11);
	ctx->sched[0xf] = cpu_to_be32(lo);
	return 0;
#endif
}

static struct crypto_alg fcrypt_alg = {
	.cra_name		=	"fcrypt",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	8,
	.cra_ctxsize		=	sizeof(struct fcrypt_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_alignmask		=	3,
	.cra_list		=	LIST_HEAD_INIT(fcrypt_alg.cra_list),
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	8,
	.cia_max_keysize	=	8,
	.cia_setkey		=	fcrypt_setkey,
	.cia_encrypt		=	fcrypt_encrypt,
	.cia_decrypt		=	fcrypt_decrypt } }
};

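/*
 * Usage sketch (illustrative only; variable names are made up): a kernel-side
 * user such as the RxRPC rxkad code would drive this through the single-block
 * cipher API, roughly as follows:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("fcrypt", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_cipher_setkey(tfm, session_key, 8);
 *	crypto_cipher_encrypt_one(tfm, out, in);
 *	crypto_free_cipher(tfm);
 */
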
static int __init fcrypt_mod_init(void)
{
	return crypto_register_alg(&fcrypt_alg);
}

static void __exit fcrypt_mod_fini(void)
{
	crypto_unregister_alg(&fcrypt_alg);
}

module_init(fcrypt_mod_init);
module_exit(fcrypt_mod_fini);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("FCrypt Cipher Algorithm");
MODULE_AUTHOR("David Howells <dhowells@redhat.com>");