/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
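/*
 * ce_aes_sub() applies the AES SubWord transformation to a 32-bit word;
 * ce_aes_invert() applies the Inverse MixColumns transformation to one
 * round key, as used below to build the Equivalent Inverse Cipher keys.
 */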
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks, u8 iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
                                   int rounds, int blocks, u8 iv[],
                                   u8 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
                                   int rounds, int blocks, u8 iv[],
                                   u8 const rk2[], int first);

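/*
 * One 16-byte AES block; lets the key schedule code copy and invert
 * whole round keys a block at a time.
 */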
struct aes_block {
        u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
        return 6 + ctx->key_length / 4;
}

static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
                            unsigned int key_len)
{
        /*
         * The AES key schedule round constants
         */
        static u8 const rcon[] = {
                0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
        };

        u32 kwords = key_len / sizeof(u32);
        struct aes_block *key_enc, *key_dec;
        int i, j;

        if (key_len != AES_KEYSIZE_128 &&
            key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256)
                return -EINVAL;

        memcpy(ctx->key_enc, in_key, key_len);
        ctx->key_length = key_len;

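        /*
         * Each pass below derives one round key from the previous one:
         * RotWord + SubWord on the last word (SubWord done in hardware
         * via ce_aes_sub()), an XOR with the round constant, then the
         * chain of word-by-word XORs.
         */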
        kernel_neon_begin();
        for (i = 0; i < sizeof(rcon); i++) {
                u32 *rki = ctx->key_enc + (i * kwords);
                u32 *rko = rki + kwords;

                rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
                rko[0] = rko[0] ^ rki[0] ^ rcon[i];
                rko[1] = rko[0] ^ rki[1];
                rko[2] = rko[1] ^ rki[2];
                rko[3] = rko[2] ^ rki[3];

                if (key_len == AES_KEYSIZE_192) {
                        if (i >= 7)
                                break;
                        rko[4] = rko[3] ^ rki[4];
                        rko[5] = rko[4] ^ rki[5];
                } else if (key_len == AES_KEYSIZE_256) {
                        if (i >= 6)
                                break;
                        rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
                        rko[5] = rko[4] ^ rki[5];
                        rko[6] = rko[5] ^ rki[6];
                        rko[7] = rko[6] ^ rki[7];
                }
        }

        /*
         * Generate the decryption keys for the Equivalent Inverse Cipher.
         * This involves reversing the order of the round keys, and applying
         * the Inverse Mix Columns transformation on all but the first and
         * the last one.
         */
        key_enc = (struct aes_block *)ctx->key_enc;
        key_dec = (struct aes_block *)ctx->key_dec;
        j = num_rounds(ctx);

        key_dec[0] = key_enc[j];
        for (i = 1, j--; j > 0; i++, j--)
                ce_aes_invert(key_dec + i, key_enc + j);
        key_dec[i] = key_enc[0];

        kernel_neon_end();
        return 0;
}

static int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                         unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = ce_aes_expandkey(ctx, in_key, key_len);
        if (!ret)
                return 0;

        tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
}

struct crypto_aes_xts_ctx {
        struct crypto_aes_ctx key1;
        struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = xts_check_key(tfm, in_key, key_len);
        if (ret)
                return ret;

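        /* first half of the key encrypts the data, second half the tweak */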
        ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
        if (!ret)
                ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
                                       key_len / 2);
        if (!ret)
                return 0;

        tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
}

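/*
 * All modes below follow the same pattern: walk the scatterlists in
 * block-sized chunks and hand each run of full blocks to the NEON core
 * routines, with kernel_neon_begin()/kernel_neon_end() bracketing all
 * SIMD register use.
 */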
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_enc, num_rounds(ctx), blocks);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_dec, num_rounds(ctx), blocks);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
                                   walk.iv);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_dec, num_rounds(ctx), blocks,
                                   walk.iv);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err, blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
                                   walk.iv);
                nbytes -= blocks * AES_BLOCK_SIZE;
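                /*
                 * If all that remains is a partial final block, leave it
                 * to the tail handling below.
                 */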
                if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
                        break;
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        if (walk.nbytes % AES_BLOCK_SIZE) {
                u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 __aligned(8) tail[AES_BLOCK_SIZE];

                /*
                 * The minimum alignment is 8 bytes, so if nbytes is <= 8,
                 * we need to tell ce_aes_ctr_encrypt() (via blocks == -1)
                 * to only read half a block.
                 */
                blocks = (nbytes <= 8) ? -1 : 1;

                ce_aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc,
                                   num_rounds(ctx), blocks, walk.iv);
                memcpy(tdst, tail, nbytes);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        kernel_neon_end();

        return err;
}

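/*
 * For XTS, the 'first' flag tells the core routine that this is the first
 * call for the request, so it must derive the initial tweak by encrypting
 * the IV with the second (tweak) key before processing any blocks.
 */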
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = num_rounds(&ctx->key1);
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key1.key_enc, rounds, blocks,
                                   walk.iv, (u8 *)ctx->key2.key_enc, first);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();

        return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = num_rounds(&ctx->key1);
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key1.key_dec, rounds, blocks,
                                   walk.iv, (u8 *)ctx->key2.key_enc, first);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();

        return err;
}

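/*
 * The __-prefixed entries below are the internal, synchronous versions
 * (CRYPTO_ALG_INTERNAL), usable only where the NEON unit is available;
 * the plain-named ABLKCIPHER entries are the async front ends built on
 * ablk_helper, which defer to cryptd when SIMD cannot be used directly.
 */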
static struct crypto_alg aes_algs[] = { {
        .cra_name               = "__ecb-aes-ce",
        .cra_driver_name        = "__driver-ecb-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = 0,
                .setkey         = ce_aes_setkey,
                .encrypt        = ecb_encrypt,
                .decrypt        = ecb_decrypt,
        },
}, {
        .cra_name               = "__cbc-aes-ce",
        .cra_driver_name        = "__driver-cbc-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ce_aes_setkey,
                .encrypt        = cbc_encrypt,
                .decrypt        = cbc_decrypt,
        },
}, {
        .cra_name               = "__ctr-aes-ce",
        .cra_driver_name        = "__driver-ctr-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ce_aes_setkey,
                .encrypt        = ctr_encrypt,
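                /* CTR is a stream mode, so decryption == encryption */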
                .decrypt        = ctr_encrypt,
        },
}, {
        .cra_name               = "__xts-aes-ce",
        .cra_driver_name        = "__driver-xts-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_xts_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = xts_set_key,
                .encrypt        = xts_encrypt,
                .decrypt        = xts_decrypt,
        },
}, {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = 0,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
} };
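
/*
 * Usage sketch (illustrative only, not part of this driver): a kernel
 * user would request one of the algorithms above by name and let the
 * crypto API pick the highest-priority implementation, e.g.:
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (!IS_ERR(tfm)) {
 *		crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *		...
 *		crypto_free_ablkcipher(tfm);
 *	}
 *
 * 'key' and the request handling are assumed/omitted here.
 */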

static int __init aes_init(void)
{
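        /* the AES instructions are advertised via the HWCAP2 aux vector */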
        if (!(elf_hwcap2 & HWCAP2_AES))
                return -ENODEV;
        return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
        crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);