/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif
#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	struct crypto_aes_ctx aes_key_expanded
		__attribute__ ((__aligned__(AESNI_ALIGN)));
	u8 nonce[4];
};
struct aesni_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	u8 raw_aes_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};
struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
int crypto_fpu_init(void);
void crypto_fpu_exit(void);
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096
#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
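
/*
 * Illustrative sketch (compiled out, not used by the driver): how the
 * 16-byte pre-counter block j0 described above is laid out. The helper
 * name and its salt/seq_iv parameter names are hypothetical; the layout
 * matches the IV construction done in helper_rfc4106_encrypt() below.
 */
#if 0
static void example_build_j0(u8 j0[16], const u8 salt[4], const u8 seq_iv[8])
{
	memcpy(j0, salt, 4);			/* 4-byte salt from the SA */
	memcpy(j0 + 4, seq_iv, 8);		/* 8-byte per-packet IV */
	*(__be32 *)(j0 + 12) = cpu_to_be32(1);	/* trailing 0x00000001 */
}
#endif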
#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);
asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	/* The AVX gen2 path only handles 128-bit keys and only pays off
	 * above AVX_GEN2_OPTSIZE bytes; otherwise use the SSE version. */
	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
#endif
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	/* Pick SSE, AVX gen2 or AVX2 gen4 based on request size and
	 * key length. */
	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey,
				aad, aad_len, auth_tag, auth_tag_len);
	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
#endif
static void (*aesni_gcm_enc_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static void (*aesni_gcm_dec_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
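
/*
 * Illustrative sketch (compiled out): what aes_ctx() above achieves.
 * If crypto_tfm_ctx_alignment() only guaranteed, say, 8-byte alignment,
 * a raw ctx pointer such as 0x...1008 would be rounded up to 0x...1010;
 * the "+ AESNI_ALIGN - 1" slack in the cra_ctxsize fields below keeps
 * the aligned pointer inside the allocation. Helper name is hypothetical.
 */
#if 0
static struct crypto_aes_ctx *example_aligned_ctx(void *raw_ctx)
{
	return (struct crypto_aes_ctx *)
		ALIGN((unsigned long)raw_ctx, AESNI_ALIGN);
}
#endif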
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* The AES-NI key schedule needs the FPU; fall back to the
	 * generic C key expansion when it is not usable. */
	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
#ifdef CONFIG_X86_64
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	/* Encrypt the counter block, then XOR only as much keystream
	 * as the final partial block needs. */
	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance
	 * aes_set_key_common() ensures that key length is one of
	 * {128, 192, 256} bits
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_fpu_end();

	return err;
}
#endif
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ecb-aes-aesni");
}

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-cbc-aes-aesni");
}

#ifdef CONFIG_X86_64
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ctr-aes-aesni");
}

#endif
#if IS_ENABLED(CONFIG_CRYPTO_PCBC)
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "fpu(pcbc(__driver-aes-aesni))");
}
#endif
static void lrw_xts_encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_enc(ctx, blks, blks, nbytes);
}

static void lrw_xts_decrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_dec(ctx, blks, blks, nbytes);
}
static int lrw_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = aes_set_key_common(tfm, ctx->raw_aes_ctx, key,
				 keylen - AES_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - AES_BLOCK_SIZE);
}
static void lrw_aesni_exit_tfm(struct crypto_tfm *tfm)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int xts_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = xts_check_key(tfm, key, keylen);
	if (err)
		return err;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(tfm, ctx->raw_crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(tfm, ctx->raw_tweak_ctx, key + keylen / 2,
				  keylen / 2);
}
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

#ifdef CONFIG_X86_64
static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}

#else
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}

#endif
#ifdef CONFIG_X86_64
static int rfc4106_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}
static void rfc4106_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;

	/* Zero the hash sub key container; enciphering an all-zero block
	 * with the AES key yields the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/* Account for the 4 byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
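
/*
 * Illustrative sketch (compiled out): the RFC4106 key blob passed to
 * setkey is the raw AES key with the 4-byte salt appended, e.g. a
 * 20-byte blob for AES-128. The helper and its names are hypothetical.
 */
#if 0
static void example_split_rfc4106_key(const u8 blob[20])
{
	const u8 *aes_key = blob;	/* bytes 0..15: AES-128 key */
	const u8 *salt = blob + 16;	/* bytes 16..19: nonce/salt */

	(void)aes_key;
	(void)salt;
}
#endif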
static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
			   unsigned int key_len)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}
	return 0;
}

/* This is the Integrity Check Value (aka the authentication tag) length
 * and can be 8, 12 or 16 bytes long. */
static int rfc4106_set_authsize(struct crypto_aead *parent,
				unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};
	unsigned int i;

	/* Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length must be 16 or 20 bytes. */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if (sg_is_last(req->src) &&
	    req->src->offset + req->src->length <= PAGE_SIZE &&
	    sg_is_last(req->dst) &&
	    req->dst->offset + req->dst->length <= PAGE_SIZE) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
			GFP_ATOMIC);
		if (unlikely(!assoc))
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_enc_tfm(aes_ctx, dst, src, req->cryptlen, iv,
			  ctx->hash_subkey, assoc, req->assoclen - 8,
			  dst + req->cryptlen, auth_tag_len);
	kernel_fpu_end();

	/* The authTag (aka the Integrity Check Value) needs to be written
	 * back to the packet. */
	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 req->cryptlen + auth_tag_len, 1);
		kfree(assoc);
	}
	return 0;
}
static int helper_rfc4106_decrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	unsigned long tempCipherLen = 0;
	__be32 counter = cpu_to_be32(1);
	int retval = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	u8 authTag[16];
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};
	unsigned int i;

	/* Assuming we are supporting rfc4106 64-bit extended sequence
	 * numbers, the AAD length must be 16 or 20 bytes. */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if (sg_is_last(req->src) &&
	    req->src->offset + req->src->length <= PAGE_SIZE &&
	    sg_is_last(req->dst) &&
	    req->dst->offset + req->dst->length <= PAGE_SIZE) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
		if (unlikely(!assoc))
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_dec_tfm(aes_ctx, dst, src, tempCipherLen, iv,
			  ctx->hash_subkey, assoc, req->assoclen - 8,
			  authTag, auth_tag_len);
	kernel_fpu_end();

	/* Compare generated tag with passed in tag. */
	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
		 -EBADMSG : 0;

	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 tempCipherLen, 1);
		kfree(assoc);
	}
	return retval;
}
static int rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	/* Run synchronously on the cryptd child when the FPU is usable
	 * and no requests are already queued on cryptd (which would
	 * break ordering); otherwise defer to the cryptd workqueue. */
	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_encrypt(req);
}
static int rfc4106_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_decrypt(req);
}
#endif
static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
}, {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#ifdef CONFIG_X86_64
}, {
	.cra_name		= "__ctr-aes-aesni",
	.cra_driver_name	= "__driver-ctr-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			/* CTR decryption is the same operation */
			.decrypt	= ablk_encrypt,
		},
	},
#endif
#if IS_ENABLED(CONFIG_CRYPTO_PCBC)
}, {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#endif
}, {
	.cra_name		= "__lrw-aes-aesni",
	.cra_driver_name	= "__driver-lrw-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_aesni_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= lrw_aesni_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-aes-aesni",
	.cra_driver_name	= "__driver-xts-aes-aesni",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aesni_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
#ifdef CONFIG_X86_64
static struct aead_alg aesni_aead_algs[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= 8,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm-aes-aesni",
		.cra_driver_name	= "__driver-gcm-aes-aesni",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= rfc4106_init,
	.exit			= rfc4106_exit,
	.setkey			= rfc4106_set_key,
	.setauthsize		= rfc4106_set_authsize,
	.encrypt		= rfc4106_encrypt,
	.decrypt		= rfc4106_decrypt,
	.ivsize			= 8,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "rfc4106(gcm(aes))",
		.cra_driver_name	= "rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		goto fpu_exit;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_algs;

	return err;

unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
	crypto_fpu_exit();
	return err;
}
static void __exit aesni_exit(void)
{
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));

	crypto_fpu_exit();
}
late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");