/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"
34 void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
35 void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
36 void (*load_encrypt_keys)(const u64 *key);
37 void (*load_decrypt_keys)(const u64 *key);
38 void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
40 void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
42 void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
43 unsigned int len, u64 *iv);
44 void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
45 unsigned int len, u64 *iv);
46 void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
47 unsigned int len, u64 *iv);
50 struct crypto_sparc64_aes_ctx {
52 u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
54 u32 expanded_key_length;
57 extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
59 extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
61 extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
64 extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
66 extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
68 extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
71 extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
72 extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
73 extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);
75 extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
76 extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
77 extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);
79 extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
80 u64 *output, unsigned int len);
81 extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
82 u64 *output, unsigned int len);
83 extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
84 u64 *output, unsigned int len);
86 extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
87 u64 *output, unsigned int len);
88 extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
89 u64 *output, unsigned int len);
90 extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
91 u64 *output, unsigned int len);
93 extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
94 u64 *output, unsigned int len,
97 extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
98 u64 *output, unsigned int len,
101 extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
102 u64 *output, unsigned int len,
105 extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
106 u64 *output, unsigned int len,
109 extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
110 u64 *output, unsigned int len,
113 extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
114 u64 *output, unsigned int len,
117 extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
118 u64 *output, unsigned int len,
120 extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
121 u64 *output, unsigned int len,
123 extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
124 u64 *output, unsigned int len,
127 struct aes_ops aes128_ops = {
128 .encrypt = aes_sparc64_encrypt_128,
129 .decrypt = aes_sparc64_decrypt_128,
130 .load_encrypt_keys = aes_sparc64_load_encrypt_keys_128,
131 .load_decrypt_keys = aes_sparc64_load_decrypt_keys_128,
132 .ecb_encrypt = aes_sparc64_ecb_encrypt_128,
133 .ecb_decrypt = aes_sparc64_ecb_decrypt_128,
134 .cbc_encrypt = aes_sparc64_cbc_encrypt_128,
135 .cbc_decrypt = aes_sparc64_cbc_decrypt_128,
136 .ctr_crypt = aes_sparc64_ctr_crypt_128,
139 struct aes_ops aes192_ops = {
140 .encrypt = aes_sparc64_encrypt_192,
141 .decrypt = aes_sparc64_decrypt_192,
142 .load_encrypt_keys = aes_sparc64_load_encrypt_keys_192,
143 .load_decrypt_keys = aes_sparc64_load_decrypt_keys_192,
144 .ecb_encrypt = aes_sparc64_ecb_encrypt_192,
145 .ecb_decrypt = aes_sparc64_ecb_decrypt_192,
146 .cbc_encrypt = aes_sparc64_cbc_encrypt_192,
147 .cbc_decrypt = aes_sparc64_cbc_decrypt_192,
148 .ctr_crypt = aes_sparc64_ctr_crypt_192,
151 struct aes_ops aes256_ops = {
152 .encrypt = aes_sparc64_encrypt_256,
153 .decrypt = aes_sparc64_decrypt_256,
154 .load_encrypt_keys = aes_sparc64_load_encrypt_keys_256,
155 .load_decrypt_keys = aes_sparc64_load_decrypt_keys_256,
156 .ecb_encrypt = aes_sparc64_ecb_encrypt_256,
157 .ecb_decrypt = aes_sparc64_ecb_decrypt_256,
158 .cbc_encrypt = aes_sparc64_cbc_encrypt_256,
159 .cbc_decrypt = aes_sparc64_cbc_decrypt_256,
160 .ctr_crypt = aes_sparc64_ctr_crypt_256,
163 extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
164 unsigned int key_len);
166 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
167 unsigned int key_len)
169 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
170 u32 *flags = &tfm->crt_flags;
173 case AES_KEYSIZE_128:
174 ctx->expanded_key_length = 0xb0;
175 ctx->ops = &aes128_ops;
178 case AES_KEYSIZE_192:
179 ctx->expanded_key_length = 0xd0;
180 ctx->ops = &aes192_ops;
183 case AES_KEYSIZE_256:
184 ctx->expanded_key_length = 0xf0;
185 ctx->ops = &aes256_ops;
189 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
193 aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
194 ctx->key_length = key_len;
199 static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
201 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
203 ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
206 static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
208 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
210 ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
/* Round a byte count down to a whole number of AES blocks. */
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
215 static int ecb_encrypt(struct blkcipher_desc *desc,
216 struct scatterlist *dst, struct scatterlist *src,
219 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
220 struct blkcipher_walk walk;
223 blkcipher_walk_init(&walk, dst, src, nbytes);
224 err = blkcipher_walk_virt(desc, &walk);
226 ctx->ops->load_encrypt_keys(&ctx->key[0]);
227 while ((nbytes = walk.nbytes)) {
228 unsigned int block_len = nbytes & AES_BLOCK_MASK;
230 if (likely(block_len)) {
231 ctx->ops->ecb_encrypt(&ctx->key[0],
232 (const u64 *)walk.src.virt.addr,
233 (u64 *) walk.dst.virt.addr,
236 nbytes &= AES_BLOCK_SIZE - 1;
237 err = blkcipher_walk_done(desc, &walk, nbytes);
243 static int ecb_decrypt(struct blkcipher_desc *desc,
244 struct scatterlist *dst, struct scatterlist *src,
247 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
248 struct blkcipher_walk walk;
252 blkcipher_walk_init(&walk, dst, src, nbytes);
253 err = blkcipher_walk_virt(desc, &walk);
255 ctx->ops->load_decrypt_keys(&ctx->key[0]);
256 key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
257 while ((nbytes = walk.nbytes)) {
258 unsigned int block_len = nbytes & AES_BLOCK_MASK;
260 if (likely(block_len)) {
261 ctx->ops->ecb_decrypt(key_end,
262 (const u64 *) walk.src.virt.addr,
263 (u64 *) walk.dst.virt.addr, block_len);
265 nbytes &= AES_BLOCK_SIZE - 1;
266 err = blkcipher_walk_done(desc, &walk, nbytes);
273 static int cbc_encrypt(struct blkcipher_desc *desc,
274 struct scatterlist *dst, struct scatterlist *src,
277 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
278 struct blkcipher_walk walk;
281 blkcipher_walk_init(&walk, dst, src, nbytes);
282 err = blkcipher_walk_virt(desc, &walk);
284 ctx->ops->load_encrypt_keys(&ctx->key[0]);
285 while ((nbytes = walk.nbytes)) {
286 unsigned int block_len = nbytes & AES_BLOCK_MASK;
288 if (likely(block_len)) {
289 ctx->ops->cbc_encrypt(&ctx->key[0],
290 (const u64 *)walk.src.virt.addr,
291 (u64 *) walk.dst.virt.addr,
292 block_len, (u64 *) walk.iv);
294 nbytes &= AES_BLOCK_SIZE - 1;
295 err = blkcipher_walk_done(desc, &walk, nbytes);
301 static int cbc_decrypt(struct blkcipher_desc *desc,
302 struct scatterlist *dst, struct scatterlist *src,
305 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
306 struct blkcipher_walk walk;
310 blkcipher_walk_init(&walk, dst, src, nbytes);
311 err = blkcipher_walk_virt(desc, &walk);
313 ctx->ops->load_decrypt_keys(&ctx->key[0]);
314 key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
315 while ((nbytes = walk.nbytes)) {
316 unsigned int block_len = nbytes & AES_BLOCK_MASK;
318 if (likely(block_len)) {
319 ctx->ops->cbc_decrypt(key_end,
320 (const u64 *) walk.src.virt.addr,
321 (u64 *) walk.dst.virt.addr,
322 block_len, (u64 *) walk.iv);
324 nbytes &= AES_BLOCK_SIZE - 1;
325 err = blkcipher_walk_done(desc, &walk, nbytes);
332 static int ctr_crypt(struct blkcipher_desc *desc,
333 struct scatterlist *dst, struct scatterlist *src,
336 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
337 struct blkcipher_walk walk;
340 blkcipher_walk_init(&walk, dst, src, nbytes);
341 err = blkcipher_walk_virt(desc, &walk);
343 ctx->ops->load_encrypt_keys(&ctx->key[0]);
344 while ((nbytes = walk.nbytes)) {
345 unsigned int block_len = nbytes & AES_BLOCK_MASK;
347 if (likely(block_len)) {
348 ctx->ops->ctr_crypt(&ctx->key[0],
349 (const u64 *)walk.src.virt.addr,
350 (u64 *) walk.dst.virt.addr,
351 block_len, (u64 *) walk.iv);
353 nbytes &= AES_BLOCK_SIZE - 1;
354 err = blkcipher_walk_done(desc, &walk, nbytes);
360 static struct crypto_alg algs[] = { {
362 .cra_driver_name = "aes-sparc64",
363 .cra_priority = SPARC_CR_OPCODE_PRIORITY,
364 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
365 .cra_blocksize = AES_BLOCK_SIZE,
366 .cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
368 .cra_module = THIS_MODULE,
371 .cia_min_keysize = AES_MIN_KEY_SIZE,
372 .cia_max_keysize = AES_MAX_KEY_SIZE,
373 .cia_setkey = aes_set_key,
374 .cia_encrypt = aes_encrypt,
375 .cia_decrypt = aes_decrypt
379 .cra_name = "ecb(aes)",
380 .cra_driver_name = "ecb-aes-sparc64",
381 .cra_priority = SPARC_CR_OPCODE_PRIORITY,
382 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
383 .cra_blocksize = AES_BLOCK_SIZE,
384 .cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
386 .cra_type = &crypto_blkcipher_type,
387 .cra_module = THIS_MODULE,
390 .min_keysize = AES_MIN_KEY_SIZE,
391 .max_keysize = AES_MAX_KEY_SIZE,
392 .setkey = aes_set_key,
393 .encrypt = ecb_encrypt,
394 .decrypt = ecb_decrypt,
398 .cra_name = "cbc(aes)",
399 .cra_driver_name = "cbc-aes-sparc64",
400 .cra_priority = SPARC_CR_OPCODE_PRIORITY,
401 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
402 .cra_blocksize = AES_BLOCK_SIZE,
403 .cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
405 .cra_type = &crypto_blkcipher_type,
406 .cra_module = THIS_MODULE,
409 .min_keysize = AES_MIN_KEY_SIZE,
410 .max_keysize = AES_MAX_KEY_SIZE,
411 .setkey = aes_set_key,
412 .encrypt = cbc_encrypt,
413 .decrypt = cbc_decrypt,
417 .cra_name = "ctr(aes)",
418 .cra_driver_name = "ctr-aes-sparc64",
419 .cra_priority = SPARC_CR_OPCODE_PRIORITY,
420 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
421 .cra_blocksize = AES_BLOCK_SIZE,
422 .cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
424 .cra_type = &crypto_blkcipher_type,
425 .cra_module = THIS_MODULE,
428 .min_keysize = AES_MIN_KEY_SIZE,
429 .max_keysize = AES_MAX_KEY_SIZE,
430 .setkey = aes_set_key,
431 .encrypt = ctr_crypt,
432 .decrypt = ctr_crypt,
437 static bool __init sparc64_has_aes_opcode(void)
441 if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
444 __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
445 if (!(cfr & CFR_AES))
451 static int __init aes_sparc64_mod_init(void)
455 for (i = 0; i < ARRAY_SIZE(algs); i++)
456 INIT_LIST_HEAD(&algs[i].cra_list);
458 if (sparc64_has_aes_opcode()) {
459 pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
460 return crypto_register_algs(algs, ARRAY_SIZE(algs));
462 pr_info("sparc64 aes opcodes not available.\n");
466 static void __exit aes_sparc64_mod_fini(void)
468 crypto_unregister_algs(algs, ARRAY_SIZE(algs));
471 module_init(aes_sparc64_mod_init);
472 module_exit(aes_sparc64_mod_fini);
474 MODULE_LICENSE("GPL");
475 MODULE_DESCRIPTION("AES Secure Hash Algorithm, sparc64 aes opcode accelerated");