/*
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
 * License terms: GNU General Public License (GPL) version 2
 */
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irqreturn.h>
#include <linux/klist.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/regulator/consumer.h>
#include <linux/semaphore.h>
#include <linux/platform_data/dma-ste-dma40.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/scatterwalk.h>

#include <linux/platform_data/crypto-ux500.h>
#include <mach/hardware.h>

#include "cryp_p.h"
#include "cryp.h"
#define CRYP_MAX_KEY_SIZE	32
#define BYTES_PER_WORD		4

static int cryp_mode;
static atomic_t session_id;

static struct stedma40_chan_cfg *mem_to_engine;
static struct stedma40_chan_cfg *engine_to_mem;
/**
 * struct cryp_driver_data - data specific to the driver.
 *
 * @device_list: A list of registered devices to choose from.
 * @device_allocation: A semaphore initialized with number of devices.
 */
struct cryp_driver_data {
	struct klist device_list;
	struct semaphore device_allocation;
};
/**
 * struct cryp_ctx - Crypto context
 * @config: Crypto mode.
 * @key[CRYP_MAX_KEY_SIZE]: Key.
 * @keylen: Length of key.
 * @iv: Pointer to initialization vector.
 * @indata: Pointer to indata.
 * @outdata: Pointer to outdata.
 * @datalen: Length of indata.
 * @outlen: Length of outdata.
 * @blocksize: Size of blocks.
 * @updated: Updated flag.
 * @dev_ctx: Device dependent context.
 * @device: Pointer to the device.
 * @session_id: Session id used to tell contexts apart.
 */
struct cryp_ctx {
	struct cryp_config config;
	u8 key[CRYP_MAX_KEY_SIZE];
	u32 keylen;
	u8 *iv;
	const u8 *indata;
	u8 *outdata;
	u32 datalen;
	u32 outlen;
	u32 blocksize;
	u8 updated;
	struct cryp_device_context dev_ctx;
	struct cryp_device_data *device;
	u32 session_id;
};
static struct cryp_driver_data driver_data;
/**
 * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
 * @in: Data to convert.
 */
static inline u32 uint8p_to_uint32_be(u8 *in)
{
	u32 *data = (u32 *)in;

	return cpu_to_be32p(data);
}
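
/*
 * For example, in[] = { 0x01, 0x02, 0x03, 0x04 } yields 0x01020304 on both
 * little- and big-endian CPUs, i.e. the four bytes are read as a single
 * big-endian 32-bit word.
 */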
/**
 * swap_bits_in_byte - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 *  Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
 *  nibble 2 (n2) bits 4-7.
 *
 * Nibble 1 (n1):
 * (The "old" (moved) bit is replaced with a zero)
 * 1. Move bit 6 and 7, 4 positions to the right.
 * 2. Move bit 3 and 5, 2 positions to the right.
 * 3. Move bit 1-4, 1 position to the right.
 *
 * Nibble 2 (n2):
 * 1. Move bit 0 and 1, 4 positions to the left.
 * 2. Move bit 2 and 4, 2 positions to the left.
 * 3. Move bit 3-6, 1 position to the left.
 *
 * Combine the two nibbles to a complete and swapped byte.
 */
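/*
 * For example, swap_bits_in_byte(0x12) mirrors 00010010b into 01001000b,
 * i.e. it returns 0x48.
 */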
static inline u8 swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK 0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK 0x28 /* (After right shift 4) Bits 3 and 5,
				right shift 2 */
#define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4,
				right shift 1 */
#define L_SHIFT_4_MASK 0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK 0x14 /* (After left shift 4) Bits 2 and 4,
				left shift 2 */
#define L_SHIFT_1_MASK 0x78 /* (After left shift 2) Bits 3-6,
				left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	/* Right shift 4, bits 6 and 7 */
	n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
	/* Right shift 2, bits 3 and 5 */
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	/* Right shift 1, bits 1-4 */
	n1 = (n1 & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	/* Left shift 4, bits 0 and 1 */
	n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
	/* Left shift 2, bits 2 and 4 */
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	/* Left shift 1, bits 3-6 */
	n2 = (n2 & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}
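
/*
 * Used by cfg_keys() for AES keys: the key material is written to the
 * hardware with the 32-bit words in reverse order and every byte
 * bit-mirrored.
 */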
static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
						      u8 *out, u32 len)
{
	unsigned int i;
	int j, index;

	j = len - BYTES_PER_WORD;
	while (j >= 0) {
		for (i = 0; i < BYTES_PER_WORD; i++) {
			index = len - j - BYTES_PER_WORD + i;
			out[j + i] = swap_bits_in_byte(in[index]);
		}
		j -= BYTES_PER_WORD;
	}
}
static void add_session_id(struct cryp_ctx *ctx)
{
	/*
	 * We never want 0 to be a valid value, since this is the default
	 * value for the software context.
	 */
	if (unlikely(atomic_inc_and_test(&session_id)))
		atomic_inc(&session_id);

	ctx->session_id = atomic_read(&session_id);
}
static irqreturn_t cryp_interrupt_handler(int irq, void *param)
{
	struct cryp_ctx *ctx;
	int i;
	struct cryp_device_data *device_data;

	if (param == NULL) {
		BUG_ON(!param);
		return IRQ_HANDLED;
	}

	/* The device is coming from the one found in hw_crypt_noxts. */
	device_data = (struct cryp_device_data *)param;

	ctx = device_data->current_ctx;

	if (ctx == NULL) {
		BUG_ON(!ctx);
		return IRQ_HANDLED;
	}

	dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
		cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
		"out" : "in");

	if (cryp_pending_irq_src(device_data,
				 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
		if (ctx->outlen / ctx->blocksize > 0) {
			for (i = 0; i < ctx->blocksize / 4; i++) {
				/* Read one 32-bit word from the output FIFO */
				*(u32 *)ctx->outdata = readl_relaxed(
						&device_data->base->dout);
				ctx->outdata += 4;
				ctx->outlen -= 4;
			}

			if (ctx->outlen == 0) {
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_OUTPUT_FIFO);
			}
		}
	} else if (cryp_pending_irq_src(device_data,
					CRYP_IRQ_SRC_INPUT_FIFO)) {
		if (ctx->datalen / ctx->blocksize > 0) {
			for (i = 0; i < ctx->blocksize / 4; i++) {
				/* Write one 32-bit word to the input FIFO */
				writel_relaxed(*(const u32 *)ctx->indata,
					       &device_data->base->din);
				ctx->indata += 4;
				ctx->datalen -= 4;
			}

			if (ctx->datalen == 0)
				cryp_disable_irq_src(device_data,
						     CRYP_IRQ_SRC_INPUT_FIFO);

			if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
				CRYP_PUT_BITS(&device_data->base->cr,
					      CRYP_START_ENABLE,
					      CRYP_CR_START_POS,
					      CRYP_CR_START_MASK);

				cryp_wait_until_done(device_data);
			}
		}
	}

	return IRQ_HANDLED;
}
static int mode_is_aes(enum cryp_algo_mode mode)
{
	return	CRYP_ALGO_AES_ECB == mode ||
		CRYP_ALGO_AES_CBC == mode ||
		CRYP_ALGO_AES_CTR == mode ||
		CRYP_ALGO_AES_XTS == mode;
}
static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
		  enum cryp_init_vector_index index)
{
	struct cryp_init_vector_value vector_value;

	dev_dbg(device_data->dev, "[%s]", __func__);

	vector_value.init_value_left = left;
	vector_value.init_value_right = right;

	return cryp_configure_init_vector(device_data,
					  index,
					  vector_value);
}
static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
{
	int i;
	int status = 0;
	int num_of_regs = ctx->blocksize / 8;
	u32 iv[AES_BLOCK_SIZE / 4];

	dev_dbg(device_data->dev, "[%s]", __func__);

	/*
	 * Since we loop on num_of_regs we need to have a check in case
	 * someone provides an incorrect blocksize which would force calling
	 * cfg_iv with i greater than 2 which is an error.
	 */
	if (num_of_regs > 2) {
		dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
			__func__, ctx->blocksize);
		return -EINVAL;
	}

	for (i = 0; i < ctx->blocksize / 4; i++)
		iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);

	for (i = 0; i < num_of_regs; i++) {
		status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
				(enum cryp_init_vector_index) i);
		if (status != 0)
			return status;
	}
	return status;
}
static int set_key(struct cryp_device_data *device_data,
		   u32 left_key,
		   u32 right_key,
		   enum cryp_key_reg_index index)
{
	struct cryp_key_value key_value;
	int cryp_error;

	dev_dbg(device_data->dev, "[%s]", __func__);

	key_value.key_value_left = left_key;
	key_value.key_value_right = right_key;

	cryp_error = cryp_configure_key_values(device_data,
					       index,
					       key_value);
	if (cryp_error != 0)
		dev_err(device_data->dev, "[%s]: "
			"cryp_configure_key_values() failed!", __func__);

	return cryp_error;
}
static int cfg_keys(struct cryp_ctx *ctx)
{
	int i;
	int num_of_regs = ctx->keylen / 8;
	u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
	int cryp_error = 0;

	dev_dbg(ctx->device->dev, "[%s]", __func__);

	if (mode_is_aes(ctx->config.algomode)) {
		swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
						   (u8 *)swapped_key,
						   ctx->keylen);
	} else {
		for (i = 0; i < ctx->keylen / 4; i++)
			swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
	}

	for (i = 0; i < num_of_regs; i++) {
		cryp_error = set_key(ctx->device,
				     *(((u32 *)swapped_key)+i*2),
				     *(((u32 *)swapped_key)+i*2+1),
				     (enum cryp_key_reg_index) i);

		if (cryp_error != 0) {
			dev_err(ctx->device->dev, "[%s]: set_key() failed!",
				__func__);
			return cryp_error;
		}
	}
	return cryp_error;
}
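
/*
 * Program the hardware for this context. A fresh context (updated == 0)
 * gets its keys and IVs written and a new session id; a context whose
 * session lost the hardware to another context is restored from its saved
 * device context; otherwise the previously saved control register is
 * simply reused.
 */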
static int cryp_setup_context(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	u32 control_register = CRYP_CR_DEFAULT;

	switch (cryp_mode) {
	case CRYP_MODE_INTERRUPT:
		writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
		break;

	case CRYP_MODE_DMA:
		writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
		break;

	default:
		break;
	}

	if (ctx->updated == 0) {
		cryp_flush_inoutfifo(device_data);
		if (cfg_keys(ctx) != 0) {
			dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
				__func__);
			return -EINVAL;
		}

		if (ctx->iv &&
		    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
		    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
			if (cfg_ivs(device_data, ctx) != 0)
				return -EPERM;
		}

		cryp_set_configuration(device_data, &ctx->config,
				       &control_register);
		add_session_id(ctx);
	} else if (ctx->updated == 1 &&
		   ctx->session_id != atomic_read(&session_id)) {
		cryp_flush_inoutfifo(device_data);
		cryp_restore_device_context(device_data, &ctx->dev_ctx);

		add_session_id(ctx);
		control_register = ctx->dev_ctx.cr;
	} else
		control_register = ctx->dev_ctx.cr;

	writel(control_register |
	       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
	       &device_data->base->cr);

	return 0;
}
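
/*
 * Allocate one of the registered devices. The semaphore counts free
 * devices, so after a successful down_interruptible() the klist walk
 * below is guaranteed to find a device whose current_ctx is NULL.
 */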
static int cryp_get_device_data(struct cryp_ctx *ctx,
				struct cryp_device_data **device_data)
{
	int ret;
	struct klist_iter device_iterator;
	struct klist_node *device_node;
	struct cryp_device_data *local_device_data = NULL;
	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	/* Wait until a device is available */
	ret = down_interruptible(&driver_data.device_allocation);
	if (ret)
		return ret; /* Interrupted */

	/* Select a device */
	klist_iter_init(&driver_data.device_list, &device_iterator);

	device_node = klist_next(&device_iterator);
	while (device_node) {
		local_device_data = container_of(device_node,
					struct cryp_device_data, list_node);
		spin_lock(&local_device_data->ctx_lock);
		/* current_ctx allocates a device, NULL = unallocated */
		if (local_device_data->current_ctx) {
			device_node = klist_next(&device_iterator);
		} else {
			local_device_data->current_ctx = ctx;
			ctx->device = local_device_data;
			spin_unlock(&local_device_data->ctx_lock);
			break;
		}
		spin_unlock(&local_device_data->ctx_lock);
	}
	klist_iter_exit(&device_iterator);

	if (!device_node) {
		/*
		 * No free device found.
		 * Since we allocated a device with down_interruptible, this
		 * should not be able to happen.
		 * Number of available devices, which are contained in
		 * device_allocation, is therefore decremented by not doing
		 * an up(device_allocation).
		 */
		return -EBUSY;
	}

	*device_data = local_device_data;

	return 0;
}
static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
				   struct device *dev)
{
	dma_cap_zero(device_data->dma.mask);
	dma_cap_set(DMA_SLAVE, device_data->dma.mask);

	/* stedma40_filter matches the ste_dma40 channels described by the
	 * platform data. */
	device_data->dma.cfg_mem2cryp = mem_to_engine;
	device_data->dma.chan_mem2cryp =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_mem2cryp);

	device_data->dma.cfg_cryp2mem = engine_to_mem;
	device_data->dma.chan_cryp2mem =
		dma_request_channel(device_data->dma.mask,
				    stedma40_filter,
				    device_data->dma.cfg_cryp2mem);

	init_completion(&device_data->dma.cryp_dma_complete);
}
static void cryp_dma_out_callback(void *data)
{
	struct cryp_ctx *ctx = (struct cryp_ctx *) data;
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	complete(&ctx->device->dma.cryp_dma_complete);
}
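
/*
 * Map the scatterlist for the requested direction and queue a slave-sg
 * descriptor on the matching DMA channel. Only the cryp-to-memory (read)
 * direction installs a completion callback: once all output has been read,
 * the corresponding input transfer must have completed as well.
 */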
static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
				 struct scatterlist *sg,
				 int len,
				 enum dma_data_direction direction)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *channel = NULL;
	dma_cookie_t cookie;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (unlikely(!IS_ALIGNED((u32)sg, 4))) {
		dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
			"aligned! Addr: 0x%08x", __func__, (u32)sg);
		return -EFAULT;
	}

	switch (direction) {
	case DMA_TO_DEVICE:
		channel = ctx->device->dma.chan_mem2cryp;
		ctx->device->dma.sg_src = sg;
		ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
						 ctx->device->dma.sg_src,
						 ctx->device->dma.nents_src,
						 direction);

		if (!ctx->device->dma.sg_src_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (TO_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(TO_DEVICE)", __func__);

		desc = channel->device->device_prep_slave_sg(channel,
					ctx->device->dma.sg_src,
					ctx->device->dma.sg_src_len,
					direction, DMA_CTRL_ACK, NULL);
		break;

	case DMA_FROM_DEVICE:
		channel = ctx->device->dma.chan_cryp2mem;
		ctx->device->dma.sg_dst = sg;
		ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
						 ctx->device->dma.sg_dst,
						 ctx->device->dma.nents_dst,
						 direction);

		if (!ctx->device->dma.sg_dst_len) {
			dev_dbg(ctx->device->dev,
				"[%s]: Could not map the sg list (FROM_DEVICE)",
				__func__);
			return -EFAULT;
		}

		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
			"(FROM_DEVICE)", __func__);

		desc = channel->device->device_prep_slave_sg(channel,
					ctx->device->dma.sg_dst,
					ctx->device->dma.sg_dst_len,
					direction,
					DMA_CTRL_ACK |
					DMA_PREP_INTERRUPT, NULL);

		desc->callback = cryp_dma_out_callback;
		desc->callback_param = ctx;
		break;

	default:
		dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
			__func__);
		return -EFAULT;
	}

	cookie = desc->tx_submit(desc);
	dma_async_issue_pending(channel);

	return 0;
}
static void cryp_dma_done(struct cryp_ctx *ctx)
{
	struct dma_chan *chan;

	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	chan = ctx->device->dma.chan_mem2cryp;
	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
		     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);

	chan = ctx->device->dma.chan_cryp2mem;
	chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
		     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
}
static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
			  int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
	dev_dbg(ctx->device->dev, "[%s]: ", __func__);

	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}

static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
{
	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
	if (error) {
		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
			"failed", __func__);
		return error;
	}

	return len;
}
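
/*
 * Polling mode: push one block worth of words into the input FIFO, wait
 * for the engine to finish, then pull the result back out of the output
 * FIFO, repeating until all of ctx->datalen has been processed.
 */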
static void cryp_polling_mode(struct cryp_ctx *ctx,
			      struct cryp_device_data *device_data)
{
	int len = ctx->blocksize / BYTES_PER_WORD;
	int remaining_length = ctx->datalen;
	u32 *indata = (u32 *)ctx->indata;
	u32 *outdata = (u32 *)ctx->outdata;

	while (remaining_length > 0) {
		writesl(&device_data->base->din, indata, len);
		indata += len;
		remaining_length -= (len * BYTES_PER_WORD);
		cryp_wait_until_done(device_data);

		readsl(&device_data->base->dout, outdata, len);
		outdata += len;
		cryp_wait_until_done(device_data);
	}
}
static int cryp_disable_power(struct device *dev,
			      struct cryp_device_data *device_data,
			      bool save_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state)
		goto out;

	spin_lock(&device_data->ctx_lock);
	if (save_device_context && device_data->current_ctx) {
		cryp_save_device_context(device_data,
				&device_data->current_ctx->dev_ctx,
				cryp_mode);
		device_data->restore_dev_ctx = true;
	}
	spin_unlock(&device_data->ctx_lock);

	clk_disable(device_data->clk);
	ret = regulator_disable(device_data->pwr_regulator);
	if (ret)
		dev_err(dev, "[%s]: "
			"regulator_disable() failed!",
			__func__);

	device_data->power_state = false;

out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}
static int cryp_enable_power(
		struct device *dev,
		struct cryp_device_data *device_data,
		bool restore_device_context)
{
	int ret = 0;

	dev_dbg(dev, "[%s]", __func__);

	spin_lock(&device_data->power_state_spinlock);
	if (!device_data->power_state) {
		ret = regulator_enable(device_data->pwr_regulator);
		if (ret) {
			dev_err(dev, "[%s]: regulator_enable() failed!",
				__func__);
			goto out;
		}

		ret = clk_enable(device_data->clk);
		if (ret) {
			dev_err(dev, "[%s]: clk_enable() failed!",
				__func__);
			regulator_disable(device_data->pwr_regulator);
			goto out;
		}
		device_data->power_state = true;
	}

	if (device_data->restore_dev_ctx) {
		spin_lock(&device_data->ctx_lock);
		if (restore_device_context && device_data->current_ctx) {
			device_data->restore_dev_ctx = false;
			cryp_restore_device_context(device_data,
					&device_data->current_ctx->dev_ctx);
		}
		spin_unlock(&device_data->ctx_lock);
	}
out:
	spin_unlock(&device_data->power_state_spinlock);

	return ret;
}
static int hw_crypt_noxts(struct cryp_ctx *ctx,
			  struct cryp_device_data *device_data)
{
	int ret = 0;

	const u8 *indata = ctx->indata;
	u8 *outdata = ctx->outdata;
	u32 datalen = ctx->datalen;
	u32 outlen = datalen;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->outlen = ctx->datalen;

	if (unlikely(!IS_ALIGNED((u32)indata, 4))) {
		pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
			 "0x%08x", __func__, (u32)indata);
		return -EINVAL;
	}

	ret = cryp_setup_context(ctx, device_data);
	if (ret)
		goto out;

	if (cryp_mode == CRYP_MODE_INTERRUPT) {
		cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
				    CRYP_IRQ_SRC_OUTPUT_FIFO);

		/*
		 * ctx->outlen is decremented in the cryp_interrupt_handler
		 * function. We had to add cpu_relax() (barrier) to make sure
		 * that gcc didn't optimize away this variable.
		 */
		while (ctx->outlen > 0)
			cpu_relax();
	} else if (cryp_mode == CRYP_MODE_POLLING ||
		   cryp_mode == CRYP_MODE_DMA) {
		/*
		 * The reason for having DMA in this if case is that if we are
		 * running cryp_mode = 2, then we separate DMA routines for
		 * handling cipher/plaintext > blocksize, except when
		 * running the normal CRYPTO_ALG_TYPE_CIPHER, then we still use
		 * the polling mode. Overhead of doing DMA setup eats up the
		 * benefit of using it.
		 */
		cryp_polling_mode(ctx, device_data);
	} else {
		dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
			__func__);
		ret = -EPERM;
		goto out;
	}

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	ctx->indata = indata;
	ctx->outdata = outdata;
	ctx->datalen = datalen;
	ctx->outlen = outlen;

	return ret;
}
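
/* Count the scatterlist entries needed to cover nbytes of data. */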
static int get_nents(struct scatterlist *sg, int nbytes)
{
	int nents = 0;

	while (nbytes > 0) {
		nbytes -= sg->length;
		sg = scatterwalk_sg_next(sg);
		nents++;
	}

	return nents;
}
static int ablk_dma_crypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct cryp_device_data *device_data;

	int bytes_written = 0;
	int bytes_read = 0;
	int ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->datalen = areq->nbytes;
	ctx->outlen = areq->nbytes;

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		return ret;

	ret = cryp_setup_context(ctx, device_data);
	if (ret)
		goto out;

	/* We have the device now, so store the nents in the dma struct. */
	ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
	ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);

	/* Enable DMA in- and output. */
	cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);

	bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
	bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);

	wait_for_completion(&ctx->device->dma.cryp_dma_complete);
	cryp_dma_done(ctx);

	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
	ctx->updated = 1;

out:
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	if (unlikely(bytes_written != bytes_read))
		return -EPERM;

	return 0;
}
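
/*
 * Non-DMA path: walk the src/dst scatterlists page by page and feed whole
 * blocks to hw_crypt_noxts(), which transfers them in interrupt or polling
 * mode depending on cryp_mode.
 */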
static int ablk_crypt(struct ablkcipher_request *areq)
{
	struct ablkcipher_walk walk;
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct cryp_device_data *device_data;
	unsigned long src_paddr;
	unsigned long dst_paddr;
	int ret;
	int nbytes;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ret = cryp_get_device_data(ctx, &device_data);
	if (ret)
		return ret;

	ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
	ret = ablkcipher_walk_phys(areq, &walk);

	if (ret) {
		pr_err(DEV_DBG_NAME "[%s]: ablkcipher_walk_phys() failed!",
			__func__);
		goto out;
	}

	while ((nbytes = walk.nbytes) > 0) {
		ctx->iv = walk.iv;
		src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
		ctx->indata = phys_to_virt(src_paddr);

		dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
		ctx->outdata = phys_to_virt(dst_paddr);

		ctx->datalen = nbytes - (nbytes % ctx->blocksize);

		ret = hw_crypt_noxts(ctx, device_data);
		if (ret)
			goto out;

		nbytes -= ctx->datalen;
		ret = ablkcipher_walk_done(areq, &walk, nbytes);
		if (ret)
			goto out;
	}
	ablkcipher_walk_complete(&walk);

out:
	/* Release the device */
	spin_lock(&device_data->ctx_lock);
	device_data->current_ctx = NULL;
	ctx->device = NULL;
	spin_unlock(&device_data->ctx_lock);

	/*
	 * The down_interruptible part for this semaphore is called in
	 * cryp_get_device_data.
	 */
	up(&driver_data.device_allocation);

	return ret;
}
static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 *flags = &cipher->base.crt_flags;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	switch (keylen) {
	case AES_KEYSIZE_128:
		ctx->config.keysize = CRYP_KEY_SIZE_128;
		break;

	case AES_KEYSIZE_192:
		ctx->config.keysize = CRYP_KEY_SIZE_192;
		break;

	case AES_KEYSIZE_256:
		ctx->config.keysize = CRYP_KEY_SIZE_256;
		break;

	default:
		pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;

	return 0;
}
static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 *flags = &cipher->base.crt_flags;
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);
	if (keylen != DES_KEY_SIZE) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
			 __func__);
		return -EINVAL;
	}

	ret = des_ekey(tmp, key);
	if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
			 __func__);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}
static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
				  const u8 *key, unsigned int keylen)
{
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 *flags = &cipher->base.crt_flags;
	const u32 *K = (const u32 *)key;
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	int i, ret;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);
	if (keylen != DES3_EDE_KEY_SIZE) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
			 __func__);
		return -EINVAL;
	}

	/*
	 * Checking key interdependency for weak key detection, i.e. that
	 * K1 != K2 and K2 != K3; equal halves reduce 3DES to single DES.
	 */
	if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
		     !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
			 __func__);
		return -EINVAL;
	}
	for (i = 0; i < 3; i++) {
		ret = des_ekey(tmp, key + i*DES_KEY_SIZE);
		if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
			*flags |= CRYPTO_TFM_RES_WEAK_KEY;
			pr_debug(DEV_DBG_NAME " [%s]: "
				 "CRYPTO_TFM_REQ_WEAK_KEY", __func__);
			return -EINVAL;
		}
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->updated = 0;
	return 0;
}
static int cryp_blk_encrypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;

	/* DMA does not work for DES due to a hw bug */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything except DMA, we run the non DMA version. */
	return ablk_crypt(areq);
}

static int cryp_blk_decrypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;

	/* DMA does not work for DES due to a hw bug */
	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
		return ablk_dma_crypt(areq);

	/* For everything except DMA, we run the non DMA version. */
	return ablk_crypt(areq);
}
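
/* Binds a hardware algorithm mode to the crypto_alg it is exposed as. */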
struct cryp_algo_template {
	enum cryp_algo_mode algomode;
	struct crypto_alg crypto;
};
static int cryp_cra_init(struct crypto_tfm *tfm)
{
	struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cryp_algo_template *cryp_alg = container_of(alg,
			struct cryp_algo_template,
			crypto);

	ctx->config.algomode = cryp_alg->algomode;
	ctx->blocksize = crypto_tfm_alg_blocksize(tfm);

	return 0;
}
static struct cryp_algo_template cryp_algs[] = {
	{
		.algomode = CRYP_ALGO_AES_ECB,
		.crypto = {
			.cra_name = "aes",
			.cra_driver_name = "aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_AES_ECB,
		.crypto = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CBC,
		.crypto = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					.ivsize = AES_BLOCK_SIZE,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_AES_CTR,
		.crypto = {
			.cra_name = "ctr(aes)",
			.cra_driver_name = "ctr-aes-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = AES_MIN_KEY_SIZE,
					.max_keysize = AES_MAX_KEY_SIZE,
					.setkey = aes_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					.ivsize = AES_BLOCK_SIZE,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_DES_ECB,
		.crypto = {
			.cra_name = "des",
			.cra_driver_name = "des-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES_KEY_SIZE,
					.max_keysize = DES_KEY_SIZE,
					.setkey = des_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_ECB,
		.crypto = {
			.cra_name = "des3_ede",
			.cra_driver_name = "des3_ede-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES3_EDE_KEY_SIZE,
					.max_keysize = DES3_EDE_KEY_SIZE,
					/* A 3DES key is 24 bytes, so use the
					 * des3 setkey, not the single-DES one.
					 */
					.setkey = des3_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_DES_ECB,
		.crypto = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "ecb-des-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES_KEY_SIZE,
					.max_keysize = DES_KEY_SIZE,
					.setkey = des_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_ECB,
		.crypto = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3_ede-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES3_EDE_KEY_SIZE,
					.max_keysize = DES3_EDE_KEY_SIZE,
					.setkey = des3_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_DES_CBC,
		.crypto = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "cbc-des-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES_KEY_SIZE,
					.max_keysize = DES_KEY_SIZE,
					.setkey = des_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					/* CBC needs an IV of one block. */
					.ivsize = DES_BLOCK_SIZE,
				}
			}
		}
	},
	{
		.algomode = CRYP_ALGO_TDES_CBC,
		.crypto = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3_ede-ux500",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
					CRYPTO_ALG_ASYNC,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct cryp_ctx),
			.cra_alignmask = 3,
			.cra_type = &crypto_ablkcipher_type,
			.cra_init = cryp_cra_init,
			.cra_module = THIS_MODULE,
			.cra_u = {
				.ablkcipher = {
					.min_keysize = DES3_EDE_KEY_SIZE,
					.max_keysize = DES3_EDE_KEY_SIZE,
					.setkey = des3_ablkcipher_setkey,
					.encrypt = cryp_blk_encrypt,
					.decrypt = cryp_blk_decrypt,
					.ivsize = DES3_EDE_BLOCK_SIZE,
				}
			}
		}
	}
};
/**
 * cryp_algs_register_all - register all algorithms with the crypto API
 */
static int cryp_algs_register_all(void)
{
	int ret;
	int i;
	int count;

	pr_debug("[%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
		ret = crypto_register_alg(&cryp_algs[i].crypto);
		if (ret) {
			count = i;
			pr_err("[%s] alg registration failed",
				cryp_algs[i].crypto.cra_driver_name);
			goto unreg;
		}
	}
	return 0;
unreg:
	for (i = 0; i < count; i++)
		crypto_unregister_alg(&cryp_algs[i].crypto);
	return ret;
}
/**
 * cryp_algs_unregister_all - unregister all algorithms from the crypto API
 */
static void cryp_algs_unregister_all(void)
{
	int i;

	pr_debug(DEV_DBG_NAME " [%s]", __func__);

	for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
		crypto_unregister_alg(&cryp_algs[i].crypto);
}
static int ux500_cryp_probe(struct platform_device *pdev)
{
	int ret;
	int cryp_error = 0;
	struct resource *res = NULL;
	struct resource *res_irq = NULL;
	struct cryp_device_data *device_data;
	struct cryp_protection_config prot = {
		.privilege_access = CRYP_STATE_ENABLE
	};
	struct device *dev = &pdev->dev;

	dev_dbg(dev, "[%s]", __func__);
	device_data = kzalloc(sizeof(struct cryp_device_data), GFP_ATOMIC);
	if (!device_data) {
		dev_err(dev, "[%s]: kzalloc() failed!", __func__);
		ret = -ENOMEM;
		goto out;
	}

	device_data->dev = dev;
	device_data->current_ctx = NULL;

	/* Grab the DMA configuration from platform data. */
	mem_to_engine = &((struct cryp_platform_data *)
			 dev->platform_data)->mem_to_engine;
	engine_to_mem = &((struct cryp_platform_data *)
			 dev->platform_data)->engine_to_mem;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "[%s]: platform_get_resource() failed",
			__func__);
		ret = -ENODEV;
		goto out_kfree;
	}

	res = request_mem_region(res->start, resource_size(res), pdev->name);
	if (res == NULL) {
		dev_err(dev, "[%s]: request_mem_region() failed",
			__func__);
		ret = -EBUSY;
		goto out_kfree;
	}

	device_data->base = ioremap(res->start, resource_size(res));
	if (!device_data->base) {
		dev_err(dev, "[%s]: ioremap failed!", __func__);
		ret = -ENOMEM;
		goto out_free_mem;
	}

	spin_lock_init(&device_data->ctx_lock);
	spin_lock_init(&device_data->power_state_spinlock);

	/* Enable power for CRYP hardware block */
	device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
	if (IS_ERR(device_data->pwr_regulator)) {
		dev_err(dev, "[%s]: could not get cryp regulator", __func__);
		ret = PTR_ERR(device_data->pwr_regulator);
		device_data->pwr_regulator = NULL;
		goto out_unmap;
	}

	/* Enable the clk for CRYP hardware block */
	device_data->clk = clk_get(&pdev->dev, NULL);
	if (IS_ERR(device_data->clk)) {
		dev_err(dev, "[%s]: clk_get() failed!", __func__);
		ret = PTR_ERR(device_data->clk);
		goto out_regulator;
	}

	/* Enable device power (and clock) */
	ret = cryp_enable_power(device_data->dev, device_data, false);
	if (ret) {
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
		goto out_clk;
	}

	cryp_error = cryp_check(device_data);
	if (cryp_error != 0) {
		dev_err(dev, "[%s]: cryp_init() failed!", __func__);
		ret = -EINVAL;
		goto out_power;
	}

	cryp_error = cryp_configure_protection(device_data, &prot);
	if (cryp_error != 0) {
		dev_err(dev, "[%s]: cryp_configure_protection() failed!",
			__func__);
		ret = -EINVAL;
		goto out_power;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq) {
		dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable",
			__func__);
		ret = -ENODEV;
		goto out_power;
	}

	ret = request_irq(res_irq->start,
			  cryp_interrupt_handler,
			  0,
			  "cryp1",
			  device_data);
	if (ret) {
		dev_err(dev, "[%s]: Unable to request IRQ", __func__);
		goto out_power;
	}

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_dma_setup_channel(device_data, dev);

	platform_set_drvdata(pdev, device_data);

	/* Put the new device into the device list... */
	klist_add_tail(&device_data->list_node, &driver_data.device_list);

	/* ... and signal that a new device is available. */
	up(&driver_data.device_allocation);

	atomic_set(&session_id, 1);

	ret = cryp_algs_register_all();
	if (ret) {
		dev_err(dev, "[%s]: cryp_algs_register_all() failed!",
			__func__);
		goto out_power;
	}

	return 0;

out_power:
	cryp_disable_power(device_data->dev, device_data, false);

out_clk:
	clk_put(device_data->clk);

out_regulator:
	regulator_put(device_data->pwr_regulator);

out_unmap:
	iounmap(device_data->base);

out_free_mem:
	release_mem_region(res->start, resource_size(res));

out_kfree:
	kfree(device_data);
out:
	return ret;
}
static int ux500_cryp_remove(struct platform_device *pdev)
{
	struct resource *res = NULL;
	struct resource *res_irq = NULL;
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);
	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return -ENOMEM;
	}

	/* Try to decrease the number of available devices. */
	if (down_trylock(&driver_data.device_allocation))
		return -EBUSY;

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (device_data->current_ctx) {
		/* The device is busy */
		spin_unlock(&device_data->ctx_lock);
		/* Return the device to the pool. */
		up(&driver_data.device_allocation);
		return -EBUSY;
	}

	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ, unavailable",
			__func__);
	else {
		disable_irq(res_irq->start);
		free_irq(res_irq->start, device_data);
	}

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);

	clk_put(device_data->clk);
	regulator_put(device_data->pwr_regulator);

	iounmap(device_data->base);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (res)
		release_mem_region(res->start, resource_size(res));

	kfree(device_data);

	return 0;
}
static void ux500_cryp_shutdown(struct platform_device *pdev)
{
	struct resource *res_irq = NULL;
	struct cryp_device_data *device_data;

	dev_dbg(&pdev->dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
			__func__);
		return;
	}

	/* Check that the device is free */
	spin_lock(&device_data->ctx_lock);
	/* current_ctx allocates a device, NULL = unallocated */
	if (!device_data->current_ctx) {
		if (down_trylock(&driver_data.device_allocation))
			dev_dbg(&pdev->dev, "[%s]: Cryp still in use! "
				"Shutting down anyway...", __func__);
		/*
		 * (Allocate the device)
		 * Need to set this to a non-NULL (dummy) value,
		 * to avoid usage during a context switch.
		 */
		device_data->current_ctx++;
	}
	spin_unlock(&device_data->ctx_lock);

	/* Remove the device from the list */
	if (klist_node_attached(&device_data->list_node))
		klist_remove(&device_data->list_node);

	/* If this was the last device, remove the services */
	if (list_empty(&driver_data.device_list.k_list))
		cryp_algs_unregister_all();

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ, unavailable",
			__func__);
	else {
		disable_irq(res_irq->start);
		free_irq(res_irq->start, device_data);
	}

	if (cryp_disable_power(&pdev->dev, device_data, false))
		dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
			__func__);
}
static int ux500_cryp_suspend(struct device *dev)
{
	int ret;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res_irq)
		dev_err(dev, "[%s]: IORESOURCE_IRQ, unavailable", __func__);
	else
		disable_irq(res_irq->start);

	spin_lock(&device_data->ctx_lock);
	if (!device_data->current_ctx)
		device_data->current_ctx++;
	spin_unlock(&device_data->ctx_lock);

	if (device_data->current_ctx == ++temp_ctx) {
		if (down_interruptible(&driver_data.device_allocation))
			dev_dbg(dev, "[%s]: down_interruptible() failed",
				__func__);
		ret = cryp_disable_power(dev, device_data, false);

	} else
		ret = cryp_disable_power(dev, device_data, true);

	if (ret)
		dev_err(dev, "[%s]: cryp_disable_power()", __func__);

	return ret;
}
static int ux500_cryp_resume(struct device *dev)
{
	int ret = 0;
	struct platform_device *pdev = to_platform_device(dev);
	struct cryp_device_data *device_data;
	struct resource *res_irq;
	struct cryp_ctx *temp_ctx = NULL;

	dev_dbg(dev, "[%s]", __func__);

	device_data = platform_get_drvdata(pdev);
	if (!device_data) {
		dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
		return -ENOMEM;
	}

	spin_lock(&device_data->ctx_lock);
	if (device_data->current_ctx == ++temp_ctx)
		device_data->current_ctx = NULL;
	spin_unlock(&device_data->ctx_lock);

	if (!device_data->current_ctx)
		up(&driver_data.device_allocation);

	ret = cryp_enable_power(dev, device_data, true);
	if (ret)
		dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
	else {
		res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
		if (res_irq)
			enable_irq(res_irq->start);
	}

	return ret;
}
static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);

static struct platform_driver cryp_driver = {
	.probe = ux500_cryp_probe,
	.remove = ux500_cryp_remove,
	.shutdown = ux500_cryp_shutdown,
	.driver = {
		.owner = THIS_MODULE,
		.name = "cryp1",
		.pm = &ux500_cryp_pm,
	}
};
static int __init ux500_cryp_mod_init(void)
{
	pr_debug("[%s] is called!", __func__);
	klist_init(&driver_data.device_list, NULL, NULL);
	/* Initialize the semaphore to 0 devices (locked state) */
	sema_init(&driver_data.device_allocation, 0);
	return platform_driver_register(&cryp_driver);
}

static void __exit ux500_cryp_mod_fini(void)
{
	pr_debug("[%s] is called!", __func__);
	platform_driver_unregister(&cryp_driver);
}
module_init(ux500_cryp_mod_init);
module_exit(ux500_cryp_mod_fini);
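
/*
 * cryp_mode selects how data is moved through the engine: polling,
 * interrupt driven, or DMA (see the CRYP_MODE_* values); it can be set
 * when the module is loaded.
 */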
module_param(cryp_mode, int, 0);

MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
MODULE_ALIAS("aes-all");
MODULE_ALIAS("des-all");

MODULE_LICENSE("GPL");