// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>

#define DRIVER_NAME "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES BIT(1)
#define FLG_DES BIT(2)
#define FLG_TDES BIT(3)
#define FLG_ECB BIT(4)
#define FLG_CBC BIT(5)
#define FLG_CTR BIT(6)
#define FLG_GCM BIT(7)
#define FLG_CCM BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA BIT(16)

/* Registers */
#define CRYP_CR 0x00000000
#define CRYP_SR 0x00000004
#define CRYP_DIN 0x00000008
#define CRYP_DOUT 0x0000000C
#define CRYP_DMACR 0x00000010
#define CRYP_IMSCR 0x00000014
#define CRYP_RISR 0x00000018
#define CRYP_MISR 0x0000001C
#define CRYP_K0LR 0x00000020
#define CRYP_K0RR 0x00000024
#define CRYP_K1LR 0x00000028
#define CRYP_K1RR 0x0000002C
#define CRYP_K2LR 0x00000030
#define CRYP_K2RR 0x00000034
#define CRYP_K3LR 0x00000038
#define CRYP_K3RR 0x0000003C
#define CRYP_IV0LR 0x00000040
#define CRYP_IV0RR 0x00000044
#define CRYP_IV1LR 0x00000048
#define CRYP_IV1RR 0x0000004C
#define CRYP_CSGCMCCM0R 0x00000050
#define CRYP_CSGCM0R 0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC 0x00000004
#define CR_TDES_ECB 0x00000000
#define CR_TDES_CBC 0x00000008
#define CR_DES_ECB 0x00000010
#define CR_DES_CBC 0x00000018
#define CR_AES_ECB 0x00000020
#define CR_AES_CBC 0x00000028
#define CR_AES_CTR 0x00000030
#define CR_AES_KP 0x00000038
#define CR_AES_GCM 0x00080000
#define CR_AES_CCM 0x00080008
#define CR_AES_UNKNOWN 0xFFFFFFFF
#define CR_ALGO_MASK 0x00080038
#define CR_DATA32 0x00000000
#define CR_DATA16 0x00000040
#define CR_DATA8 0x00000080
#define CR_DATA1 0x000000C0
#define CR_KEY128 0x00000000
#define CR_KEY192 0x00000100
#define CR_KEY256 0x00000200
#define CR_FFLUSH 0x00004000
#define CR_CRYPEN 0x00008000
#define CR_PH_INIT 0x00000000
#define CR_PH_HEADER 0x00010000
#define CR_PH_PAYLOAD 0x00020000
#define CR_PH_FINAL 0x00030000
#define CR_PH_MASK 0x00030000
#define CR_NBPBL_SHIFT 20

#define SR_BUSY 0x00000010
#define SR_OFNE 0x00000004

#define IMSCR_IN BIT(0)
#define IMSCR_OUT BIT(1)

#define MISR_IN BIT(0)
#define MISR_OUT BIT(1)

/* Misc */
#define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT 2
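/*
 * Number of bytes already consumed in the current input/output scatterlist
 * entry (i.e. the scatter_walk offset relative to the start of the entry).
 */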
#define _walked_in (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out (cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY 50

struct stm32_cryp_caps {
	bool swap_final;
	bool padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp *cryp;
	int keylen;
	u32 key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head list;
	struct device *dev;
	void __iomem *regs;
	struct clk *clk;
	unsigned long flags;
	u32 irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx *ctx;

	struct crypto_engine *engine;

	struct ablkcipher_request *req;
	struct aead_request *areq;

	size_t authsize;
	size_t hw_blocksize;

	size_t total_in;
	size_t total_in_save;
	size_t total_out;
	size_t total_out_save;

	struct scatterlist *in_sg;
	struct scatterlist *out_sg;
	struct scatterlist *out_sg_save;

	struct scatterlist in_sgl;
	struct scatterlist out_sgl;
	bool sgs_copied;

	int in_sg_len;
	int out_sg_len;

	struct scatter_walk in_walk;
	struct scatter_walk out_walk;

	u32 last_ctr[4];
	u32 gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head dev_list;
	spinlock_t lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
					  !(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
					  !(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
					  status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

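/*
 * Bind a tfm context to a CRYP instance: reuse the device already attached
 * to the context if there is one, otherwise take the first device found in
 * cryp_list.
 */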
static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
				    size_t align)
{
	int len = 0;

	if (!total)
		return 0;

	if (!IS_ALIGNED(total, align))
		return -EINVAL;

	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;

		if (!IS_ALIGNED(sg->length, align))
			return -EINVAL;

		len += sg->length;
		sg = sg_next(sg);
	}

	if (len != total)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
	int ret;

	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
				       cryp->hw_blocksize);
	if (ret)
		return ret;

	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
				       cryp->hw_blocksize);

	return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}

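/*
 * The data FIFOs are fed with 32-bit accesses and operate on full hardware
 * blocks. If the request scatterlists do not meet these alignment
 * constraints, copy the data into freshly allocated bounce buffers that do.
 */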
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
	void *buf_in, *buf_out;
	int pages, total_in, total_out;

	if (!stm32_cryp_check_io_aligned(cryp)) {
		cryp->sgs_copied = 0;
		return 0;
	}

	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
	pages = total_in ? get_order(total_in) : 1;
	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
	pages = total_out ? get_order(total_out) : 1;
	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

	if (!buf_in || !buf_out) {
		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
		cryp->sgs_copied = 0;
		return -EFAULT;
	}

	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

	sg_init_one(&cryp->in_sgl, buf_in, total_in);
	cryp->in_sg = &cryp->in_sgl;
	cryp->in_sg_len = 1;

	sg_init_one(&cryp->out_sgl, buf_out, total_out);
	cryp->out_sg_save = cryp->out_sg;
	cryp->out_sg = &cryp->out_sgl;
	cryp->out_sg_len = 1;

	cryp->sgs_copied = 1;

	return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct ablkcipher_request *req = cryp->req;
	u32 *tmp = req->info;

	if (!tmp)
		return;

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
	}
}

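/*
 * Load the key into the CRYP key registers: DES uses K1 only, while
 * AES/TDES keys are written backwards from K3RR so that shorter keys end
 * up in the least significant key registers.
 */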
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 cpu_to_be32(c->ctx->key[i - 1]));
	}
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (gcm init)\n");

	return ret;
}

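/*
 * CCM init also builds and feeds the B0 block defined by RFC 3610 /
 * NIST SP 800-38C: the flags byte carries the Adata bit and the encoded
 * tag length ((t - 2) / 2), and the trailing bytes hold the message
 * length (only a 16-bit length is encoded here).
 */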
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		if (!cryp->caps->padding_wa)
			*d = cpu_to_be32(*d);
		stm32_cryp_write(cryp, CRYP_DIN, *d++);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (ccm init)\n");

	return ret;
}

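/*
 * Program the hardware for the current request: key, data type, algorithm
 * and direction. This includes the AES key preparation step needed before
 * ECB/CBC decryption, and the GCM/CCM init and header phase setup.
 */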
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_resume_and_get(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		/* Phase 2 : header (authenticated data) */
		if (cryp->areq->assoclen) {
			cfg |= CR_PH_HEADER;
		} else if (stm32_cryp_get_input_text_len(cryp)) {
			cfg |= CR_PH_PAYLOAD;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		} else {
			cfg |= CR_PH_INIT;
		}

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cryp->flags &= ~FLG_CCM_PADDED_WA;

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
		stm32_cryp_get_iv(cryp);

	if (cryp->sgs_copied) {
		void *buf_in, *buf_out;
		int pages, len;

		buf_in = sg_virt(&cryp->in_sgl);
		buf_out = sg_virt(&cryp->out_sgl);

		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
			    cryp->total_out_save, 1);

		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_in, pages);

		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_out, pages);
	}

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
						   err);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;
	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_ablkcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_ablkcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

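/*
 * Common preparation for cipher and AEAD requests: compute the input and
 * output sizes, set up the scatterlist walks (bouncing the buffers if they
 * are unaligned) and initialize the hardware.
 */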
static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->total_in = req->nbytes;
		cryp->total_out = cryp->total_in;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  =   AssocData   ||    PlainText
		 *          <- assoclen ->   <- cryptlen ->
		 *          <-------- total_in ----------->
		 *
		 *  OUTPUT =   AssocData   ||   CipherText   ||    AuthTag
		 *          <- assoclen ->   <- cryptlen ->   <- authsize ->
		 *          <----------------- total_out ----------------->
		 *
		 * Decryption case:
		 *  INPUT  =   AssocData   ||   CipherText   ||    AuthTag
		 *          <- assoclen ->   <--------- cryptlen --------->
		 *                                            <- authsize ->
		 *          <----------------- total_in ------------------>
		 *
		 *  OUTPUT =   AssocData   ||    PlainText
		 *          <- assoclen ->   <- cryptlen - authsize ->
		 *          <---------- total_out ------------------>
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		cryp->total_in = areq->assoclen + areq->cryptlen;
		if (is_encrypt(cryp))
			/* Append auth tag to output */
			cryp->total_out = cryp->total_in + cryp->authsize;
		else
			/* No auth tag in output */
			cryp->total_out = cryp->total_in - cryp->authsize;
	}

	cryp->total_in_save = cryp->total_in;
	cryp->total_out_save = cryp->total_out;

	cryp->in_sg = req ? req->src : areq->src;
	cryp->out_sg = req ? req->dst : areq->dst;
	cryp->out_sg_save = cryp->out_sg;

	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
	if (cryp->in_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get in_sg_len\n");
		ret = cryp->in_sg_len;
		return ret;
	}

	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
	if (cryp->out_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get out_sg_len\n");
		ret = cryp->out_sg_len;
		return ret;
	}

	ret = stm32_cryp_copy_sgs(cryp);
	if (ret)
		return ret;

	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
		cryp->total_out -= cryp->areq->assoclen;
	}

	ret = stm32_cryp_hw_init(cryp);
	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);

	return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->areq->assoclen &&
		     !stm32_cryp_get_input_text_len(cryp))) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
				unsigned int n)
{
	scatterwalk_advance(&cryp->out_walk, n);

	if (unlikely(cryp->out_sg->length == _walked_out)) {
		cryp->out_sg = sg_next(cryp->out_sg);
		if (cryp->out_sg) {
			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
			return (sg_virt(cryp->out_sg) + _walked_out);
		}
	}

	return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
			       unsigned int n)
{
	scatterwalk_advance(&cryp->in_walk, n);

	if (unlikely(cryp->in_sg->length == _walked_in)) {
		cryp->in_sg = sg_next(cryp->in_sg);
		if (cryp->in_sg) {
			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
			return (sg_virt(cryp->in_sg) + _walked_in);
		}
	}

	return (u32 *)((u8 *)src + n);
}

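/*
 * Phase 4 (final): for GCM, feed the AAD and payload lengths in bits; for
 * CCM, feed CTR0 (the IV with a zeroed counter field). The resulting tag
 * is then written out on encryption, or compared against the expected tag
 * on decryption.
 */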
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit, *dst, d32;
	u8 *d8;
	unsigned int i, j;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - AES_BLOCK_SIZE;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u8 iv[AES_BLOCK_SIZE];
		u32 *iv32 = (u32 *)iv;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (!cryp->caps->padding_wa)
				*iv32 = cpu_to_be32(*iv32);
			stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		/* Get and write tag */
		dst = sg_virt(cryp->out_sg) + _walked_out;

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (cryp->total_out >= sizeof(u32)) {
				/* Read a full u32 */
				*dst = stm32_cryp_read(cryp, CRYP_DOUT);

				dst = stm32_cryp_next_out(cryp, dst,
							  sizeof(u32));
				cryp->total_out -= sizeof(u32);
			} else if (!cryp->total_out) {
				/* Empty fifo out (data from input padding) */
				stm32_cryp_read(cryp, CRYP_DOUT);
			} else {
				/* Read less than an u32 */
				d32 = stm32_cryp_read(cryp, CRYP_DOUT);
				d8 = (u8 *)&d32;

				for (j = 0; j < cryp->total_out; j++) {
					*((u8 *)dst) = *(d8++);
					dst = stm32_cryp_next_out(cryp, dst, 1);
				}
				cryp->total_out = 0;
			}
		}
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
					 cryp->total_in_save - cryp->authsize,
					 cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}

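/*
 * In CTR mode the hardware only increments the 32-bit counter word held in
 * IV1RR. Detect the wrap of that word and propagate the carry into the
 * upper IV words by reprogramming the whole counter.
 */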
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 d32, *dst;
	u8 *d8;
	size_t tag_size;

	/* Do not read the tag now (if any) */
	if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	dst = sg_virt(cryp->out_sg) + _walked_out;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
			/* Read a full u32 */
			*dst = stm32_cryp_read(cryp, CRYP_DOUT);

			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
			cryp->total_out -= sizeof(u32);
		} else if (cryp->total_out == tag_size) {
			/* Empty fifo out (data from input padding) */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
		} else {
			/* Read less than an u32 */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
			d8 = (u8 *)&d32;

			for (j = 0; j < cryp->total_out - tag_size; j++) {
				*((u8 *)dst) = *(d8++);
				dst = stm32_cryp_next_out(cryp, dst, 1);
			}
			cryp->total_out = tag_size;
		}
	}

	return !(cryp->total_out - tag_size) || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 *src;
	u8 d8[4];
	size_t tag_size;

	/* Do not write the tag (if any) */
	if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
			/* Write a full u32 */
			stm32_cryp_write(cryp, CRYP_DIN, *src);

			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
			cryp->total_in -= sizeof(u32);
		} else if (cryp->total_in == tag_size) {
			/* Write padding data */
			stm32_cryp_write(cryp, CRYP_DIN, 0);
		} else {
			/* Write less than an u32 */
			memset(d8, 0, sizeof(u32));
			for (j = 0; j < cryp->total_in - tag_size; j++) {
				d8[j] = *((u8 *)src);
				src = stm32_cryp_next_in(cryp, src, 1);
			}

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			cryp->total_in = tag_size;
		}
	}
}

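/*
 * GCM encryption of a final block shorter than 16 bytes, on hardware
 * without NPBLB support: as described in the reference manual, encrypt the
 * last block in CTR mode with the current counter value, then replay the
 * result as a padded block in the GCM final phase.
 */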
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, tmp[AES_BLOCK_32];
	size_t total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	stm32_cryp_irq_read_data(cryp);
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_in_save - total_in_ori,
				 total_in_ori, 0);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) write padded data */
	for (i = 0; i < AES_BLOCK_32; i++) {
		if (cryp->total_in)
			stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
		else
			stm32_cryp_write(cryp, CRYP_DIN, 0);

		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
	}

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

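/*
 * On hardware that supports it, write the number of padding bytes of the
 * last block (NPBLB) into the control register so that the engine can
 * discount them when computing the tag.
 */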
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg, payload_bytes;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
					   cryp->total_in;
	cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
	size_t last_total_out, total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
	cryp->flags |= FLG_CCM_PADDED_WA;

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	last_total_out = cryp->total_out;
	stm32_cryp_irq_read_data(cryp);

	memset(tmp, 0, sizeof(tmp));
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_out_save - last_total_out,
				 last_total_out, 0);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(tmp); i++) {
		tmp[i] ^= cstmp1[i];
		tmp[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->total_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa)
			/* Special case 1 */
			return stm32_cryp_irq_write_gcm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa)
			/* Special case 2 */
			return stm32_cryp_irq_write_ccm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i, j;
	u32 cfg, *src;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < AES_BLOCK_32; i++) {
		stm32_cryp_write(cryp, CRYP_DIN, *src);

		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

		/* Check if whole header written */
		if ((cryp->total_in_save - cryp->total_in) ==
				cryp->areq->assoclen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (gcm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}

		if (!cryp->total_in)
			break;
	}
}

static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i = 0, j, k;
	u32 alen, cfg, *src;
	u8 d8[4];

	src = sg_virt(cryp->in_sg) + _walked_in;
	alen = cryp->areq->assoclen;

	if (!_walked_in) {
		if (cryp->areq->assoclen <= 65280) {
			/* Write first u32 of B1 */
			d8[0] = (alen >> 8) & 0xFF;
			d8[1] = alen & 0xFF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		} else {
			/* Build the two first u32 of B1 */
			d8[0] = 0xFF;
			d8[1] = 0xFE;
			d8[2] = (alen >> 24) & 0xFF;
			d8[3] = (alen >> 16) & 0xFF;

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			d8[0] = (alen >> 8) & 0xFF;
			d8[1] = alen & 0xFF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		}
	}

	/* Write next u32 */
	for (; i < AES_BLOCK_32; i++) {
		/* Build an u32 */
		memset(d8, 0, sizeof(u32));
		for (k = 0; k < sizeof(u32); k++) {
			d8[k] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			cryp->total_in -= min_t(size_t, 1, cryp->total_in);
			if ((cryp->total_in_save - cryp->total_in) == alen)
				break;
		}

		stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);

		if ((cryp->total_in_save - cryp->total_in) == alen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (ccm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}
	}
}

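/*
 * Threaded IRQ handler: drain the output FIFO first, then refill the input
 * FIFO with header or payload data depending on the current GCM/CCM phase.
 */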
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, 0);
			return IRQ_HANDLED;
		}

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			cryp->gcm_ctr++;
		} else if (is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_ccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct crypto_alg crypto_algs[] = {
{
	.cra_name = "ecb(aes)",
	.cra_driver_name = "stm32-ecb-aes",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ecb_encrypt,
		.decrypt = stm32_cryp_aes_ecb_decrypt,
	}
},
{
	.cra_name = "cbc(aes)",
	.cra_driver_name = "stm32-cbc-aes",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_cbc_encrypt,
		.decrypt = stm32_cryp_aes_cbc_decrypt,
	}
},
{
	.cra_name = "ctr(aes)",
	.cra_driver_name = "stm32-ctr-aes",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ctr_encrypt,
		.decrypt = stm32_cryp_aes_ctr_decrypt,
	}
},
{
	.cra_name = "ecb(des)",
	.cra_driver_name = "stm32-ecb-des",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_ecb_encrypt,
		.decrypt = stm32_cryp_des_ecb_decrypt,
	}
},
{
	.cra_name = "cbc(des)",
	.cra_driver_name = "stm32-cbc-des",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_cbc_encrypt,
		.decrypt = stm32_cryp_des_cbc_decrypt,
	}
},
{
	.cra_name = "ecb(des3_ede)",
	.cra_driver_name = "stm32-ecb-des3",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_ecb_encrypt,
		.decrypt = stm32_cryp_tdes_ecb_decrypt,
	}
},
{
	.cra_name = "cbc(des3_ede)",
	.cra_driver_name = "stm32-cbc-des3",
	.cra_priority = 200,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
		     CRYPTO_ALG_ASYNC,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
	.cra_alignmask = 0xf,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_cbc_encrypt,
		.decrypt = stm32_cryp_tdes_cbc_decrypt,
	}
},
};

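/*
 * The AEAD algorithms below are consumed through the generic crypto API
 * rather than called directly. As a minimal illustrative sketch (not part
 * of this driver), a kernel user would do something like:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req;
 *
 *	crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_aead_setauthsize(tfm, 16);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	crypto_aead_encrypt(req);
 *
 * Error handling and asynchronous completion handling are elided.
 */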
static struct aead_alg aead_algs[] = {
{
	.setkey = stm32_cryp_aes_aead_setkey,
	.setauthsize = stm32_cryp_aes_gcm_setauthsize,
	.encrypt = stm32_cryp_aes_gcm_encrypt,
	.decrypt = stm32_cryp_aes_gcm_decrypt,
	.init = stm32_cryp_aes_aead_init,
	.ivsize = 12,
	.maxauthsize = AES_BLOCK_SIZE,

	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "stm32-gcm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
},
{
	.setkey = stm32_cryp_aes_aead_setkey,
	.setauthsize = stm32_cryp_aes_ccm_setauthsize,
	.encrypt = stm32_cryp_aes_ccm_encrypt,
	.decrypt = stm32_cryp_aes_ccm_decrypt,
	.init = stm32_cryp_aes_aead_init,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,

	.base = {
		.cra_name = "ccm(aes)",
		.cra_driver_name = "stm32-ccm-aes",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0xf,
		.cra_module = THIS_MODULE,
	},
},
};

static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_resume_and_get(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");