Lines matching refs: dev (AMCC crypto4xx crypto engine driver, crypto4xx_core.c)

50 static void crypto4xx_hw_init(struct crypto4xx_device *dev)  in crypto4xx_hw_init()  argument
60 writel(PPC4XX_BYTE_ORDER, dev->ce_base + CRYPTO4XX_BYTE_ORDER_CFG); in crypto4xx_hw_init()
71 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG); in crypto4xx_hw_init()
78 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG); in crypto4xx_hw_init()
79 writel(dev->pdr_pa, dev->ce_base + CRYPTO4XX_PDR_BASE); in crypto4xx_hw_init()
80 writel(dev->pdr_pa, dev->ce_base + CRYPTO4XX_RDR_BASE); in crypto4xx_hw_init()
81 writel(PPC4XX_PRNG_CTRL_AUTO_EN, dev->ce_base + CRYPTO4XX_PRNG_CTRL); in crypto4xx_hw_init()
83 writel(rand_num, dev->ce_base + CRYPTO4XX_PRNG_SEED_L); in crypto4xx_hw_init()
85 writel(rand_num, dev->ce_base + CRYPTO4XX_PRNG_SEED_H); in crypto4xx_hw_init()
89 writel(ring_size.w, dev->ce_base + CRYPTO4XX_RING_SIZE); in crypto4xx_hw_init()
91 writel(ring_ctrl.w, dev->ce_base + CRYPTO4XX_RING_CTRL); in crypto4xx_hw_init()
92 device_ctrl = readl(dev->ce_base + CRYPTO4XX_DEVICE_CTRL); in crypto4xx_hw_init()
94 writel(device_ctrl, dev->ce_base + CRYPTO4XX_DEVICE_CTRL); in crypto4xx_hw_init()
95 writel(dev->gdr_pa, dev->ce_base + CRYPTO4XX_GATH_RING_BASE); in crypto4xx_hw_init()
96 writel(dev->sdr_pa, dev->ce_base + CRYPTO4XX_SCAT_RING_BASE); in crypto4xx_hw_init()
100 writel(part_ring_size.w, dev->ce_base + CRYPTO4XX_PART_RING_SIZE); in crypto4xx_hw_init()
101 writel(PPC4XX_SD_BUFFER_SIZE, dev->ce_base + CRYPTO4XX_PART_RING_CFG); in crypto4xx_hw_init()
105 writel(io_threshold.w, dev->ce_base + CRYPTO4XX_IO_THRESHOLD); in crypto4xx_hw_init()
106 writel(0, dev->ce_base + CRYPTO4XX_PDR_BASE_UADDR); in crypto4xx_hw_init()
107 writel(0, dev->ce_base + CRYPTO4XX_RDR_BASE_UADDR); in crypto4xx_hw_init()
108 writel(0, dev->ce_base + CRYPTO4XX_PKT_SRC_UADDR); in crypto4xx_hw_init()
109 writel(0, dev->ce_base + CRYPTO4XX_PKT_DEST_UADDR); in crypto4xx_hw_init()
110 writel(0, dev->ce_base + CRYPTO4XX_SA_UADDR); in crypto4xx_hw_init()
111 writel(0, dev->ce_base + CRYPTO4XX_GATH_RING_BASE_UADDR); in crypto4xx_hw_init()
112 writel(0, dev->ce_base + CRYPTO4XX_SCAT_RING_BASE_UADDR); in crypto4xx_hw_init()
119 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG); in crypto4xx_hw_init()
121 writel(PPC4XX_INTERRUPT_CLR, dev->ce_base + CRYPTO4XX_INT_CLR); in crypto4xx_hw_init()
122 writel(PPC4XX_INT_DESCR_CNT, dev->ce_base + CRYPTO4XX_INT_DESCR_CNT); in crypto4xx_hw_init()
123 writel(PPC4XX_INT_DESCR_CNT, dev->ce_base + CRYPTO4XX_INT_DESCR_CNT); in crypto4xx_hw_init()
124 writel(PPC4XX_INT_CFG, dev->ce_base + CRYPTO4XX_INT_CFG); in crypto4xx_hw_init()
125 if (dev->is_revb) { in crypto4xx_hw_init()
127 dev->ce_base + CRYPTO4XX_INT_TIMEOUT_CNT); in crypto4xx_hw_init()
129 dev->ce_base + CRYPTO4XX_INT_EN); in crypto4xx_hw_init()
131 writel(PPC4XX_PD_DONE_INT, dev->ce_base + CRYPTO4XX_INT_EN); in crypto4xx_hw_init()
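The crypto4xx_hw_init() matches above show the whole engine bring-up done through writel() on the ioremapped ce_base: ring bases and sizes, PRNG seeding, DMA configuration, then interrupt thresholds and enables (with a revB-only coalescing path). A minimal sketch of that MMIO-init pattern, assuming invented ex_* register offsets and bit values rather than the engine's real map:

#include <linux/bits.h>
#include <linux/io.h>

/* Illustrative register offsets and bits, not the crypto4xx register map. */
#define EX_RING_BASE 0x0100
#define EX_RING_SIZE 0x0104
#define EX_INT_EN    0x0108
#define EX_INT_DONE  BIT(0)

/* Program the descriptor ring and unmask the "descriptor done" interrupt. */
static void ex_hw_init(void __iomem *ce_base, u32 ring_pa, u32 nr_desc)
{
    writel(ring_pa, ce_base + EX_RING_BASE);
    writel(nr_desc, ce_base + EX_RING_SIZE);
    writel(EX_INT_DONE, ce_base + EX_INT_EN);
}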
167 static u32 crypto4xx_build_pdr(struct crypto4xx_device *dev) in crypto4xx_build_pdr() argument
170 dev->pdr = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
172 &dev->pdr_pa, GFP_KERNEL); in crypto4xx_build_pdr()
173 if (!dev->pdr) in crypto4xx_build_pdr()
176 dev->pdr_uinfo = kcalloc(PPC4XX_NUM_PD, sizeof(struct pd_uinfo), in crypto4xx_build_pdr()
178 if (!dev->pdr_uinfo) { in crypto4xx_build_pdr()
179 dma_free_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
181 dev->pdr, in crypto4xx_build_pdr()
182 dev->pdr_pa); in crypto4xx_build_pdr()
185 dev->shadow_sa_pool = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
187 &dev->shadow_sa_pool_pa, in crypto4xx_build_pdr()
189 if (!dev->shadow_sa_pool) in crypto4xx_build_pdr()
192 dev->shadow_sr_pool = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
194 &dev->shadow_sr_pool_pa, GFP_KERNEL); in crypto4xx_build_pdr()
195 if (!dev->shadow_sr_pool) in crypto4xx_build_pdr()
198 struct ce_pd *pd = &dev->pdr[i]; in crypto4xx_build_pdr()
199 struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[i]; in crypto4xx_build_pdr()
201 pd->sa = dev->shadow_sa_pool_pa + in crypto4xx_build_pdr()
205 pd_uinfo->sa_va = &dev->shadow_sa_pool[i].sa; in crypto4xx_build_pdr()
208 pd_uinfo->sr_va = &dev->shadow_sr_pool[i]; in crypto4xx_build_pdr()
209 pd_uinfo->sr_pa = dev->shadow_sr_pool_pa + in crypto4xx_build_pdr()
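crypto4xx_build_pdr() allocates the packet descriptor ring plus shadow SA and state-record pools with dma_alloc_coherent(), then points each descriptor at its own slice of the pools. A hedged sketch of that allocate-and-slice pattern; the ex_* structs and sizes are invented and make no claim to match the driver's layout:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

struct ex_desc { u32 sa; };          /* hardware descriptor (illustrative) */
struct ex_sa   { u8 bytes[256]; };   /* per-descriptor shadow SA slot      */

static int ex_build_ring(struct device *dev, int n,
                         struct ex_desc **ring, dma_addr_t *ring_pa,
                         struct ex_sa **pool, dma_addr_t *pool_pa)
{
    int i;

    *ring = dma_alloc_coherent(dev, n * sizeof(**ring), ring_pa, GFP_KERNEL);
    if (!*ring)
        return -ENOMEM;

    *pool = dma_alloc_coherent(dev, n * sizeof(**pool), pool_pa, GFP_KERNEL);
    if (!*pool) {
        dma_free_coherent(dev, n * sizeof(**ring), *ring, *ring_pa);
        return -ENOMEM;
    }

    /* Each descriptor carries the 32-bit bus address of its shadow SA. */
    for (i = 0; i < n; i++)
        (*ring)[i].sa = (u32)(*pool_pa + i * sizeof(struct ex_sa));

    return 0;
}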
216 static void crypto4xx_destroy_pdr(struct crypto4xx_device *dev) in crypto4xx_destroy_pdr() argument
218 if (dev->pdr) in crypto4xx_destroy_pdr()
219 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_pdr()
221 dev->pdr, dev->pdr_pa); in crypto4xx_destroy_pdr()
223 if (dev->shadow_sa_pool) in crypto4xx_destroy_pdr()
224 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_pdr()
226 dev->shadow_sa_pool, dev->shadow_sa_pool_pa); in crypto4xx_destroy_pdr()
228 if (dev->shadow_sr_pool) in crypto4xx_destroy_pdr()
229 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_pdr()
231 dev->shadow_sr_pool, dev->shadow_sr_pool_pa); in crypto4xx_destroy_pdr()
233 kfree(dev->pdr_uinfo); in crypto4xx_destroy_pdr()
236 static u32 crypto4xx_get_pd_from_pdr_nolock(struct crypto4xx_device *dev) in crypto4xx_get_pd_from_pdr_nolock() argument
241 retval = dev->pdr_head; in crypto4xx_get_pd_from_pdr_nolock()
242 tmp = (dev->pdr_head + 1) % PPC4XX_NUM_PD; in crypto4xx_get_pd_from_pdr_nolock()
244 if (tmp == dev->pdr_tail) in crypto4xx_get_pd_from_pdr_nolock()
247 dev->pdr_head = tmp; in crypto4xx_get_pd_from_pdr_nolock()
252 static u32 crypto4xx_put_pd_to_pdr(struct crypto4xx_device *dev, u32 idx) in crypto4xx_put_pd_to_pdr() argument
254 struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx]; in crypto4xx_put_pd_to_pdr()
258 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_put_pd_to_pdr()
261 if (dev->pdr_tail != PPC4XX_LAST_PD) in crypto4xx_put_pd_to_pdr()
262 dev->pdr_tail++; in crypto4xx_put_pd_to_pdr()
264 dev->pdr_tail = 0; in crypto4xx_put_pd_to_pdr()
265 tail = dev->pdr_tail; in crypto4xx_put_pd_to_pdr()
266 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_pd_to_pdr()
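crypto4xx_get_pd_from_pdr_nolock() and crypto4xx_put_pd_to_pdr() are the two halves of a classic head/tail ring: submission advances pdr_head (keeping one slot unused, so head == tail means empty), and completion advances pdr_tail under core_dev->lock. The index arithmetic as a standalone sketch, with illustrative ex_* names and sizes:

#define EX_NUM_PD    256
#define EX_RING_FULL 0xffffffffu

/* Reserve one slot; the caller is expected to hold the ring lock. */
static unsigned int ex_get_pd(unsigned int *head, unsigned int tail)
{
    unsigned int slot = *head;
    unsigned int next = (*head + 1) % EX_NUM_PD;

    if (next == tail)            /* would catch the tail: ring is full */
        return EX_RING_FULL;

    *head = next;
    return slot;
}

/* Retire the oldest in-flight slot and return the new tail. */
static unsigned int ex_put_pd(unsigned int *tail)
{
    *tail = (*tail + 1) % EX_NUM_PD;
    return *tail;
}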
276 static u32 crypto4xx_build_gdr(struct crypto4xx_device *dev) in crypto4xx_build_gdr() argument
278 dev->gdr = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_gdr()
280 &dev->gdr_pa, GFP_KERNEL); in crypto4xx_build_gdr()
281 if (!dev->gdr) in crypto4xx_build_gdr()
287 static inline void crypto4xx_destroy_gdr(struct crypto4xx_device *dev) in crypto4xx_destroy_gdr() argument
289 if (dev->gdr) in crypto4xx_destroy_gdr()
290 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_gdr()
292 dev->gdr, dev->gdr_pa); in crypto4xx_destroy_gdr()
299 static u32 crypto4xx_get_n_gd(struct crypto4xx_device *dev, int n) in crypto4xx_get_n_gd() argument
307 retval = dev->gdr_head; in crypto4xx_get_n_gd()
308 tmp = (dev->gdr_head + n) % PPC4XX_NUM_GD; in crypto4xx_get_n_gd()
309 if (dev->gdr_head > dev->gdr_tail) { in crypto4xx_get_n_gd()
310 if (tmp < dev->gdr_head && tmp >= dev->gdr_tail) in crypto4xx_get_n_gd()
312 } else if (dev->gdr_head < dev->gdr_tail) { in crypto4xx_get_n_gd()
313 if (tmp < dev->gdr_head || tmp >= dev->gdr_tail) in crypto4xx_get_n_gd()
316 dev->gdr_head = tmp; in crypto4xx_get_n_gd()
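crypto4xx_get_n_gd() reserves n contiguous gather descriptors at once, so the full-ring check must allow the new head to wrap around zero without running into the tail. A self-contained version of that check; the size and error value are illustrative:

#define EX_NUM_GD    1024
#define EX_RING_FULL 0xffffffffu

/*
 * Reserve n slots starting at *head.  Slots [tail, head) are in flight;
 * the new head has to land in the free region or the ring is full.
 */
static unsigned int ex_get_n_gd(unsigned int *head, unsigned int tail, int n)
{
    unsigned int first = *head;
    unsigned int next;

    if (n >= EX_NUM_GD)
        return EX_RING_FULL;

    next = (*head + n) % EX_NUM_GD;

    if (*head > tail) {
        /* free space is [head, NUM) plus [0, tail) */
        if (next < *head && next >= tail)
            return EX_RING_FULL;
    } else if (*head < tail) {
        /* free space is the single range [head, tail) */
        if (next < *head || next >= tail)
            return EX_RING_FULL;
    }
    /* head == tail: the ring is empty, any n < EX_NUM_GD fits */

    *head = next;
    return first;
}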
321 static u32 crypto4xx_put_gd_to_gdr(struct crypto4xx_device *dev) in crypto4xx_put_gd_to_gdr() argument
325 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_put_gd_to_gdr()
326 if (dev->gdr_tail == dev->gdr_head) { in crypto4xx_put_gd_to_gdr()
327 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_gd_to_gdr()
331 if (dev->gdr_tail != PPC4XX_LAST_GD) in crypto4xx_put_gd_to_gdr()
332 dev->gdr_tail++; in crypto4xx_put_gd_to_gdr()
334 dev->gdr_tail = 0; in crypto4xx_put_gd_to_gdr()
336 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_gd_to_gdr()
341 static inline struct ce_gd *crypto4xx_get_gdp(struct crypto4xx_device *dev, in crypto4xx_get_gdp() argument
344 *gd_dma = dev->gdr_pa + sizeof(struct ce_gd) * idx; in crypto4xx_get_gdp()
346 return &dev->gdr[idx]; in crypto4xx_get_gdp()
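crypto4xx_get_gdp() converts a ring index into both views of the same descriptor: the CPU pointer the driver fills in and the bus address the engine consumes. A short sketch of that mapping, with an invented descriptor layout:

#include <linux/types.h>

struct ex_gd { u32 ptr; u32 ctl_len; };   /* illustrative gather descriptor */

/* Return the CPU view of slot idx and report its bus address via *gd_dma. */
static struct ex_gd *ex_get_gdp(struct ex_gd *gdr, dma_addr_t gdr_pa,
                                dma_addr_t *gd_dma, unsigned int idx)
{
    *gd_dma = gdr_pa + sizeof(struct ex_gd) * idx;
    return &gdr[idx];
}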
354 static u32 crypto4xx_build_sdr(struct crypto4xx_device *dev) in crypto4xx_build_sdr() argument
358 dev->scatter_buffer_va = in crypto4xx_build_sdr()
359 dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_sdr()
361 &dev->scatter_buffer_pa, GFP_KERNEL); in crypto4xx_build_sdr()
362 if (!dev->scatter_buffer_va) in crypto4xx_build_sdr()
366 dev->sdr = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_sdr()
368 &dev->sdr_pa, GFP_KERNEL); in crypto4xx_build_sdr()
369 if (!dev->sdr) in crypto4xx_build_sdr()
373 dev->sdr[i].ptr = dev->scatter_buffer_pa + in crypto4xx_build_sdr()
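crypto4xx_build_sdr() carves one large coherent scatter buffer into fixed-size chunks and stores each chunk's bus address in sdr[i].ptr; crypto4xx_copy_pkt_to_dst() later reaches the same chunk through the coherent virtual mapping. A sketch of that pairing, assuming an invented chunk size:

#include <linux/types.h>

#define EX_NUM_SD      256
#define EX_SD_BUF_SIZE 2048      /* illustrative chunk size */

/* The engine sees chunk i by bus address... */
static void ex_wire_scatter(u32 *sd_ptrs, dma_addr_t buf_pa)
{
    unsigned int i;

    for (i = 0; i < EX_NUM_SD; i++)
        sd_ptrs[i] = (u32)(buf_pa + i * EX_SD_BUF_SIZE);
}

/* ...while the CPU reads it back at the matching virtual offset. */
static void *ex_chunk_cpu(void *buf_va, unsigned int i)
{
    return (char *)buf_va + i * EX_SD_BUF_SIZE;
}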
380 static void crypto4xx_destroy_sdr(struct crypto4xx_device *dev) in crypto4xx_destroy_sdr() argument
382 if (dev->sdr) in crypto4xx_destroy_sdr()
383 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_sdr()
385 dev->sdr, dev->sdr_pa); in crypto4xx_destroy_sdr()
387 if (dev->scatter_buffer_va) in crypto4xx_destroy_sdr()
388 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_sdr()
390 dev->scatter_buffer_va, in crypto4xx_destroy_sdr()
391 dev->scatter_buffer_pa); in crypto4xx_destroy_sdr()
398 static u32 crypto4xx_get_n_sd(struct crypto4xx_device *dev, int n) in crypto4xx_get_n_sd() argument
406 retval = dev->sdr_head; in crypto4xx_get_n_sd()
407 tmp = (dev->sdr_head + n) % PPC4XX_NUM_SD; in crypto4xx_get_n_sd()
408 if (dev->sdr_head > dev->gdr_tail) { in crypto4xx_get_n_sd()
409 if (tmp < dev->sdr_head && tmp >= dev->sdr_tail) in crypto4xx_get_n_sd()
411 } else if (dev->sdr_head < dev->sdr_tail) { in crypto4xx_get_n_sd()
412 if (tmp < dev->sdr_head || tmp >= dev->sdr_tail) in crypto4xx_get_n_sd()
415 dev->sdr_head = tmp; in crypto4xx_get_n_sd()
420 static u32 crypto4xx_put_sd_to_sdr(struct crypto4xx_device *dev) in crypto4xx_put_sd_to_sdr() argument
424 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_put_sd_to_sdr()
425 if (dev->sdr_tail == dev->sdr_head) { in crypto4xx_put_sd_to_sdr()
426 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_sd_to_sdr()
429 if (dev->sdr_tail != PPC4XX_LAST_SD) in crypto4xx_put_sd_to_sdr()
430 dev->sdr_tail++; in crypto4xx_put_sd_to_sdr()
432 dev->sdr_tail = 0; in crypto4xx_put_sd_to_sdr()
433 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_sd_to_sdr()
438 static inline struct ce_sd *crypto4xx_get_sdp(struct crypto4xx_device *dev, in crypto4xx_get_sdp() argument
441 *sd_dma = dev->sdr_pa + sizeof(struct ce_sd) * idx; in crypto4xx_get_sdp()
443 return &dev->sdr[idx]; in crypto4xx_get_sdp()
446 static void crypto4xx_copy_pkt_to_dst(struct crypto4xx_device *dev, in crypto4xx_copy_pkt_to_dst() argument
471 void *buf = dev->scatter_buffer_va + in crypto4xx_copy_pkt_to_dst()
500 static void crypto4xx_ret_sg_desc(struct crypto4xx_device *dev, in crypto4xx_ret_sg_desc() argument
506 crypto4xx_put_gd_to_gdr(dev); in crypto4xx_ret_sg_desc()
512 crypto4xx_put_sd_to_sdr(dev); in crypto4xx_ret_sg_desc()
519 static void crypto4xx_cipher_done(struct crypto4xx_device *dev, in crypto4xx_cipher_done() argument
529 crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, in crypto4xx_cipher_done()
533 dma_unmap_page(dev->core_dev->device, pd->dest, dst->length, in crypto4xx_cipher_done()
545 crypto4xx_ret_sg_desc(dev, pd_uinfo); in crypto4xx_cipher_done()
552 static void crypto4xx_ahash_done(struct crypto4xx_device *dev, in crypto4xx_ahash_done() argument
562 crypto4xx_ret_sg_desc(dev, pd_uinfo); in crypto4xx_ahash_done()
569 static void crypto4xx_aead_done(struct crypto4xx_device *dev, in crypto4xx_aead_done() argument
582 crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, in crypto4xx_aead_done()
586 dma_unmap_page(dev->core_dev->device, pd->dest, dst->length, in crypto4xx_aead_done()
609 crypto4xx_ret_sg_desc(dev, pd_uinfo); in crypto4xx_aead_done()
612 if (!__ratelimit(&dev->aead_ratelimit)) { in crypto4xx_aead_done()
633 static void crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx) in crypto4xx_pd_done() argument
635 struct ce_pd *pd = &dev->pdr[idx]; in crypto4xx_pd_done()
636 struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx]; in crypto4xx_pd_done()
640 crypto4xx_cipher_done(dev, pd_uinfo, pd); in crypto4xx_pd_done()
643 crypto4xx_aead_done(dev, pd_uinfo, pd); in crypto4xx_pd_done()
646 crypto4xx_ahash_done(dev, pd_uinfo); in crypto4xx_pd_done()
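crypto4xx_pd_done() routes a finished descriptor to the cipher, AEAD, or hash completion path recorded in its pd_uinfo. A trivial sketch of that dispatch with made-up request kinds and handlers:

enum ex_req_kind { EX_REQ_CIPHER, EX_REQ_AEAD, EX_REQ_HASH };

struct ex_uinfo {
    enum ex_req_kind kind;       /* recorded when the request was queued */
    void *req;                   /* the crypto API request to complete   */
};

static void ex_cipher_done(void *req) { /* unmap dst, copy IV, complete */ }
static void ex_aead_done(void *req)   { /* copy or verify ICV, complete */ }
static void ex_hash_done(void *req)   { /* copy digest out, complete    */ }

/* Route a finished descriptor to the completion path for its request type. */
static void ex_pd_done(struct ex_uinfo *u)
{
    switch (u->kind) {
    case EX_REQ_CIPHER:
        ex_cipher_done(u->req);
        break;
    case EX_REQ_AEAD:
        ex_aead_done(u->req);
        break;
    case EX_REQ_HASH:
        ex_hash_done(u->req);
        break;
    }
}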
653 crypto4xx_destroy_pdr(core_dev->dev); in crypto4xx_stop_all()
654 crypto4xx_destroy_gdr(core_dev->dev); in crypto4xx_stop_all()
655 crypto4xx_destroy_sdr(core_dev->dev); in crypto4xx_stop_all()
656 iounmap(core_dev->dev->ce_base); in crypto4xx_stop_all()
657 kfree(core_dev->dev); in crypto4xx_stop_all()
688 struct crypto4xx_device *dev = ctx->dev; in crypto4xx_build_pd() local
723 dev_err(dev->core_dev->device, "Invalid number of src SG.\n"); in crypto4xx_build_pd()
754 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
763 is_busy = ((dev->pdr_head - dev->pdr_tail) % PPC4XX_NUM_PD) >= in crypto4xx_build_pd()
771 is_busy = ((dev->pdr_head - dev->pdr_tail) % PPC4XX_NUM_PD) >= in crypto4xx_build_pd()
775 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
781 fst_gd = crypto4xx_get_n_gd(dev, num_gd); in crypto4xx_build_pd()
783 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
788 fst_sd = crypto4xx_get_n_sd(dev, num_sd); in crypto4xx_build_pd()
791 dev->gdr_head = fst_gd; in crypto4xx_build_pd()
792 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
796 pd_entry = crypto4xx_get_pd_from_pdr_nolock(dev); in crypto4xx_build_pd()
799 dev->gdr_head = fst_gd; in crypto4xx_build_pd()
801 dev->sdr_head = fst_sd; in crypto4xx_build_pd()
802 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
805 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
807 pd = &dev->pdr[pd_entry]; in crypto4xx_build_pd()
810 pd_uinfo = &dev->pdr_uinfo[pd_entry]; in crypto4xx_build_pd()
833 gd = crypto4xx_get_gdp(dev, &gd_dma, gd_idx); in crypto4xx_build_pd()
844 gd->ptr = dma_map_page(dev->core_dev->device, in crypto4xx_build_pd()
854 gd = crypto4xx_get_gdp(dev, &gd_dma, gd_idx); in crypto4xx_build_pd()
858 pd->src = (u32)dma_map_page(dev->core_dev->device, sg_page(src), in crypto4xx_build_pd()
877 pd->dest = (u32)dma_map_page(dev->core_dev->device, in crypto4xx_build_pd()
889 sd = crypto4xx_get_sdp(dev, &sd_dma, sd_idx); in crypto4xx_build_pd()
901 sd = crypto4xx_get_sdp(dev, &sd_dma, sd_idx); in crypto4xx_build_pd()
926 writel(0, dev->ce_base + CRYPTO4XX_INT_DESCR_RD); in crypto4xx_build_pd()
927 writel(1, dev->ce_base + CRYPTO4XX_INT_DESCR_RD); in crypto4xx_build_pd()
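In crypto4xx_build_pd() the gather, scatter, and packet slots are all reserved during one core_dev->lock hold, and a failed later reservation rolls the earlier heads back before unlocking; the two writes to CRYPTO4XX_INT_DESCR_RD at the end (source lines 926-927) then prod the engine to fetch the new descriptor. A condensed, kernel-context sketch of the rollback discipline, reusing the illustrative ex_get_n_gd()/ex_get_pd() helpers from the sketches above (the scatter ring would be reserved and rolled back the same way):

#include <linux/errno.h>
#include <linux/spinlock.h>

/* Reserve a gather run and one packet slot atomically, or reserve nothing. */
static int ex_reserve(spinlock_t *lock,
                      unsigned int *gd_head, unsigned int gd_tail, int num_gd,
                      unsigned int *pd_head, unsigned int pd_tail,
                      unsigned int *first_gd, unsigned int *pd_entry)
{
    unsigned long flags;

    spin_lock_irqsave(lock, flags);

    if (num_gd) {
        *first_gd = ex_get_n_gd(gd_head, gd_tail, num_gd);
        if (*first_gd == EX_RING_FULL)
            goto busy;
    }

    *pd_entry = ex_get_pd(pd_head, pd_tail);
    if (*pd_entry == EX_RING_FULL) {
        if (num_gd)
            *gd_head = *first_gd;    /* give the gather run back */
        goto busy;
    }

    spin_unlock_irqrestore(lock, flags);
    return 0;

busy:
    spin_unlock_irqrestore(lock, flags);
    return -EBUSY;
}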
937 ctx->dev = amcc_alg->dev; in crypto4xx_ctx_init()
1018 alg->dev = sec_dev; in crypto4xx_register_alg()
1075 struct device *dev = (struct device *)data; in crypto4xx_bh_tasklet_cb() local
1076 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev); in crypto4xx_bh_tasklet_cb()
1079 u32 tail = core_dev->dev->pdr_tail; in crypto4xx_bh_tasklet_cb()
1080 u32 head = core_dev->dev->pdr_head; in crypto4xx_bh_tasklet_cb()
1083 pd_uinfo = &core_dev->dev->pdr_uinfo[tail]; in crypto4xx_bh_tasklet_cb()
1084 pd = &core_dev->dev->pdr[tail]; in crypto4xx_bh_tasklet_cb()
1089 crypto4xx_pd_done(core_dev->dev, tail); in crypto4xx_bh_tasklet_cb()
1090 tail = crypto4xx_put_pd_to_pdr(core_dev->dev, tail); in crypto4xx_bh_tasklet_cb()
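crypto4xx_bh_tasklet_cb() drains completions in softirq context: it walks from pdr_tail toward pdr_head, completes each descriptor the engine has finished, and retires the slot via crypto4xx_put_pd_to_pdr(). The loop shape as a standalone sketch with an invented slot structure:

#define EX_NUM_PD 256

struct ex_slot {
    int hw_done;                 /* set once the engine finished the slot */
    void (*complete)(void *);    /* per-request completion callback       */
    void *req;
};

/* Walk from tail toward head, completing every slot the engine finished. */
static void ex_drain(struct ex_slot *ring, unsigned int head, unsigned int *tail)
{
    while (*tail != head) {
        struct ex_slot *s = &ring[*tail];

        if (!s->hw_done)         /* engine has not reached this slot yet */
            break;

        s->complete(s->req);     /* hand the result back to the requester */
        s->hw_done = 0;
        *tail = (*tail + 1) % EX_NUM_PD;
    }
}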
1104 struct device *dev = data; in crypto4xx_interrupt_handler() local
1105 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev); in crypto4xx_interrupt_handler()
1107 writel(clr_val, core_dev->dev->ce_base + CRYPTO4XX_INT_CLR); in crypto4xx_interrupt_handler()
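The interrupt handler matches show the hard-IRQ half doing no descriptor work at all: it acknowledges the engine through CRYPTO4XX_INT_CLR and leaves the ring walk to the tasklet. A sketch of that split, with an assumed register offset and clear bit:

#include <linux/bits.h>
#include <linux/interrupt.h>
#include <linux/io.h>

#define EX_INT_CLR 0x010c        /* illustrative register offset */

struct ex_dev {
    void __iomem *ce_base;
    struct tasklet_struct done_task;
};

/* Hard-IRQ half: acknowledge the engine, defer the ring walk to the tasklet. */
static irqreturn_t ex_irq(int irq, void *data)
{
    struct ex_dev *ed = data;

    writel(BIT(0), ed->ce_base + EX_INT_CLR);   /* assumed clear bit */
    tasklet_schedule(&ed->done_task);

    return IRQ_HANDLED;
}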
1124 static int ppc4xx_prng_data_read(struct crypto4xx_device *dev, in ppc4xx_prng_data_read() argument
1133 dev->ce_base + CRYPTO4XX_PRNG_CTRL); in ppc4xx_prng_data_read()
1137 if ((readl(dev->ce_base + CRYPTO4XX_PRNG_STAT) & in ppc4xx_prng_data_read()
1141 val[0] = readl_be(dev->ce_base + CRYPTO4XX_PRNG_RES_0); in ppc4xx_prng_data_read()
1142 val[1] = readl_be(dev->ce_base + CRYPTO4XX_PRNG_RES_1); in ppc4xx_prng_data_read()
1168 struct crypto4xx_device *dev; in crypto4xx_prng_generate() local
1172 dev = amcc_alg->dev; in crypto4xx_prng_generate()
1174 mutex_lock(&dev->core_dev->rng_lock); in crypto4xx_prng_generate()
1175 ret = ppc4xx_prng_data_read(dev, dstn, dlen); in crypto4xx_prng_generate()
1176 mutex_unlock(&dev->core_dev->rng_lock); in crypto4xx_prng_generate()
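ppc4xx_prng_data_read() starts the hardware PRNG, polls its status register until the result words are ready, and reads them with readl_be() (the big-endian accessor this powerpc driver itself uses); crypto4xx_prng_generate() serialises callers with core_dev->rng_lock. A hedged sketch of the poll-and-read shape; the offsets, bits, and retry bound are assumptions:

#include <linux/bits.h>
#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/io.h>

#define EX_PRNG_CTRL 0x0200      /* illustrative offsets and bits */
#define EX_PRNG_STAT 0x0204
#define EX_PRNG_RES0 0x0208
#define EX_PRNG_RES1 0x020c
#define EX_PRNG_GO   BIT(0)
#define EX_PRNG_BUSY BIT(0)

/* Produce two 32-bit random words, or -ETIMEDOUT if the engine stalls. */
static int ex_prng_read(void __iomem *ce_base, u32 out[2])
{
    int retries = 1000;          /* illustrative bound */

    writel(EX_PRNG_GO, ce_base + EX_PRNG_CTRL);

    while (readl(ce_base + EX_PRNG_STAT) & EX_PRNG_BUSY) {
        if (!--retries)
            return -ETIMEDOUT;
        udelay(10);
    }

    out[0] = readl_be(ce_base + EX_PRNG_RES0);  /* engine results are big-endian */
    out[1] = readl_be(ce_base + EX_PRNG_RES1);
    return 0;
}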
1377 struct device *dev = &ofdev->dev; in crypto4xx_probe() local
1383 rc = of_address_to_resource(ofdev->dev.of_node, 0, &res); in crypto4xx_probe()
1421 dev_set_drvdata(dev, core_dev); in crypto4xx_probe()
1423 core_dev->dev = kzalloc(sizeof(struct crypto4xx_device), GFP_KERNEL); in crypto4xx_probe()
1425 if (!core_dev->dev) in crypto4xx_probe()
1437 dev_info(dev, "RevA detected - disable interrupt coalescing\n"); in crypto4xx_probe()
1442 core_dev->dev->core_dev = core_dev; in crypto4xx_probe()
1443 core_dev->dev->is_revb = is_revb; in crypto4xx_probe()
1444 core_dev->device = dev; in crypto4xx_probe()
1447 INIT_LIST_HEAD(&core_dev->dev->alg_list); in crypto4xx_probe()
1448 ratelimit_default_init(&core_dev->dev->aead_ratelimit); in crypto4xx_probe()
1449 rc = crypto4xx_build_sdr(core_dev->dev); in crypto4xx_probe()
1452 rc = crypto4xx_build_pdr(core_dev->dev); in crypto4xx_probe()
1456 rc = crypto4xx_build_gdr(core_dev->dev); in crypto4xx_probe()
1462 (unsigned long) dev); in crypto4xx_probe()
1464 core_dev->dev->ce_base = of_iomap(ofdev->dev.of_node, 0); in crypto4xx_probe()
1465 if (!core_dev->dev->ce_base) { in crypto4xx_probe()
1466 dev_err(dev, "failed to of_iomap\n"); in crypto4xx_probe()
1472 core_dev->irq = irq_of_parse_and_map(ofdev->dev.of_node, 0); in crypto4xx_probe()
1476 KBUILD_MODNAME, dev); in crypto4xx_probe()
1481 crypto4xx_hw_init(core_dev->dev); in crypto4xx_probe()
1484 rc = crypto4xx_register_alg(core_dev->dev, crypto4xx_alg, in crypto4xx_probe()
1493 free_irq(core_dev->irq, dev); in crypto4xx_probe()
1496 iounmap(core_dev->dev->ce_base); in crypto4xx_probe()
1500 crypto4xx_destroy_sdr(core_dev->dev); in crypto4xx_probe()
1501 crypto4xx_destroy_gdr(core_dev->dev); in crypto4xx_probe()
1502 crypto4xx_destroy_pdr(core_dev->dev); in crypto4xx_probe()
1503 kfree(core_dev->dev); in crypto4xx_probe()
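crypto4xx_probe() builds the three rings, maps the engine, wires the tasklet and IRQ, runs crypto4xx_hw_init(), and registers the algorithms; the error labels from source line 1493 onward, and crypto4xx_remove()/crypto4xx_stop_all(), undo those steps in reverse. A compressed skeleton of that unwind discipline, with stand-in ex_* helpers in place of the real setup calls:

/* Stand-in helpers so the skeleton is self-contained; each undoes its setup. */
struct ex_crypto_dev { int dummy; };

static int  ex_build_rings(struct ex_crypto_dev *d)        { return 0; }
static void ex_destroy_rings(struct ex_crypto_dev *d)      { }
static int  ex_map_and_irq(struct ex_crypto_dev *d)        { return 0; }
static void ex_unmap_and_free_irq(struct ex_crypto_dev *d) { }
static void ex_engine_init(struct ex_crypto_dev *d)        { }
static int  ex_register_algs(struct ex_crypto_dev *d)      { return 0; }

/* Probe skeleton: every step acquired has a matching undo, run in reverse. */
static int ex_probe(struct ex_crypto_dev *d)
{
    int rc;

    rc = ex_build_rings(d);          /* PDR/GDR/SDR and their buffers */
    if (rc)
        return rc;

    rc = ex_map_and_irq(d);          /* iomap plus request_irq()      */
    if (rc)
        goto err_rings;

    ex_engine_init(d);               /* program the hardware          */

    rc = ex_register_algs(d);
    if (rc)
        goto err_irq;

    return 0;

err_irq:
    ex_unmap_and_free_irq(d);
err_rings:
    ex_destroy_rings(d);
    return rc;
}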
1512 struct device *dev = &ofdev->dev; in crypto4xx_remove() local
1513 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev); in crypto4xx_remove()
1517 free_irq(core_dev->irq, dev); in crypto4xx_remove()
1522 crypto4xx_unregister_alg(core_dev->dev); in crypto4xx_remove()