Lines Matching +full:flash +full:- +full:dma

1 // SPDX-License-Identifier: GPL-2.0+
3 * Freescale i.MX28 NAND flash driver
9 * Freescale GPMI NFC NAND Flash Driver
24 #include <asm/arch/imx-regs.h>
25 #include <asm/mach-imx/regs-bch.h>
26 #include <asm/mach-imx/regs-gpmi.h>
56 uint32_t addr = (uint32_t)info->data_buf; in mxs_nand_flush_data_buf()
58 flush_dcache_range(addr, addr + info->data_buf_size); in mxs_nand_flush_data_buf()
63 uint32_t addr = (uint32_t)info->data_buf; in mxs_nand_inval_data_buf()
65 invalidate_dcache_range(addr, addr + info->data_buf_size); in mxs_nand_inval_data_buf()
70 uint32_t addr = (uint32_t)info->cmd_buf; in mxs_nand_flush_cmd_buf()
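These three helpers encode the standard cache-maintenance ordering around DMA. A minimal sketch of that ordering (illustrative only; buf/len stand in for the driver's buffers):

/* Flush before the GPMI/BCH engines read the buffer; invalidate before the
 * CPU reads back what they wrote. */
static void example_dma_cache_ordering(void *buf, size_t len)
{
	uint32_t addr = (uint32_t)buf;

	flush_dcache_range(addr, addr + len);      /* CPU wrote, device will read */
	/* ... append descriptors and run the DMA chain here ... */
	invalidate_dcache_range(addr, addr + len); /* device wrote, CPU will read */
}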
84 if (info->desc_index >= MXS_NAND_DMA_DESCRIPTOR_COUNT) { in mxs_nand_get_dma_desc()
85 printf("MXS NAND: Too many DMA descriptors requested\n"); in mxs_nand_get_dma_desc()
89 desc = info->desc[info->desc_index]; in mxs_nand_get_dma_desc()
90 info->desc_index++; in mxs_nand_get_dma_desc()
100 for (i = 0; i < info->desc_index; i++) { in mxs_nand_return_dma_descs()
101 desc = info->desc[i]; in mxs_nand_return_dma_descs()
103 desc->address = (dma_addr_t)desc; in mxs_nand_return_dma_descs()
106 info->desc_index = 0; in mxs_nand_return_dma_descs()
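mxs_nand_get_dma_desc()/mxs_nand_return_dma_descs() implement a small fixed pool that every DMA chain in this driver draws from. A hedged usage sketch (channel and nand_info as in the surrounding functions; mxs_dma_desc_append()/mxs_dma_go() are the U-Boot APBH-DMA calls the driver uses elsewhere):

	struct mxs_dma_desc *d;

	d = mxs_nand_get_dma_desc(nand_info);
	/* ... fill d->cmd.data, d->cmd.address and d->cmd.pio_words ... */
	mxs_dma_desc_append(channel, d);

	if (mxs_dma_go(channel))
		printf("MXS NAND: DMA error\n");

	mxs_nand_return_dma_descs(nand_info);	/* recycle the whole pool for the next chain */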
117 uint32_t chunk_data_size_in_bits = geo->ecc_chunk_size * 8; in mxs_nand_calc_mark_offset()
118 uint32_t chunk_ecc_size_in_bits = geo->ecc_strength * geo->gf_len; in mxs_nand_calc_mark_offset()
131 block_mark_bit_offset -= MXS_NAND_METADATA_SIZE * 8; in mxs_nand_calc_mark_offset()
144 block_mark_chunk_bit_offset = block_mark_bit_offset - in mxs_nand_calc_mark_offset()
148 return -EINVAL; in mxs_nand_calc_mark_offset()
154 block_mark_bit_offset -= in mxs_nand_calc_mark_offset()
157 geo->block_mark_byte_offset = block_mark_bit_offset >> 3; in mxs_nand_calc_mark_offset()
158 geo->block_mark_bit_offset = block_mark_bit_offset & 0x7; in mxs_nand_calc_mark_offset()
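A worked example may help here. Assuming the common 2048+64-byte page with the geometry derived further below (four 512-byte chunks, ECC strength 8, GF(2^13)) and the driver's 10-byte metadata area:

	chunk_data_size_in_bits  = 512 * 8              = 4096
	chunk_ecc_size_in_bits   = 8 * 13               = 104
	chunk_total_size_in_bits = 4096 + 104           = 4200
	block_mark_bit_offset    = 2048 * 8 - 10 * 8    = 16304
	block_mark_chunk_number  = 16304 / 4200         = 3
	block_mark_bit_offset   -= 3 * 104             -> 15992
	block_mark_byte_offset   = 15992 >> 3           = 1999
	block_mark_bit_offset    = 15992 & 0x7          = 0

So the factory bad-block mark, physically the first OOB byte of the raw page, maps to byte 1999, bit 0, of the ECC-decoded data.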
173 geo->gf_len = 13; in mxs_nand_calc_ecc_layout_by_info()
176 geo->gf_len = 14; in mxs_nand_calc_ecc_layout_by_info()
179 return -EINVAL; in mxs_nand_calc_ecc_layout_by_info()
182 geo->ecc_chunk_size = ecc_step; in mxs_nand_calc_ecc_layout_by_info()
183 geo->ecc_strength = round_up(ecc_strength, 2); in mxs_nand_calc_ecc_layout_by_info()
186 if (geo->ecc_chunk_size < mtd->oobsize) in mxs_nand_calc_ecc_layout_by_info()
187 return -EINVAL; in mxs_nand_calc_ecc_layout_by_info()
189 if (geo->ecc_strength > nand_info->max_ecc_strength_supported) in mxs_nand_calc_ecc_layout_by_info()
190 return -EINVAL; in mxs_nand_calc_ecc_layout_by_info()
192 geo->ecc_chunk_count = mtd->writesize / geo->ecc_chunk_size; in mxs_nand_calc_ecc_layout_by_info()
204 geo->gf_len = 13; in mxs_nand_calc_ecc_layout()
207 geo->ecc_chunk_size = 512; in mxs_nand_calc_ecc_layout()
209 if (geo->ecc_chunk_size < mtd->oobsize) { in mxs_nand_calc_ecc_layout()
210 geo->gf_len = 14; in mxs_nand_calc_ecc_layout()
211 geo->ecc_chunk_size *= 2; in mxs_nand_calc_ecc_layout()
214 if (mtd->oobsize > geo->ecc_chunk_size) { in mxs_nand_calc_ecc_layout()
216 geo->ecc_chunk_size); in mxs_nand_calc_ecc_layout()
217 return -EINVAL; in mxs_nand_calc_ecc_layout()
220 geo->ecc_chunk_count = mtd->writesize / geo->ecc_chunk_size; in mxs_nand_calc_ecc_layout()
228 * (page oob size - meta data size) * (bits per byte) in mxs_nand_calc_ecc_layout()
230 geo->ecc_strength = ((mtd->oobsize - MXS_NAND_METADATA_SIZE) * 8) in mxs_nand_calc_ecc_layout()
231 / (geo->gf_len * geo->ecc_chunk_count); in mxs_nand_calc_ecc_layout()
233 geo->ecc_strength = min(round_down(geo->ecc_strength, 2), in mxs_nand_calc_ecc_layout()
234 nand_info->max_ecc_strength_supported); in mxs_nand_calc_ecc_layout()
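For the same 2048+64-byte example the strength formula above evaluates as follows (with the 10-byte metadata area, gf_len 13 and four 512-byte chunks):

	ecc_strength = ((64 - 10) * 8) / (13 * 4) = 432 / 52 = 8   (integer division)
	ecc_strength = min(round_down(8, 2), max_ecc_strength_supported) = 8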
247 ret = mxs_wait_mask_set(&nand_info->bch_regs->hw_bch_ctrl_reg, in mxs_nand_wait_for_bch_complete()
250 writel(BCH_CTRL_COMPLETE_IRQ, &nand_info->bch_regs->hw_bch_ctrl_clr); in mxs_nand_wait_for_bch_complete()
263 * Flash.
270 uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip; in mxs_nand_cmd_ctrl()
277 if (nand_info->cmd_queue_len == MXS_NAND_COMMAND_BUFFER_SIZE) { in mxs_nand_cmd_ctrl()
289 * Rather than run a separate DMA operation for every single byte, we in mxs_nand_cmd_ctrl()
290 * queue them up and run a single DMA operation for the entire series in mxs_nand_cmd_ctrl()
295 nand_info->cmd_buf[nand_info->cmd_queue_len++] = data; in mxs_nand_cmd_ctrl()
304 if (nand_info->cmd_queue_len == 0) in mxs_nand_cmd_ctrl()
307 /* Compile the DMA descriptor -- a descriptor that sends command. */ in mxs_nand_cmd_ctrl()
309 d->cmd.data = in mxs_nand_cmd_ctrl()
313 (nand_info->cmd_queue_len << MXS_DMA_DESC_BYTES_OFFSET); in mxs_nand_cmd_ctrl()
315 d->cmd.address = (dma_addr_t)nand_info->cmd_buf; in mxs_nand_cmd_ctrl()
317 d->cmd.pio_words[0] = in mxs_nand_cmd_ctrl()
320 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_cmd_ctrl()
323 nand_info->cmd_queue_len; in mxs_nand_cmd_ctrl()
330 /* Execute the DMA chain. */ in mxs_nand_cmd_ctrl()
338 nand_info->cmd_queue_len = 0; in mxs_nand_cmd_ctrl()
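The queueing only pays off because the generic NAND core calls this hook once per command/address byte and then once more with NAND_CMD_NONE. A hedged sketch of that call pattern for READID (approximating what nand_command()/nand_command_lp() issue; this driver only inspects the ALE/CLE bits):

/* Each CLE/ALE byte is merely appended to cmd_buf; the trailing NAND_CMD_NONE
 * call is what builds and runs the single command DMA. */
static void example_readid_sequence(struct mtd_info *mtd, struct nand_chip *chip)
{
	chip->cmd_ctrl(mtd, NAND_CMD_READID, NAND_NCE | NAND_CLE | NAND_CTRL_CHANGE);
	chip->cmd_ctrl(mtd, 0x00, NAND_NCE | NAND_ALE | NAND_CTRL_CHANGE);
	chip->cmd_ctrl(mtd, NAND_CMD_NONE, NAND_NCE | NAND_CTRL_CHANGE);
	/* the ID bytes would now be fetched with chip->read_byte() */
}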
342 * Test if the NAND flash is ready.
350 tmp = readl(&nand_info->gpmi_regs->hw_gpmi_stat); in mxs_nand_device_ready()
351 tmp >>= (GPMI_STAT_READY_BUSY_OFFSET + nand_info->cur_chip); in mxs_nand_device_ready()
364 nand_info->cur_chip = chip; in mxs_nand_select_chip()
371 * swapping the block mark, or swapping it *back* -- but it doesn't matter
377 uint32_t bit_offset = geo->block_mark_bit_offset; in mxs_nand_swap_block_mark()
378 uint32_t buf_offset = geo->block_mark_byte_offset; in mxs_nand_swap_block_mark()
390 src |= data_buf[buf_offset + 1] << (8 - bit_offset); in mxs_nand_swap_block_mark()
400 data_buf[buf_offset + 1] |= dst >> (8 - bit_offset); in mxs_nand_swap_block_mark()
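/* Worked continuation of the example above (hedged): with byte offset 1999 and
 * bit offset 0 the splice degenerates into a plain exchange of data_buf[1999]
 * and oob_buf[0]. With a non-zero bit_offset the mark straddles
 * data_buf[buf_offset] and data_buf[buf_offset + 1], and the shifts by
 * bit_offset and (8 - bit_offset) reassemble and redistribute the byte. */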
411 uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip; in mxs_nand_read_buf()
415 printf("MXS NAND: DMA buffer too big\n"); in mxs_nand_read_buf()
420 printf("MXS NAND: DMA buffer is NULL\n"); in mxs_nand_read_buf()
424 /* Compile the DMA descriptor - a descriptor that reads data. */ in mxs_nand_read_buf()
426 d->cmd.data = in mxs_nand_read_buf()
432 d->cmd.address = (dma_addr_t)nand_info->data_buf; in mxs_nand_read_buf()
434 d->cmd.pio_words[0] = in mxs_nand_read_buf()
437 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_read_buf()
444 * A DMA descriptor that waits for the command to end and the chip to in mxs_nand_read_buf()
449 * did that and no one has re-thought it yet. in mxs_nand_read_buf()
452 d->cmd.data = in mxs_nand_read_buf()
457 d->cmd.address = 0; in mxs_nand_read_buf()
459 d->cmd.pio_words[0] = in mxs_nand_read_buf()
462 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_read_buf()
470 /* Execute the DMA chain. */ in mxs_nand_read_buf()
473 printf("MXS NAND: DMA read error\n"); in mxs_nand_read_buf()
480 memcpy(buf, nand_info->data_buf, length); in mxs_nand_read_buf()
495 uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip; in mxs_nand_write_buf()
499 printf("MXS NAND: DMA buffer too big\n"); in mxs_nand_write_buf()
504 printf("MXS NAND: DMA buffer is NULL\n"); in mxs_nand_write_buf()
508 memcpy(nand_info->data_buf, buf, length); in mxs_nand_write_buf()
510 /* Compile the DMA descriptor - a descriptor that writes data. */ in mxs_nand_write_buf()
512 d->cmd.data = in mxs_nand_write_buf()
518 d->cmd.address = (dma_addr_t)nand_info->data_buf; in mxs_nand_write_buf()
520 d->cmd.pio_words[0] = in mxs_nand_write_buf()
523 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_write_buf()
532 /* Execute the DMA chain. */ in mxs_nand_write_buf()
535 printf("MXS NAND: DMA write error\n"); in mxs_nand_write_buf()
558 struct bch_geometry *geo = &nand_info->bch_geometry; in mxs_nand_ecc_read_page()
560 uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip; in mxs_nand_ecc_read_page()
565 /* Compile the DMA descriptor - wait for ready. */ in mxs_nand_ecc_read_page()
567 d->cmd.data = in mxs_nand_ecc_read_page()
572 d->cmd.address = 0; in mxs_nand_ecc_read_page()
574 d->cmd.pio_words[0] = in mxs_nand_ecc_read_page()
577 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_ecc_read_page()
582 /* Compile the DMA descriptor - enable the BCH block and read. */ in mxs_nand_ecc_read_page()
584 d->cmd.data = in mxs_nand_ecc_read_page()
588 d->cmd.address = 0; in mxs_nand_ecc_read_page()
590 d->cmd.pio_words[0] = in mxs_nand_ecc_read_page()
593 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_ecc_read_page()
595 (mtd->writesize + mtd->oobsize); in mxs_nand_ecc_read_page()
596 d->cmd.pio_words[1] = 0; in mxs_nand_ecc_read_page()
597 d->cmd.pio_words[2] = in mxs_nand_ecc_read_page()
601 d->cmd.pio_words[3] = mtd->writesize + mtd->oobsize; in mxs_nand_ecc_read_page()
602 d->cmd.pio_words[4] = (dma_addr_t)nand_info->data_buf; in mxs_nand_ecc_read_page()
603 d->cmd.pio_words[5] = (dma_addr_t)nand_info->oob_buf; in mxs_nand_ecc_read_page()
607 /* Compile the DMA descriptor - disable the BCH block. */ in mxs_nand_ecc_read_page()
609 d->cmd.data = in mxs_nand_ecc_read_page()
614 d->cmd.address = 0; in mxs_nand_ecc_read_page()
616 d->cmd.pio_words[0] = in mxs_nand_ecc_read_page()
619 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_ecc_read_page()
621 (mtd->writesize + mtd->oobsize); in mxs_nand_ecc_read_page()
622 d->cmd.pio_words[1] = 0; in mxs_nand_ecc_read_page()
623 d->cmd.pio_words[2] = 0; in mxs_nand_ecc_read_page()
627 /* Compile the DMA descriptor - deassert the NAND lock and interrupt. */ in mxs_nand_ecc_read_page()
629 d->cmd.data = in mxs_nand_ecc_read_page()
633 d->cmd.address = 0; in mxs_nand_ecc_read_page()
640 /* Execute the DMA chain. */ in mxs_nand_ecc_read_page()
643 printf("MXS NAND: DMA read error\n"); in mxs_nand_ecc_read_page()
656 /* Read DMA completed, now do the mark swapping. */ in mxs_nand_ecc_read_page()
657 mxs_nand_swap_block_mark(geo, nand_info->data_buf, nand_info->oob_buf); in mxs_nand_ecc_read_page()
660 status = nand_info->oob_buf + mxs_nand_aux_status_offset(); in mxs_nand_ecc_read_page()
661 for (i = 0; i < geo->ecc_chunk_count; i++) { in mxs_nand_ecc_read_page()
677 mtd->ecc_stats.failed += failed; in mxs_nand_ecc_read_page()
678 mtd->ecc_stats.corrected += corrected; in mxs_nand_ecc_read_page()
689 memset(nand->oob_poi, 0xff, mtd->oobsize); in mxs_nand_ecc_read_page()
691 nand->oob_poi[0] = nand_info->oob_buf[0]; in mxs_nand_ecc_read_page()
693 memcpy(buf, nand_info->data_buf, mtd->writesize); in mxs_nand_ecc_read_page()
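The status loop elided by this match walks one auxiliary byte per ECC chunk. A hedged sketch of the conventional interpretation of those bytes (0x00 clean, 0xff erased, 0xfe uncorrectable, anything else a corrected-bitflip count):

	for (i = 0; i < geo->ecc_chunk_count; i++) {
		if (status[i] == 0x00 || status[i] == 0xff)
			continue;		/* clean or erased chunk */
		if (status[i] == 0xfe) {
			failed++;		/* uncorrectable chunk */
			continue;
		}
		corrected += status[i];		/* corrected bitflips in this chunk */
	}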
709 struct bch_geometry *geo = &nand_info->bch_geometry; in mxs_nand_ecc_write_page()
711 uint32_t channel = MXS_DMA_CHANNEL_AHB_APBH_GPMI0 + nand_info->cur_chip; in mxs_nand_ecc_write_page()
714 memcpy(nand_info->data_buf, buf, mtd->writesize); in mxs_nand_ecc_write_page()
715 memcpy(nand_info->oob_buf, nand->oob_poi, mtd->oobsize); in mxs_nand_ecc_write_page()
718 mxs_nand_swap_block_mark(geo, nand_info->data_buf, nand_info->oob_buf); in mxs_nand_ecc_write_page()
720 /* Compile the DMA descriptor - write data. */ in mxs_nand_ecc_write_page()
722 d->cmd.data = in mxs_nand_ecc_write_page()
727 d->cmd.address = 0; in mxs_nand_ecc_write_page()
729 d->cmd.pio_words[0] = in mxs_nand_ecc_write_page()
732 (nand_info->cur_chip << GPMI_CTRL0_CS_OFFSET) | in mxs_nand_ecc_write_page()
734 d->cmd.pio_words[1] = 0; in mxs_nand_ecc_write_page()
735 d->cmd.pio_words[2] = in mxs_nand_ecc_write_page()
739 d->cmd.pio_words[3] = (mtd->writesize + mtd->oobsize); in mxs_nand_ecc_write_page()
740 d->cmd.pio_words[4] = (dma_addr_t)nand_info->data_buf; in mxs_nand_ecc_write_page()
741 d->cmd.pio_words[5] = (dma_addr_t)nand_info->oob_buf; in mxs_nand_ecc_write_page()
748 /* Execute the DMA chain. */ in mxs_nand_ecc_write_page()
751 printf("MXS NAND: DMA write error\n"); in mxs_nand_ecc_write_page()
770 * the NAND Flash MTD code.
779 if (ops->mode == MTD_OPS_RAW) in mxs_nand_hook_read_oob()
780 nand_info->raw_oob_mode = 1; in mxs_nand_hook_read_oob()
782 nand_info->raw_oob_mode = 0; in mxs_nand_hook_read_oob()
784 ret = nand_info->hooked_read_oob(mtd, from, ops); in mxs_nand_hook_read_oob()
786 nand_info->raw_oob_mode = 0; in mxs_nand_hook_read_oob()
795 * the NAND Flash MTD code.
804 if (ops->mode == MTD_OPS_RAW) in mxs_nand_hook_write_oob()
805 nand_info->raw_oob_mode = 1; in mxs_nand_hook_write_oob()
807 nand_info->raw_oob_mode = 0; in mxs_nand_hook_write_oob()
809 ret = nand_info->hooked_write_oob(mtd, to, ops); in mxs_nand_hook_write_oob()
811 nand_info->raw_oob_mode = 0; in mxs_nand_hook_write_oob()
820 * the NAND Flash MTD code.
828 nand_info->marking_block_bad = 1; in mxs_nand_hook_block_markbad()
830 ret = nand_info->hooked_block_markbad(mtd, ofs); in mxs_nand_hook_block_markbad()
832 nand_info->marking_block_bad = 0; in mxs_nand_hook_block_markbad()
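All three hooks follow one wrapper pattern: stash the original mtd operation, substitute a thin shim that records the caller's intent in a flag, delegate, then clear the flag. A condensed, hedged sketch of the read-OOB variant (helper names as used elsewhere in U-Boot's raw NAND code):

static int example_hook_read_oob(struct mtd_info *mtd, loff_t from,
				 struct mtd_oob_ops *ops)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct mxs_nand_info *nand_info = nand_get_controller_data(chip);
	int ret;

	nand_info->raw_oob_mode = (ops->mode == MTD_OPS_RAW);
	ret = nand_info->hooked_read_oob(mtd, from, ops);
	nand_info->raw_oob_mode = 0;

	return ret;
}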
852 * 3) ECC-based read operations return an OOB full of set bits (since we never
853 * allow ECC-based writes to the OOB, it doesn't matter what ECC-based reads
862 * "raw" read, or an ECC-based read.
864 * It turns out that knowing whether we want an "ECC-based" or "raw" read is not
865 * easy. When reading a page, for example, the NAND Flash MTD code calls our
867 * ECC-based or raw view of the page is implicit in which function it calls
868 * (there is a similar pair of ECC-based/raw functions for writing).
871 * ECC-based/raw functions for reading or writing the OOB. The fact that the
872 * caller wants an ECC-based or raw view of the page is not propagated down to
891 if (nand_info->raw_oob_mode) { in mxs_nand_ecc_read_oob()
896 nand->cmdfunc(mtd, NAND_CMD_READ0, mtd->writesize, page); in mxs_nand_ecc_read_oob()
897 nand->read_buf(mtd, nand->oob_poi, mtd->oobsize); in mxs_nand_ecc_read_oob()
903 memset(nand->oob_poi, 0xff, mtd->oobsize); in mxs_nand_ecc_read_oob()
905 nand->cmdfunc(mtd, NAND_CMD_READ0, mtd->writesize, page); in mxs_nand_ecc_read_oob()
906 mxs_nand_read_buf(mtd, nand->oob_poi, 1); in mxs_nand_ecc_read_oob()
924 * the NAND Flash MTD model that make it essentially impossible to write in mxs_nand_ecc_write_oob()
925 * the out-of-band bytes. in mxs_nand_ecc_write_oob()
931 if (!nand_info->marking_block_bad) { in mxs_nand_ecc_write_oob()
933 return -EIO; in mxs_nand_ecc_write_oob()
937 nand->cmdfunc(mtd, NAND_CMD_SEQIN, mtd->writesize, page); in mxs_nand_ecc_write_oob()
938 nand->write_buf(mtd, &block_mark, 1); in mxs_nand_ecc_write_oob()
939 nand->cmdfunc(mtd, NAND_CMD_PAGEPROG, -1, -1); in mxs_nand_ecc_write_oob()
942 if (nand->waitfunc(mtd, nand) & NAND_STATUS_FAIL) in mxs_nand_ecc_write_oob()
943 return -EIO; in mxs_nand_ecc_write_oob()
951 * In principle, this function is *only* called when the NAND Flash MTD system
952 * isn't allowed to keep an in-memory bad block table, so it is forced to ask
955 * In fact, we permit the NAND Flash MTD system to have an in-memory BBT, so
972 if (chip->ecc.strength > 0 && chip->ecc.size > 0) in mxs_nand_set_geometry()
974 chip->ecc.strength, chip->ecc.size); in mxs_nand_set_geometry()
976 if (nand_info->use_minimum_ecc || in mxs_nand_set_geometry()
978 if (!(chip->ecc_strength_ds > 0 && chip->ecc_step_ds > 0)) in mxs_nand_set_geometry()
979 return -EINVAL; in mxs_nand_set_geometry()
982 chip->ecc_strength_ds, chip->ecc_step_ds); in mxs_nand_set_geometry()
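/* Hedged summary of the selection order these fragments imply: an explicitly
 * configured chip->ecc.strength/ecc.size (for example from the device tree) is
 * honoured first; otherwise, when use_minimum_ecc is set (or, in the full
 * source, when the OOB-maximising layout cannot be computed), the chip's
 * datasheet minimum ecc_strength_ds/ecc_step_ds is used, provided the chip
 * reports one; failing that the function returns -EINVAL. */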
989 * At this point, the physical NAND Flash chips have been identified and
1001 struct bch_geometry *geo = &nand_info->bch_geometry; in mxs_nand_setup_ecc()
1002 struct mxs_bch_regs *bch_regs = nand_info->bch_regs; in mxs_nand_setup_ecc()
1010 mxs_nand_calc_mark_offset(geo, mtd->writesize); in mxs_nand_setup_ecc()
1013 mxs_reset_block(&bch_regs->hw_bch_ctrl_reg); in mxs_nand_setup_ecc()
1016 tmp = (geo->ecc_chunk_count - 1) << BCH_FLASHLAYOUT0_NBLOCKS_OFFSET; in mxs_nand_setup_ecc()
1018 tmp |= (geo->ecc_strength >> 1) << BCH_FLASHLAYOUT0_ECC0_OFFSET; in mxs_nand_setup_ecc()
1019 tmp |= geo->ecc_chunk_size >> MXS_NAND_CHUNK_DATA_CHUNK_SIZE_SHIFT; in mxs_nand_setup_ecc()
1020 tmp |= (geo->gf_len == 14 ? 1 : 0) << in mxs_nand_setup_ecc()
1022 writel(tmp, &bch_regs->hw_bch_flash0layout0); in mxs_nand_setup_ecc()
1024 tmp = (mtd->writesize + mtd->oobsize) in mxs_nand_setup_ecc()
1026 tmp |= (geo->ecc_strength >> 1) << BCH_FLASHLAYOUT1_ECCN_OFFSET; in mxs_nand_setup_ecc()
1027 tmp |= geo->ecc_chunk_size >> MXS_NAND_CHUNK_DATA_CHUNK_SIZE_SHIFT; in mxs_nand_setup_ecc()
1028 tmp |= (geo->gf_len == 14 ? 1 : 0) << in mxs_nand_setup_ecc()
1030 writel(tmp, &bch_regs->hw_bch_flash0layout1); in mxs_nand_setup_ecc()
1033 writel(0, &bch_regs->hw_bch_layoutselect); in mxs_nand_setup_ecc()
1036 writel(BCH_CTRL_COMPLETE_IRQ_EN, &bch_regs->hw_bch_ctrl_set); in mxs_nand_setup_ecc()
1039 if (mtd->_read_oob != mxs_nand_hook_read_oob) { in mxs_nand_setup_ecc()
1040 nand_info->hooked_read_oob = mtd->_read_oob; in mxs_nand_setup_ecc()
1041 mtd->_read_oob = mxs_nand_hook_read_oob; in mxs_nand_setup_ecc()
1044 if (mtd->_write_oob != mxs_nand_hook_write_oob) { in mxs_nand_setup_ecc()
1045 nand_info->hooked_write_oob = mtd->_write_oob; in mxs_nand_setup_ecc()
1046 mtd->_write_oob = mxs_nand_hook_write_oob; in mxs_nand_setup_ecc()
1049 if (mtd->_block_markbad != mxs_nand_hook_block_markbad) { in mxs_nand_setup_ecc()
1050 nand_info->hooked_block_markbad = mtd->_block_markbad; in mxs_nand_setup_ecc()
1051 mtd->_block_markbad = mxs_nand_hook_block_markbad; in mxs_nand_setup_ecc()
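Plugging the 2048+64 example geometry into the layout fields written above gives, schematically (hedged; the metadata and total-page-size field writes are only partly visible in this match):

	hw_bch_flash0layout0: NBLOCKS    = ecc_chunk_count - 1 = 3
	                      ECC0       = ecc_strength >> 1   = 4
	                      GF13/GF14  = 0  (gf_len == 13)
	                      DATA0_SIZE = 512 >> MXS_NAND_CHUNK_DATA_CHUNK_SIZE_SHIFT
	hw_bch_flash0layout1: total page = 2048 + 64 = 2112
	                      ECCN       = ecc_strength >> 1   = 4
	                      GF13/GF14  = 0
	                      DATAN_SIZE = 512 >> MXS_NAND_CHUNK_DATA_CHUNK_SIZE_SHIFT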
1058 * Allocate DMA buffers
1065 nand_info->data_buf_size = roundup(size, MXS_DMA_ALIGNMENT); in mxs_nand_alloc_buffers()
1067 /* DMA buffers */ in mxs_nand_alloc_buffers()
1068 buf = memalign(MXS_DMA_ALIGNMENT, nand_info->data_buf_size); in mxs_nand_alloc_buffers()
1070 printf("MXS NAND: Error allocating DMA buffers\n"); in mxs_nand_alloc_buffers()
1071 return -ENOMEM; in mxs_nand_alloc_buffers()
1074 memset(buf, 0, nand_info->data_buf_size); in mxs_nand_alloc_buffers()
1076 nand_info->data_buf = buf; in mxs_nand_alloc_buffers()
1077 nand_info->oob_buf = buf + NAND_MAX_PAGESIZE; in mxs_nand_alloc_buffers()
1079 nand_info->cmd_buf = memalign(MXS_DMA_ALIGNMENT, in mxs_nand_alloc_buffers()
1081 if (!nand_info->cmd_buf) { in mxs_nand_alloc_buffers()
1084 return -ENOMEM; in mxs_nand_alloc_buffers()
1086 memset(nand_info->cmd_buf, 0, MXS_NAND_COMMAND_BUFFER_SIZE); in mxs_nand_alloc_buffers()
1087 nand_info->cmd_queue_len = 0; in mxs_nand_alloc_buffers()
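A hedged sketch of the resulting buffer layout (assuming the size argument covers a maximum page plus OOB, as the oob_buf offset suggests):

	buf      = memalign(MXS_DMA_ALIGNMENT, roundup(size, MXS_DMA_ALIGNMENT))
	data_buf = buf                       /* page data handed to GPMI/BCH */
	oob_buf  = buf + NAND_MAX_PAGESIZE   /* auxiliary/OOB area; BCH status bytes land here */
	cmd_buf  = separate MXS_DMA_ALIGNMENT-aligned buffer of
	           MXS_NAND_COMMAND_BUFFER_SIZE bytes for the queued command/address bytes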
1099 info->desc = malloc(sizeof(struct mxs_dma_desc *) * in mxs_nand_init_dma()
1101 if (!info->desc) { in mxs_nand_init_dma()
1102 ret = -ENOMEM; in mxs_nand_init_dma()
1106 /* Allocate the DMA descriptors. */ in mxs_nand_init_dma()
1108 info->desc[i] = mxs_dma_desc_alloc(); in mxs_nand_init_dma()
1109 if (!info->desc[i]) { in mxs_nand_init_dma()
1110 ret = -ENOMEM; in mxs_nand_init_dma()
1115 /* Init the DMA controller. */ in mxs_nand_init_dma()
1125 mxs_reset_block(&info->gpmi_regs->hw_gpmi_ctrl0_reg); in mxs_nand_init_dma()
1126 mxs_reset_block(&info->bch_regs->hw_bch_ctrl_reg); in mxs_nand_init_dma()
1132 clrsetbits_le32(&info->gpmi_regs->hw_gpmi_ctrl1, in mxs_nand_init_dma()
1140 for (--j; j >= MXS_DMA_CHANNEL_AHB_APBH_GPMI0; j--) in mxs_nand_init_dma()
1143 for (--i; i >= 0; i--) in mxs_nand_init_dma()
1144 mxs_dma_desc_free(info->desc[i]); in mxs_nand_init_dma()
1145 free(info->desc); in mxs_nand_init_dma()
1147 if (ret == -ENOMEM) in mxs_nand_init_dma()
1148 printf("MXS NAND: Unable to allocate DMA descriptors\n"); in mxs_nand_init_dma()
1160 return -ENOMEM; in mxs_nand_init_spl()
1164 nand_info->gpmi_regs = (struct mxs_gpmi_regs *)MXS_GPMI_BASE; in mxs_nand_init_spl()
1165 nand_info->bch_regs = (struct mxs_bch_regs *)MXS_BCH_BASE; in mxs_nand_init_spl()
1168 nand_info->max_ecc_strength_supported = 62; in mxs_nand_init_spl()
1170 nand_info->max_ecc_strength_supported = 40; in mxs_nand_init_spl()
1182 nand->options |= NAND_NO_SUBPAGE_WRITE; in mxs_nand_init_spl()
1184 nand->cmd_ctrl = mxs_nand_cmd_ctrl; in mxs_nand_init_spl()
1185 nand->dev_ready = mxs_nand_device_ready; in mxs_nand_init_spl()
1186 nand->select_chip = mxs_nand_select_chip; in mxs_nand_init_spl()
1188 nand->read_byte = mxs_nand_read_byte; in mxs_nand_init_spl()
1189 nand->read_buf = mxs_nand_read_buf; in mxs_nand_init_spl()
1191 nand->ecc.read_page = mxs_nand_ecc_read_page; in mxs_nand_init_spl()
1193 nand->ecc.mode = NAND_ECC_HW; in mxs_nand_init_spl()
1204 nand = &nand_info->chip; in mxs_nand_init_ctrl()
1217 nand->bbt_options |= NAND_BBT_USE_FLASH | NAND_BBT_NO_OOB; in mxs_nand_init_ctrl()
1221 nand->options |= NAND_NO_SUBPAGE_WRITE; in mxs_nand_init_ctrl()
1223 if (nand_info->dev) in mxs_nand_init_ctrl()
1224 nand->flash_node = dev_of_offset(nand_info->dev); in mxs_nand_init_ctrl()
1226 nand->cmd_ctrl = mxs_nand_cmd_ctrl; in mxs_nand_init_ctrl()
1228 nand->dev_ready = mxs_nand_device_ready; in mxs_nand_init_ctrl()
1229 nand->select_chip = mxs_nand_select_chip; in mxs_nand_init_ctrl()
1230 nand->block_bad = mxs_nand_block_bad; in mxs_nand_init_ctrl()
1232 nand->read_byte = mxs_nand_read_byte; in mxs_nand_init_ctrl()
1234 nand->read_buf = mxs_nand_read_buf; in mxs_nand_init_ctrl()
1235 nand->write_buf = mxs_nand_write_buf; in mxs_nand_init_ctrl()
1244 nand->ecc.read_page = mxs_nand_ecc_read_page; in mxs_nand_init_ctrl()
1245 nand->ecc.write_page = mxs_nand_ecc_write_page; in mxs_nand_init_ctrl()
1246 nand->ecc.read_oob = mxs_nand_ecc_read_oob; in mxs_nand_init_ctrl()
1247 nand->ecc.write_oob = mxs_nand_ecc_write_oob; in mxs_nand_init_ctrl()
1249 nand->ecc.layout = &fake_ecc_layout; in mxs_nand_init_ctrl()
1250 nand->ecc.mode = NAND_ECC_HW; in mxs_nand_init_ctrl()
1251 nand->ecc.size = nand_info->bch_geometry.ecc_chunk_size; in mxs_nand_init_ctrl()
1252 nand->ecc.strength = nand_info->bch_geometry.ecc_strength; in mxs_nand_init_ctrl()
1266 free(nand_info->data_buf); in mxs_nand_init_ctrl()
1267 free(nand_info->cmd_buf); in mxs_nand_init_ctrl()
1284 nand_info->gpmi_regs = (struct mxs_gpmi_regs *)MXS_GPMI_BASE; in board_nand_init()
1285 nand_info->bch_regs = (struct mxs_bch_regs *)MXS_BCH_BASE; in board_nand_init()
1289 nand_info->max_ecc_strength_supported = 62; in board_nand_init()
1291 nand_info->max_ecc_strength_supported = 40; in board_nand_init()
1294 nand_info->use_minimum_ecc = true; in board_nand_init()