// SPDX-License-Identifier: GPL-2.0+
/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015 Vladimir Zapolskiy <vz@mleia.com>
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 *
 * Copyright (c) 2015 Tyco Fire Protection Products.
 */

#include <common.h>
#include <nand.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>

#if defined(CONFIG_DMA_LPC32XX) && defined(CONFIG_SPL_BUILD)
#warning "DMA support in SPL image is not tested"
#endif

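/*
 * SLC NAND controller register layout, mapped at SLC_NAND_BASE;
 * register names follow the LPC32x0 user manual (UM10326).
 */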
struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};

/* CFG register */
#define CFG_CE_LOW		(1 << 5)
#define CFG_DMA_ECC		(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN		(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST		(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR		(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET		(1 << 2)
#define CTRL_ECC_CLEAR		(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START		(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO		(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY		(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC		(1 << 1)
#define INT_STAT_RDY		(1 << 0)

/* TAC register bits, be aware of overflows */
#define TAC_W_RDY(n)		(max_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)		(max_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 0)

/*
 * NAND ECC layout for small page NAND devices.
 * Note: for large page devices, the default layouts are used.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = {10, 11, 12, 13, 14, 15},
	.oobfree = {
		{.offset = 0,
		 .length = 4},
		{.offset = 6,
		 .length = 4}
		}
};

#if defined(CONFIG_DMA_LPC32XX)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8]; /* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif

static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;

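/*
 * Reset the SLC controller, disable and clear its interrupts, and
 * program the TAC read/write timings from the current HCLK rate and
 * the board configuration.
 */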
static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}

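/*
 * cmd_ctrl hook: drive the chip enable level through the CFG register
 * and route command/address bytes to the dedicated SLC CMD and ADDR
 * registers depending on the CLE/ALE flags.
 */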
static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}

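/* Report the NAND ready/busy line state from the SLC status register */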
static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}

#if defined(CONFIG_DMA_LPC32XX)
/*
 * Prepares DMA descriptors for NAND RD/WR operations
 * If the size is < 256 Bytes then it is assumed to be
 * an OOB transfer
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC
	 * Copy Multiple times to sync DMA with Flash Controller
	 */
	ecc_ctrl = 0x5 |
			DMAC_CHAN_SRC_BURST_1 |
			DMAC_CHAN_DEST_BURST_1 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
			DMAC_CHAN_SRC_BURST_4 |
			DMAC_CHAN_DEST_BURST_4 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
			DMAC_CHAN_SRC_BURST_4 |
			DMAC_CHAN_DEST_BURST_4 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare Area from Memory to Flash.
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare Area from Flash to Memory.
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Write sequence are repeated four times,
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Memory to Flash.
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Read sequence are repeated four times,
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Flash to Memory.
	 */

	for (i = 0; i < size/CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data (and ECC) transfer: terminate the list here */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}

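/*
 * Execute one DMA transfer between the SLC data FIFO and memory:
 * set the DMA direction and burst mode in CFG, program the transfer
 * count for a new page access, start the descriptor chain built by
 * lpc32xx_nand_dma_configure(), then wait for the NAND device and the
 * DMA channel to finish before switching DMA and hardware ECC off.
 */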
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		(read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		(read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/* Write length for new transfers */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}

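/*
 * Convert the parity words gathered from the SLC ECC register into the
 * 3-byte-per-step format used by nand_correct_data(): each word is
 * shifted left by two, inverted, masked to 24 bits and stored most
 * significant byte first.
 */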
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;

	for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
	     i += CONFIG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i+2] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i+1] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i]   = (u8)(ce & 0xFF);
	}
	return 0;
}

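/*
 * ecc.calculate hook: the per-step parity for the whole page has
 * already been collected into ecc_buffer by the DMA descriptor chain,
 * so only the format conversion into ecc_code remains to be done here.
 */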
static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}

/*
 * Enables and prepares SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}

/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd:	MTD block structure
 * dat:	raw data read from the chip
 * read_ecc:	ECC from the chip
 * calc_ecc:	the ECC calculated from the raw data
 *
 * Detect and correct single-bit errors, one 256-byte ECC step at a time,
 * for a whole page.
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0 ; i < ECCSTEPS ; i++) {
		/* Correct the current 256-byte step against its 3 ECC bytes */
		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;

		ret2 += ret1;

		/* Advance to the next ECC step */
		r += CONFIG_SYS_NAND_ECCBYTES;
		c += CONFIG_SYS_NAND_ECCBYTES;
		data_offset += CONFIG_SYS_NAND_ECCSIZE;
	}

	return ret2;
}
#endif

#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}
#else
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}
#endif

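/* Read a single byte; the SLC data register must be accessed 32 bits wide */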
static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}

#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}
#else
static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif

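/* Write a single byte; the SLC data register must be accessed 32 bits wide */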
static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}

#if defined(CONFIG_DMA_LPC32XX)
/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" (table 173 notes
	 * and section 9.7), the SLC NAND controller and the DMA engine allow
	 * a whole page to be transferred in a single DMA transaction using
	 * the DMA controller's scatter/gather (linked list) mode; the ECC is
	 * read out without any software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0)
		mtd->ecc_stats.failed++;
	else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" (table 173 notes
	 * and section 9.7), the SLC NAND controller and the DMA engine allow
	 * a whole page to be transferred in a single DMA transaction using
	 * the DMA controller's scatter/gather (linked list) mode; the ECC is
	 * read out without any software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#endif

/*
 * LPC32xx has only one SLC NAND controller; CONFIG_SYS_NAND_SELF_INIT
 * is intentionally not used, so that this function can be reused by
 * both the SPL NAND and the U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl  = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte  = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX)
	/* Hardware ECC calculation is supported when DMA driver is selected */
	lpc32xx_chip->ecc.mode		= NAND_ECC_HW;

	lpc32xx_chip->read_buf		= lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf		= lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate	= lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct	= lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl		= lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay	= 2000;

	lpc32xx_chip->ecc.read_page	= lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page	= lpc32xx_write_page_hwecc;
	lpc32xx_chip->options		|= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see LPC32x0 User Manual,
	 * note after SLC_ECC register description (UM10326, p.198)
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf   = lpc32xx_read_buf;
	lpc32xx_chip->write_buf  = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined for both small and large page NAND
	 * flash devices.
	 */
	lpc32xx_chip->ecc.size     = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes    = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}