xref: /openbmc/linux/drivers/net/ieee802154/mcr20a.c (revision 4a075bd4)
1 /*
2  * Driver for NXP MCR20A 802.15.4 Wireless-PAN Networking controller
3  *
4  * Copyright (C) 2018 Xue Liu <liuxuenetmail@gmail.com>
5  *
6  * This program is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License version 2
8  * as published by the Free Software Foundation.
9  *
10  * This program is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13  * GNU General Public License for more details.
14  *
15  */
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 #include <linux/gpio/consumer.h>
19 #include <linux/spi/spi.h>
20 #include <linux/workqueue.h>
21 #include <linux/interrupt.h>
22 #include <linux/irq.h>
23 #include <linux/skbuff.h>
24 #include <linux/of_gpio.h>
25 #include <linux/regmap.h>
26 #include <linux/ieee802154.h>
27 #include <linux/debugfs.h>
28 
29 #include <net/mac802154.h>
30 #include <net/cfg802154.h>
31 
32 #include <linux/device.h>
33 
34 #include "mcr20a.h"
35 
36 #define	SPI_COMMAND_BUFFER		3
37 
38 #define REGISTER_READ			BIT(7)
39 #define REGISTER_WRITE			(0 << 7)
40 #define REGISTER_ACCESS			(0 << 6)
41 #define PACKET_BUFF_BURST_ACCESS	BIT(6)
42 #define PACKET_BUFF_BYTE_ACCESS		BIT(5)
43 
44 #define MCR20A_WRITE_REG(x)		(x)
45 #define MCR20A_READ_REG(x)		(REGISTER_READ | (x))
46 #define MCR20A_BURST_READ_PACKET_BUF	(0xC0)
47 #define MCR20A_BURST_WRITE_PACKET_BUF	(0x40)
48 
49 #define MCR20A_CMD_REG		0x80
50 #define MCR20A_CMD_REG_MASK	0x3f
51 #define MCR20A_CMD_WRITE	0x40
52 #define MCR20A_CMD_FB		0x20
53 
54 /* Number of Interrupt Request Status Register */
55 #define MCR20A_IRQSTS_NUM 2 /* only IRQ_STS1 and IRQ_STS2 */
56 
/* MCR20A CCA Type */
/* Values are written into PHY_CTRL4's CCATYPE field by
 * mcr20a_set_cca_mode().
 */
enum {
	MCR20A_CCA_ED,	  // energy detect - CCA bit not active,
			  // not to be used for T and CCCA sequences
	MCR20A_CCA_MODE1, // energy detect - CCA bit ACTIVE
	MCR20A_CCA_MODE2, // 802.15.4 compliant signal detect - CCA bit ACTIVE
	MCR20A_CCA_MODE3  // MODE1 and MODE2 combined (AND/OR via IAR_CCA_CTRL)
};
65 
/* Transceiver sequence selector, written into PHY_CTRL1's XCVSEQ field
 * to start/stop autonomous sequences (idle, RX, TX, CCA, ...).
 */
enum {
	MCR20A_XCVSEQ_IDLE	= 0x00,
	MCR20A_XCVSEQ_RX	= 0x01,
	MCR20A_XCVSEQ_TX	= 0x02,
	MCR20A_XCVSEQ_CCA	= 0x03,
	MCR20A_XCVSEQ_TR	= 0x04,
	MCR20A_XCVSEQ_CCCA	= 0x05,
};
74 
75 /* IEEE-802.15.4 defined constants (2.4 GHz logical channels) */
76 #define	MCR20A_MIN_CHANNEL	(11)
77 #define	MCR20A_MAX_CHANNEL	(26)
78 #define	MCR20A_CHANNEL_SPACING	(5)
79 
80 /* MCR20A CCA Threshold constans */
81 #define MCR20A_MIN_CCA_THRESHOLD (0x6EU)
82 #define MCR20A_MAX_CCA_THRESHOLD (0x00U)
83 
84 /* version 0C */
85 #define MCR20A_OVERWRITE_VERSION (0x0C)
86 
/* MCR20A PLL configurations */
/* Integer part of the PLL frequency word, one entry per 2.4 GHz channel
 * (11..26); indexed with (channel - 11) by mcr20a_set_channel().
 */
static const u8  PLL_INT[16] = {
	/* 2405 */ 0x0B,	/* 2410 */ 0x0B,	/* 2415 */ 0x0B,
	/* 2420 */ 0x0B,	/* 2425 */ 0x0B,	/* 2430 */ 0x0B,
	/* 2435 */ 0x0C,	/* 2440 */ 0x0C,	/* 2445 */ 0x0C,
	/* 2450 */ 0x0C,	/* 2455 */ 0x0C,	/* 2460 */ 0x0C,
	/* 2465 */ 0x0D,	/* 2470 */ 0x0D,	/* 2475 */ 0x0D,
	/* 2480 */ 0x0D
};

/* Fractional part of the PLL frequency word, same indexing; written to
 * DAR_PLL_FRAC0_MSB (the LSB is always programmed to 0x00).
 */
static const u8 PLL_FRAC[16] = {
	/* 2405 */ 0x28,	/* 2410 */ 0x50,	/* 2415 */ 0x78,
	/* 2420 */ 0xA0,	/* 2425 */ 0xC8,	/* 2430 */ 0xF0,
	/* 2435 */ 0x18,	/* 2440 */ 0x40,	/* 2445 */ 0x68,
	/* 2450 */ 0x90,	/* 2455 */ 0xB8,	/* 2460 */ 0xE0,
	/* 2465 */ 0x08,	/* 2470 */ 0x30,	/* 2475 */ 0x58,
	/* 2480 */ 0x80
};
105 
/* Indirect-register overwrite table applied once during PHY init
 * (see DAR_OVERWRITE_VER / MCR20A_OVERWRITE_VERSION 0x0C).
 * NOTE(review): the identifier is spelled "mar20a", likely a typo for
 * "mcr20a"; kept as-is since it is referenced elsewhere in the file.
 */
static const struct reg_sequence mar20a_iar_overwrites[] = {
	{ IAR_MISC_PAD_CTRL,	0x02 },
	{ IAR_VCO_CTRL1,	0xB3 },
	{ IAR_VCO_CTRL2,	0x07 },
	{ IAR_PA_TUNING,	0x71 },
	{ IAR_CHF_IBUF,		0x2F },
	{ IAR_CHF_QBUF,		0x2F },
	{ IAR_CHF_IRIN,		0x24 },
	{ IAR_CHF_QRIN,		0x24 },
	{ IAR_CHF_IL,		0x24 },
	{ IAR_CHF_QL,		0x24 },
	{ IAR_CHF_CC1,		0x32 },
	{ IAR_CHF_CCL,		0x1D },
	{ IAR_CHF_CC2,		0x2D },
	{ IAR_CHF_IROUT,	0x24 },
	{ IAR_CHF_QROUT,	0x24 },
	{ IAR_PA_CAL,		0x28 },
	{ IAR_AGC_THR1,		0x55 },
	{ IAR_AGC_THR2,		0x2D },
	{ IAR_ATT_RSSI1,	0x5F },
	{ IAR_ATT_RSSI2,	0x8F },
	{ IAR_RSSI_OFFSET,	0x61 },
	{ IAR_CHF_PMA_GAIN,	0x03 },
	{ IAR_CCA1_THRESH,	0x50 },
	{ IAR_CORR_NVAL,	0x13 },
	{ IAR_ACKDELAY,		0x3D },
};
133 
134 #define MCR20A_VALID_CHANNELS (0x07FFF800)
135 #define MCR20A_MAX_BUF		(127)
136 
137 #define printdev(X) (&X->spi->dev)
138 
139 /* regmap information for Direct Access Register (DAR) access */
140 #define MCR20A_DAR_WRITE	0x01
141 #define MCR20A_DAR_READ		0x00
142 #define MCR20A_DAR_NUMREGS	0x3F
143 
144 /* regmap information for Indirect Access Register (IAR) access */
145 #define MCR20A_IAR_ACCESS	0x80
146 #define MCR20A_IAR_NUMREGS	0xBEFF
147 
148 /* Read/Write SPI Commands for DAR and IAR registers. */
149 #define MCR20A_READSHORT(reg)	((reg) << 1)
150 #define MCR20A_WRITESHORT(reg)	((reg) << 1 | 1)
151 #define MCR20A_READLONG(reg)	(1 << 15 | (reg) << 5)
152 #define MCR20A_WRITELONG(reg)	(1 << 15 | (reg) << 5 | 1 << 4)
153 
154 /* Type definitions for link configuration of instantiable layers  */
155 #define MCR20A_PHY_INDIRECT_QUEUE_SIZE (12)
156 
/*
 * regmap .writeable_reg callback for the Direct Access Registers (DAR).
 * Whitelist of DAR registers the driver may write; anything not listed
 * (read-only status/result registers) is rejected by regmap.
 */
static bool
mcr20a_dar_writeable(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case DAR_IRQ_STS1:
	case DAR_IRQ_STS2:
	case DAR_IRQ_STS3:
	case DAR_PHY_CTRL1:
	case DAR_PHY_CTRL2:
	case DAR_PHY_CTRL3:
	case DAR_PHY_CTRL4:
	case DAR_SRC_CTRL:
	case DAR_SRC_ADDRS_SUM_LSB:
	case DAR_SRC_ADDRS_SUM_MSB:
	case DAR_T3CMP_LSB:
	case DAR_T3CMP_MSB:
	case DAR_T3CMP_USB:
	case DAR_T2PRIMECMP_LSB:
	case DAR_T2PRIMECMP_MSB:
	case DAR_T1CMP_LSB:
	case DAR_T1CMP_MSB:
	case DAR_T1CMP_USB:
	case DAR_T2CMP_LSB:
	case DAR_T2CMP_MSB:
	case DAR_T2CMP_USB:
	case DAR_T4CMP_LSB:
	case DAR_T4CMP_MSB:
	case DAR_T4CMP_USB:
	case DAR_PLL_INT0:
	case DAR_PLL_FRAC0_LSB:
	case DAR_PLL_FRAC0_MSB:
	case DAR_PA_PWR:
	/* no DAR_ACM */
	case DAR_OVERWRITE_VER:
	case DAR_CLK_OUT_CTRL:
	case DAR_PWR_MODES:
		return true;
	default:
		return false;
	}
}
198 
199 static bool
200 mcr20a_dar_readable(struct device *dev, unsigned int reg)
201 {
202 	bool rc;
203 
204 	/* all writeable are also readable */
205 	rc = mcr20a_dar_writeable(dev, reg);
206 	if (rc)
207 		return rc;
208 
209 	/* readonly regs */
210 	switch (reg) {
211 	case DAR_RX_FRM_LEN:
212 	case DAR_CCA1_ED_FNL:
213 	case DAR_EVENT_TMR_LSB:
214 	case DAR_EVENT_TMR_MSB:
215 	case DAR_EVENT_TMR_USB:
216 	case DAR_TIMESTAMP_LSB:
217 	case DAR_TIMESTAMP_MSB:
218 	case DAR_TIMESTAMP_USB:
219 	case DAR_SEQ_STATE:
220 	case DAR_LQI_VALUE:
221 	case DAR_RSSI_CCA_CONT:
222 		return true;
223 	default:
224 		return false;
225 	}
226 }
227 
228 static bool
229 mcr20a_dar_volatile(struct device *dev, unsigned int reg)
230 {
231 	/* can be changed during runtime */
232 	switch (reg) {
233 	case DAR_IRQ_STS1:
234 	case DAR_IRQ_STS2:
235 	case DAR_IRQ_STS3:
236 	/* use them in spi_async and regmap so it's volatile */
237 		return true;
238 	default:
239 		return false;
240 	}
241 }
242 
243 static bool
244 mcr20a_dar_precious(struct device *dev, unsigned int reg)
245 {
246 	/* don't clear irq line on read */
247 	switch (reg) {
248 	case DAR_IRQ_STS1:
249 	case DAR_IRQ_STS2:
250 	case DAR_IRQ_STS3:
251 		return true;
252 	default:
253 		return false;
254 	}
255 }
256 
/* regmap configuration for the 8-bit-address Direct Access Registers:
 * one command byte (access/read-write flags in the top bits) followed
 * by one data byte.
 */
static const struct regmap_config mcr20a_dar_regmap = {
	.name			= "mcr20a_dar",
	.reg_bits		= 8,
	.val_bits		= 8,
	.write_flag_mask	= REGISTER_ACCESS | REGISTER_WRITE,
	.read_flag_mask		= REGISTER_ACCESS | REGISTER_READ,
	.cache_type		= REGCACHE_RBTREE,
	.writeable_reg		= mcr20a_dar_writeable,
	.readable_reg		= mcr20a_dar_readable,
	.volatile_reg		= mcr20a_dar_volatile,
	.precious_reg		= mcr20a_dar_precious,
	.fast_io		= true,
	.can_multi_write	= true,
};
271 
/*
 * regmap .writeable_reg callback for the Indirect Access Registers
 * (IAR). Whitelist of IAR registers the driver may write.
 */
static bool
mcr20a_iar_writeable(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case IAR_XTAL_TRIM:
	case IAR_PMC_LP_TRIM:
	case IAR_MACPANID0_LSB:
	case IAR_MACPANID0_MSB:
	case IAR_MACSHORTADDRS0_LSB:
	case IAR_MACSHORTADDRS0_MSB:
	case IAR_MACLONGADDRS0_0:
	case IAR_MACLONGADDRS0_8:
	case IAR_MACLONGADDRS0_16:
	case IAR_MACLONGADDRS0_24:
	case IAR_MACLONGADDRS0_32:
	case IAR_MACLONGADDRS0_40:
	case IAR_MACLONGADDRS0_48:
	case IAR_MACLONGADDRS0_56:
	case IAR_RX_FRAME_FILTER:
	case IAR_PLL_INT1:
	case IAR_PLL_FRAC1_LSB:
	case IAR_PLL_FRAC1_MSB:
	case IAR_MACPANID1_LSB:
	case IAR_MACPANID1_MSB:
	case IAR_MACSHORTADDRS1_LSB:
	case IAR_MACSHORTADDRS1_MSB:
	case IAR_MACLONGADDRS1_0:
	case IAR_MACLONGADDRS1_8:
	case IAR_MACLONGADDRS1_16:
	case IAR_MACLONGADDRS1_24:
	case IAR_MACLONGADDRS1_32:
	case IAR_MACLONGADDRS1_40:
	case IAR_MACLONGADDRS1_48:
	case IAR_MACLONGADDRS1_56:
	case IAR_DUAL_PAN_CTRL:
	case IAR_DUAL_PAN_DWELL:
	case IAR_CCA1_THRESH:
	case IAR_CCA1_ED_OFFSET_COMP:
	case IAR_LQI_OFFSET_COMP:
	case IAR_CCA_CTRL:
	case IAR_CCA2_CORR_PEAKS:
	case IAR_CCA2_CORR_THRESH:
	case IAR_TMR_PRESCALE:
	case IAR_ANT_PAD_CTRL:
	case IAR_MISC_PAD_CTRL:
	case IAR_BSM_CTRL:
	case IAR_RNG:
	case IAR_RX_WTR_MARK:
	case IAR_SOFT_RESET:
	case IAR_TXDELAY:
	case IAR_ACKDELAY:
	case IAR_CORR_NVAL:
	case IAR_ANT_AGC_CTRL:
	case IAR_AGC_THR1:
	case IAR_AGC_THR2:
	case IAR_PA_CAL:
	case IAR_ATT_RSSI1:
	case IAR_ATT_RSSI2:
	case IAR_RSSI_OFFSET:
	case IAR_XTAL_CTRL:
	case IAR_CHF_PMA_GAIN:
	case IAR_CHF_IBUF:
	case IAR_CHF_QBUF:
	case IAR_CHF_IRIN:
	case IAR_CHF_QRIN:
	case IAR_CHF_IL:
	case IAR_CHF_QL:
	case IAR_CHF_CC1:
	case IAR_CHF_CCL:
	case IAR_CHF_CC2:
	case IAR_CHF_IROUT:
	case IAR_CHF_QROUT:
	case IAR_PA_TUNING:
	case IAR_VCO_CTRL1:
	case IAR_VCO_CTRL2:
		return true;
	default:
		return false;
	}
}
352 
353 static bool
354 mcr20a_iar_readable(struct device *dev, unsigned int reg)
355 {
356 	bool rc;
357 
358 	/* all writeable are also readable */
359 	rc = mcr20a_iar_writeable(dev, reg);
360 	if (rc)
361 		return rc;
362 
363 	/* readonly regs */
364 	switch (reg) {
365 	case IAR_PART_ID:
366 	case IAR_DUAL_PAN_STS:
367 	case IAR_RX_BYTE_COUNT:
368 	case IAR_FILTERFAIL_CODE1:
369 	case IAR_FILTERFAIL_CODE2:
370 	case IAR_RSSI:
371 		return true;
372 	default:
373 		return false;
374 	}
375 }
376 
377 static bool
378 mcr20a_iar_volatile(struct device *dev, unsigned int reg)
379 {
380 /* can be changed during runtime */
381 	switch (reg) {
382 	case IAR_DUAL_PAN_STS:
383 	case IAR_RX_BYTE_COUNT:
384 	case IAR_FILTERFAIL_CODE1:
385 	case IAR_FILTERFAIL_CODE2:
386 	case IAR_RSSI:
387 		return true;
388 	default:
389 		return false;
390 	}
391 }
392 
/* regmap configuration for the Indirect Access Registers: a 16-bit
 * register field carrying the IAR index flag plus the 8-bit indirect
 * address, followed by one data byte.
 */
static const struct regmap_config mcr20a_iar_regmap = {
	.name			= "mcr20a_iar",
	.reg_bits		= 16,
	.val_bits		= 8,
	.write_flag_mask	= REGISTER_ACCESS | REGISTER_WRITE | IAR_INDEX,
	.read_flag_mask		= REGISTER_ACCESS | REGISTER_READ  | IAR_INDEX,
	.cache_type		= REGCACHE_RBTREE,
	.writeable_reg		= mcr20a_iar_writeable,
	.readable_reg		= mcr20a_iar_readable,
	.volatile_reg		= mcr20a_iar_volatile,
	.fast_io		= true,
};
405 
/* Per-device driver state, shared between the mac802154 callbacks and
 * the asynchronous SPI completion chain. The pre-built spi_message
 * objects below are reused for every transfer, so only one of each may
 * be in flight at a time.
 */
struct mcr20a_local {
	struct spi_device *spi;

	struct ieee802154_hw *hw;
	struct regmap *regmap_dar;	/* Direct Access Registers */
	struct regmap *regmap_iar;	/* Indirect Access Registers */

	u8 *buf;

	/* set in mcr20a_xmit(), cleared in the IRQ completion path */
	bool is_tx;

	/* for writing tx buffer */
	struct spi_message tx_buf_msg;
	u8 tx_header[1];
	/* burst buffer write command */
	struct spi_transfer tx_xfer_header;
	u8 tx_len[1];
	/* len of tx packet */
	struct spi_transfer tx_xfer_len;
	/* data of tx packet */
	struct spi_transfer tx_xfer_buf;
	struct sk_buff *tx_skb;

	/* generic command+data register message, also used to read the
	 * length of the rxfifo
	 */
	struct spi_message reg_msg;
	u8 reg_cmd[1];
	u8 reg_data[MCR20A_IRQSTS_NUM];
	struct spi_transfer reg_xfer_cmd;
	struct spi_transfer reg_xfer_data;

	/* receive handling */
	struct spi_message rx_buf_msg;
	u8 rx_header[1];	/* burst buffer read command */
	struct spi_transfer rx_xfer_header;
	u8 rx_lqi[1];		/* LQI byte transferred after the payload */
	struct spi_transfer rx_xfer_lqi;
	u8 rx_buf[MCR20A_MAX_BUF];
	struct spi_transfer rx_xfer_buf;

	/* isr handling for reading intstat */
	struct spi_message irq_msg;
	u8 irq_header[1];
	u8 irq_data[MCR20A_IRQSTS_NUM];
	struct spi_transfer irq_xfer_data;
	struct spi_transfer irq_xfer_header;
};
452 
453 static void
454 mcr20a_write_tx_buf_complete(void *context)
455 {
456 	struct mcr20a_local *lp = context;
457 	int ret;
458 
459 	dev_dbg(printdev(lp), "%s\n", __func__);
460 
461 	lp->reg_msg.complete = NULL;
462 	lp->reg_cmd[0]	= MCR20A_WRITE_REG(DAR_PHY_CTRL1);
463 	lp->reg_data[0] = MCR20A_XCVSEQ_TX;
464 	lp->reg_xfer_data.len = 1;
465 
466 	ret = spi_async(lp->spi, &lp->reg_msg);
467 	if (ret)
468 		dev_err(printdev(lp), "failed to set SEQ TX\n");
469 }
470 
471 static int
472 mcr20a_xmit(struct ieee802154_hw *hw, struct sk_buff *skb)
473 {
474 	struct mcr20a_local *lp = hw->priv;
475 
476 	dev_dbg(printdev(lp), "%s\n", __func__);
477 
478 	lp->tx_skb = skb;
479 
480 	print_hex_dump_debug("mcr20a tx: ", DUMP_PREFIX_OFFSET, 16, 1,
481 			     skb->data, skb->len, 0);
482 
483 	lp->is_tx = 1;
484 
485 	lp->reg_msg.complete	= NULL;
486 	lp->reg_cmd[0]		= MCR20A_WRITE_REG(DAR_PHY_CTRL1);
487 	lp->reg_data[0]		= MCR20A_XCVSEQ_IDLE;
488 	lp->reg_xfer_data.len	= 1;
489 
490 	return spi_async(lp->spi, &lp->reg_msg);
491 }
492 
493 static int
494 mcr20a_ed(struct ieee802154_hw *hw, u8 *level)
495 {
496 	WARN_ON(!level);
497 	*level = 0xbe;
498 	return 0;
499 }
500 
501 static int
502 mcr20a_set_channel(struct ieee802154_hw *hw, u8 page, u8 channel)
503 {
504 	struct mcr20a_local *lp = hw->priv;
505 	int ret;
506 
507 	dev_dbg(printdev(lp), "%s\n", __func__);
508 
509 	/* freqency = ((PLL_INT+64) + (PLL_FRAC/65536)) * 32 MHz */
510 	ret = regmap_write(lp->regmap_dar, DAR_PLL_INT0, PLL_INT[channel - 11]);
511 	if (ret)
512 		return ret;
513 	ret = regmap_write(lp->regmap_dar, DAR_PLL_FRAC0_LSB, 0x00);
514 	if (ret)
515 		return ret;
516 	ret = regmap_write(lp->regmap_dar, DAR_PLL_FRAC0_MSB,
517 			   PLL_FRAC[channel - 11]);
518 	if (ret)
519 		return ret;
520 
521 	return 0;
522 }
523 
524 static int
525 mcr20a_start(struct ieee802154_hw *hw)
526 {
527 	struct mcr20a_local *lp = hw->priv;
528 	int ret;
529 
530 	dev_dbg(printdev(lp), "%s\n", __func__);
531 
532 	/* No slotted operation */
533 	dev_dbg(printdev(lp), "no slotted operation\n");
534 	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
535 				 DAR_PHY_CTRL1_SLOTTED, 0x0);
536 	if (ret < 0)
537 		return ret;
538 
539 	/* enable irq */
540 	enable_irq(lp->spi->irq);
541 
542 	/* Unmask SEQ interrupt */
543 	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL2,
544 				 DAR_PHY_CTRL2_SEQMSK, 0x0);
545 	if (ret < 0)
546 		return ret;
547 
548 	/* Start the RX sequence */
549 	dev_dbg(printdev(lp), "start the RX sequence\n");
550 	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
551 				 DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_RX);
552 	if (ret < 0)
553 		return ret;
554 
555 	return 0;
556 }
557 
558 static void
559 mcr20a_stop(struct ieee802154_hw *hw)
560 {
561 	struct mcr20a_local *lp = hw->priv;
562 
563 	dev_dbg(printdev(lp), "%s\n", __func__);
564 
565 	/* stop all running sequence */
566 	regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
567 			   DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_IDLE);
568 
569 	/* disable irq */
570 	disable_irq(lp->spi->irq);
571 }
572 
573 static int
574 mcr20a_set_hw_addr_filt(struct ieee802154_hw *hw,
575 			struct ieee802154_hw_addr_filt *filt,
576 			unsigned long changed)
577 {
578 	struct mcr20a_local *lp = hw->priv;
579 
580 	dev_dbg(printdev(lp), "%s\n", __func__);
581 
582 	if (changed & IEEE802154_AFILT_SADDR_CHANGED) {
583 		u16 addr = le16_to_cpu(filt->short_addr);
584 
585 		regmap_write(lp->regmap_iar, IAR_MACSHORTADDRS0_LSB, addr);
586 		regmap_write(lp->regmap_iar, IAR_MACSHORTADDRS0_MSB, addr >> 8);
587 	}
588 
589 	if (changed & IEEE802154_AFILT_PANID_CHANGED) {
590 		u16 pan = le16_to_cpu(filt->pan_id);
591 
592 		regmap_write(lp->regmap_iar, IAR_MACPANID0_LSB, pan);
593 		regmap_write(lp->regmap_iar, IAR_MACPANID0_MSB, pan >> 8);
594 	}
595 
596 	if (changed & IEEE802154_AFILT_IEEEADDR_CHANGED) {
597 		u8 addr[8], i;
598 
599 		memcpy(addr, &filt->ieee_addr, 8);
600 		for (i = 0; i < 8; i++)
601 			regmap_write(lp->regmap_iar,
602 				     IAR_MACLONGADDRS0_0 + i, addr[i]);
603 	}
604 
605 	if (changed & IEEE802154_AFILT_PANC_CHANGED) {
606 		if (filt->pan_coord) {
607 			regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
608 					   DAR_PHY_CTRL4_PANCORDNTR0, 0x10);
609 		} else {
610 			regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
611 					   DAR_PHY_CTRL4_PANCORDNTR0, 0x00);
612 		}
613 	}
614 
615 	return 0;
616 }
617 
/* -30 dBm to 10 dBm */
/* Supported TX powers in mbm, 2 dB steps. mcr20a_set_txpower() writes
 * the matching table index (+8, masked to 5 bits) into DAR_PA_PWR.
 */
#define MCR20A_MAX_TX_POWERS 0x14
static const s32 mcr20a_powers[MCR20A_MAX_TX_POWERS + 1] = {
	-3000, -2800, -2600, -2400, -2200, -2000, -1800, -1600, -1400,
	-1200, -1000, -800, -600, -400, -200, 0, 200, 400, 600, 800, 1000
};
624 
625 static int
626 mcr20a_set_txpower(struct ieee802154_hw *hw, s32 mbm)
627 {
628 	struct mcr20a_local *lp = hw->priv;
629 	u32 i;
630 
631 	dev_dbg(printdev(lp), "%s(%d)\n", __func__, mbm);
632 
633 	for (i = 0; i < lp->hw->phy->supported.tx_powers_size; i++) {
634 		if (lp->hw->phy->supported.tx_powers[i] == mbm)
635 			return regmap_write(lp->regmap_dar, DAR_PA_PWR,
636 					    ((i + 8) & 0x1F));
637 	}
638 
639 	return -EINVAL;
640 }
641 
/* One ED level per CCA1 threshold register value; the table is filled
 * with -i * 100 mbm entries by mcr20a_hw_setup().
 */
#define MCR20A_MAX_ED_LEVELS MCR20A_MIN_CCA_THRESHOLD
static s32 mcr20a_ed_levels[MCR20A_MAX_ED_LEVELS + 1];
644 
645 static int
646 mcr20a_set_cca_mode(struct ieee802154_hw *hw,
647 		    const struct wpan_phy_cca *cca)
648 {
649 	struct mcr20a_local *lp = hw->priv;
650 	unsigned int cca_mode = 0xff;
651 	bool cca_mode_and = false;
652 	int ret;
653 
654 	dev_dbg(printdev(lp), "%s\n", __func__);
655 
656 	/* mapping 802.15.4 to driver spec */
657 	switch (cca->mode) {
658 	case NL802154_CCA_ENERGY:
659 		cca_mode = MCR20A_CCA_MODE1;
660 		break;
661 	case NL802154_CCA_CARRIER:
662 		cca_mode = MCR20A_CCA_MODE2;
663 		break;
664 	case NL802154_CCA_ENERGY_CARRIER:
665 		switch (cca->opt) {
666 		case NL802154_CCA_OPT_ENERGY_CARRIER_AND:
667 			cca_mode = MCR20A_CCA_MODE3;
668 			cca_mode_and = true;
669 			break;
670 		case NL802154_CCA_OPT_ENERGY_CARRIER_OR:
671 			cca_mode = MCR20A_CCA_MODE3;
672 			cca_mode_and = false;
673 			break;
674 		default:
675 			return -EINVAL;
676 		}
677 		break;
678 	default:
679 		return -EINVAL;
680 	}
681 	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
682 				 DAR_PHY_CTRL4_CCATYPE_MASK,
683 				 cca_mode << DAR_PHY_CTRL4_CCATYPE_SHIFT);
684 	if (ret < 0)
685 		return ret;
686 
687 	if (cca_mode == MCR20A_CCA_MODE3) {
688 		if (cca_mode_and) {
689 			ret = regmap_update_bits(lp->regmap_iar, IAR_CCA_CTRL,
690 						 IAR_CCA_CTRL_CCA3_AND_NOT_OR,
691 						 0x08);
692 		} else {
693 			ret = regmap_update_bits(lp->regmap_iar,
694 						 IAR_CCA_CTRL,
695 						 IAR_CCA_CTRL_CCA3_AND_NOT_OR,
696 						 0x00);
697 		}
698 		if (ret < 0)
699 			return ret;
700 	}
701 
702 	return ret;
703 }
704 
705 static int
706 mcr20a_set_cca_ed_level(struct ieee802154_hw *hw, s32 mbm)
707 {
708 	struct mcr20a_local *lp = hw->priv;
709 	u32 i;
710 
711 	dev_dbg(printdev(lp), "%s\n", __func__);
712 
713 	for (i = 0; i < hw->phy->supported.cca_ed_levels_size; i++) {
714 		if (hw->phy->supported.cca_ed_levels[i] == mbm)
715 			return regmap_write(lp->regmap_iar, IAR_CCA1_THRESH, i);
716 	}
717 
718 	return 0;
719 }
720 
721 static int
722 mcr20a_set_promiscuous_mode(struct ieee802154_hw *hw, const bool on)
723 {
724 	struct mcr20a_local *lp = hw->priv;
725 	int ret;
726 	u8 rx_frame_filter_reg = 0x0;
727 
728 	dev_dbg(printdev(lp), "%s(%d)\n", __func__, on);
729 
730 	if (on) {
731 		/* All frame types accepted*/
732 		rx_frame_filter_reg &= ~(IAR_RX_FRAME_FLT_FRM_VER);
733 		rx_frame_filter_reg |= (IAR_RX_FRAME_FLT_ACK_FT |
734 				  IAR_RX_FRAME_FLT_NS_FT);
735 
736 		ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
737 					 DAR_PHY_CTRL4_PROMISCUOUS,
738 					 DAR_PHY_CTRL4_PROMISCUOUS);
739 		if (ret < 0)
740 			return ret;
741 
742 		ret = regmap_write(lp->regmap_iar, IAR_RX_FRAME_FILTER,
743 				   rx_frame_filter_reg);
744 		if (ret < 0)
745 			return ret;
746 	} else {
747 		ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
748 					 DAR_PHY_CTRL4_PROMISCUOUS, 0x0);
749 		if (ret < 0)
750 			return ret;
751 
752 		ret = regmap_write(lp->regmap_iar, IAR_RX_FRAME_FILTER,
753 				   IAR_RX_FRAME_FLT_FRM_VER |
754 				   IAR_RX_FRAME_FLT_BEACON_FT |
755 				   IAR_RX_FRAME_FLT_DATA_FT |
756 				   IAR_RX_FRAME_FLT_CMD_FT);
757 		if (ret < 0)
758 			return ret;
759 	}
760 
761 	return 0;
762 }
763 
/* Hardware operations registered with the mac802154 core; transmission
 * uses the asynchronous path (.xmit_async).
 */
static const struct ieee802154_ops mcr20a_hw_ops = {
	.owner			= THIS_MODULE,
	.xmit_async		= mcr20a_xmit,
	.ed			= mcr20a_ed,
	.set_channel		= mcr20a_set_channel,
	.start			= mcr20a_start,
	.stop			= mcr20a_stop,
	.set_hw_addr_filt	= mcr20a_set_hw_addr_filt,
	.set_txpower		= mcr20a_set_txpower,
	.set_cca_mode		= mcr20a_set_cca_mode,
	.set_cca_ed_level	= mcr20a_set_cca_ed_level,
	.set_promiscuous_mode	= mcr20a_set_promiscuous_mode,
};
777 
778 static int
779 mcr20a_request_rx(struct mcr20a_local *lp)
780 {
781 	dev_dbg(printdev(lp), "%s\n", __func__);
782 
783 	/* Start the RX sequence */
784 	regmap_update_bits_async(lp->regmap_dar, DAR_PHY_CTRL1,
785 				 DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_RX);
786 
787 	return 0;
788 }
789 
790 static void
791 mcr20a_handle_rx_read_buf_complete(void *context)
792 {
793 	struct mcr20a_local *lp = context;
794 	u8 len = lp->reg_data[0] & DAR_RX_FRAME_LENGTH_MASK;
795 	struct sk_buff *skb;
796 
797 	dev_dbg(printdev(lp), "%s\n", __func__);
798 
799 	dev_dbg(printdev(lp), "RX is done\n");
800 
801 	if (!ieee802154_is_valid_psdu_len(len)) {
802 		dev_vdbg(&lp->spi->dev, "corrupted frame received\n");
803 		len = IEEE802154_MTU;
804 	}
805 
806 	len = len - 2;  /* get rid of frame check field */
807 
808 	skb = dev_alloc_skb(len);
809 	if (!skb)
810 		return;
811 
812 	memcpy(skb_put(skb, len), lp->rx_buf, len);
813 	ieee802154_rx_irqsafe(lp->hw, skb, lp->rx_lqi[0]);
814 
815 	print_hex_dump_debug("mcr20a rx: ", DUMP_PREFIX_OFFSET, 16, 1,
816 			     lp->rx_buf, len, 0);
817 	pr_debug("mcr20a rx: lqi: %02hhx\n", lp->rx_lqi[0]);
818 
819 	/* start RX sequence */
820 	mcr20a_request_rx(lp);
821 }
822 
823 static void
824 mcr20a_handle_rx_read_len_complete(void *context)
825 {
826 	struct mcr20a_local *lp = context;
827 	u8 len;
828 	int ret;
829 
830 	dev_dbg(printdev(lp), "%s\n", __func__);
831 
832 	/* get the length of received frame */
833 	len = lp->reg_data[0] & DAR_RX_FRAME_LENGTH_MASK;
834 	dev_dbg(printdev(lp), "frame len : %d\n", len);
835 
836 	/* prepare to read the rx buf */
837 	lp->rx_buf_msg.complete = mcr20a_handle_rx_read_buf_complete;
838 	lp->rx_header[0] = MCR20A_BURST_READ_PACKET_BUF;
839 	lp->rx_xfer_buf.len = len;
840 
841 	ret = spi_async(lp->spi, &lp->rx_buf_msg);
842 	if (ret)
843 		dev_err(printdev(lp), "failed to read rx buffer length\n");
844 }
845 
846 static int
847 mcr20a_handle_rx(struct mcr20a_local *lp)
848 {
849 	dev_dbg(printdev(lp), "%s\n", __func__);
850 	lp->reg_msg.complete = mcr20a_handle_rx_read_len_complete;
851 	lp->reg_cmd[0] = MCR20A_READ_REG(DAR_RX_FRM_LEN);
852 	lp->reg_xfer_data.len	= 1;
853 
854 	return spi_async(lp->spi, &lp->reg_msg);
855 }
856 
857 static int
858 mcr20a_handle_tx_complete(struct mcr20a_local *lp)
859 {
860 	dev_dbg(printdev(lp), "%s\n", __func__);
861 
862 	ieee802154_xmit_complete(lp->hw, lp->tx_skb, false);
863 
864 	return mcr20a_request_rx(lp);
865 }
866 
867 static int
868 mcr20a_handle_tx(struct mcr20a_local *lp)
869 {
870 	int ret;
871 
872 	dev_dbg(printdev(lp), "%s\n", __func__);
873 
874 	/* write tx buffer */
875 	lp->tx_header[0]	= MCR20A_BURST_WRITE_PACKET_BUF;
876 	/* add 2 bytes of FCS */
877 	lp->tx_len[0]		= lp->tx_skb->len + 2;
878 	lp->tx_xfer_buf.tx_buf	= lp->tx_skb->data;
879 	/* add 1 byte psduLength */
880 	lp->tx_xfer_buf.len	= lp->tx_skb->len + 1;
881 
882 	ret = spi_async(lp->spi, &lp->tx_buf_msg);
883 	if (ret) {
884 		dev_err(printdev(lp), "SPI write Failed for TX buf\n");
885 		return ret;
886 	}
887 
888 	return 0;
889 }
890 
/*
 * SPI completion callback after the IRQ status bytes have been written
 * back (cleared). Re-enables the interrupt line and dispatches on the
 * combination of TXIRQ/RXIRQ/SEQIRQ bits captured in irq_data.
 *
 * NOTE(review): irq_data[] is indexed with the DAR_IRQ_STS* register
 * addresses — this assumes DAR_IRQ_STS1 is 0 and DAR_IRQ_STS2 is 1;
 * verify against mcr20a.h.
 * NOTE(review): the status byte is masked with DAR_PHY_CTRL1_XCVSEQ_MASK
 * but compared against DAR_IRQSTS1_* bit combinations — presumably the
 * two masks coincide; confirm against the register layout.
 */
static void
mcr20a_irq_clean_complete(void *context)
{
	struct mcr20a_local *lp = context;
	u8 seq_state = lp->irq_data[DAR_IRQ_STS1] & DAR_PHY_CTRL1_XCVSEQ_MASK;

	dev_dbg(printdev(lp), "%s\n", __func__);

	enable_irq(lp->spi->irq);

	dev_dbg(printdev(lp), "IRQ STA1 (%02x) STA2 (%02x)\n",
		lp->irq_data[DAR_IRQ_STS1], lp->irq_data[DAR_IRQ_STS2]);

	switch (seq_state) {
	/* TX IRQ, RX IRQ and SEQ IRQ */
	case (DAR_IRQSTS1_TXIRQ | DAR_IRQSTS1_SEQIRQ):
		/* TX sequence finished without a matching RX (no ACK) */
		if (lp->is_tx) {
			lp->is_tx = 0;
			dev_dbg(printdev(lp), "TX is done. No ACK\n");
			mcr20a_handle_tx_complete(lp);
		}
		break;
	case (DAR_IRQSTS1_RXIRQ | DAR_IRQSTS1_SEQIRQ):
		/* rx is starting */
		dev_dbg(printdev(lp), "RX is starting\n");
		mcr20a_handle_rx(lp);
		break;
	case (DAR_IRQSTS1_RXIRQ | DAR_IRQSTS1_TXIRQ | DAR_IRQSTS1_SEQIRQ):
		/* both TX and RX flagged: TX-with-ACK completion if a
		 * transmission was in flight, otherwise a new reception
		 */
		if (lp->is_tx) {
			/* tx is done */
			lp->is_tx = 0;
			dev_dbg(printdev(lp), "TX is done. Get ACK\n");
			mcr20a_handle_tx_complete(lp);
		} else {
			/* rx is starting */
			dev_dbg(printdev(lp), "RX is starting\n");
			mcr20a_handle_rx(lp);
		}
		break;
	case (DAR_IRQSTS1_SEQIRQ):
		/* bare sequence IRQ: the idle forced by mcr20a_xmit()
		 * completed — start writing the TX buffer now
		 */
		if (lp->is_tx) {
			dev_dbg(printdev(lp), "TX is starting\n");
			mcr20a_handle_tx(lp);
		} else {
			dev_dbg(printdev(lp), "MCR20A is stop\n");
		}
		break;
	}
}
940 
941 static void mcr20a_irq_status_complete(void *context)
942 {
943 	int ret;
944 	struct mcr20a_local *lp = context;
945 
946 	dev_dbg(printdev(lp), "%s\n", __func__);
947 	regmap_update_bits_async(lp->regmap_dar, DAR_PHY_CTRL1,
948 				 DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_IDLE);
949 
950 	lp->reg_msg.complete = mcr20a_irq_clean_complete;
951 	lp->reg_cmd[0] = MCR20A_WRITE_REG(DAR_IRQ_STS1);
952 	memcpy(lp->reg_data, lp->irq_data, MCR20A_IRQSTS_NUM);
953 	lp->reg_xfer_data.len = MCR20A_IRQSTS_NUM;
954 
955 	ret = spi_async(lp->spi, &lp->reg_msg);
956 
957 	if (ret)
958 		dev_err(printdev(lp), "failed to clean irq status\n");
959 }
960 
961 static irqreturn_t mcr20a_irq_isr(int irq, void *data)
962 {
963 	struct mcr20a_local *lp = data;
964 	int ret;
965 
966 	disable_irq_nosync(irq);
967 
968 	lp->irq_header[0] = MCR20A_READ_REG(DAR_IRQ_STS1);
969 	/* read IRQSTSx */
970 	ret = spi_async(lp->spi, &lp->irq_msg);
971 	if (ret) {
972 		enable_irq(irq);
973 		return IRQ_NONE;
974 	}
975 
976 	return IRQ_HANDLED;
977 }
978 
979 static void mcr20a_hw_setup(struct mcr20a_local *lp)
980 {
981 	u8 i;
982 	struct ieee802154_hw *hw = lp->hw;
983 	struct wpan_phy *phy = lp->hw->phy;
984 
985 	dev_dbg(printdev(lp), "%s\n", __func__);
986 
987 	phy->symbol_duration = 16;
988 	phy->lifs_period = 40;
989 	phy->sifs_period = 12;
990 
991 	hw->flags = IEEE802154_HW_TX_OMIT_CKSUM |
992 			IEEE802154_HW_AFILT |
993 			IEEE802154_HW_PROMISCUOUS;
994 
995 	phy->flags = WPAN_PHY_FLAG_TXPOWER | WPAN_PHY_FLAG_CCA_ED_LEVEL |
996 			WPAN_PHY_FLAG_CCA_MODE;
997 
998 	phy->supported.cca_modes = BIT(NL802154_CCA_ENERGY) |
999 		BIT(NL802154_CCA_CARRIER) | BIT(NL802154_CCA_ENERGY_CARRIER);
1000 	phy->supported.cca_opts = BIT(NL802154_CCA_OPT_ENERGY_CARRIER_AND) |
1001 		BIT(NL802154_CCA_OPT_ENERGY_CARRIER_OR);
1002 
1003 	/* initiating cca_ed_levels */
1004 	for (i = MCR20A_MAX_CCA_THRESHOLD; i < MCR20A_MIN_CCA_THRESHOLD + 1;
1005 	      ++i) {
1006 		mcr20a_ed_levels[i] =  -i * 100;
1007 	}
1008 
1009 	phy->supported.cca_ed_levels = mcr20a_ed_levels;
1010 	phy->supported.cca_ed_levels_size = ARRAY_SIZE(mcr20a_ed_levels);
1011 
1012 	phy->cca.mode = NL802154_CCA_ENERGY;
1013 
1014 	phy->supported.channels[0] = MCR20A_VALID_CHANNELS;
1015 	phy->current_page = 0;
1016 	/* MCR20A default reset value */
1017 	phy->current_channel = 20;
1018 	phy->symbol_duration = 16;
1019 	phy->supported.tx_powers = mcr20a_powers;
1020 	phy->supported.tx_powers_size = ARRAY_SIZE(mcr20a_powers);
1021 	phy->cca_ed_level = phy->supported.cca_ed_levels[75];
1022 	phy->transmit_power = phy->supported.tx_powers[0x0F];
1023 }
1024 
1025 static void
1026 mcr20a_setup_tx_spi_messages(struct mcr20a_local *lp)
1027 {
1028 	spi_message_init(&lp->tx_buf_msg);
1029 	lp->tx_buf_msg.context = lp;
1030 	lp->tx_buf_msg.complete = mcr20a_write_tx_buf_complete;
1031 
1032 	lp->tx_xfer_header.len = 1;
1033 	lp->tx_xfer_header.tx_buf = lp->tx_header;
1034 
1035 	lp->tx_xfer_len.len = 1;
1036 	lp->tx_xfer_len.tx_buf = lp->tx_len;
1037 
1038 	spi_message_add_tail(&lp->tx_xfer_header, &lp->tx_buf_msg);
1039 	spi_message_add_tail(&lp->tx_xfer_len, &lp->tx_buf_msg);
1040 	spi_message_add_tail(&lp->tx_xfer_buf, &lp->tx_buf_msg);
1041 }
1042 
1043 static void
1044 mcr20a_setup_rx_spi_messages(struct mcr20a_local *lp)
1045 {
1046 	spi_message_init(&lp->reg_msg);
1047 	lp->reg_msg.context = lp;
1048 
1049 	lp->reg_xfer_cmd.len = 1;
1050 	lp->reg_xfer_cmd.tx_buf = lp->reg_cmd;
1051 	lp->reg_xfer_cmd.rx_buf = lp->reg_cmd;
1052 
1053 	lp->reg_xfer_data.rx_buf = lp->reg_data;
1054 	lp->reg_xfer_data.tx_buf = lp->reg_data;
1055 
1056 	spi_message_add_tail(&lp->reg_xfer_cmd, &lp->reg_msg);
1057 	spi_message_add_tail(&lp->reg_xfer_data, &lp->reg_msg);
1058 
1059 	spi_message_init(&lp->rx_buf_msg);
1060 	lp->rx_buf_msg.context = lp;
1061 	lp->rx_buf_msg.complete = mcr20a_handle_rx_read_buf_complete;
1062 	lp->rx_xfer_header.len = 1;
1063 	lp->rx_xfer_header.tx_buf = lp->rx_header;
1064 	lp->rx_xfer_header.rx_buf = lp->rx_header;
1065 
1066 	lp->rx_xfer_buf.rx_buf = lp->rx_buf;
1067 
1068 	lp->rx_xfer_lqi.len = 1;
1069 	lp->rx_xfer_lqi.rx_buf = lp->rx_lqi;
1070 
1071 	spi_message_add_tail(&lp->rx_xfer_header, &lp->rx_buf_msg);
1072 	spi_message_add_tail(&lp->rx_xfer_buf, &lp->rx_buf_msg);
1073 	spi_message_add_tail(&lp->rx_xfer_lqi, &lp->rx_buf_msg);
1074 }
1075 
1076 static void
1077 mcr20a_setup_irq_spi_messages(struct mcr20a_local *lp)
1078 {
1079 	spi_message_init(&lp->irq_msg);
1080 	lp->irq_msg.context		= lp;
1081 	lp->irq_msg.complete	= mcr20a_irq_status_complete;
1082 	lp->irq_xfer_header.len	= 1;
1083 	lp->irq_xfer_header.tx_buf = lp->irq_header;
1084 	lp->irq_xfer_header.rx_buf = lp->irq_header;
1085 
1086 	lp->irq_xfer_data.len	= MCR20A_IRQSTS_NUM;
1087 	lp->irq_xfer_data.rx_buf = lp->irq_data;
1088 
1089 	spi_message_add_tail(&lp->irq_xfer_header, &lp->irq_msg);
1090 	spi_message_add_tail(&lp->irq_xfer_data, &lp->irq_msg);
1091 }
1092 
1093 static int
1094 mcr20a_phy_init(struct mcr20a_local *lp)
1095 {
1096 	u8 index;
1097 	unsigned int phy_reg = 0;
1098 	int ret;
1099 
1100 	dev_dbg(printdev(lp), "%s\n", __func__);
1101 
1102 	/* Disable Tristate on COCO MISO for SPI reads */
1103 	ret = regmap_write(lp->regmap_iar, IAR_MISC_PAD_CTRL, 0x02);
1104 	if (ret)
1105 		goto err_ret;
1106 
1107 	/* Clear all PP IRQ bits in IRQSTS1 to avoid unexpected interrupts
1108 	 * immediately after init
1109 	 */
1110 	ret = regmap_write(lp->regmap_dar, DAR_IRQ_STS1, 0xEF);
1111 	if (ret)
1112 		goto err_ret;
1113 
1114 	/* Clear all PP IRQ bits in IRQSTS2 */
1115 	ret = regmap_write(lp->regmap_dar, DAR_IRQ_STS2,
1116 			   DAR_IRQSTS2_ASM_IRQ | DAR_IRQSTS2_PB_ERR_IRQ |
1117 			   DAR_IRQSTS2_WAKE_IRQ);
1118 	if (ret)
1119 		goto err_ret;
1120 
1121 	/* Disable all timer interrupts */
1122 	ret = regmap_write(lp->regmap_dar, DAR_IRQ_STS3, 0xFF);
1123 	if (ret)
1124 		goto err_ret;
1125 
1126 	/*  PHY_CTRL1 : default HW settings + AUTOACK enabled */
1127 	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
1128 				 DAR_PHY_CTRL1_AUTOACK, DAR_PHY_CTRL1_AUTOACK);
1129 
1130 	/*  PHY_CTRL2 : disable all interrupts */
1131 	ret = regmap_write(lp->regmap_dar, DAR_PHY_CTRL2, 0xFF);
1132 	if (ret)
1133 		goto err_ret;
1134 
1135 	/* PHY_CTRL3 : disable all timers and remaining interrupts */
1136 	ret = regmap_write(lp->regmap_dar, DAR_PHY_CTRL3,
1137 			   DAR_PHY_CTRL3_ASM_MSK | DAR_PHY_CTRL3_PB_ERR_MSK |
1138 			   DAR_PHY_CTRL3_WAKE_MSK);
1139 	if (ret)
1140 		goto err_ret;
1141 
1142 	/* SRC_CTRL : enable Acknowledge Frame Pending and
1143 	 * Source Address Matching Enable
1144 	 */
1145 	ret = regmap_write(lp->regmap_dar, DAR_SRC_CTRL,
1146 			   DAR_SRC_CTRL_ACK_FRM_PND |
1147 			   (DAR_SRC_CTRL_INDEX << DAR_SRC_CTRL_INDEX_SHIFT));
1148 	if (ret)
1149 		goto err_ret;
1150 
1151 	/*  RX_FRAME_FILTER */
1152 	/*  FRM_VER[1:0] = b11. Accept FrameVersion 0 and 1 packets */
1153 	ret = regmap_write(lp->regmap_iar, IAR_RX_FRAME_FILTER,
1154 			   IAR_RX_FRAME_FLT_FRM_VER |
1155 			   IAR_RX_FRAME_FLT_BEACON_FT |
1156 			   IAR_RX_FRAME_FLT_DATA_FT |
1157 			   IAR_RX_FRAME_FLT_CMD_FT);
1158 	if (ret)
1159 		goto err_ret;
1160 
1161 	dev_info(printdev(lp), "MCR20A DAR overwrites version: 0x%02x\n",
1162 		 MCR20A_OVERWRITE_VERSION);
1163 
1164 	/* Overwrites direct registers  */
1165 	ret = regmap_write(lp->regmap_dar, DAR_OVERWRITE_VER,
1166 			   MCR20A_OVERWRITE_VERSION);
1167 	if (ret)
1168 		goto err_ret;
1169 
1170 	/* Overwrites indirect registers  */
1171 	ret = regmap_multi_reg_write(lp->regmap_iar, mar20a_iar_overwrites,
1172 				     ARRAY_SIZE(mar20a_iar_overwrites));
1173 	if (ret)
1174 		goto err_ret;
1175 
1176 	/* Clear HW indirect queue */
1177 	dev_dbg(printdev(lp), "clear HW indirect queue\n");
1178 	for (index = 0; index < MCR20A_PHY_INDIRECT_QUEUE_SIZE; index++) {
1179 		phy_reg = (u8)(((index & DAR_SRC_CTRL_INDEX) <<
1180 			       DAR_SRC_CTRL_INDEX_SHIFT)
1181 			      | (DAR_SRC_CTRL_SRCADDR_EN)
1182 			      | (DAR_SRC_CTRL_INDEX_DISABLE));
1183 		ret = regmap_write(lp->regmap_dar, DAR_SRC_CTRL, phy_reg);
1184 		if (ret)
1185 			goto err_ret;
1186 		phy_reg = 0;
1187 	}
1188 
1189 	/* Assign HW Indirect hash table to PAN0 */
1190 	ret = regmap_read(lp->regmap_iar, IAR_DUAL_PAN_CTRL, &phy_reg);
1191 	if (ret)
1192 		goto err_ret;
1193 
1194 	/* Clear current lvl */
1195 	phy_reg &= ~IAR_DUAL_PAN_CTRL_DUAL_PAN_SAM_LVL_MSK;
1196 
1197 	/* Set new lvl */
1198 	phy_reg |= MCR20A_PHY_INDIRECT_QUEUE_SIZE <<
1199 		IAR_DUAL_PAN_CTRL_DUAL_PAN_SAM_LVL_SHIFT;
1200 	ret = regmap_write(lp->regmap_iar, IAR_DUAL_PAN_CTRL, phy_reg);
1201 	if (ret)
1202 		goto err_ret;
1203 
1204 	/* Set CCA threshold to -75 dBm */
1205 	ret = regmap_write(lp->regmap_iar, IAR_CCA1_THRESH, 0x4B);
1206 	if (ret)
1207 		goto err_ret;
1208 
1209 	/* Set prescaller to obtain 1 symbol (16us) timebase */
1210 	ret = regmap_write(lp->regmap_iar, IAR_TMR_PRESCALE, 0x05);
1211 	if (ret)
1212 		goto err_ret;
1213 
1214 	/* Enable autodoze mode. */
1215 	ret = regmap_update_bits(lp->regmap_dar, DAR_PWR_MODES,
1216 				 DAR_PWR_MODES_AUTODOZE,
1217 				 DAR_PWR_MODES_AUTODOZE);
1218 	if (ret)
1219 		goto err_ret;
1220 
1221 	/* Disable clk_out */
1222 	ret = regmap_update_bits(lp->regmap_dar, DAR_CLK_OUT_CTRL,
1223 				 DAR_CLK_OUT_CTRL_EN, 0x0);
1224 	if (ret)
1225 		goto err_ret;
1226 
1227 	return 0;
1228 
1229 err_ret:
1230 	return ret;
1231 }
1232 
1233 static int
1234 mcr20a_probe(struct spi_device *spi)
1235 {
1236 	struct ieee802154_hw *hw;
1237 	struct mcr20a_local *lp;
1238 	struct gpio_desc *rst_b;
1239 	int irq_type;
1240 	int ret = -ENOMEM;
1241 
1242 	dev_dbg(&spi->dev, "%s\n", __func__);
1243 
1244 	if (!spi->irq) {
1245 		dev_err(&spi->dev, "no IRQ specified\n");
1246 		return -EINVAL;
1247 	}
1248 
1249 	rst_b = devm_gpiod_get(&spi->dev, "rst_b", GPIOD_OUT_HIGH);
1250 	if (IS_ERR(rst_b)) {
1251 		ret = PTR_ERR(rst_b);
1252 		if (ret != -EPROBE_DEFER)
1253 			dev_err(&spi->dev, "Failed to get 'rst_b' gpio: %d", ret);
1254 		return ret;
1255 	}
1256 
1257 	/* reset mcr20a */
1258 	usleep_range(10, 20);
1259 	gpiod_set_value_cansleep(rst_b, 1);
1260 	usleep_range(10, 20);
1261 	gpiod_set_value_cansleep(rst_b, 0);
1262 	usleep_range(120, 240);
1263 
1264 	/* allocate ieee802154_hw and private data */
1265 	hw = ieee802154_alloc_hw(sizeof(*lp), &mcr20a_hw_ops);
1266 	if (!hw) {
1267 		dev_crit(&spi->dev, "ieee802154_alloc_hw failed\n");
1268 		return ret;
1269 	}
1270 
1271 	/* init mcr20a local data */
1272 	lp = hw->priv;
1273 	lp->hw = hw;
1274 	lp->spi = spi;
1275 
1276 	/* init ieee802154_hw */
1277 	hw->parent = &spi->dev;
1278 	ieee802154_random_extended_addr(&hw->phy->perm_extended_addr);
1279 
1280 	/* init buf */
1281 	lp->buf = devm_kzalloc(&spi->dev, SPI_COMMAND_BUFFER, GFP_KERNEL);
1282 
1283 	if (!lp->buf) {
1284 		ret = -ENOMEM;
1285 		goto free_dev;
1286 	}
1287 
1288 	mcr20a_setup_tx_spi_messages(lp);
1289 	mcr20a_setup_rx_spi_messages(lp);
1290 	mcr20a_setup_irq_spi_messages(lp);
1291 
1292 	/* setup regmap */
1293 	lp->regmap_dar = devm_regmap_init_spi(spi, &mcr20a_dar_regmap);
1294 	if (IS_ERR(lp->regmap_dar)) {
1295 		ret = PTR_ERR(lp->regmap_dar);
1296 		dev_err(&spi->dev, "Failed to allocate dar map: %d\n",
1297 			ret);
1298 		goto free_dev;
1299 	}
1300 
1301 	lp->regmap_iar = devm_regmap_init_spi(spi, &mcr20a_iar_regmap);
1302 	if (IS_ERR(lp->regmap_iar)) {
1303 		ret = PTR_ERR(lp->regmap_iar);
1304 		dev_err(&spi->dev, "Failed to allocate iar map: %d\n", ret);
1305 		goto free_dev;
1306 	}
1307 
1308 	mcr20a_hw_setup(lp);
1309 
1310 	spi_set_drvdata(spi, lp);
1311 
1312 	ret = mcr20a_phy_init(lp);
1313 	if (ret < 0) {
1314 		dev_crit(&spi->dev, "mcr20a_phy_init failed\n");
1315 		goto free_dev;
1316 	}
1317 
1318 	irq_type = irq_get_trigger_type(spi->irq);
1319 	if (!irq_type)
1320 		irq_type = IRQF_TRIGGER_FALLING;
1321 
1322 	ret = devm_request_irq(&spi->dev, spi->irq, mcr20a_irq_isr,
1323 			       irq_type, dev_name(&spi->dev), lp);
1324 	if (ret) {
1325 		dev_err(&spi->dev, "could not request_irq for mcr20a\n");
1326 		ret = -ENODEV;
1327 		goto free_dev;
1328 	}
1329 
1330 	/* disable_irq by default and wait for starting hardware */
1331 	disable_irq(spi->irq);
1332 
1333 	ret = ieee802154_register_hw(hw);
1334 	if (ret) {
1335 		dev_crit(&spi->dev, "ieee802154_register_hw failed\n");
1336 		goto free_dev;
1337 	}
1338 
1339 	return ret;
1340 
1341 free_dev:
1342 	ieee802154_free_hw(lp->hw);
1343 
1344 	return ret;
1345 }
1346 
1347 static int mcr20a_remove(struct spi_device *spi)
1348 {
1349 	struct mcr20a_local *lp = spi_get_drvdata(spi);
1350 
1351 	dev_dbg(&spi->dev, "%s\n", __func__);
1352 
1353 	ieee802154_unregister_hw(lp->hw);
1354 	ieee802154_free_hw(lp->hw);
1355 
1356 	return 0;
1357 }
1358 
/* Device-tree match table: binds this driver to "nxp,mcr20a" nodes. */
static const struct of_device_id mcr20a_of_match[] = {
	{ .compatible = "nxp,mcr20a", },
	{ },
};
MODULE_DEVICE_TABLE(of, mcr20a_of_match);

/* SPI device-id table for non-DT instantiation and module autoloading. */
static const struct spi_device_id mcr20a_device_id[] = {
	{ .name = "mcr20a", },
	{ },
};
MODULE_DEVICE_TABLE(spi, mcr20a_device_id);
1370 
/* SPI driver glue: devices match either via the DT table above or the
 * legacy SPI id table.
 */
static struct spi_driver mcr20a_driver = {
	.id_table = mcr20a_device_id,
	.driver = {
		.of_match_table = of_match_ptr(mcr20a_of_match),
		.name	= "mcr20a",
	},
	.probe      = mcr20a_probe,
	.remove     = mcr20a_remove,
};

/* Registers the driver at module init, unregisters at module exit. */
module_spi_driver(mcr20a_driver);
1382 
1383 MODULE_DESCRIPTION("MCR20A Transceiver Driver");
1384 MODULE_LICENSE("GPL v2");
1385 MODULE_AUTHOR("Xue Liu <liuxuenetmail@gmail>");
1386