/*
 * Driver for NXP MCR20A 802.15.4 Wireless-PAN Networking controller
 *
 * Copyright (C) 2018 Xue Liu <liuxuenetmail@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/gpio/consumer.h>
#include <linux/spi/spi.h>
#include <linux/workqueue.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/skbuff.h>
#include <linux/of_gpio.h>
#include <linux/regmap.h>
#include <linux/ieee802154.h>
#include <linux/debugfs.h>

#include <net/mac802154.h>
#include <net/cfg802154.h>

#include <linux/device.h>

#include "mcr20a.h"

#define SPI_COMMAND_BUFFER		3

#define REGISTER_READ			BIT(7)
#define REGISTER_WRITE			(0 << 7)
#define REGISTER_ACCESS			(0 << 6)
#define PACKET_BUFF_BURST_ACCESS	BIT(6)
#define PACKET_BUFF_BYTE_ACCESS		BIT(5)

#define MCR20A_WRITE_REG(x)		(x)
#define MCR20A_READ_REG(x)		(REGISTER_READ | (x))
#define MCR20A_BURST_READ_PACKET_BUF	(0xC0)
#define MCR20A_BURST_WRITE_PACKET_BUF	(0x40)

#define MCR20A_CMD_REG		0x80
#define MCR20A_CMD_REG_MASK	0x3f
#define MCR20A_CMD_WRITE	0x40
#define MCR20A_CMD_FB		0x20

/* Number of Interrupt Request Status Registers */
#define MCR20A_IRQSTS_NUM	2 /* only IRQ_STS1 and IRQ_STS2 */

/* MCR20A CCA Type */
enum {
	MCR20A_CCA_ED,	  // energy detect - CCA bit not active,
			  // not to be used for T and CCCA sequences
	MCR20A_CCA_MODE1, // energy detect - CCA bit ACTIVE
	MCR20A_CCA_MODE2, // 802.15.4 compliant signal detect - CCA bit ACTIVE
	MCR20A_CCA_MODE3
};

enum {
	MCR20A_XCVSEQ_IDLE	= 0x00,
	MCR20A_XCVSEQ_RX	= 0x01,
	MCR20A_XCVSEQ_TX	= 0x02,
	MCR20A_XCVSEQ_CCA	= 0x03,
	MCR20A_XCVSEQ_TR	= 0x04,
	MCR20A_XCVSEQ_CCCA	= 0x05,
};

/* IEEE-802.15.4 defined constants (2.4 GHz logical channels) */
#define MCR20A_MIN_CHANNEL	(11)
#define MCR20A_MAX_CHANNEL	(26)
#define MCR20A_CHANNEL_SPACING	(5)

/* MCR20A CCA Threshold constants */
#define MCR20A_MIN_CCA_THRESHOLD (0x6EU)
#define MCR20A_MAX_CCA_THRESHOLD (0x00U)

/* version 0C */
#define MCR20A_OVERWRITE_VERSION (0x0C)

/* MCR20A PLL configurations */
static const u8 PLL_INT[16] = {
	/* 2405 */ 0x0B,	/* 2410 */ 0x0B,	/* 2415 */ 0x0B,
	/* 2420 */ 0x0B,	/* 2425 */ 0x0B,	/* 2430 */ 0x0B,
	/* 2435 */ 0x0C,	/* 2440 */ 0x0C,	/* 2445 */ 0x0C,
	/* 2450 */ 0x0C,	/* 2455 */ 0x0C,	/* 2460 */ 0x0C,
	/* 2465 */ 0x0D,	/* 2470 */ 0x0D,	/* 2475 */ 0x0D,
	/* 2480 */ 0x0D
};

static const u8 PLL_FRAC[16] = {
	/* 2405 */ 0x28,	/* 2410 */ 0x50,	/* 2415 */ 0x78,
	/* 2420 */ 0xA0,	/* 2425 */ 0xC8,	/* 2430 */ 0xF0,
	/* 2435 */ 0x18,	/* 2440 */ 0x40,	/* 2445 */ 0x68,
	/* 2450 */ 0x90,	/* 2455 */ 0xB8,	/* 2460 */ 0xE0,
	/* 2465 */ 0x08,	/* 2470 */ 0x30,	/* 2475 */ 0x58,
	/* 2480 */ 0x80
};

static const struct reg_sequence mar20a_iar_overwrites[] = {
	{ IAR_MISC_PAD_CTRL,	0x02 },
	{ IAR_VCO_CTRL1,	0xB3 },
	{ IAR_VCO_CTRL2,	0x07 },
	{ IAR_PA_TUNING,	0x71 },
	{ IAR_CHF_IBUF,		0x2F },
	{ IAR_CHF_QBUF,		0x2F },
	{ IAR_CHF_IRIN,		0x24 },
	{ IAR_CHF_QRIN,		0x24 },
	{ IAR_CHF_IL,		0x24 },
	{ IAR_CHF_QL,		0x24 },
	{ IAR_CHF_CC1,		0x32 },
	{ IAR_CHF_CCL,		0x1D },
	{ IAR_CHF_CC2,		0x2D },
	{ IAR_CHF_IROUT,	0x24 },
	{ IAR_CHF_QROUT,	0x24 },
	{ IAR_PA_CAL,		0x28 },
	{ IAR_AGC_THR1,		0x55 },
	{ IAR_AGC_THR2,		0x2D },
	{ IAR_ATT_RSSI1,	0x5F },
	{ IAR_ATT_RSSI2,	0x8F },
	{ IAR_RSSI_OFFSET,	0x61 },
	{ IAR_CHF_PMA_GAIN,	0x03 },
	{ IAR_CCA1_THRESH,	0x50 },
	{ IAR_CORR_NVAL,	0x13 },
	{ IAR_ACKDELAY,		0x3D },
};

#define MCR20A_VALID_CHANNELS (0x07FFF800)
#define MCR20A_MAX_BUF		(127)

#define printdev(X) (&X->spi->dev)

/* regmap information for Direct Access Register (DAR) access */
#define MCR20A_DAR_WRITE	0x01
#define MCR20A_DAR_READ		0x00
#define MCR20A_DAR_NUMREGS	0x3F

/* regmap information for Indirect Access Register (IAR) access */
#define MCR20A_IAR_ACCESS	0x80
#define MCR20A_IAR_NUMREGS	0xBEFF

/* Read/Write SPI Commands for DAR and IAR registers. */
#define MCR20A_READSHORT(reg)	((reg) << 1)
#define MCR20A_WRITESHORT(reg)	((reg) << 1 | 1)
#define MCR20A_READLONG(reg)	(1 << 15 | (reg) << 5)
#define MCR20A_WRITELONG(reg)	(1 << 15 | (reg) << 5 | 1 << 4)

/* Type definitions for link configuration of instantiable layers */
#define MCR20A_PHY_INDIRECT_QUEUE_SIZE (12)

static bool
mcr20a_dar_writeable(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case DAR_IRQ_STS1:
	case DAR_IRQ_STS2:
	case DAR_IRQ_STS3:
	case DAR_PHY_CTRL1:
	case DAR_PHY_CTRL2:
	case DAR_PHY_CTRL3:
	case DAR_PHY_CTRL4:
	case DAR_SRC_CTRL:
	case DAR_SRC_ADDRS_SUM_LSB:
	case DAR_SRC_ADDRS_SUM_MSB:
	case DAR_T3CMP_LSB:
	case DAR_T3CMP_MSB:
	case DAR_T3CMP_USB:
	case DAR_T2PRIMECMP_LSB:
	case DAR_T2PRIMECMP_MSB:
	case DAR_T1CMP_LSB:
	case DAR_T1CMP_MSB:
	case DAR_T1CMP_USB:
	case DAR_T2CMP_LSB:
	case DAR_T2CMP_MSB:
	case DAR_T2CMP_USB:
	case DAR_T4CMP_LSB:
	case DAR_T4CMP_MSB:
	case DAR_T4CMP_USB:
	case DAR_PLL_INT0:
	case DAR_PLL_FRAC0_LSB:
	case DAR_PLL_FRAC0_MSB:
	case DAR_PA_PWR:
	/* no DAR_ACM */
	case DAR_OVERWRITE_VER:
	case DAR_CLK_OUT_CTRL:
	case DAR_PWR_MODES:
		return true;
	default:
		return false;
	}
}

static bool
mcr20a_dar_readable(struct device *dev, unsigned int reg)
{
	bool rc;

	/* all writeable are also readable */
	rc = mcr20a_dar_writeable(dev, reg);
	if (rc)
		return rc;

	/* readonly regs */
	switch (reg) {
	case DAR_RX_FRM_LEN:
	case DAR_CCA1_ED_FNL:
	case DAR_EVENT_TMR_LSB:
	case DAR_EVENT_TMR_MSB:
	case DAR_EVENT_TMR_USB:
	case DAR_TIMESTAMP_LSB:
	case DAR_TIMESTAMP_MSB:
	case DAR_TIMESTAMP_USB:
	case DAR_SEQ_STATE:
	case DAR_LQI_VALUE:
	case DAR_RSSI_CCA_CONT:
		return true;
	default:
		return false;
	}
}

static bool
mcr20a_dar_volatile(struct device *dev, unsigned int reg)
{
	/* can be changed during runtime */
	switch (reg) {
	case DAR_IRQ_STS1:
	case DAR_IRQ_STS2:
	case DAR_IRQ_STS3:
	/* use them in spi_async and regmap so it's volatile */
		return true;
	default:
		return false;
	}
}

static bool
mcr20a_dar_precious(struct device *dev, unsigned int reg)
{
	/* don't clear irq line on read */
	switch (reg) {
	case DAR_IRQ_STS1:
	case DAR_IRQ_STS2:
	case DAR_IRQ_STS3:
		return true;
	default:
		return false;
	}
}

static const struct regmap_config mcr20a_dar_regmap = {
	.name			= "mcr20a_dar",
	.reg_bits		= 8,
	.val_bits		= 8,
	.write_flag_mask	= REGISTER_ACCESS | REGISTER_WRITE,
	.read_flag_mask		= REGISTER_ACCESS | REGISTER_READ,
	.cache_type		= REGCACHE_RBTREE,
	.writeable_reg		= mcr20a_dar_writeable,
	.readable_reg		= mcr20a_dar_readable,
	.volatile_reg		= mcr20a_dar_volatile,
	.precious_reg		= mcr20a_dar_precious,
	.fast_io		= true,
	.can_multi_write	= true,
};

static bool
mcr20a_iar_writeable(struct device *dev, unsigned int reg)
{
	switch (reg) {
	case IAR_XTAL_TRIM:
	case IAR_PMC_LP_TRIM:
	case IAR_MACPANID0_LSB:
	case IAR_MACPANID0_MSB:
	case IAR_MACSHORTADDRS0_LSB:
	case IAR_MACSHORTADDRS0_MSB:
	case IAR_MACLONGADDRS0_0:
	case IAR_MACLONGADDRS0_8:
	case IAR_MACLONGADDRS0_16:
	case IAR_MACLONGADDRS0_24:
	case IAR_MACLONGADDRS0_32:
	case IAR_MACLONGADDRS0_40:
	case IAR_MACLONGADDRS0_48:
	case IAR_MACLONGADDRS0_56:
	case IAR_RX_FRAME_FILTER:
	case IAR_PLL_INT1:
	case IAR_PLL_FRAC1_LSB:
	case IAR_PLL_FRAC1_MSB:
	case IAR_MACPANID1_LSB:
	case IAR_MACPANID1_MSB:
	case IAR_MACSHORTADDRS1_LSB:
	case IAR_MACSHORTADDRS1_MSB:
	case IAR_MACLONGADDRS1_0:
	case IAR_MACLONGADDRS1_8:
	case IAR_MACLONGADDRS1_16:
	case IAR_MACLONGADDRS1_24:
	case IAR_MACLONGADDRS1_32:
	case IAR_MACLONGADDRS1_40:
	case IAR_MACLONGADDRS1_48:
	case IAR_MACLONGADDRS1_56:
	case IAR_DUAL_PAN_CTRL:
	case IAR_DUAL_PAN_DWELL:
	case IAR_CCA1_THRESH:
	case IAR_CCA1_ED_OFFSET_COMP:
	case IAR_LQI_OFFSET_COMP:
	case IAR_CCA_CTRL:
	case IAR_CCA2_CORR_PEAKS:
	case IAR_CCA2_CORR_THRESH:
	case IAR_TMR_PRESCALE:
	case IAR_ANT_PAD_CTRL:
	case IAR_MISC_PAD_CTRL:
	case IAR_BSM_CTRL:
	case IAR_RNG:
	case IAR_RX_WTR_MARK:
	case IAR_SOFT_RESET:
	case IAR_TXDELAY:
	case IAR_ACKDELAY:
	case IAR_CORR_NVAL:
	case IAR_ANT_AGC_CTRL:
	case IAR_AGC_THR1:
	case IAR_AGC_THR2:
	case IAR_PA_CAL:
	case IAR_ATT_RSSI1:
	case IAR_ATT_RSSI2:
	case IAR_RSSI_OFFSET:
	case IAR_XTAL_CTRL:
	case IAR_CHF_PMA_GAIN:
	case IAR_CHF_IBUF:
	case IAR_CHF_QBUF:
	case IAR_CHF_IRIN:
	case IAR_CHF_QRIN:
	case IAR_CHF_IL:
	case IAR_CHF_QL:
	case IAR_CHF_CC1:
	case IAR_CHF_CCL:
	case IAR_CHF_CC2:
	case IAR_CHF_IROUT:
	case IAR_CHF_QROUT:
	case IAR_PA_TUNING:
	case IAR_VCO_CTRL1:
	case IAR_VCO_CTRL2:
		return true;
	default:
		return false;
	}
}

static bool
mcr20a_iar_readable(struct device *dev, unsigned int reg)
{
	bool rc;

	/* all writeable are also readable */
	rc = mcr20a_iar_writeable(dev, reg);
	if (rc)
		return rc;

	/* readonly regs */
	switch (reg) {
	case IAR_PART_ID:
	case IAR_DUAL_PAN_STS:
	case IAR_RX_BYTE_COUNT:
	case IAR_FILTERFAIL_CODE1:
	case IAR_FILTERFAIL_CODE2:
	case IAR_RSSI:
		return true;
	default:
		return false;
	}
}

static bool
mcr20a_iar_volatile(struct device *dev, unsigned int reg)
{
	/* can be changed during runtime */
	switch (reg) {
	case IAR_DUAL_PAN_STS:
	case IAR_RX_BYTE_COUNT:
	case IAR_FILTERFAIL_CODE1:
	case IAR_FILTERFAIL_CODE2:
	case IAR_RSSI:
		return true;
	default:
		return false;
	}
}

static const struct regmap_config mcr20a_iar_regmap = {
	.name			= "mcr20a_iar",
	.reg_bits		= 16,
	.val_bits		= 8,
	.write_flag_mask	= REGISTER_ACCESS | REGISTER_WRITE | IAR_INDEX,
	.read_flag_mask		= REGISTER_ACCESS | REGISTER_READ | IAR_INDEX,
	.cache_type		= REGCACHE_RBTREE,
	.writeable_reg		= mcr20a_iar_writeable,
	.readable_reg		= mcr20a_iar_readable,
	.volatile_reg		= mcr20a_iar_volatile,
	.fast_io		= true,
};

struct mcr20a_local {
	struct spi_device *spi;

	struct ieee802154_hw *hw;
	struct regmap *regmap_dar;
	struct regmap *regmap_iar;

	u8 *buf;

	bool is_tx;

	/* for writing tx buffer */
	struct spi_message tx_buf_msg;
	u8 tx_header[1];
	/* burst buffer write command */
	struct spi_transfer tx_xfer_header;
	u8 tx_len[1];
	/* len of tx packet */
	struct spi_transfer tx_xfer_len;
	/* data of tx packet */
	struct spi_transfer tx_xfer_buf;
	struct sk_buff *tx_skb;

	/* for reading the length from the rx fifo */
	struct spi_message reg_msg;
	u8 reg_cmd[1];
	u8 reg_data[MCR20A_IRQSTS_NUM];
	struct spi_transfer reg_xfer_cmd;
	struct spi_transfer reg_xfer_data;

	/* receive handling */
	struct spi_message rx_buf_msg;
	u8 rx_header[1];
	struct spi_transfer rx_xfer_header;
	u8 rx_lqi[1];
	struct spi_transfer rx_xfer_lqi;
	u8 rx_buf[MCR20A_MAX_BUF];
	struct spi_transfer rx_xfer_buf;

	/* isr handling for reading intstat */
	struct spi_message irq_msg;
	u8 irq_header[1];
	u8 irq_data[MCR20A_IRQSTS_NUM];
	struct spi_transfer irq_xfer_data;
	struct spi_transfer irq_xfer_header;
};

static void
mcr20a_write_tx_buf_complete(void *context)
{
	struct mcr20a_local *lp = context;
	int ret;

	dev_dbg(printdev(lp), "%s\n", __func__);

	lp->reg_msg.complete = NULL;
	lp->reg_cmd[0] = MCR20A_WRITE_REG(DAR_PHY_CTRL1);
	lp->reg_data[0] = MCR20A_XCVSEQ_TX;
	lp->reg_xfer_data.len = 1;

	ret = spi_async(lp->spi, &lp->reg_msg);
	if (ret)
		dev_err(printdev(lp), "failed to set SEQ TX\n");
}

static int
mcr20a_xmit(struct ieee802154_hw *hw, struct sk_buff *skb)
{
	struct mcr20a_local *lp = hw->priv;

	dev_dbg(printdev(lp), "%s\n", __func__);

	lp->tx_skb = skb;

	print_hex_dump_debug("mcr20a tx: ", DUMP_PREFIX_OFFSET, 16, 1,
			     skb->data, skb->len, 0);

	lp->is_tx = 1;

	/* Force the sequencer to idle; the frame is written to the packet
	 * buffer and the TX sequence is started from the SEQ interrupt
	 * handling (see mcr20a_irq_clean_complete).
	 */
	lp->reg_msg.complete = NULL;
	lp->reg_cmd[0] = MCR20A_WRITE_REG(DAR_PHY_CTRL1);
	lp->reg_data[0] = MCR20A_XCVSEQ_IDLE;
	lp->reg_xfer_data.len = 1;

	return spi_async(lp->spi, &lp->reg_msg);
}

static int
mcr20a_ed(struct ieee802154_hw *hw, u8 *level)
{
	WARN_ON(!level);
	*level = 0xbe;
	return 0;
}

static int
mcr20a_set_channel(struct ieee802154_hw *hw, u8 page, u8 channel)
{
	struct mcr20a_local *lp = hw->priv;
	int ret;

	dev_dbg(printdev(lp), "%s\n", __func__);

	/* frequency = ((PLL_INT + 64) + (PLL_FRAC / 65536)) * 32 MHz */
	ret = regmap_write(lp->regmap_dar, DAR_PLL_INT0, PLL_INT[channel - 11]);
	if (ret)
		return ret;
	ret = regmap_write(lp->regmap_dar, DAR_PLL_FRAC0_LSB, 0x00);
	if (ret)
		return ret;
	ret = regmap_write(lp->regmap_dar, DAR_PLL_FRAC0_MSB,
			   PLL_FRAC[channel - 11]);
	if (ret)
		return ret;

	return 0;
}

static int
mcr20a_start(struct ieee802154_hw *hw)
{
	struct mcr20a_local *lp = hw->priv;
	int ret;

	dev_dbg(printdev(lp), "%s\n", __func__);

	/* No slotted operation */
	dev_dbg(printdev(lp), "no slotted operation\n");
	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
				 DAR_PHY_CTRL1_SLOTTED, 0x0);

	/* enable irq */
	enable_irq(lp->spi->irq);

	/* Unmask SEQ interrupt */
	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL2,
				 DAR_PHY_CTRL2_SEQMSK, 0x0);

	/* Start the RX sequence */
	dev_dbg(printdev(lp), "start the RX sequence\n");
	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
				 DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_RX);

	return 0;
}

static void
mcr20a_stop(struct ieee802154_hw *hw)
{
	struct mcr20a_local *lp = hw->priv;

	dev_dbg(printdev(lp), "%s\n", __func__);

	/* stop all running sequences */
	regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
			   DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_IDLE);

	/* disable irq */
	disable_irq(lp->spi->irq);
}

static int
mcr20a_set_hw_addr_filt(struct ieee802154_hw *hw,
			struct ieee802154_hw_addr_filt *filt,
			unsigned long changed)
{
	struct mcr20a_local *lp = hw->priv;

	dev_dbg(printdev(lp), "%s\n", __func__);

	if (changed & IEEE802154_AFILT_SADDR_CHANGED) {
		u16 addr = le16_to_cpu(filt->short_addr);

		regmap_write(lp->regmap_iar, IAR_MACSHORTADDRS0_LSB, addr);
		regmap_write(lp->regmap_iar, IAR_MACSHORTADDRS0_MSB, addr >> 8);
	}

	if (changed & IEEE802154_AFILT_PANID_CHANGED) {
		u16 pan = le16_to_cpu(filt->pan_id);

		regmap_write(lp->regmap_iar, IAR_MACPANID0_LSB, pan);
		regmap_write(lp->regmap_iar, IAR_MACPANID0_MSB, pan >> 8);
	}

	if (changed & IEEE802154_AFILT_IEEEADDR_CHANGED) {
		u8 addr[8], i;

		memcpy(addr, &filt->ieee_addr, 8);
		for (i = 0; i < 8; i++)
			regmap_write(lp->regmap_iar,
				     IAR_MACLONGADDRS0_0 + i, addr[i]);
	}

	if (changed & IEEE802154_AFILT_PANC_CHANGED) {
		if (filt->pan_coord) {
			regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
					   DAR_PHY_CTRL4_PANCORDNTR0, 0x10);
		} else {
			regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
					   DAR_PHY_CTRL4_PANCORDNTR0, 0x00);
		}
	}

	return 0;
}

/* -30 dBm to 10 dBm */
#define MCR20A_MAX_TX_POWERS	0x14
static const s32 mcr20a_powers[MCR20A_MAX_TX_POWERS + 1] = {
	-3000, -2800, -2600, -2400, -2200, -2000, -1800, -1600, -1400,
	-1200, -1000, -800, -600, -400, -200, 0, 200, 400, 600, 800, 1000
};

static int
mcr20a_set_txpower(struct ieee802154_hw *hw, s32 mbm)
{
	struct mcr20a_local *lp = hw->priv;
	u32 i;

	dev_dbg(printdev(lp), "%s(%d)\n", __func__, mbm);

	for (i = 0; i < lp->hw->phy->supported.tx_powers_size; i++) {
		if (lp->hw->phy->supported.tx_powers[i] == mbm)
			return regmap_write(lp->regmap_dar, DAR_PA_PWR,
					    ((i + 8) & 0x1F));
	}

	return -EINVAL;
}

#define MCR20A_MAX_ED_LEVELS	MCR20A_MIN_CCA_THRESHOLD
static s32 mcr20a_ed_levels[MCR20A_MAX_ED_LEVELS + 1];

static int
mcr20a_set_cca_mode(struct ieee802154_hw *hw,
		    const struct wpan_phy_cca *cca)
{
	struct mcr20a_local *lp = hw->priv;
	unsigned int cca_mode = 0xff;
	bool cca_mode_and = false;
	int ret;

	dev_dbg(printdev(lp), "%s\n", __func__);

	/* mapping 802.15.4 to driver spec */
	switch (cca->mode) {
	case NL802154_CCA_ENERGY:
		cca_mode = MCR20A_CCA_MODE1;
		break;
	case NL802154_CCA_CARRIER:
		cca_mode = MCR20A_CCA_MODE2;
		break;
	case NL802154_CCA_ENERGY_CARRIER:
		switch (cca->opt) {
		case NL802154_CCA_OPT_ENERGY_CARRIER_AND:
			cca_mode = MCR20A_CCA_MODE3;
			cca_mode_and = true;
			break;
		case NL802154_CCA_OPT_ENERGY_CARRIER_OR:
			cca_mode = MCR20A_CCA_MODE3;
			cca_mode_and = false;
			break;
		default:
			return -EINVAL;
		}
		break;
	default:
		return -EINVAL;
	}
	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
				 DAR_PHY_CTRL4_CCATYPE_MASK,
				 cca_mode << DAR_PHY_CTRL4_CCATYPE_SHIFT);
	if (ret < 0)
		return ret;

	if (cca_mode == MCR20A_CCA_MODE3) {
		if (cca_mode_and) {
			ret = regmap_update_bits(lp->regmap_iar, IAR_CCA_CTRL,
						 IAR_CCA_CTRL_CCA3_AND_NOT_OR,
						 0x08);
		} else {
			ret = regmap_update_bits(lp->regmap_iar,
						 IAR_CCA_CTRL,
						 IAR_CCA_CTRL_CCA3_AND_NOT_OR,
						 0x00);
		}
		if (ret < 0)
			return ret;
	}

	return ret;
}

static int
mcr20a_set_cca_ed_level(struct ieee802154_hw *hw, s32 mbm)
{
	struct mcr20a_local *lp = hw->priv;
	u32 i;

	dev_dbg(printdev(lp), "%s\n", __func__);

	for (i = 0; i < hw->phy->supported.cca_ed_levels_size; i++) {
		if (hw->phy->supported.cca_ed_levels[i] == mbm)
			return regmap_write(lp->regmap_iar, IAR_CCA1_THRESH, i);
	}

	return 0;
}

static int
mcr20a_set_promiscuous_mode(struct ieee802154_hw *hw, const bool on)
{
	struct mcr20a_local *lp = hw->priv;
	int ret;
	u8 rx_frame_filter_reg = 0x0;

	dev_dbg(printdev(lp), "%s(%d)\n", __func__, on);

	if (on) {
		/* All frame types accepted */
		rx_frame_filter_reg &= ~(IAR_RX_FRAME_FLT_FRM_VER);
		rx_frame_filter_reg |= (IAR_RX_FRAME_FLT_ACK_FT |
					IAR_RX_FRAME_FLT_NS_FT);

		ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
					 DAR_PHY_CTRL4_PROMISCUOUS,
					 DAR_PHY_CTRL4_PROMISCUOUS);
		if (ret < 0)
			return ret;

		ret = regmap_write(lp->regmap_iar, IAR_RX_FRAME_FILTER,
				   rx_frame_filter_reg);
		if (ret < 0)
			return ret;
	} else {
		ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL4,
					 DAR_PHY_CTRL4_PROMISCUOUS, 0x0);
		if (ret < 0)
			return ret;

		ret = regmap_write(lp->regmap_iar, IAR_RX_FRAME_FILTER,
				   IAR_RX_FRAME_FLT_FRM_VER |
				   IAR_RX_FRAME_FLT_BEACON_FT |
				   IAR_RX_FRAME_FLT_DATA_FT |
				   IAR_RX_FRAME_FLT_CMD_FT);
		if (ret < 0)
			return ret;
	}

	return 0;
}

static const struct ieee802154_ops mcr20a_hw_ops = {
	.owner			= THIS_MODULE,
	.xmit_async		= mcr20a_xmit,
	.ed			= mcr20a_ed,
	.set_channel		= mcr20a_set_channel,
	.start			= mcr20a_start,
	.stop			= mcr20a_stop,
	.set_hw_addr_filt	= mcr20a_set_hw_addr_filt,
	.set_txpower		= mcr20a_set_txpower,
	.set_cca_mode		= mcr20a_set_cca_mode,
	.set_cca_ed_level	= mcr20a_set_cca_ed_level,
	.set_promiscuous_mode	= mcr20a_set_promiscuous_mode,
};

static int
mcr20a_request_rx(struct mcr20a_local *lp)
{
	dev_dbg(printdev(lp), "%s\n", __func__);

	/* Start the RX sequence */
	regmap_update_bits_async(lp->regmap_dar, DAR_PHY_CTRL1,
				 DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_RX);

	return 0;
}

static void
mcr20a_handle_rx_read_buf_complete(void *context)
{
	struct mcr20a_local *lp = context;
	u8 len = lp->reg_data[0] & DAR_RX_FRAME_LENGTH_MASK;
	struct sk_buff *skb;

	dev_dbg(printdev(lp), "%s\n", __func__);

	dev_dbg(printdev(lp), "RX is done\n");

	if (!ieee802154_is_valid_psdu_len(len)) {
		dev_vdbg(&lp->spi->dev, "corrupted frame received\n");
		len = IEEE802154_MTU;
	}

	len = len - 2; /* get rid of frame check field */

	skb = dev_alloc_skb(len);
	if (!skb)
		return;

	memcpy(skb_put(skb, len), lp->rx_buf, len);
	ieee802154_rx_irqsafe(lp->hw, skb, lp->rx_lqi[0]);

	print_hex_dump_debug("mcr20a rx: ", DUMP_PREFIX_OFFSET, 16, 1,
			     lp->rx_buf, len, 0);
	pr_debug("mcr20a rx: lqi: %02hhx\n", lp->rx_lqi[0]);

	/* start RX sequence */
	mcr20a_request_rx(lp);
}

static void
mcr20a_handle_rx_read_len_complete(void *context)
{
	struct mcr20a_local *lp = context;
	u8 len;
	int ret;

	dev_dbg(printdev(lp), "%s\n", __func__);

	/* get the length of received frame */
	len = lp->reg_data[0] & DAR_RX_FRAME_LENGTH_MASK;
	dev_dbg(printdev(lp), "frame len : %d\n", len);

	/* prepare to read the rx buf */
	lp->rx_buf_msg.complete = mcr20a_handle_rx_read_buf_complete;
	lp->rx_header[0] = MCR20A_BURST_READ_PACKET_BUF;
	lp->rx_xfer_buf.len = len;

	ret = spi_async(lp->spi, &lp->rx_buf_msg);
	if (ret)
		dev_err(printdev(lp), "failed to read rx buffer\n");
}

static int
mcr20a_handle_rx(struct mcr20a_local *lp)
{
	dev_dbg(printdev(lp), "%s\n", __func__);
	lp->reg_msg.complete = mcr20a_handle_rx_read_len_complete;
	lp->reg_cmd[0] = MCR20A_READ_REG(DAR_RX_FRM_LEN);
	lp->reg_xfer_data.len = 1;

	return spi_async(lp->spi, &lp->reg_msg);
}

static int
mcr20a_handle_tx_complete(struct mcr20a_local *lp)
{
	dev_dbg(printdev(lp), "%s\n", __func__);

	ieee802154_xmit_complete(lp->hw, lp->tx_skb, false);

	return mcr20a_request_rx(lp);
}

static int
mcr20a_handle_tx(struct mcr20a_local *lp)
{
	int ret;

	dev_dbg(printdev(lp), "%s\n", __func__);

	/* write tx buffer */
	lp->tx_header[0] = MCR20A_BURST_WRITE_PACKET_BUF;
	/* add 2 bytes of FCS */
	lp->tx_len[0] = lp->tx_skb->len + 2;
	lp->tx_xfer_buf.tx_buf = lp->tx_skb->data;
	/* add 1 byte psduLength */
	lp->tx_xfer_buf.len = lp->tx_skb->len + 1;

	ret = spi_async(lp->spi, &lp->tx_buf_msg);
	if (ret) {
		dev_err(printdev(lp), "SPI write failed for TX buf\n");
		return ret;
	}

	return 0;
}

static void
mcr20a_irq_clean_complete(void *context)
{
	struct mcr20a_local *lp = context;
	u8 seq_state = lp->irq_data[DAR_IRQ_STS1] & DAR_PHY_CTRL1_XCVSEQ_MASK;

	dev_dbg(printdev(lp), "%s\n", __func__);

	enable_irq(lp->spi->irq);

	dev_dbg(printdev(lp), "IRQ STA1 (%02x) STA2 (%02x)\n",
		lp->irq_data[DAR_IRQ_STS1], lp->irq_data[DAR_IRQ_STS2]);

	switch (seq_state) {
	/* TX IRQ, RX IRQ and SEQ IRQ */
	case (DAR_IRQSTS1_TXIRQ | DAR_IRQSTS1_SEQIRQ):
		if (lp->is_tx) {
			lp->is_tx = 0;
			dev_dbg(printdev(lp), "TX is done. No ACK\n");
			mcr20a_handle_tx_complete(lp);
		}
		break;
	case (DAR_IRQSTS1_RXIRQ | DAR_IRQSTS1_SEQIRQ):
		/* rx is starting */
		dev_dbg(printdev(lp), "RX is starting\n");
		mcr20a_handle_rx(lp);
		break;
	case (DAR_IRQSTS1_RXIRQ | DAR_IRQSTS1_TXIRQ | DAR_IRQSTS1_SEQIRQ):
		if (lp->is_tx) {
			/* tx is done */
			lp->is_tx = 0;
			dev_dbg(printdev(lp), "TX is done. Got ACK\n");
			mcr20a_handle_tx_complete(lp);
		} else {
			/* rx is starting */
			dev_dbg(printdev(lp), "RX is starting\n");
			mcr20a_handle_rx(lp);
		}
		break;
	case (DAR_IRQSTS1_SEQIRQ):
		if (lp->is_tx) {
			dev_dbg(printdev(lp), "TX is starting\n");
			mcr20a_handle_tx(lp);
		} else {
			dev_dbg(printdev(lp), "MCR20A is stopped\n");
		}
		break;
	}
}

static void mcr20a_irq_status_complete(void *context)
{
	int ret;
	struct mcr20a_local *lp = context;

	dev_dbg(printdev(lp), "%s\n", __func__);
	regmap_update_bits_async(lp->regmap_dar, DAR_PHY_CTRL1,
				 DAR_PHY_CTRL1_XCVSEQ_MASK, MCR20A_XCVSEQ_IDLE);

	lp->reg_msg.complete = mcr20a_irq_clean_complete;
	lp->reg_cmd[0] = MCR20A_WRITE_REG(DAR_IRQ_STS1);
	memcpy(lp->reg_data, lp->irq_data, MCR20A_IRQSTS_NUM);
	lp->reg_xfer_data.len = MCR20A_IRQSTS_NUM;

	ret = spi_async(lp->spi, &lp->reg_msg);

	if (ret)
		dev_err(printdev(lp), "failed to clear irq status\n");
}

static irqreturn_t mcr20a_irq_isr(int irq, void *data)
{
	struct mcr20a_local *lp = data;
	int ret;

	disable_irq_nosync(irq);

	lp->irq_header[0] = MCR20A_READ_REG(DAR_IRQ_STS1);
	/* read IRQSTSx */
	ret = spi_async(lp->spi, &lp->irq_msg);
	if (ret) {
		enable_irq(irq);
		return IRQ_NONE;
	}

	return IRQ_HANDLED;
}

static void mcr20a_hw_setup(struct mcr20a_local *lp)
{
	u8 i;
	struct ieee802154_hw *hw = lp->hw;
	struct wpan_phy *phy = lp->hw->phy;

	dev_dbg(printdev(lp), "%s\n", __func__);

	phy->symbol_duration = 16;
	phy->lifs_period = 40;
	phy->sifs_period = 12;

	hw->flags = IEEE802154_HW_TX_OMIT_CKSUM |
		    IEEE802154_HW_AFILT |
		    IEEE802154_HW_PROMISCUOUS;

	phy->flags = WPAN_PHY_FLAG_TXPOWER | WPAN_PHY_FLAG_CCA_ED_LEVEL |
		     WPAN_PHY_FLAG_CCA_MODE;

	phy->supported.cca_modes = BIT(NL802154_CCA_ENERGY) |
		BIT(NL802154_CCA_CARRIER) | BIT(NL802154_CCA_ENERGY_CARRIER);
	phy->supported.cca_opts = BIT(NL802154_CCA_OPT_ENERGY_CARRIER_AND) |
		BIT(NL802154_CCA_OPT_ENERGY_CARRIER_OR);

	/* initialize cca_ed_levels: 0 dBm down to -110 dBm in 100 mbm steps */
	for (i = MCR20A_MAX_CCA_THRESHOLD; i < MCR20A_MIN_CCA_THRESHOLD + 1;
	     ++i) {
		mcr20a_ed_levels[i] = -i * 100;
	}

	phy->supported.cca_ed_levels = mcr20a_ed_levels;
	phy->supported.cca_ed_levels_size = ARRAY_SIZE(mcr20a_ed_levels);

	phy->cca.mode = NL802154_CCA_ENERGY;

	phy->supported.channels[0] = MCR20A_VALID_CHANNELS;
	phy->current_page = 0;
	/* MCR20A default reset value */
	phy->current_channel = 20;
	phy->symbol_duration = 16;
	phy->supported.tx_powers = mcr20a_powers;
	phy->supported.tx_powers_size = ARRAY_SIZE(mcr20a_powers);
	phy->cca_ed_level = phy->supported.cca_ed_levels[75];
	phy->transmit_power = phy->supported.tx_powers[0x0F];
}

static void
mcr20a_setup_tx_spi_messages(struct mcr20a_local *lp)
{
	spi_message_init(&lp->tx_buf_msg);
	lp->tx_buf_msg.context = lp;
	lp->tx_buf_msg.complete = mcr20a_write_tx_buf_complete;

	lp->tx_xfer_header.len = 1;
	lp->tx_xfer_header.tx_buf = lp->tx_header;

	lp->tx_xfer_len.len = 1;
	lp->tx_xfer_len.tx_buf = lp->tx_len;

	spi_message_add_tail(&lp->tx_xfer_header, &lp->tx_buf_msg);
	spi_message_add_tail(&lp->tx_xfer_len, &lp->tx_buf_msg);
	spi_message_add_tail(&lp->tx_xfer_buf, &lp->tx_buf_msg);
}

static void
mcr20a_setup_rx_spi_messages(struct mcr20a_local *lp)
{
	spi_message_init(&lp->reg_msg);
	lp->reg_msg.context = lp;

	lp->reg_xfer_cmd.len = 1;
	lp->reg_xfer_cmd.tx_buf = lp->reg_cmd;
	lp->reg_xfer_cmd.rx_buf = lp->reg_cmd;

	lp->reg_xfer_data.rx_buf = lp->reg_data;
	lp->reg_xfer_data.tx_buf = lp->reg_data;

	spi_message_add_tail(&lp->reg_xfer_cmd, &lp->reg_msg);
	spi_message_add_tail(&lp->reg_xfer_data, &lp->reg_msg);

	spi_message_init(&lp->rx_buf_msg);
	lp->rx_buf_msg.context = lp;
	lp->rx_buf_msg.complete = mcr20a_handle_rx_read_buf_complete;
	lp->rx_xfer_header.len = 1;
	lp->rx_xfer_header.tx_buf = lp->rx_header;
	lp->rx_xfer_header.rx_buf = lp->rx_header;

	lp->rx_xfer_buf.rx_buf = lp->rx_buf;

	lp->rx_xfer_lqi.len = 1;
	lp->rx_xfer_lqi.rx_buf = lp->rx_lqi;

	spi_message_add_tail(&lp->rx_xfer_header, &lp->rx_buf_msg);
	spi_message_add_tail(&lp->rx_xfer_buf, &lp->rx_buf_msg);
	spi_message_add_tail(&lp->rx_xfer_lqi, &lp->rx_buf_msg);
}

static void
mcr20a_setup_irq_spi_messages(struct mcr20a_local *lp)
{
	spi_message_init(&lp->irq_msg);
	lp->irq_msg.context = lp;
	lp->irq_msg.complete = mcr20a_irq_status_complete;
	lp->irq_xfer_header.len = 1;
	lp->irq_xfer_header.tx_buf = lp->irq_header;
	lp->irq_xfer_header.rx_buf = lp->irq_header;

	lp->irq_xfer_data.len = MCR20A_IRQSTS_NUM;
	lp->irq_xfer_data.rx_buf = lp->irq_data;

	spi_message_add_tail(&lp->irq_xfer_header, &lp->irq_msg);
	spi_message_add_tail(&lp->irq_xfer_data, &lp->irq_msg);
}

static int
mcr20a_phy_init(struct mcr20a_local *lp)
{
	u8 index;
	unsigned int phy_reg = 0;
	int ret;

	dev_dbg(printdev(lp), "%s\n", __func__);

	/* Disable Tristate on COCO MISO for SPI reads */
	ret = regmap_write(lp->regmap_iar, IAR_MISC_PAD_CTRL, 0x02);
	if (ret)
		goto err_ret;

	/* Clear all PP IRQ bits in IRQSTS1 to avoid unexpected interrupts
	 * immediately after init
	 */
	ret = regmap_write(lp->regmap_dar, DAR_IRQ_STS1, 0xEF);
	if (ret)
		goto err_ret;

	/* Clear all PP IRQ bits in IRQSTS2 */
	ret = regmap_write(lp->regmap_dar, DAR_IRQ_STS2,
			   DAR_IRQSTS2_ASM_IRQ | DAR_IRQSTS2_PB_ERR_IRQ |
			   DAR_IRQSTS2_WAKE_IRQ);
	if (ret)
		goto err_ret;

	/* Disable all timer interrupts */
	ret = regmap_write(lp->regmap_dar, DAR_IRQ_STS3, 0xFF);
	if (ret)
		goto err_ret;

	/* PHY_CTRL1 : default HW settings + AUTOACK enabled */
	ret = regmap_update_bits(lp->regmap_dar, DAR_PHY_CTRL1,
				 DAR_PHY_CTRL1_AUTOACK, DAR_PHY_CTRL1_AUTOACK);

	/* PHY_CTRL2 : disable all interrupts */
	ret = regmap_write(lp->regmap_dar, DAR_PHY_CTRL2, 0xFF);
	if (ret)
		goto err_ret;

	/* PHY_CTRL3 : disable all timers and remaining interrupts */
	ret = regmap_write(lp->regmap_dar, DAR_PHY_CTRL3,
			   DAR_PHY_CTRL3_ASM_MSK | DAR_PHY_CTRL3_PB_ERR_MSK |
			   DAR_PHY_CTRL3_WAKE_MSK);
	if (ret)
		goto err_ret;

	/* SRC_CTRL : enable Acknowledge Frame Pending and
	 * Source Address Matching Enable
	 */
	ret = regmap_write(lp->regmap_dar, DAR_SRC_CTRL,
			   DAR_SRC_CTRL_ACK_FRM_PND |
			   (DAR_SRC_CTRL_INDEX << DAR_SRC_CTRL_INDEX_SHIFT));
	if (ret)
		goto err_ret;

	/* RX_FRAME_FILTER */
	/* FRM_VER[1:0] = b11. Accept FrameVersion 0 and 1 packets */
	ret = regmap_write(lp->regmap_iar, IAR_RX_FRAME_FILTER,
			   IAR_RX_FRAME_FLT_FRM_VER |
			   IAR_RX_FRAME_FLT_BEACON_FT |
			   IAR_RX_FRAME_FLT_DATA_FT |
			   IAR_RX_FRAME_FLT_CMD_FT);
	if (ret)
		goto err_ret;

	dev_info(printdev(lp), "MCR20A DAR overwrites version: 0x%02x\n",
		 MCR20A_OVERWRITE_VERSION);

	/* Overwrites direct registers */
	ret = regmap_write(lp->regmap_dar, DAR_OVERWRITE_VER,
			   MCR20A_OVERWRITE_VERSION);
	if (ret)
		goto err_ret;

	/* Overwrites indirect registers */
	ret = regmap_multi_reg_write(lp->regmap_iar, mar20a_iar_overwrites,
				     ARRAY_SIZE(mar20a_iar_overwrites));
	if (ret)
		goto err_ret;

	/* Clear HW indirect queue */
	dev_dbg(printdev(lp), "clear HW indirect queue\n");
	for (index = 0; index < MCR20A_PHY_INDIRECT_QUEUE_SIZE; index++) {
		phy_reg = (u8)(((index & DAR_SRC_CTRL_INDEX) <<
				DAR_SRC_CTRL_INDEX_SHIFT)
			       | (DAR_SRC_CTRL_SRCADDR_EN)
			       | (DAR_SRC_CTRL_INDEX_DISABLE));
		ret = regmap_write(lp->regmap_dar, DAR_SRC_CTRL, phy_reg);
		if (ret)
			goto err_ret;
		phy_reg = 0;
	}

	/* Assign HW Indirect hash table to PAN0 */
	ret = regmap_read(lp->regmap_iar, IAR_DUAL_PAN_CTRL, &phy_reg);
	if (ret)
		goto err_ret;

	/* Clear current lvl */
	phy_reg &= ~IAR_DUAL_PAN_CTRL_DUAL_PAN_SAM_LVL_MSK;

	/* Set new lvl */
	phy_reg |= MCR20A_PHY_INDIRECT_QUEUE_SIZE <<
		   IAR_DUAL_PAN_CTRL_DUAL_PAN_SAM_LVL_SHIFT;
	ret = regmap_write(lp->regmap_iar, IAR_DUAL_PAN_CTRL, phy_reg);
	if (ret)
		goto err_ret;

	/* Set CCA threshold to -75 dBm */
	ret = regmap_write(lp->regmap_iar, IAR_CCA1_THRESH, 0x4B);
	if (ret)
		goto err_ret;

	/* Set prescaler to obtain 1 symbol (16us) timebase */
	ret = regmap_write(lp->regmap_iar, IAR_TMR_PRESCALE, 0x05);
	if (ret)
		goto err_ret;

	/* Enable autodoze mode. */
	ret = regmap_update_bits(lp->regmap_dar, DAR_PWR_MODES,
				 DAR_PWR_MODES_AUTODOZE,
				 DAR_PWR_MODES_AUTODOZE);
	if (ret)
		goto err_ret;

	/* Disable clk_out */
	ret = regmap_update_bits(lp->regmap_dar, DAR_CLK_OUT_CTRL,
				 DAR_CLK_OUT_CTRL_EN, 0x0);
	if (ret)
		goto err_ret;

	return 0;

err_ret:
	return ret;
}

static int
mcr20a_probe(struct spi_device *spi)
{
	struct ieee802154_hw *hw;
	struct mcr20a_local *lp;
	struct gpio_desc *rst_b;
	int irq_type;
	int ret = -ENOMEM;

	dev_dbg(&spi->dev, "%s\n", __func__);

	if (!spi->irq) {
		dev_err(&spi->dev, "no IRQ specified\n");
		return -EINVAL;
	}

	rst_b = devm_gpiod_get(&spi->dev, "rst_b", GPIOD_OUT_HIGH);
	if (IS_ERR(rst_b)) {
		ret = PTR_ERR(rst_b);
		if (ret != -EPROBE_DEFER)
			dev_err(&spi->dev, "Failed to get 'rst_b' gpio: %d", ret);
		return ret;
	}

	/* reset mcr20a */
	usleep_range(10, 20);
	gpiod_set_value_cansleep(rst_b, 1);
	usleep_range(10, 20);
	gpiod_set_value_cansleep(rst_b, 0);
	usleep_range(120, 240);

	/* allocate ieee802154_hw and private data */
	hw = ieee802154_alloc_hw(sizeof(*lp), &mcr20a_hw_ops);
	if (!hw) {
		dev_crit(&spi->dev, "ieee802154_alloc_hw failed\n");
		return ret;
	}

	/* init mcr20a local data */
	lp = hw->priv;
	lp->hw = hw;
	lp->spi = spi;

	/* init ieee802154_hw */
	hw->parent = &spi->dev;
	ieee802154_random_extended_addr(&hw->phy->perm_extended_addr);

	/* init buf */
	lp->buf = devm_kzalloc(&spi->dev, SPI_COMMAND_BUFFER, GFP_KERNEL);

	if (!lp->buf) {
		ret = -ENOMEM;
		goto free_dev;
	}

	mcr20a_setup_tx_spi_messages(lp);
	mcr20a_setup_rx_spi_messages(lp);
	mcr20a_setup_irq_spi_messages(lp);

	/* setup regmap */
	lp->regmap_dar = devm_regmap_init_spi(spi, &mcr20a_dar_regmap);
	if (IS_ERR(lp->regmap_dar)) {
		ret = PTR_ERR(lp->regmap_dar);
		dev_err(&spi->dev, "Failed to allocate dar map: %d\n",
			ret);
		goto free_dev;
	}

	lp->regmap_iar = devm_regmap_init_spi(spi, &mcr20a_iar_regmap);
	if (IS_ERR(lp->regmap_iar)) {
		ret = PTR_ERR(lp->regmap_iar);
		dev_err(&spi->dev, "Failed to allocate iar map: %d\n", ret);
		goto free_dev;
	}

	mcr20a_hw_setup(lp);

	spi_set_drvdata(spi, lp);

	ret = mcr20a_phy_init(lp);
	if (ret < 0) {
		dev_crit(&spi->dev, "mcr20a_phy_init failed\n");
		goto free_dev;
	}

	irq_type = irq_get_trigger_type(spi->irq);
	if (!irq_type)
		irq_type = IRQF_TRIGGER_FALLING;

	ret = devm_request_irq(&spi->dev, spi->irq, mcr20a_irq_isr,
			       irq_type, dev_name(&spi->dev), lp);
	if (ret) {
		dev_err(&spi->dev, "could not request_irq for mcr20a\n");
		ret = -ENODEV;
		goto free_dev;
	}

	/* disable_irq by default and wait for starting hardware */
	disable_irq(spi->irq);

	ret = ieee802154_register_hw(hw);
	if (ret) {
		dev_crit(&spi->dev, "ieee802154_register_hw failed\n");
		goto free_dev;
	}

	return ret;

free_dev:
	ieee802154_free_hw(lp->hw);

	return ret;
}

static int mcr20a_remove(struct spi_device *spi)
{
	struct mcr20a_local *lp = spi_get_drvdata(spi);

	dev_dbg(&spi->dev, "%s\n", __func__);

	ieee802154_unregister_hw(lp->hw);
	ieee802154_free_hw(lp->hw);

	return 0;
}

static const struct of_device_id mcr20a_of_match[] = {
	{ .compatible = "nxp,mcr20a", },
	{ },
};
MODULE_DEVICE_TABLE(of, mcr20a_of_match);

static const struct spi_device_id mcr20a_device_id[] = {
	{ .name = "mcr20a", },
	{ },
};
MODULE_DEVICE_TABLE(spi, mcr20a_device_id);

static struct spi_driver mcr20a_driver = {
	.id_table = mcr20a_device_id,
	.driver = {
		.of_match_table	= of_match_ptr(mcr20a_of_match),
		.name		= "mcr20a",
	},
	.probe      = mcr20a_probe,
	.remove     = mcr20a_remove,
};

module_spi_driver(mcr20a_driver);

MODULE_DESCRIPTION("MCR20A Transceiver Driver");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Xue Liu <liuxuenetmail@gmail.com>");