// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Mobile I2C Controller
 *
 * Copyright (C) 2014-19 Wolfram Sang <wsa@sang-engineering.com>
 * Copyright (C) 2008 Magnus Damm
 *
 * Portions of the code based on out-of-tree driver i2c-sh7343.c
 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

/* Transmit operation: */
/* */
/* 0 byte transmit */
/* BUS:     S     A8     ACK   P(*) */
/* IRQ:       DTE          WAIT */
/* ICIC: */
/* ICCR:      0x94         0x90 */
/* ICDR:         A8 */
/* */
/* 1 byte transmit */
/* BUS:     S     A8     ACK   D8(1)   ACK   P(*) */
/* IRQ:       DTE          WAIT          WAIT */
/* ICIC:      -DTE */
/* ICCR:      0x94                       0x90 */
/* ICDR:         A8        D8(1) */
/* */
/* 2 byte transmit */
/* BUS:     S     A8     ACK   D8(1)   ACK   D8(2)   ACK   P(*) */
/* IRQ:       DTE          WAIT          WAIT          WAIT */
/* ICIC:      -DTE */
/* ICCR:      0x94                                     0x90 */
/* ICDR:         A8        D8(1)         D8(2) */
/* */
/* 3 bytes or more, +---------+ gets repeated */
/* */
/* */
/* Receive operation: */
/* */
/* 0 byte receive - not supported since slave may hold SDA low */
/* */
/* 1 byte receive       [TX] | [RX] */
/* BUS:     S     A8     ACK | D8(1)   ACK   P(*) */
/* IRQ:       DTE   WAIT     |   WAIT     DTE */
/* ICIC:      -DTE           |   +DTE */
/* ICCR:      0x94       0x81|   0xc0 */
/* ICDR:         A8          |            D8(1) */
/* */
/* 2 byte receive       [TX] | [RX] */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   P(*) */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT     DTE */
/* ICIC:      -DTE           |                 +DTE */
/* ICCR:      0x94       0x81|                 0xc0 */
/* ICDR:         A8          |                 D8(1)    D8(2) */
/* */
/* 3 byte receive       [TX] | [RX]                                     (*) */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   D8(3)   ACK    P */
/* IRQ:       DTE   WAIT     |   WAIT          WAIT          WAIT    DTE */
/* ICIC:      -DTE           |                                +DTE */
/* ICCR:      0x94       0x81|                                0xc0 */
/* ICDR:         A8          |                                D8(1)   D8(2) */
/* */
/* 4 bytes or more, this part is repeated   +---------+ */
/* */
/* */
/* Interrupt order and BUSY flag */
/*     ___                                                _ */
/* SDA ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/ */
/* SCL      \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/ */
/* */
/*        S   D7  D6  D5  D4  D3  D2  D1  D0              P(*) */
/*                                           ___ */
/* WAIT IRQ ________________________________/   \___________ */
/* TACK IRQ ____________________________________/   \_______ */
/* DTE  IRQ __________________________________________/   \_ */
/* AL   IRQ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX */
/*           _______________________________________________ */
/* BUSY    __/                                               \_ */
/* */
/* (*) The STOP condition is only sent by the master at the end of the last */
/* I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY bit is */
/* only cleared after the STOP condition, so, between messages we have to */
/* poll for the DTE bit. */
/* */

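/*
 * The ICCR values in the diagrams above map onto the register bits defined
 * further down: 0x94 = ICE|TRS|BBSY (issue start, transmit mode),
 * 0x90 = ICE|TRS (stop after transmit), 0x81 = ICE|SCP (switch to receive
 * mode) and 0xc0 = ICE|RACK (finish the receive and issue the stop).
 */
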
enum sh_mobile_i2c_op {
	OP_START = 0,
	OP_TX_FIRST,
	OP_TX,
	OP_TX_STOP,
	OP_TX_TO_RX,
	OP_RX,
	OP_RX_STOP,
	OP_RX_STOP_DATA,
};

struct sh_mobile_i2c_data {
	struct device *dev;
	void __iomem *reg;
	struct i2c_adapter adap;
	unsigned long bus_speed;
	unsigned int clks_per_count;
	struct clk *clk;
	u_int8_t icic;
	u_int8_t flags;
	u_int16_t iccl;
	u_int16_t icch;

	spinlock_t lock;
	wait_queue_head_t wait;
	struct i2c_msg *msg;
	int pos;
	int sr;
	bool send_stop;
	bool stop_after_dma;
	bool atomic_xfer;

	struct resource *res;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	struct scatterlist sg;
	enum dma_data_direction dma_direction;
	u8 *dma_buf;
};

struct sh_mobile_dt_config {
	int clks_per_count;
	int (*setup)(struct sh_mobile_i2c_data *pd);
};

#define IIC_FLAG_HAS_ICIC67	(1 << 0)

/* Register offsets */
#define ICDR		0x00
#define ICCR		0x04
#define ICSR		0x08
#define ICIC		0x0c
#define ICCL		0x10
#define ICCH		0x14
#define ICSTART		0x70

/* Register bits */
#define ICCR_ICE	0x80
#define ICCR_RACK	0x40
#define ICCR_TRS	0x10
#define ICCR_BBSY	0x04
#define ICCR_SCP	0x01

#define ICSR_SCLM	0x80
#define ICSR_SDAM	0x40
#define SW_DONE		0x20
#define ICSR_BUSY	0x10
#define ICSR_AL		0x08
#define ICSR_TACK	0x04
#define ICSR_WAIT	0x02
#define ICSR_DTE	0x01

#define ICIC_ICCLB8	0x80
#define ICIC_ICCHB8	0x40
#define ICIC_TDMAE	0x20
#define ICIC_RDMAE	0x10
#define ICIC_ALE	0x08
#define ICIC_TACKE	0x04
#define ICIC_WAITE	0x02
#define ICIC_DTEE	0x01

#define ICSTART_ICSTART	0x10

static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
{
	if (offs == ICIC)
		data |= pd->icic;

	iowrite8(data, pd->reg + offs);
}

static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
	return ioread8(pd->reg + offs);
}

static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
			unsigned char set, unsigned char clr)
{
	iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}

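/*
 * Note on the helpers above: iic_wr() transparently ORs pd->icic into every
 * ICIC write, so the ninth timing bits (ICIC_ICCLB8/ICIC_ICCHB8) chosen in
 * sh_mobile_i2c_check_timing() below are preserved across all later ICIC
 * updates, including the read-modify-write done by iic_set_clr().
 */
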
static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCL >= COUNT_CLK * (tLOW + tf)
	 *
	 * SH-Mobile IIC hardware starts counting the LOW period of
	 * the SCL signal (tLOW) as soon as it pulls the SCL line.
	 * In order to meet the tLOW timing spec, we need to take into
	 * account the fall time of SCL signal (tf).  Default tf value
	 * should be 0.3 us, for safety.
	 */
	return (((count_khz * (tLOW + tf)) + 5000) / 10000);
}

static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCH >= COUNT_CLK * (tHIGH + tf)
	 *
	 * SH-Mobile IIC hardware is aware of SCL transition period 'tr',
	 * and can ignore it.  SH-Mobile IIC controller starts counting
	 * the HIGH period of the SCL signal (tHIGH) after the SCL input
	 * voltage increases at VIH.
	 *
	 * Afterward it turned out calculating ICCH using only tHIGH spec
	 * will result in violation of the tHD;STA timing spec.  We need
	 * to take into account the fall time of SDA signal (tf) at START
	 * condition, in order to meet both tHIGH and tHD;STA specs.
	 */
	return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
}

static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
{
	u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;

	if (pd->iccl > max_val || pd->icch > max_val) {
		dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
			pd->iccl, pd->icch);
		return -EINVAL;
	}

	/* one more bit of ICCL in ICIC */
	if (pd->iccl & 0x100)
		pd->icic |= ICIC_ICCLB8;
	else
		pd->icic &= ~ICIC_ICCLB8;

	/* one more bit of ICCH in ICIC */
	if (pd->icch & 0x100)
		pd->icic |= ICIC_ICCHB8;
	else
		pd->icic &= ~ICIC_ICCHB8;

	dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
	return 0;
}

static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long i2c_clk_khz;
	u32 tHIGH, tLOW, tf;

	i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;

	if (pd->bus_speed == I2C_MAX_STANDARD_MODE_FREQ) {
		tLOW	= 47;	/* tLOW = 4.7 us */
		tHIGH	= 40;	/* tHD;STA = tHIGH = 4.0 us */
		tf	= 3;	/* tf = 0.3 us */
	} else if (pd->bus_speed == I2C_MAX_FAST_MODE_FREQ) {
		tLOW	= 13;	/* tLOW = 1.3 us */
		tHIGH	= 6;	/* tHD;STA = tHIGH = 0.6 us */
		tf	= 3;	/* tf = 0.3 us */
	} else {
		dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
			pd->bus_speed);
		return -EINVAL;
	}

	pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
	pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);

	return sh_mobile_i2c_check_timing(pd);
}

static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long clks_per_cycle;

	/* L = 5, H = 4, L + H = 9 */
	clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
	pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
	pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);

	return sh_mobile_i2c_check_timing(pd);
}

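/*
 * Worked example for the timing helpers above (illustrative values only,
 * not taken from a real board): with a 50 MHz count clock
 * (count_khz = 50000) in standard mode, tLOW + tf = 47 + 3 tenths of a
 * microsecond, so sh_mobile_i2c_iccl() returns
 * (50000 * 50 + 5000) / 10000 = 250 counts, i.e. 5.0 us of SCL low time,
 * and sh_mobile_i2c_icch() yields (50000 * 43 + 5000) / 10000 = 215 counts
 * for the high period.  Both fit into eight bits, so
 * sh_mobile_i2c_check_timing() leaves ICIC_ICCLB8/ICIC_ICCHB8 cleared.
 */
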
static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
{
	unsigned char ret = 0;
	unsigned long flags;

	dev_dbg(pd->dev, "op %d\n", op);

	spin_lock_irqsave(&pd->lock, flags);

	switch (op) {
	case OP_START: /* issue start and trigger DTE interrupt */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_FIRST: /* disable DTE interrupt and write client address */
		iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
		break;
	case OP_TX: /* write data */
		iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
		break;
	case OP_TX_STOP: /* issue a stop (or rep_start) */
		iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
					       : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_TO_RX: /* select read mode */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
		break;
	case OP_RX: /* just read data */
		ret = iic_rd(pd, ICDR);
		break;
	case OP_RX_STOP: /* enable DTE interrupt, issue stop */
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		ret = iic_rd(pd, ICDR);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	}

	spin_unlock_irqrestore(&pd->lock, flags);

	dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);
	return ret;
}

static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
{
	if (pd->pos == pd->msg->len) {
		i2c_op(pd, OP_TX_STOP);
		return 1;
	}

	if (pd->pos == -1)
		i2c_op(pd, OP_TX_FIRST);
	else
		i2c_op(pd, OP_TX);

	pd->pos++;
	return 0;
}

static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
{
	int real_pos;

	/* switch from TX (address) to RX (data) adds two interrupts */
	real_pos = pd->pos - 2;

	if (pd->pos == -1) {
		i2c_op(pd, OP_TX_FIRST);
	} else if (pd->pos == 0) {
		i2c_op(pd, OP_TX_TO_RX);
	} else if (pd->pos == pd->msg->len) {
		if (pd->stop_after_dma) {
			/* Simulate PIO end condition after DMA transfer */
			i2c_op(pd, OP_RX_STOP);
			pd->pos++;
			goto done;
		}

		if (real_pos < 0)
			i2c_op(pd, OP_RX_STOP);
		else
			pd->msg->buf[real_pos] = i2c_op(pd, OP_RX_STOP_DATA);
	} else if (real_pos >= 0) {
		pd->msg->buf[real_pos] = i2c_op(pd, OP_RX);
	}

 done:
	pd->pos++;
	return pd->pos == (pd->msg->len + 2);
}

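/*
 * For illustration, the pd->pos bookkeeping in the receive path above walks
 * through a 2-byte read (pd->msg->len == 2) like this:
 *
 *   pos == -1: OP_TX_FIRST     - transmit the slave address
 *   pos ==  0: OP_TX_TO_RX     - switch the controller to receive mode
 *   pos ==  1: real_pos < 0    - nothing to fetch yet, just advance
 *   pos ==  2: OP_RX_STOP_DATA - read byte 0, request the stop
 *   pos ==  3: OP_RX           - read byte 1, done (pos reaches len + 2)
 *
 * which matches the "2 byte receive" diagram at the top of this file.
 */
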
static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
{
	struct sh_mobile_i2c_data *pd = dev_id;
	unsigned char sr;
	int wakeup = 0;

	sr = iic_rd(pd, ICSR);
	pd->sr |= sr; /* remember state */

	dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
		(pd->msg->flags & I2C_M_RD) ? "read" : "write",
		pd->pos, pd->msg->len);

	/* Kick off TxDMA after preface was done */
	if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
		iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
	else if (sr & (ICSR_AL | ICSR_TACK))
		/* don't interrupt transaction - continue to issue stop */
		iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
	else if (pd->msg->flags & I2C_M_RD)
		wakeup = sh_mobile_i2c_isr_rx(pd);
	else
		wakeup = sh_mobile_i2c_isr_tx(pd);

	/* Kick off RxDMA after preface was done */
	if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
		iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);

	if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
		iic_wr(pd, ICSR, sr & ~ICSR_WAIT);

	if (wakeup) {
		pd->sr |= SW_DONE;
		if (!pd->atomic_xfer)
			wake_up(&pd->wait);
	}

	/* defeat write posting to avoid spurious WAIT interrupts */
	iic_rd(pd, ICSR);

	return IRQ_HANDLED;
}

static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd, bool terminate)
{
	struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
				? pd->dma_rx : pd->dma_tx;

	/* only allowed from thread context! */
	if (terminate)
		dmaengine_terminate_sync(chan);

	dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
			 pd->msg->len, pd->dma_direction);

	pd->dma_direction = DMA_NONE;
}

static void sh_mobile_i2c_dma_callback(void *data)
{
	struct sh_mobile_i2c_data *pd = data;

	sh_mobile_i2c_cleanup_dma(pd, false);
	pd->pos = pd->msg->len;
	pd->stop_after_dma = true;

	iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}

static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
				enum dma_transfer_direction dir,
				dma_addr_t port_addr)
{
	struct dma_chan *chan;
	struct dma_slave_config cfg;
	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
	int ret;

	chan = dma_request_chan(dev, chan_name);
	if (IS_ERR(chan)) {
		dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
			PTR_ERR(chan));
		return chan;
	}

	memset(&cfg, 0, sizeof(cfg));
	cfg.direction = dir;
	if (dir == DMA_MEM_TO_DEV) {
		cfg.dst_addr = port_addr;
		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else {
		cfg.src_addr = port_addr;
		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	}

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret) {
		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
		dma_release_channel(chan);
		return ERR_PTR(ret);
	}

	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
	return chan;
}

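/*
 * DMA is strictly an optimization here: start_ch() only attempts it for
 * DMA-safe buffers of at least 8 bytes (see i2c_get_dma_safe_msg_buf()
 * below), and every failure path in sh_mobile_i2c_xfer_dma() just returns
 * early so the transfer silently falls back to interrupt-driven PIO.
 */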
static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
	bool read = pd->msg->flags & I2C_M_RD;
	enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
	struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
	struct dma_async_tx_descriptor *txdesc;
	dma_addr_t dma_addr;
	dma_cookie_t cookie;

	if (PTR_ERR(chan) == -EPROBE_DEFER) {
		if (read)
			chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
									   pd->res->start + ICDR);
		else
			chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
									   pd->res->start + ICDR);
	}

	if (IS_ERR(chan))
		return;

	dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
	if (dma_mapping_error(chan->device->dev, dma_addr)) {
		dev_dbg(pd->dev, "dma map failed, using PIO\n");
		return;
	}

	sg_dma_len(&pd->sg) = pd->msg->len;
	sg_dma_address(&pd->sg) = dma_addr;

	pd->dma_direction = dir;

	txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
					 read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	txdesc->callback = sh_mobile_i2c_dma_callback;
	txdesc->callback_param = pd;

	cookie = dmaengine_submit(txdesc);
	if (dma_submit_error(cookie)) {
		dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	dma_async_issue_pending(chan);
}

static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
		     bool do_init)
{
	if (do_init) {
		/* Initialize channel registers */
		iic_wr(pd, ICCR, ICCR_SCP);

		/* Enable channel and configure rx ack */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

		/* Set the clock */
		iic_wr(pd, ICCL, pd->iccl & 0xff);
		iic_wr(pd, ICCH, pd->icch & 0xff);
	}

	pd->msg = usr_msg;
	pd->pos = -1;
	pd->sr = 0;

	if (pd->atomic_xfer)
		return;

	pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
	if (pd->dma_buf)
		sh_mobile_i2c_xfer_dma(pd);

	/* Enable all interrupts to begin with */
	iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}

static int poll_dte(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		if (val & ICSR_DTE)
			break;

		if (val & ICSR_TACK)
			return -ENXIO;

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

static int poll_busy(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);

		/* the interrupt handler may wake us up before the
		 * transfer is finished, so poll the hardware
		 * until we're done.
		 */
		if (!(val & ICSR_BUSY)) {
			/* handle missing acknowledge and arbitration lost */
			val |= pd->sr;
			if (val & ICSR_TACK)
				return -ENXIO;
			if (val & ICSR_AL)
				return -EAGAIN;
			break;
		}

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

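/*
 * Note on the message loop below: a (repeated) START is only issued for the
 * first message or right after a STOP (do_start is true when the previous
 * message ended with one), and a STOP is only requested for the last message
 * or when I2C_M_STOP is set, matching the (*) note in the diagrams at the
 * top of this file.
 */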
static int sh_mobile_xfer(struct sh_mobile_i2c_data *pd,
			  struct i2c_msg *msgs, int num)
{
	struct i2c_msg *msg;
	int err = 0;
	int i;
	long time_left;

	/* Wake up device and enable clock */
	pm_runtime_get_sync(pd->dev);

	/* Process all messages */
	for (i = 0; i < num; i++) {
		bool do_start = pd->send_stop || !i;
		msg = &msgs[i];
		pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
		pd->stop_after_dma = false;

		start_ch(pd, msg, do_start);

		if (do_start)
			i2c_op(pd, OP_START);

		if (pd->atomic_xfer) {
			unsigned long j = jiffies + pd->adap.timeout;

			time_left = time_before_eq(jiffies, j);
			while (time_left &&
			       !(pd->sr & (ICSR_TACK | SW_DONE))) {
				unsigned char sr = iic_rd(pd, ICSR);

				if (sr & (ICSR_AL | ICSR_TACK |
					  ICSR_WAIT | ICSR_DTE)) {
					sh_mobile_i2c_isr(0, pd);
					udelay(150);
				} else {
					cpu_relax();
				}
				time_left = time_before_eq(jiffies, j);
			}
		} else {
			/* The interrupt handler takes care of the rest... */
			time_left = wait_event_timeout(pd->wait,
					pd->sr & (ICSR_TACK | SW_DONE),
					pd->adap.timeout);

			/* 'stop_after_dma' tells if DMA xfer was complete */
			i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg,
						 pd->stop_after_dma);
		}

		if (!time_left) {
			dev_err(pd->dev, "Transfer request timed out\n");
			if (pd->dma_direction != DMA_NONE)
				sh_mobile_i2c_cleanup_dma(pd, true);

			err = -ETIMEDOUT;
			break;
		}

		if (pd->send_stop)
			err = poll_busy(pd);
		else
			err = poll_dte(pd);
		if (err < 0)
			break;
	}

	/* Disable channel */
	iic_wr(pd, ICCR, ICCR_SCP);

	/* Disable clock and mark device as idle */
	pm_runtime_put_sync(pd->dev);

	return err ?: num;
}

static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
			      struct i2c_msg *msgs,
			      int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = false;
	return sh_mobile_xfer(pd, msgs, num);
}

static int sh_mobile_i2c_xfer_atomic(struct i2c_adapter *adapter,
				     struct i2c_msg *msgs,
				     int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = true;
	return sh_mobile_xfer(pd, msgs, num);
}

static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}

static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
	.functionality = sh_mobile_i2c_func,
	.master_xfer = sh_mobile_i2c_xfer,
	.master_xfer_atomic = sh_mobile_i2c_xfer_atomic,
};

static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
	.flags = I2C_AQ_NO_ZERO_LEN_READ,
};

/*
 * r8a7740 has an errata regarding I2C I/O pad reset needing this workaround.
 */
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
	iic_set_clr(pd, ICCR, ICCR_ICE, 0);
	iic_rd(pd, ICCR); /* dummy read */

	iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
	iic_rd(pd, ICSTART); /* dummy read */

	udelay(10);

	iic_wr(pd, ICCR, ICCR_SCP);
	iic_wr(pd, ICSTART, 0);

	udelay(10);

	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);
	iic_wr(pd, ICCR, 0);
	udelay(10);
	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);

	return sh_mobile_i2c_init(pd);
}

static const struct sh_mobile_dt_config default_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config fast_clock_dt_config = {
	.clks_per_count = 2,
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config v2_freq_calc_dt_config = {
	.clks_per_count = 2,
	.setup = sh_mobile_i2c_v2_init,
};

static const struct sh_mobile_dt_config r8a7740_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_r8a7740_workaround,
};

static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
	{ .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
	{ .compatible = "renesas,iic-r8a774c0", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7790", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7791", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7792", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7793", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7794", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a7795", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-r8a77990", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen2-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rcar-gen3-iic", .data = &v2_freq_calc_dt_config },
	{ .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
	{},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);

static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}

	if (!IS_ERR(pd->dma_rx)) {
		dma_release_channel(pd->dma_rx);
		pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
	}
}

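/*
 * IRQ setup: with a device tree node every interrupt is requested by index
 * until platform_get_irq_optional() reports -ENXIO; on legacy (non-DT)
 * platforms each IORESOURCE_IRQ resource may describe a whole range of
 * interrupt numbers, all of which are hooked to the same handler.
 */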
static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
	struct device_node *np = dev_of_node(&dev->dev);
	int k = 0, ret;

	if (np) {
		int irq;

		while ((irq = platform_get_irq_optional(dev, k)) != -ENXIO) {
			if (irq < 0)
				return irq;
			ret = devm_request_irq(&dev->dev, irq, sh_mobile_i2c_isr,
					       0, dev_name(&dev->dev), pd);
			if (ret) {
				dev_err(&dev->dev, "cannot request IRQ %d\n", irq);
				return ret;
			}
			k++;
		}
	} else {
		struct resource *res;
		resource_size_t n;

		while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
			for (n = res->start; n <= res->end; n++) {
				ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
						       0, dev_name(&dev->dev), pd);
				if (ret) {
					dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
					return ret;
				}
			}
			k++;
		}
	}

	return k > 0 ? 0 : -ENOENT;
}

static int sh_mobile_i2c_probe(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd;
	struct i2c_adapter *adap;
	struct resource *res;
	const struct sh_mobile_dt_config *config;
	int ret;
	u32 bus_speed;

	pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;

	pd->clk = devm_clk_get(&dev->dev, NULL);
	if (IS_ERR(pd->clk)) {
		dev_err(&dev->dev, "cannot get clock\n");
		return PTR_ERR(pd->clk);
	}

	ret = sh_mobile_i2c_hook_irqs(dev, pd);
	if (ret)
		return ret;

	pd->dev = &dev->dev;
	platform_set_drvdata(dev, pd);

	res = platform_get_resource(dev, IORESOURCE_MEM, 0);

	pd->res = res;
	pd->reg = devm_ioremap_resource(&dev->dev, res);
	if (IS_ERR(pd->reg))
		return PTR_ERR(pd->reg);

	ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
	pd->bus_speed = (ret || !bus_speed) ? I2C_MAX_STANDARD_MODE_FREQ : bus_speed;
	pd->clks_per_count = 1;

	/* Newer variants come with two new bits in ICIC */
	if (resource_size(res) > 0x17)
		pd->flags |= IIC_FLAG_HAS_ICIC67;

	pm_runtime_enable(&dev->dev);
	pm_runtime_get_sync(&dev->dev);

	config = of_device_get_match_data(&dev->dev);
	if (config) {
		pd->clks_per_count = config->clks_per_count;
		ret = config->setup(pd);
	} else {
		ret = sh_mobile_i2c_init(pd);
	}

	pm_runtime_put_sync(&dev->dev);
	if (ret)
		return ret;

	/* Init DMA */
	sg_init_table(&pd->sg, 1);
	pd->dma_direction = DMA_NONE;
	pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);

	/* setup the private data */
	adap = &pd->adap;
	i2c_set_adapdata(adap, pd);

	adap->owner = THIS_MODULE;
	adap->algo = &sh_mobile_i2c_algorithm;
	adap->quirks = &sh_mobile_i2c_quirks;
	adap->dev.parent = &dev->dev;
	adap->retries = 5;
	adap->nr = dev->id;
	adap->dev.of_node = dev->dev.of_node;

	strlcpy(adap->name, dev->name, sizeof(adap->name));

	spin_lock_init(&pd->lock);
	init_waitqueue_head(&pd->wait);

	ret = i2c_add_numbered_adapter(adap);
	if (ret < 0) {
		sh_mobile_i2c_release_dma(pd);
		return ret;
	}

	dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);

	return 0;
}

static int sh_mobile_i2c_remove(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

	i2c_del_adapter(&pd->adap);
	sh_mobile_i2c_release_dma(pd);
	pm_runtime_disable(&dev->dev);
	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int sh_mobile_i2c_suspend(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_suspended(&pd->adap);
	return 0;
}

static int sh_mobile_i2c_resume(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_resumed(&pd->adap);
	return 0;
}

static const struct dev_pm_ops sh_mobile_i2c_pm_ops = {
	SET_NOIRQ_SYSTEM_SLEEP_PM_OPS(sh_mobile_i2c_suspend,
				      sh_mobile_i2c_resume)
};

#define DEV_PM_OPS (&sh_mobile_i2c_pm_ops)
#else
#define DEV_PM_OPS NULL
#endif /* CONFIG_PM_SLEEP */

static struct platform_driver sh_mobile_i2c_driver = {
	.driver	= {
		.name = "i2c-sh_mobile",
		.of_match_table = sh_mobile_i2c_dt_ids,
		.pm = DEV_PM_OPS,
	},
	.probe = sh_mobile_i2c_probe,
	.remove = sh_mobile_i2c_remove,
};

static int __init sh_mobile_i2c_adap_init(void)
{
	return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
	platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);

MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm");
MODULE_AUTHOR("Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");