/*
 * Atmel ADC driver for SAMA5D2 devices and compatible.
 *
 * Copyright (C) 2015 Atmel,
 *               2015 Ludovic Desroches <ludovic.desroches@atmel.com>
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/sched.h>
#include <linux/wait.h>
#include <linux/iio/iio.h>
#include <linux/iio/sysfs.h>
#include <linux/iio/buffer.h>
#include <linux/iio/trigger.h>
#include <linux/iio/trigger_consumer.h>
#include <linux/iio/triggered_buffer.h>
#include <linux/pinctrl/consumer.h>
#include <linux/regulator/consumer.h>

/* Control Register */
#define AT91_SAMA5D2_CR 0x00
/* Software Reset */
#define AT91_SAMA5D2_CR_SWRST BIT(0)
/* Start Conversion */
#define AT91_SAMA5D2_CR_START BIT(1)
/* Touchscreen Calibration */
#define AT91_SAMA5D2_CR_TSCALIB BIT(2)
/* Comparison Restart */
#define AT91_SAMA5D2_CR_CMPRST BIT(4)

/* Mode Register */
#define AT91_SAMA5D2_MR 0x04
/* Trigger Selection */
#define AT91_SAMA5D2_MR_TRGSEL(v) ((v) << 1)
/* ADTRG */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG0 0
/* TIOA0 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG1 1
/* TIOA1 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG2 2
/* TIOA2 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG3 3
/* PWM event line 0 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG4 4
/* PWM event line 1 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG5 5
/* TIOA3 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG6 6
/* RTCOUT0 */
#define AT91_SAMA5D2_MR_TRGSEL_TRIG7 7
/* Sleep Mode */
#define AT91_SAMA5D2_MR_SLEEP BIT(5)
/* Fast Wake Up */
#define AT91_SAMA5D2_MR_FWUP BIT(6)
/* Prescaler Rate Selection */
#define AT91_SAMA5D2_MR_PRESCAL(v) ((v) << AT91_SAMA5D2_MR_PRESCAL_OFFSET)
#define AT91_SAMA5D2_MR_PRESCAL_OFFSET 8
#define AT91_SAMA5D2_MR_PRESCAL_MAX 0xff
#define AT91_SAMA5D2_MR_PRESCAL_MASK GENMASK(15, 8)
/* Startup Time */
#define AT91_SAMA5D2_MR_STARTUP(v) ((v) << 16)
#define AT91_SAMA5D2_MR_STARTUP_MASK GENMASK(19, 16)
/* Analog Change */
#define AT91_SAMA5D2_MR_ANACH BIT(23)
/* Tracking Time */
#define AT91_SAMA5D2_MR_TRACKTIM(v) ((v) << 24)
#define AT91_SAMA5D2_MR_TRACKTIM_MAX 0xff
/* Transfer Time */
#define AT91_SAMA5D2_MR_TRANSFER(v) ((v) << 28)
#define AT91_SAMA5D2_MR_TRANSFER_MAX 0x3
/* Use Sequence Enable */
#define AT91_SAMA5D2_MR_USEQ BIT(31)

/* Channel Sequence Register 1 */
#define AT91_SAMA5D2_SEQR1 0x08
/* Channel Sequence Register 2 */
#define AT91_SAMA5D2_SEQR2 0x0c
/* Channel Enable Register */
#define AT91_SAMA5D2_CHER 0x10
/* Channel Disable Register */
#define AT91_SAMA5D2_CHDR 0x14
/* Channel Status Register */
#define AT91_SAMA5D2_CHSR 0x18
/* Last Converted Data Register */
#define AT91_SAMA5D2_LCDR 0x20
/* Interrupt Enable Register */
#define AT91_SAMA5D2_IER 0x24
/* Interrupt Enable Register - general overrun error */
#define AT91_SAMA5D2_IER_GOVRE BIT(25)
/* Interrupt Disable Register */
#define AT91_SAMA5D2_IDR 0x28
/* Interrupt Mask Register */
#define AT91_SAMA5D2_IMR 0x2c
/* Interrupt Status Register */
#define AT91_SAMA5D2_ISR 0x30
/* Last Channel Trigger Mode Register */
#define AT91_SAMA5D2_LCTMR 0x34
/* Last Channel Compare Window Register */
#define AT91_SAMA5D2_LCCWR 0x38
/* Overrun Status Register */
#define AT91_SAMA5D2_OVER 0x3c
/* Extended Mode Register */
#define AT91_SAMA5D2_EMR 0x40
/* Compare Window Register */
#define AT91_SAMA5D2_CWR 0x44
/* Channel Gain Register */
#define AT91_SAMA5D2_CGR 0x48

/* Channel Offset Register */
#define AT91_SAMA5D2_COR 0x4c
#define AT91_SAMA5D2_COR_DIFF_OFFSET 16

/* Channel Data Register 0 */
#define AT91_SAMA5D2_CDR0 0x50
/* Analog Control Register */
#define AT91_SAMA5D2_ACR 0x94
/* Touchscreen Mode Register */
#define AT91_SAMA5D2_TSMR 0xb0
/* Touchscreen X Position Register */
#define AT91_SAMA5D2_XPOSR 0xb4
/* Touchscreen Y Position Register */
#define AT91_SAMA5D2_YPOSR 0xb8
/* Touchscreen Pressure Register */
#define AT91_SAMA5D2_PRESSR 0xbc
/* Trigger Register */
#define AT91_SAMA5D2_TRGR 0xc0
/* Mask for TRGMOD field of TRGR register */
#define AT91_SAMA5D2_TRGR_TRGMOD_MASK GENMASK(2, 0)
/* No trigger, only software trigger can start conversions */
#define AT91_SAMA5D2_TRGR_TRGMOD_NO_TRIGGER 0
/* Trigger Mode external trigger rising edge */
#define AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_RISE 1
/* Trigger Mode external trigger falling edge */
#define AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_FALL 2
/* Trigger Mode external trigger any edge */
#define AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_ANY 3

/* Correction Select Register */
#define AT91_SAMA5D2_COSR 0xd0
/* Correction Value Register */
#define AT91_SAMA5D2_CVR 0xd4
/* Channel Error Correction Register */
#define AT91_SAMA5D2_CECR 0xd8
/* Write Protection Mode Register */
#define AT91_SAMA5D2_WPMR 0xe4
/* Write Protection Status Register */
#define AT91_SAMA5D2_WPSR 0xe8
/* Version Register */
#define AT91_SAMA5D2_VERSION 0xfc

#define AT91_SAMA5D2_HW_TRIG_CNT 3
#define AT91_SAMA5D2_SINGLE_CHAN_CNT 12
#define AT91_SAMA5D2_DIFF_CHAN_CNT 6

/*
 * Maximum number of bytes to hold conversion from all channels
 * without the timestamp.
 */
#define AT91_BUFFER_MAX_CONVERSION_BYTES ((AT91_SAMA5D2_SINGLE_CHAN_CNT + \
					  AT91_SAMA5D2_DIFF_CHAN_CNT) * 2)

/* This total must also include the timestamp */
#define AT91_BUFFER_MAX_BYTES (AT91_BUFFER_MAX_CONVERSION_BYTES + 8)

#define AT91_BUFFER_MAX_HWORDS (AT91_BUFFER_MAX_BYTES / 2)

#define AT91_HWFIFO_MAX_SIZE_STR "128"
#define AT91_HWFIFO_MAX_SIZE 128

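/*
 * Sizing note (derived from the constants above): 12 single-ended plus
 * 6 differential channels at 2 bytes per conversion give at most
 * 18 * 2 = 36 bytes of sample data per scan; adding the 8-byte 64-bit
 * timestamp yields 44 bytes, i.e. 22 16-bit halfwords for the per-scan
 * buffer kept in struct at91_adc_state.
 */
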
#define AT91_SAMA5D2_CHAN_SINGLE(num, addr) \
	{ \
		.type = IIO_VOLTAGE, \
		.channel = num, \
		.address = addr, \
		.scan_index = num, \
		.scan_type = { \
			.sign = 'u', \
			.realbits = 12, \
			.storagebits = 16, \
		}, \
		.info_mask_separate = BIT(IIO_CHAN_INFO_RAW), \
		.info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE), \
		.info_mask_shared_by_all = BIT(IIO_CHAN_INFO_SAMP_FREQ), \
		.datasheet_name = "CH"#num, \
		.indexed = 1, \
	}

#define AT91_SAMA5D2_CHAN_DIFF(num, num2, addr) \
	{ \
		.type = IIO_VOLTAGE, \
		.differential = 1, \
		.channel = num, \
		.channel2 = num2, \
		.address = addr, \
		.scan_index = num + AT91_SAMA5D2_SINGLE_CHAN_CNT, \
		.scan_type = { \
			.sign = 's', \
			.realbits = 12, \
			.storagebits = 16, \
		}, \
		.info_mask_separate = BIT(IIO_CHAN_INFO_RAW), \
		.info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE), \
		.info_mask_shared_by_all = BIT(IIO_CHAN_INFO_SAMP_FREQ), \
		.datasheet_name = "CH"#num"-CH"#num2, \
		.indexed = 1, \
	}

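/*
 * Layout note: single-ended channels use scan_index 0..11 (equal to
 * their channel number), while a differential pair uses
 * scan_index = channel + AT91_SAMA5D2_SINGLE_CHAN_CNT, so scan_index
 * values are not contiguous with positions in at91_adc_channels[].
 * at91_adc_chan_xlate() below maps a scan_index back to its array entry.
 */
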
#define at91_adc_readl(st, reg) readl_relaxed(st->base + reg)
#define at91_adc_writel(st, reg, val) writel_relaxed(val, st->base + reg)

struct at91_adc_soc_info {
	unsigned startup_time;
	unsigned min_sample_rate;
	unsigned max_sample_rate;
};

struct at91_adc_trigger {
	char *name;
	unsigned int trgmod_value;
	unsigned int edge_type;
	bool hw_trig;
};

/**
 * at91_adc_dma - at91-sama5d2 dma information struct
 * @dma_chan:	the dma channel acquired
 * @rx_buf:	dma coherent allocated area
 * @rx_dma_buf:	dma handler for the buffer
 * @phys_addr:	physical address of the ADC base register
 * @buf_idx:	index inside the dma buffer where reading was last done
 * @rx_buf_sz:	size of buffer used by DMA operation
 * @watermark:	number of conversions to copy before DMA triggers irq
 * @dma_ts:	hold the start timestamp of dma operation
 */
struct at91_adc_dma {
	struct dma_chan *dma_chan;
	u8 *rx_buf;
	dma_addr_t rx_dma_buf;
	phys_addr_t phys_addr;
	int buf_idx;
	int rx_buf_sz;
	int watermark;
	s64 dma_ts;
};

struct at91_adc_state {
	void __iomem *base;
	int irq;
	struct clk *per_clk;
	struct regulator *reg;
	struct regulator *vref;
	int vref_uv;
	struct iio_trigger *trig;
	const struct at91_adc_trigger *selected_trig;
	const struct iio_chan_spec *chan;
	bool conversion_done;
	u32 conversion_value;
	struct at91_adc_soc_info soc_info;
	wait_queue_head_t wq_data_available;
	struct at91_adc_dma dma_st;
	u16 buffer[AT91_BUFFER_MAX_HWORDS];
	/*
	 * lock to prevent concurrent 'single conversion' requests through
	 * sysfs.
	 */
	struct mutex lock;
};

static const struct at91_adc_trigger at91_adc_trigger_list[] = {
	{
		.name = "external_rising",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_RISE,
		.edge_type = IRQ_TYPE_EDGE_RISING,
		.hw_trig = true,
	},
	{
		.name = "external_falling",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_FALL,
		.edge_type = IRQ_TYPE_EDGE_FALLING,
		.hw_trig = true,
	},
	{
		.name = "external_any",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_EXT_TRIG_ANY,
		.edge_type = IRQ_TYPE_EDGE_BOTH,
		.hw_trig = true,
	},
	{
		.name = "software",
		.trgmod_value = AT91_SAMA5D2_TRGR_TRGMOD_NO_TRIGGER,
		.edge_type = IRQ_TYPE_NONE,
		.hw_trig = false,
	},
};

static const struct iio_chan_spec at91_adc_channels[] = {
	AT91_SAMA5D2_CHAN_SINGLE(0, 0x50),
	AT91_SAMA5D2_CHAN_SINGLE(1, 0x54),
	AT91_SAMA5D2_CHAN_SINGLE(2, 0x58),
	AT91_SAMA5D2_CHAN_SINGLE(3, 0x5c),
	AT91_SAMA5D2_CHAN_SINGLE(4, 0x60),
	AT91_SAMA5D2_CHAN_SINGLE(5, 0x64),
	AT91_SAMA5D2_CHAN_SINGLE(6, 0x68),
	AT91_SAMA5D2_CHAN_SINGLE(7, 0x6c),
	AT91_SAMA5D2_CHAN_SINGLE(8, 0x70),
	AT91_SAMA5D2_CHAN_SINGLE(9, 0x74),
	AT91_SAMA5D2_CHAN_SINGLE(10, 0x78),
	AT91_SAMA5D2_CHAN_SINGLE(11, 0x7c),
	AT91_SAMA5D2_CHAN_DIFF(0, 1, 0x50),
	AT91_SAMA5D2_CHAN_DIFF(2, 3, 0x58),
	AT91_SAMA5D2_CHAN_DIFF(4, 5, 0x60),
	AT91_SAMA5D2_CHAN_DIFF(6, 7, 0x68),
	AT91_SAMA5D2_CHAN_DIFF(8, 9, 0x70),
	AT91_SAMA5D2_CHAN_DIFF(10, 11, 0x78),
	IIO_CHAN_SOFT_TIMESTAMP(AT91_SAMA5D2_SINGLE_CHAN_CNT
				+ AT91_SAMA5D2_DIFF_CHAN_CNT + 1),
};

static int at91_adc_chan_xlate(struct iio_dev *indio_dev, int chan)
{
	int i;

	for (i = 0; i < indio_dev->num_channels; i++) {
		if (indio_dev->channels[i].scan_index == chan)
			return i;
	}
	return -EINVAL;
}

static inline struct iio_chan_spec const *
at91_adc_chan_get(struct iio_dev *indio_dev, int chan)
{
	int index = at91_adc_chan_xlate(indio_dev, chan);

	if (index < 0)
		return NULL;
	return indio_dev->channels + index;
}

static int at91_adc_configure_trigger(struct iio_trigger *trig, bool state)
{
	struct iio_dev *indio = iio_trigger_get_drvdata(trig);
	struct at91_adc_state *st = iio_priv(indio);
	u32 status = at91_adc_readl(st, AT91_SAMA5D2_TRGR);
	u8 bit;

	/* clear TRGMOD */
	status &= ~AT91_SAMA5D2_TRGR_TRGMOD_MASK;

	if (state)
		status |= st->selected_trig->trgmod_value;

	/* set/unset hw trigger */
	at91_adc_writel(st, AT91_SAMA5D2_TRGR, status);

	for_each_set_bit(bit, indio->active_scan_mask, indio->num_channels) {
		struct iio_chan_spec const *chan = at91_adc_chan_get(indio, bit);

		if (!chan)
			continue;
		if (state) {
			at91_adc_writel(st, AT91_SAMA5D2_CHER,
					BIT(chan->channel));
			/* enable irq only if not using DMA */
			if (!st->dma_st.dma_chan) {
				at91_adc_writel(st, AT91_SAMA5D2_IER,
						BIT(chan->channel));
			}
		} else {
			/* disable irq only if not using DMA */
			if (!st->dma_st.dma_chan) {
				at91_adc_writel(st, AT91_SAMA5D2_IDR,
						BIT(chan->channel));
			}
			at91_adc_writel(st, AT91_SAMA5D2_CHDR,
					BIT(chan->channel));
		}
	}

	return 0;
}

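/*
 * Note on the non-DMA triggered path: at91_adc_interrupt() disables the
 * interrupt with disable_irq_nosync() and kicks the trigger; once all
 * consumers have run, the IIO core invokes the trigger's try_reenable
 * callback, i.e. at91_adc_reenable_trigger() below, which re-enables the
 * interrupt and reads LCDR to acknowledge the DRDY condition.
 */
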
static int at91_adc_reenable_trigger(struct iio_trigger *trig)
{
	struct iio_dev *indio = iio_trigger_get_drvdata(trig);
	struct at91_adc_state *st = iio_priv(indio);

	/* if we are using DMA, we must not reenable irq after each trigger */
	if (st->dma_st.dma_chan)
		return 0;

	enable_irq(st->irq);

	/* Needed to ACK the DRDY interrupt */
	at91_adc_readl(st, AT91_SAMA5D2_LCDR);
	return 0;
}

static const struct iio_trigger_ops at91_adc_trigger_ops = {
	.set_trigger_state = &at91_adc_configure_trigger,
	.try_reenable = &at91_adc_reenable_trigger,
	.validate_device = iio_trigger_validate_own_device,
};

static int at91_adc_dma_size_done(struct at91_adc_state *st)
{
	struct dma_tx_state state;
	enum dma_status status;
	int i, size;

	status = dmaengine_tx_status(st->dma_st.dma_chan,
				     st->dma_st.dma_chan->cookie,
				     &state);
	if (status != DMA_IN_PROGRESS)
		return 0;

	/* Transferred length is size in bytes from end of buffer */
	i = st->dma_st.rx_buf_sz - state.residue;

	/* Return available bytes */
	if (i >= st->dma_st.buf_idx)
		size = i - st->dma_st.buf_idx;
	else
		size = st->dma_st.rx_buf_sz + i - st->dma_st.buf_idx;
	return size;
}

static void at91_dma_buffer_done(void *data)
{
	struct iio_dev *indio_dev = data;

	iio_trigger_poll_chained(indio_dev->trig);
}

static int at91_adc_dma_start(struct iio_dev *indio_dev)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int ret;
	u8 bit;

	if (!st->dma_st.dma_chan)
		return 0;

	/* we start a new DMA, so set buffer index to start */
	st->dma_st.buf_idx = 0;

	/*
	 * compute buffer size w.r.t. watermark and enabled channels.
	 * scan_bytes is aligned so we need an exact size for DMA
	 */
	st->dma_st.rx_buf_sz = 0;

	for_each_set_bit(bit, indio_dev->active_scan_mask,
			 indio_dev->num_channels) {
		struct iio_chan_spec const *chan =
			at91_adc_chan_get(indio_dev, bit);

		if (!chan)
			continue;

		st->dma_st.rx_buf_sz += chan->scan_type.storagebits / 8;
	}
	st->dma_st.rx_buf_sz *= st->dma_st.watermark;

	/* Prepare a DMA cyclic transaction */
	desc = dmaengine_prep_dma_cyclic(st->dma_st.dma_chan,
					 st->dma_st.rx_dma_buf,
					 st->dma_st.rx_buf_sz,
					 st->dma_st.rx_buf_sz / 2,
					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);

	if (!desc) {
		dev_err(&indio_dev->dev, "cannot prepare DMA cyclic\n");
		return -EBUSY;
	}

	desc->callback = at91_dma_buffer_done;
	desc->callback_param = indio_dev;

	cookie = dmaengine_submit(desc);
	ret = dma_submit_error(cookie);
	if (ret) {
		dev_err(&indio_dev->dev, "cannot submit DMA cyclic\n");
		dmaengine_terminate_async(st->dma_st.dma_chan);
		return ret;
	}

	/* enable general overrun error signaling */
	at91_adc_writel(st, AT91_SAMA5D2_IER, AT91_SAMA5D2_IER_GOVRE);
	/* Issue pending DMA requests */
	dma_async_issue_pending(st->dma_st.dma_chan);

	/* consider current time as DMA start time for timestamps */
	st->dma_st.dma_ts = iio_get_time_ns(indio_dev);

	dev_dbg(&indio_dev->dev, "DMA cyclic started\n");

	return 0;
}

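/*
 * Illustrative sizing for at91_adc_dma_start() above (the channel count
 * and watermark are example values): with four enabled channels of
 * 16-bit storage and a watermark of 16, rx_buf_sz = 4 * 2 * 16 = 128
 * bytes, and the cyclic transfer raises its callback every
 * rx_buf_sz / 2 = 64 bytes, i.e. twice per pass over the buffer.
 */
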
static int at91_adc_buffer_postenable(struct iio_dev *indio_dev)
{
	int ret;

	ret = at91_adc_dma_start(indio_dev);
	if (ret) {
		dev_err(&indio_dev->dev, "buffer postenable failed\n");
		return ret;
	}

	return iio_triggered_buffer_postenable(indio_dev);
}

static int at91_adc_buffer_predisable(struct iio_dev *indio_dev)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	int ret;
	u8 bit;

	ret = iio_triggered_buffer_predisable(indio_dev);
	if (ret < 0)
		dev_err(&indio_dev->dev, "buffer predisable failed\n");

	if (!st->dma_st.dma_chan)
		return ret;

	/* if we are using DMA we must clear registers and end DMA */
	dmaengine_terminate_sync(st->dma_st.dma_chan);

	/*
	 * For each enabled channel we must read the last converted value
	 * to clear EOC status and not get a possible interrupt later.
	 * This value is being read by DMA from LCDR anyway
	 */
	for_each_set_bit(bit, indio_dev->active_scan_mask,
			 indio_dev->num_channels) {
		struct iio_chan_spec const *chan =
			at91_adc_chan_get(indio_dev, bit);

		if (!chan)
			continue;
		if (st->dma_st.dma_chan)
			at91_adc_readl(st, chan->address);
	}

	/* read overflow register to clear possible overflow status */
	at91_adc_readl(st, AT91_SAMA5D2_OVER);
	return ret;
}

static const struct iio_buffer_setup_ops at91_buffer_setup_ops = {
	.postenable = &at91_adc_buffer_postenable,
	.predisable = &at91_adc_buffer_predisable,
};

static struct iio_trigger *at91_adc_allocate_trigger(struct iio_dev *indio,
						     char *trigger_name)
{
	struct iio_trigger *trig;
	int ret;

	trig = devm_iio_trigger_alloc(&indio->dev, "%s-dev%d-%s", indio->name,
				      indio->id, trigger_name);
	if (!trig)
		return NULL;

	trig->dev.parent = indio->dev.parent;
	iio_trigger_set_drvdata(trig, indio);
	trig->ops = &at91_adc_trigger_ops;

	ret = devm_iio_trigger_register(&indio->dev, trig);
	if (ret)
		return ERR_PTR(ret);

	return trig;
}

static int at91_adc_trigger_init(struct iio_dev *indio)
{
	struct at91_adc_state *st = iio_priv(indio);

	st->trig = at91_adc_allocate_trigger(indio, st->selected_trig->name);
	if (IS_ERR(st->trig)) {
		dev_err(&indio->dev,
			"could not allocate trigger\n");
		return PTR_ERR(st->trig);
	}

	return 0;
}

static void at91_adc_trigger_handler_nodma(struct iio_dev *indio_dev,
					   struct iio_poll_func *pf)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	int i = 0;
	u8 bit;

	for_each_set_bit(bit, indio_dev->active_scan_mask,
			 indio_dev->num_channels) {
		struct iio_chan_spec const *chan =
			at91_adc_chan_get(indio_dev, bit);

		if (!chan)
			continue;
		st->buffer[i] = at91_adc_readl(st, chan->address);
		i++;
	}
	iio_push_to_buffers_with_timestamp(indio_dev, st->buffer,
					   pf->timestamp);
}

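/*
 * In at91_adc_trigger_handler_dma() below, timestamps are interpolated:
 * the time elapsed since the stored DMA start time is divided by the
 * number of samples transferred, and each scan pushed to the buffer is
 * stamped dma_ts + interval * index. For example (numbers are
 * illustrative), 8 samples arriving over 8 ms are stamped 1 ms apart.
 */
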
static void at91_adc_trigger_handler_dma(struct iio_dev *indio_dev)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	int transferred_len = at91_adc_dma_size_done(st);
	s64 ns = iio_get_time_ns(indio_dev);
	s64 interval;
	int sample_index = 0, sample_count, sample_size;

	u32 status = at91_adc_readl(st, AT91_SAMA5D2_ISR);
	/* if we reached this point, we cannot sample faster */
	if (status & AT91_SAMA5D2_IER_GOVRE)
		pr_info_ratelimited("%s: conversion overrun detected\n",
				    indio_dev->name);

	sample_size = div_s64(st->dma_st.rx_buf_sz, st->dma_st.watermark);

	sample_count = div_s64(transferred_len, sample_size);

	/*
	 * interval between samples is total time since last transfer handling
	 * divided by the number of samples (total size divided by sample size)
	 */
	interval = div_s64((ns - st->dma_st.dma_ts), sample_count);

	while (transferred_len >= sample_size) {
		iio_push_to_buffers_with_timestamp(indio_dev,
				(st->dma_st.rx_buf + st->dma_st.buf_idx),
				(st->dma_st.dma_ts + interval * sample_index));
		/* adjust remaining length */
		transferred_len -= sample_size;
		/* adjust buffer index */
		st->dma_st.buf_idx += sample_size;
		/* in case of reaching end of buffer, reset index */
		if (st->dma_st.buf_idx >= st->dma_st.rx_buf_sz)
			st->dma_st.buf_idx = 0;
		sample_index++;
	}
	/* adjust saved time for next transfer handling */
	st->dma_st.dma_ts = iio_get_time_ns(indio_dev);
}

static irqreturn_t at91_adc_trigger_handler(int irq, void *p)
{
	struct iio_poll_func *pf = p;
	struct iio_dev *indio_dev = pf->indio_dev;
	struct at91_adc_state *st = iio_priv(indio_dev);

	if (st->dma_st.dma_chan)
		at91_adc_trigger_handler_dma(indio_dev);
	else
		at91_adc_trigger_handler_nodma(indio_dev, pf);

	iio_trigger_notify_done(indio_dev->trig);

	return IRQ_HANDLED;
}

static int at91_adc_buffer_init(struct iio_dev *indio)
{
	return devm_iio_triggered_buffer_setup(&indio->dev, indio,
			&iio_pollfunc_store_time,
			&at91_adc_trigger_handler, &at91_buffer_setup_ops);
}

static unsigned at91_adc_startup_time(unsigned startup_time_min,
				      unsigned adc_clk_khz)
{
	static const unsigned int startup_lookup[] = {
		0, 8, 16, 24,
		64, 80, 96, 112,
		512, 576, 640, 704,
		768, 832, 896, 960
	};
	unsigned ticks_min, i;

	/*
	 * Since the ADC frequency is checked beforehand, the startup time
	 * constraint can always be met.
	 */

	ticks_min = startup_time_min * adc_clk_khz / 1000;
	for (i = 0; i < ARRAY_SIZE(startup_lookup); i++)
		if (startup_lookup[i] > ticks_min)
			break;

	return i;
}

static void at91_adc_setup_samp_freq(struct at91_adc_state *st, unsigned freq)
{
	struct iio_dev *indio_dev = iio_priv_to_dev(st);
	unsigned f_per, prescal, startup, mr;

	f_per = clk_get_rate(st->per_clk);
	prescal = (f_per / (2 * freq)) - 1;

	startup = at91_adc_startup_time(st->soc_info.startup_time,
					freq / 1000);

	mr = at91_adc_readl(st, AT91_SAMA5D2_MR);
	mr &= ~(AT91_SAMA5D2_MR_STARTUP_MASK | AT91_SAMA5D2_MR_PRESCAL_MASK);
	mr |= AT91_SAMA5D2_MR_STARTUP(startup);
	mr |= AT91_SAMA5D2_MR_PRESCAL(prescal);
	at91_adc_writel(st, AT91_SAMA5D2_MR, mr);

	dev_dbg(&indio_dev->dev, "freq: %u, startup: %u, prescal: %u\n",
		freq, startup, prescal);
}

static unsigned at91_adc_get_sample_freq(struct at91_adc_state *st)
{
	unsigned f_adc, f_per = clk_get_rate(st->per_clk);
	unsigned mr, prescal;

	mr = at91_adc_readl(st, AT91_SAMA5D2_MR);
	prescal = (mr >> AT91_SAMA5D2_MR_PRESCAL_OFFSET)
		  & AT91_SAMA5D2_MR_PRESCAL_MAX;
	f_adc = f_per / (2 * (prescal + 1));

	return f_adc;
}

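/*
 * Worked example of the prescaler relation used above (the peripheral
 * clock rate is illustrative only): with f_per = 83 MHz and a requested
 * rate of 1 MHz, prescal = 83000000 / (2 * 1000000) - 1 = 40, and the
 * rate read back is 83000000 / (2 * (40 + 1)) ~= 1.012 MHz.
 */
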
static irqreturn_t at91_adc_interrupt(int irq, void *private)
{
	struct iio_dev *indio = private;
	struct at91_adc_state *st = iio_priv(indio);
	u32 status = at91_adc_readl(st, AT91_SAMA5D2_ISR);
	u32 imr = at91_adc_readl(st, AT91_SAMA5D2_IMR);

	if (!(status & imr))
		return IRQ_NONE;

	if (iio_buffer_enabled(indio) && !st->dma_st.dma_chan) {
		disable_irq_nosync(irq);
		iio_trigger_poll(indio->trig);
	} else if (iio_buffer_enabled(indio) && st->dma_st.dma_chan) {
		disable_irq_nosync(irq);
		WARN(true, "Unexpected irq occurred\n");
	} else if (!iio_buffer_enabled(indio)) {
		st->conversion_value = at91_adc_readl(st, st->chan->address);
		st->conversion_done = true;
		wake_up_interruptible(&st->wq_data_available);
	}
	return IRQ_HANDLED;
}

static int at91_adc_read_raw(struct iio_dev *indio_dev,
			     struct iio_chan_spec const *chan,
			     int *val, int *val2, long mask)
{
	struct at91_adc_state *st = iio_priv(indio_dev);
	u32 cor = 0;
	int ret;

	switch (mask) {
	case IIO_CHAN_INFO_RAW:
		/* we cannot use software trigger if hw trigger enabled */
		ret = iio_device_claim_direct_mode(indio_dev);
		if (ret)
			return ret;
		mutex_lock(&st->lock);

		st->chan = chan;

		if (chan->differential)
			cor = (BIT(chan->channel) | BIT(chan->channel2)) <<
			      AT91_SAMA5D2_COR_DIFF_OFFSET;

		at91_adc_writel(st, AT91_SAMA5D2_COR, cor);
		at91_adc_writel(st, AT91_SAMA5D2_CHER, BIT(chan->channel));
		at91_adc_writel(st, AT91_SAMA5D2_IER, BIT(chan->channel));
		at91_adc_writel(st, AT91_SAMA5D2_CR, AT91_SAMA5D2_CR_START);

		ret = wait_event_interruptible_timeout(st->wq_data_available,
						       st->conversion_done,
						       msecs_to_jiffies(1000));
		if (ret == 0)
			ret = -ETIMEDOUT;

		if (ret > 0) {
			*val = st->conversion_value;
			if (chan->scan_type.sign == 's')
				*val = sign_extend32(*val, 11);
			ret = IIO_VAL_INT;
			st->conversion_done = false;
		}

		at91_adc_writel(st, AT91_SAMA5D2_IDR, BIT(chan->channel));
		at91_adc_writel(st, AT91_SAMA5D2_CHDR, BIT(chan->channel));

		/* Needed to ACK the DRDY interrupt */
		at91_adc_readl(st, AT91_SAMA5D2_LCDR);

		mutex_unlock(&st->lock);

		iio_device_release_direct_mode(indio_dev);
		return ret;

	case IIO_CHAN_INFO_SCALE:
		*val = st->vref_uv / 1000;
		if (chan->differential)
			*val *= 2;
		*val2 = chan->scan_type.realbits;
		return IIO_VAL_FRACTIONAL_LOG2;

	case IIO_CHAN_INFO_SAMP_FREQ:
		*val = at91_adc_get_sample_freq(st);
		return IIO_VAL_INT;

	default:
		return -EINVAL;
	}
}

static int at91_adc_write_raw(struct iio_dev *indio_dev,
			      struct iio_chan_spec const *chan,
			      int val, int val2, long mask)
{
	struct at91_adc_state *st = iio_priv(indio_dev);

	if (mask != IIO_CHAN_INFO_SAMP_FREQ)
		return -EINVAL;

	if (val < st->soc_info.min_sample_rate ||
	    val > st->soc_info.max_sample_rate)
		return -EINVAL;

	at91_adc_setup_samp_freq(st, val);

	return 0;
}

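/*
 * Scale example for IIO_CHAN_INFO_SCALE in at91_adc_read_raw() above
 * (the reference voltage is illustrative): with a 3.3 V vref,
 * *val = 3300 and IIO_VAL_FRACTIONAL_LOG2 with realbits = 12 reports
 * 3300 / 2^12 ~= 0.806 mV per LSB; differential channels double this
 * since they span -vref..+vref.
 */
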
static void at91_adc_dma_init(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct at91_adc_state *st = iio_priv(indio_dev);
	struct dma_slave_config config = {0};
	/*
	 * We make the buffer double the size of the fifo,
	 * such that DMA uses one half of the buffer (full fifo size)
	 * and the software uses the other half to read/write.
	 */
	unsigned int pages = DIV_ROUND_UP(AT91_HWFIFO_MAX_SIZE *
					  AT91_BUFFER_MAX_CONVERSION_BYTES * 2,
					  PAGE_SIZE);

	if (st->dma_st.dma_chan)
		return;

	st->dma_st.dma_chan = dma_request_slave_channel(&pdev->dev, "rx");

	if (!st->dma_st.dma_chan) {
		dev_info(&pdev->dev, "can't get DMA channel\n");
		goto dma_exit;
	}

	st->dma_st.rx_buf = dma_alloc_coherent(st->dma_st.dma_chan->device->dev,
					       pages * PAGE_SIZE,
					       &st->dma_st.rx_dma_buf,
					       GFP_KERNEL);
	if (!st->dma_st.rx_buf) {
		dev_info(&pdev->dev, "can't allocate coherent DMA area\n");
		goto dma_chan_disable;
	}

	/* Configure DMA channel to read data register */
	config.direction = DMA_DEV_TO_MEM;
	config.src_addr = (phys_addr_t)(st->dma_st.phys_addr
			  + AT91_SAMA5D2_LCDR);
	config.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
	config.src_maxburst = 1;
	config.dst_maxburst = 1;

	if (dmaengine_slave_config(st->dma_st.dma_chan, &config)) {
		dev_info(&pdev->dev, "can't configure DMA slave\n");
		goto dma_free_area;
	}

	dev_info(&pdev->dev, "using %s for rx DMA transfers\n",
		 dma_chan_name(st->dma_st.dma_chan));

	return;

dma_free_area:
	dma_free_coherent(st->dma_st.dma_chan->device->dev, pages * PAGE_SIZE,
			  st->dma_st.rx_buf, st->dma_st.rx_dma_buf);
dma_chan_disable:
	dma_release_channel(st->dma_st.dma_chan);
	st->dma_st.dma_chan = NULL;
dma_exit:
	dev_info(&pdev->dev, "continuing without DMA support\n");
}

static void at91_adc_dma_disable(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct at91_adc_state *st = iio_priv(indio_dev);
	unsigned int pages = DIV_ROUND_UP(AT91_HWFIFO_MAX_SIZE *
					  AT91_BUFFER_MAX_CONVERSION_BYTES * 2,
					  PAGE_SIZE);

	/* if we are not using DMA, just return */
	if (!st->dma_st.dma_chan)
		return;

	/* wait for all transactions to be terminated first */
	dmaengine_terminate_sync(st->dma_st.dma_chan);

	dma_free_coherent(st->dma_st.dma_chan->device->dev, pages * PAGE_SIZE,
			  st->dma_st.rx_buf, st->dma_st.rx_dma_buf);
	dma_release_channel(st->dma_st.dma_chan);
	st->dma_st.dma_chan = NULL;

	dev_info(&pdev->dev, "continuing without DMA support\n");
}

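/*
 * The coherent area allocated in at91_adc_dma_init() above is sized for
 * two full hardware FIFOs' worth of maximal scans:
 * 128 * 36 * 2 = 9216 bytes, which DIV_ROUND_UP() turns into 3 pages on
 * a configuration with 4 KiB pages (PAGE_SIZE is architecture dependent).
 */
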
static int at91_adc_set_watermark(struct iio_dev *indio_dev, unsigned int val)
{
	struct at91_adc_state *st = iio_priv(indio_dev);

	if (val > AT91_HWFIFO_MAX_SIZE)
		return -EINVAL;

	if (!st->selected_trig->hw_trig) {
		dev_dbg(&indio_dev->dev, "we need hw trigger for DMA\n");
		return 0;
	}

	dev_dbg(&indio_dev->dev, "new watermark is %u\n", val);
	st->dma_st.watermark = val;

	/*
	 * The logic here is: if we have watermark 1, it means we do
	 * each conversion with its own IRQ, thus we don't need DMA.
	 * If the watermark is higher, we do DMA to do all the transfers
	 * in bulk.
	 */

	if (val == 1)
		at91_adc_dma_disable(to_platform_device(&indio_dev->dev));
	else if (val > 1)
		at91_adc_dma_init(to_platform_device(&indio_dev->dev));

	return 0;
}

static const struct iio_info at91_adc_info = {
	.read_raw = &at91_adc_read_raw,
	.write_raw = &at91_adc_write_raw,
	.hwfifo_set_watermark = &at91_adc_set_watermark,
};

static void at91_adc_hw_init(struct at91_adc_state *st)
{
	at91_adc_writel(st, AT91_SAMA5D2_CR, AT91_SAMA5D2_CR_SWRST);
	at91_adc_writel(st, AT91_SAMA5D2_IDR, 0xffffffff);
	/*
	 * Transfer field must be set to 2 according to the datasheet and
	 * allows different analog settings for each channel.
	 */
	at91_adc_writel(st, AT91_SAMA5D2_MR,
			AT91_SAMA5D2_MR_TRANSFER(2) | AT91_SAMA5D2_MR_ANACH);

	at91_adc_setup_samp_freq(st, st->soc_info.min_sample_rate);
}

static ssize_t at91_adc_get_fifo_state(struct device *dev,
				       struct device_attribute *attr, char *buf)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);

	return scnprintf(buf, PAGE_SIZE, "%d\n", !!st->dma_st.dma_chan);
}

static ssize_t at91_adc_get_watermark(struct device *dev,
				      struct device_attribute *attr, char *buf)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);

	return scnprintf(buf, PAGE_SIZE, "%d\n", st->dma_st.watermark);
}

static IIO_DEVICE_ATTR(hwfifo_enabled, 0444,
		       at91_adc_get_fifo_state, NULL, 0);
static IIO_DEVICE_ATTR(hwfifo_watermark, 0444,
		       at91_adc_get_watermark, NULL, 0);

static IIO_CONST_ATTR(hwfifo_watermark_min, "2");
static IIO_CONST_ATTR(hwfifo_watermark_max, AT91_HWFIFO_MAX_SIZE_STR);

static const struct attribute *at91_adc_fifo_attributes[] = {
	&iio_const_attr_hwfifo_watermark_min.dev_attr.attr,
	&iio_const_attr_hwfifo_watermark_max.dev_attr.attr,
	&iio_dev_attr_hwfifo_watermark.dev_attr.attr,
	&iio_dev_attr_hwfifo_enabled.dev_attr.attr,
	NULL,
};

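/*
 * hwfifo_watermark_min is "2" because a watermark of 1 falls back to
 * per-conversion interrupts instead of DMA (see at91_adc_set_watermark()
 * above), and hwfifo_enabled simply reports whether a DMA channel is
 * currently held.
 */
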
static int at91_adc_probe(struct platform_device *pdev)
{
	struct iio_dev *indio_dev;
	struct at91_adc_state *st;
	struct resource *res;
	int ret, i;
	u32 edge_type = IRQ_TYPE_NONE;

	indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*st));
	if (!indio_dev)
		return -ENOMEM;

	indio_dev->dev.parent = &pdev->dev;
	indio_dev->name = dev_name(&pdev->dev);
	indio_dev->modes = INDIO_DIRECT_MODE;
	indio_dev->info = &at91_adc_info;
	indio_dev->channels = at91_adc_channels;
	indio_dev->num_channels = ARRAY_SIZE(at91_adc_channels);

	st = iio_priv(indio_dev);

	ret = of_property_read_u32(pdev->dev.of_node,
				   "atmel,min-sample-rate-hz",
				   &st->soc_info.min_sample_rate);
	if (ret) {
		dev_err(&pdev->dev,
			"invalid or missing value for atmel,min-sample-rate-hz\n");
		return ret;
	}

	ret = of_property_read_u32(pdev->dev.of_node,
				   "atmel,max-sample-rate-hz",
				   &st->soc_info.max_sample_rate);
	if (ret) {
		dev_err(&pdev->dev,
			"invalid or missing value for atmel,max-sample-rate-hz\n");
		return ret;
	}

	ret = of_property_read_u32(pdev->dev.of_node, "atmel,startup-time-ms",
				   &st->soc_info.startup_time);
	if (ret) {
		dev_err(&pdev->dev,
			"invalid or missing value for atmel,startup-time-ms\n");
		return ret;
	}

	ret = of_property_read_u32(pdev->dev.of_node,
				   "atmel,trigger-edge-type", &edge_type);
	if (ret) {
		dev_dbg(&pdev->dev,
			"atmel,trigger-edge-type not specified, only software trigger available\n");
	}

	st->selected_trig = NULL;

	/* find the right trigger, or no trigger at all */
	for (i = 0; i < AT91_SAMA5D2_HW_TRIG_CNT + 1; i++)
		if (at91_adc_trigger_list[i].edge_type == edge_type) {
			st->selected_trig = &at91_adc_trigger_list[i];
			break;
		}

	if (!st->selected_trig) {
		dev_err(&pdev->dev, "invalid external trigger edge value\n");
		return -EINVAL;
	}

	init_waitqueue_head(&st->wq_data_available);
	mutex_init(&st->lock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res)
		return -EINVAL;

	/* if we plan to use DMA, we need the physical address of the regs */
	st->dma_st.phys_addr = res->start;

	st->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(st->base))
		return PTR_ERR(st->base);

	st->irq = platform_get_irq(pdev, 0);
	if (st->irq <= 0) {
		if (!st->irq)
			st->irq = -ENXIO;

		return st->irq;
	}

	st->per_clk = devm_clk_get(&pdev->dev, "adc_clk");
	if (IS_ERR(st->per_clk))
		return PTR_ERR(st->per_clk);

	st->reg = devm_regulator_get(&pdev->dev, "vddana");
	if (IS_ERR(st->reg))
		return PTR_ERR(st->reg);

	st->vref = devm_regulator_get(&pdev->dev, "vref");
	if (IS_ERR(st->vref))
		return PTR_ERR(st->vref);

	ret = devm_request_irq(&pdev->dev, st->irq, at91_adc_interrupt, 0,
			       pdev->dev.driver->name, indio_dev);
	if (ret)
		return ret;

	ret = regulator_enable(st->reg);
	if (ret)
		return ret;

	ret = regulator_enable(st->vref);
	if (ret)
		goto reg_disable;

	st->vref_uv = regulator_get_voltage(st->vref);
	if (st->vref_uv <= 0) {
		ret = -EINVAL;
		goto vref_disable;
	}

	at91_adc_hw_init(st);

	ret = clk_prepare_enable(st->per_clk);
	if (ret)
		goto vref_disable;

	platform_set_drvdata(pdev, indio_dev);

	if (st->selected_trig->hw_trig) {
		ret = at91_adc_buffer_init(indio_dev);
		if (ret < 0) {
			dev_err(&pdev->dev, "couldn't initialize the buffer.\n");
			goto per_clk_disable_unprepare;
		}

		ret = at91_adc_trigger_init(indio_dev);
		if (ret < 0) {
			dev_err(&pdev->dev, "couldn't setup the triggers.\n");
			goto per_clk_disable_unprepare;
		}
		/*
		 * Initially the iio buffer has a length of 2 and
		 * a watermark of 1
		 */
		st->dma_st.watermark = 1;

		iio_buffer_set_attrs(indio_dev->buffer,
				     at91_adc_fifo_attributes);
	}

	if (dma_coerce_mask_and_coherent(&indio_dev->dev, DMA_BIT_MASK(32)))
		dev_info(&pdev->dev, "cannot set DMA mask to 32-bit\n");

	ret = iio_device_register(indio_dev);
	if (ret < 0)
		goto dma_disable;

	if (st->selected_trig->hw_trig)
		dev_info(&pdev->dev, "setting up trigger as %s\n",
			 st->selected_trig->name);

	dev_info(&pdev->dev, "version: %x\n",
		 readl_relaxed(st->base + AT91_SAMA5D2_VERSION));

	return 0;

dma_disable:
	at91_adc_dma_disable(pdev);
per_clk_disable_unprepare:
	clk_disable_unprepare(st->per_clk);
vref_disable:
	regulator_disable(st->vref);
reg_disable:
	regulator_disable(st->reg);
	return ret;
}

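/*
 * Teardown in at91_adc_remove() below mirrors probe in reverse order:
 * unregister the IIO device first so no new requests come in, then stop
 * DMA, gate the peripheral clock and finally drop the regulators.
 */
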
static int at91_adc_remove(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct at91_adc_state *st = iio_priv(indio_dev);

	iio_device_unregister(indio_dev);

	at91_adc_dma_disable(pdev);

	clk_disable_unprepare(st->per_clk);

	regulator_disable(st->vref);
	regulator_disable(st->reg);

	return 0;
}

static __maybe_unused int at91_adc_suspend(struct device *dev)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);

	/*
	 * Do a software reset of the ADC before we go to suspend.
	 * This will ensure that all pins are free from being muxed by the ADC
	 * and can be used by other devices.
	 * Otherwise, the ADC will hog them and we can't go to suspend mode.
	 */
	at91_adc_writel(st, AT91_SAMA5D2_CR, AT91_SAMA5D2_CR_SWRST);

	clk_disable_unprepare(st->per_clk);
	regulator_disable(st->vref);
	regulator_disable(st->reg);

	return pinctrl_pm_select_sleep_state(dev);
}

static __maybe_unused int at91_adc_resume(struct device *dev)
{
	struct iio_dev *indio_dev =
		platform_get_drvdata(to_platform_device(dev));
	struct at91_adc_state *st = iio_priv(indio_dev);
	int ret;

	ret = pinctrl_pm_select_default_state(dev);
	if (ret)
		goto resume_failed;

	ret = regulator_enable(st->reg);
	if (ret)
		goto resume_failed;

	ret = regulator_enable(st->vref);
	if (ret)
		goto reg_disable_resume;

	ret = clk_prepare_enable(st->per_clk);
	if (ret)
		goto vref_disable_resume;

	at91_adc_hw_init(st);

	/* reconfiguring trigger hardware state */
	if (iio_buffer_enabled(indio_dev))
		at91_adc_configure_trigger(st->trig, true);

	return 0;

vref_disable_resume:
	regulator_disable(st->vref);
reg_disable_resume:
	regulator_disable(st->reg);
resume_failed:
	dev_err(&indio_dev->dev, "failed to resume\n");
	return ret;
}

static SIMPLE_DEV_PM_OPS(at91_adc_pm_ops, at91_adc_suspend, at91_adc_resume);

static const struct of_device_id at91_adc_dt_match[] = {
	{
		.compatible = "atmel,sama5d2-adc",
	}, {
		/* sentinel */
	}
};
MODULE_DEVICE_TABLE(of, at91_adc_dt_match);

static struct platform_driver at91_adc_driver = {
	.probe = at91_adc_probe,
	.remove = at91_adc_remove,
	.driver = {
		.name = "at91-sama5d2_adc",
		.of_match_table = at91_adc_dt_match,
		.pm = &at91_adc_pm_ops,
	},
};
module_platform_driver(at91_adc_driver);

MODULE_AUTHOR("Ludovic Desroches <ludovic.desroches@atmel.com>");
MODULE_DESCRIPTION("Atmel AT91 SAMA5D2 ADC");
MODULE_LICENSE("GPL v2");