1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * This file is part of STM32 ADC driver 4 * 5 * Copyright (C) 2016, STMicroelectronics - All Rights Reserved 6 * Author: Fabrice Gasnier <fabrice.gasnier@st.com>. 7 */ 8 9 #include <linux/clk.h> 10 #include <linux/delay.h> 11 #include <linux/dma-mapping.h> 12 #include <linux/dmaengine.h> 13 #include <linux/iio/iio.h> 14 #include <linux/iio/buffer.h> 15 #include <linux/iio/timer/stm32-lptim-trigger.h> 16 #include <linux/iio/timer/stm32-timer-trigger.h> 17 #include <linux/iio/trigger.h> 18 #include <linux/iio/trigger_consumer.h> 19 #include <linux/iio/triggered_buffer.h> 20 #include <linux/interrupt.h> 21 #include <linux/io.h> 22 #include <linux/iopoll.h> 23 #include <linux/module.h> 24 #include <linux/nvmem-consumer.h> 25 #include <linux/platform_device.h> 26 #include <linux/pm_runtime.h> 27 #include <linux/of.h> 28 #include <linux/of_device.h> 29 30 #include "stm32-adc-core.h" 31 32 /* Number of linear calibration shadow registers / LINCALRDYW control bits */ 33 #define STM32H7_LINCALFACT_NUM 6 34 35 /* BOOST bit must be set on STM32H7 when ADC clock is above 20MHz */ 36 #define STM32H7_BOOST_CLKRATE 20000000UL 37 38 #define STM32_ADC_CH_MAX 20 /* max number of channels */ 39 #define STM32_ADC_CH_SZ 16 /* max channel name size */ 40 #define STM32_ADC_MAX_SQ 16 /* SQ1..SQ16 */ 41 #define STM32_ADC_MAX_SMP 7 /* SMPx range is [0..7] */ 42 #define STM32_ADC_TIMEOUT_US 100000 43 #define STM32_ADC_TIMEOUT (msecs_to_jiffies(STM32_ADC_TIMEOUT_US / 1000)) 44 #define STM32_ADC_HW_STOP_DELAY_MS 100 45 #define STM32_ADC_VREFINT_VOLTAGE 3300 46 47 #define STM32_DMA_BUFFER_SIZE PAGE_SIZE 48 49 /* External trigger enable */ 50 enum stm32_adc_exten { 51 STM32_EXTEN_SWTRIG, 52 STM32_EXTEN_HWTRIG_RISING_EDGE, 53 STM32_EXTEN_HWTRIG_FALLING_EDGE, 54 STM32_EXTEN_HWTRIG_BOTH_EDGES, 55 }; 56 57 /* extsel - trigger mux selection value */ 58 enum stm32_adc_extsel { 59 STM32_EXT0, 60 STM32_EXT1, 61 STM32_EXT2, 62 STM32_EXT3, 63 STM32_EXT4, 64 STM32_EXT5, 65 STM32_EXT6, 66 STM32_EXT7, 67 STM32_EXT8, 68 STM32_EXT9, 69 STM32_EXT10, 70 STM32_EXT11, 71 STM32_EXT12, 72 STM32_EXT13, 73 STM32_EXT14, 74 STM32_EXT15, 75 STM32_EXT16, 76 STM32_EXT17, 77 STM32_EXT18, 78 STM32_EXT19, 79 STM32_EXT20, 80 }; 81 82 enum stm32_adc_int_ch { 83 STM32_ADC_INT_CH_NONE = -1, 84 STM32_ADC_INT_CH_VDDCORE, 85 STM32_ADC_INT_CH_VREFINT, 86 STM32_ADC_INT_CH_VBAT, 87 STM32_ADC_INT_CH_NB, 88 }; 89 90 /** 91 * struct stm32_adc_ic - ADC internal channels 92 * @name: name of the internal channel 93 * @idx: internal channel enum index 94 */ 95 struct stm32_adc_ic { 96 const char *name; 97 u32 idx; 98 }; 99 100 static const struct stm32_adc_ic stm32_adc_ic[STM32_ADC_INT_CH_NB] = { 101 { "vddcore", STM32_ADC_INT_CH_VDDCORE }, 102 { "vrefint", STM32_ADC_INT_CH_VREFINT }, 103 { "vbat", STM32_ADC_INT_CH_VBAT }, 104 }; 105 106 /** 107 * struct stm32_adc_trig_info - ADC trigger info 108 * @name: name of the trigger, corresponding to its source 109 * @extsel: trigger selection 110 */ 111 struct stm32_adc_trig_info { 112 const char *name; 113 enum stm32_adc_extsel extsel; 114 }; 115 116 /** 117 * struct stm32_adc_calib - optional adc calibration data 118 * @calfact_s: Calibration offset for single ended channels 119 * @calfact_d: Calibration offset in differential 120 * @lincalfact: Linearity calibration factor 121 * @calibrated: Indicates calibration status 122 */ 123 struct stm32_adc_calib { 124 u32 calfact_s; 125 u32 calfact_d; 126 u32 lincalfact[STM32H7_LINCALFACT_NUM]; 127 bool calibrated; 128 }; 129 130 /** 131 * struct stm32_adc_regs - stm32 ADC misc registers & bitfield desc 132 * @reg: register offset 133 * @mask: bitfield mask 134 * @shift: left shift 135 */ 136 struct stm32_adc_regs { 137 int reg; 138 int mask; 139 int shift; 140 }; 141 142 /** 143 * struct stm32_adc_vrefint - stm32 ADC internal reference voltage data 144 * @vrefint_cal: vrefint calibration value from nvmem 145 * @vrefint_data: vrefint actual value 146 */ 147 struct stm32_adc_vrefint { 148 u32 vrefint_cal; 149 u32 vrefint_data; 150 }; 151 152 /** 153 * struct stm32_adc_regspec - stm32 registers definition 154 * @dr: data register offset 155 * @ier_eoc: interrupt enable register & eocie bitfield 156 * @ier_ovr: interrupt enable register & overrun bitfield 157 * @isr_eoc: interrupt status register & eoc bitfield 158 * @isr_ovr: interrupt status register & overrun bitfield 159 * @sqr: reference to sequence registers array 160 * @exten: trigger control register & bitfield 161 * @extsel: trigger selection register & bitfield 162 * @res: resolution selection register & bitfield 163 * @smpr: smpr1 & smpr2 registers offset array 164 * @smp_bits: smpr1 & smpr2 index and bitfields 165 * @or_vdd: option register & vddcore bitfield 166 * @ccr_vbat: common register & vbat bitfield 167 * @ccr_vref: common register & vrefint bitfield 168 */ 169 struct stm32_adc_regspec { 170 const u32 dr; 171 const struct stm32_adc_regs ier_eoc; 172 const struct stm32_adc_regs ier_ovr; 173 const struct stm32_adc_regs isr_eoc; 174 const struct stm32_adc_regs isr_ovr; 175 const struct stm32_adc_regs *sqr; 176 const struct stm32_adc_regs exten; 177 const struct stm32_adc_regs extsel; 178 const struct stm32_adc_regs res; 179 const u32 smpr[2]; 180 const struct stm32_adc_regs *smp_bits; 181 const struct stm32_adc_regs or_vdd; 182 const struct stm32_adc_regs ccr_vbat; 183 const struct stm32_adc_regs ccr_vref; 184 }; 185 186 struct stm32_adc; 187 188 /** 189 * struct stm32_adc_cfg - stm32 compatible configuration data 190 * @regs: registers descriptions 191 * @adc_info: per instance input channels definitions 192 * @trigs: external trigger sources 193 * @clk_required: clock is required 194 * @has_vregready: vregready status flag presence 195 * @prepare: optional prepare routine (power-up, enable) 196 * @start_conv: routine to start conversions 197 * @stop_conv: routine to stop conversions 198 * @unprepare: optional unprepare routine (disable, power-down) 199 * @irq_clear: routine to clear irqs 200 * @smp_cycles: programmable sampling time (ADC clock cycles) 201 * @ts_vrefint_ns: vrefint minimum sampling time in ns 202 */ 203 struct stm32_adc_cfg { 204 const struct stm32_adc_regspec *regs; 205 const struct stm32_adc_info *adc_info; 206 struct stm32_adc_trig_info *trigs; 207 bool clk_required; 208 bool has_vregready; 209 int (*prepare)(struct iio_dev *); 210 void (*start_conv)(struct iio_dev *, bool dma); 211 void (*stop_conv)(struct iio_dev *); 212 void (*unprepare)(struct iio_dev *); 213 void (*irq_clear)(struct iio_dev *indio_dev, u32 msk); 214 const unsigned int *smp_cycles; 215 const unsigned int ts_vrefint_ns; 216 }; 217 218 /** 219 * struct stm32_adc - private data of each ADC IIO instance 220 * @common: reference to ADC block common data 221 * @offset: ADC instance register offset in ADC block 222 * @cfg: compatible configuration data 223 * @completion: end of single conversion completion 224 * @buffer: data buffer + 8 bytes for timestamp if enabled 225 * @clk: clock for this adc instance 226 * @irq: interrupt for this adc instance 227 * @lock: spinlock 228 * @bufi: data buffer index 229 * @num_conv: expected number of scan conversions 230 * @res: data resolution (e.g. RES bitfield value) 231 * @trigger_polarity: external trigger polarity (e.g. exten) 232 * @dma_chan: dma channel 233 * @rx_buf: dma rx buffer cpu address 234 * @rx_dma_buf: dma rx buffer bus address 235 * @rx_buf_sz: dma rx buffer size 236 * @difsel: bitmask to set single-ended/differential channel 237 * @pcsel: bitmask to preselect channels on some devices 238 * @smpr_val: sampling time settings (e.g. smpr1 / smpr2) 239 * @cal: optional calibration data on some devices 240 * @vrefint: internal reference voltage data 241 * @chan_name: channel name array 242 * @num_diff: number of differential channels 243 * @int_ch: internal channel indexes array 244 */ 245 struct stm32_adc { 246 struct stm32_adc_common *common; 247 u32 offset; 248 const struct stm32_adc_cfg *cfg; 249 struct completion completion; 250 u16 buffer[STM32_ADC_MAX_SQ + 4] __aligned(8); 251 struct clk *clk; 252 int irq; 253 spinlock_t lock; /* interrupt lock */ 254 unsigned int bufi; 255 unsigned int num_conv; 256 u32 res; 257 u32 trigger_polarity; 258 struct dma_chan *dma_chan; 259 u8 *rx_buf; 260 dma_addr_t rx_dma_buf; 261 unsigned int rx_buf_sz; 262 u32 difsel; 263 u32 pcsel; 264 u32 smpr_val[2]; 265 struct stm32_adc_calib cal; 266 struct stm32_adc_vrefint vrefint; 267 char chan_name[STM32_ADC_CH_MAX][STM32_ADC_CH_SZ]; 268 u32 num_diff; 269 int int_ch[STM32_ADC_INT_CH_NB]; 270 }; 271 272 struct stm32_adc_diff_channel { 273 u32 vinp; 274 u32 vinn; 275 }; 276 277 /** 278 * struct stm32_adc_info - stm32 ADC, per instance config data 279 * @max_channels: Number of channels 280 * @resolutions: available resolutions 281 * @num_res: number of available resolutions 282 */ 283 struct stm32_adc_info { 284 int max_channels; 285 const unsigned int *resolutions; 286 const unsigned int num_res; 287 }; 288 289 static const unsigned int stm32f4_adc_resolutions[] = { 290 /* sorted values so the index matches RES[1:0] in STM32F4_ADC_CR1 */ 291 12, 10, 8, 6, 292 }; 293 294 /* stm32f4 can have up to 16 channels */ 295 static const struct stm32_adc_info stm32f4_adc_info = { 296 .max_channels = 16, 297 .resolutions = stm32f4_adc_resolutions, 298 .num_res = ARRAY_SIZE(stm32f4_adc_resolutions), 299 }; 300 301 static const unsigned int stm32h7_adc_resolutions[] = { 302 /* sorted values so the index matches RES[2:0] in STM32H7_ADC_CFGR */ 303 16, 14, 12, 10, 8, 304 }; 305 306 /* stm32h7 can have up to 20 channels */ 307 static const struct stm32_adc_info stm32h7_adc_info = { 308 .max_channels = STM32_ADC_CH_MAX, 309 .resolutions = stm32h7_adc_resolutions, 310 .num_res = ARRAY_SIZE(stm32h7_adc_resolutions), 311 }; 312 313 /* 314 * stm32f4_sq - describe regular sequence registers 315 * - L: sequence len (register & bit field) 316 * - SQ1..SQ16: sequence entries (register & bit field) 317 */ 318 static const struct stm32_adc_regs stm32f4_sq[STM32_ADC_MAX_SQ + 1] = { 319 /* L: len bit field description to be kept as first element */ 320 { STM32F4_ADC_SQR1, GENMASK(23, 20), 20 }, 321 /* SQ1..SQ16 registers & bit fields (reg, mask, shift) */ 322 { STM32F4_ADC_SQR3, GENMASK(4, 0), 0 }, 323 { STM32F4_ADC_SQR3, GENMASK(9, 5), 5 }, 324 { STM32F4_ADC_SQR3, GENMASK(14, 10), 10 }, 325 { STM32F4_ADC_SQR3, GENMASK(19, 15), 15 }, 326 { STM32F4_ADC_SQR3, GENMASK(24, 20), 20 }, 327 { STM32F4_ADC_SQR3, GENMASK(29, 25), 25 }, 328 { STM32F4_ADC_SQR2, GENMASK(4, 0), 0 }, 329 { STM32F4_ADC_SQR2, GENMASK(9, 5), 5 }, 330 { STM32F4_ADC_SQR2, GENMASK(14, 10), 10 }, 331 { STM32F4_ADC_SQR2, GENMASK(19, 15), 15 }, 332 { STM32F4_ADC_SQR2, GENMASK(24, 20), 20 }, 333 { STM32F4_ADC_SQR2, GENMASK(29, 25), 25 }, 334 { STM32F4_ADC_SQR1, GENMASK(4, 0), 0 }, 335 { STM32F4_ADC_SQR1, GENMASK(9, 5), 5 }, 336 { STM32F4_ADC_SQR1, GENMASK(14, 10), 10 }, 337 { STM32F4_ADC_SQR1, GENMASK(19, 15), 15 }, 338 }; 339 340 /* STM32F4 external trigger sources for all instances */ 341 static struct stm32_adc_trig_info stm32f4_adc_trigs[] = { 342 { TIM1_CH1, STM32_EXT0 }, 343 { TIM1_CH2, STM32_EXT1 }, 344 { TIM1_CH3, STM32_EXT2 }, 345 { TIM2_CH2, STM32_EXT3 }, 346 { TIM2_CH3, STM32_EXT4 }, 347 { TIM2_CH4, STM32_EXT5 }, 348 { TIM2_TRGO, STM32_EXT6 }, 349 { TIM3_CH1, STM32_EXT7 }, 350 { TIM3_TRGO, STM32_EXT8 }, 351 { TIM4_CH4, STM32_EXT9 }, 352 { TIM5_CH1, STM32_EXT10 }, 353 { TIM5_CH2, STM32_EXT11 }, 354 { TIM5_CH3, STM32_EXT12 }, 355 { TIM8_CH1, STM32_EXT13 }, 356 { TIM8_TRGO, STM32_EXT14 }, 357 {}, /* sentinel */ 358 }; 359 360 /* 361 * stm32f4_smp_bits[] - describe sampling time register index & bit fields 362 * Sorted so it can be indexed by channel number. 363 */ 364 static const struct stm32_adc_regs stm32f4_smp_bits[] = { 365 /* STM32F4_ADC_SMPR2: smpr[] index, mask, shift for SMP0 to SMP9 */ 366 { 1, GENMASK(2, 0), 0 }, 367 { 1, GENMASK(5, 3), 3 }, 368 { 1, GENMASK(8, 6), 6 }, 369 { 1, GENMASK(11, 9), 9 }, 370 { 1, GENMASK(14, 12), 12 }, 371 { 1, GENMASK(17, 15), 15 }, 372 { 1, GENMASK(20, 18), 18 }, 373 { 1, GENMASK(23, 21), 21 }, 374 { 1, GENMASK(26, 24), 24 }, 375 { 1, GENMASK(29, 27), 27 }, 376 /* STM32F4_ADC_SMPR1, smpr[] index, mask, shift for SMP10 to SMP18 */ 377 { 0, GENMASK(2, 0), 0 }, 378 { 0, GENMASK(5, 3), 3 }, 379 { 0, GENMASK(8, 6), 6 }, 380 { 0, GENMASK(11, 9), 9 }, 381 { 0, GENMASK(14, 12), 12 }, 382 { 0, GENMASK(17, 15), 15 }, 383 { 0, GENMASK(20, 18), 18 }, 384 { 0, GENMASK(23, 21), 21 }, 385 { 0, GENMASK(26, 24), 24 }, 386 }; 387 388 /* STM32F4 programmable sampling time (ADC clock cycles) */ 389 static const unsigned int stm32f4_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = { 390 3, 15, 28, 56, 84, 112, 144, 480, 391 }; 392 393 static const struct stm32_adc_regspec stm32f4_adc_regspec = { 394 .dr = STM32F4_ADC_DR, 395 .ier_eoc = { STM32F4_ADC_CR1, STM32F4_EOCIE }, 396 .ier_ovr = { STM32F4_ADC_CR1, STM32F4_OVRIE }, 397 .isr_eoc = { STM32F4_ADC_SR, STM32F4_EOC }, 398 .isr_ovr = { STM32F4_ADC_SR, STM32F4_OVR }, 399 .sqr = stm32f4_sq, 400 .exten = { STM32F4_ADC_CR2, STM32F4_EXTEN_MASK, STM32F4_EXTEN_SHIFT }, 401 .extsel = { STM32F4_ADC_CR2, STM32F4_EXTSEL_MASK, 402 STM32F4_EXTSEL_SHIFT }, 403 .res = { STM32F4_ADC_CR1, STM32F4_RES_MASK, STM32F4_RES_SHIFT }, 404 .smpr = { STM32F4_ADC_SMPR1, STM32F4_ADC_SMPR2 }, 405 .smp_bits = stm32f4_smp_bits, 406 }; 407 408 static const struct stm32_adc_regs stm32h7_sq[STM32_ADC_MAX_SQ + 1] = { 409 /* L: len bit field description to be kept as first element */ 410 { STM32H7_ADC_SQR1, GENMASK(3, 0), 0 }, 411 /* SQ1..SQ16 registers & bit fields (reg, mask, shift) */ 412 { STM32H7_ADC_SQR1, GENMASK(10, 6), 6 }, 413 { STM32H7_ADC_SQR1, GENMASK(16, 12), 12 }, 414 { STM32H7_ADC_SQR1, GENMASK(22, 18), 18 }, 415 { STM32H7_ADC_SQR1, GENMASK(28, 24), 24 }, 416 { STM32H7_ADC_SQR2, GENMASK(4, 0), 0 }, 417 { STM32H7_ADC_SQR2, GENMASK(10, 6), 6 }, 418 { STM32H7_ADC_SQR2, GENMASK(16, 12), 12 }, 419 { STM32H7_ADC_SQR2, GENMASK(22, 18), 18 }, 420 { STM32H7_ADC_SQR2, GENMASK(28, 24), 24 }, 421 { STM32H7_ADC_SQR3, GENMASK(4, 0), 0 }, 422 { STM32H7_ADC_SQR3, GENMASK(10, 6), 6 }, 423 { STM32H7_ADC_SQR3, GENMASK(16, 12), 12 }, 424 { STM32H7_ADC_SQR3, GENMASK(22, 18), 18 }, 425 { STM32H7_ADC_SQR3, GENMASK(28, 24), 24 }, 426 { STM32H7_ADC_SQR4, GENMASK(4, 0), 0 }, 427 { STM32H7_ADC_SQR4, GENMASK(10, 6), 6 }, 428 }; 429 430 /* STM32H7 external trigger sources for all instances */ 431 static struct stm32_adc_trig_info stm32h7_adc_trigs[] = { 432 { TIM1_CH1, STM32_EXT0 }, 433 { TIM1_CH2, STM32_EXT1 }, 434 { TIM1_CH3, STM32_EXT2 }, 435 { TIM2_CH2, STM32_EXT3 }, 436 { TIM3_TRGO, STM32_EXT4 }, 437 { TIM4_CH4, STM32_EXT5 }, 438 { TIM8_TRGO, STM32_EXT7 }, 439 { TIM8_TRGO2, STM32_EXT8 }, 440 { TIM1_TRGO, STM32_EXT9 }, 441 { TIM1_TRGO2, STM32_EXT10 }, 442 { TIM2_TRGO, STM32_EXT11 }, 443 { TIM4_TRGO, STM32_EXT12 }, 444 { TIM6_TRGO, STM32_EXT13 }, 445 { TIM15_TRGO, STM32_EXT14 }, 446 { TIM3_CH4, STM32_EXT15 }, 447 { LPTIM1_OUT, STM32_EXT18 }, 448 { LPTIM2_OUT, STM32_EXT19 }, 449 { LPTIM3_OUT, STM32_EXT20 }, 450 {}, 451 }; 452 453 /* 454 * stm32h7_smp_bits - describe sampling time register index & bit fields 455 * Sorted so it can be indexed by channel number. 456 */ 457 static const struct stm32_adc_regs stm32h7_smp_bits[] = { 458 /* STM32H7_ADC_SMPR1, smpr[] index, mask, shift for SMP0 to SMP9 */ 459 { 0, GENMASK(2, 0), 0 }, 460 { 0, GENMASK(5, 3), 3 }, 461 { 0, GENMASK(8, 6), 6 }, 462 { 0, GENMASK(11, 9), 9 }, 463 { 0, GENMASK(14, 12), 12 }, 464 { 0, GENMASK(17, 15), 15 }, 465 { 0, GENMASK(20, 18), 18 }, 466 { 0, GENMASK(23, 21), 21 }, 467 { 0, GENMASK(26, 24), 24 }, 468 { 0, GENMASK(29, 27), 27 }, 469 /* STM32H7_ADC_SMPR2, smpr[] index, mask, shift for SMP10 to SMP19 */ 470 { 1, GENMASK(2, 0), 0 }, 471 { 1, GENMASK(5, 3), 3 }, 472 { 1, GENMASK(8, 6), 6 }, 473 { 1, GENMASK(11, 9), 9 }, 474 { 1, GENMASK(14, 12), 12 }, 475 { 1, GENMASK(17, 15), 15 }, 476 { 1, GENMASK(20, 18), 18 }, 477 { 1, GENMASK(23, 21), 21 }, 478 { 1, GENMASK(26, 24), 24 }, 479 { 1, GENMASK(29, 27), 27 }, 480 }; 481 482 /* STM32H7 programmable sampling time (ADC clock cycles, rounded down) */ 483 static const unsigned int stm32h7_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = { 484 1, 2, 8, 16, 32, 64, 387, 810, 485 }; 486 487 static const struct stm32_adc_regspec stm32h7_adc_regspec = { 488 .dr = STM32H7_ADC_DR, 489 .ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE }, 490 .ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE }, 491 .isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC }, 492 .isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR }, 493 .sqr = stm32h7_sq, 494 .exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT }, 495 .extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK, 496 STM32H7_EXTSEL_SHIFT }, 497 .res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT }, 498 .smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 }, 499 .smp_bits = stm32h7_smp_bits, 500 }; 501 502 static const struct stm32_adc_regspec stm32mp1_adc_regspec = { 503 .dr = STM32H7_ADC_DR, 504 .ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE }, 505 .ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE }, 506 .isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC }, 507 .isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR }, 508 .sqr = stm32h7_sq, 509 .exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT }, 510 .extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK, 511 STM32H7_EXTSEL_SHIFT }, 512 .res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT }, 513 .smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 }, 514 .smp_bits = stm32h7_smp_bits, 515 .or_vdd = { STM32MP1_ADC2_OR, STM32MP1_VDDCOREEN }, 516 .ccr_vbat = { STM32H7_ADC_CCR, STM32H7_VBATEN }, 517 .ccr_vref = { STM32H7_ADC_CCR, STM32H7_VREFEN }, 518 }; 519 520 /* 521 * STM32 ADC registers access routines 522 * @adc: stm32 adc instance 523 * @reg: reg offset in adc instance 524 * 525 * Note: All instances share same base, with 0x0, 0x100 or 0x200 offset resp. 526 * for adc1, adc2 and adc3. 527 */ 528 static u32 stm32_adc_readl(struct stm32_adc *adc, u32 reg) 529 { 530 return readl_relaxed(adc->common->base + adc->offset + reg); 531 } 532 533 #define stm32_adc_readl_addr(addr) stm32_adc_readl(adc, addr) 534 535 #define stm32_adc_readl_poll_timeout(reg, val, cond, sleep_us, timeout_us) \ 536 readx_poll_timeout(stm32_adc_readl_addr, reg, val, \ 537 cond, sleep_us, timeout_us) 538 539 static u16 stm32_adc_readw(struct stm32_adc *adc, u32 reg) 540 { 541 return readw_relaxed(adc->common->base + adc->offset + reg); 542 } 543 544 static void stm32_adc_writel(struct stm32_adc *adc, u32 reg, u32 val) 545 { 546 writel_relaxed(val, adc->common->base + adc->offset + reg); 547 } 548 549 static void stm32_adc_set_bits(struct stm32_adc *adc, u32 reg, u32 bits) 550 { 551 unsigned long flags; 552 553 spin_lock_irqsave(&adc->lock, flags); 554 stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) | bits); 555 spin_unlock_irqrestore(&adc->lock, flags); 556 } 557 558 static void stm32_adc_set_bits_common(struct stm32_adc *adc, u32 reg, u32 bits) 559 { 560 spin_lock(&adc->common->lock); 561 writel_relaxed(readl_relaxed(adc->common->base + reg) | bits, 562 adc->common->base + reg); 563 spin_unlock(&adc->common->lock); 564 } 565 566 static void stm32_adc_clr_bits(struct stm32_adc *adc, u32 reg, u32 bits) 567 { 568 unsigned long flags; 569 570 spin_lock_irqsave(&adc->lock, flags); 571 stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) & ~bits); 572 spin_unlock_irqrestore(&adc->lock, flags); 573 } 574 575 static void stm32_adc_clr_bits_common(struct stm32_adc *adc, u32 reg, u32 bits) 576 { 577 spin_lock(&adc->common->lock); 578 writel_relaxed(readl_relaxed(adc->common->base + reg) & ~bits, 579 adc->common->base + reg); 580 spin_unlock(&adc->common->lock); 581 } 582 583 /** 584 * stm32_adc_conv_irq_enable() - Enable end of conversion interrupt 585 * @adc: stm32 adc instance 586 */ 587 static void stm32_adc_conv_irq_enable(struct stm32_adc *adc) 588 { 589 stm32_adc_set_bits(adc, adc->cfg->regs->ier_eoc.reg, 590 adc->cfg->regs->ier_eoc.mask); 591 }; 592 593 /** 594 * stm32_adc_conv_irq_disable() - Disable end of conversion interrupt 595 * @adc: stm32 adc instance 596 */ 597 static void stm32_adc_conv_irq_disable(struct stm32_adc *adc) 598 { 599 stm32_adc_clr_bits(adc, adc->cfg->regs->ier_eoc.reg, 600 adc->cfg->regs->ier_eoc.mask); 601 } 602 603 static void stm32_adc_ovr_irq_enable(struct stm32_adc *adc) 604 { 605 stm32_adc_set_bits(adc, adc->cfg->regs->ier_ovr.reg, 606 adc->cfg->regs->ier_ovr.mask); 607 } 608 609 static void stm32_adc_ovr_irq_disable(struct stm32_adc *adc) 610 { 611 stm32_adc_clr_bits(adc, adc->cfg->regs->ier_ovr.reg, 612 adc->cfg->regs->ier_ovr.mask); 613 } 614 615 static void stm32_adc_set_res(struct stm32_adc *adc) 616 { 617 const struct stm32_adc_regs *res = &adc->cfg->regs->res; 618 u32 val; 619 620 val = stm32_adc_readl(adc, res->reg); 621 val = (val & ~res->mask) | (adc->res << res->shift); 622 stm32_adc_writel(adc, res->reg, val); 623 } 624 625 static int stm32_adc_hw_stop(struct device *dev) 626 { 627 struct iio_dev *indio_dev = dev_get_drvdata(dev); 628 struct stm32_adc *adc = iio_priv(indio_dev); 629 630 if (adc->cfg->unprepare) 631 adc->cfg->unprepare(indio_dev); 632 633 clk_disable_unprepare(adc->clk); 634 635 return 0; 636 } 637 638 static int stm32_adc_hw_start(struct device *dev) 639 { 640 struct iio_dev *indio_dev = dev_get_drvdata(dev); 641 struct stm32_adc *adc = iio_priv(indio_dev); 642 int ret; 643 644 ret = clk_prepare_enable(adc->clk); 645 if (ret) 646 return ret; 647 648 stm32_adc_set_res(adc); 649 650 if (adc->cfg->prepare) { 651 ret = adc->cfg->prepare(indio_dev); 652 if (ret) 653 goto err_clk_dis; 654 } 655 656 return 0; 657 658 err_clk_dis: 659 clk_disable_unprepare(adc->clk); 660 661 return ret; 662 } 663 664 static void stm32_adc_int_ch_enable(struct iio_dev *indio_dev) 665 { 666 struct stm32_adc *adc = iio_priv(indio_dev); 667 u32 i; 668 669 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) { 670 if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE) 671 continue; 672 673 switch (i) { 674 case STM32_ADC_INT_CH_VDDCORE: 675 dev_dbg(&indio_dev->dev, "Enable VDDCore\n"); 676 stm32_adc_set_bits(adc, adc->cfg->regs->or_vdd.reg, 677 adc->cfg->regs->or_vdd.mask); 678 break; 679 case STM32_ADC_INT_CH_VREFINT: 680 dev_dbg(&indio_dev->dev, "Enable VREFInt\n"); 681 stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vref.reg, 682 adc->cfg->regs->ccr_vref.mask); 683 break; 684 case STM32_ADC_INT_CH_VBAT: 685 dev_dbg(&indio_dev->dev, "Enable VBAT\n"); 686 stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vbat.reg, 687 adc->cfg->regs->ccr_vbat.mask); 688 break; 689 } 690 } 691 } 692 693 static void stm32_adc_int_ch_disable(struct stm32_adc *adc) 694 { 695 u32 i; 696 697 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) { 698 if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE) 699 continue; 700 701 switch (i) { 702 case STM32_ADC_INT_CH_VDDCORE: 703 stm32_adc_clr_bits(adc, adc->cfg->regs->or_vdd.reg, 704 adc->cfg->regs->or_vdd.mask); 705 break; 706 case STM32_ADC_INT_CH_VREFINT: 707 stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vref.reg, 708 adc->cfg->regs->ccr_vref.mask); 709 break; 710 case STM32_ADC_INT_CH_VBAT: 711 stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vbat.reg, 712 adc->cfg->regs->ccr_vbat.mask); 713 break; 714 } 715 } 716 } 717 718 /** 719 * stm32f4_adc_start_conv() - Start conversions for regular channels. 720 * @indio_dev: IIO device instance 721 * @dma: use dma to transfer conversion result 722 * 723 * Start conversions for regular channels. 724 * Also take care of normal or DMA mode. Circular DMA may be used for regular 725 * conversions, in IIO buffer modes. Otherwise, use ADC interrupt with direct 726 * DR read instead (e.g. read_raw, or triggered buffer mode without DMA). 727 */ 728 static void stm32f4_adc_start_conv(struct iio_dev *indio_dev, bool dma) 729 { 730 struct stm32_adc *adc = iio_priv(indio_dev); 731 732 stm32_adc_set_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN); 733 734 if (dma) 735 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, 736 STM32F4_DMA | STM32F4_DDS); 737 738 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_EOCS | STM32F4_ADON); 739 740 /* Wait for Power-up time (tSTAB from datasheet) */ 741 usleep_range(2, 3); 742 743 /* Software start ? (e.g. trigger detection disabled ?) */ 744 if (!(stm32_adc_readl(adc, STM32F4_ADC_CR2) & STM32F4_EXTEN_MASK)) 745 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_SWSTART); 746 } 747 748 static void stm32f4_adc_stop_conv(struct iio_dev *indio_dev) 749 { 750 struct stm32_adc *adc = iio_priv(indio_dev); 751 752 stm32_adc_clr_bits(adc, STM32F4_ADC_CR2, STM32F4_EXTEN_MASK); 753 stm32_adc_clr_bits(adc, STM32F4_ADC_SR, STM32F4_STRT); 754 755 stm32_adc_clr_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN); 756 stm32_adc_clr_bits(adc, STM32F4_ADC_CR2, 757 STM32F4_ADON | STM32F4_DMA | STM32F4_DDS); 758 } 759 760 static void stm32f4_adc_irq_clear(struct iio_dev *indio_dev, u32 msk) 761 { 762 struct stm32_adc *adc = iio_priv(indio_dev); 763 764 stm32_adc_clr_bits(adc, adc->cfg->regs->isr_eoc.reg, msk); 765 } 766 767 static void stm32h7_adc_start_conv(struct iio_dev *indio_dev, bool dma) 768 { 769 struct stm32_adc *adc = iio_priv(indio_dev); 770 enum stm32h7_adc_dmngt dmngt; 771 unsigned long flags; 772 u32 val; 773 774 if (dma) 775 dmngt = STM32H7_DMNGT_DMA_CIRC; 776 else 777 dmngt = STM32H7_DMNGT_DR_ONLY; 778 779 spin_lock_irqsave(&adc->lock, flags); 780 val = stm32_adc_readl(adc, STM32H7_ADC_CFGR); 781 val = (val & ~STM32H7_DMNGT_MASK) | (dmngt << STM32H7_DMNGT_SHIFT); 782 stm32_adc_writel(adc, STM32H7_ADC_CFGR, val); 783 spin_unlock_irqrestore(&adc->lock, flags); 784 785 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTART); 786 } 787 788 static void stm32h7_adc_stop_conv(struct iio_dev *indio_dev) 789 { 790 struct stm32_adc *adc = iio_priv(indio_dev); 791 int ret; 792 u32 val; 793 794 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTP); 795 796 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 797 !(val & (STM32H7_ADSTART)), 798 100, STM32_ADC_TIMEOUT_US); 799 if (ret) 800 dev_warn(&indio_dev->dev, "stop failed\n"); 801 802 stm32_adc_clr_bits(adc, STM32H7_ADC_CFGR, STM32H7_DMNGT_MASK); 803 } 804 805 static void stm32h7_adc_irq_clear(struct iio_dev *indio_dev, u32 msk) 806 { 807 struct stm32_adc *adc = iio_priv(indio_dev); 808 /* On STM32H7 IRQs are cleared by writing 1 into ISR register */ 809 stm32_adc_set_bits(adc, adc->cfg->regs->isr_eoc.reg, msk); 810 } 811 812 static int stm32h7_adc_exit_pwr_down(struct iio_dev *indio_dev) 813 { 814 struct stm32_adc *adc = iio_priv(indio_dev); 815 int ret; 816 u32 val; 817 818 /* Exit deep power down, then enable ADC voltage regulator */ 819 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD); 820 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADVREGEN); 821 822 if (adc->common->rate > STM32H7_BOOST_CLKRATE) 823 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST); 824 825 /* Wait for startup time */ 826 if (!adc->cfg->has_vregready) { 827 usleep_range(10, 20); 828 return 0; 829 } 830 831 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val, 832 val & STM32MP1_VREGREADY, 100, 833 STM32_ADC_TIMEOUT_US); 834 if (ret) { 835 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD); 836 dev_err(&indio_dev->dev, "Failed to exit power down\n"); 837 } 838 839 return ret; 840 } 841 842 static void stm32h7_adc_enter_pwr_down(struct stm32_adc *adc) 843 { 844 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST); 845 846 /* Setting DEEPPWD disables ADC vreg and clears ADVREGEN */ 847 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD); 848 } 849 850 static int stm32h7_adc_enable(struct iio_dev *indio_dev) 851 { 852 struct stm32_adc *adc = iio_priv(indio_dev); 853 int ret; 854 u32 val; 855 856 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADEN); 857 858 /* Poll for ADRDY to be set (after adc startup time) */ 859 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val, 860 val & STM32H7_ADRDY, 861 100, STM32_ADC_TIMEOUT_US); 862 if (ret) { 863 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS); 864 dev_err(&indio_dev->dev, "Failed to enable ADC\n"); 865 } else { 866 /* Clear ADRDY by writing one */ 867 stm32_adc_set_bits(adc, STM32H7_ADC_ISR, STM32H7_ADRDY); 868 } 869 870 return ret; 871 } 872 873 static void stm32h7_adc_disable(struct iio_dev *indio_dev) 874 { 875 struct stm32_adc *adc = iio_priv(indio_dev); 876 int ret; 877 u32 val; 878 879 /* Disable ADC and wait until it's effectively disabled */ 880 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS); 881 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 882 !(val & STM32H7_ADEN), 100, 883 STM32_ADC_TIMEOUT_US); 884 if (ret) 885 dev_warn(&indio_dev->dev, "Failed to disable\n"); 886 } 887 888 /** 889 * stm32h7_adc_read_selfcalib() - read calibration shadow regs, save result 890 * @indio_dev: IIO device instance 891 * Note: Must be called once ADC is enabled, so LINCALRDYW[1..6] are writable 892 */ 893 static int stm32h7_adc_read_selfcalib(struct iio_dev *indio_dev) 894 { 895 struct stm32_adc *adc = iio_priv(indio_dev); 896 int i, ret; 897 u32 lincalrdyw_mask, val; 898 899 /* Read linearity calibration */ 900 lincalrdyw_mask = STM32H7_LINCALRDYW6; 901 for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) { 902 /* Clear STM32H7_LINCALRDYW[6..1]: transfer calib to CALFACT2 */ 903 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask); 904 905 /* Poll: wait calib data to be ready in CALFACT2 register */ 906 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 907 !(val & lincalrdyw_mask), 908 100, STM32_ADC_TIMEOUT_US); 909 if (ret) { 910 dev_err(&indio_dev->dev, "Failed to read calfact\n"); 911 return ret; 912 } 913 914 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2); 915 adc->cal.lincalfact[i] = (val & STM32H7_LINCALFACT_MASK); 916 adc->cal.lincalfact[i] >>= STM32H7_LINCALFACT_SHIFT; 917 918 lincalrdyw_mask >>= 1; 919 } 920 921 /* Read offset calibration */ 922 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT); 923 adc->cal.calfact_s = (val & STM32H7_CALFACT_S_MASK); 924 adc->cal.calfact_s >>= STM32H7_CALFACT_S_SHIFT; 925 adc->cal.calfact_d = (val & STM32H7_CALFACT_D_MASK); 926 adc->cal.calfact_d >>= STM32H7_CALFACT_D_SHIFT; 927 adc->cal.calibrated = true; 928 929 return 0; 930 } 931 932 /** 933 * stm32h7_adc_restore_selfcalib() - Restore saved self-calibration result 934 * @indio_dev: IIO device instance 935 * Note: ADC must be enabled, with no on-going conversions. 936 */ 937 static int stm32h7_adc_restore_selfcalib(struct iio_dev *indio_dev) 938 { 939 struct stm32_adc *adc = iio_priv(indio_dev); 940 int i, ret; 941 u32 lincalrdyw_mask, val; 942 943 val = (adc->cal.calfact_s << STM32H7_CALFACT_S_SHIFT) | 944 (adc->cal.calfact_d << STM32H7_CALFACT_D_SHIFT); 945 stm32_adc_writel(adc, STM32H7_ADC_CALFACT, val); 946 947 lincalrdyw_mask = STM32H7_LINCALRDYW6; 948 for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) { 949 /* 950 * Write saved calibration data to shadow registers: 951 * Write CALFACT2, and set LINCALRDYW[6..1] bit to trigger 952 * data write. Then poll to wait for complete transfer. 953 */ 954 val = adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT; 955 stm32_adc_writel(adc, STM32H7_ADC_CALFACT2, val); 956 stm32_adc_set_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask); 957 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 958 val & lincalrdyw_mask, 959 100, STM32_ADC_TIMEOUT_US); 960 if (ret) { 961 dev_err(&indio_dev->dev, "Failed to write calfact\n"); 962 return ret; 963 } 964 965 /* 966 * Read back calibration data, has two effects: 967 * - It ensures bits LINCALRDYW[6..1] are kept cleared 968 * for next time calibration needs to be restored. 969 * - BTW, bit clear triggers a read, then check data has been 970 * correctly written. 971 */ 972 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask); 973 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 974 !(val & lincalrdyw_mask), 975 100, STM32_ADC_TIMEOUT_US); 976 if (ret) { 977 dev_err(&indio_dev->dev, "Failed to read calfact\n"); 978 return ret; 979 } 980 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2); 981 if (val != adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT) { 982 dev_err(&indio_dev->dev, "calfact not consistent\n"); 983 return -EIO; 984 } 985 986 lincalrdyw_mask >>= 1; 987 } 988 989 return 0; 990 } 991 992 /* 993 * Fixed timeout value for ADC calibration. 994 * worst cases: 995 * - low clock frequency 996 * - maximum prescalers 997 * Calibration requires: 998 * - 131,072 ADC clock cycle for the linear calibration 999 * - 20 ADC clock cycle for the offset calibration 1000 * 1001 * Set to 100ms for now 1002 */ 1003 #define STM32H7_ADC_CALIB_TIMEOUT_US 100000 1004 1005 /** 1006 * stm32h7_adc_selfcalib() - Procedure to calibrate ADC 1007 * @indio_dev: IIO device instance 1008 * Note: Must be called once ADC is out of power down. 1009 */ 1010 static int stm32h7_adc_selfcalib(struct iio_dev *indio_dev) 1011 { 1012 struct stm32_adc *adc = iio_priv(indio_dev); 1013 int ret; 1014 u32 val; 1015 1016 if (adc->cal.calibrated) 1017 return true; 1018 1019 /* 1020 * Select calibration mode: 1021 * - Offset calibration for single ended inputs 1022 * - No linearity calibration (do it later, before reading it) 1023 */ 1024 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_ADCALDIF); 1025 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_ADCALLIN); 1026 1027 /* Start calibration, then wait for completion */ 1028 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL); 1029 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 1030 !(val & STM32H7_ADCAL), 100, 1031 STM32H7_ADC_CALIB_TIMEOUT_US); 1032 if (ret) { 1033 dev_err(&indio_dev->dev, "calibration failed\n"); 1034 goto out; 1035 } 1036 1037 /* 1038 * Select calibration mode, then start calibration: 1039 * - Offset calibration for differential input 1040 * - Linearity calibration (needs to be done only once for single/diff) 1041 * will run simultaneously with offset calibration. 1042 */ 1043 stm32_adc_set_bits(adc, STM32H7_ADC_CR, 1044 STM32H7_ADCALDIF | STM32H7_ADCALLIN); 1045 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL); 1046 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 1047 !(val & STM32H7_ADCAL), 100, 1048 STM32H7_ADC_CALIB_TIMEOUT_US); 1049 if (ret) { 1050 dev_err(&indio_dev->dev, "calibration failed\n"); 1051 goto out; 1052 } 1053 1054 out: 1055 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, 1056 STM32H7_ADCALDIF | STM32H7_ADCALLIN); 1057 1058 return ret; 1059 } 1060 1061 /** 1062 * stm32h7_adc_prepare() - Leave power down mode to enable ADC. 1063 * @indio_dev: IIO device instance 1064 * Leave power down mode. 1065 * Configure channels as single ended or differential before enabling ADC. 1066 * Enable ADC. 1067 * Restore calibration data. 1068 * Pre-select channels that may be used in PCSEL (required by input MUX / IO): 1069 * - Only one input is selected for single ended (e.g. 'vinp') 1070 * - Two inputs are selected for differential channels (e.g. 'vinp' & 'vinn') 1071 */ 1072 static int stm32h7_adc_prepare(struct iio_dev *indio_dev) 1073 { 1074 struct stm32_adc *adc = iio_priv(indio_dev); 1075 int calib, ret; 1076 1077 ret = stm32h7_adc_exit_pwr_down(indio_dev); 1078 if (ret) 1079 return ret; 1080 1081 ret = stm32h7_adc_selfcalib(indio_dev); 1082 if (ret < 0) 1083 goto pwr_dwn; 1084 calib = ret; 1085 1086 stm32_adc_int_ch_enable(indio_dev); 1087 1088 stm32_adc_writel(adc, STM32H7_ADC_DIFSEL, adc->difsel); 1089 1090 ret = stm32h7_adc_enable(indio_dev); 1091 if (ret) 1092 goto ch_disable; 1093 1094 /* Either restore or read calibration result for future reference */ 1095 if (calib) 1096 ret = stm32h7_adc_restore_selfcalib(indio_dev); 1097 else 1098 ret = stm32h7_adc_read_selfcalib(indio_dev); 1099 if (ret) 1100 goto disable; 1101 1102 stm32_adc_writel(adc, STM32H7_ADC_PCSEL, adc->pcsel); 1103 1104 return 0; 1105 1106 disable: 1107 stm32h7_adc_disable(indio_dev); 1108 ch_disable: 1109 stm32_adc_int_ch_disable(adc); 1110 pwr_dwn: 1111 stm32h7_adc_enter_pwr_down(adc); 1112 1113 return ret; 1114 } 1115 1116 static void stm32h7_adc_unprepare(struct iio_dev *indio_dev) 1117 { 1118 struct stm32_adc *adc = iio_priv(indio_dev); 1119 1120 stm32_adc_writel(adc, STM32H7_ADC_PCSEL, 0); 1121 stm32h7_adc_disable(indio_dev); 1122 stm32_adc_int_ch_disable(adc); 1123 stm32h7_adc_enter_pwr_down(adc); 1124 } 1125 1126 /** 1127 * stm32_adc_conf_scan_seq() - Build regular channels scan sequence 1128 * @indio_dev: IIO device 1129 * @scan_mask: channels to be converted 1130 * 1131 * Conversion sequence : 1132 * Apply sampling time settings for all channels. 1133 * Configure ADC scan sequence based on selected channels in scan_mask. 1134 * Add channels to SQR registers, from scan_mask LSB to MSB, then 1135 * program sequence len. 1136 */ 1137 static int stm32_adc_conf_scan_seq(struct iio_dev *indio_dev, 1138 const unsigned long *scan_mask) 1139 { 1140 struct stm32_adc *adc = iio_priv(indio_dev); 1141 const struct stm32_adc_regs *sqr = adc->cfg->regs->sqr; 1142 const struct iio_chan_spec *chan; 1143 u32 val, bit; 1144 int i = 0; 1145 1146 /* Apply sampling time settings */ 1147 stm32_adc_writel(adc, adc->cfg->regs->smpr[0], adc->smpr_val[0]); 1148 stm32_adc_writel(adc, adc->cfg->regs->smpr[1], adc->smpr_val[1]); 1149 1150 for_each_set_bit(bit, scan_mask, indio_dev->masklength) { 1151 chan = indio_dev->channels + bit; 1152 /* 1153 * Assign one channel per SQ entry in regular 1154 * sequence, starting with SQ1. 1155 */ 1156 i++; 1157 if (i > STM32_ADC_MAX_SQ) 1158 return -EINVAL; 1159 1160 dev_dbg(&indio_dev->dev, "%s chan %d to SQ%d\n", 1161 __func__, chan->channel, i); 1162 1163 val = stm32_adc_readl(adc, sqr[i].reg); 1164 val &= ~sqr[i].mask; 1165 val |= chan->channel << sqr[i].shift; 1166 stm32_adc_writel(adc, sqr[i].reg, val); 1167 } 1168 1169 if (!i) 1170 return -EINVAL; 1171 1172 /* Sequence len */ 1173 val = stm32_adc_readl(adc, sqr[0].reg); 1174 val &= ~sqr[0].mask; 1175 val |= ((i - 1) << sqr[0].shift); 1176 stm32_adc_writel(adc, sqr[0].reg, val); 1177 1178 return 0; 1179 } 1180 1181 /** 1182 * stm32_adc_get_trig_extsel() - Get external trigger selection 1183 * @indio_dev: IIO device structure 1184 * @trig: trigger 1185 * 1186 * Returns trigger extsel value, if trig matches, -EINVAL otherwise. 1187 */ 1188 static int stm32_adc_get_trig_extsel(struct iio_dev *indio_dev, 1189 struct iio_trigger *trig) 1190 { 1191 struct stm32_adc *adc = iio_priv(indio_dev); 1192 int i; 1193 1194 /* lookup triggers registered by stm32 timer trigger driver */ 1195 for (i = 0; adc->cfg->trigs[i].name; i++) { 1196 /** 1197 * Checking both stm32 timer trigger type and trig name 1198 * should be safe against arbitrary trigger names. 1199 */ 1200 if ((is_stm32_timer_trigger(trig) || 1201 is_stm32_lptim_trigger(trig)) && 1202 !strcmp(adc->cfg->trigs[i].name, trig->name)) { 1203 return adc->cfg->trigs[i].extsel; 1204 } 1205 } 1206 1207 return -EINVAL; 1208 } 1209 1210 /** 1211 * stm32_adc_set_trig() - Set a regular trigger 1212 * @indio_dev: IIO device 1213 * @trig: IIO trigger 1214 * 1215 * Set trigger source/polarity (e.g. SW, or HW with polarity) : 1216 * - if HW trigger disabled (e.g. trig == NULL, conversion launched by sw) 1217 * - if HW trigger enabled, set source & polarity 1218 */ 1219 static int stm32_adc_set_trig(struct iio_dev *indio_dev, 1220 struct iio_trigger *trig) 1221 { 1222 struct stm32_adc *adc = iio_priv(indio_dev); 1223 u32 val, extsel = 0, exten = STM32_EXTEN_SWTRIG; 1224 unsigned long flags; 1225 int ret; 1226 1227 if (trig) { 1228 ret = stm32_adc_get_trig_extsel(indio_dev, trig); 1229 if (ret < 0) 1230 return ret; 1231 1232 /* set trigger source and polarity (default to rising edge) */ 1233 extsel = ret; 1234 exten = adc->trigger_polarity + STM32_EXTEN_HWTRIG_RISING_EDGE; 1235 } 1236 1237 spin_lock_irqsave(&adc->lock, flags); 1238 val = stm32_adc_readl(adc, adc->cfg->regs->exten.reg); 1239 val &= ~(adc->cfg->regs->exten.mask | adc->cfg->regs->extsel.mask); 1240 val |= exten << adc->cfg->regs->exten.shift; 1241 val |= extsel << adc->cfg->regs->extsel.shift; 1242 stm32_adc_writel(adc, adc->cfg->regs->exten.reg, val); 1243 spin_unlock_irqrestore(&adc->lock, flags); 1244 1245 return 0; 1246 } 1247 1248 static int stm32_adc_set_trig_pol(struct iio_dev *indio_dev, 1249 const struct iio_chan_spec *chan, 1250 unsigned int type) 1251 { 1252 struct stm32_adc *adc = iio_priv(indio_dev); 1253 1254 adc->trigger_polarity = type; 1255 1256 return 0; 1257 } 1258 1259 static int stm32_adc_get_trig_pol(struct iio_dev *indio_dev, 1260 const struct iio_chan_spec *chan) 1261 { 1262 struct stm32_adc *adc = iio_priv(indio_dev); 1263 1264 return adc->trigger_polarity; 1265 } 1266 1267 static const char * const stm32_trig_pol_items[] = { 1268 "rising-edge", "falling-edge", "both-edges", 1269 }; 1270 1271 static const struct iio_enum stm32_adc_trig_pol = { 1272 .items = stm32_trig_pol_items, 1273 .num_items = ARRAY_SIZE(stm32_trig_pol_items), 1274 .get = stm32_adc_get_trig_pol, 1275 .set = stm32_adc_set_trig_pol, 1276 }; 1277 1278 /** 1279 * stm32_adc_single_conv() - Performs a single conversion 1280 * @indio_dev: IIO device 1281 * @chan: IIO channel 1282 * @res: conversion result 1283 * 1284 * The function performs a single conversion on a given channel: 1285 * - Apply sampling time settings 1286 * - Program sequencer with one channel (e.g. in SQ1 with len = 1) 1287 * - Use SW trigger 1288 * - Start conversion, then wait for interrupt completion. 1289 */ 1290 static int stm32_adc_single_conv(struct iio_dev *indio_dev, 1291 const struct iio_chan_spec *chan, 1292 int *res) 1293 { 1294 struct stm32_adc *adc = iio_priv(indio_dev); 1295 struct device *dev = indio_dev->dev.parent; 1296 const struct stm32_adc_regspec *regs = adc->cfg->regs; 1297 long timeout; 1298 u32 val; 1299 int ret; 1300 1301 reinit_completion(&adc->completion); 1302 1303 adc->bufi = 0; 1304 1305 ret = pm_runtime_resume_and_get(dev); 1306 if (ret < 0) 1307 return ret; 1308 1309 /* Apply sampling time settings */ 1310 stm32_adc_writel(adc, regs->smpr[0], adc->smpr_val[0]); 1311 stm32_adc_writel(adc, regs->smpr[1], adc->smpr_val[1]); 1312 1313 /* Program chan number in regular sequence (SQ1) */ 1314 val = stm32_adc_readl(adc, regs->sqr[1].reg); 1315 val &= ~regs->sqr[1].mask; 1316 val |= chan->channel << regs->sqr[1].shift; 1317 stm32_adc_writel(adc, regs->sqr[1].reg, val); 1318 1319 /* Set regular sequence len (0 for 1 conversion) */ 1320 stm32_adc_clr_bits(adc, regs->sqr[0].reg, regs->sqr[0].mask); 1321 1322 /* Trigger detection disabled (conversion can be launched in SW) */ 1323 stm32_adc_clr_bits(adc, regs->exten.reg, regs->exten.mask); 1324 1325 stm32_adc_conv_irq_enable(adc); 1326 1327 adc->cfg->start_conv(indio_dev, false); 1328 1329 timeout = wait_for_completion_interruptible_timeout( 1330 &adc->completion, STM32_ADC_TIMEOUT); 1331 if (timeout == 0) { 1332 ret = -ETIMEDOUT; 1333 } else if (timeout < 0) { 1334 ret = timeout; 1335 } else { 1336 *res = adc->buffer[0]; 1337 ret = IIO_VAL_INT; 1338 } 1339 1340 adc->cfg->stop_conv(indio_dev); 1341 1342 stm32_adc_conv_irq_disable(adc); 1343 1344 pm_runtime_mark_last_busy(dev); 1345 pm_runtime_put_autosuspend(dev); 1346 1347 return ret; 1348 } 1349 1350 static int stm32_adc_read_raw(struct iio_dev *indio_dev, 1351 struct iio_chan_spec const *chan, 1352 int *val, int *val2, long mask) 1353 { 1354 struct stm32_adc *adc = iio_priv(indio_dev); 1355 int ret; 1356 1357 switch (mask) { 1358 case IIO_CHAN_INFO_RAW: 1359 case IIO_CHAN_INFO_PROCESSED: 1360 ret = iio_device_claim_direct_mode(indio_dev); 1361 if (ret) 1362 return ret; 1363 if (chan->type == IIO_VOLTAGE) 1364 ret = stm32_adc_single_conv(indio_dev, chan, val); 1365 else 1366 ret = -EINVAL; 1367 1368 if (mask == IIO_CHAN_INFO_PROCESSED && adc->vrefint.vrefint_cal) 1369 *val = STM32_ADC_VREFINT_VOLTAGE * adc->vrefint.vrefint_cal / *val; 1370 1371 iio_device_release_direct_mode(indio_dev); 1372 return ret; 1373 1374 case IIO_CHAN_INFO_SCALE: 1375 if (chan->differential) { 1376 *val = adc->common->vref_mv * 2; 1377 *val2 = chan->scan_type.realbits; 1378 } else { 1379 *val = adc->common->vref_mv; 1380 *val2 = chan->scan_type.realbits; 1381 } 1382 return IIO_VAL_FRACTIONAL_LOG2; 1383 1384 case IIO_CHAN_INFO_OFFSET: 1385 if (chan->differential) 1386 /* ADC_full_scale / 2 */ 1387 *val = -((1 << chan->scan_type.realbits) / 2); 1388 else 1389 *val = 0; 1390 return IIO_VAL_INT; 1391 1392 default: 1393 return -EINVAL; 1394 } 1395 } 1396 1397 static void stm32_adc_irq_clear(struct iio_dev *indio_dev, u32 msk) 1398 { 1399 struct stm32_adc *adc = iio_priv(indio_dev); 1400 1401 adc->cfg->irq_clear(indio_dev, msk); 1402 } 1403 1404 static irqreturn_t stm32_adc_threaded_isr(int irq, void *data) 1405 { 1406 struct iio_dev *indio_dev = data; 1407 struct stm32_adc *adc = iio_priv(indio_dev); 1408 const struct stm32_adc_regspec *regs = adc->cfg->regs; 1409 u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg); 1410 u32 mask = stm32_adc_readl(adc, regs->ier_eoc.reg); 1411 1412 /* Check ovr status right now, as ovr mask should be already disabled */ 1413 if (status & regs->isr_ovr.mask) { 1414 /* 1415 * Clear ovr bit to avoid subsequent calls to IRQ handler. 1416 * This requires to stop ADC first. OVR bit state in ISR, 1417 * is propaged to CSR register by hardware. 1418 */ 1419 adc->cfg->stop_conv(indio_dev); 1420 stm32_adc_irq_clear(indio_dev, regs->isr_ovr.mask); 1421 dev_err(&indio_dev->dev, "Overrun, stopping: restart needed\n"); 1422 return IRQ_HANDLED; 1423 } 1424 1425 if (!(status & mask)) 1426 dev_err_ratelimited(&indio_dev->dev, 1427 "Unexpected IRQ: IER=0x%08x, ISR=0x%08x\n", 1428 mask, status); 1429 1430 return IRQ_NONE; 1431 } 1432 1433 static irqreturn_t stm32_adc_isr(int irq, void *data) 1434 { 1435 struct iio_dev *indio_dev = data; 1436 struct stm32_adc *adc = iio_priv(indio_dev); 1437 const struct stm32_adc_regspec *regs = adc->cfg->regs; 1438 u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg); 1439 u32 mask = stm32_adc_readl(adc, regs->ier_eoc.reg); 1440 1441 if (!(status & mask)) 1442 return IRQ_WAKE_THREAD; 1443 1444 if (status & regs->isr_ovr.mask) { 1445 /* 1446 * Overrun occurred on regular conversions: data for wrong 1447 * channel may be read. Unconditionally disable interrupts 1448 * to stop processing data and print error message. 1449 * Restarting the capture can be done by disabling, then 1450 * re-enabling it (e.g. write 0, then 1 to buffer/enable). 1451 */ 1452 stm32_adc_ovr_irq_disable(adc); 1453 stm32_adc_conv_irq_disable(adc); 1454 return IRQ_WAKE_THREAD; 1455 } 1456 1457 if (status & regs->isr_eoc.mask) { 1458 /* Reading DR also clears EOC status flag */ 1459 adc->buffer[adc->bufi] = stm32_adc_readw(adc, regs->dr); 1460 if (iio_buffer_enabled(indio_dev)) { 1461 adc->bufi++; 1462 if (adc->bufi >= adc->num_conv) { 1463 stm32_adc_conv_irq_disable(adc); 1464 iio_trigger_poll(indio_dev->trig); 1465 } 1466 } else { 1467 complete(&adc->completion); 1468 } 1469 return IRQ_HANDLED; 1470 } 1471 1472 return IRQ_NONE; 1473 } 1474 1475 /** 1476 * stm32_adc_validate_trigger() - validate trigger for stm32 adc 1477 * @indio_dev: IIO device 1478 * @trig: new trigger 1479 * 1480 * Returns: 0 if trig matches one of the triggers registered by stm32 adc 1481 * driver, -EINVAL otherwise. 1482 */ 1483 static int stm32_adc_validate_trigger(struct iio_dev *indio_dev, 1484 struct iio_trigger *trig) 1485 { 1486 return stm32_adc_get_trig_extsel(indio_dev, trig) < 0 ? -EINVAL : 0; 1487 } 1488 1489 static int stm32_adc_set_watermark(struct iio_dev *indio_dev, unsigned int val) 1490 { 1491 struct stm32_adc *adc = iio_priv(indio_dev); 1492 unsigned int watermark = STM32_DMA_BUFFER_SIZE / 2; 1493 unsigned int rx_buf_sz = STM32_DMA_BUFFER_SIZE; 1494 1495 /* 1496 * dma cyclic transfers are used, buffer is split into two periods. 1497 * There should be : 1498 * - always one buffer (period) dma is working on 1499 * - one buffer (period) driver can push data. 1500 */ 1501 watermark = min(watermark, val * (unsigned)(sizeof(u16))); 1502 adc->rx_buf_sz = min(rx_buf_sz, watermark * 2 * adc->num_conv); 1503 1504 return 0; 1505 } 1506 1507 static int stm32_adc_update_scan_mode(struct iio_dev *indio_dev, 1508 const unsigned long *scan_mask) 1509 { 1510 struct stm32_adc *adc = iio_priv(indio_dev); 1511 struct device *dev = indio_dev->dev.parent; 1512 int ret; 1513 1514 ret = pm_runtime_resume_and_get(dev); 1515 if (ret < 0) 1516 return ret; 1517 1518 adc->num_conv = bitmap_weight(scan_mask, indio_dev->masklength); 1519 1520 ret = stm32_adc_conf_scan_seq(indio_dev, scan_mask); 1521 pm_runtime_mark_last_busy(dev); 1522 pm_runtime_put_autosuspend(dev); 1523 1524 return ret; 1525 } 1526 1527 static int stm32_adc_of_xlate(struct iio_dev *indio_dev, 1528 const struct of_phandle_args *iiospec) 1529 { 1530 int i; 1531 1532 for (i = 0; i < indio_dev->num_channels; i++) 1533 if (indio_dev->channels[i].channel == iiospec->args[0]) 1534 return i; 1535 1536 return -EINVAL; 1537 } 1538 1539 /** 1540 * stm32_adc_debugfs_reg_access - read or write register value 1541 * @indio_dev: IIO device structure 1542 * @reg: register offset 1543 * @writeval: value to write 1544 * @readval: value to read 1545 * 1546 * To read a value from an ADC register: 1547 * echo [ADC reg offset] > direct_reg_access 1548 * cat direct_reg_access 1549 * 1550 * To write a value in a ADC register: 1551 * echo [ADC_reg_offset] [value] > direct_reg_access 1552 */ 1553 static int stm32_adc_debugfs_reg_access(struct iio_dev *indio_dev, 1554 unsigned reg, unsigned writeval, 1555 unsigned *readval) 1556 { 1557 struct stm32_adc *adc = iio_priv(indio_dev); 1558 struct device *dev = indio_dev->dev.parent; 1559 int ret; 1560 1561 ret = pm_runtime_resume_and_get(dev); 1562 if (ret < 0) 1563 return ret; 1564 1565 if (!readval) 1566 stm32_adc_writel(adc, reg, writeval); 1567 else 1568 *readval = stm32_adc_readl(adc, reg); 1569 1570 pm_runtime_mark_last_busy(dev); 1571 pm_runtime_put_autosuspend(dev); 1572 1573 return 0; 1574 } 1575 1576 static const struct iio_info stm32_adc_iio_info = { 1577 .read_raw = stm32_adc_read_raw, 1578 .validate_trigger = stm32_adc_validate_trigger, 1579 .hwfifo_set_watermark = stm32_adc_set_watermark, 1580 .update_scan_mode = stm32_adc_update_scan_mode, 1581 .debugfs_reg_access = stm32_adc_debugfs_reg_access, 1582 .of_xlate = stm32_adc_of_xlate, 1583 }; 1584 1585 static unsigned int stm32_adc_dma_residue(struct stm32_adc *adc) 1586 { 1587 struct dma_tx_state state; 1588 enum dma_status status; 1589 1590 status = dmaengine_tx_status(adc->dma_chan, 1591 adc->dma_chan->cookie, 1592 &state); 1593 if (status == DMA_IN_PROGRESS) { 1594 /* Residue is size in bytes from end of buffer */ 1595 unsigned int i = adc->rx_buf_sz - state.residue; 1596 unsigned int size; 1597 1598 /* Return available bytes */ 1599 if (i >= adc->bufi) 1600 size = i - adc->bufi; 1601 else 1602 size = adc->rx_buf_sz + i - adc->bufi; 1603 1604 return size; 1605 } 1606 1607 return 0; 1608 } 1609 1610 static void stm32_adc_dma_buffer_done(void *data) 1611 { 1612 struct iio_dev *indio_dev = data; 1613 struct stm32_adc *adc = iio_priv(indio_dev); 1614 int residue = stm32_adc_dma_residue(adc); 1615 1616 /* 1617 * In DMA mode the trigger services of IIO are not used 1618 * (e.g. no call to iio_trigger_poll). 1619 * Calling irq handler associated to the hardware trigger is not 1620 * relevant as the conversions have already been done. Data 1621 * transfers are performed directly in DMA callback instead. 1622 * This implementation avoids to call trigger irq handler that 1623 * may sleep, in an atomic context (DMA irq handler context). 1624 */ 1625 dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi); 1626 1627 while (residue >= indio_dev->scan_bytes) { 1628 u16 *buffer = (u16 *)&adc->rx_buf[adc->bufi]; 1629 1630 iio_push_to_buffers(indio_dev, buffer); 1631 1632 residue -= indio_dev->scan_bytes; 1633 adc->bufi += indio_dev->scan_bytes; 1634 if (adc->bufi >= adc->rx_buf_sz) 1635 adc->bufi = 0; 1636 } 1637 } 1638 1639 static int stm32_adc_dma_start(struct iio_dev *indio_dev) 1640 { 1641 struct stm32_adc *adc = iio_priv(indio_dev); 1642 struct dma_async_tx_descriptor *desc; 1643 dma_cookie_t cookie; 1644 int ret; 1645 1646 if (!adc->dma_chan) 1647 return 0; 1648 1649 dev_dbg(&indio_dev->dev, "%s size=%d watermark=%d\n", __func__, 1650 adc->rx_buf_sz, adc->rx_buf_sz / 2); 1651 1652 /* Prepare a DMA cyclic transaction */ 1653 desc = dmaengine_prep_dma_cyclic(adc->dma_chan, 1654 adc->rx_dma_buf, 1655 adc->rx_buf_sz, adc->rx_buf_sz / 2, 1656 DMA_DEV_TO_MEM, 1657 DMA_PREP_INTERRUPT); 1658 if (!desc) 1659 return -EBUSY; 1660 1661 desc->callback = stm32_adc_dma_buffer_done; 1662 desc->callback_param = indio_dev; 1663 1664 cookie = dmaengine_submit(desc); 1665 ret = dma_submit_error(cookie); 1666 if (ret) { 1667 dmaengine_terminate_sync(adc->dma_chan); 1668 return ret; 1669 } 1670 1671 /* Issue pending DMA requests */ 1672 dma_async_issue_pending(adc->dma_chan); 1673 1674 return 0; 1675 } 1676 1677 static int stm32_adc_buffer_postenable(struct iio_dev *indio_dev) 1678 { 1679 struct stm32_adc *adc = iio_priv(indio_dev); 1680 struct device *dev = indio_dev->dev.parent; 1681 int ret; 1682 1683 ret = pm_runtime_resume_and_get(dev); 1684 if (ret < 0) 1685 return ret; 1686 1687 ret = stm32_adc_set_trig(indio_dev, indio_dev->trig); 1688 if (ret) { 1689 dev_err(&indio_dev->dev, "Can't set trigger\n"); 1690 goto err_pm_put; 1691 } 1692 1693 ret = stm32_adc_dma_start(indio_dev); 1694 if (ret) { 1695 dev_err(&indio_dev->dev, "Can't start dma\n"); 1696 goto err_clr_trig; 1697 } 1698 1699 /* Reset adc buffer index */ 1700 adc->bufi = 0; 1701 1702 stm32_adc_ovr_irq_enable(adc); 1703 1704 if (!adc->dma_chan) 1705 stm32_adc_conv_irq_enable(adc); 1706 1707 adc->cfg->start_conv(indio_dev, !!adc->dma_chan); 1708 1709 return 0; 1710 1711 err_clr_trig: 1712 stm32_adc_set_trig(indio_dev, NULL); 1713 err_pm_put: 1714 pm_runtime_mark_last_busy(dev); 1715 pm_runtime_put_autosuspend(dev); 1716 1717 return ret; 1718 } 1719 1720 static int stm32_adc_buffer_predisable(struct iio_dev *indio_dev) 1721 { 1722 struct stm32_adc *adc = iio_priv(indio_dev); 1723 struct device *dev = indio_dev->dev.parent; 1724 1725 adc->cfg->stop_conv(indio_dev); 1726 if (!adc->dma_chan) 1727 stm32_adc_conv_irq_disable(adc); 1728 1729 stm32_adc_ovr_irq_disable(adc); 1730 1731 if (adc->dma_chan) 1732 dmaengine_terminate_sync(adc->dma_chan); 1733 1734 if (stm32_adc_set_trig(indio_dev, NULL)) 1735 dev_err(&indio_dev->dev, "Can't clear trigger\n"); 1736 1737 pm_runtime_mark_last_busy(dev); 1738 pm_runtime_put_autosuspend(dev); 1739 1740 return 0; 1741 } 1742 1743 static const struct iio_buffer_setup_ops stm32_adc_buffer_setup_ops = { 1744 .postenable = &stm32_adc_buffer_postenable, 1745 .predisable = &stm32_adc_buffer_predisable, 1746 }; 1747 1748 static irqreturn_t stm32_adc_trigger_handler(int irq, void *p) 1749 { 1750 struct iio_poll_func *pf = p; 1751 struct iio_dev *indio_dev = pf->indio_dev; 1752 struct stm32_adc *adc = iio_priv(indio_dev); 1753 1754 dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi); 1755 1756 /* reset buffer index */ 1757 adc->bufi = 0; 1758 iio_push_to_buffers_with_timestamp(indio_dev, adc->buffer, 1759 pf->timestamp); 1760 iio_trigger_notify_done(indio_dev->trig); 1761 1762 /* re-enable eoc irq */ 1763 stm32_adc_conv_irq_enable(adc); 1764 1765 return IRQ_HANDLED; 1766 } 1767 1768 static const struct iio_chan_spec_ext_info stm32_adc_ext_info[] = { 1769 IIO_ENUM("trigger_polarity", IIO_SHARED_BY_ALL, &stm32_adc_trig_pol), 1770 { 1771 .name = "trigger_polarity_available", 1772 .shared = IIO_SHARED_BY_ALL, 1773 .read = iio_enum_available_read, 1774 .private = (uintptr_t)&stm32_adc_trig_pol, 1775 }, 1776 {}, 1777 }; 1778 1779 static int stm32_adc_of_get_resolution(struct iio_dev *indio_dev) 1780 { 1781 struct device_node *node = indio_dev->dev.of_node; 1782 struct stm32_adc *adc = iio_priv(indio_dev); 1783 unsigned int i; 1784 u32 res; 1785 1786 if (of_property_read_u32(node, "assigned-resolution-bits", &res)) 1787 res = adc->cfg->adc_info->resolutions[0]; 1788 1789 for (i = 0; i < adc->cfg->adc_info->num_res; i++) 1790 if (res == adc->cfg->adc_info->resolutions[i]) 1791 break; 1792 if (i >= adc->cfg->adc_info->num_res) { 1793 dev_err(&indio_dev->dev, "Bad resolution: %u bits\n", res); 1794 return -EINVAL; 1795 } 1796 1797 dev_dbg(&indio_dev->dev, "Using %u bits resolution\n", res); 1798 adc->res = i; 1799 1800 return 0; 1801 } 1802 1803 static void stm32_adc_smpr_init(struct stm32_adc *adc, int channel, u32 smp_ns) 1804 { 1805 const struct stm32_adc_regs *smpr = &adc->cfg->regs->smp_bits[channel]; 1806 u32 period_ns, shift = smpr->shift, mask = smpr->mask; 1807 unsigned int smp, r = smpr->reg; 1808 1809 /* 1810 * For vrefint channel, ensure that the sampling time cannot 1811 * be lower than the one specified in the datasheet 1812 */ 1813 if (channel == adc->int_ch[STM32_ADC_INT_CH_VREFINT]) 1814 smp_ns = max(smp_ns, adc->cfg->ts_vrefint_ns); 1815 1816 /* Determine sampling time (ADC clock cycles) */ 1817 period_ns = NSEC_PER_SEC / adc->common->rate; 1818 for (smp = 0; smp <= STM32_ADC_MAX_SMP; smp++) 1819 if ((period_ns * adc->cfg->smp_cycles[smp]) >= smp_ns) 1820 break; 1821 if (smp > STM32_ADC_MAX_SMP) 1822 smp = STM32_ADC_MAX_SMP; 1823 1824 /* pre-build sampling time registers (e.g. smpr1, smpr2) */ 1825 adc->smpr_val[r] = (adc->smpr_val[r] & ~mask) | (smp << shift); 1826 } 1827 1828 static void stm32_adc_chan_init_one(struct iio_dev *indio_dev, 1829 struct iio_chan_spec *chan, u32 vinp, 1830 u32 vinn, int scan_index, bool differential) 1831 { 1832 struct stm32_adc *adc = iio_priv(indio_dev); 1833 char *name = adc->chan_name[vinp]; 1834 1835 chan->type = IIO_VOLTAGE; 1836 chan->channel = vinp; 1837 if (differential) { 1838 chan->differential = 1; 1839 chan->channel2 = vinn; 1840 snprintf(name, STM32_ADC_CH_SZ, "in%d-in%d", vinp, vinn); 1841 } else { 1842 snprintf(name, STM32_ADC_CH_SZ, "in%d", vinp); 1843 } 1844 chan->datasheet_name = name; 1845 chan->scan_index = scan_index; 1846 chan->indexed = 1; 1847 if (chan->channel == adc->int_ch[STM32_ADC_INT_CH_VREFINT]) 1848 chan->info_mask_separate = BIT(IIO_CHAN_INFO_PROCESSED); 1849 else 1850 chan->info_mask_separate = BIT(IIO_CHAN_INFO_RAW); 1851 chan->info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE) | 1852 BIT(IIO_CHAN_INFO_OFFSET); 1853 chan->scan_type.sign = 'u'; 1854 chan->scan_type.realbits = adc->cfg->adc_info->resolutions[adc->res]; 1855 chan->scan_type.storagebits = 16; 1856 chan->ext_info = stm32_adc_ext_info; 1857 1858 /* pre-build selected channels mask */ 1859 adc->pcsel |= BIT(chan->channel); 1860 if (differential) { 1861 /* pre-build diff channels mask */ 1862 adc->difsel |= BIT(chan->channel); 1863 /* Also add negative input to pre-selected channels */ 1864 adc->pcsel |= BIT(chan->channel2); 1865 } 1866 } 1867 1868 static int stm32_adc_get_legacy_chan_count(struct iio_dev *indio_dev, struct stm32_adc *adc) 1869 { 1870 struct device_node *node = indio_dev->dev.of_node; 1871 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 1872 int num_channels = 0, ret; 1873 1874 ret = of_property_count_u32_elems(node, "st,adc-channels"); 1875 if (ret > adc_info->max_channels) { 1876 dev_err(&indio_dev->dev, "Bad st,adc-channels?\n"); 1877 return -EINVAL; 1878 } else if (ret > 0) { 1879 num_channels += ret; 1880 } 1881 1882 ret = of_property_count_elems_of_size(node, "st,adc-diff-channels", 1883 sizeof(struct stm32_adc_diff_channel)); 1884 if (ret > adc_info->max_channels) { 1885 dev_err(&indio_dev->dev, "Bad st,adc-diff-channels?\n"); 1886 return -EINVAL; 1887 } else if (ret > 0) { 1888 adc->num_diff = ret; 1889 num_channels += ret; 1890 } 1891 1892 /* Optional sample time is provided either for each, or all channels */ 1893 ret = of_property_count_u32_elems(node, "st,min-sample-time-nsecs"); 1894 if (ret > 1 && ret != num_channels) { 1895 dev_err(&indio_dev->dev, "Invalid st,min-sample-time-nsecs\n"); 1896 return -EINVAL; 1897 } 1898 1899 return num_channels; 1900 } 1901 1902 static int stm32_adc_legacy_chan_init(struct iio_dev *indio_dev, 1903 struct stm32_adc *adc, 1904 struct iio_chan_spec *channels) 1905 { 1906 struct device_node *node = indio_dev->dev.of_node; 1907 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 1908 struct stm32_adc_diff_channel diff[STM32_ADC_CH_MAX]; 1909 u32 num_diff = adc->num_diff; 1910 int size = num_diff * sizeof(*diff) / sizeof(u32); 1911 int scan_index = 0, val, ret, i; 1912 struct property *prop; 1913 const __be32 *cur; 1914 u32 smp = 0; 1915 1916 if (num_diff) { 1917 ret = of_property_read_u32_array(node, "st,adc-diff-channels", 1918 (u32 *)diff, size); 1919 if (ret) { 1920 dev_err(&indio_dev->dev, "Failed to get diff channels %d\n", ret); 1921 return ret; 1922 } 1923 1924 for (i = 0; i < num_diff; i++) { 1925 if (diff[i].vinp >= adc_info->max_channels || 1926 diff[i].vinn >= adc_info->max_channels) { 1927 dev_err(&indio_dev->dev, "Invalid channel in%d-in%d\n", 1928 diff[i].vinp, diff[i].vinn); 1929 return -EINVAL; 1930 } 1931 1932 stm32_adc_chan_init_one(indio_dev, &channels[scan_index], 1933 diff[i].vinp, diff[i].vinn, 1934 scan_index, true); 1935 scan_index++; 1936 } 1937 } 1938 1939 of_property_for_each_u32(node, "st,adc-channels", prop, cur, val) { 1940 if (val >= adc_info->max_channels) { 1941 dev_err(&indio_dev->dev, "Invalid channel %d\n", val); 1942 return -EINVAL; 1943 } 1944 1945 /* Channel can't be configured both as single-ended & diff */ 1946 for (i = 0; i < num_diff; i++) { 1947 if (val == diff[i].vinp) { 1948 dev_err(&indio_dev->dev, "channel %d misconfigured\n", val); 1949 return -EINVAL; 1950 } 1951 } 1952 stm32_adc_chan_init_one(indio_dev, &channels[scan_index], val, 1953 0, scan_index, false); 1954 scan_index++; 1955 } 1956 1957 for (i = 0; i < scan_index; i++) { 1958 /* 1959 * Using of_property_read_u32_index(), smp value will only be 1960 * modified if valid u32 value can be decoded. This allows to 1961 * get either no value, 1 shared value for all indexes, or one 1962 * value per channel. 1963 */ 1964 of_property_read_u32_index(node, "st,min-sample-time-nsecs", i, &smp); 1965 1966 /* Prepare sampling time settings */ 1967 stm32_adc_smpr_init(adc, channels[i].channel, smp); 1968 } 1969 1970 return scan_index; 1971 } 1972 1973 static int stm32_adc_populate_int_ch(struct iio_dev *indio_dev, const char *ch_name, 1974 int chan) 1975 { 1976 struct stm32_adc *adc = iio_priv(indio_dev); 1977 u16 vrefint; 1978 int i, ret; 1979 1980 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) { 1981 if (!strncmp(stm32_adc_ic[i].name, ch_name, STM32_ADC_CH_SZ)) { 1982 adc->int_ch[i] = chan; 1983 1984 if (stm32_adc_ic[i].idx != STM32_ADC_INT_CH_VREFINT) 1985 continue; 1986 1987 /* Get calibration data for vrefint channel */ 1988 ret = nvmem_cell_read_u16(&indio_dev->dev, "vrefint", &vrefint); 1989 if (ret && ret != -ENOENT) { 1990 return dev_err_probe(indio_dev->dev.parent, ret, 1991 "nvmem access error\n"); 1992 } 1993 if (ret == -ENOENT) 1994 dev_dbg(&indio_dev->dev, "vrefint calibration not found\n"); 1995 else 1996 adc->vrefint.vrefint_cal = vrefint; 1997 } 1998 } 1999 2000 return 0; 2001 } 2002 2003 static int stm32_adc_generic_chan_init(struct iio_dev *indio_dev, 2004 struct stm32_adc *adc, 2005 struct iio_chan_spec *channels) 2006 { 2007 struct device_node *node = indio_dev->dev.of_node; 2008 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 2009 struct device_node *child; 2010 const char *name; 2011 int val, scan_index = 0, ret; 2012 bool differential; 2013 u32 vin[2]; 2014 2015 for_each_available_child_of_node(node, child) { 2016 ret = of_property_read_u32(child, "reg", &val); 2017 if (ret) { 2018 dev_err(&indio_dev->dev, "Missing channel index %d\n", ret); 2019 goto err; 2020 } 2021 2022 ret = of_property_read_string(child, "label", &name); 2023 /* label is optional */ 2024 if (!ret) { 2025 if (strlen(name) >= STM32_ADC_CH_SZ) { 2026 dev_err(&indio_dev->dev, "Label %s exceeds %d characters\n", 2027 name, STM32_ADC_CH_SZ); 2028 ret = -EINVAL; 2029 goto err; 2030 } 2031 strncpy(adc->chan_name[val], name, STM32_ADC_CH_SZ); 2032 ret = stm32_adc_populate_int_ch(indio_dev, name, val); 2033 if (ret) 2034 goto err; 2035 } else if (ret != -EINVAL) { 2036 dev_err(&indio_dev->dev, "Invalid label %d\n", ret); 2037 goto err; 2038 } 2039 2040 if (val >= adc_info->max_channels) { 2041 dev_err(&indio_dev->dev, "Invalid channel %d\n", val); 2042 ret = -EINVAL; 2043 goto err; 2044 } 2045 2046 differential = false; 2047 ret = of_property_read_u32_array(child, "diff-channels", vin, 2); 2048 /* diff-channels is optional */ 2049 if (!ret) { 2050 differential = true; 2051 if (vin[0] != val || vin[1] >= adc_info->max_channels) { 2052 dev_err(&indio_dev->dev, "Invalid channel in%d-in%d\n", 2053 vin[0], vin[1]); 2054 goto err; 2055 } 2056 } else if (ret != -EINVAL) { 2057 dev_err(&indio_dev->dev, "Invalid diff-channels property %d\n", ret); 2058 goto err; 2059 } 2060 2061 stm32_adc_chan_init_one(indio_dev, &channels[scan_index], val, 2062 vin[1], scan_index, differential); 2063 2064 ret = of_property_read_u32(child, "st,min-sample-time-ns", &val); 2065 /* st,min-sample-time-ns is optional */ 2066 if (!ret) { 2067 stm32_adc_smpr_init(adc, channels[scan_index].channel, val); 2068 if (differential) 2069 stm32_adc_smpr_init(adc, vin[1], val); 2070 } else if (ret != -EINVAL) { 2071 dev_err(&indio_dev->dev, "Invalid st,min-sample-time-ns property %d\n", 2072 ret); 2073 goto err; 2074 } 2075 2076 scan_index++; 2077 } 2078 2079 return scan_index; 2080 2081 err: 2082 of_node_put(child); 2083 2084 return ret; 2085 } 2086 2087 static int stm32_adc_chan_of_init(struct iio_dev *indio_dev, bool timestamping) 2088 { 2089 struct device_node *node = indio_dev->dev.of_node; 2090 struct stm32_adc *adc = iio_priv(indio_dev); 2091 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 2092 struct iio_chan_spec *channels; 2093 int scan_index = 0, num_channels = 0, ret, i; 2094 bool legacy = false; 2095 2096 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) 2097 adc->int_ch[i] = STM32_ADC_INT_CH_NONE; 2098 2099 num_channels = of_get_available_child_count(node); 2100 /* If no channels have been found, fallback to channels legacy properties. */ 2101 if (!num_channels) { 2102 legacy = true; 2103 2104 ret = stm32_adc_get_legacy_chan_count(indio_dev, adc); 2105 if (!ret) { 2106 dev_err(indio_dev->dev.parent, "No channel found\n"); 2107 return -ENODATA; 2108 } else if (ret < 0) { 2109 return ret; 2110 } 2111 2112 num_channels = ret; 2113 } 2114 2115 if (num_channels > adc_info->max_channels) { 2116 dev_err(&indio_dev->dev, "Channel number [%d] exceeds %d\n", 2117 num_channels, adc_info->max_channels); 2118 return -EINVAL; 2119 } 2120 2121 if (timestamping) 2122 num_channels++; 2123 2124 channels = devm_kcalloc(&indio_dev->dev, num_channels, 2125 sizeof(struct iio_chan_spec), GFP_KERNEL); 2126 if (!channels) 2127 return -ENOMEM; 2128 2129 if (legacy) 2130 ret = stm32_adc_legacy_chan_init(indio_dev, adc, channels); 2131 else 2132 ret = stm32_adc_generic_chan_init(indio_dev, adc, channels); 2133 if (ret < 0) 2134 return ret; 2135 scan_index = ret; 2136 2137 if (timestamping) { 2138 struct iio_chan_spec *timestamp = &channels[scan_index]; 2139 2140 timestamp->type = IIO_TIMESTAMP; 2141 timestamp->channel = -1; 2142 timestamp->scan_index = scan_index; 2143 timestamp->scan_type.sign = 's'; 2144 timestamp->scan_type.realbits = 64; 2145 timestamp->scan_type.storagebits = 64; 2146 2147 scan_index++; 2148 } 2149 2150 indio_dev->num_channels = scan_index; 2151 indio_dev->channels = channels; 2152 2153 return 0; 2154 } 2155 2156 static int stm32_adc_dma_request(struct device *dev, struct iio_dev *indio_dev) 2157 { 2158 struct stm32_adc *adc = iio_priv(indio_dev); 2159 struct dma_slave_config config; 2160 int ret; 2161 2162 adc->dma_chan = dma_request_chan(dev, "rx"); 2163 if (IS_ERR(adc->dma_chan)) { 2164 ret = PTR_ERR(adc->dma_chan); 2165 if (ret != -ENODEV) 2166 return dev_err_probe(dev, ret, 2167 "DMA channel request failed with\n"); 2168 2169 /* DMA is optional: fall back to IRQ mode */ 2170 adc->dma_chan = NULL; 2171 return 0; 2172 } 2173 2174 adc->rx_buf = dma_alloc_coherent(adc->dma_chan->device->dev, 2175 STM32_DMA_BUFFER_SIZE, 2176 &adc->rx_dma_buf, GFP_KERNEL); 2177 if (!adc->rx_buf) { 2178 ret = -ENOMEM; 2179 goto err_release; 2180 } 2181 2182 /* Configure DMA channel to read data register */ 2183 memset(&config, 0, sizeof(config)); 2184 config.src_addr = (dma_addr_t)adc->common->phys_base; 2185 config.src_addr += adc->offset + adc->cfg->regs->dr; 2186 config.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES; 2187 2188 ret = dmaengine_slave_config(adc->dma_chan, &config); 2189 if (ret) 2190 goto err_free; 2191 2192 return 0; 2193 2194 err_free: 2195 dma_free_coherent(adc->dma_chan->device->dev, STM32_DMA_BUFFER_SIZE, 2196 adc->rx_buf, adc->rx_dma_buf); 2197 err_release: 2198 dma_release_channel(adc->dma_chan); 2199 2200 return ret; 2201 } 2202 2203 static int stm32_adc_probe(struct platform_device *pdev) 2204 { 2205 struct iio_dev *indio_dev; 2206 struct device *dev = &pdev->dev; 2207 irqreturn_t (*handler)(int irq, void *p) = NULL; 2208 struct stm32_adc *adc; 2209 bool timestamping = false; 2210 int ret; 2211 2212 if (!pdev->dev.of_node) 2213 return -ENODEV; 2214 2215 indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*adc)); 2216 if (!indio_dev) 2217 return -ENOMEM; 2218 2219 adc = iio_priv(indio_dev); 2220 adc->common = dev_get_drvdata(pdev->dev.parent); 2221 spin_lock_init(&adc->lock); 2222 init_completion(&adc->completion); 2223 adc->cfg = (const struct stm32_adc_cfg *) 2224 of_match_device(dev->driver->of_match_table, dev)->data; 2225 2226 indio_dev->name = dev_name(&pdev->dev); 2227 indio_dev->dev.of_node = pdev->dev.of_node; 2228 indio_dev->info = &stm32_adc_iio_info; 2229 indio_dev->modes = INDIO_DIRECT_MODE | INDIO_HARDWARE_TRIGGERED; 2230 2231 platform_set_drvdata(pdev, indio_dev); 2232 2233 ret = of_property_read_u32(pdev->dev.of_node, "reg", &adc->offset); 2234 if (ret != 0) { 2235 dev_err(&pdev->dev, "missing reg property\n"); 2236 return -EINVAL; 2237 } 2238 2239 adc->irq = platform_get_irq(pdev, 0); 2240 if (adc->irq < 0) 2241 return adc->irq; 2242 2243 ret = devm_request_threaded_irq(&pdev->dev, adc->irq, stm32_adc_isr, 2244 stm32_adc_threaded_isr, 2245 0, pdev->name, indio_dev); 2246 if (ret) { 2247 dev_err(&pdev->dev, "failed to request IRQ\n"); 2248 return ret; 2249 } 2250 2251 adc->clk = devm_clk_get(&pdev->dev, NULL); 2252 if (IS_ERR(adc->clk)) { 2253 ret = PTR_ERR(adc->clk); 2254 if (ret == -ENOENT && !adc->cfg->clk_required) { 2255 adc->clk = NULL; 2256 } else { 2257 dev_err(&pdev->dev, "Can't get clock\n"); 2258 return ret; 2259 } 2260 } 2261 2262 ret = stm32_adc_of_get_resolution(indio_dev); 2263 if (ret < 0) 2264 return ret; 2265 2266 ret = stm32_adc_dma_request(dev, indio_dev); 2267 if (ret < 0) 2268 return ret; 2269 2270 if (!adc->dma_chan) { 2271 /* For PIO mode only, iio_pollfunc_store_time stores a timestamp 2272 * in the primary trigger IRQ handler and stm32_adc_trigger_handler 2273 * runs in the IRQ thread to push out buffer along with timestamp. 2274 */ 2275 handler = &stm32_adc_trigger_handler; 2276 timestamping = true; 2277 } 2278 2279 ret = stm32_adc_chan_of_init(indio_dev, timestamping); 2280 if (ret < 0) 2281 goto err_dma_disable; 2282 2283 ret = iio_triggered_buffer_setup(indio_dev, 2284 &iio_pollfunc_store_time, handler, 2285 &stm32_adc_buffer_setup_ops); 2286 if (ret) { 2287 dev_err(&pdev->dev, "buffer setup failed\n"); 2288 goto err_dma_disable; 2289 } 2290 2291 /* Get stm32-adc-core PM online */ 2292 pm_runtime_get_noresume(dev); 2293 pm_runtime_set_active(dev); 2294 pm_runtime_set_autosuspend_delay(dev, STM32_ADC_HW_STOP_DELAY_MS); 2295 pm_runtime_use_autosuspend(dev); 2296 pm_runtime_enable(dev); 2297 2298 ret = stm32_adc_hw_start(dev); 2299 if (ret) 2300 goto err_buffer_cleanup; 2301 2302 ret = iio_device_register(indio_dev); 2303 if (ret) { 2304 dev_err(&pdev->dev, "iio dev register failed\n"); 2305 goto err_hw_stop; 2306 } 2307 2308 pm_runtime_mark_last_busy(dev); 2309 pm_runtime_put_autosuspend(dev); 2310 2311 return 0; 2312 2313 err_hw_stop: 2314 stm32_adc_hw_stop(dev); 2315 2316 err_buffer_cleanup: 2317 pm_runtime_disable(dev); 2318 pm_runtime_set_suspended(dev); 2319 pm_runtime_put_noidle(dev); 2320 iio_triggered_buffer_cleanup(indio_dev); 2321 2322 err_dma_disable: 2323 if (adc->dma_chan) { 2324 dma_free_coherent(adc->dma_chan->device->dev, 2325 STM32_DMA_BUFFER_SIZE, 2326 adc->rx_buf, adc->rx_dma_buf); 2327 dma_release_channel(adc->dma_chan); 2328 } 2329 2330 return ret; 2331 } 2332 2333 static int stm32_adc_remove(struct platform_device *pdev) 2334 { 2335 struct iio_dev *indio_dev = platform_get_drvdata(pdev); 2336 struct stm32_adc *adc = iio_priv(indio_dev); 2337 2338 pm_runtime_get_sync(&pdev->dev); 2339 iio_device_unregister(indio_dev); 2340 stm32_adc_hw_stop(&pdev->dev); 2341 pm_runtime_disable(&pdev->dev); 2342 pm_runtime_set_suspended(&pdev->dev); 2343 pm_runtime_put_noidle(&pdev->dev); 2344 iio_triggered_buffer_cleanup(indio_dev); 2345 if (adc->dma_chan) { 2346 dma_free_coherent(adc->dma_chan->device->dev, 2347 STM32_DMA_BUFFER_SIZE, 2348 adc->rx_buf, adc->rx_dma_buf); 2349 dma_release_channel(adc->dma_chan); 2350 } 2351 2352 return 0; 2353 } 2354 2355 #if defined(CONFIG_PM_SLEEP) 2356 static int stm32_adc_suspend(struct device *dev) 2357 { 2358 struct iio_dev *indio_dev = dev_get_drvdata(dev); 2359 2360 if (iio_buffer_enabled(indio_dev)) 2361 stm32_adc_buffer_predisable(indio_dev); 2362 2363 return pm_runtime_force_suspend(dev); 2364 } 2365 2366 static int stm32_adc_resume(struct device *dev) 2367 { 2368 struct iio_dev *indio_dev = dev_get_drvdata(dev); 2369 int ret; 2370 2371 ret = pm_runtime_force_resume(dev); 2372 if (ret < 0) 2373 return ret; 2374 2375 if (!iio_buffer_enabled(indio_dev)) 2376 return 0; 2377 2378 ret = stm32_adc_update_scan_mode(indio_dev, 2379 indio_dev->active_scan_mask); 2380 if (ret < 0) 2381 return ret; 2382 2383 return stm32_adc_buffer_postenable(indio_dev); 2384 } 2385 #endif 2386 2387 #if defined(CONFIG_PM) 2388 static int stm32_adc_runtime_suspend(struct device *dev) 2389 { 2390 return stm32_adc_hw_stop(dev); 2391 } 2392 2393 static int stm32_adc_runtime_resume(struct device *dev) 2394 { 2395 return stm32_adc_hw_start(dev); 2396 } 2397 #endif 2398 2399 static const struct dev_pm_ops stm32_adc_pm_ops = { 2400 SET_SYSTEM_SLEEP_PM_OPS(stm32_adc_suspend, stm32_adc_resume) 2401 SET_RUNTIME_PM_OPS(stm32_adc_runtime_suspend, stm32_adc_runtime_resume, 2402 NULL) 2403 }; 2404 2405 static const struct stm32_adc_cfg stm32f4_adc_cfg = { 2406 .regs = &stm32f4_adc_regspec, 2407 .adc_info = &stm32f4_adc_info, 2408 .trigs = stm32f4_adc_trigs, 2409 .clk_required = true, 2410 .start_conv = stm32f4_adc_start_conv, 2411 .stop_conv = stm32f4_adc_stop_conv, 2412 .smp_cycles = stm32f4_adc_smp_cycles, 2413 .irq_clear = stm32f4_adc_irq_clear, 2414 }; 2415 2416 static const struct stm32_adc_cfg stm32h7_adc_cfg = { 2417 .regs = &stm32h7_adc_regspec, 2418 .adc_info = &stm32h7_adc_info, 2419 .trigs = stm32h7_adc_trigs, 2420 .start_conv = stm32h7_adc_start_conv, 2421 .stop_conv = stm32h7_adc_stop_conv, 2422 .prepare = stm32h7_adc_prepare, 2423 .unprepare = stm32h7_adc_unprepare, 2424 .smp_cycles = stm32h7_adc_smp_cycles, 2425 .irq_clear = stm32h7_adc_irq_clear, 2426 }; 2427 2428 static const struct stm32_adc_cfg stm32mp1_adc_cfg = { 2429 .regs = &stm32mp1_adc_regspec, 2430 .adc_info = &stm32h7_adc_info, 2431 .trigs = stm32h7_adc_trigs, 2432 .has_vregready = true, 2433 .start_conv = stm32h7_adc_start_conv, 2434 .stop_conv = stm32h7_adc_stop_conv, 2435 .prepare = stm32h7_adc_prepare, 2436 .unprepare = stm32h7_adc_unprepare, 2437 .smp_cycles = stm32h7_adc_smp_cycles, 2438 .irq_clear = stm32h7_adc_irq_clear, 2439 .ts_vrefint_ns = 4300, 2440 }; 2441 2442 static const struct of_device_id stm32_adc_of_match[] = { 2443 { .compatible = "st,stm32f4-adc", .data = (void *)&stm32f4_adc_cfg }, 2444 { .compatible = "st,stm32h7-adc", .data = (void *)&stm32h7_adc_cfg }, 2445 { .compatible = "st,stm32mp1-adc", .data = (void *)&stm32mp1_adc_cfg }, 2446 {}, 2447 }; 2448 MODULE_DEVICE_TABLE(of, stm32_adc_of_match); 2449 2450 static struct platform_driver stm32_adc_driver = { 2451 .probe = stm32_adc_probe, 2452 .remove = stm32_adc_remove, 2453 .driver = { 2454 .name = "stm32-adc", 2455 .of_match_table = stm32_adc_of_match, 2456 .pm = &stm32_adc_pm_ops, 2457 }, 2458 }; 2459 module_platform_driver(stm32_adc_driver); 2460 2461 MODULE_AUTHOR("Fabrice Gasnier <fabrice.gasnier@st.com>"); 2462 MODULE_DESCRIPTION("STMicroelectronics STM32 ADC IIO driver"); 2463 MODULE_LICENSE("GPL v2"); 2464 MODULE_ALIAS("platform:stm32-adc"); 2465