1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * This file is part of STM32 ADC driver 4 * 5 * Copyright (C) 2016, STMicroelectronics - All Rights Reserved 6 * Author: Fabrice Gasnier <fabrice.gasnier@st.com>. 7 */ 8 9 #include <linux/clk.h> 10 #include <linux/delay.h> 11 #include <linux/dma-mapping.h> 12 #include <linux/dmaengine.h> 13 #include <linux/iio/iio.h> 14 #include <linux/iio/buffer.h> 15 #include <linux/iio/timer/stm32-lptim-trigger.h> 16 #include <linux/iio/timer/stm32-timer-trigger.h> 17 #include <linux/iio/trigger.h> 18 #include <linux/iio/trigger_consumer.h> 19 #include <linux/iio/triggered_buffer.h> 20 #include <linux/interrupt.h> 21 #include <linux/io.h> 22 #include <linux/iopoll.h> 23 #include <linux/module.h> 24 #include <linux/nvmem-consumer.h> 25 #include <linux/platform_device.h> 26 #include <linux/pm_runtime.h> 27 #include <linux/of.h> 28 #include <linux/of_device.h> 29 30 #include "stm32-adc-core.h" 31 32 /* Number of linear calibration shadow registers / LINCALRDYW control bits */ 33 #define STM32H7_LINCALFACT_NUM 6 34 35 /* BOOST bit must be set on STM32H7 when ADC clock is above 20MHz */ 36 #define STM32H7_BOOST_CLKRATE 20000000UL 37 38 #define STM32_ADC_CH_MAX 20 /* max number of channels */ 39 #define STM32_ADC_CH_SZ 16 /* max channel name size */ 40 #define STM32_ADC_MAX_SQ 16 /* SQ1..SQ16 */ 41 #define STM32_ADC_MAX_SMP 7 /* SMPx range is [0..7] */ 42 #define STM32_ADC_TIMEOUT_US 100000 43 #define STM32_ADC_TIMEOUT (msecs_to_jiffies(STM32_ADC_TIMEOUT_US / 1000)) 44 #define STM32_ADC_HW_STOP_DELAY_MS 100 45 #define STM32_ADC_VREFINT_VOLTAGE 3300 46 47 #define STM32_DMA_BUFFER_SIZE PAGE_SIZE 48 49 /* External trigger enable */ 50 enum stm32_adc_exten { 51 STM32_EXTEN_SWTRIG, 52 STM32_EXTEN_HWTRIG_RISING_EDGE, 53 STM32_EXTEN_HWTRIG_FALLING_EDGE, 54 STM32_EXTEN_HWTRIG_BOTH_EDGES, 55 }; 56 57 /* extsel - trigger mux selection value */ 58 enum stm32_adc_extsel { 59 STM32_EXT0, 60 STM32_EXT1, 61 STM32_EXT2, 62 STM32_EXT3, 63 STM32_EXT4, 64 STM32_EXT5, 65 STM32_EXT6, 66 STM32_EXT7, 67 STM32_EXT8, 68 STM32_EXT9, 69 STM32_EXT10, 70 STM32_EXT11, 71 STM32_EXT12, 72 STM32_EXT13, 73 STM32_EXT14, 74 STM32_EXT15, 75 STM32_EXT16, 76 STM32_EXT17, 77 STM32_EXT18, 78 STM32_EXT19, 79 STM32_EXT20, 80 }; 81 82 enum stm32_adc_int_ch { 83 STM32_ADC_INT_CH_NONE = -1, 84 STM32_ADC_INT_CH_VDDCORE, 85 STM32_ADC_INT_CH_VREFINT, 86 STM32_ADC_INT_CH_VBAT, 87 STM32_ADC_INT_CH_NB, 88 }; 89 90 /** 91 * struct stm32_adc_ic - ADC internal channels 92 * @name: name of the internal channel 93 * @idx: internal channel enum index 94 */ 95 struct stm32_adc_ic { 96 const char *name; 97 u32 idx; 98 }; 99 100 static const struct stm32_adc_ic stm32_adc_ic[STM32_ADC_INT_CH_NB] = { 101 { "vddcore", STM32_ADC_INT_CH_VDDCORE }, 102 { "vrefint", STM32_ADC_INT_CH_VREFINT }, 103 { "vbat", STM32_ADC_INT_CH_VBAT }, 104 }; 105 106 /** 107 * struct stm32_adc_trig_info - ADC trigger info 108 * @name: name of the trigger, corresponding to its source 109 * @extsel: trigger selection 110 */ 111 struct stm32_adc_trig_info { 112 const char *name; 113 enum stm32_adc_extsel extsel; 114 }; 115 116 /** 117 * struct stm32_adc_calib - optional adc calibration data 118 * @calfact_s: Calibration offset for single ended channels 119 * @calfact_d: Calibration offset in differential 120 * @lincalfact: Linearity calibration factor 121 * @calibrated: Indicates calibration status 122 */ 123 struct stm32_adc_calib { 124 u32 calfact_s; 125 u32 calfact_d; 126 u32 lincalfact[STM32H7_LINCALFACT_NUM]; 127 bool calibrated; 128 }; 129 130 /** 
131 * struct stm32_adc_regs - stm32 ADC misc registers & bitfield desc 132 * @reg: register offset 133 * @mask: bitfield mask 134 * @shift: left shift 135 */ 136 struct stm32_adc_regs { 137 int reg; 138 int mask; 139 int shift; 140 }; 141 142 /** 143 * struct stm32_adc_vrefint - stm32 ADC internal reference voltage data 144 * @vrefint_cal: vrefint calibration value from nvmem 145 * @vrefint_data: vrefint actual value 146 */ 147 struct stm32_adc_vrefint { 148 u32 vrefint_cal; 149 u32 vrefint_data; 150 }; 151 152 /** 153 * struct stm32_adc_regspec - stm32 registers definition 154 * @dr: data register offset 155 * @ier_eoc: interrupt enable register & eocie bitfield 156 * @ier_ovr: interrupt enable register & overrun bitfield 157 * @isr_eoc: interrupt status register & eoc bitfield 158 * @isr_ovr: interrupt status register & overrun bitfield 159 * @sqr: reference to sequence registers array 160 * @exten: trigger control register & bitfield 161 * @extsel: trigger selection register & bitfield 162 * @res: resolution selection register & bitfield 163 * @smpr: smpr1 & smpr2 registers offset array 164 * @smp_bits: smpr1 & smpr2 index and bitfields 165 * @or_vdd: option register & vddcore bitfield 166 * @ccr_vbat: common register & vbat bitfield 167 * @ccr_vref: common register & vrefint bitfield 168 */ 169 struct stm32_adc_regspec { 170 const u32 dr; 171 const struct stm32_adc_regs ier_eoc; 172 const struct stm32_adc_regs ier_ovr; 173 const struct stm32_adc_regs isr_eoc; 174 const struct stm32_adc_regs isr_ovr; 175 const struct stm32_adc_regs *sqr; 176 const struct stm32_adc_regs exten; 177 const struct stm32_adc_regs extsel; 178 const struct stm32_adc_regs res; 179 const u32 smpr[2]; 180 const struct stm32_adc_regs *smp_bits; 181 const struct stm32_adc_regs or_vdd; 182 const struct stm32_adc_regs ccr_vbat; 183 const struct stm32_adc_regs ccr_vref; 184 }; 185 186 struct stm32_adc; 187 188 /** 189 * struct stm32_adc_cfg - stm32 compatible configuration data 190 * @regs: registers descriptions 191 * @adc_info: per instance input channels definitions 192 * @trigs: external trigger sources 193 * @clk_required: clock is required 194 * @has_vregready: vregready status flag presence 195 * @prepare: optional prepare routine (power-up, enable) 196 * @start_conv: routine to start conversions 197 * @stop_conv: routine to stop conversions 198 * @unprepare: optional unprepare routine (disable, power-down) 199 * @irq_clear: routine to clear irqs 200 * @smp_cycles: programmable sampling time (ADC clock cycles) 201 * @ts_vrefint_ns: vrefint minimum sampling time in ns 202 */ 203 struct stm32_adc_cfg { 204 const struct stm32_adc_regspec *regs; 205 const struct stm32_adc_info *adc_info; 206 struct stm32_adc_trig_info *trigs; 207 bool clk_required; 208 bool has_vregready; 209 int (*prepare)(struct iio_dev *); 210 void (*start_conv)(struct iio_dev *, bool dma); 211 void (*stop_conv)(struct iio_dev *); 212 void (*unprepare)(struct iio_dev *); 213 void (*irq_clear)(struct iio_dev *indio_dev, u32 msk); 214 const unsigned int *smp_cycles; 215 const unsigned int ts_vrefint_ns; 216 }; 217 218 /** 219 * struct stm32_adc - private data of each ADC IIO instance 220 * @common: reference to ADC block common data 221 * @offset: ADC instance register offset in ADC block 222 * @cfg: compatible configuration data 223 * @completion: end of single conversion completion 224 * @buffer: data buffer + 8 bytes for timestamp if enabled 225 * @clk: clock for this adc instance 226 * @irq: interrupt for this adc instance 227 * @lock: spinlock 
228 * @bufi: data buffer index 229 * @num_conv: expected number of scan conversions 230 * @res: data resolution (e.g. RES bitfield value) 231 * @trigger_polarity: external trigger polarity (e.g. exten) 232 * @dma_chan: dma channel 233 * @rx_buf: dma rx buffer cpu address 234 * @rx_dma_buf: dma rx buffer bus address 235 * @rx_buf_sz: dma rx buffer size 236 * @difsel: bitmask to set single-ended/differential channel 237 * @pcsel: bitmask to preselect channels on some devices 238 * @smpr_val: sampling time settings (e.g. smpr1 / smpr2) 239 * @cal: optional calibration data on some devices 240 * @vrefint: internal reference voltage data 241 * @chan_name: channel name array 242 * @num_diff: number of differential channels 243 * @int_ch: internal channel indexes array 244 */ 245 struct stm32_adc { 246 struct stm32_adc_common *common; 247 u32 offset; 248 const struct stm32_adc_cfg *cfg; 249 struct completion completion; 250 u16 buffer[STM32_ADC_MAX_SQ + 4] __aligned(8); 251 struct clk *clk; 252 int irq; 253 spinlock_t lock; /* interrupt lock */ 254 unsigned int bufi; 255 unsigned int num_conv; 256 u32 res; 257 u32 trigger_polarity; 258 struct dma_chan *dma_chan; 259 u8 *rx_buf; 260 dma_addr_t rx_dma_buf; 261 unsigned int rx_buf_sz; 262 u32 difsel; 263 u32 pcsel; 264 u32 smpr_val[2]; 265 struct stm32_adc_calib cal; 266 struct stm32_adc_vrefint vrefint; 267 char chan_name[STM32_ADC_CH_MAX][STM32_ADC_CH_SZ]; 268 u32 num_diff; 269 int int_ch[STM32_ADC_INT_CH_NB]; 270 }; 271 272 struct stm32_adc_diff_channel { 273 u32 vinp; 274 u32 vinn; 275 }; 276 277 /** 278 * struct stm32_adc_info - stm32 ADC, per instance config data 279 * @max_channels: Number of channels 280 * @resolutions: available resolutions 281 * @num_res: number of available resolutions 282 */ 283 struct stm32_adc_info { 284 int max_channels; 285 const unsigned int *resolutions; 286 const unsigned int num_res; 287 }; 288 289 static const unsigned int stm32f4_adc_resolutions[] = { 290 /* sorted values so the index matches RES[1:0] in STM32F4_ADC_CR1 */ 291 12, 10, 8, 6, 292 }; 293 294 /* stm32f4 can have up to 16 channels */ 295 static const struct stm32_adc_info stm32f4_adc_info = { 296 .max_channels = 16, 297 .resolutions = stm32f4_adc_resolutions, 298 .num_res = ARRAY_SIZE(stm32f4_adc_resolutions), 299 }; 300 301 static const unsigned int stm32h7_adc_resolutions[] = { 302 /* sorted values so the index matches RES[2:0] in STM32H7_ADC_CFGR */ 303 16, 14, 12, 10, 8, 304 }; 305 306 /* stm32h7 can have up to 20 channels */ 307 static const struct stm32_adc_info stm32h7_adc_info = { 308 .max_channels = STM32_ADC_CH_MAX, 309 .resolutions = stm32h7_adc_resolutions, 310 .num_res = ARRAY_SIZE(stm32h7_adc_resolutions), 311 }; 312 313 /* 314 * stm32f4_sq - describe regular sequence registers 315 * - L: sequence len (register & bit field) 316 * - SQ1..SQ16: sequence entries (register & bit field) 317 */ 318 static const struct stm32_adc_regs stm32f4_sq[STM32_ADC_MAX_SQ + 1] = { 319 /* L: len bit field description to be kept as first element */ 320 { STM32F4_ADC_SQR1, GENMASK(23, 20), 20 }, 321 /* SQ1..SQ16 registers & bit fields (reg, mask, shift) */ 322 { STM32F4_ADC_SQR3, GENMASK(4, 0), 0 }, 323 { STM32F4_ADC_SQR3, GENMASK(9, 5), 5 }, 324 { STM32F4_ADC_SQR3, GENMASK(14, 10), 10 }, 325 { STM32F4_ADC_SQR3, GENMASK(19, 15), 15 }, 326 { STM32F4_ADC_SQR3, GENMASK(24, 20), 20 }, 327 { STM32F4_ADC_SQR3, GENMASK(29, 25), 25 }, 328 { STM32F4_ADC_SQR2, GENMASK(4, 0), 0 }, 329 { STM32F4_ADC_SQR2, GENMASK(9, 5), 5 }, 330 { STM32F4_ADC_SQR2, GENMASK(14, 10), 10 
}, 331 { STM32F4_ADC_SQR2, GENMASK(19, 15), 15 }, 332 { STM32F4_ADC_SQR2, GENMASK(24, 20), 20 }, 333 { STM32F4_ADC_SQR2, GENMASK(29, 25), 25 }, 334 { STM32F4_ADC_SQR1, GENMASK(4, 0), 0 }, 335 { STM32F4_ADC_SQR1, GENMASK(9, 5), 5 }, 336 { STM32F4_ADC_SQR1, GENMASK(14, 10), 10 }, 337 { STM32F4_ADC_SQR1, GENMASK(19, 15), 15 }, 338 }; 339 340 /* STM32F4 external trigger sources for all instances */ 341 static struct stm32_adc_trig_info stm32f4_adc_trigs[] = { 342 { TIM1_CH1, STM32_EXT0 }, 343 { TIM1_CH2, STM32_EXT1 }, 344 { TIM1_CH3, STM32_EXT2 }, 345 { TIM2_CH2, STM32_EXT3 }, 346 { TIM2_CH3, STM32_EXT4 }, 347 { TIM2_CH4, STM32_EXT5 }, 348 { TIM2_TRGO, STM32_EXT6 }, 349 { TIM3_CH1, STM32_EXT7 }, 350 { TIM3_TRGO, STM32_EXT8 }, 351 { TIM4_CH4, STM32_EXT9 }, 352 { TIM5_CH1, STM32_EXT10 }, 353 { TIM5_CH2, STM32_EXT11 }, 354 { TIM5_CH3, STM32_EXT12 }, 355 { TIM8_CH1, STM32_EXT13 }, 356 { TIM8_TRGO, STM32_EXT14 }, 357 {}, /* sentinel */ 358 }; 359 360 /* 361 * stm32f4_smp_bits[] - describe sampling time register index & bit fields 362 * Sorted so it can be indexed by channel number. 363 */ 364 static const struct stm32_adc_regs stm32f4_smp_bits[] = { 365 /* STM32F4_ADC_SMPR2: smpr[] index, mask, shift for SMP0 to SMP9 */ 366 { 1, GENMASK(2, 0), 0 }, 367 { 1, GENMASK(5, 3), 3 }, 368 { 1, GENMASK(8, 6), 6 }, 369 { 1, GENMASK(11, 9), 9 }, 370 { 1, GENMASK(14, 12), 12 }, 371 { 1, GENMASK(17, 15), 15 }, 372 { 1, GENMASK(20, 18), 18 }, 373 { 1, GENMASK(23, 21), 21 }, 374 { 1, GENMASK(26, 24), 24 }, 375 { 1, GENMASK(29, 27), 27 }, 376 /* STM32F4_ADC_SMPR1, smpr[] index, mask, shift for SMP10 to SMP18 */ 377 { 0, GENMASK(2, 0), 0 }, 378 { 0, GENMASK(5, 3), 3 }, 379 { 0, GENMASK(8, 6), 6 }, 380 { 0, GENMASK(11, 9), 9 }, 381 { 0, GENMASK(14, 12), 12 }, 382 { 0, GENMASK(17, 15), 15 }, 383 { 0, GENMASK(20, 18), 18 }, 384 { 0, GENMASK(23, 21), 21 }, 385 { 0, GENMASK(26, 24), 24 }, 386 }; 387 388 /* STM32F4 programmable sampling time (ADC clock cycles) */ 389 static const unsigned int stm32f4_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = { 390 3, 15, 28, 56, 84, 112, 144, 480, 391 }; 392 393 static const struct stm32_adc_regspec stm32f4_adc_regspec = { 394 .dr = STM32F4_ADC_DR, 395 .ier_eoc = { STM32F4_ADC_CR1, STM32F4_EOCIE }, 396 .ier_ovr = { STM32F4_ADC_CR1, STM32F4_OVRIE }, 397 .isr_eoc = { STM32F4_ADC_SR, STM32F4_EOC }, 398 .isr_ovr = { STM32F4_ADC_SR, STM32F4_OVR }, 399 .sqr = stm32f4_sq, 400 .exten = { STM32F4_ADC_CR2, STM32F4_EXTEN_MASK, STM32F4_EXTEN_SHIFT }, 401 .extsel = { STM32F4_ADC_CR2, STM32F4_EXTSEL_MASK, 402 STM32F4_EXTSEL_SHIFT }, 403 .res = { STM32F4_ADC_CR1, STM32F4_RES_MASK, STM32F4_RES_SHIFT }, 404 .smpr = { STM32F4_ADC_SMPR1, STM32F4_ADC_SMPR2 }, 405 .smp_bits = stm32f4_smp_bits, 406 }; 407 408 static const struct stm32_adc_regs stm32h7_sq[STM32_ADC_MAX_SQ + 1] = { 409 /* L: len bit field description to be kept as first element */ 410 { STM32H7_ADC_SQR1, GENMASK(3, 0), 0 }, 411 /* SQ1..SQ16 registers & bit fields (reg, mask, shift) */ 412 { STM32H7_ADC_SQR1, GENMASK(10, 6), 6 }, 413 { STM32H7_ADC_SQR1, GENMASK(16, 12), 12 }, 414 { STM32H7_ADC_SQR1, GENMASK(22, 18), 18 }, 415 { STM32H7_ADC_SQR1, GENMASK(28, 24), 24 }, 416 { STM32H7_ADC_SQR2, GENMASK(4, 0), 0 }, 417 { STM32H7_ADC_SQR2, GENMASK(10, 6), 6 }, 418 { STM32H7_ADC_SQR2, GENMASK(16, 12), 12 }, 419 { STM32H7_ADC_SQR2, GENMASK(22, 18), 18 }, 420 { STM32H7_ADC_SQR2, GENMASK(28, 24), 24 }, 421 { STM32H7_ADC_SQR3, GENMASK(4, 0), 0 }, 422 { STM32H7_ADC_SQR3, GENMASK(10, 6), 6 }, 423 { STM32H7_ADC_SQR3, GENMASK(16, 12), 12 }, 424 { 
STM32H7_ADC_SQR3, GENMASK(22, 18), 18 }, 425 { STM32H7_ADC_SQR3, GENMASK(28, 24), 24 }, 426 { STM32H7_ADC_SQR4, GENMASK(4, 0), 0 }, 427 { STM32H7_ADC_SQR4, GENMASK(10, 6), 6 }, 428 }; 429 430 /* STM32H7 external trigger sources for all instances */ 431 static struct stm32_adc_trig_info stm32h7_adc_trigs[] = { 432 { TIM1_CH1, STM32_EXT0 }, 433 { TIM1_CH2, STM32_EXT1 }, 434 { TIM1_CH3, STM32_EXT2 }, 435 { TIM2_CH2, STM32_EXT3 }, 436 { TIM3_TRGO, STM32_EXT4 }, 437 { TIM4_CH4, STM32_EXT5 }, 438 { TIM8_TRGO, STM32_EXT7 }, 439 { TIM8_TRGO2, STM32_EXT8 }, 440 { TIM1_TRGO, STM32_EXT9 }, 441 { TIM1_TRGO2, STM32_EXT10 }, 442 { TIM2_TRGO, STM32_EXT11 }, 443 { TIM4_TRGO, STM32_EXT12 }, 444 { TIM6_TRGO, STM32_EXT13 }, 445 { TIM15_TRGO, STM32_EXT14 }, 446 { TIM3_CH4, STM32_EXT15 }, 447 { LPTIM1_OUT, STM32_EXT18 }, 448 { LPTIM2_OUT, STM32_EXT19 }, 449 { LPTIM3_OUT, STM32_EXT20 }, 450 {}, 451 }; 452 453 /* 454 * stm32h7_smp_bits - describe sampling time register index & bit fields 455 * Sorted so it can be indexed by channel number. 456 */ 457 static const struct stm32_adc_regs stm32h7_smp_bits[] = { 458 /* STM32H7_ADC_SMPR1, smpr[] index, mask, shift for SMP0 to SMP9 */ 459 { 0, GENMASK(2, 0), 0 }, 460 { 0, GENMASK(5, 3), 3 }, 461 { 0, GENMASK(8, 6), 6 }, 462 { 0, GENMASK(11, 9), 9 }, 463 { 0, GENMASK(14, 12), 12 }, 464 { 0, GENMASK(17, 15), 15 }, 465 { 0, GENMASK(20, 18), 18 }, 466 { 0, GENMASK(23, 21), 21 }, 467 { 0, GENMASK(26, 24), 24 }, 468 { 0, GENMASK(29, 27), 27 }, 469 /* STM32H7_ADC_SMPR2, smpr[] index, mask, shift for SMP10 to SMP19 */ 470 { 1, GENMASK(2, 0), 0 }, 471 { 1, GENMASK(5, 3), 3 }, 472 { 1, GENMASK(8, 6), 6 }, 473 { 1, GENMASK(11, 9), 9 }, 474 { 1, GENMASK(14, 12), 12 }, 475 { 1, GENMASK(17, 15), 15 }, 476 { 1, GENMASK(20, 18), 18 }, 477 { 1, GENMASK(23, 21), 21 }, 478 { 1, GENMASK(26, 24), 24 }, 479 { 1, GENMASK(29, 27), 27 }, 480 }; 481 482 /* STM32H7 programmable sampling time (ADC clock cycles, rounded down) */ 483 static const unsigned int stm32h7_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = { 484 1, 2, 8, 16, 32, 64, 387, 810, 485 }; 486 487 static const struct stm32_adc_regspec stm32h7_adc_regspec = { 488 .dr = STM32H7_ADC_DR, 489 .ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE }, 490 .ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE }, 491 .isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC }, 492 .isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR }, 493 .sqr = stm32h7_sq, 494 .exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT }, 495 .extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK, 496 STM32H7_EXTSEL_SHIFT }, 497 .res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT }, 498 .smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 }, 499 .smp_bits = stm32h7_smp_bits, 500 }; 501 502 static const struct stm32_adc_regspec stm32mp1_adc_regspec = { 503 .dr = STM32H7_ADC_DR, 504 .ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE }, 505 .ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE }, 506 .isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC }, 507 .isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR }, 508 .sqr = stm32h7_sq, 509 .exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT }, 510 .extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK, 511 STM32H7_EXTSEL_SHIFT }, 512 .res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT }, 513 .smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 }, 514 .smp_bits = stm32h7_smp_bits, 515 .or_vdd = { STM32MP1_ADC2_OR, STM32MP1_VDDCOREEN }, 516 .ccr_vbat = { STM32H7_ADC_CCR, STM32H7_VBATEN }, 517 .ccr_vref = { STM32H7_ADC_CCR, STM32H7_VREFEN }, 518 }; 519 520 /* 521 * STM32 ADC 
registers access routines 522 * @adc: stm32 adc instance 523 * @reg: reg offset in adc instance 524 * 525 * Note: All instances share same base, with 0x0, 0x100 or 0x200 offset resp. 526 * for adc1, adc2 and adc3. 527 */ 528 static u32 stm32_adc_readl(struct stm32_adc *adc, u32 reg) 529 { 530 return readl_relaxed(adc->common->base + adc->offset + reg); 531 } 532 533 #define stm32_adc_readl_addr(addr) stm32_adc_readl(adc, addr) 534 535 #define stm32_adc_readl_poll_timeout(reg, val, cond, sleep_us, timeout_us) \ 536 readx_poll_timeout(stm32_adc_readl_addr, reg, val, \ 537 cond, sleep_us, timeout_us) 538 539 static u16 stm32_adc_readw(struct stm32_adc *adc, u32 reg) 540 { 541 return readw_relaxed(adc->common->base + adc->offset + reg); 542 } 543 544 static void stm32_adc_writel(struct stm32_adc *adc, u32 reg, u32 val) 545 { 546 writel_relaxed(val, adc->common->base + adc->offset + reg); 547 } 548 549 static void stm32_adc_set_bits(struct stm32_adc *adc, u32 reg, u32 bits) 550 { 551 unsigned long flags; 552 553 spin_lock_irqsave(&adc->lock, flags); 554 stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) | bits); 555 spin_unlock_irqrestore(&adc->lock, flags); 556 } 557 558 static void stm32_adc_set_bits_common(struct stm32_adc *adc, u32 reg, u32 bits) 559 { 560 spin_lock(&adc->common->lock); 561 writel_relaxed(readl_relaxed(adc->common->base + reg) | bits, 562 adc->common->base + reg); 563 spin_unlock(&adc->common->lock); 564 } 565 566 static void stm32_adc_clr_bits(struct stm32_adc *adc, u32 reg, u32 bits) 567 { 568 unsigned long flags; 569 570 spin_lock_irqsave(&adc->lock, flags); 571 stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) & ~bits); 572 spin_unlock_irqrestore(&adc->lock, flags); 573 } 574 575 static void stm32_adc_clr_bits_common(struct stm32_adc *adc, u32 reg, u32 bits) 576 { 577 spin_lock(&adc->common->lock); 578 writel_relaxed(readl_relaxed(adc->common->base + reg) & ~bits, 579 adc->common->base + reg); 580 spin_unlock(&adc->common->lock); 581 } 582 583 /** 584 * stm32_adc_conv_irq_enable() - Enable end of conversion interrupt 585 * @adc: stm32 adc instance 586 */ 587 static void stm32_adc_conv_irq_enable(struct stm32_adc *adc) 588 { 589 stm32_adc_set_bits(adc, adc->cfg->regs->ier_eoc.reg, 590 adc->cfg->regs->ier_eoc.mask); 591 }; 592 593 /** 594 * stm32_adc_conv_irq_disable() - Disable end of conversion interrupt 595 * @adc: stm32 adc instance 596 */ 597 static void stm32_adc_conv_irq_disable(struct stm32_adc *adc) 598 { 599 stm32_adc_clr_bits(adc, adc->cfg->regs->ier_eoc.reg, 600 adc->cfg->regs->ier_eoc.mask); 601 } 602 603 static void stm32_adc_ovr_irq_enable(struct stm32_adc *adc) 604 { 605 stm32_adc_set_bits(adc, adc->cfg->regs->ier_ovr.reg, 606 adc->cfg->regs->ier_ovr.mask); 607 } 608 609 static void stm32_adc_ovr_irq_disable(struct stm32_adc *adc) 610 { 611 stm32_adc_clr_bits(adc, adc->cfg->regs->ier_ovr.reg, 612 adc->cfg->regs->ier_ovr.mask); 613 } 614 615 static void stm32_adc_set_res(struct stm32_adc *adc) 616 { 617 const struct stm32_adc_regs *res = &adc->cfg->regs->res; 618 u32 val; 619 620 val = stm32_adc_readl(adc, res->reg); 621 val = (val & ~res->mask) | (adc->res << res->shift); 622 stm32_adc_writel(adc, res->reg, val); 623 } 624 625 static int stm32_adc_hw_stop(struct device *dev) 626 { 627 struct iio_dev *indio_dev = dev_get_drvdata(dev); 628 struct stm32_adc *adc = iio_priv(indio_dev); 629 630 if (adc->cfg->unprepare) 631 adc->cfg->unprepare(indio_dev); 632 633 clk_disable_unprepare(adc->clk); 634 635 return 0; 636 } 637 638 static int 
stm32_adc_hw_start(struct device *dev) 639 { 640 struct iio_dev *indio_dev = dev_get_drvdata(dev); 641 struct stm32_adc *adc = iio_priv(indio_dev); 642 int ret; 643 644 ret = clk_prepare_enable(adc->clk); 645 if (ret) 646 return ret; 647 648 stm32_adc_set_res(adc); 649 650 if (adc->cfg->prepare) { 651 ret = adc->cfg->prepare(indio_dev); 652 if (ret) 653 goto err_clk_dis; 654 } 655 656 return 0; 657 658 err_clk_dis: 659 clk_disable_unprepare(adc->clk); 660 661 return ret; 662 } 663 664 static void stm32_adc_int_ch_enable(struct iio_dev *indio_dev) 665 { 666 struct stm32_adc *adc = iio_priv(indio_dev); 667 u32 i; 668 669 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) { 670 if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE) 671 continue; 672 673 switch (i) { 674 case STM32_ADC_INT_CH_VDDCORE: 675 dev_dbg(&indio_dev->dev, "Enable VDDCore\n"); 676 stm32_adc_set_bits(adc, adc->cfg->regs->or_vdd.reg, 677 adc->cfg->regs->or_vdd.mask); 678 break; 679 case STM32_ADC_INT_CH_VREFINT: 680 dev_dbg(&indio_dev->dev, "Enable VREFInt\n"); 681 stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vref.reg, 682 adc->cfg->regs->ccr_vref.mask); 683 break; 684 case STM32_ADC_INT_CH_VBAT: 685 dev_dbg(&indio_dev->dev, "Enable VBAT\n"); 686 stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vbat.reg, 687 adc->cfg->regs->ccr_vbat.mask); 688 break; 689 } 690 } 691 } 692 693 static void stm32_adc_int_ch_disable(struct stm32_adc *adc) 694 { 695 u32 i; 696 697 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) { 698 if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE) 699 continue; 700 701 switch (i) { 702 case STM32_ADC_INT_CH_VDDCORE: 703 stm32_adc_clr_bits(adc, adc->cfg->regs->or_vdd.reg, 704 adc->cfg->regs->or_vdd.mask); 705 break; 706 case STM32_ADC_INT_CH_VREFINT: 707 stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vref.reg, 708 adc->cfg->regs->ccr_vref.mask); 709 break; 710 case STM32_ADC_INT_CH_VBAT: 711 stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vbat.reg, 712 adc->cfg->regs->ccr_vbat.mask); 713 break; 714 } 715 } 716 } 717 718 /** 719 * stm32f4_adc_start_conv() - Start conversions for regular channels. 720 * @indio_dev: IIO device instance 721 * @dma: use dma to transfer conversion result 722 * 723 * Start conversions for regular channels. 724 * Also take care of normal or DMA mode. Circular DMA may be used for regular 725 * conversions, in IIO buffer modes. Otherwise, use ADC interrupt with direct 726 * DR read instead (e.g. read_raw, or triggered buffer mode without DMA). 727 */ 728 static void stm32f4_adc_start_conv(struct iio_dev *indio_dev, bool dma) 729 { 730 struct stm32_adc *adc = iio_priv(indio_dev); 731 732 stm32_adc_set_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN); 733 734 if (dma) 735 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, 736 STM32F4_DMA | STM32F4_DDS); 737 738 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_EOCS | STM32F4_ADON); 739 740 /* Wait for Power-up time (tSTAB from datasheet) */ 741 usleep_range(2, 3); 742 743 /* Software start ? (e.g. trigger detection disabled ?) 
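 * For illustration only, a sketch of the EXTEN semantics checked just below
 * (values from enum stm32_adc_exten above, nothing beyond what the code does):
 *   EXTEN == STM32_EXTEN_SWTRIG           -> no HW trigger, SWSTART launches it
 *   EXTEN == STM32_EXTEN_HWTRIG_RISING_EDGE (or falling/both edges)
 *                                         -> conversions start on the EXTSEL
 *                                            source, SWSTART is left untouched.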
*/ 744 if (!(stm32_adc_readl(adc, STM32F4_ADC_CR2) & STM32F4_EXTEN_MASK)) 745 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_SWSTART); 746 } 747 748 static void stm32f4_adc_stop_conv(struct iio_dev *indio_dev) 749 { 750 struct stm32_adc *adc = iio_priv(indio_dev); 751 752 stm32_adc_clr_bits(adc, STM32F4_ADC_CR2, STM32F4_EXTEN_MASK); 753 stm32_adc_clr_bits(adc, STM32F4_ADC_SR, STM32F4_STRT); 754 755 stm32_adc_clr_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN); 756 stm32_adc_clr_bits(adc, STM32F4_ADC_CR2, 757 STM32F4_ADON | STM32F4_DMA | STM32F4_DDS); 758 } 759 760 static void stm32f4_adc_irq_clear(struct iio_dev *indio_dev, u32 msk) 761 { 762 struct stm32_adc *adc = iio_priv(indio_dev); 763 764 stm32_adc_clr_bits(adc, adc->cfg->regs->isr_eoc.reg, msk); 765 } 766 767 static void stm32h7_adc_start_conv(struct iio_dev *indio_dev, bool dma) 768 { 769 struct stm32_adc *adc = iio_priv(indio_dev); 770 enum stm32h7_adc_dmngt dmngt; 771 unsigned long flags; 772 u32 val; 773 774 if (dma) 775 dmngt = STM32H7_DMNGT_DMA_CIRC; 776 else 777 dmngt = STM32H7_DMNGT_DR_ONLY; 778 779 spin_lock_irqsave(&adc->lock, flags); 780 val = stm32_adc_readl(adc, STM32H7_ADC_CFGR); 781 val = (val & ~STM32H7_DMNGT_MASK) | (dmngt << STM32H7_DMNGT_SHIFT); 782 stm32_adc_writel(adc, STM32H7_ADC_CFGR, val); 783 spin_unlock_irqrestore(&adc->lock, flags); 784 785 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTART); 786 } 787 788 static void stm32h7_adc_stop_conv(struct iio_dev *indio_dev) 789 { 790 struct stm32_adc *adc = iio_priv(indio_dev); 791 int ret; 792 u32 val; 793 794 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTP); 795 796 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 797 !(val & (STM32H7_ADSTART)), 798 100, STM32_ADC_TIMEOUT_US); 799 if (ret) 800 dev_warn(&indio_dev->dev, "stop failed\n"); 801 802 stm32_adc_clr_bits(adc, STM32H7_ADC_CFGR, STM32H7_DMNGT_MASK); 803 } 804 805 static void stm32h7_adc_irq_clear(struct iio_dev *indio_dev, u32 msk) 806 { 807 struct stm32_adc *adc = iio_priv(indio_dev); 808 /* On STM32H7 IRQs are cleared by writing 1 into ISR register */ 809 stm32_adc_set_bits(adc, adc->cfg->regs->isr_eoc.reg, msk); 810 } 811 812 static int stm32h7_adc_exit_pwr_down(struct iio_dev *indio_dev) 813 { 814 struct stm32_adc *adc = iio_priv(indio_dev); 815 int ret; 816 u32 val; 817 818 /* Exit deep power down, then enable ADC voltage regulator */ 819 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD); 820 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADVREGEN); 821 822 if (adc->common->rate > STM32H7_BOOST_CLKRATE) 823 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST); 824 825 /* Wait for startup time */ 826 if (!adc->cfg->has_vregready) { 827 usleep_range(10, 20); 828 return 0; 829 } 830 831 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val, 832 val & STM32MP1_VREGREADY, 100, 833 STM32_ADC_TIMEOUT_US); 834 if (ret) { 835 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD); 836 dev_err(&indio_dev->dev, "Failed to exit power down\n"); 837 } 838 839 return ret; 840 } 841 842 static void stm32h7_adc_enter_pwr_down(struct stm32_adc *adc) 843 { 844 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST); 845 846 /* Setting DEEPPWD disables ADC vreg and clears ADVREGEN */ 847 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD); 848 } 849 850 static int stm32h7_adc_enable(struct iio_dev *indio_dev) 851 { 852 struct stm32_adc *adc = iio_priv(indio_dev); 853 int ret; 854 u32 val; 855 856 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADEN); 857 858 /* Poll for ADRDY to be set 
(after adc startup time) */ 859 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val, 860 val & STM32H7_ADRDY, 861 100, STM32_ADC_TIMEOUT_US); 862 if (ret) { 863 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS); 864 dev_err(&indio_dev->dev, "Failed to enable ADC\n"); 865 } else { 866 /* Clear ADRDY by writing one */ 867 stm32_adc_set_bits(adc, STM32H7_ADC_ISR, STM32H7_ADRDY); 868 } 869 870 return ret; 871 } 872 873 static void stm32h7_adc_disable(struct iio_dev *indio_dev) 874 { 875 struct stm32_adc *adc = iio_priv(indio_dev); 876 int ret; 877 u32 val; 878 879 /* Disable ADC and wait until it's effectively disabled */ 880 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS); 881 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 882 !(val & STM32H7_ADEN), 100, 883 STM32_ADC_TIMEOUT_US); 884 if (ret) 885 dev_warn(&indio_dev->dev, "Failed to disable\n"); 886 } 887 888 /** 889 * stm32h7_adc_read_selfcalib() - read calibration shadow regs, save result 890 * @indio_dev: IIO device instance 891 * Note: Must be called once ADC is enabled, so LINCALRDYW[1..6] are writable 892 */ 893 static int stm32h7_adc_read_selfcalib(struct iio_dev *indio_dev) 894 { 895 struct stm32_adc *adc = iio_priv(indio_dev); 896 int i, ret; 897 u32 lincalrdyw_mask, val; 898 899 /* Read linearity calibration */ 900 lincalrdyw_mask = STM32H7_LINCALRDYW6; 901 for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) { 902 /* Clear STM32H7_LINCALRDYW[6..1]: transfer calib to CALFACT2 */ 903 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask); 904 905 /* Poll: wait calib data to be ready in CALFACT2 register */ 906 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 907 !(val & lincalrdyw_mask), 908 100, STM32_ADC_TIMEOUT_US); 909 if (ret) { 910 dev_err(&indio_dev->dev, "Failed to read calfact\n"); 911 return ret; 912 } 913 914 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2); 915 adc->cal.lincalfact[i] = (val & STM32H7_LINCALFACT_MASK); 916 adc->cal.lincalfact[i] >>= STM32H7_LINCALFACT_SHIFT; 917 918 lincalrdyw_mask >>= 1; 919 } 920 921 /* Read offset calibration */ 922 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT); 923 adc->cal.calfact_s = (val & STM32H7_CALFACT_S_MASK); 924 adc->cal.calfact_s >>= STM32H7_CALFACT_S_SHIFT; 925 adc->cal.calfact_d = (val & STM32H7_CALFACT_D_MASK); 926 adc->cal.calfact_d >>= STM32H7_CALFACT_D_SHIFT; 927 adc->cal.calibrated = true; 928 929 return 0; 930 } 931 932 /** 933 * stm32h7_adc_restore_selfcalib() - Restore saved self-calibration result 934 * @indio_dev: IIO device instance 935 * Note: ADC must be enabled, with no on-going conversions. 936 */ 937 static int stm32h7_adc_restore_selfcalib(struct iio_dev *indio_dev) 938 { 939 struct stm32_adc *adc = iio_priv(indio_dev); 940 int i, ret; 941 u32 lincalrdyw_mask, val; 942 943 val = (adc->cal.calfact_s << STM32H7_CALFACT_S_SHIFT) | 944 (adc->cal.calfact_d << STM32H7_CALFACT_D_SHIFT); 945 stm32_adc_writel(adc, STM32H7_ADC_CALFACT, val); 946 947 lincalrdyw_mask = STM32H7_LINCALRDYW6; 948 for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) { 949 /* 950 * Write saved calibration data to shadow registers: 951 * Write CALFACT2, and set LINCALRDYW[6..1] bit to trigger 952 * data write. Then poll to wait for complete transfer. 
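 * Sketch of the first iteration (illustrative only, it mirrors the code
 * below): write cal.lincalfact[5] << STM32H7_LINCALFACT_SHIFT to CALFACT2,
 * set LINCALRDYW6 in CR, then poll CR until LINCALRDYW6 reads back as 1.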
953 */ 954 val = adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT; 955 stm32_adc_writel(adc, STM32H7_ADC_CALFACT2, val); 956 stm32_adc_set_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask); 957 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 958 val & lincalrdyw_mask, 959 100, STM32_ADC_TIMEOUT_US); 960 if (ret) { 961 dev_err(&indio_dev->dev, "Failed to write calfact\n"); 962 return ret; 963 } 964 965 /* 966 * Read back calibration data, has two effects: 967 * - It ensures bits LINCALRDYW[6..1] are kept cleared 968 * for next time calibration needs to be restored. 969 * - BTW, bit clear triggers a read, then check data has been 970 * correctly written. 971 */ 972 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask); 973 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 974 !(val & lincalrdyw_mask), 975 100, STM32_ADC_TIMEOUT_US); 976 if (ret) { 977 dev_err(&indio_dev->dev, "Failed to read calfact\n"); 978 return ret; 979 } 980 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2); 981 if (val != adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT) { 982 dev_err(&indio_dev->dev, "calfact not consistent\n"); 983 return -EIO; 984 } 985 986 lincalrdyw_mask >>= 1; 987 } 988 989 return 0; 990 } 991 992 /* 993 * Fixed timeout value for ADC calibration. 994 * worst cases: 995 * - low clock frequency 996 * - maximum prescalers 997 * Calibration requires: 998 * - 131,072 ADC clock cycle for the linear calibration 999 * - 20 ADC clock cycle for the offset calibration 1000 * 1001 * Set to 100ms for now 1002 */ 1003 #define STM32H7_ADC_CALIB_TIMEOUT_US 100000 1004 1005 /** 1006 * stm32h7_adc_selfcalib() - Procedure to calibrate ADC 1007 * @indio_dev: IIO device instance 1008 * Note: Must be called once ADC is out of power down. 1009 */ 1010 static int stm32h7_adc_selfcalib(struct iio_dev *indio_dev) 1011 { 1012 struct stm32_adc *adc = iio_priv(indio_dev); 1013 int ret; 1014 u32 val; 1015 1016 if (adc->cal.calibrated) 1017 return true; 1018 1019 /* 1020 * Select calibration mode: 1021 * - Offset calibration for single ended inputs 1022 * - No linearity calibration (do it later, before reading it) 1023 */ 1024 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_ADCALDIF); 1025 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_ADCALLIN); 1026 1027 /* Start calibration, then wait for completion */ 1028 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL); 1029 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 1030 !(val & STM32H7_ADCAL), 100, 1031 STM32H7_ADC_CALIB_TIMEOUT_US); 1032 if (ret) { 1033 dev_err(&indio_dev->dev, "calibration failed\n"); 1034 goto out; 1035 } 1036 1037 /* 1038 * Select calibration mode, then start calibration: 1039 * - Offset calibration for differential input 1040 * - Linearity calibration (needs to be done only once for single/diff) 1041 * will run simultaneously with offset calibration. 1042 */ 1043 stm32_adc_set_bits(adc, STM32H7_ADC_CR, 1044 STM32H7_ADCALDIF | STM32H7_ADCALLIN); 1045 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL); 1046 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val, 1047 !(val & STM32H7_ADCAL), 100, 1048 STM32H7_ADC_CALIB_TIMEOUT_US); 1049 if (ret) { 1050 dev_err(&indio_dev->dev, "calibration failed\n"); 1051 goto out; 1052 } 1053 1054 out: 1055 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, 1056 STM32H7_ADCALDIF | STM32H7_ADCALLIN); 1057 1058 return ret; 1059 } 1060 1061 /** 1062 * stm32h7_adc_prepare() - Leave power down mode to enable ADC. 1063 * @indio_dev: IIO device instance 1064 * Leave power down mode. 
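 * (Power-up sketch: clear DEEPPWD, set ADVREGEN, and set BOOST when the ADC
 * clock is above 20MHz; see stm32h7_adc_exit_pwr_down().)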
1065 * Configure channels as single ended or differential before enabling ADC. 1066 * Enable ADC. 1067 * Restore calibration data. 1068 * Pre-select channels that may be used in PCSEL (required by input MUX / IO): 1069 * - Only one input is selected for single ended (e.g. 'vinp') 1070 * - Two inputs are selected for differential channels (e.g. 'vinp' & 'vinn') 1071 */ 1072 static int stm32h7_adc_prepare(struct iio_dev *indio_dev) 1073 { 1074 struct stm32_adc *adc = iio_priv(indio_dev); 1075 int calib, ret; 1076 1077 ret = stm32h7_adc_exit_pwr_down(indio_dev); 1078 if (ret) 1079 return ret; 1080 1081 ret = stm32h7_adc_selfcalib(indio_dev); 1082 if (ret < 0) 1083 goto pwr_dwn; 1084 calib = ret; 1085 1086 stm32_adc_int_ch_enable(indio_dev); 1087 1088 stm32_adc_writel(adc, STM32H7_ADC_DIFSEL, adc->difsel); 1089 1090 ret = stm32h7_adc_enable(indio_dev); 1091 if (ret) 1092 goto ch_disable; 1093 1094 /* Either restore or read calibration result for future reference */ 1095 if (calib) 1096 ret = stm32h7_adc_restore_selfcalib(indio_dev); 1097 else 1098 ret = stm32h7_adc_read_selfcalib(indio_dev); 1099 if (ret) 1100 goto disable; 1101 1102 stm32_adc_writel(adc, STM32H7_ADC_PCSEL, adc->pcsel); 1103 1104 return 0; 1105 1106 disable: 1107 stm32h7_adc_disable(indio_dev); 1108 ch_disable: 1109 stm32_adc_int_ch_disable(adc); 1110 pwr_dwn: 1111 stm32h7_adc_enter_pwr_down(adc); 1112 1113 return ret; 1114 } 1115 1116 static void stm32h7_adc_unprepare(struct iio_dev *indio_dev) 1117 { 1118 struct stm32_adc *adc = iio_priv(indio_dev); 1119 1120 stm32h7_adc_disable(indio_dev); 1121 stm32_adc_int_ch_disable(adc); 1122 stm32h7_adc_enter_pwr_down(adc); 1123 } 1124 1125 /** 1126 * stm32_adc_conf_scan_seq() - Build regular channels scan sequence 1127 * @indio_dev: IIO device 1128 * @scan_mask: channels to be converted 1129 * 1130 * Conversion sequence : 1131 * Apply sampling time settings for all channels. 1132 * Configure ADC scan sequence based on selected channels in scan_mask. 1133 * Add channels to SQR registers, from scan_mask LSB to MSB, then 1134 * program sequence len. 1135 */ 1136 static int stm32_adc_conf_scan_seq(struct iio_dev *indio_dev, 1137 const unsigned long *scan_mask) 1138 { 1139 struct stm32_adc *adc = iio_priv(indio_dev); 1140 const struct stm32_adc_regs *sqr = adc->cfg->regs->sqr; 1141 const struct iio_chan_spec *chan; 1142 u32 val, bit; 1143 int i = 0; 1144 1145 /* Apply sampling time settings */ 1146 stm32_adc_writel(adc, adc->cfg->regs->smpr[0], adc->smpr_val[0]); 1147 stm32_adc_writel(adc, adc->cfg->regs->smpr[1], adc->smpr_val[1]); 1148 1149 for_each_set_bit(bit, scan_mask, indio_dev->masklength) { 1150 chan = indio_dev->channels + bit; 1151 /* 1152 * Assign one channel per SQ entry in regular 1153 * sequence, starting with SQ1. 
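 * Worked example (illustrative): if the two enabled scan entries map to
 * channels 1 and 3, the loop programs SQ1 = 1 and SQ2 = 3, and the L field
 * below is then set to i - 1 = 1, i.e. a sequence of two conversions.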
1154 */ 1155 i++; 1156 if (i > STM32_ADC_MAX_SQ) 1157 return -EINVAL; 1158 1159 dev_dbg(&indio_dev->dev, "%s chan %d to SQ%d\n", 1160 __func__, chan->channel, i); 1161 1162 val = stm32_adc_readl(adc, sqr[i].reg); 1163 val &= ~sqr[i].mask; 1164 val |= chan->channel << sqr[i].shift; 1165 stm32_adc_writel(adc, sqr[i].reg, val); 1166 } 1167 1168 if (!i) 1169 return -EINVAL; 1170 1171 /* Sequence len */ 1172 val = stm32_adc_readl(adc, sqr[0].reg); 1173 val &= ~sqr[0].mask; 1174 val |= ((i - 1) << sqr[0].shift); 1175 stm32_adc_writel(adc, sqr[0].reg, val); 1176 1177 return 0; 1178 } 1179 1180 /** 1181 * stm32_adc_get_trig_extsel() - Get external trigger selection 1182 * @indio_dev: IIO device structure 1183 * @trig: trigger 1184 * 1185 * Returns trigger extsel value, if trig matches, -EINVAL otherwise. 1186 */ 1187 static int stm32_adc_get_trig_extsel(struct iio_dev *indio_dev, 1188 struct iio_trigger *trig) 1189 { 1190 struct stm32_adc *adc = iio_priv(indio_dev); 1191 int i; 1192 1193 /* lookup triggers registered by stm32 timer trigger driver */ 1194 for (i = 0; adc->cfg->trigs[i].name; i++) { 1195 /** 1196 * Checking both stm32 timer trigger type and trig name 1197 * should be safe against arbitrary trigger names. 1198 */ 1199 if ((is_stm32_timer_trigger(trig) || 1200 is_stm32_lptim_trigger(trig)) && 1201 !strcmp(adc->cfg->trigs[i].name, trig->name)) { 1202 return adc->cfg->trigs[i].extsel; 1203 } 1204 } 1205 1206 return -EINVAL; 1207 } 1208 1209 /** 1210 * stm32_adc_set_trig() - Set a regular trigger 1211 * @indio_dev: IIO device 1212 * @trig: IIO trigger 1213 * 1214 * Set trigger source/polarity (e.g. SW, or HW with polarity) : 1215 * - if HW trigger disabled (e.g. trig == NULL, conversion launched by sw) 1216 * - if HW trigger enabled, set source & polarity 1217 */ 1218 static int stm32_adc_set_trig(struct iio_dev *indio_dev, 1219 struct iio_trigger *trig) 1220 { 1221 struct stm32_adc *adc = iio_priv(indio_dev); 1222 u32 val, extsel = 0, exten = STM32_EXTEN_SWTRIG; 1223 unsigned long flags; 1224 int ret; 1225 1226 if (trig) { 1227 ret = stm32_adc_get_trig_extsel(indio_dev, trig); 1228 if (ret < 0) 1229 return ret; 1230 1231 /* set trigger source and polarity (default to rising edge) */ 1232 extsel = ret; 1233 exten = adc->trigger_polarity + STM32_EXTEN_HWTRIG_RISING_EDGE; 1234 } 1235 1236 spin_lock_irqsave(&adc->lock, flags); 1237 val = stm32_adc_readl(adc, adc->cfg->regs->exten.reg); 1238 val &= ~(adc->cfg->regs->exten.mask | adc->cfg->regs->extsel.mask); 1239 val |= exten << adc->cfg->regs->exten.shift; 1240 val |= extsel << adc->cfg->regs->extsel.shift; 1241 stm32_adc_writel(adc, adc->cfg->regs->exten.reg, val); 1242 spin_unlock_irqrestore(&adc->lock, flags); 1243 1244 return 0; 1245 } 1246 1247 static int stm32_adc_set_trig_pol(struct iio_dev *indio_dev, 1248 const struct iio_chan_spec *chan, 1249 unsigned int type) 1250 { 1251 struct stm32_adc *adc = iio_priv(indio_dev); 1252 1253 adc->trigger_polarity = type; 1254 1255 return 0; 1256 } 1257 1258 static int stm32_adc_get_trig_pol(struct iio_dev *indio_dev, 1259 const struct iio_chan_spec *chan) 1260 { 1261 struct stm32_adc *adc = iio_priv(indio_dev); 1262 1263 return adc->trigger_polarity; 1264 } 1265 1266 static const char * const stm32_trig_pol_items[] = { 1267 "rising-edge", "falling-edge", "both-edges", 1268 }; 1269 1270 static const struct iio_enum stm32_adc_trig_pol = { 1271 .items = stm32_trig_pol_items, 1272 .num_items = ARRAY_SIZE(stm32_trig_pol_items), 1273 .get = stm32_adc_get_trig_pol, 1274 .set = stm32_adc_set_trig_pol, 
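	/*
	 * Illustrative note: the enum index stored in trigger_polarity maps
	 * onto EXTEN in stm32_adc_set_trig(), e.g. "rising-edge" (0) gives
	 * STM32_EXTEN_HWTRIG_RISING_EDGE and "both-edges" (2) gives
	 * STM32_EXTEN_HWTRIG_BOTH_EDGES.
	 */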
1275 }; 1276 1277 /** 1278 * stm32_adc_single_conv() - Performs a single conversion 1279 * @indio_dev: IIO device 1280 * @chan: IIO channel 1281 * @res: conversion result 1282 * 1283 * The function performs a single conversion on a given channel: 1284 * - Apply sampling time settings 1285 * - Program sequencer with one channel (e.g. in SQ1 with len = 1) 1286 * - Use SW trigger 1287 * - Start conversion, then wait for interrupt completion. 1288 */ 1289 static int stm32_adc_single_conv(struct iio_dev *indio_dev, 1290 const struct iio_chan_spec *chan, 1291 int *res) 1292 { 1293 struct stm32_adc *adc = iio_priv(indio_dev); 1294 struct device *dev = indio_dev->dev.parent; 1295 const struct stm32_adc_regspec *regs = adc->cfg->regs; 1296 long timeout; 1297 u32 val; 1298 int ret; 1299 1300 reinit_completion(&adc->completion); 1301 1302 adc->bufi = 0; 1303 1304 ret = pm_runtime_resume_and_get(dev); 1305 if (ret < 0) 1306 return ret; 1307 1308 /* Apply sampling time settings */ 1309 stm32_adc_writel(adc, regs->smpr[0], adc->smpr_val[0]); 1310 stm32_adc_writel(adc, regs->smpr[1], adc->smpr_val[1]); 1311 1312 /* Program chan number in regular sequence (SQ1) */ 1313 val = stm32_adc_readl(adc, regs->sqr[1].reg); 1314 val &= ~regs->sqr[1].mask; 1315 val |= chan->channel << regs->sqr[1].shift; 1316 stm32_adc_writel(adc, regs->sqr[1].reg, val); 1317 1318 /* Set regular sequence len (0 for 1 conversion) */ 1319 stm32_adc_clr_bits(adc, regs->sqr[0].reg, regs->sqr[0].mask); 1320 1321 /* Trigger detection disabled (conversion can be launched in SW) */ 1322 stm32_adc_clr_bits(adc, regs->exten.reg, regs->exten.mask); 1323 1324 stm32_adc_conv_irq_enable(adc); 1325 1326 adc->cfg->start_conv(indio_dev, false); 1327 1328 timeout = wait_for_completion_interruptible_timeout( 1329 &adc->completion, STM32_ADC_TIMEOUT); 1330 if (timeout == 0) { 1331 ret = -ETIMEDOUT; 1332 } else if (timeout < 0) { 1333 ret = timeout; 1334 } else { 1335 *res = adc->buffer[0]; 1336 ret = IIO_VAL_INT; 1337 } 1338 1339 adc->cfg->stop_conv(indio_dev); 1340 1341 stm32_adc_conv_irq_disable(adc); 1342 1343 pm_runtime_mark_last_busy(dev); 1344 pm_runtime_put_autosuspend(dev); 1345 1346 return ret; 1347 } 1348 1349 static int stm32_adc_read_raw(struct iio_dev *indio_dev, 1350 struct iio_chan_spec const *chan, 1351 int *val, int *val2, long mask) 1352 { 1353 struct stm32_adc *adc = iio_priv(indio_dev); 1354 int ret; 1355 1356 switch (mask) { 1357 case IIO_CHAN_INFO_RAW: 1358 case IIO_CHAN_INFO_PROCESSED: 1359 ret = iio_device_claim_direct_mode(indio_dev); 1360 if (ret) 1361 return ret; 1362 if (chan->type == IIO_VOLTAGE) 1363 ret = stm32_adc_single_conv(indio_dev, chan, val); 1364 else 1365 ret = -EINVAL; 1366 1367 if (mask == IIO_CHAN_INFO_PROCESSED && adc->vrefint.vrefint_cal) 1368 *val = STM32_ADC_VREFINT_VOLTAGE * adc->vrefint.vrefint_cal / *val; 1369 1370 iio_device_release_direct_mode(indio_dev); 1371 return ret; 1372 1373 case IIO_CHAN_INFO_SCALE: 1374 if (chan->differential) { 1375 *val = adc->common->vref_mv * 2; 1376 *val2 = chan->scan_type.realbits; 1377 } else { 1378 *val = adc->common->vref_mv; 1379 *val2 = chan->scan_type.realbits; 1380 } 1381 return IIO_VAL_FRACTIONAL_LOG2; 1382 1383 case IIO_CHAN_INFO_OFFSET: 1384 if (chan->differential) 1385 /* ADC_full_scale / 2 */ 1386 *val = -((1 << chan->scan_type.realbits) / 2); 1387 else 1388 *val = 0; 1389 return IIO_VAL_INT; 1390 1391 default: 1392 return -EINVAL; 1393 } 1394 } 1395 1396 static void stm32_adc_irq_clear(struct iio_dev *indio_dev, u32 msk) 1397 { 1398 struct stm32_adc *adc = 
iio_priv(indio_dev);
1399
1400 	adc->cfg->irq_clear(indio_dev, msk);
1401 }
1402
1403 static irqreturn_t stm32_adc_threaded_isr(int irq, void *data)
1404 {
1405 	struct iio_dev *indio_dev = data;
1406 	struct stm32_adc *adc = iio_priv(indio_dev);
1407 	const struct stm32_adc_regspec *regs = adc->cfg->regs;
1408 	u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg);
1409 	u32 mask = stm32_adc_readl(adc, regs->ier_eoc.reg);
1410
1411 	/* Check ovr status right now, as ovr mask should be already disabled */
1412 	if (status & regs->isr_ovr.mask) {
1413 		/*
1414 		 * Clear ovr bit to avoid subsequent calls to IRQ handler.
1415 		 * This requires the ADC to be stopped first. OVR bit state in
1416 		 * ISR is propagated to CSR register by hardware.
1417 		 */
1418 		adc->cfg->stop_conv(indio_dev);
1419 		stm32_adc_irq_clear(indio_dev, regs->isr_ovr.mask);
1420 		dev_err(&indio_dev->dev, "Overrun, stopping: restart needed\n");
1421 		return IRQ_HANDLED;
1422 	}
1423
1424 	if (!(status & mask))
1425 		dev_err_ratelimited(&indio_dev->dev,
1426 				    "Unexpected IRQ: IER=0x%08x, ISR=0x%08x\n",
1427 				    mask, status);
1428
1429 	return IRQ_NONE;
1430 }
1431
1432 static irqreturn_t stm32_adc_isr(int irq, void *data)
1433 {
1434 	struct iio_dev *indio_dev = data;
1435 	struct stm32_adc *adc = iio_priv(indio_dev);
1436 	const struct stm32_adc_regspec *regs = adc->cfg->regs;
1437 	u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg);
1438 	u32 mask = stm32_adc_readl(adc, regs->ier_eoc.reg);
1439
1440 	if (!(status & mask))
1441 		return IRQ_WAKE_THREAD;
1442
1443 	if (status & regs->isr_ovr.mask) {
1444 		/*
1445 		 * Overrun occurred on regular conversions: data for wrong
1446 		 * channel may be read. Unconditionally disable interrupts
1447 		 * to stop processing data and print error message.
1448 		 * Restarting the capture can be done by disabling, then
1449 		 * re-enabling it (e.g. write 0, then 1 to buffer/enable).
1450 		 */
1451 		stm32_adc_ovr_irq_disable(adc);
1452 		stm32_adc_conv_irq_disable(adc);
1453 		return IRQ_WAKE_THREAD;
1454 	}
1455
1456 	if (status & regs->isr_eoc.mask) {
1457 		/* Reading DR also clears EOC status flag */
1458 		adc->buffer[adc->bufi] = stm32_adc_readw(adc, regs->dr);
1459 		if (iio_buffer_enabled(indio_dev)) {
1460 			adc->bufi++;
1461 			if (adc->bufi >= adc->num_conv) {
1462 				stm32_adc_conv_irq_disable(adc);
1463 				iio_trigger_poll(indio_dev->trig);
1464 			}
1465 		} else {
1466 			complete(&adc->completion);
1467 		}
1468 		return IRQ_HANDLED;
1469 	}
1470
1471 	return IRQ_NONE;
1472 }
1473
1474 /**
1475  * stm32_adc_validate_trigger() - validate trigger for stm32 adc
1476  * @indio_dev: IIO device
1477  * @trig: new trigger
1478  *
1479  * Returns: 0 if trig matches one of the triggers registered by stm32 adc
1480  * driver, -EINVAL otherwise.
1481  */
1482 static int stm32_adc_validate_trigger(struct iio_dev *indio_dev,
1483 				      struct iio_trigger *trig)
1484 {
1485 	return stm32_adc_get_trig_extsel(indio_dev, trig) < 0 ? -EINVAL : 0;
1486 }
1487
1488 static int stm32_adc_set_watermark(struct iio_dev *indio_dev, unsigned int val)
1489 {
1490 	struct stm32_adc *adc = iio_priv(indio_dev);
1491 	unsigned int watermark = STM32_DMA_BUFFER_SIZE / 2;
1492 	unsigned int rx_buf_sz = STM32_DMA_BUFFER_SIZE;
1493
1494 	/*
1495 	 * dma cyclic transfers are used, buffer is split into two periods.
1496 	 * There should be:
1497 	 * - always one buffer (period) dma is working on
1498 	 * - one buffer (period) driver can push data.
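	 * Worked example (assuming PAGE_SIZE = 4096, so STM32_DMA_BUFFER_SIZE =
	 * 4096): val = 100 samples with num_conv = 2 gives watermark =
	 * min(2048, 100 * 2) = 200 bytes, hence rx_buf_sz =
	 * min(4096, 200 * 2 * 2) = 800 bytes in the code below.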
1499 */ 1500 watermark = min(watermark, val * (unsigned)(sizeof(u16))); 1501 adc->rx_buf_sz = min(rx_buf_sz, watermark * 2 * adc->num_conv); 1502 1503 return 0; 1504 } 1505 1506 static int stm32_adc_update_scan_mode(struct iio_dev *indio_dev, 1507 const unsigned long *scan_mask) 1508 { 1509 struct stm32_adc *adc = iio_priv(indio_dev); 1510 struct device *dev = indio_dev->dev.parent; 1511 int ret; 1512 1513 ret = pm_runtime_resume_and_get(dev); 1514 if (ret < 0) 1515 return ret; 1516 1517 adc->num_conv = bitmap_weight(scan_mask, indio_dev->masklength); 1518 1519 ret = stm32_adc_conf_scan_seq(indio_dev, scan_mask); 1520 pm_runtime_mark_last_busy(dev); 1521 pm_runtime_put_autosuspend(dev); 1522 1523 return ret; 1524 } 1525 1526 static int stm32_adc_of_xlate(struct iio_dev *indio_dev, 1527 const struct of_phandle_args *iiospec) 1528 { 1529 int i; 1530 1531 for (i = 0; i < indio_dev->num_channels; i++) 1532 if (indio_dev->channels[i].channel == iiospec->args[0]) 1533 return i; 1534 1535 return -EINVAL; 1536 } 1537 1538 /** 1539 * stm32_adc_debugfs_reg_access - read or write register value 1540 * @indio_dev: IIO device structure 1541 * @reg: register offset 1542 * @writeval: value to write 1543 * @readval: value to read 1544 * 1545 * To read a value from an ADC register: 1546 * echo [ADC reg offset] > direct_reg_access 1547 * cat direct_reg_access 1548 * 1549 * To write a value in a ADC register: 1550 * echo [ADC_reg_offset] [value] > direct_reg_access 1551 */ 1552 static int stm32_adc_debugfs_reg_access(struct iio_dev *indio_dev, 1553 unsigned reg, unsigned writeval, 1554 unsigned *readval) 1555 { 1556 struct stm32_adc *adc = iio_priv(indio_dev); 1557 struct device *dev = indio_dev->dev.parent; 1558 int ret; 1559 1560 ret = pm_runtime_resume_and_get(dev); 1561 if (ret < 0) 1562 return ret; 1563 1564 if (!readval) 1565 stm32_adc_writel(adc, reg, writeval); 1566 else 1567 *readval = stm32_adc_readl(adc, reg); 1568 1569 pm_runtime_mark_last_busy(dev); 1570 pm_runtime_put_autosuspend(dev); 1571 1572 return 0; 1573 } 1574 1575 static const struct iio_info stm32_adc_iio_info = { 1576 .read_raw = stm32_adc_read_raw, 1577 .validate_trigger = stm32_adc_validate_trigger, 1578 .hwfifo_set_watermark = stm32_adc_set_watermark, 1579 .update_scan_mode = stm32_adc_update_scan_mode, 1580 .debugfs_reg_access = stm32_adc_debugfs_reg_access, 1581 .of_xlate = stm32_adc_of_xlate, 1582 }; 1583 1584 static unsigned int stm32_adc_dma_residue(struct stm32_adc *adc) 1585 { 1586 struct dma_tx_state state; 1587 enum dma_status status; 1588 1589 status = dmaengine_tx_status(adc->dma_chan, 1590 adc->dma_chan->cookie, 1591 &state); 1592 if (status == DMA_IN_PROGRESS) { 1593 /* Residue is size in bytes from end of buffer */ 1594 unsigned int i = adc->rx_buf_sz - state.residue; 1595 unsigned int size; 1596 1597 /* Return available bytes */ 1598 if (i >= adc->bufi) 1599 size = i - adc->bufi; 1600 else 1601 size = adc->rx_buf_sz + i - adc->bufi; 1602 1603 return size; 1604 } 1605 1606 return 0; 1607 } 1608 1609 static void stm32_adc_dma_buffer_done(void *data) 1610 { 1611 struct iio_dev *indio_dev = data; 1612 struct stm32_adc *adc = iio_priv(indio_dev); 1613 int residue = stm32_adc_dma_residue(adc); 1614 1615 /* 1616 * In DMA mode the trigger services of IIO are not used 1617 * (e.g. no call to iio_trigger_poll). 1618 * Calling irq handler associated to the hardware trigger is not 1619 * relevant as the conversions have already been done. Data 1620 * transfers are performed directly in DMA callback instead. 
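 * (For instance, with two enabled channels and no timestamp, scan_bytes is 4,
 * so a residue of 400 bytes reported for a half-buffer period pushes
 * 100 scans in the loop below.)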
1621 * This implementation avoids to call trigger irq handler that 1622 * may sleep, in an atomic context (DMA irq handler context). 1623 */ 1624 dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi); 1625 1626 while (residue >= indio_dev->scan_bytes) { 1627 u16 *buffer = (u16 *)&adc->rx_buf[adc->bufi]; 1628 1629 iio_push_to_buffers(indio_dev, buffer); 1630 1631 residue -= indio_dev->scan_bytes; 1632 adc->bufi += indio_dev->scan_bytes; 1633 if (adc->bufi >= adc->rx_buf_sz) 1634 adc->bufi = 0; 1635 } 1636 } 1637 1638 static int stm32_adc_dma_start(struct iio_dev *indio_dev) 1639 { 1640 struct stm32_adc *adc = iio_priv(indio_dev); 1641 struct dma_async_tx_descriptor *desc; 1642 dma_cookie_t cookie; 1643 int ret; 1644 1645 if (!adc->dma_chan) 1646 return 0; 1647 1648 dev_dbg(&indio_dev->dev, "%s size=%d watermark=%d\n", __func__, 1649 adc->rx_buf_sz, adc->rx_buf_sz / 2); 1650 1651 /* Prepare a DMA cyclic transaction */ 1652 desc = dmaengine_prep_dma_cyclic(adc->dma_chan, 1653 adc->rx_dma_buf, 1654 adc->rx_buf_sz, adc->rx_buf_sz / 2, 1655 DMA_DEV_TO_MEM, 1656 DMA_PREP_INTERRUPT); 1657 if (!desc) 1658 return -EBUSY; 1659 1660 desc->callback = stm32_adc_dma_buffer_done; 1661 desc->callback_param = indio_dev; 1662 1663 cookie = dmaengine_submit(desc); 1664 ret = dma_submit_error(cookie); 1665 if (ret) { 1666 dmaengine_terminate_sync(adc->dma_chan); 1667 return ret; 1668 } 1669 1670 /* Issue pending DMA requests */ 1671 dma_async_issue_pending(adc->dma_chan); 1672 1673 return 0; 1674 } 1675 1676 static int stm32_adc_buffer_postenable(struct iio_dev *indio_dev) 1677 { 1678 struct stm32_adc *adc = iio_priv(indio_dev); 1679 struct device *dev = indio_dev->dev.parent; 1680 int ret; 1681 1682 ret = pm_runtime_resume_and_get(dev); 1683 if (ret < 0) 1684 return ret; 1685 1686 ret = stm32_adc_set_trig(indio_dev, indio_dev->trig); 1687 if (ret) { 1688 dev_err(&indio_dev->dev, "Can't set trigger\n"); 1689 goto err_pm_put; 1690 } 1691 1692 ret = stm32_adc_dma_start(indio_dev); 1693 if (ret) { 1694 dev_err(&indio_dev->dev, "Can't start dma\n"); 1695 goto err_clr_trig; 1696 } 1697 1698 /* Reset adc buffer index */ 1699 adc->bufi = 0; 1700 1701 stm32_adc_ovr_irq_enable(adc); 1702 1703 if (!adc->dma_chan) 1704 stm32_adc_conv_irq_enable(adc); 1705 1706 adc->cfg->start_conv(indio_dev, !!adc->dma_chan); 1707 1708 return 0; 1709 1710 err_clr_trig: 1711 stm32_adc_set_trig(indio_dev, NULL); 1712 err_pm_put: 1713 pm_runtime_mark_last_busy(dev); 1714 pm_runtime_put_autosuspend(dev); 1715 1716 return ret; 1717 } 1718 1719 static int stm32_adc_buffer_predisable(struct iio_dev *indio_dev) 1720 { 1721 struct stm32_adc *adc = iio_priv(indio_dev); 1722 struct device *dev = indio_dev->dev.parent; 1723 1724 adc->cfg->stop_conv(indio_dev); 1725 if (!adc->dma_chan) 1726 stm32_adc_conv_irq_disable(adc); 1727 1728 stm32_adc_ovr_irq_disable(adc); 1729 1730 if (adc->dma_chan) 1731 dmaengine_terminate_sync(adc->dma_chan); 1732 1733 if (stm32_adc_set_trig(indio_dev, NULL)) 1734 dev_err(&indio_dev->dev, "Can't clear trigger\n"); 1735 1736 pm_runtime_mark_last_busy(dev); 1737 pm_runtime_put_autosuspend(dev); 1738 1739 return 0; 1740 } 1741 1742 static const struct iio_buffer_setup_ops stm32_adc_buffer_setup_ops = { 1743 .postenable = &stm32_adc_buffer_postenable, 1744 .predisable = &stm32_adc_buffer_predisable, 1745 }; 1746 1747 static irqreturn_t stm32_adc_trigger_handler(int irq, void *p) 1748 { 1749 struct iio_poll_func *pf = p; 1750 struct iio_dev *indio_dev = pf->indio_dev; 1751 struct stm32_adc *adc = iio_priv(indio_dev); 
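	/*
	 * Illustrative note: adc->buffer[] has been filled by the EOC interrupt
	 * handler with num_conv samples. iio_push_to_buffers_with_timestamp()
	 * below relies on the buffer being 8-byte aligned and large enough for
	 * the optional s64 timestamp to follow the samples, e.g. for two
	 * channels: [s0][s1][pad][pad][8-byte timestamp].
	 */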
1752 1753 dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi); 1754 1755 /* reset buffer index */ 1756 adc->bufi = 0; 1757 iio_push_to_buffers_with_timestamp(indio_dev, adc->buffer, 1758 pf->timestamp); 1759 iio_trigger_notify_done(indio_dev->trig); 1760 1761 /* re-enable eoc irq */ 1762 stm32_adc_conv_irq_enable(adc); 1763 1764 return IRQ_HANDLED; 1765 } 1766 1767 static const struct iio_chan_spec_ext_info stm32_adc_ext_info[] = { 1768 IIO_ENUM("trigger_polarity", IIO_SHARED_BY_ALL, &stm32_adc_trig_pol), 1769 { 1770 .name = "trigger_polarity_available", 1771 .shared = IIO_SHARED_BY_ALL, 1772 .read = iio_enum_available_read, 1773 .private = (uintptr_t)&stm32_adc_trig_pol, 1774 }, 1775 {}, 1776 }; 1777 1778 static int stm32_adc_of_get_resolution(struct iio_dev *indio_dev) 1779 { 1780 struct device_node *node = indio_dev->dev.of_node; 1781 struct stm32_adc *adc = iio_priv(indio_dev); 1782 unsigned int i; 1783 u32 res; 1784 1785 if (of_property_read_u32(node, "assigned-resolution-bits", &res)) 1786 res = adc->cfg->adc_info->resolutions[0]; 1787 1788 for (i = 0; i < adc->cfg->adc_info->num_res; i++) 1789 if (res == adc->cfg->adc_info->resolutions[i]) 1790 break; 1791 if (i >= adc->cfg->adc_info->num_res) { 1792 dev_err(&indio_dev->dev, "Bad resolution: %u bits\n", res); 1793 return -EINVAL; 1794 } 1795 1796 dev_dbg(&indio_dev->dev, "Using %u bits resolution\n", res); 1797 adc->res = i; 1798 1799 return 0; 1800 } 1801 1802 static void stm32_adc_smpr_init(struct stm32_adc *adc, int channel, u32 smp_ns) 1803 { 1804 const struct stm32_adc_regs *smpr = &adc->cfg->regs->smp_bits[channel]; 1805 u32 period_ns, shift = smpr->shift, mask = smpr->mask; 1806 unsigned int smp, r = smpr->reg; 1807 1808 /* 1809 * For vrefint channel, ensure that the sampling time cannot 1810 * be lower than the one specified in the datasheet 1811 */ 1812 if (channel == adc->int_ch[STM32_ADC_INT_CH_VREFINT]) 1813 smp_ns = max(smp_ns, adc->cfg->ts_vrefint_ns); 1814 1815 /* Determine sampling time (ADC clock cycles) */ 1816 period_ns = NSEC_PER_SEC / adc->common->rate; 1817 for (smp = 0; smp <= STM32_ADC_MAX_SMP; smp++) 1818 if ((period_ns * adc->cfg->smp_cycles[smp]) >= smp_ns) 1819 break; 1820 if (smp > STM32_ADC_MAX_SMP) 1821 smp = STM32_ADC_MAX_SMP; 1822 1823 /* pre-build sampling time registers (e.g. 
static void stm32_adc_chan_init_one(struct iio_dev *indio_dev,
				    struct iio_chan_spec *chan, u32 vinp,
				    u32 vinn, int scan_index, bool differential)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	char *name = adc->chan_name[vinp];

	chan->type = IIO_VOLTAGE;
	chan->channel = vinp;
	if (differential) {
		chan->differential = 1;
		chan->channel2 = vinn;
		snprintf(name, STM32_ADC_CH_SZ, "in%d-in%d", vinp, vinn);
	} else {
		snprintf(name, STM32_ADC_CH_SZ, "in%d", vinp);
	}
	chan->datasheet_name = name;
	chan->scan_index = scan_index;
	chan->indexed = 1;
	if (chan->channel == adc->int_ch[STM32_ADC_INT_CH_VREFINT])
		chan->info_mask_separate = BIT(IIO_CHAN_INFO_PROCESSED);
	else
		chan->info_mask_separate = BIT(IIO_CHAN_INFO_RAW);
	chan->info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE) |
					 BIT(IIO_CHAN_INFO_OFFSET);
	chan->scan_type.sign = 'u';
	chan->scan_type.realbits = adc->cfg->adc_info->resolutions[adc->res];
	chan->scan_type.storagebits = 16;
	chan->ext_info = stm32_adc_ext_info;

	/* pre-build selected channels mask */
	adc->pcsel |= BIT(chan->channel);
	if (differential) {
		/* pre-build diff channels mask */
		adc->difsel |= BIT(chan->channel);
		/* Also add negative input to pre-selected channels */
		adc->pcsel |= BIT(chan->channel2);
	}
}

static int stm32_adc_get_legacy_chan_count(struct iio_dev *indio_dev, struct stm32_adc *adc)
{
	struct device_node *node = indio_dev->dev.of_node;
	const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
	int num_channels = 0, ret;

	ret = of_property_count_u32_elems(node, "st,adc-channels");
	if (ret > adc_info->max_channels) {
		dev_err(&indio_dev->dev, "Bad st,adc-channels?\n");
		return -EINVAL;
	} else if (ret > 0) {
		num_channels += ret;
	}

	ret = of_property_count_elems_of_size(node, "st,adc-diff-channels",
					      sizeof(struct stm32_adc_diff_channel));
	if (ret > adc_info->max_channels) {
		dev_err(&indio_dev->dev, "Bad st,adc-diff-channels?\n");
		return -EINVAL;
	} else if (ret > 0) {
		adc->num_diff = ret;
		num_channels += ret;
	}

	/* Optional sample time is provided either for each, or all channels */
	ret = of_property_count_u32_elems(node, "st,min-sample-time-nsecs");
	if (ret > 1 && ret != num_channels) {
		dev_err(&indio_dev->dev, "Invalid st,min-sample-time-nsecs\n");
		return -EINVAL;
	}

	return num_channels;
}

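/*
 * stm32_adc_legacy_chan_init() - Parse the legacy "st,adc-channels" and
 * "st,adc-diff-channels" properties, initialize one iio_chan_spec per
 * channel and apply its minimum sampling time.
 */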
static int stm32_adc_legacy_chan_init(struct iio_dev *indio_dev,
				      struct stm32_adc *adc,
				      struct iio_chan_spec *channels)
{
	struct device_node *node = indio_dev->dev.of_node;
	const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
	struct stm32_adc_diff_channel diff[STM32_ADC_CH_MAX];
	u32 num_diff = adc->num_diff;
	int size = num_diff * sizeof(*diff) / sizeof(u32);
	int scan_index = 0, val, ret, i;
	struct property *prop;
	const __be32 *cur;
	u32 smp = 0;

	if (num_diff) {
		ret = of_property_read_u32_array(node, "st,adc-diff-channels",
						 (u32 *)diff, size);
		if (ret) {
			dev_err(&indio_dev->dev, "Failed to get diff channels %d\n", ret);
			return ret;
		}

		for (i = 0; i < num_diff; i++) {
			if (diff[i].vinp >= adc_info->max_channels ||
			    diff[i].vinn >= adc_info->max_channels) {
				dev_err(&indio_dev->dev, "Invalid channel in%d-in%d\n",
					diff[i].vinp, diff[i].vinn);
				return -EINVAL;
			}

			stm32_adc_chan_init_one(indio_dev, &channels[scan_index],
						diff[i].vinp, diff[i].vinn,
						scan_index, true);
			scan_index++;
		}
	}

	of_property_for_each_u32(node, "st,adc-channels", prop, cur, val) {
		if (val >= adc_info->max_channels) {
			dev_err(&indio_dev->dev, "Invalid channel %d\n", val);
			return -EINVAL;
		}

		/* Channel can't be configured both as single-ended & diff */
		for (i = 0; i < num_diff; i++) {
			if (val == diff[i].vinp) {
				dev_err(&indio_dev->dev, "channel %d misconfigured\n", val);
				return -EINVAL;
			}
		}
		stm32_adc_chan_init_one(indio_dev, &channels[scan_index], val,
					0, scan_index, false);
		scan_index++;
	}

	for (i = 0; i < scan_index; i++) {
		/*
		 * Using of_property_read_u32_index(), the smp value is only
		 * modified if a valid u32 value can be decoded. This allows
		 * getting either no value, one shared value for all indexes,
		 * or one value per channel.
		 */
		of_property_read_u32_index(node, "st,min-sample-time-nsecs", i, &smp);

		/* Prepare sampling time settings */
		stm32_adc_smpr_init(adc, channels[i].channel, smp);
	}

	return scan_index;
}

static int stm32_adc_populate_int_ch(struct iio_dev *indio_dev, const char *ch_name,
				     int chan)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	u16 vrefint;
	int i, ret;

	for (i = 0; i < STM32_ADC_INT_CH_NB; i++) {
		if (!strncmp(stm32_adc_ic[i].name, ch_name, STM32_ADC_CH_SZ)) {
			adc->int_ch[i] = chan;

			if (stm32_adc_ic[i].idx != STM32_ADC_INT_CH_VREFINT)
				continue;

			/* Get calibration data for vrefint channel */
			ret = nvmem_cell_read_u16(&indio_dev->dev, "vrefint", &vrefint);
			if (ret && ret != -ENOENT) {
				return dev_err_probe(&indio_dev->dev, ret,
						     "nvmem access error\n");
			}
			if (ret == -ENOENT)
				dev_dbg(&indio_dev->dev, "vrefint calibration not found\n");
			else
				adc->vrefint.vrefint_cal = vrefint;
		}
	}

	return 0;
}

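/*
 * stm32_adc_generic_chan_init() - Parse per-channel child nodes ("reg",
 * optional "label", "diff-channels" and "st,min-sample-time-ns") and
 * initialize the corresponding iio_chan_spec entries.
 */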
static int stm32_adc_generic_chan_init(struct iio_dev *indio_dev,
				       struct stm32_adc *adc,
				       struct iio_chan_spec *channels)
{
	struct device_node *node = indio_dev->dev.of_node;
	const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
	struct device_node *child;
	const char *name;
	int val, scan_index = 0, ret;
	bool differential;
	u32 vin[2];

	for_each_available_child_of_node(node, child) {
		ret = of_property_read_u32(child, "reg", &val);
		if (ret) {
			dev_err(&indio_dev->dev, "Missing channel index %d\n", ret);
			goto err;
		}

		ret = of_property_read_string(child, "label", &name);
		/* label is optional */
		if (!ret) {
			if (strlen(name) >= STM32_ADC_CH_SZ) {
				dev_err(&indio_dev->dev, "Label %s exceeds %d characters\n",
					name, STM32_ADC_CH_SZ);
				ret = -EINVAL;
				goto err;
			}
			strncpy(adc->chan_name[val], name, STM32_ADC_CH_SZ);
			ret = stm32_adc_populate_int_ch(indio_dev, name, val);
			if (ret)
				goto err;
		} else if (ret != -EINVAL) {
			dev_err(&indio_dev->dev, "Invalid label %d\n", ret);
			goto err;
		}

		if (val >= adc_info->max_channels) {
			dev_err(&indio_dev->dev, "Invalid channel %d\n", val);
			ret = -EINVAL;
			goto err;
		}

		differential = false;
		ret = of_property_read_u32_array(child, "diff-channels", vin, 2);
		/* diff-channels is optional */
		if (!ret) {
			differential = true;
			if (vin[0] != val || vin[1] >= adc_info->max_channels) {
				dev_err(&indio_dev->dev, "Invalid channel in%d-in%d\n",
					vin[0], vin[1]);
				ret = -EINVAL;
				goto err;
			}
		} else if (ret != -EINVAL) {
			dev_err(&indio_dev->dev, "Invalid diff-channels property %d\n", ret);
			goto err;
		}

		stm32_adc_chan_init_one(indio_dev, &channels[scan_index], val,
					vin[1], scan_index, differential);

		ret = of_property_read_u32(child, "st,min-sample-time-ns", &val);
		/* st,min-sample-time-ns is optional */
		if (!ret) {
			stm32_adc_smpr_init(adc, channels[scan_index].channel, val);
			if (differential)
				stm32_adc_smpr_init(adc, vin[1], val);
		} else if (ret != -EINVAL) {
			dev_err(&indio_dev->dev, "Invalid st,min-sample-time-ns property %d\n",
				ret);
			goto err;
		}

		scan_index++;
	}

	return scan_index;

err:
	of_node_put(child);

	return ret;
}

static int stm32_adc_chan_of_init(struct iio_dev *indio_dev, bool timestamping)
{
	struct device_node *node = indio_dev->dev.of_node;
	struct stm32_adc *adc = iio_priv(indio_dev);
	const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
	struct iio_chan_spec *channels;
	int scan_index = 0, num_channels = 0, ret, i;
	bool legacy = false;

	for (i = 0; i < STM32_ADC_INT_CH_NB; i++)
		adc->int_ch[i] = STM32_ADC_INT_CH_NONE;

	num_channels = of_get_available_child_count(node);
	/* If no channels have been found, fall back to the legacy channel properties. */
	if (!num_channels) {
		legacy = true;

		ret = stm32_adc_get_legacy_chan_count(indio_dev, adc);
		if (!ret) {
			dev_err(indio_dev->dev.parent, "No channel found\n");
			return -ENODATA;
		} else if (ret < 0) {
			return ret;
		}

		num_channels = ret;
	}

	if (num_channels > adc_info->max_channels) {
		dev_err(&indio_dev->dev, "Channel number [%d] exceeds %d\n",
			num_channels, adc_info->max_channels);
		return -EINVAL;
	}

	if (timestamping)
		num_channels++;

	channels = devm_kcalloc(&indio_dev->dev, num_channels,
				sizeof(struct iio_chan_spec), GFP_KERNEL);
	if (!channels)
		return -ENOMEM;

	if (legacy)
		ret = stm32_adc_legacy_chan_init(indio_dev, adc, channels);
	else
		ret = stm32_adc_generic_chan_init(indio_dev, adc, channels);
	if (ret < 0)
		return ret;
	scan_index = ret;

	if (timestamping) {
		struct iio_chan_spec *timestamp = &channels[scan_index];

		timestamp->type = IIO_TIMESTAMP;
		timestamp->channel = -1;
		timestamp->scan_index = scan_index;
		timestamp->scan_type.sign = 's';
		timestamp->scan_type.realbits = 64;
		timestamp->scan_type.storagebits = 64;

		scan_index++;
	}

	indio_dev->num_channels = scan_index;
	indio_dev->channels = channels;

	return 0;
}

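/*
 * stm32_adc_dma_request() - Request the optional "rx" DMA channel, allocate
 * the coherent receive buffer and configure the slave to read the ADC data
 * register. DMA is optional: on -ENODEV the driver falls back to IRQ mode.
 */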
static int stm32_adc_dma_request(struct device *dev, struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	struct dma_slave_config config;
	int ret;

	adc->dma_chan = dma_request_chan(dev, "rx");
	if (IS_ERR(adc->dma_chan)) {
		ret = PTR_ERR(adc->dma_chan);
		if (ret != -ENODEV)
			return dev_err_probe(dev, ret,
					     "DMA channel request failed\n");

		/* DMA is optional: fall back to IRQ mode */
		adc->dma_chan = NULL;
		return 0;
	}

	adc->rx_buf = dma_alloc_coherent(adc->dma_chan->device->dev,
					 STM32_DMA_BUFFER_SIZE,
					 &adc->rx_dma_buf, GFP_KERNEL);
	if (!adc->rx_buf) {
		ret = -ENOMEM;
		goto err_release;
	}

	/* Configure DMA channel to read data register */
	memset(&config, 0, sizeof(config));
	config.src_addr = (dma_addr_t)adc->common->phys_base;
	config.src_addr += adc->offset + adc->cfg->regs->dr;
	config.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;

	ret = dmaengine_slave_config(adc->dma_chan, &config);
	if (ret)
		goto err_free;

	return 0;

err_free:
	dma_free_coherent(adc->dma_chan->device->dev, STM32_DMA_BUFFER_SIZE,
			  adc->rx_buf, adc->rx_dma_buf);
err_release:
	dma_release_channel(adc->dma_chan);

	return ret;
}

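/*
 * stm32_adc_probe() - Instantiate one ADC: parse the DT resources (reg, irq,
 * clock, optional DMA), build the channel list, set up the triggered buffer
 * and bring the hardware up under runtime PM before IIO registration.
 */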
static int stm32_adc_probe(struct platform_device *pdev)
{
	struct iio_dev *indio_dev;
	struct device *dev = &pdev->dev;
	irqreturn_t (*handler)(int irq, void *p) = NULL;
	struct stm32_adc *adc;
	bool timestamping = false;
	int ret;

	if (!pdev->dev.of_node)
		return -ENODEV;

	indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*adc));
	if (!indio_dev)
		return -ENOMEM;

	adc = iio_priv(indio_dev);
	adc->common = dev_get_drvdata(pdev->dev.parent);
	spin_lock_init(&adc->lock);
	init_completion(&adc->completion);
	adc->cfg = (const struct stm32_adc_cfg *)
		of_match_device(dev->driver->of_match_table, dev)->data;

	indio_dev->name = dev_name(&pdev->dev);
	indio_dev->dev.of_node = pdev->dev.of_node;
	indio_dev->info = &stm32_adc_iio_info;
	indio_dev->modes = INDIO_DIRECT_MODE | INDIO_HARDWARE_TRIGGERED;

	platform_set_drvdata(pdev, indio_dev);

	ret = of_property_read_u32(pdev->dev.of_node, "reg", &adc->offset);
	if (ret != 0) {
		dev_err(&pdev->dev, "missing reg property\n");
		return -EINVAL;
	}

	adc->irq = platform_get_irq(pdev, 0);
	if (adc->irq < 0)
		return adc->irq;

	ret = devm_request_threaded_irq(&pdev->dev, adc->irq, stm32_adc_isr,
					stm32_adc_threaded_isr,
					0, pdev->name, indio_dev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		return ret;
	}

	adc->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(adc->clk)) {
		ret = PTR_ERR(adc->clk);
		if (ret == -ENOENT && !adc->cfg->clk_required) {
			adc->clk = NULL;
		} else {
			dev_err(&pdev->dev, "Can't get clock\n");
			return ret;
		}
	}

	ret = stm32_adc_of_get_resolution(indio_dev);
	if (ret < 0)
		return ret;

	ret = stm32_adc_dma_request(dev, indio_dev);
	if (ret < 0)
		return ret;

	if (!adc->dma_chan) {
		/* For PIO mode only, iio_pollfunc_store_time stores a timestamp
		 * in the primary trigger IRQ handler and stm32_adc_trigger_handler
		 * runs in the IRQ thread to push out buffer along with timestamp.
		 */
		handler = &stm32_adc_trigger_handler;
		timestamping = true;
	}

	ret = stm32_adc_chan_of_init(indio_dev, timestamping);
	if (ret < 0)
		goto err_dma_disable;

	ret = iio_triggered_buffer_setup(indio_dev,
					 &iio_pollfunc_store_time, handler,
					 &stm32_adc_buffer_setup_ops);
	if (ret) {
		dev_err(&pdev->dev, "buffer setup failed\n");
		goto err_dma_disable;
	}

	/* Get stm32-adc-core PM online */
	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_set_autosuspend_delay(dev, STM32_ADC_HW_STOP_DELAY_MS);
	pm_runtime_use_autosuspend(dev);
	pm_runtime_enable(dev);

	ret = stm32_adc_hw_start(dev);
	if (ret)
		goto err_buffer_cleanup;

	ret = iio_device_register(indio_dev);
	if (ret) {
		dev_err(&pdev->dev, "iio dev register failed\n");
		goto err_hw_stop;
	}

	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return 0;

err_hw_stop:
	stm32_adc_hw_stop(dev);

err_buffer_cleanup:
	pm_runtime_disable(dev);
	pm_runtime_set_suspended(dev);
	pm_runtime_put_noidle(dev);
	iio_triggered_buffer_cleanup(indio_dev);

err_dma_disable:
	if (adc->dma_chan) {
		dma_free_coherent(adc->dma_chan->device->dev,
				  STM32_DMA_BUFFER_SIZE,
				  adc->rx_buf, adc->rx_dma_buf);
		dma_release_channel(adc->dma_chan);
	}

	return ret;
}

static int stm32_adc_remove(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct stm32_adc *adc = iio_priv(indio_dev);

	pm_runtime_get_sync(&pdev->dev);
	iio_device_unregister(indio_dev);
	stm32_adc_hw_stop(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	pm_runtime_set_suspended(&pdev->dev);
	pm_runtime_put_noidle(&pdev->dev);
	iio_triggered_buffer_cleanup(indio_dev);
	if (adc->dma_chan) {
		dma_free_coherent(adc->dma_chan->device->dev,
				  STM32_DMA_BUFFER_SIZE,
				  adc->rx_buf, adc->rx_dma_buf);
		dma_release_channel(adc->dma_chan);
	}

	return 0;
}

#if defined(CONFIG_PM_SLEEP)
static int stm32_adc_suspend(struct device *dev)
{
	struct iio_dev *indio_dev = dev_get_drvdata(dev);

	if (iio_buffer_enabled(indio_dev))
		stm32_adc_buffer_predisable(indio_dev);

	return pm_runtime_force_suspend(dev);
}

static int stm32_adc_resume(struct device *dev)
{
	struct iio_dev *indio_dev = dev_get_drvdata(dev);
	int ret;

	ret = pm_runtime_force_resume(dev);
	if (ret < 0)
		return ret;

	if (!iio_buffer_enabled(indio_dev))
		return 0;

	ret = stm32_adc_update_scan_mode(indio_dev,
					 indio_dev->active_scan_mask);
	if (ret < 0)
		return ret;

	return stm32_adc_buffer_postenable(indio_dev);
}
#endif

#if defined(CONFIG_PM)
static int stm32_adc_runtime_suspend(struct device *dev)
{
	return stm32_adc_hw_stop(dev);
}

static int stm32_adc_runtime_resume(struct device *dev)
{
	return stm32_adc_hw_start(dev);
}
#endif

static const struct dev_pm_ops stm32_adc_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_adc_suspend, stm32_adc_resume)
	SET_RUNTIME_PM_OPS(stm32_adc_runtime_suspend, stm32_adc_runtime_resume,
			   NULL)
};

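/* Per-compatible configuration: register map, trigger list and conversion hooks */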
static const struct stm32_adc_cfg stm32f4_adc_cfg = {
	.regs = &stm32f4_adc_regspec,
	.adc_info = &stm32f4_adc_info,
	.trigs = stm32f4_adc_trigs,
	.clk_required = true,
	.start_conv = stm32f4_adc_start_conv,
	.stop_conv = stm32f4_adc_stop_conv,
	.smp_cycles = stm32f4_adc_smp_cycles,
	.irq_clear = stm32f4_adc_irq_clear,
};

static const struct stm32_adc_cfg stm32h7_adc_cfg = {
	.regs = &stm32h7_adc_regspec,
	.adc_info = &stm32h7_adc_info,
	.trigs = stm32h7_adc_trigs,
	.start_conv = stm32h7_adc_start_conv,
	.stop_conv = stm32h7_adc_stop_conv,
	.prepare = stm32h7_adc_prepare,
	.unprepare = stm32h7_adc_unprepare,
	.smp_cycles = stm32h7_adc_smp_cycles,
	.irq_clear = stm32h7_adc_irq_clear,
};

static const struct stm32_adc_cfg stm32mp1_adc_cfg = {
	.regs = &stm32mp1_adc_regspec,
	.adc_info = &stm32h7_adc_info,
	.trigs = stm32h7_adc_trigs,
	.has_vregready = true,
	.start_conv = stm32h7_adc_start_conv,
	.stop_conv = stm32h7_adc_stop_conv,
	.prepare = stm32h7_adc_prepare,
	.unprepare = stm32h7_adc_unprepare,
	.smp_cycles = stm32h7_adc_smp_cycles,
	.irq_clear = stm32h7_adc_irq_clear,
	.ts_vrefint_ns = 4300,
};

static const struct of_device_id stm32_adc_of_match[] = {
	{ .compatible = "st,stm32f4-adc", .data = (void *)&stm32f4_adc_cfg },
	{ .compatible = "st,stm32h7-adc", .data = (void *)&stm32h7_adc_cfg },
	{ .compatible = "st,stm32mp1-adc", .data = (void *)&stm32mp1_adc_cfg },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_adc_of_match);

static struct platform_driver stm32_adc_driver = {
	.probe = stm32_adc_probe,
	.remove = stm32_adc_remove,
	.driver = {
		.name = "stm32-adc",
		.of_match_table = stm32_adc_of_match,
		.pm = &stm32_adc_pm_ops,
	},
};
module_platform_driver(stm32_adc_driver);

MODULE_AUTHOR("Fabrice Gasnier <fabrice.gasnier@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 ADC IIO driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:stm32-adc");