// SPDX-License-Identifier: GPL-2.0
/*
 * This file is part of STM32 ADC driver
 *
 * Copyright (C) 2016, STMicroelectronics - All Rights Reserved
 * Author: Fabrice Gasnier <fabrice.gasnier@st.com>.
 */

#include <linux/clk.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/iio/iio.h>
#include <linux/iio/buffer.h>
#include <linux/iio/timer/stm32-lptim-trigger.h>
#include <linux/iio/timer/stm32-timer-trigger.h>
#include <linux/iio/trigger.h>
#include <linux/iio/trigger_consumer.h>
#include <linux/iio/triggered_buffer.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/nvmem-consumer.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/property.h>

#include "stm32-adc-core.h"

/* Number of linear calibration shadow registers / LINCALRDYW control bits */
#define STM32H7_LINCALFACT_NUM		6

/* BOOST bit must be set on STM32H7 when ADC clock is above 20MHz */
#define STM32H7_BOOST_CLKRATE		20000000UL

#define STM32_ADC_CH_MAX		20	/* max number of channels */
#define STM32_ADC_CH_SZ			16	/* max channel name size */
#define STM32_ADC_MAX_SQ		16	/* SQ1..SQ16 */
#define STM32_ADC_MAX_SMP		7	/* SMPx range is [0..7] */
#define STM32_ADC_TIMEOUT_US		100000
#define STM32_ADC_TIMEOUT	(msecs_to_jiffies(STM32_ADC_TIMEOUT_US / 1000))
#define STM32_ADC_HW_STOP_DELAY_MS	100
#define STM32_ADC_VREFINT_VOLTAGE	3300

#define STM32_DMA_BUFFER_SIZE		PAGE_SIZE

/* External trigger enable */
enum stm32_adc_exten {
	STM32_EXTEN_SWTRIG,
	STM32_EXTEN_HWTRIG_RISING_EDGE,
	STM32_EXTEN_HWTRIG_FALLING_EDGE,
	STM32_EXTEN_HWTRIG_BOTH_EDGES,
};

/* extsel - trigger mux selection value */
enum stm32_adc_extsel {
	STM32_EXT0,
	STM32_EXT1,
	STM32_EXT2,
	STM32_EXT3,
	STM32_EXT4,
	STM32_EXT5,
	STM32_EXT6,
	STM32_EXT7,
	STM32_EXT8,
	STM32_EXT9,
	STM32_EXT10,
	STM32_EXT11,
	STM32_EXT12,
	STM32_EXT13,
	STM32_EXT14,
	STM32_EXT15,
	STM32_EXT16,
	STM32_EXT17,
	STM32_EXT18,
	STM32_EXT19,
	STM32_EXT20,
};

enum stm32_adc_int_ch {
	STM32_ADC_INT_CH_NONE = -1,
	STM32_ADC_INT_CH_VDDCORE,
	STM32_ADC_INT_CH_VDDCPU,
	STM32_ADC_INT_CH_VDDQ_DDR,
	STM32_ADC_INT_CH_VREFINT,
	STM32_ADC_INT_CH_VBAT,
	STM32_ADC_INT_CH_NB,
};

/**
 * struct stm32_adc_ic - ADC internal channels
 * @name: name of the internal channel
 * @idx: internal channel enum index
 */
struct stm32_adc_ic {
	const char *name;
	u32 idx;
};

static const struct stm32_adc_ic stm32_adc_ic[STM32_ADC_INT_CH_NB] = {
	{ "vddcore", STM32_ADC_INT_CH_VDDCORE },
	{ "vddcpu", STM32_ADC_INT_CH_VDDCPU },
	{ "vddq_ddr", STM32_ADC_INT_CH_VDDQ_DDR },
	{ "vrefint", STM32_ADC_INT_CH_VREFINT },
	{ "vbat", STM32_ADC_INT_CH_VBAT },
};

/**
 * struct stm32_adc_trig_info - ADC trigger info
 * @name: name of the trigger, corresponding to its source
 * @extsel: trigger selection
 */
struct stm32_adc_trig_info {
	const char *name;
	enum stm32_adc_extsel extsel;
};

/**
 * struct stm32_adc_calib - optional adc calibration data
 * @lincalfact: Linearity calibration factor
 * @lincal_saved: Indicates that linear calibration factors are saved
 */
struct stm32_adc_calib {
	u32 lincalfact[STM32H7_LINCALFACT_NUM];
	bool lincal_saved;
};

/**
 * struct stm32_adc_regs - stm32 ADC misc registers & bitfield desc
 * @reg: register offset
 * @mask: bitfield mask
 * @shift: left shift
 */
struct stm32_adc_regs {
	int reg;
	int mask;
	int shift;
};

/**
 * struct stm32_adc_vrefint - stm32 ADC internal reference voltage data
 * @vrefint_cal: vrefint calibration value from nvmem
 * @vrefint_data: vrefint actual value
 */
struct stm32_adc_vrefint {
	u32 vrefint_cal;
	u32 vrefint_data;
};

/**
 * struct stm32_adc_regspec - stm32 registers definition
 * @dr: data register offset
 * @ier_eoc: interrupt enable register & eocie bitfield
 * @ier_ovr: interrupt enable register & overrun bitfield
 * @isr_eoc: interrupt status register & eoc bitfield
 * @isr_ovr: interrupt status register & overrun bitfield
 * @sqr: reference to sequence registers array
 * @exten: trigger control register & bitfield
 * @extsel: trigger selection register & bitfield
 * @res: resolution selection register & bitfield
 * @difsel: differential mode selection register & bitfield
 * @smpr: smpr1 & smpr2 registers offset array
 * @smp_bits: smpr1 & smpr2 index and bitfields
 * @or_vddcore: option register & vddcore bitfield
 * @or_vddcpu: option register & vddcpu bitfield
 * @or_vddq_ddr: option register & vddq_ddr bitfield
 * @ccr_vbat: common register & vbat bitfield
 * @ccr_vref: common register & vrefint bitfield
 */
struct stm32_adc_regspec {
	const u32 dr;
	const struct stm32_adc_regs ier_eoc;
	const struct stm32_adc_regs ier_ovr;
	const struct stm32_adc_regs isr_eoc;
	const struct stm32_adc_regs isr_ovr;
	const struct stm32_adc_regs *sqr;
	const struct stm32_adc_regs exten;
	const struct stm32_adc_regs extsel;
	const struct stm32_adc_regs res;
	const struct stm32_adc_regs difsel;
	const u32 smpr[2];
	const struct stm32_adc_regs *smp_bits;
	const struct stm32_adc_regs or_vddcore;
	const struct stm32_adc_regs or_vddcpu;
	const struct stm32_adc_regs or_vddq_ddr;
	const struct stm32_adc_regs ccr_vbat;
	const struct stm32_adc_regs ccr_vref;
};

struct stm32_adc;

/**
 * struct stm32_adc_cfg - stm32 compatible configuration data
 * @regs: registers descriptions
 * @adc_info: per instance input channels definitions
 * @trigs: external trigger sources
 * @clk_required: clock is required
 * @has_vregready: vregready status flag presence
 * @has_boostmode: boost mode support flag
 * @has_linearcal: linear calibration support flag
 * @has_presel: channel preselection support flag
 * @prepare: optional prepare routine (power-up, enable)
 * @start_conv: routine to start conversions
 * @stop_conv: routine to stop conversions
 * @unprepare: optional unprepare routine (disable, power-down)
 * @irq_clear: routine to clear irqs
 * @smp_cycles: programmable sampling time (ADC clock cycles)
 * @ts_int_ch: pointer to array of internal channels' minimum sampling times in ns
 */
struct stm32_adc_cfg {
	const struct stm32_adc_regspec *regs;
	const struct stm32_adc_info *adc_info;
	struct stm32_adc_trig_info *trigs;
	bool clk_required;
	bool has_vregready;
	bool has_boostmode;
	bool has_linearcal;
	bool has_presel;
	int (*prepare)(struct iio_dev *);
	void (*start_conv)(struct iio_dev *, bool dma);
	void (*stop_conv)(struct iio_dev *);
	void (*unprepare)(struct iio_dev *);
	void (*irq_clear)(struct iio_dev *indio_dev, u32 msk);
	const unsigned int *smp_cycles;
	const unsigned int *ts_int_ch;
};
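/*
 * Example (illustrative): each struct stm32_adc_regs entry describes one bit
 * field as a (reg, mask, shift) triple, applied with a read-modify-write
 * sequence, e.g. for the resolution field:
 *
 *	val = stm32_adc_readl(adc, regs->res.reg);
 *	val = (val & ~regs->res.mask) | (adc->res << regs->res.shift);
 *	stm32_adc_writel(adc, regs->res.reg, val);
 *
 * The per-compatible stm32_adc_regspec tables below rely on this scheme so
 * the same code paths can drive the STM32F4, STM32H7 and STM32MP1x layouts.
 */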

/**
 * struct stm32_adc - private data of each ADC IIO instance
 * @common: reference to ADC block common data
 * @offset: ADC instance register offset in ADC block
 * @cfg: compatible configuration data
 * @completion: end of single conversion completion
 * @buffer: data buffer + 8 bytes for timestamp if enabled
 * @clk: clock for this adc instance
 * @irq: interrupt for this adc instance
 * @lock: spinlock
 * @bufi: data buffer index
 * @num_conv: expected number of scan conversions
 * @res: data resolution (e.g. RES bitfield value)
 * @trigger_polarity: external trigger polarity (e.g. exten)
 * @dma_chan: dma channel
 * @rx_buf: dma rx buffer cpu address
 * @rx_dma_buf: dma rx buffer bus address
 * @rx_buf_sz: dma rx buffer size
 * @difsel: bitmask to set single-ended/differential channel
 * @pcsel: bitmask to preselect channels on some devices
 * @smpr_val: sampling time settings (e.g. smpr1 / smpr2)
 * @cal: optional calibration data on some devices
 * @vrefint: internal reference voltage data
 * @chan_name: channel name array
 * @num_diff: number of differential channels
 * @int_ch: internal channel indexes array
 * @nsmps: number of channels with optional sample time
 */
struct stm32_adc {
	struct stm32_adc_common *common;
	u32 offset;
	const struct stm32_adc_cfg *cfg;
	struct completion completion;
	u16 buffer[STM32_ADC_MAX_SQ + 4] __aligned(8);
	struct clk *clk;
	int irq;
	spinlock_t lock;		/* interrupt lock */
	unsigned int bufi;
	unsigned int num_conv;
	u32 res;
	u32 trigger_polarity;
	struct dma_chan *dma_chan;
	u8 *rx_buf;
	dma_addr_t rx_dma_buf;
	unsigned int rx_buf_sz;
	u32 difsel;
	u32 pcsel;
	u32 smpr_val[2];
	struct stm32_adc_calib cal;
	struct stm32_adc_vrefint vrefint;
	char chan_name[STM32_ADC_CH_MAX][STM32_ADC_CH_SZ];
	u32 num_diff;
	int int_ch[STM32_ADC_INT_CH_NB];
	int nsmps;
};

struct stm32_adc_diff_channel {
	u32 vinp;
	u32 vinn;
};

/**
 * struct stm32_adc_info - stm32 ADC, per instance config data
 * @max_channels: Number of channels
 * @resolutions: available resolutions
 * @num_res: number of available resolutions
 */
struct stm32_adc_info {
	int max_channels;
	const unsigned int *resolutions;
	const unsigned int num_res;
};

static const unsigned int stm32f4_adc_resolutions[] = {
	/* sorted values so the index matches RES[1:0] in STM32F4_ADC_CR1 */
	12, 10, 8, 6,
};

/* stm32f4 can have up to 16 channels */
static const struct stm32_adc_info stm32f4_adc_info = {
	.max_channels = 16,
	.resolutions = stm32f4_adc_resolutions,
	.num_res = ARRAY_SIZE(stm32f4_adc_resolutions),
};

static const unsigned int stm32h7_adc_resolutions[] = {
	/* sorted values so the index matches RES[2:0] in STM32H7_ADC_CFGR */
	16, 14, 12, 10, 8,
};

/* stm32h7 can have up to 20 channels */
static const struct stm32_adc_info stm32h7_adc_info = {
	.max_channels = STM32_ADC_CH_MAX,
	.resolutions = stm32h7_adc_resolutions,
	.num_res = ARRAY_SIZE(stm32h7_adc_resolutions),
};

/* stm32mp13 can have up to 19 channels */
static const struct stm32_adc_info stm32mp13_adc_info = {
	.max_channels = 19,
	.resolutions = stm32f4_adc_resolutions,
	.num_res = ARRAY_SIZE(stm32f4_adc_resolutions),
};

/*
 * stm32f4_sq - describe regular sequence registers
 * - L: sequence len (register & bit field)
 * - SQ1..SQ16: sequence entries (register & bit field)
 */
static const struct stm32_adc_regs stm32f4_sq[STM32_ADC_MAX_SQ + 1] = {
	/* L: len bit field description to be kept as first element */
	{ STM32F4_ADC_SQR1, GENMASK(23, 20), 20 },
	/* SQ1..SQ16 registers & bit fields (reg, mask, shift) */
	{ STM32F4_ADC_SQR3, GENMASK(4, 0), 0 },
	{ STM32F4_ADC_SQR3, GENMASK(9, 5), 5 },
	{ STM32F4_ADC_SQR3, GENMASK(14, 10), 10 },
	{ STM32F4_ADC_SQR3, GENMASK(19, 15), 15 },
	{ STM32F4_ADC_SQR3, GENMASK(24, 20), 20 },
	{ STM32F4_ADC_SQR3, GENMASK(29, 25), 25 },
	{ STM32F4_ADC_SQR2, GENMASK(4, 0), 0 },
	{ STM32F4_ADC_SQR2, GENMASK(9, 5), 5 },
	{ STM32F4_ADC_SQR2, GENMASK(14, 10), 10 },
	{ STM32F4_ADC_SQR2, GENMASK(19, 15), 15 },
	{ STM32F4_ADC_SQR2, GENMASK(24, 20), 20 },
	{ STM32F4_ADC_SQR2, GENMASK(29, 25), 25 },
	{ STM32F4_ADC_SQR1, GENMASK(4, 0), 0 },
	{ STM32F4_ADC_SQR1, GENMASK(9, 5), 5 },
	{ STM32F4_ADC_SQR1, GENMASK(14, 10), 10 },
	{ STM32F4_ADC_SQR1, GENMASK(19, 15), 15 },
};

/* STM32F4 external trigger sources for all instances */
static struct stm32_adc_trig_info stm32f4_adc_trigs[] = {
	{ TIM1_CH1, STM32_EXT0 },
	{ TIM1_CH2, STM32_EXT1 },
	{ TIM1_CH3, STM32_EXT2 },
	{ TIM2_CH2, STM32_EXT3 },
	{ TIM2_CH3, STM32_EXT4 },
	{ TIM2_CH4, STM32_EXT5 },
	{ TIM2_TRGO, STM32_EXT6 },
	{ TIM3_CH1, STM32_EXT7 },
	{ TIM3_TRGO, STM32_EXT8 },
	{ TIM4_CH4, STM32_EXT9 },
	{ TIM5_CH1, STM32_EXT10 },
	{ TIM5_CH2, STM32_EXT11 },
	{ TIM5_CH3, STM32_EXT12 },
	{ TIM8_CH1, STM32_EXT13 },
	{ TIM8_TRGO, STM32_EXT14 },
	{}, /* sentinel */
};
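/*
 * Example (illustrative): with stm32f4_sq[], a three-conversion sequence on
 * channels 1, 4 and 11 is programmed as SQ1 = 1, SQ2 = 4, SQ3 = 11 (entries
 * sq[1]..sq[3]) and L = 2 (entry sq[0], i.e. sequence length - 1), which is
 * what stm32_adc_conf_scan_seq() does for each bit set in scan_mask.
 */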

/*
 * stm32f4_smp_bits[] - describe sampling time register index & bit fields
 * Sorted so it can be indexed by channel number.
 */
static const struct stm32_adc_regs stm32f4_smp_bits[] = {
	/* STM32F4_ADC_SMPR2: smpr[] index, mask, shift for SMP0 to SMP9 */
	{ 1, GENMASK(2, 0), 0 },
	{ 1, GENMASK(5, 3), 3 },
	{ 1, GENMASK(8, 6), 6 },
	{ 1, GENMASK(11, 9), 9 },
	{ 1, GENMASK(14, 12), 12 },
	{ 1, GENMASK(17, 15), 15 },
	{ 1, GENMASK(20, 18), 18 },
	{ 1, GENMASK(23, 21), 21 },
	{ 1, GENMASK(26, 24), 24 },
	{ 1, GENMASK(29, 27), 27 },
	/* STM32F4_ADC_SMPR1, smpr[] index, mask, shift for SMP10 to SMP18 */
	{ 0, GENMASK(2, 0), 0 },
	{ 0, GENMASK(5, 3), 3 },
	{ 0, GENMASK(8, 6), 6 },
	{ 0, GENMASK(11, 9), 9 },
	{ 0, GENMASK(14, 12), 12 },
	{ 0, GENMASK(17, 15), 15 },
	{ 0, GENMASK(20, 18), 18 },
	{ 0, GENMASK(23, 21), 21 },
	{ 0, GENMASK(26, 24), 24 },
};

/* STM32F4 programmable sampling time (ADC clock cycles) */
static const unsigned int stm32f4_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = {
	3, 15, 28, 56, 84, 112, 144, 480,
};

static const struct stm32_adc_regspec stm32f4_adc_regspec = {
	.dr = STM32F4_ADC_DR,
	.ier_eoc = { STM32F4_ADC_CR1, STM32F4_EOCIE },
	.ier_ovr = { STM32F4_ADC_CR1, STM32F4_OVRIE },
	.isr_eoc = { STM32F4_ADC_SR, STM32F4_EOC },
	.isr_ovr = { STM32F4_ADC_SR, STM32F4_OVR },
	.sqr = stm32f4_sq,
	.exten = { STM32F4_ADC_CR2, STM32F4_EXTEN_MASK, STM32F4_EXTEN_SHIFT },
	.extsel = { STM32F4_ADC_CR2, STM32F4_EXTSEL_MASK,
		    STM32F4_EXTSEL_SHIFT },
	.res = { STM32F4_ADC_CR1, STM32F4_RES_MASK, STM32F4_RES_SHIFT },
	.smpr = { STM32F4_ADC_SMPR1, STM32F4_ADC_SMPR2 },
	.smp_bits = stm32f4_smp_bits,
};

static const struct stm32_adc_regs stm32h7_sq[STM32_ADC_MAX_SQ + 1] = {
	/* L: len bit field description to be kept as first element */
	{ STM32H7_ADC_SQR1, GENMASK(3, 0), 0 },
	/* SQ1..SQ16 registers & bit fields (reg, mask, shift) */
	{ STM32H7_ADC_SQR1, GENMASK(10, 6), 6 },
	{ STM32H7_ADC_SQR1, GENMASK(16, 12), 12 },
	{ STM32H7_ADC_SQR1, GENMASK(22, 18), 18 },
	{ STM32H7_ADC_SQR1, GENMASK(28, 24), 24 },
	{ STM32H7_ADC_SQR2, GENMASK(4, 0), 0 },
	{ STM32H7_ADC_SQR2, GENMASK(10, 6), 6 },
	{ STM32H7_ADC_SQR2, GENMASK(16, 12), 12 },
	{ STM32H7_ADC_SQR2, GENMASK(22, 18), 18 },
	{ STM32H7_ADC_SQR2, GENMASK(28, 24), 24 },
	{ STM32H7_ADC_SQR3, GENMASK(4, 0), 0 },
	{ STM32H7_ADC_SQR3, GENMASK(10, 6), 6 },
	{ STM32H7_ADC_SQR3, GENMASK(16, 12), 12 },
	{ STM32H7_ADC_SQR3, GENMASK(22, 18), 18 },
	{ STM32H7_ADC_SQR3, GENMASK(28, 24), 24 },
	{ STM32H7_ADC_SQR4, GENMASK(4, 0), 0 },
	{ STM32H7_ADC_SQR4, GENMASK(10, 6), 6 },
};

/* STM32H7 external trigger sources for all instances */
static struct stm32_adc_trig_info stm32h7_adc_trigs[] = {
	{ TIM1_CH1, STM32_EXT0 },
	{ TIM1_CH2, STM32_EXT1 },
	{ TIM1_CH3, STM32_EXT2 },
	{ TIM2_CH2, STM32_EXT3 },
	{ TIM3_TRGO, STM32_EXT4 },
	{ TIM4_CH4, STM32_EXT5 },
	{ TIM8_TRGO, STM32_EXT7 },
	{ TIM8_TRGO2, STM32_EXT8 },
	{ TIM1_TRGO, STM32_EXT9 },
	{ TIM1_TRGO2, STM32_EXT10 },
	{ TIM2_TRGO, STM32_EXT11 },
	{ TIM4_TRGO, STM32_EXT12 },
	{ TIM6_TRGO, STM32_EXT13 },
	{ TIM15_TRGO, STM32_EXT14 },
	{ TIM3_CH4, STM32_EXT15 },
	{ LPTIM1_OUT, STM32_EXT18 },
	{ LPTIM2_OUT, STM32_EXT19 },
	{ LPTIM3_OUT, STM32_EXT20 },
	{},
};

/*
 * stm32h7_smp_bits - describe sampling time register index & bit fields
 * Sorted so it can be indexed by channel number.
 */
static const struct stm32_adc_regs stm32h7_smp_bits[] = {
	/* STM32H7_ADC_SMPR1, smpr[] index, mask, shift for SMP0 to SMP9 */
	{ 0, GENMASK(2, 0), 0 },
	{ 0, GENMASK(5, 3), 3 },
	{ 0, GENMASK(8, 6), 6 },
	{ 0, GENMASK(11, 9), 9 },
	{ 0, GENMASK(14, 12), 12 },
	{ 0, GENMASK(17, 15), 15 },
	{ 0, GENMASK(20, 18), 18 },
	{ 0, GENMASK(23, 21), 21 },
	{ 0, GENMASK(26, 24), 24 },
	{ 0, GENMASK(29, 27), 27 },
	/* STM32H7_ADC_SMPR2, smpr[] index, mask, shift for SMP10 to SMP19 */
	{ 1, GENMASK(2, 0), 0 },
	{ 1, GENMASK(5, 3), 3 },
	{ 1, GENMASK(8, 6), 6 },
	{ 1, GENMASK(11, 9), 9 },
	{ 1, GENMASK(14, 12), 12 },
	{ 1, GENMASK(17, 15), 15 },
	{ 1, GENMASK(20, 18), 18 },
	{ 1, GENMASK(23, 21), 21 },
	{ 1, GENMASK(26, 24), 24 },
	{ 1, GENMASK(29, 27), 27 },
};

/* STM32H7 programmable sampling time (ADC clock cycles, rounded down) */
static const unsigned int stm32h7_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = {
	1, 2, 8, 16, 32, 64, 387, 810,
};

static const struct stm32_adc_regspec stm32h7_adc_regspec = {
	.dr = STM32H7_ADC_DR,
	.ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE },
	.ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE },
	.isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC },
	.isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR },
	.sqr = stm32h7_sq,
	.exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT },
	.extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK,
		    STM32H7_EXTSEL_SHIFT },
	.res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT },
	.difsel = { STM32H7_ADC_DIFSEL, STM32H7_DIFSEL_MASK },
	.smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 },
	.smp_bits = stm32h7_smp_bits,
};

/* STM32MP13 programmable sampling time (ADC clock cycles, rounded down) */
static const unsigned int stm32mp13_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = {
	2, 6, 12, 24, 47, 92, 247, 640,
};

static const struct stm32_adc_regspec stm32mp13_adc_regspec = {
	.dr = STM32H7_ADC_DR,
	.ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE },
	.ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE },
	.isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC },
	.isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR },
	.sqr = stm32h7_sq,
	.exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT },
	.extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK,
		    STM32H7_EXTSEL_SHIFT },
	.res = { STM32H7_ADC_CFGR, STM32MP13_RES_MASK, STM32MP13_RES_SHIFT },
	.difsel = { STM32MP13_ADC_DIFSEL, STM32MP13_DIFSEL_MASK },
	.smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 },
	.smp_bits = stm32h7_smp_bits,
	.or_vddcore = { STM32MP13_ADC2_OR, STM32MP13_OP0 },
	.or_vddcpu = { STM32MP13_ADC2_OR, STM32MP13_OP1 },
	.or_vddq_ddr = { STM32MP13_ADC2_OR, STM32MP13_OP2 },
	.ccr_vbat = { STM32H7_ADC_CCR, STM32H7_VBATEN },
	.ccr_vref = { STM32H7_ADC_CCR, STM32H7_VREFEN },
};

static const struct stm32_adc_regspec stm32mp1_adc_regspec = {
	.dr = STM32H7_ADC_DR,
	.ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE },
	.ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE },
	.isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC },
	.isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR },
	.sqr = stm32h7_sq,
	.exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT },
	.extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK,
		    STM32H7_EXTSEL_SHIFT },
	.res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT },
	.difsel = { STM32H7_ADC_DIFSEL, STM32H7_DIFSEL_MASK },
	.smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 },
	.smp_bits = stm32h7_smp_bits,
	.or_vddcore = { STM32MP1_ADC2_OR, STM32MP1_VDDCOREEN },
	.ccr_vbat = { STM32H7_ADC_CCR, STM32H7_VBATEN },
	.ccr_vref = { STM32H7_ADC_CCR, STM32H7_VREFEN },
};

/*
 * STM32 ADC registers access routines
 * @adc: stm32 adc instance
 * @reg: reg offset in adc instance
 *
 * Note: All instances share same base, with 0x0, 0x100 or 0x200 offset resp.
 * for adc1, adc2 and adc3.
 */
static u32 stm32_adc_readl(struct stm32_adc *adc, u32 reg)
{
	return readl_relaxed(adc->common->base + adc->offset + reg);
}

#define stm32_adc_readl_addr(addr)	stm32_adc_readl(adc, addr)

#define stm32_adc_readl_poll_timeout(reg, val, cond, sleep_us, timeout_us) \
	readx_poll_timeout(stm32_adc_readl_addr, reg, val, \
			   cond, sleep_us, timeout_us)

static u16 stm32_adc_readw(struct stm32_adc *adc, u32 reg)
{
	return readw_relaxed(adc->common->base + adc->offset + reg);
}

static void stm32_adc_writel(struct stm32_adc *adc, u32 reg, u32 val)
{
	writel_relaxed(val, adc->common->base + adc->offset + reg);
}

static void stm32_adc_set_bits(struct stm32_adc *adc, u32 reg, u32 bits)
{
	unsigned long flags;

	spin_lock_irqsave(&adc->lock, flags);
	stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) | bits);
	spin_unlock_irqrestore(&adc->lock, flags);
}

static void stm32_adc_set_bits_common(struct stm32_adc *adc, u32 reg, u32 bits)
{
	spin_lock(&adc->common->lock);
	writel_relaxed(readl_relaxed(adc->common->base + reg) | bits,
		       adc->common->base + reg);
	spin_unlock(&adc->common->lock);
}

static void stm32_adc_clr_bits(struct stm32_adc *adc, u32 reg, u32 bits)
{
	unsigned long flags;

	spin_lock_irqsave(&adc->lock, flags);
	stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) & ~bits);
	spin_unlock_irqrestore(&adc->lock, flags);
}

static void stm32_adc_clr_bits_common(struct stm32_adc *adc, u32 reg, u32 bits)
{
	spin_lock(&adc->common->lock);
	writel_relaxed(readl_relaxed(adc->common->base + reg) & ~bits,
		       adc->common->base + reg);
	spin_unlock(&adc->common->lock);
}

/**
 * stm32_adc_conv_irq_enable() - Enable end of conversion interrupt
 * @adc: stm32 adc instance
 */
static void stm32_adc_conv_irq_enable(struct stm32_adc *adc)
{
	stm32_adc_set_bits(adc, adc->cfg->regs->ier_eoc.reg,
			   adc->cfg->regs->ier_eoc.mask);
}

/**
 * stm32_adc_conv_irq_disable() - Disable end of conversion interrupt
 * @adc: stm32 adc instance
 */
static void stm32_adc_conv_irq_disable(struct stm32_adc *adc)
{
	stm32_adc_clr_bits(adc, adc->cfg->regs->ier_eoc.reg,
			   adc->cfg->regs->ier_eoc.mask);
}

static void stm32_adc_ovr_irq_enable(struct stm32_adc *adc)
{
	stm32_adc_set_bits(adc, adc->cfg->regs->ier_ovr.reg,
			   adc->cfg->regs->ier_ovr.mask);
}

static void stm32_adc_ovr_irq_disable(struct stm32_adc *adc)
{
	stm32_adc_clr_bits(adc, adc->cfg->regs->ier_ovr.reg,
			   adc->cfg->regs->ier_ovr.mask);
}

static void stm32_adc_set_res(struct stm32_adc *adc)
{
	const struct stm32_adc_regs *res = &adc->cfg->regs->res;
	u32 val;

	val = stm32_adc_readl(adc, res->reg);
	val = (val & ~res->mask) | (adc->res << res->shift);
	stm32_adc_writel(adc, res->reg, val);
}
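/*
 * Note (illustrative): adc->res holds an index into the per-compatible
 * resolutions[] array, not a number of bits. E.g. on STM32H7
 * (stm32h7_adc_resolutions[] = { 16, 14, 12, 10, 8 }), a 12-bit request
 * resolves to index 2, which is the value stm32_adc_set_res() above writes
 * into the RES bit field.
 */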

static int stm32_adc_hw_stop(struct device *dev)
{
	struct iio_dev *indio_dev = dev_get_drvdata(dev);
	struct stm32_adc *adc = iio_priv(indio_dev);

	if (adc->cfg->unprepare)
		adc->cfg->unprepare(indio_dev);

	clk_disable_unprepare(adc->clk);

	return 0;
}

static int stm32_adc_hw_start(struct device *dev)
{
	struct iio_dev *indio_dev = dev_get_drvdata(dev);
	struct stm32_adc *adc = iio_priv(indio_dev);
	int ret;

	ret = clk_prepare_enable(adc->clk);
	if (ret)
		return ret;

	stm32_adc_set_res(adc);

	if (adc->cfg->prepare) {
		ret = adc->cfg->prepare(indio_dev);
		if (ret)
			goto err_clk_dis;
	}

	return 0;

err_clk_dis:
	clk_disable_unprepare(adc->clk);

	return ret;
}

static void stm32_adc_int_ch_enable(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	u32 i;

	for (i = 0; i < STM32_ADC_INT_CH_NB; i++) {
		if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE)
			continue;

		switch (i) {
		case STM32_ADC_INT_CH_VDDCORE:
			dev_dbg(&indio_dev->dev, "Enable VDDCore\n");
			stm32_adc_set_bits(adc, adc->cfg->regs->or_vddcore.reg,
					   adc->cfg->regs->or_vddcore.mask);
			break;
		case STM32_ADC_INT_CH_VDDCPU:
			dev_dbg(&indio_dev->dev, "Enable VDDCPU\n");
			stm32_adc_set_bits(adc, adc->cfg->regs->or_vddcpu.reg,
					   adc->cfg->regs->or_vddcpu.mask);
			break;
		case STM32_ADC_INT_CH_VDDQ_DDR:
			dev_dbg(&indio_dev->dev, "Enable VDDQ_DDR\n");
			stm32_adc_set_bits(adc, adc->cfg->regs->or_vddq_ddr.reg,
					   adc->cfg->regs->or_vddq_ddr.mask);
			break;
		case STM32_ADC_INT_CH_VREFINT:
			dev_dbg(&indio_dev->dev, "Enable VREFInt\n");
			stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vref.reg,
						  adc->cfg->regs->ccr_vref.mask);
			break;
		case STM32_ADC_INT_CH_VBAT:
			dev_dbg(&indio_dev->dev, "Enable VBAT\n");
			stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vbat.reg,
						  adc->cfg->regs->ccr_vbat.mask);
			break;
		}
	}
}

static void stm32_adc_int_ch_disable(struct stm32_adc *adc)
{
	u32 i;

	for (i = 0; i < STM32_ADC_INT_CH_NB; i++) {
		if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE)
			continue;

		switch (i) {
		case STM32_ADC_INT_CH_VDDCORE:
			stm32_adc_clr_bits(adc, adc->cfg->regs->or_vddcore.reg,
					   adc->cfg->regs->or_vddcore.mask);
			break;
		case STM32_ADC_INT_CH_VDDCPU:
			stm32_adc_clr_bits(adc, adc->cfg->regs->or_vddcpu.reg,
					   adc->cfg->regs->or_vddcpu.mask);
			break;
		case STM32_ADC_INT_CH_VDDQ_DDR:
			stm32_adc_clr_bits(adc, adc->cfg->regs->or_vddq_ddr.reg,
					   adc->cfg->regs->or_vddq_ddr.mask);
			break;
		case STM32_ADC_INT_CH_VREFINT:
			stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vref.reg,
						  adc->cfg->regs->ccr_vref.mask);
			break;
		case STM32_ADC_INT_CH_VBAT:
			stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vbat.reg,
						  adc->cfg->regs->ccr_vbat.mask);
			break;
		}
	}
}

/**
 * stm32f4_adc_start_conv() - Start conversions for regular channels.
 * @indio_dev: IIO device instance
 * @dma: use dma to transfer conversion result
 *
 * Start conversions for regular channels.
 * Also take care of normal or DMA mode. Circular DMA may be used for regular
 * conversions, in IIO buffer modes. Otherwise, use ADC interrupt with direct
 * DR read instead (e.g. read_raw, or triggered buffer mode without DMA).
 */
static void stm32f4_adc_start_conv(struct iio_dev *indio_dev, bool dma)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	stm32_adc_set_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN);

	if (dma)
		stm32_adc_set_bits(adc, STM32F4_ADC_CR2,
				   STM32F4_DMA | STM32F4_DDS);

	stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_EOCS | STM32F4_ADON);

	/* Wait for Power-up time (tSTAB from datasheet) */
	usleep_range(2, 3);

	/* Software start ? (e.g. trigger detection disabled ?) */
	if (!(stm32_adc_readl(adc, STM32F4_ADC_CR2) & STM32F4_EXTEN_MASK))
		stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_SWSTART);
}

static void stm32f4_adc_stop_conv(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	stm32_adc_clr_bits(adc, STM32F4_ADC_CR2, STM32F4_EXTEN_MASK);
	stm32_adc_clr_bits(adc, STM32F4_ADC_SR, STM32F4_STRT);

	stm32_adc_clr_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN);
	stm32_adc_clr_bits(adc, STM32F4_ADC_CR2,
			   STM32F4_ADON | STM32F4_DMA | STM32F4_DDS);
}

static void stm32f4_adc_irq_clear(struct iio_dev *indio_dev, u32 msk)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	stm32_adc_clr_bits(adc, adc->cfg->regs->isr_eoc.reg, msk);
}

static void stm32h7_adc_start_conv(struct iio_dev *indio_dev, bool dma)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	enum stm32h7_adc_dmngt dmngt;
	unsigned long flags;
	u32 val;

	if (dma)
		dmngt = STM32H7_DMNGT_DMA_CIRC;
	else
		dmngt = STM32H7_DMNGT_DR_ONLY;

	spin_lock_irqsave(&adc->lock, flags);
	val = stm32_adc_readl(adc, STM32H7_ADC_CFGR);
	val = (val & ~STM32H7_DMNGT_MASK) | (dmngt << STM32H7_DMNGT_SHIFT);
	stm32_adc_writel(adc, STM32H7_ADC_CFGR, val);
	spin_unlock_irqrestore(&adc->lock, flags);

	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTART);
}

static void stm32h7_adc_stop_conv(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int ret;
	u32 val;

	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTP);

	ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
					   !(val & (STM32H7_ADSTART)),
					   100, STM32_ADC_TIMEOUT_US);
	if (ret)
		dev_warn(&indio_dev->dev, "stop failed\n");

	/* STM32H7_DMNGT_MASK covers STM32MP13_DMAEN & STM32MP13_DMACFG */
	stm32_adc_clr_bits(adc, STM32H7_ADC_CFGR, STM32H7_DMNGT_MASK);
}

static void stm32h7_adc_irq_clear(struct iio_dev *indio_dev, u32 msk)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	/* On STM32H7 IRQs are cleared by writing 1 into ISR register */
	stm32_adc_set_bits(adc, adc->cfg->regs->isr_eoc.reg, msk);
}

static void stm32mp13_adc_start_conv(struct iio_dev *indio_dev, bool dma)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	if (dma)
		stm32_adc_set_bits(adc, STM32H7_ADC_CFGR,
				   STM32MP13_DMAEN | STM32MP13_DMACFG);

	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTART);
}

static int stm32h7_adc_exit_pwr_down(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int ret;
	u32 val;

	/* Exit deep power down, then enable ADC voltage regulator */
	stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD);
	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADVREGEN);

	if (adc->cfg->has_boostmode &&
	    adc->common->rate > STM32H7_BOOST_CLKRATE)
		stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST);

	/* Wait for startup time */
	if (!adc->cfg->has_vregready) {
		usleep_range(10, 20);
		return 0;
	}

	ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val,
					   val & STM32MP1_VREGREADY, 100,
					   STM32_ADC_TIMEOUT_US);
	if (ret) {
		stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD);
		dev_err(&indio_dev->dev, "Failed to exit power down\n");
	}

	return ret;
}

static void stm32h7_adc_enter_pwr_down(struct stm32_adc *adc)
{
	if (adc->cfg->has_boostmode)
		stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST);

	/* Setting DEEPPWD disables ADC vreg and clears ADVREGEN */
	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD);
}

static int stm32h7_adc_enable(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int ret;
	u32 val;

	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADEN);

	/* Poll for ADRDY to be set (after adc startup time) */
	ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val,
					   val & STM32H7_ADRDY,
					   100, STM32_ADC_TIMEOUT_US);
	if (ret) {
		stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS);
		dev_err(&indio_dev->dev, "Failed to enable ADC\n");
	} else {
		/* Clear ADRDY by writing one */
		stm32_adc_set_bits(adc, STM32H7_ADC_ISR, STM32H7_ADRDY);
	}

	return ret;
}

static void stm32h7_adc_disable(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int ret;
	u32 val;

	if (!(stm32_adc_readl(adc, STM32H7_ADC_CR) & STM32H7_ADEN))
		return;

	/* Disable ADC and wait until it's effectively disabled */
	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS);
	ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
					   !(val & STM32H7_ADEN), 100,
					   STM32_ADC_TIMEOUT_US);
	if (ret)
		dev_warn(&indio_dev->dev, "Failed to disable\n");
}

/**
 * stm32h7_adc_read_selfcalib() - read calibration shadow regs, save result
 * @indio_dev: IIO device instance
 * Note: Must be called once ADC is enabled, so LINCALRDYW[1..6] are writable
 */
static int stm32h7_adc_read_selfcalib(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int i, ret;
	u32 lincalrdyw_mask, val;

	/* Read linearity calibration */
	lincalrdyw_mask = STM32H7_LINCALRDYW6;
	for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) {
		/* Clear STM32H7_LINCALRDYW[6..1]: transfer calib to CALFACT2 */
		stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask);

		/* Poll: wait calib data to be ready in CALFACT2 register */
		ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
						   !(val & lincalrdyw_mask),
						   100, STM32_ADC_TIMEOUT_US);
		if (ret) {
			dev_err(&indio_dev->dev, "Failed to read calfact\n");
			return ret;
		}

		val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2);
		adc->cal.lincalfact[i] = (val & STM32H7_LINCALFACT_MASK);
		adc->cal.lincalfact[i] >>= STM32H7_LINCALFACT_SHIFT;

		lincalrdyw_mask >>= 1;
	}
	adc->cal.lincal_saved = true;

	return 0;
}
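/*
 * Note (illustrative): the linearity calibration words are all accessed
 * through a single shadow data register (CALFACT2), gated by the
 * LINCALRDYW6..1 control bits: clearing LINCALRDYWx transfers one word into
 * CALFACT2 for reading, setting it transfers CALFACT2 back into the ADC.
 * Both stm32h7_adc_read_selfcalib() and stm32h7_adc_restore_selfcalib()
 * walk the mask from LINCALRDYW6 down to LINCALRDYW1, so lincalfact[5..0]
 * correspond to calibration words 6..1 respectively.
 */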

/**
 * stm32h7_adc_restore_selfcalib() - Restore saved self-calibration result
 * @indio_dev: IIO device instance
 * Note: ADC must be enabled, with no on-going conversions.
 */
static int stm32h7_adc_restore_selfcalib(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int i, ret;
	u32 lincalrdyw_mask, val;

	lincalrdyw_mask = STM32H7_LINCALRDYW6;
	for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) {
		/*
		 * Write saved calibration data to shadow registers:
		 * Write CALFACT2, and set LINCALRDYW[6..1] bit to trigger
		 * data write. Then poll to wait for complete transfer.
		 */
		val = adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT;
		stm32_adc_writel(adc, STM32H7_ADC_CALFACT2, val);
		stm32_adc_set_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask);
		ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
						   val & lincalrdyw_mask,
						   100, STM32_ADC_TIMEOUT_US);
		if (ret) {
			dev_err(&indio_dev->dev, "Failed to write calfact\n");
			return ret;
		}

		/*
		 * Read back calibration data. This has two effects:
		 * - It ensures bits LINCALRDYW[6..1] are kept cleared
		 *   for next time calibration needs to be restored.
		 * - As a side effect, clearing the bit triggers a read-back,
		 *   so the data just written can be checked for consistency.
		 */
		stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask);
		ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
						   !(val & lincalrdyw_mask),
						   100, STM32_ADC_TIMEOUT_US);
		if (ret) {
			dev_err(&indio_dev->dev, "Failed to read calfact\n");
			return ret;
		}
		val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2);
		if (val != adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT) {
			dev_err(&indio_dev->dev, "calfact not consistent\n");
			return -EIO;
		}

		lincalrdyw_mask >>= 1;
	}

	return 0;
}

/*
 * Fixed timeout value for ADC calibration.
 * Worst cases:
 * - low clock frequency
 * - maximum prescalers
 * Calibration requires:
 * - 131,072 ADC clock cycles for the linear calibration
 * - 20 ADC clock cycles for the offset calibration
 *
 * Set to 100ms for now
 */
#define STM32H7_ADC_CALIB_TIMEOUT_US 100000
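/*
 * For instance, assuming (for illustration) a worst-case ADC clock around
 * 1.5 MHz, the 131,072 linear calibration cycles take roughly 87 ms, hence
 * the 100 ms budget above; the 20 offset calibration cycles are negligible
 * in comparison.
 */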

/**
 * stm32h7_adc_selfcalib() - Procedure to calibrate ADC
 * @indio_dev: IIO device instance
 * @do_lincal: linear calibration request flag
 * Note: Must be called once ADC is out of power down.
 *
 * Run offset calibration unconditionally.
 * Run linear calibration if requested & supported.
 */
static int stm32h7_adc_selfcalib(struct iio_dev *indio_dev, int do_lincal)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int ret;
	u32 msk = STM32H7_ADCALDIF;
	u32 val;

	if (adc->cfg->has_linearcal && do_lincal)
		msk |= STM32H7_ADCALLIN;
	/* ADC must be disabled for calibration */
	stm32h7_adc_disable(indio_dev);

	/*
	 * Select calibration mode:
	 * - Offset calibration for single ended inputs
	 * - No linearity calibration (do it later, before reading it)
	 */
	stm32_adc_clr_bits(adc, STM32H7_ADC_CR, msk);

	/* Start calibration, then wait for completion */
	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL);
	ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
					   !(val & STM32H7_ADCAL), 100,
					   STM32H7_ADC_CALIB_TIMEOUT_US);
	if (ret) {
		dev_err(&indio_dev->dev, "calibration (single-ended) error %d\n", ret);
		goto out;
	}

	/*
	 * Select calibration mode, then start calibration:
	 * - Offset calibration for differential input
	 * - Linearity calibration (needs to be done only once for single/diff)
	 *   will run simultaneously with offset calibration.
	 */
	stm32_adc_set_bits(adc, STM32H7_ADC_CR, msk);
	stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL);
	ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
					   !(val & STM32H7_ADCAL), 100,
					   STM32H7_ADC_CALIB_TIMEOUT_US);
	if (ret) {
		dev_err(&indio_dev->dev, "calibration (diff%s) error %d\n",
			(msk & STM32H7_ADCALLIN) ? "+linear" : "", ret);
		goto out;
	}

out:
	stm32_adc_clr_bits(adc, STM32H7_ADC_CR, msk);

	return ret;
}

/**
 * stm32h7_adc_check_selfcalib() - Check linear calibration status
 * @indio_dev: IIO device instance
 *
 * Used to check if linear calibration has been done.
 * Return true if linear calibration factors are already saved in private data
 * or if a linear calibration has been done at boot stage.
 */
static int stm32h7_adc_check_selfcalib(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	u32 val;

	if (adc->cal.lincal_saved)
		return true;

	/*
	 * Check if linear calibration factors are available in ADC registers,
	 * by checking that all LINCALRDYWx bits are set.
	 */
	val = stm32_adc_readl(adc, STM32H7_ADC_CR) & STM32H7_LINCALRDYW_MASK;
	if (val == STM32H7_LINCALRDYW_MASK)
		return true;

	return false;
}

/**
 * stm32h7_adc_prepare() - Leave power down mode to enable ADC.
 * @indio_dev: IIO device instance
 * Leave power down mode.
 * Configure channels as single ended or differential before enabling ADC.
 * Enable ADC.
 * Restore calibration data.
 * Pre-select channels that may be used in PCSEL (required by input MUX / IO):
 * - Only one input is selected for single ended (e.g. 'vinp')
 * - Two inputs are selected for differential channels (e.g. 'vinp' & 'vinn')
 */
static int stm32h7_adc_prepare(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int lincal_done = false;
	int ret;

	ret = stm32h7_adc_exit_pwr_down(indio_dev);
	if (ret)
		return ret;

	if (adc->cfg->has_linearcal)
		lincal_done = stm32h7_adc_check_selfcalib(indio_dev);

	/* Always run offset calibration. Run linear calibration only once */
	ret = stm32h7_adc_selfcalib(indio_dev, !lincal_done);
	if (ret < 0)
		goto pwr_dwn;

	stm32_adc_int_ch_enable(indio_dev);

	stm32_adc_writel(adc, adc->cfg->regs->difsel.reg, adc->difsel);

	ret = stm32h7_adc_enable(indio_dev);
	if (ret)
		goto ch_disable;

	if (adc->cfg->has_linearcal) {
		if (!adc->cal.lincal_saved)
			ret = stm32h7_adc_read_selfcalib(indio_dev);
		else
			ret = stm32h7_adc_restore_selfcalib(indio_dev);

		if (ret)
			goto disable;
	}

	if (adc->cfg->has_presel)
		stm32_adc_writel(adc, STM32H7_ADC_PCSEL, adc->pcsel);

	return 0;

disable:
	stm32h7_adc_disable(indio_dev);
ch_disable:
	stm32_adc_int_ch_disable(adc);
pwr_dwn:
	stm32h7_adc_enter_pwr_down(adc);

	return ret;
}

static void stm32h7_adc_unprepare(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	if (adc->cfg->has_presel)
		stm32_adc_writel(adc, STM32H7_ADC_PCSEL, 0);
	stm32h7_adc_disable(indio_dev);
	stm32_adc_int_ch_disable(adc);
	stm32h7_adc_enter_pwr_down(adc);
}

/**
 * stm32_adc_conf_scan_seq() - Build regular channels scan sequence
 * @indio_dev: IIO device
 * @scan_mask: channels to be converted
 *
 * Conversion sequence :
 * Apply sampling time settings for all channels.
 * Configure ADC scan sequence based on selected channels in scan_mask.
 * Add channels to SQR registers, from scan_mask LSB to MSB, then
 * program sequence len.
 */
static int stm32_adc_conf_scan_seq(struct iio_dev *indio_dev,
				   const unsigned long *scan_mask)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	const struct stm32_adc_regs *sqr = adc->cfg->regs->sqr;
	const struct iio_chan_spec *chan;
	u32 val, bit;
	int i = 0;

	/* Apply sampling time settings */
	stm32_adc_writel(adc, adc->cfg->regs->smpr[0], adc->smpr_val[0]);
	stm32_adc_writel(adc, adc->cfg->regs->smpr[1], adc->smpr_val[1]);

	for_each_set_bit(bit, scan_mask, indio_dev->masklength) {
		chan = indio_dev->channels + bit;
		/*
		 * Assign one channel per SQ entry in regular
		 * sequence, starting with SQ1.
		 */
		i++;
		if (i > STM32_ADC_MAX_SQ)
			return -EINVAL;

		dev_dbg(&indio_dev->dev, "%s chan %d to SQ%d\n",
			__func__, chan->channel, i);

		val = stm32_adc_readl(adc, sqr[i].reg);
		val &= ~sqr[i].mask;
		val |= chan->channel << sqr[i].shift;
		stm32_adc_writel(adc, sqr[i].reg, val);
	}

	if (!i)
		return -EINVAL;

	/* Sequence len */
	val = stm32_adc_readl(adc, sqr[0].reg);
	val &= ~sqr[0].mask;
	val |= ((i - 1) << sqr[0].shift);
	stm32_adc_writel(adc, sqr[0].reg, val);

	return 0;
}

/**
 * stm32_adc_get_trig_extsel() - Get external trigger selection
 * @indio_dev: IIO device structure
 * @trig: trigger
 *
 * Returns trigger extsel value, if trig matches, -EINVAL otherwise.
 */
static int stm32_adc_get_trig_extsel(struct iio_dev *indio_dev,
				     struct iio_trigger *trig)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int i;

	/* lookup triggers registered by stm32 timer trigger driver */
	for (i = 0; adc->cfg->trigs[i].name; i++) {
		/**
		 * Checking both stm32 timer trigger type and trig name
		 * should be safe against arbitrary trigger names.
		 */
		if ((is_stm32_timer_trigger(trig) ||
		     is_stm32_lptim_trigger(trig)) &&
		    !strcmp(adc->cfg->trigs[i].name, trig->name)) {
			return adc->cfg->trigs[i].extsel;
		}
	}

	return -EINVAL;
}

/**
 * stm32_adc_set_trig() - Set a regular trigger
 * @indio_dev: IIO device
 * @trig: IIO trigger
 *
 * Set trigger source/polarity (e.g. SW, or HW with polarity) :
 * - if HW trigger disabled (e.g. trig == NULL, conversion launched by sw)
 * - if HW trigger enabled, set source & polarity
 */
static int stm32_adc_set_trig(struct iio_dev *indio_dev,
			      struct iio_trigger *trig)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	u32 val, extsel = 0, exten = STM32_EXTEN_SWTRIG;
	unsigned long flags;
	int ret;

	if (trig) {
		ret = stm32_adc_get_trig_extsel(indio_dev, trig);
		if (ret < 0)
			return ret;

		/* set trigger source and polarity (default to rising edge) */
		extsel = ret;
		exten = adc->trigger_polarity + STM32_EXTEN_HWTRIG_RISING_EDGE;
	}

	spin_lock_irqsave(&adc->lock, flags);
	val = stm32_adc_readl(adc, adc->cfg->regs->exten.reg);
	val &= ~(adc->cfg->regs->exten.mask | adc->cfg->regs->extsel.mask);
	val |= exten << adc->cfg->regs->exten.shift;
	val |= extsel << adc->cfg->regs->extsel.shift;
	stm32_adc_writel(adc, adc->cfg->regs->exten.reg, val);
	spin_unlock_irqrestore(&adc->lock, flags);

	return 0;
}

static int stm32_adc_set_trig_pol(struct iio_dev *indio_dev,
				  const struct iio_chan_spec *chan,
				  unsigned int type)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	adc->trigger_polarity = type;

	return 0;
}

static int stm32_adc_get_trig_pol(struct iio_dev *indio_dev,
				  const struct iio_chan_spec *chan)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	return adc->trigger_polarity;
}

static const char * const stm32_trig_pol_items[] = {
	"rising-edge", "falling-edge", "both-edges",
};

static const struct iio_enum stm32_adc_trig_pol = {
	.items = stm32_trig_pol_items,
	.num_items = ARRAY_SIZE(stm32_trig_pol_items),
	.get = stm32_adc_get_trig_pol,
	.set = stm32_adc_set_trig_pol,
};

/**
 * stm32_adc_single_conv() - Performs a single conversion
 * @indio_dev: IIO device
 * @chan: IIO channel
 * @res: conversion result
 *
 * The function performs a single conversion on a given channel:
 * - Apply sampling time settings
 * - Program sequencer with one channel (e.g. in SQ1 with len = 1)
 * - Use SW trigger
 * - Start conversion, then wait for interrupt completion.
 */
static int stm32_adc_single_conv(struct iio_dev *indio_dev,
				 const struct iio_chan_spec *chan,
				 int *res)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	struct device *dev = indio_dev->dev.parent;
	const struct stm32_adc_regspec *regs = adc->cfg->regs;
	long timeout;
	u32 val;
	int ret;

	reinit_completion(&adc->completion);

	adc->bufi = 0;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	/* Apply sampling time settings */
	stm32_adc_writel(adc, regs->smpr[0], adc->smpr_val[0]);
	stm32_adc_writel(adc, regs->smpr[1], adc->smpr_val[1]);

	/* Program chan number in regular sequence (SQ1) */
	val = stm32_adc_readl(adc, regs->sqr[1].reg);
	val &= ~regs->sqr[1].mask;
	val |= chan->channel << regs->sqr[1].shift;
	stm32_adc_writel(adc, regs->sqr[1].reg, val);

	/* Set regular sequence len (0 for 1 conversion) */
	stm32_adc_clr_bits(adc, regs->sqr[0].reg, regs->sqr[0].mask);

	/* Trigger detection disabled (conversion can be launched in SW) */
	stm32_adc_clr_bits(adc, regs->exten.reg, regs->exten.mask);

	stm32_adc_conv_irq_enable(adc);

	adc->cfg->start_conv(indio_dev, false);

	timeout = wait_for_completion_interruptible_timeout(
					&adc->completion, STM32_ADC_TIMEOUT);
	if (timeout == 0) {
		ret = -ETIMEDOUT;
	} else if (timeout < 0) {
		ret = timeout;
	} else {
		*res = adc->buffer[0];
		ret = IIO_VAL_INT;
	}

	adc->cfg->stop_conv(indio_dev);

	stm32_adc_conv_irq_disable(adc);

	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return ret;
}

static int stm32_adc_read_raw(struct iio_dev *indio_dev,
			      struct iio_chan_spec const *chan,
			      int *val, int *val2, long mask)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	int ret;

	switch (mask) {
	case IIO_CHAN_INFO_RAW:
	case IIO_CHAN_INFO_PROCESSED:
		ret = iio_device_claim_direct_mode(indio_dev);
		if (ret)
			return ret;
		if (chan->type == IIO_VOLTAGE)
			ret = stm32_adc_single_conv(indio_dev, chan, val);
		else
			ret = -EINVAL;

		if (mask == IIO_CHAN_INFO_PROCESSED)
			*val = STM32_ADC_VREFINT_VOLTAGE * adc->vrefint.vrefint_cal / *val;

		iio_device_release_direct_mode(indio_dev);
		return ret;

	case IIO_CHAN_INFO_SCALE:
		if (chan->differential) {
			*val = adc->common->vref_mv * 2;
			*val2 = chan->scan_type.realbits;
		} else {
			*val = adc->common->vref_mv;
			*val2 = chan->scan_type.realbits;
		}
		return IIO_VAL_FRACTIONAL_LOG2;

	case IIO_CHAN_INFO_OFFSET:
		if (chan->differential)
			/* ADC_full_scale / 2 */
			*val = -((1 << chan->scan_type.realbits) / 2);
		else
			*val = 0;
		return IIO_VAL_INT;

	default:
		return -EINVAL;
	}
}

static void stm32_adc_irq_clear(struct iio_dev *indio_dev, u32 msk)
{
	struct stm32_adc *adc = iio_priv(indio_dev);

	adc->cfg->irq_clear(indio_dev, msk);
}

static irqreturn_t stm32_adc_threaded_isr(int irq, void *data)
{
	struct iio_dev *indio_dev = data;
	struct stm32_adc *adc = iio_priv(indio_dev);
	const struct stm32_adc_regspec *regs = adc->cfg->regs;
	u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg);

	/* Check ovr status right now, as ovr mask should be already disabled */
	if (status & regs->isr_ovr.mask) {
		/*
		 * Clear ovr bit to avoid subsequent calls to IRQ handler.
		 * This requires to stop ADC first. OVR bit state in ISR
		 * is propagated to CSR register by hardware.
		 */
		adc->cfg->stop_conv(indio_dev);
		stm32_adc_irq_clear(indio_dev, regs->isr_ovr.mask);
		dev_err(&indio_dev->dev, "Overrun, stopping: restart needed\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

static irqreturn_t stm32_adc_isr(int irq, void *data)
{
	struct iio_dev *indio_dev = data;
	struct stm32_adc *adc = iio_priv(indio_dev);
	const struct stm32_adc_regspec *regs = adc->cfg->regs;
	u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg);

	if (status & regs->isr_ovr.mask) {
		/*
		 * Overrun occurred on regular conversions: data for wrong
		 * channel may be read. Unconditionally disable interrupts
		 * to stop processing data and print error message.
		 * Restarting the capture can be done by disabling, then
		 * re-enabling it (e.g. write 0, then 1 to buffer/enable).
		 */
		stm32_adc_ovr_irq_disable(adc);
		stm32_adc_conv_irq_disable(adc);
		return IRQ_WAKE_THREAD;
	}

	if (status & regs->isr_eoc.mask) {
		/* Reading DR also clears EOC status flag */
		adc->buffer[adc->bufi] = stm32_adc_readw(adc, regs->dr);
		if (iio_buffer_enabled(indio_dev)) {
			adc->bufi++;
			if (adc->bufi >= adc->num_conv) {
				stm32_adc_conv_irq_disable(adc);
				iio_trigger_poll(indio_dev->trig);
			}
		} else {
			complete(&adc->completion);
		}
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

/**
 * stm32_adc_validate_trigger() - validate trigger for stm32 adc
 * @indio_dev: IIO device
 * @trig: new trigger
 *
 * Returns: 0 if trig matches one of the triggers registered by stm32 adc
 * driver, -EINVAL otherwise.
 */
static int stm32_adc_validate_trigger(struct iio_dev *indio_dev,
				      struct iio_trigger *trig)
{
	return stm32_adc_get_trig_extsel(indio_dev, trig) < 0 ? -EINVAL : 0;
}
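/*
 * Example (illustrative): stm32_adc_set_watermark() below sizes the cyclic
 * DMA buffer from the requested watermark. With a 4 KiB PAGE_SIZE, a
 * watermark of 64 samples and 2 channels scanned:
 * watermark = min(PAGE_SIZE / 2, 64 * 2) = 128 bytes and
 * rx_buf_sz = min(PAGE_SIZE, 128 * 2 * 2) = 512 bytes, i.e. two 256-byte
 * DMA periods.
 */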
static int stm32_adc_set_watermark(struct iio_dev *indio_dev, unsigned int val)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	unsigned int watermark = STM32_DMA_BUFFER_SIZE / 2;
	unsigned int rx_buf_sz = STM32_DMA_BUFFER_SIZE;

	/*
	 * dma cyclic transfers are used, buffer is split into two periods.
	 * There should be :
	 * - always one buffer (period) dma is working on
	 * - one buffer (period) driver can push data.
	 */
	watermark = min(watermark, val * (unsigned)(sizeof(u16)));
	adc->rx_buf_sz = min(rx_buf_sz, watermark * 2 * adc->num_conv);

	return 0;
}

static int stm32_adc_update_scan_mode(struct iio_dev *indio_dev,
				      const unsigned long *scan_mask)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	struct device *dev = indio_dev->dev.parent;
	int ret;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	adc->num_conv = bitmap_weight(scan_mask, indio_dev->masklength);

	ret = stm32_adc_conf_scan_seq(indio_dev, scan_mask);
	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return ret;
}

static int stm32_adc_fwnode_xlate(struct iio_dev *indio_dev,
				  const struct fwnode_reference_args *iiospec)
{
	int i;

	for (i = 0; i < indio_dev->num_channels; i++)
		if (indio_dev->channels[i].channel == iiospec->args[0])
			return i;

	return -EINVAL;
}

/**
 * stm32_adc_debugfs_reg_access - read or write register value
 * @indio_dev: IIO device structure
 * @reg: register offset
 * @writeval: value to write
 * @readval: value to read
 *
 * To read a value from an ADC register:
 *   echo [ADC reg offset] > direct_reg_access
 *   cat direct_reg_access
 *
 * To write a value in an ADC register:
 *   echo [ADC_reg_offset] [value] > direct_reg_access
 */
static int stm32_adc_debugfs_reg_access(struct iio_dev *indio_dev,
					unsigned reg, unsigned writeval,
					unsigned *readval)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	struct device *dev = indio_dev->dev.parent;
	int ret;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	if (!readval)
		stm32_adc_writel(adc, reg, writeval);
	else
		*readval = stm32_adc_readl(adc, reg);

	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return 0;
}

static const struct iio_info stm32_adc_iio_info = {
	.read_raw = stm32_adc_read_raw,
	.validate_trigger = stm32_adc_validate_trigger,
	.hwfifo_set_watermark = stm32_adc_set_watermark,
	.update_scan_mode = stm32_adc_update_scan_mode,
	.debugfs_reg_access = stm32_adc_debugfs_reg_access,
	.fwnode_xlate = stm32_adc_fwnode_xlate,
};

static unsigned int stm32_adc_dma_residue(struct stm32_adc *adc)
{
	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(adc->dma_chan,
				     adc->dma_chan->cookie,
				     &state);
	if (status == DMA_IN_PROGRESS) {
		/* Residue is size in bytes from end of buffer */
		unsigned int i = adc->rx_buf_sz - state.residue;
		unsigned int size;

		/* Return available bytes */
		if (i >= adc->bufi)
			size = i - adc->bufi;
		else
			size = adc->rx_buf_sz + i - adc->bufi;

		return size;
	}

	return 0;
}
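/*
 * Example (illustrative): stm32_adc_dma_residue() converts the dmaengine
 * residue (bytes left until the end of the cyclic buffer) into the number
 * of new bytes available after adc->bufi. E.g. with rx_buf_sz = 1024 and
 * residue = 256, the DMA write pointer is at offset i = 768; if
 * adc->bufi = 512, 256 bytes are ready, and if adc->bufi = 896 the count
 * wraps around: 1024 + 768 - 896 = 896 bytes.
 */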
static void stm32_adc_dma_buffer_done(void *data)
{
	struct iio_dev *indio_dev = data;
	struct stm32_adc *adc = iio_priv(indio_dev);
	int residue = stm32_adc_dma_residue(adc);

	/*
	 * In DMA mode the trigger services of IIO are not used
	 * (e.g. no call to iio_trigger_poll).
	 * Calling irq handler associated to the hardware trigger is not
	 * relevant as the conversions have already been done. Data
	 * transfers are performed directly in DMA callback instead.
	 * This avoids calling the trigger irq handler, which may sleep,
	 * from an atomic context (DMA irq handler context).
	 */
	dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi);

	while (residue >= indio_dev->scan_bytes) {
		u16 *buffer = (u16 *)&adc->rx_buf[adc->bufi];

		iio_push_to_buffers(indio_dev, buffer);

		residue -= indio_dev->scan_bytes;
		adc->bufi += indio_dev->scan_bytes;
		if (adc->bufi >= adc->rx_buf_sz)
			adc->bufi = 0;
	}
}

static int stm32_adc_dma_start(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int ret;

	if (!adc->dma_chan)
		return 0;

	dev_dbg(&indio_dev->dev, "%s size=%d watermark=%d\n", __func__,
		adc->rx_buf_sz, adc->rx_buf_sz / 2);

	/* Prepare a DMA cyclic transaction */
	desc = dmaengine_prep_dma_cyclic(adc->dma_chan,
					 adc->rx_dma_buf,
					 adc->rx_buf_sz, adc->rx_buf_sz / 2,
					 DMA_DEV_TO_MEM,
					 DMA_PREP_INTERRUPT);
	if (!desc)
		return -EBUSY;

	desc->callback = stm32_adc_dma_buffer_done;
	desc->callback_param = indio_dev;

	cookie = dmaengine_submit(desc);
	ret = dma_submit_error(cookie);
	if (ret) {
		dmaengine_terminate_sync(adc->dma_chan);
		return ret;
	}

	/* Issue pending DMA requests */
	dma_async_issue_pending(adc->dma_chan);

	return 0;
}

static int stm32_adc_buffer_postenable(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	struct device *dev = indio_dev->dev.parent;
	int ret;

	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0)
		return ret;

	ret = stm32_adc_set_trig(indio_dev, indio_dev->trig);
	if (ret) {
		dev_err(&indio_dev->dev, "Can't set trigger\n");
		goto err_pm_put;
	}

	ret = stm32_adc_dma_start(indio_dev);
	if (ret) {
		dev_err(&indio_dev->dev, "Can't start dma\n");
		goto err_clr_trig;
	}

	/* Reset adc buffer index */
	adc->bufi = 0;

	stm32_adc_ovr_irq_enable(adc);

	if (!adc->dma_chan)
		stm32_adc_conv_irq_enable(adc);

	adc->cfg->start_conv(indio_dev, !!adc->dma_chan);

	return 0;

err_clr_trig:
	stm32_adc_set_trig(indio_dev, NULL);
err_pm_put:
	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return ret;
}

static int stm32_adc_buffer_predisable(struct iio_dev *indio_dev)
{
	struct stm32_adc *adc = iio_priv(indio_dev);
	struct device *dev = indio_dev->dev.parent;

	adc->cfg->stop_conv(indio_dev);
	if (!adc->dma_chan)
		stm32_adc_conv_irq_disable(adc);

	stm32_adc_ovr_irq_disable(adc);

	if (adc->dma_chan)
		dmaengine_terminate_sync(adc->dma_chan);

	if (stm32_adc_set_trig(indio_dev, NULL))
		dev_err(&indio_dev->dev, "Can't clear trigger\n");

	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return 0;
}

static const struct iio_buffer_setup_ops stm32_adc_buffer_setup_ops = {
	.postenable = &stm32_adc_buffer_postenable,
	.predisable = &stm32_adc_buffer_predisable,
};

static irqreturn_t stm32_adc_trigger_handler(int irq, void *p)
{
	struct iio_poll_func *pf = p;
	struct iio_dev *indio_dev = pf->indio_dev;
	struct stm32_adc *adc = iio_priv(indio_dev);
1857 1858 dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi); 1859 1860 /* reset buffer index */ 1861 adc->bufi = 0; 1862 iio_push_to_buffers_with_timestamp(indio_dev, adc->buffer, 1863 pf->timestamp); 1864 iio_trigger_notify_done(indio_dev->trig); 1865 1866 /* re-enable eoc irq */ 1867 stm32_adc_conv_irq_enable(adc); 1868 1869 return IRQ_HANDLED; 1870 } 1871 1872 static const struct iio_chan_spec_ext_info stm32_adc_ext_info[] = { 1873 IIO_ENUM("trigger_polarity", IIO_SHARED_BY_ALL, &stm32_adc_trig_pol), 1874 { 1875 .name = "trigger_polarity_available", 1876 .shared = IIO_SHARED_BY_ALL, 1877 .read = iio_enum_available_read, 1878 .private = (uintptr_t)&stm32_adc_trig_pol, 1879 }, 1880 {}, 1881 }; 1882 1883 static void stm32_adc_debugfs_init(struct iio_dev *indio_dev) 1884 { 1885 struct stm32_adc *adc = iio_priv(indio_dev); 1886 struct dentry *d = iio_get_debugfs_dentry(indio_dev); 1887 struct stm32_adc_calib *cal = &adc->cal; 1888 char buf[16]; 1889 unsigned int i; 1890 1891 if (!adc->cfg->has_linearcal) 1892 return; 1893 1894 for (i = 0; i < STM32H7_LINCALFACT_NUM; i++) { 1895 snprintf(buf, sizeof(buf), "lincalfact%d", i + 1); 1896 debugfs_create_u32(buf, 0444, d, &cal->lincalfact[i]); 1897 } 1898 } 1899 1900 static int stm32_adc_fw_get_resolution(struct iio_dev *indio_dev) 1901 { 1902 struct device *dev = &indio_dev->dev; 1903 struct stm32_adc *adc = iio_priv(indio_dev); 1904 unsigned int i; 1905 u32 res; 1906 1907 if (device_property_read_u32(dev, "assigned-resolution-bits", &res)) 1908 res = adc->cfg->adc_info->resolutions[0]; 1909 1910 for (i = 0; i < adc->cfg->adc_info->num_res; i++) 1911 if (res == adc->cfg->adc_info->resolutions[i]) 1912 break; 1913 if (i >= adc->cfg->adc_info->num_res) { 1914 dev_err(&indio_dev->dev, "Bad resolution: %u bits\n", res); 1915 return -EINVAL; 1916 } 1917 1918 dev_dbg(&indio_dev->dev, "Using %u bits resolution\n", res); 1919 adc->res = i; 1920 1921 return 0; 1922 } 1923 1924 static void stm32_adc_smpr_init(struct stm32_adc *adc, int channel, u32 smp_ns) 1925 { 1926 const struct stm32_adc_regs *smpr = &adc->cfg->regs->smp_bits[channel]; 1927 u32 period_ns, shift = smpr->shift, mask = smpr->mask; 1928 unsigned int i, smp, r = smpr->reg; 1929 1930 /* 1931 * For internal channels, ensure that the sampling time cannot 1932 * be lower than the one specified in the datasheet 1933 */ 1934 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) 1935 if (channel == adc->int_ch[i] && adc->int_ch[i] != STM32_ADC_INT_CH_NONE) 1936 smp_ns = max(smp_ns, adc->cfg->ts_int_ch[i]); 1937 1938 /* Determine sampling time (ADC clock cycles) */ 1939 period_ns = NSEC_PER_SEC / adc->common->rate; 1940 for (smp = 0; smp <= STM32_ADC_MAX_SMP; smp++) 1941 if ((period_ns * adc->cfg->smp_cycles[smp]) >= smp_ns) 1942 break; 1943 if (smp > STM32_ADC_MAX_SMP) 1944 smp = STM32_ADC_MAX_SMP; 1945 1946 /* pre-build sampling time registers (e.g. 
smpr1, smpr2) */ 1947 adc->smpr_val[r] = (adc->smpr_val[r] & ~mask) | (smp << shift); 1948 } 1949 1950 static void stm32_adc_chan_init_one(struct iio_dev *indio_dev, 1951 struct iio_chan_spec *chan, u32 vinp, 1952 u32 vinn, int scan_index, bool differential) 1953 { 1954 struct stm32_adc *adc = iio_priv(indio_dev); 1955 char *name = adc->chan_name[vinp]; 1956 1957 chan->type = IIO_VOLTAGE; 1958 chan->channel = vinp; 1959 if (differential) { 1960 chan->differential = 1; 1961 chan->channel2 = vinn; 1962 snprintf(name, STM32_ADC_CH_SZ, "in%d-in%d", vinp, vinn); 1963 } else { 1964 snprintf(name, STM32_ADC_CH_SZ, "in%d", vinp); 1965 } 1966 chan->datasheet_name = name; 1967 chan->scan_index = scan_index; 1968 chan->indexed = 1; 1969 if (chan->channel == adc->int_ch[STM32_ADC_INT_CH_VREFINT]) 1970 chan->info_mask_separate = BIT(IIO_CHAN_INFO_PROCESSED); 1971 else 1972 chan->info_mask_separate = BIT(IIO_CHAN_INFO_RAW); 1973 chan->info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE) | 1974 BIT(IIO_CHAN_INFO_OFFSET); 1975 chan->scan_type.sign = 'u'; 1976 chan->scan_type.realbits = adc->cfg->adc_info->resolutions[adc->res]; 1977 chan->scan_type.storagebits = 16; 1978 chan->ext_info = stm32_adc_ext_info; 1979 1980 /* pre-build selected channels mask */ 1981 adc->pcsel |= BIT(chan->channel); 1982 if (differential) { 1983 /* pre-build diff channels mask */ 1984 adc->difsel |= BIT(chan->channel) & adc->cfg->regs->difsel.mask; 1985 /* Also add negative input to pre-selected channels */ 1986 adc->pcsel |= BIT(chan->channel2); 1987 } 1988 } 1989 1990 static int stm32_adc_get_legacy_chan_count(struct iio_dev *indio_dev, struct stm32_adc *adc) 1991 { 1992 struct device *dev = &indio_dev->dev; 1993 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 1994 int num_channels = 0, ret; 1995 1996 dev_dbg(&indio_dev->dev, "using legacy channel config\n"); 1997 1998 ret = device_property_count_u32(dev, "st,adc-channels"); 1999 if (ret > adc_info->max_channels) { 2000 dev_err(&indio_dev->dev, "Bad st,adc-channels?\n"); 2001 return -EINVAL; 2002 } else if (ret > 0) { 2003 num_channels += ret; 2004 } 2005 2006 /* 2007 * each st,adc-diff-channels is a group of 2 u32 so we divide @ret 2008 * to get the *real* number of channels. 
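	 * For example, a binding such as "st,adc-diff-channels = <1 5>, <2 6>"
	 * (values made up for illustration) counts as four u32, i.e. two
	 * differential channels: in1-in5 and in2-in6.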
2009 */ 2010 ret = device_property_count_u32(dev, "st,adc-diff-channels"); 2011 if (ret > 0) { 2012 ret /= (int)(sizeof(struct stm32_adc_diff_channel) / sizeof(u32)); 2013 if (ret > adc_info->max_channels) { 2014 dev_err(&indio_dev->dev, "Bad st,adc-diff-channels?\n"); 2015 return -EINVAL; 2016 } else if (ret > 0) { 2017 adc->num_diff = ret; 2018 num_channels += ret; 2019 } 2020 } 2021 2022 /* Optional sample time is provided either for each, or all channels */ 2023 adc->nsmps = device_property_count_u32(dev, "st,min-sample-time-nsecs"); 2024 if (adc->nsmps > 1 && adc->nsmps != num_channels) { 2025 dev_err(&indio_dev->dev, "Invalid st,min-sample-time-nsecs\n"); 2026 return -EINVAL; 2027 } 2028 2029 return num_channels; 2030 } 2031 2032 static int stm32_adc_legacy_chan_init(struct iio_dev *indio_dev, 2033 struct stm32_adc *adc, 2034 struct iio_chan_spec *channels, 2035 int nchans) 2036 { 2037 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 2038 struct stm32_adc_diff_channel diff[STM32_ADC_CH_MAX]; 2039 struct device *dev = &indio_dev->dev; 2040 u32 num_diff = adc->num_diff; 2041 int num_se = nchans - num_diff; 2042 int size = num_diff * sizeof(*diff) / sizeof(u32); 2043 int scan_index = 0, ret, i, c; 2044 u32 smp = 0, smps[STM32_ADC_CH_MAX], chans[STM32_ADC_CH_MAX]; 2045 2046 if (num_diff) { 2047 ret = device_property_read_u32_array(dev, "st,adc-diff-channels", 2048 (u32 *)diff, size); 2049 if (ret) { 2050 dev_err(&indio_dev->dev, "Failed to get diff channels %d\n", ret); 2051 return ret; 2052 } 2053 2054 for (i = 0; i < num_diff; i++) { 2055 if (diff[i].vinp >= adc_info->max_channels || 2056 diff[i].vinn >= adc_info->max_channels) { 2057 dev_err(&indio_dev->dev, "Invalid channel in%d-in%d\n", 2058 diff[i].vinp, diff[i].vinn); 2059 return -EINVAL; 2060 } 2061 2062 stm32_adc_chan_init_one(indio_dev, &channels[scan_index], 2063 diff[i].vinp, diff[i].vinn, 2064 scan_index, true); 2065 scan_index++; 2066 } 2067 } 2068 if (num_se > 0) { 2069 ret = device_property_read_u32_array(dev, "st,adc-channels", chans, num_se); 2070 if (ret) { 2071 dev_err(&indio_dev->dev, "Failed to get st,adc-channels %d\n", ret); 2072 return ret; 2073 } 2074 2075 for (c = 0; c < num_se; c++) { 2076 if (chans[c] >= adc_info->max_channels) { 2077 dev_err(&indio_dev->dev, "Invalid channel %d\n", 2078 chans[c]); 2079 return -EINVAL; 2080 } 2081 2082 /* Channel can't be configured both as single-ended & diff */ 2083 for (i = 0; i < num_diff; i++) { 2084 if (chans[c] == diff[i].vinp) { 2085 dev_err(&indio_dev->dev, "channel %d misconfigured\n", 2086 chans[c]); 2087 return -EINVAL; 2088 } 2089 } 2090 stm32_adc_chan_init_one(indio_dev, &channels[scan_index], 2091 chans[c], 0, scan_index, false); 2092 scan_index++; 2093 } 2094 } 2095 2096 if (adc->nsmps > 0) { 2097 ret = device_property_read_u32_array(dev, "st,min-sample-time-nsecs", 2098 smps, adc->nsmps); 2099 if (ret) 2100 return ret; 2101 } 2102 2103 for (i = 0; i < scan_index; i++) { 2104 /* 2105 * This check is used with the above logic so that smp value 2106 * will only be modified if valid u32 value can be decoded. This 2107 * allows to get either no value, 1 shared value for all indexes, 2108 * or one value per channel. The point is to have the same 2109 * behavior as 'of_property_read_u32_index()'. 
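		 * For example (illustrative values): "st,min-sample-time-nsecs =
		 * <10000>" applies a 10 us minimum sampling time to every channel,
		 * while an array with one entry per channel assigns each channel
		 * its own value.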
2110 */ 2111 if (i < adc->nsmps) 2112 smp = smps[i]; 2113 2114 /* Prepare sampling time settings */ 2115 stm32_adc_smpr_init(adc, channels[i].channel, smp); 2116 } 2117 2118 return scan_index; 2119 } 2120 2121 static int stm32_adc_populate_int_ch(struct iio_dev *indio_dev, const char *ch_name, 2122 int chan) 2123 { 2124 struct stm32_adc *adc = iio_priv(indio_dev); 2125 u16 vrefint; 2126 int i, ret; 2127 2128 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) { 2129 if (!strncmp(stm32_adc_ic[i].name, ch_name, STM32_ADC_CH_SZ)) { 2130 /* Check internal channel availability */ 2131 switch (i) { 2132 case STM32_ADC_INT_CH_VDDCORE: 2133 if (!adc->cfg->regs->or_vddcore.reg) 2134 dev_warn(&indio_dev->dev, 2135 "%s channel not available\n", ch_name); 2136 break; 2137 case STM32_ADC_INT_CH_VDDCPU: 2138 if (!adc->cfg->regs->or_vddcpu.reg) 2139 dev_warn(&indio_dev->dev, 2140 "%s channel not available\n", ch_name); 2141 break; 2142 case STM32_ADC_INT_CH_VDDQ_DDR: 2143 if (!adc->cfg->regs->or_vddq_ddr.reg) 2144 dev_warn(&indio_dev->dev, 2145 "%s channel not available\n", ch_name); 2146 break; 2147 case STM32_ADC_INT_CH_VREFINT: 2148 if (!adc->cfg->regs->ccr_vref.reg) 2149 dev_warn(&indio_dev->dev, 2150 "%s channel not available\n", ch_name); 2151 break; 2152 case STM32_ADC_INT_CH_VBAT: 2153 if (!adc->cfg->regs->ccr_vbat.reg) 2154 dev_warn(&indio_dev->dev, 2155 "%s channel not available\n", ch_name); 2156 break; 2157 } 2158 2159 if (stm32_adc_ic[i].idx != STM32_ADC_INT_CH_VREFINT) { 2160 adc->int_ch[i] = chan; 2161 break; 2162 } 2163 2164 /* Get calibration data for vrefint channel */ 2165 ret = nvmem_cell_read_u16(&indio_dev->dev, "vrefint", &vrefint); 2166 if (ret && ret != -ENOENT) { 2167 return dev_err_probe(indio_dev->dev.parent, ret, 2168 "nvmem access error\n"); 2169 } 2170 if (ret == -ENOENT) { 2171 dev_dbg(&indio_dev->dev, "vrefint calibration not found. Skip vrefint channel\n"); 2172 return ret; 2173 } else if (!vrefint) { 2174 dev_dbg(&indio_dev->dev, "Null vrefint calibration value. 
Skip vrefint channel\n"); 2175 return -ENOENT; 2176 } 2177 adc->int_ch[i] = chan; 2178 adc->vrefint.vrefint_cal = vrefint; 2179 } 2180 } 2181 2182 return 0; 2183 } 2184 2185 static int stm32_adc_generic_chan_init(struct iio_dev *indio_dev, 2186 struct stm32_adc *adc, 2187 struct iio_chan_spec *channels) 2188 { 2189 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 2190 struct fwnode_handle *child; 2191 const char *name; 2192 int val, scan_index = 0, ret; 2193 bool differential; 2194 u32 vin[2]; 2195 2196 device_for_each_child_node(&indio_dev->dev, child) { 2197 ret = fwnode_property_read_u32(child, "reg", &val); 2198 if (ret) { 2199 dev_err(&indio_dev->dev, "Missing channel index %d\n", ret); 2200 goto err; 2201 } 2202 2203 ret = fwnode_property_read_string(child, "label", &name); 2204 /* label is optional */ 2205 if (!ret) { 2206 if (strlen(name) >= STM32_ADC_CH_SZ) { 2207 dev_err(&indio_dev->dev, "Label %s exceeds %d characters\n", 2208 name, STM32_ADC_CH_SZ); 2209 ret = -EINVAL; 2210 goto err; 2211 } 2212 strncpy(adc->chan_name[val], name, STM32_ADC_CH_SZ); 2213 ret = stm32_adc_populate_int_ch(indio_dev, name, val); 2214 if (ret == -ENOENT) 2215 continue; 2216 else if (ret) 2217 goto err; 2218 } else if (ret != -EINVAL) { 2219 dev_err(&indio_dev->dev, "Invalid label %d\n", ret); 2220 goto err; 2221 } 2222 2223 if (val >= adc_info->max_channels) { 2224 dev_err(&indio_dev->dev, "Invalid channel %d\n", val); 2225 ret = -EINVAL; 2226 goto err; 2227 } 2228 2229 differential = false; 2230 ret = fwnode_property_read_u32_array(child, "diff-channels", vin, 2); 2231 /* diff-channels is optional */ 2232 if (!ret) { 2233 differential = true; 2234 if (vin[0] != val || vin[1] >= adc_info->max_channels) { 2235 dev_err(&indio_dev->dev, "Invalid channel in%d-in%d\n", 2236 vin[0], vin[1]); 2237 ret = -EINVAL; 2238 goto err; 2239 } 2240 } else if (ret != -EINVAL) { 2241 dev_err(&indio_dev->dev, "Invalid diff-channels property %d\n", ret); 2242 goto err; 2243 } 2244 2245 stm32_adc_chan_init_one(indio_dev, &channels[scan_index], val, 2246 vin[1], scan_index, differential); 2247 2248 val = 0; 2249 ret = fwnode_property_read_u32(child, "st,min-sample-time-ns", &val); 2250 /* st,min-sample-time-ns is optional */ 2251 if (ret && ret != -EINVAL) { 2252 dev_err(&indio_dev->dev, "Invalid st,min-sample-time-ns property %d\n", 2253 ret); 2254 goto err; 2255 } 2256 2257 stm32_adc_smpr_init(adc, channels[scan_index].channel, val); 2258 if (differential) 2259 stm32_adc_smpr_init(adc, vin[1], val); 2260 2261 scan_index++; 2262 } 2263 2264 return scan_index; 2265 2266 err: 2267 fwnode_handle_put(child); 2268 2269 return ret; 2270 } 2271 2272 static int stm32_adc_chan_fw_init(struct iio_dev *indio_dev, bool timestamping) 2273 { 2274 struct stm32_adc *adc = iio_priv(indio_dev); 2275 const struct stm32_adc_info *adc_info = adc->cfg->adc_info; 2276 struct iio_chan_spec *channels; 2277 int scan_index = 0, num_channels = 0, ret, i; 2278 bool legacy = false; 2279 2280 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) 2281 adc->int_ch[i] = STM32_ADC_INT_CH_NONE; 2282 2283 num_channels = device_get_child_node_count(&indio_dev->dev); 2284 /* If no channels have been found, fallback to channels legacy properties. 
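	 * These legacy properties are the "st,adc-channels" and
	 * "st,adc-diff-channels" arrays counted by
	 * stm32_adc_get_legacy_chan_count() and parsed by
	 * stm32_adc_legacy_chan_init().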
*/ 2285 if (!num_channels) { 2286 legacy = true; 2287 2288 ret = stm32_adc_get_legacy_chan_count(indio_dev, adc); 2289 if (!ret) { 2290 dev_err(indio_dev->dev.parent, "No channel found\n"); 2291 return -ENODATA; 2292 } else if (ret < 0) { 2293 return ret; 2294 } 2295 2296 num_channels = ret; 2297 } 2298 2299 if (num_channels > adc_info->max_channels) { 2300 dev_err(&indio_dev->dev, "Channel number [%d] exceeds %d\n", 2301 num_channels, adc_info->max_channels); 2302 return -EINVAL; 2303 } 2304 2305 if (timestamping) 2306 num_channels++; 2307 2308 channels = devm_kcalloc(&indio_dev->dev, num_channels, 2309 sizeof(struct iio_chan_spec), GFP_KERNEL); 2310 if (!channels) 2311 return -ENOMEM; 2312 2313 if (legacy) 2314 ret = stm32_adc_legacy_chan_init(indio_dev, adc, channels, 2315 timestamping ? num_channels - 1 : num_channels); 2316 else 2317 ret = stm32_adc_generic_chan_init(indio_dev, adc, channels); 2318 if (ret < 0) 2319 return ret; 2320 scan_index = ret; 2321 2322 if (timestamping) { 2323 struct iio_chan_spec *timestamp = &channels[scan_index]; 2324 2325 timestamp->type = IIO_TIMESTAMP; 2326 timestamp->channel = -1; 2327 timestamp->scan_index = scan_index; 2328 timestamp->scan_type.sign = 's'; 2329 timestamp->scan_type.realbits = 64; 2330 timestamp->scan_type.storagebits = 64; 2331 2332 scan_index++; 2333 } 2334 2335 indio_dev->num_channels = scan_index; 2336 indio_dev->channels = channels; 2337 2338 return 0; 2339 } 2340 2341 static int stm32_adc_dma_request(struct device *dev, struct iio_dev *indio_dev) 2342 { 2343 struct stm32_adc *adc = iio_priv(indio_dev); 2344 struct dma_slave_config config; 2345 int ret; 2346 2347 adc->dma_chan = dma_request_chan(dev, "rx"); 2348 if (IS_ERR(adc->dma_chan)) { 2349 ret = PTR_ERR(adc->dma_chan); 2350 if (ret != -ENODEV) 2351 return dev_err_probe(dev, ret, 2352 "DMA channel request failed with\n"); 2353 2354 /* DMA is optional: fall back to IRQ mode */ 2355 adc->dma_chan = NULL; 2356 return 0; 2357 } 2358 2359 adc->rx_buf = dma_alloc_coherent(adc->dma_chan->device->dev, 2360 STM32_DMA_BUFFER_SIZE, 2361 &adc->rx_dma_buf, GFP_KERNEL); 2362 if (!adc->rx_buf) { 2363 ret = -ENOMEM; 2364 goto err_release; 2365 } 2366 2367 /* Configure DMA channel to read data register */ 2368 memset(&config, 0, sizeof(config)); 2369 config.src_addr = (dma_addr_t)adc->common->phys_base; 2370 config.src_addr += adc->offset + adc->cfg->regs->dr; 2371 config.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES; 2372 2373 ret = dmaengine_slave_config(adc->dma_chan, &config); 2374 if (ret) 2375 goto err_free; 2376 2377 return 0; 2378 2379 err_free: 2380 dma_free_coherent(adc->dma_chan->device->dev, STM32_DMA_BUFFER_SIZE, 2381 adc->rx_buf, adc->rx_dma_buf); 2382 err_release: 2383 dma_release_channel(adc->dma_chan); 2384 2385 return ret; 2386 } 2387 2388 static int stm32_adc_probe(struct platform_device *pdev) 2389 { 2390 struct iio_dev *indio_dev; 2391 struct device *dev = &pdev->dev; 2392 irqreturn_t (*handler)(int irq, void *p) = NULL; 2393 struct stm32_adc *adc; 2394 bool timestamping = false; 2395 int ret; 2396 2397 indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*adc)); 2398 if (!indio_dev) 2399 return -ENOMEM; 2400 2401 adc = iio_priv(indio_dev); 2402 adc->common = dev_get_drvdata(pdev->dev.parent); 2403 spin_lock_init(&adc->lock); 2404 init_completion(&adc->completion); 2405 adc->cfg = device_get_match_data(dev); 2406 2407 indio_dev->name = dev_name(&pdev->dev); 2408 device_set_node(&indio_dev->dev, dev_fwnode(&pdev->dev)); 2409 indio_dev->info = &stm32_adc_iio_info; 2410 
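	/*
	 * Both single conversions (direct mode) and hardware-triggered
	 * buffered captures are supported.
	 */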
indio_dev->modes = INDIO_DIRECT_MODE | INDIO_HARDWARE_TRIGGERED; 2411 2412 platform_set_drvdata(pdev, indio_dev); 2413 2414 ret = device_property_read_u32(dev, "reg", &adc->offset); 2415 if (ret != 0) { 2416 dev_err(&pdev->dev, "missing reg property\n"); 2417 return -EINVAL; 2418 } 2419 2420 adc->irq = platform_get_irq(pdev, 0); 2421 if (adc->irq < 0) 2422 return adc->irq; 2423 2424 ret = devm_request_threaded_irq(&pdev->dev, adc->irq, stm32_adc_isr, 2425 stm32_adc_threaded_isr, 2426 0, pdev->name, indio_dev); 2427 if (ret) { 2428 dev_err(&pdev->dev, "failed to request IRQ\n"); 2429 return ret; 2430 } 2431 2432 adc->clk = devm_clk_get(&pdev->dev, NULL); 2433 if (IS_ERR(adc->clk)) { 2434 ret = PTR_ERR(adc->clk); 2435 if (ret == -ENOENT && !adc->cfg->clk_required) { 2436 adc->clk = NULL; 2437 } else { 2438 dev_err(&pdev->dev, "Can't get clock\n"); 2439 return ret; 2440 } 2441 } 2442 2443 ret = stm32_adc_fw_get_resolution(indio_dev); 2444 if (ret < 0) 2445 return ret; 2446 2447 ret = stm32_adc_dma_request(dev, indio_dev); 2448 if (ret < 0) 2449 return ret; 2450 2451 if (!adc->dma_chan) { 2452 /* For PIO mode only, iio_pollfunc_store_time stores a timestamp 2453 * in the primary trigger IRQ handler and stm32_adc_trigger_handler 2454 * runs in the IRQ thread to push out buffer along with timestamp. 2455 */ 2456 handler = &stm32_adc_trigger_handler; 2457 timestamping = true; 2458 } 2459 2460 ret = stm32_adc_chan_fw_init(indio_dev, timestamping); 2461 if (ret < 0) 2462 goto err_dma_disable; 2463 2464 ret = iio_triggered_buffer_setup(indio_dev, 2465 &iio_pollfunc_store_time, handler, 2466 &stm32_adc_buffer_setup_ops); 2467 if (ret) { 2468 dev_err(&pdev->dev, "buffer setup failed\n"); 2469 goto err_dma_disable; 2470 } 2471 2472 /* Get stm32-adc-core PM online */ 2473 pm_runtime_get_noresume(dev); 2474 pm_runtime_set_active(dev); 2475 pm_runtime_set_autosuspend_delay(dev, STM32_ADC_HW_STOP_DELAY_MS); 2476 pm_runtime_use_autosuspend(dev); 2477 pm_runtime_enable(dev); 2478 2479 ret = stm32_adc_hw_start(dev); 2480 if (ret) 2481 goto err_buffer_cleanup; 2482 2483 ret = iio_device_register(indio_dev); 2484 if (ret) { 2485 dev_err(&pdev->dev, "iio dev register failed\n"); 2486 goto err_hw_stop; 2487 } 2488 2489 pm_runtime_mark_last_busy(dev); 2490 pm_runtime_put_autosuspend(dev); 2491 2492 if (IS_ENABLED(CONFIG_DEBUG_FS)) 2493 stm32_adc_debugfs_init(indio_dev); 2494 2495 return 0; 2496 2497 err_hw_stop: 2498 stm32_adc_hw_stop(dev); 2499 2500 err_buffer_cleanup: 2501 pm_runtime_disable(dev); 2502 pm_runtime_set_suspended(dev); 2503 pm_runtime_put_noidle(dev); 2504 iio_triggered_buffer_cleanup(indio_dev); 2505 2506 err_dma_disable: 2507 if (adc->dma_chan) { 2508 dma_free_coherent(adc->dma_chan->device->dev, 2509 STM32_DMA_BUFFER_SIZE, 2510 adc->rx_buf, adc->rx_dma_buf); 2511 dma_release_channel(adc->dma_chan); 2512 } 2513 2514 return ret; 2515 } 2516 2517 static int stm32_adc_remove(struct platform_device *pdev) 2518 { 2519 struct iio_dev *indio_dev = platform_get_drvdata(pdev); 2520 struct stm32_adc *adc = iio_priv(indio_dev); 2521 2522 pm_runtime_get_sync(&pdev->dev); 2523 /* iio_device_unregister() also removes debugfs entries */ 2524 iio_device_unregister(indio_dev); 2525 stm32_adc_hw_stop(&pdev->dev); 2526 pm_runtime_disable(&pdev->dev); 2527 pm_runtime_set_suspended(&pdev->dev); 2528 pm_runtime_put_noidle(&pdev->dev); 2529 iio_triggered_buffer_cleanup(indio_dev); 2530 if (adc->dma_chan) { 2531 dma_free_coherent(adc->dma_chan->device->dev, 2532 STM32_DMA_BUFFER_SIZE, 2533 adc->rx_buf, adc->rx_dma_buf); 
2534 dma_release_channel(adc->dma_chan); 2535 } 2536 2537 return 0; 2538 } 2539 2540 static int stm32_adc_suspend(struct device *dev) 2541 { 2542 struct iio_dev *indio_dev = dev_get_drvdata(dev); 2543 2544 if (iio_buffer_enabled(indio_dev)) 2545 stm32_adc_buffer_predisable(indio_dev); 2546 2547 return pm_runtime_force_suspend(dev); 2548 } 2549 2550 static int stm32_adc_resume(struct device *dev) 2551 { 2552 struct iio_dev *indio_dev = dev_get_drvdata(dev); 2553 int ret; 2554 2555 ret = pm_runtime_force_resume(dev); 2556 if (ret < 0) 2557 return ret; 2558 2559 if (!iio_buffer_enabled(indio_dev)) 2560 return 0; 2561 2562 ret = stm32_adc_update_scan_mode(indio_dev, 2563 indio_dev->active_scan_mask); 2564 if (ret < 0) 2565 return ret; 2566 2567 return stm32_adc_buffer_postenable(indio_dev); 2568 } 2569 2570 static int stm32_adc_runtime_suspend(struct device *dev) 2571 { 2572 return stm32_adc_hw_stop(dev); 2573 } 2574 2575 static int stm32_adc_runtime_resume(struct device *dev) 2576 { 2577 return stm32_adc_hw_start(dev); 2578 } 2579 2580 static const struct dev_pm_ops stm32_adc_pm_ops = { 2581 SYSTEM_SLEEP_PM_OPS(stm32_adc_suspend, stm32_adc_resume) 2582 RUNTIME_PM_OPS(stm32_adc_runtime_suspend, stm32_adc_runtime_resume, 2583 NULL) 2584 }; 2585 2586 static const struct stm32_adc_cfg stm32f4_adc_cfg = { 2587 .regs = &stm32f4_adc_regspec, 2588 .adc_info = &stm32f4_adc_info, 2589 .trigs = stm32f4_adc_trigs, 2590 .clk_required = true, 2591 .start_conv = stm32f4_adc_start_conv, 2592 .stop_conv = stm32f4_adc_stop_conv, 2593 .smp_cycles = stm32f4_adc_smp_cycles, 2594 .irq_clear = stm32f4_adc_irq_clear, 2595 }; 2596 2597 static const unsigned int stm32_adc_min_ts_h7[] = { 0, 0, 0, 4300, 9000 }; 2598 static_assert(ARRAY_SIZE(stm32_adc_min_ts_h7) == STM32_ADC_INT_CH_NB); 2599 2600 static const struct stm32_adc_cfg stm32h7_adc_cfg = { 2601 .regs = &stm32h7_adc_regspec, 2602 .adc_info = &stm32h7_adc_info, 2603 .trigs = stm32h7_adc_trigs, 2604 .has_boostmode = true, 2605 .has_linearcal = true, 2606 .has_presel = true, 2607 .start_conv = stm32h7_adc_start_conv, 2608 .stop_conv = stm32h7_adc_stop_conv, 2609 .prepare = stm32h7_adc_prepare, 2610 .unprepare = stm32h7_adc_unprepare, 2611 .smp_cycles = stm32h7_adc_smp_cycles, 2612 .irq_clear = stm32h7_adc_irq_clear, 2613 .ts_int_ch = stm32_adc_min_ts_h7, 2614 }; 2615 2616 static const unsigned int stm32_adc_min_ts_mp1[] = { 100, 100, 100, 4300, 9800 }; 2617 static_assert(ARRAY_SIZE(stm32_adc_min_ts_mp1) == STM32_ADC_INT_CH_NB); 2618 2619 static const struct stm32_adc_cfg stm32mp1_adc_cfg = { 2620 .regs = &stm32mp1_adc_regspec, 2621 .adc_info = &stm32h7_adc_info, 2622 .trigs = stm32h7_adc_trigs, 2623 .has_vregready = true, 2624 .has_boostmode = true, 2625 .has_linearcal = true, 2626 .has_presel = true, 2627 .start_conv = stm32h7_adc_start_conv, 2628 .stop_conv = stm32h7_adc_stop_conv, 2629 .prepare = stm32h7_adc_prepare, 2630 .unprepare = stm32h7_adc_unprepare, 2631 .smp_cycles = stm32h7_adc_smp_cycles, 2632 .irq_clear = stm32h7_adc_irq_clear, 2633 .ts_int_ch = stm32_adc_min_ts_mp1, 2634 }; 2635 2636 static const unsigned int stm32_adc_min_ts_mp13[] = { 100, 0, 0, 4300, 9800 }; 2637 static_assert(ARRAY_SIZE(stm32_adc_min_ts_mp13) == STM32_ADC_INT_CH_NB); 2638 2639 static const struct stm32_adc_cfg stm32mp13_adc_cfg = { 2640 .regs = &stm32mp13_adc_regspec, 2641 .adc_info = &stm32mp13_adc_info, 2642 .trigs = stm32h7_adc_trigs, 2643 .start_conv = stm32mp13_adc_start_conv, 2644 .stop_conv = stm32h7_adc_stop_conv, 2645 .prepare = stm32h7_adc_prepare, 2646 .unprepare = 
stm32h7_adc_unprepare, 2647 .smp_cycles = stm32mp13_adc_smp_cycles, 2648 .irq_clear = stm32h7_adc_irq_clear, 2649 .ts_int_ch = stm32_adc_min_ts_mp13, 2650 }; 2651 2652 static const struct of_device_id stm32_adc_of_match[] = { 2653 { .compatible = "st,stm32f4-adc", .data = (void *)&stm32f4_adc_cfg }, 2654 { .compatible = "st,stm32h7-adc", .data = (void *)&stm32h7_adc_cfg }, 2655 { .compatible = "st,stm32mp1-adc", .data = (void *)&stm32mp1_adc_cfg }, 2656 { .compatible = "st,stm32mp13-adc", .data = (void *)&stm32mp13_adc_cfg }, 2657 {}, 2658 }; 2659 MODULE_DEVICE_TABLE(of, stm32_adc_of_match); 2660 2661 static struct platform_driver stm32_adc_driver = { 2662 .probe = stm32_adc_probe, 2663 .remove = stm32_adc_remove, 2664 .driver = { 2665 .name = "stm32-adc", 2666 .of_match_table = stm32_adc_of_match, 2667 .pm = pm_ptr(&stm32_adc_pm_ops), 2668 }, 2669 }; 2670 module_platform_driver(stm32_adc_driver); 2671 2672 MODULE_AUTHOR("Fabrice Gasnier <fabrice.gasnier@st.com>"); 2673 MODULE_DESCRIPTION("STMicroelectronics STM32 ADC IIO driver"); 2674 MODULE_LICENSE("GPL v2"); 2675 MODULE_ALIAS("platform:stm32-adc"); 2676