1 // SPDX-License-Identifier: GPL-2.0-only 2 /* 3 * Copyright (c) 2014, NVIDIA CORPORATION. All rights reserved. 4 * 5 * Author: 6 * Mikko Perttunen <mperttunen@nvidia.com> 7 */ 8 9 #include <linux/clk-provider.h> 10 #include <linux/clk.h> 11 #include <linux/clkdev.h> 12 #include <linux/debugfs.h> 13 #include <linux/delay.h> 14 #include <linux/io.h> 15 #include <linux/of_address.h> 16 #include <linux/of_platform.h> 17 #include <linux/platform_device.h> 18 #include <linux/sort.h> 19 #include <linux/string.h> 20 21 #include <soc/tegra/emc.h> 22 #include <soc/tegra/fuse.h> 23 #include <soc/tegra/mc.h> 24 25 #define EMC_FBIO_CFG5 0x104 26 #define EMC_FBIO_CFG5_DRAM_TYPE_MASK 0x3 27 #define EMC_FBIO_CFG5_DRAM_TYPE_SHIFT 0 28 29 #define EMC_INTSTATUS 0x0 30 #define EMC_INTSTATUS_CLKCHANGE_COMPLETE BIT(4) 31 32 #define EMC_CFG 0xc 33 #define EMC_CFG_DRAM_CLKSTOP_PD BIT(31) 34 #define EMC_CFG_DRAM_CLKSTOP_SR BIT(30) 35 #define EMC_CFG_DRAM_ACPD BIT(29) 36 #define EMC_CFG_DYN_SREF BIT(28) 37 #define EMC_CFG_PWR_MASK ((0xF << 28) | BIT(18)) 38 #define EMC_CFG_DSR_VTTGEN_DRV_EN BIT(18) 39 40 #define EMC_REFCTRL 0x20 41 #define EMC_REFCTRL_DEV_SEL_SHIFT 0 42 #define EMC_REFCTRL_ENABLE BIT(31) 43 44 #define EMC_TIMING_CONTROL 0x28 45 #define EMC_RC 0x2c 46 #define EMC_RFC 0x30 47 #define EMC_RAS 0x34 48 #define EMC_RP 0x38 49 #define EMC_R2W 0x3c 50 #define EMC_W2R 0x40 51 #define EMC_R2P 0x44 52 #define EMC_W2P 0x48 53 #define EMC_RD_RCD 0x4c 54 #define EMC_WR_RCD 0x50 55 #define EMC_RRD 0x54 56 #define EMC_REXT 0x58 57 #define EMC_WDV 0x5c 58 #define EMC_QUSE 0x60 59 #define EMC_QRST 0x64 60 #define EMC_QSAFE 0x68 61 #define EMC_RDV 0x6c 62 #define EMC_REFRESH 0x70 63 #define EMC_BURST_REFRESH_NUM 0x74 64 #define EMC_PDEX2WR 0x78 65 #define EMC_PDEX2RD 0x7c 66 #define EMC_PCHG2PDEN 0x80 67 #define EMC_ACT2PDEN 0x84 68 #define EMC_AR2PDEN 0x88 69 #define EMC_RW2PDEN 0x8c 70 #define EMC_TXSR 0x90 71 #define EMC_TCKE 0x94 72 #define EMC_TFAW 0x98 73 #define EMC_TRPAB 0x9c 74 #define EMC_TCLKSTABLE 0xa0 75 #define EMC_TCLKSTOP 0xa4 76 #define EMC_TREFBW 0xa8 77 #define EMC_ODT_WRITE 0xb0 78 #define EMC_ODT_READ 0xb4 79 #define EMC_WEXT 0xb8 80 #define EMC_CTT 0xbc 81 #define EMC_RFC_SLR 0xc0 82 #define EMC_MRS_WAIT_CNT2 0xc4 83 84 #define EMC_MRS_WAIT_CNT 0xc8 85 #define EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT 0 86 #define EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK \ 87 (0x3FF << EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT) 88 #define EMC_MRS_WAIT_CNT_LONG_WAIT_SHIFT 16 89 #define EMC_MRS_WAIT_CNT_LONG_WAIT_MASK \ 90 (0x3FF << EMC_MRS_WAIT_CNT_LONG_WAIT_SHIFT) 91 92 #define EMC_MRS 0xcc 93 #define EMC_MODE_SET_DLL_RESET BIT(8) 94 #define EMC_MODE_SET_LONG_CNT BIT(26) 95 #define EMC_EMRS 0xd0 96 #define EMC_REF 0xd4 97 #define EMC_PRE 0xd8 98 99 #define EMC_SELF_REF 0xe0 100 #define EMC_SELF_REF_CMD_ENABLED BIT(0) 101 #define EMC_SELF_REF_DEV_SEL_SHIFT 30 102 103 #define EMC_MRW 0xe8 104 105 #define EMC_MRR 0xec 106 #define EMC_MRR_MA_SHIFT 16 107 #define LPDDR2_MR4_TEMP_SHIFT 0 108 109 #define EMC_XM2DQSPADCTRL3 0xf8 110 #define EMC_FBIO_SPARE 0x100 111 112 #define EMC_FBIO_CFG6 0x114 113 #define EMC_EMRS2 0x12c 114 #define EMC_MRW2 0x134 115 #define EMC_MRW4 0x13c 116 #define EMC_EINPUT 0x14c 117 #define EMC_EINPUT_DURATION 0x150 118 #define EMC_PUTERM_EXTRA 0x154 119 #define EMC_TCKESR 0x158 120 #define EMC_TPD 0x15c 121 122 #define EMC_AUTO_CAL_CONFIG 0x2a4 123 #define EMC_AUTO_CAL_CONFIG_AUTO_CAL_START BIT(31) 124 #define EMC_AUTO_CAL_INTERVAL 0x2a8 125 #define EMC_AUTO_CAL_STATUS 0x2ac 126 #define EMC_AUTO_CAL_STATUS_ACTIVE BIT(31) 127 #define EMC_STATUS 0x2b4 128 #define EMC_STATUS_TIMING_UPDATE_STALLED BIT(23) 129 130 #define EMC_CFG_2 0x2b8 131 #define EMC_CFG_2_MODE_SHIFT 0 132 #define EMC_CFG_2_DIS_STP_OB_CLK_DURING_NON_WR BIT(6) 133 134 #define EMC_CFG_DIG_DLL 0x2bc 135 #define EMC_CFG_DIG_DLL_PERIOD 0x2c0 136 #define EMC_RDV_MASK 0x2cc 137 #define EMC_WDV_MASK 0x2d0 138 #define EMC_CTT_DURATION 0x2d8 139 #define EMC_CTT_TERM_CTRL 0x2dc 140 #define EMC_ZCAL_INTERVAL 0x2e0 141 #define EMC_ZCAL_WAIT_CNT 0x2e4 142 143 #define EMC_ZQ_CAL 0x2ec 144 #define EMC_ZQ_CAL_CMD BIT(0) 145 #define EMC_ZQ_CAL_LONG BIT(4) 146 #define EMC_ZQ_CAL_LONG_CMD_DEV0 \ 147 (DRAM_DEV_SEL_0 | EMC_ZQ_CAL_LONG | EMC_ZQ_CAL_CMD) 148 #define EMC_ZQ_CAL_LONG_CMD_DEV1 \ 149 (DRAM_DEV_SEL_1 | EMC_ZQ_CAL_LONG | EMC_ZQ_CAL_CMD) 150 151 #define EMC_XM2CMDPADCTRL 0x2f0 152 #define EMC_XM2DQSPADCTRL 0x2f8 153 #define EMC_XM2DQSPADCTRL2 0x2fc 154 #define EMC_XM2DQSPADCTRL2_RX_FT_REC_ENABLE BIT(0) 155 #define EMC_XM2DQSPADCTRL2_VREF_ENABLE BIT(5) 156 #define EMC_XM2DQPADCTRL 0x300 157 #define EMC_XM2DQPADCTRL2 0x304 158 #define EMC_XM2CLKPADCTRL 0x308 159 #define EMC_XM2COMPPADCTRL 0x30c 160 #define EMC_XM2VTTGENPADCTRL 0x310 161 #define EMC_XM2VTTGENPADCTRL2 0x314 162 #define EMC_XM2VTTGENPADCTRL3 0x318 163 #define EMC_XM2DQSPADCTRL4 0x320 164 #define EMC_DLL_XFORM_DQS0 0x328 165 #define EMC_DLL_XFORM_DQS1 0x32c 166 #define EMC_DLL_XFORM_DQS2 0x330 167 #define EMC_DLL_XFORM_DQS3 0x334 168 #define EMC_DLL_XFORM_DQS4 0x338 169 #define EMC_DLL_XFORM_DQS5 0x33c 170 #define EMC_DLL_XFORM_DQS6 0x340 171 #define EMC_DLL_XFORM_DQS7 0x344 172 #define EMC_DLL_XFORM_QUSE0 0x348 173 #define EMC_DLL_XFORM_QUSE1 0x34c 174 #define EMC_DLL_XFORM_QUSE2 0x350 175 #define EMC_DLL_XFORM_QUSE3 0x354 176 #define EMC_DLL_XFORM_QUSE4 0x358 177 #define EMC_DLL_XFORM_QUSE5 0x35c 178 #define EMC_DLL_XFORM_QUSE6 0x360 179 #define EMC_DLL_XFORM_QUSE7 0x364 180 #define EMC_DLL_XFORM_DQ0 0x368 181 #define EMC_DLL_XFORM_DQ1 0x36c 182 #define EMC_DLL_XFORM_DQ2 0x370 183 #define EMC_DLL_XFORM_DQ3 0x374 184 #define EMC_DLI_TRIM_TXDQS0 0x3a8 185 #define EMC_DLI_TRIM_TXDQS1 0x3ac 186 #define EMC_DLI_TRIM_TXDQS2 0x3b0 187 #define EMC_DLI_TRIM_TXDQS3 0x3b4 188 #define EMC_DLI_TRIM_TXDQS4 0x3b8 189 #define EMC_DLI_TRIM_TXDQS5 0x3bc 190 #define EMC_DLI_TRIM_TXDQS6 0x3c0 191 #define EMC_DLI_TRIM_TXDQS7 0x3c4 192 #define EMC_STALL_THEN_EXE_AFTER_CLKCHANGE 0x3cc 193 #define EMC_SEL_DPD_CTRL 0x3d8 194 #define EMC_SEL_DPD_CTRL_DATA_SEL_DPD BIT(8) 195 #define EMC_SEL_DPD_CTRL_ODT_SEL_DPD BIT(5) 196 #define EMC_SEL_DPD_CTRL_RESET_SEL_DPD BIT(4) 197 #define EMC_SEL_DPD_CTRL_CA_SEL_DPD BIT(3) 198 #define EMC_SEL_DPD_CTRL_CLK_SEL_DPD BIT(2) 199 #define EMC_SEL_DPD_CTRL_DDR3_MASK \ 200 ((0xf << 2) | BIT(8)) 201 #define EMC_SEL_DPD_CTRL_MASK \ 202 ((0x3 << 2) | BIT(5) | BIT(8)) 203 #define EMC_PRE_REFRESH_REQ_CNT 0x3dc 204 #define EMC_DYN_SELF_REF_CONTROL 0x3e0 205 #define EMC_TXSRDLL 0x3e4 206 #define EMC_CCFIFO_ADDR 0x3e8 207 #define EMC_CCFIFO_DATA 0x3ec 208 #define EMC_CCFIFO_STATUS 0x3f0 209 #define EMC_CDB_CNTL_1 0x3f4 210 #define EMC_CDB_CNTL_2 0x3f8 211 #define EMC_XM2CLKPADCTRL2 0x3fc 212 #define EMC_AUTO_CAL_CONFIG2 0x458 213 #define EMC_AUTO_CAL_CONFIG3 0x45c 214 #define EMC_IBDLY 0x468 215 #define EMC_DLL_XFORM_ADDR0 0x46c 216 #define EMC_DLL_XFORM_ADDR1 0x470 217 #define EMC_DLL_XFORM_ADDR2 0x474 218 #define EMC_DSR_VTTGEN_DRV 0x47c 219 #define EMC_TXDSRVTTGEN 0x480 220 #define EMC_XM2CMDPADCTRL4 0x484 221 #define EMC_XM2CMDPADCTRL5 0x488 222 #define EMC_DLL_XFORM_DQS8 0x4a0 223 #define EMC_DLL_XFORM_DQS9 0x4a4 224 #define EMC_DLL_XFORM_DQS10 0x4a8 225 #define EMC_DLL_XFORM_DQS11 0x4ac 226 #define EMC_DLL_XFORM_DQS12 0x4b0 227 #define EMC_DLL_XFORM_DQS13 0x4b4 228 #define EMC_DLL_XFORM_DQS14 0x4b8 229 #define EMC_DLL_XFORM_DQS15 0x4bc 230 #define EMC_DLL_XFORM_QUSE8 0x4c0 231 #define EMC_DLL_XFORM_QUSE9 0x4c4 232 #define EMC_DLL_XFORM_QUSE10 0x4c8 233 #define EMC_DLL_XFORM_QUSE11 0x4cc 234 #define EMC_DLL_XFORM_QUSE12 0x4d0 235 #define EMC_DLL_XFORM_QUSE13 0x4d4 236 #define EMC_DLL_XFORM_QUSE14 0x4d8 237 #define EMC_DLL_XFORM_QUSE15 0x4dc 238 #define EMC_DLL_XFORM_DQ4 0x4e0 239 #define EMC_DLL_XFORM_DQ5 0x4e4 240 #define EMC_DLL_XFORM_DQ6 0x4e8 241 #define EMC_DLL_XFORM_DQ7 0x4ec 242 #define EMC_DLI_TRIM_TXDQS8 0x520 243 #define EMC_DLI_TRIM_TXDQS9 0x524 244 #define EMC_DLI_TRIM_TXDQS10 0x528 245 #define EMC_DLI_TRIM_TXDQS11 0x52c 246 #define EMC_DLI_TRIM_TXDQS12 0x530 247 #define EMC_DLI_TRIM_TXDQS13 0x534 248 #define EMC_DLI_TRIM_TXDQS14 0x538 249 #define EMC_DLI_TRIM_TXDQS15 0x53c 250 #define EMC_CDB_CNTL_3 0x540 251 #define EMC_XM2DQSPADCTRL5 0x544 252 #define EMC_XM2DQSPADCTRL6 0x548 253 #define EMC_XM2DQPADCTRL3 0x54c 254 #define EMC_DLL_XFORM_ADDR3 0x550 255 #define EMC_DLL_XFORM_ADDR4 0x554 256 #define EMC_DLL_XFORM_ADDR5 0x558 257 #define EMC_CFG_PIPE 0x560 258 #define EMC_QPOP 0x564 259 #define EMC_QUSE_WIDTH 0x568 260 #define EMC_PUTERM_WIDTH 0x56c 261 #define EMC_BGBIAS_CTL0 0x570 262 #define EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_RX BIT(3) 263 #define EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_VTTGEN BIT(2) 264 #define EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD BIT(1) 265 #define EMC_PUTERM_ADJ 0x574 266 267 #define DRAM_DEV_SEL_ALL 0 268 #define DRAM_DEV_SEL_0 (2 << 30) 269 #define DRAM_DEV_SEL_1 (1 << 30) 270 271 #define EMC_CFG_POWER_FEATURES_MASK \ 272 (EMC_CFG_DYN_SREF | EMC_CFG_DRAM_ACPD | EMC_CFG_DRAM_CLKSTOP_SR | \ 273 EMC_CFG_DRAM_CLKSTOP_PD | EMC_CFG_DSR_VTTGEN_DRV_EN) 274 #define EMC_REFCTRL_DEV_SEL(n) (((n > 1) ? 0 : 2) << EMC_REFCTRL_DEV_SEL_SHIFT) 275 #define EMC_DRAM_DEV_SEL(n) ((n > 1) ? DRAM_DEV_SEL_ALL : DRAM_DEV_SEL_0) 276 277 /* Maximum amount of time in us. to wait for changes to become effective */ 278 #define EMC_STATUS_UPDATE_TIMEOUT 1000 279 280 enum emc_dram_type { 281 DRAM_TYPE_DDR3 = 0, 282 DRAM_TYPE_DDR1 = 1, 283 DRAM_TYPE_LPDDR3 = 2, 284 DRAM_TYPE_DDR2 = 3 285 }; 286 287 enum emc_dll_change { 288 DLL_CHANGE_NONE, 289 DLL_CHANGE_ON, 290 DLL_CHANGE_OFF 291 }; 292 293 static const unsigned long emc_burst_regs[] = { 294 EMC_RC, 295 EMC_RFC, 296 EMC_RFC_SLR, 297 EMC_RAS, 298 EMC_RP, 299 EMC_R2W, 300 EMC_W2R, 301 EMC_R2P, 302 EMC_W2P, 303 EMC_RD_RCD, 304 EMC_WR_RCD, 305 EMC_RRD, 306 EMC_REXT, 307 EMC_WEXT, 308 EMC_WDV, 309 EMC_WDV_MASK, 310 EMC_QUSE, 311 EMC_QUSE_WIDTH, 312 EMC_IBDLY, 313 EMC_EINPUT, 314 EMC_EINPUT_DURATION, 315 EMC_PUTERM_EXTRA, 316 EMC_PUTERM_WIDTH, 317 EMC_PUTERM_ADJ, 318 EMC_CDB_CNTL_1, 319 EMC_CDB_CNTL_2, 320 EMC_CDB_CNTL_3, 321 EMC_QRST, 322 EMC_QSAFE, 323 EMC_RDV, 324 EMC_RDV_MASK, 325 EMC_REFRESH, 326 EMC_BURST_REFRESH_NUM, 327 EMC_PRE_REFRESH_REQ_CNT, 328 EMC_PDEX2WR, 329 EMC_PDEX2RD, 330 EMC_PCHG2PDEN, 331 EMC_ACT2PDEN, 332 EMC_AR2PDEN, 333 EMC_RW2PDEN, 334 EMC_TXSR, 335 EMC_TXSRDLL, 336 EMC_TCKE, 337 EMC_TCKESR, 338 EMC_TPD, 339 EMC_TFAW, 340 EMC_TRPAB, 341 EMC_TCLKSTABLE, 342 EMC_TCLKSTOP, 343 EMC_TREFBW, 344 EMC_FBIO_CFG6, 345 EMC_ODT_WRITE, 346 EMC_ODT_READ, 347 EMC_FBIO_CFG5, 348 EMC_CFG_DIG_DLL, 349 EMC_CFG_DIG_DLL_PERIOD, 350 EMC_DLL_XFORM_DQS0, 351 EMC_DLL_XFORM_DQS1, 352 EMC_DLL_XFORM_DQS2, 353 EMC_DLL_XFORM_DQS3, 354 EMC_DLL_XFORM_DQS4, 355 EMC_DLL_XFORM_DQS5, 356 EMC_DLL_XFORM_DQS6, 357 EMC_DLL_XFORM_DQS7, 358 EMC_DLL_XFORM_DQS8, 359 EMC_DLL_XFORM_DQS9, 360 EMC_DLL_XFORM_DQS10, 361 EMC_DLL_XFORM_DQS11, 362 EMC_DLL_XFORM_DQS12, 363 EMC_DLL_XFORM_DQS13, 364 EMC_DLL_XFORM_DQS14, 365 EMC_DLL_XFORM_DQS15, 366 EMC_DLL_XFORM_QUSE0, 367 EMC_DLL_XFORM_QUSE1, 368 EMC_DLL_XFORM_QUSE2, 369 EMC_DLL_XFORM_QUSE3, 370 EMC_DLL_XFORM_QUSE4, 371 EMC_DLL_XFORM_QUSE5, 372 EMC_DLL_XFORM_QUSE6, 373 EMC_DLL_XFORM_QUSE7, 374 EMC_DLL_XFORM_ADDR0, 375 EMC_DLL_XFORM_ADDR1, 376 EMC_DLL_XFORM_ADDR2, 377 EMC_DLL_XFORM_ADDR3, 378 EMC_DLL_XFORM_ADDR4, 379 EMC_DLL_XFORM_ADDR5, 380 EMC_DLL_XFORM_QUSE8, 381 EMC_DLL_XFORM_QUSE9, 382 EMC_DLL_XFORM_QUSE10, 383 EMC_DLL_XFORM_QUSE11, 384 EMC_DLL_XFORM_QUSE12, 385 EMC_DLL_XFORM_QUSE13, 386 EMC_DLL_XFORM_QUSE14, 387 EMC_DLL_XFORM_QUSE15, 388 EMC_DLI_TRIM_TXDQS0, 389 EMC_DLI_TRIM_TXDQS1, 390 EMC_DLI_TRIM_TXDQS2, 391 EMC_DLI_TRIM_TXDQS3, 392 EMC_DLI_TRIM_TXDQS4, 393 EMC_DLI_TRIM_TXDQS5, 394 EMC_DLI_TRIM_TXDQS6, 395 EMC_DLI_TRIM_TXDQS7, 396 EMC_DLI_TRIM_TXDQS8, 397 EMC_DLI_TRIM_TXDQS9, 398 EMC_DLI_TRIM_TXDQS10, 399 EMC_DLI_TRIM_TXDQS11, 400 EMC_DLI_TRIM_TXDQS12, 401 EMC_DLI_TRIM_TXDQS13, 402 EMC_DLI_TRIM_TXDQS14, 403 EMC_DLI_TRIM_TXDQS15, 404 EMC_DLL_XFORM_DQ0, 405 EMC_DLL_XFORM_DQ1, 406 EMC_DLL_XFORM_DQ2, 407 EMC_DLL_XFORM_DQ3, 408 EMC_DLL_XFORM_DQ4, 409 EMC_DLL_XFORM_DQ5, 410 EMC_DLL_XFORM_DQ6, 411 EMC_DLL_XFORM_DQ7, 412 EMC_XM2CMDPADCTRL, 413 EMC_XM2CMDPADCTRL4, 414 EMC_XM2CMDPADCTRL5, 415 EMC_XM2DQPADCTRL2, 416 EMC_XM2DQPADCTRL3, 417 EMC_XM2CLKPADCTRL, 418 EMC_XM2CLKPADCTRL2, 419 EMC_XM2COMPPADCTRL, 420 EMC_XM2VTTGENPADCTRL, 421 EMC_XM2VTTGENPADCTRL2, 422 EMC_XM2VTTGENPADCTRL3, 423 EMC_XM2DQSPADCTRL3, 424 EMC_XM2DQSPADCTRL4, 425 EMC_XM2DQSPADCTRL5, 426 EMC_XM2DQSPADCTRL6, 427 EMC_DSR_VTTGEN_DRV, 428 EMC_TXDSRVTTGEN, 429 EMC_FBIO_SPARE, 430 EMC_ZCAL_WAIT_CNT, 431 EMC_MRS_WAIT_CNT2, 432 EMC_CTT, 433 EMC_CTT_DURATION, 434 EMC_CFG_PIPE, 435 EMC_DYN_SELF_REF_CONTROL, 436 EMC_QPOP 437 }; 438 439 struct emc_timing { 440 unsigned long rate; 441 442 u32 emc_burst_data[ARRAY_SIZE(emc_burst_regs)]; 443 444 u32 emc_auto_cal_config; 445 u32 emc_auto_cal_config2; 446 u32 emc_auto_cal_config3; 447 u32 emc_auto_cal_interval; 448 u32 emc_bgbias_ctl0; 449 u32 emc_cfg; 450 u32 emc_cfg_2; 451 u32 emc_ctt_term_ctrl; 452 u32 emc_mode_1; 453 u32 emc_mode_2; 454 u32 emc_mode_4; 455 u32 emc_mode_reset; 456 u32 emc_mrs_wait_cnt; 457 u32 emc_sel_dpd_ctrl; 458 u32 emc_xm2dqspadctrl2; 459 u32 emc_zcal_cnt_long; 460 u32 emc_zcal_interval; 461 }; 462 463 struct tegra_emc { 464 struct device *dev; 465 466 struct tegra_mc *mc; 467 468 void __iomem *regs; 469 470 struct clk *clk; 471 472 enum emc_dram_type dram_type; 473 unsigned int dram_num; 474 475 struct emc_timing last_timing; 476 struct emc_timing *timings; 477 unsigned int num_timings; 478 479 struct { 480 struct dentry *root; 481 unsigned long min_rate; 482 unsigned long max_rate; 483 } debugfs; 484 }; 485 486 /* Timing change sequence functions */ 487 488 static void emc_ccfifo_writel(struct tegra_emc *emc, u32 value, 489 unsigned long offset) 490 { 491 writel(value, emc->regs + EMC_CCFIFO_DATA); 492 writel(offset, emc->regs + EMC_CCFIFO_ADDR); 493 } 494 495 static void emc_seq_update_timing(struct tegra_emc *emc) 496 { 497 unsigned int i; 498 u32 value; 499 500 writel(1, emc->regs + EMC_TIMING_CONTROL); 501 502 for (i = 0; i < EMC_STATUS_UPDATE_TIMEOUT; ++i) { 503 value = readl(emc->regs + EMC_STATUS); 504 if ((value & EMC_STATUS_TIMING_UPDATE_STALLED) == 0) 505 return; 506 udelay(1); 507 } 508 509 dev_err(emc->dev, "timing update timed out\n"); 510 } 511 512 static void emc_seq_disable_auto_cal(struct tegra_emc *emc) 513 { 514 unsigned int i; 515 u32 value; 516 517 writel(0, emc->regs + EMC_AUTO_CAL_INTERVAL); 518 519 for (i = 0; i < EMC_STATUS_UPDATE_TIMEOUT; ++i) { 520 value = readl(emc->regs + EMC_AUTO_CAL_STATUS); 521 if ((value & EMC_AUTO_CAL_STATUS_ACTIVE) == 0) 522 return; 523 udelay(1); 524 } 525 526 dev_err(emc->dev, "auto cal disable timed out\n"); 527 } 528 529 static void emc_seq_wait_clkchange(struct tegra_emc *emc) 530 { 531 unsigned int i; 532 u32 value; 533 534 for (i = 0; i < EMC_STATUS_UPDATE_TIMEOUT; ++i) { 535 value = readl(emc->regs + EMC_INTSTATUS); 536 if (value & EMC_INTSTATUS_CLKCHANGE_COMPLETE) 537 return; 538 udelay(1); 539 } 540 541 dev_err(emc->dev, "clock change timed out\n"); 542 } 543 544 static struct emc_timing *tegra_emc_find_timing(struct tegra_emc *emc, 545 unsigned long rate) 546 { 547 struct emc_timing *timing = NULL; 548 unsigned int i; 549 550 for (i = 0; i < emc->num_timings; i++) { 551 if (emc->timings[i].rate == rate) { 552 timing = &emc->timings[i]; 553 break; 554 } 555 } 556 557 if (!timing) { 558 dev_err(emc->dev, "no timing for rate %lu\n", rate); 559 return NULL; 560 } 561 562 return timing; 563 } 564 565 int tegra_emc_prepare_timing_change(struct tegra_emc *emc, 566 unsigned long rate) 567 { 568 struct emc_timing *timing = tegra_emc_find_timing(emc, rate); 569 struct emc_timing *last = &emc->last_timing; 570 enum emc_dll_change dll_change; 571 unsigned int pre_wait = 0; 572 u32 val, val2, mask; 573 bool update = false; 574 unsigned int i; 575 576 if (!timing) 577 return -ENOENT; 578 579 if ((last->emc_mode_1 & 0x1) == (timing->emc_mode_1 & 0x1)) 580 dll_change = DLL_CHANGE_NONE; 581 else if (timing->emc_mode_1 & 0x1) 582 dll_change = DLL_CHANGE_ON; 583 else 584 dll_change = DLL_CHANGE_OFF; 585 586 /* Clear CLKCHANGE_COMPLETE interrupts */ 587 writel(EMC_INTSTATUS_CLKCHANGE_COMPLETE, emc->regs + EMC_INTSTATUS); 588 589 /* Disable dynamic self-refresh */ 590 val = readl(emc->regs + EMC_CFG); 591 if (val & EMC_CFG_PWR_MASK) { 592 val &= ~EMC_CFG_POWER_FEATURES_MASK; 593 writel(val, emc->regs + EMC_CFG); 594 595 pre_wait = 5; 596 } 597 598 /* Disable SEL_DPD_CTRL for clock change */ 599 if (emc->dram_type == DRAM_TYPE_DDR3) 600 mask = EMC_SEL_DPD_CTRL_DDR3_MASK; 601 else 602 mask = EMC_SEL_DPD_CTRL_MASK; 603 604 val = readl(emc->regs + EMC_SEL_DPD_CTRL); 605 if (val & mask) { 606 val &= ~mask; 607 writel(val, emc->regs + EMC_SEL_DPD_CTRL); 608 } 609 610 /* Prepare DQ/DQS for clock change */ 611 val = readl(emc->regs + EMC_BGBIAS_CTL0); 612 val2 = last->emc_bgbias_ctl0; 613 if (!(timing->emc_bgbias_ctl0 & 614 EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_RX) && 615 (val & EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_RX)) { 616 val2 &= ~EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_RX; 617 update = true; 618 } 619 620 if ((val & EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD) || 621 (val & EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_VTTGEN)) { 622 update = true; 623 } 624 625 if (update) { 626 writel(val2, emc->regs + EMC_BGBIAS_CTL0); 627 if (pre_wait < 5) 628 pre_wait = 5; 629 } 630 631 update = false; 632 val = readl(emc->regs + EMC_XM2DQSPADCTRL2); 633 if (timing->emc_xm2dqspadctrl2 & EMC_XM2DQSPADCTRL2_VREF_ENABLE && 634 !(val & EMC_XM2DQSPADCTRL2_VREF_ENABLE)) { 635 val |= EMC_XM2DQSPADCTRL2_VREF_ENABLE; 636 update = true; 637 } 638 639 if (timing->emc_xm2dqspadctrl2 & EMC_XM2DQSPADCTRL2_RX_FT_REC_ENABLE && 640 !(val & EMC_XM2DQSPADCTRL2_RX_FT_REC_ENABLE)) { 641 val |= EMC_XM2DQSPADCTRL2_RX_FT_REC_ENABLE; 642 update = true; 643 } 644 645 if (update) { 646 writel(val, emc->regs + EMC_XM2DQSPADCTRL2); 647 if (pre_wait < 30) 648 pre_wait = 30; 649 } 650 651 /* Wait to settle */ 652 if (pre_wait) { 653 emc_seq_update_timing(emc); 654 udelay(pre_wait); 655 } 656 657 /* Program CTT_TERM control */ 658 if (last->emc_ctt_term_ctrl != timing->emc_ctt_term_ctrl) { 659 emc_seq_disable_auto_cal(emc); 660 writel(timing->emc_ctt_term_ctrl, 661 emc->regs + EMC_CTT_TERM_CTRL); 662 emc_seq_update_timing(emc); 663 } 664 665 /* Program burst shadow registers */ 666 for (i = 0; i < ARRAY_SIZE(timing->emc_burst_data); ++i) 667 writel(timing->emc_burst_data[i], 668 emc->regs + emc_burst_regs[i]); 669 670 writel(timing->emc_xm2dqspadctrl2, emc->regs + EMC_XM2DQSPADCTRL2); 671 writel(timing->emc_zcal_interval, emc->regs + EMC_ZCAL_INTERVAL); 672 673 tegra_mc_write_emem_configuration(emc->mc, timing->rate); 674 675 val = timing->emc_cfg & ~EMC_CFG_POWER_FEATURES_MASK; 676 emc_ccfifo_writel(emc, val, EMC_CFG); 677 678 /* Program AUTO_CAL_CONFIG */ 679 if (timing->emc_auto_cal_config2 != last->emc_auto_cal_config2) 680 emc_ccfifo_writel(emc, timing->emc_auto_cal_config2, 681 EMC_AUTO_CAL_CONFIG2); 682 683 if (timing->emc_auto_cal_config3 != last->emc_auto_cal_config3) 684 emc_ccfifo_writel(emc, timing->emc_auto_cal_config3, 685 EMC_AUTO_CAL_CONFIG3); 686 687 if (timing->emc_auto_cal_config != last->emc_auto_cal_config) { 688 val = timing->emc_auto_cal_config; 689 val &= EMC_AUTO_CAL_CONFIG_AUTO_CAL_START; 690 emc_ccfifo_writel(emc, val, EMC_AUTO_CAL_CONFIG); 691 } 692 693 /* DDR3: predict MRS long wait count */ 694 if (emc->dram_type == DRAM_TYPE_DDR3 && 695 dll_change == DLL_CHANGE_ON) { 696 u32 cnt = 512; 697 698 if (timing->emc_zcal_interval != 0 && 699 last->emc_zcal_interval == 0) 700 cnt -= emc->dram_num * 256; 701 702 val = (timing->emc_mrs_wait_cnt 703 & EMC_MRS_WAIT_CNT_SHORT_WAIT_MASK) 704 >> EMC_MRS_WAIT_CNT_SHORT_WAIT_SHIFT; 705 if (cnt < val) 706 cnt = val; 707 708 val = timing->emc_mrs_wait_cnt 709 & ~EMC_MRS_WAIT_CNT_LONG_WAIT_MASK; 710 val |= (cnt << EMC_MRS_WAIT_CNT_LONG_WAIT_SHIFT) 711 & EMC_MRS_WAIT_CNT_LONG_WAIT_MASK; 712 713 writel(val, emc->regs + EMC_MRS_WAIT_CNT); 714 } 715 716 val = timing->emc_cfg_2; 717 val &= ~EMC_CFG_2_DIS_STP_OB_CLK_DURING_NON_WR; 718 emc_ccfifo_writel(emc, val, EMC_CFG_2); 719 720 /* DDR3: Turn off DLL and enter self-refresh */ 721 if (emc->dram_type == DRAM_TYPE_DDR3 && dll_change == DLL_CHANGE_OFF) 722 emc_ccfifo_writel(emc, timing->emc_mode_1, EMC_EMRS); 723 724 /* Disable refresh controller */ 725 emc_ccfifo_writel(emc, EMC_REFCTRL_DEV_SEL(emc->dram_num), 726 EMC_REFCTRL); 727 if (emc->dram_type == DRAM_TYPE_DDR3) 728 emc_ccfifo_writel(emc, EMC_DRAM_DEV_SEL(emc->dram_num) | 729 EMC_SELF_REF_CMD_ENABLED, 730 EMC_SELF_REF); 731 732 /* Flow control marker */ 733 emc_ccfifo_writel(emc, 1, EMC_STALL_THEN_EXE_AFTER_CLKCHANGE); 734 735 /* DDR3: Exit self-refresh */ 736 if (emc->dram_type == DRAM_TYPE_DDR3) 737 emc_ccfifo_writel(emc, EMC_DRAM_DEV_SEL(emc->dram_num), 738 EMC_SELF_REF); 739 emc_ccfifo_writel(emc, EMC_REFCTRL_DEV_SEL(emc->dram_num) | 740 EMC_REFCTRL_ENABLE, 741 EMC_REFCTRL); 742 743 /* Set DRAM mode registers */ 744 if (emc->dram_type == DRAM_TYPE_DDR3) { 745 if (timing->emc_mode_1 != last->emc_mode_1) 746 emc_ccfifo_writel(emc, timing->emc_mode_1, EMC_EMRS); 747 if (timing->emc_mode_2 != last->emc_mode_2) 748 emc_ccfifo_writel(emc, timing->emc_mode_2, EMC_EMRS2); 749 750 if ((timing->emc_mode_reset != last->emc_mode_reset) || 751 dll_change == DLL_CHANGE_ON) { 752 val = timing->emc_mode_reset; 753 if (dll_change == DLL_CHANGE_ON) { 754 val |= EMC_MODE_SET_DLL_RESET; 755 val |= EMC_MODE_SET_LONG_CNT; 756 } else { 757 val &= ~EMC_MODE_SET_DLL_RESET; 758 } 759 emc_ccfifo_writel(emc, val, EMC_MRS); 760 } 761 } else { 762 if (timing->emc_mode_2 != last->emc_mode_2) 763 emc_ccfifo_writel(emc, timing->emc_mode_2, EMC_MRW2); 764 if (timing->emc_mode_1 != last->emc_mode_1) 765 emc_ccfifo_writel(emc, timing->emc_mode_1, EMC_MRW); 766 if (timing->emc_mode_4 != last->emc_mode_4) 767 emc_ccfifo_writel(emc, timing->emc_mode_4, EMC_MRW4); 768 } 769 770 /* Issue ZCAL command if turning ZCAL on */ 771 if (timing->emc_zcal_interval != 0 && last->emc_zcal_interval == 0) { 772 emc_ccfifo_writel(emc, EMC_ZQ_CAL_LONG_CMD_DEV0, EMC_ZQ_CAL); 773 if (emc->dram_num > 1) 774 emc_ccfifo_writel(emc, EMC_ZQ_CAL_LONG_CMD_DEV1, 775 EMC_ZQ_CAL); 776 } 777 778 /* Write to RO register to remove stall after change */ 779 emc_ccfifo_writel(emc, 0, EMC_CCFIFO_STATUS); 780 781 if (timing->emc_cfg_2 & EMC_CFG_2_DIS_STP_OB_CLK_DURING_NON_WR) 782 emc_ccfifo_writel(emc, timing->emc_cfg_2, EMC_CFG_2); 783 784 /* Disable AUTO_CAL for clock change */ 785 emc_seq_disable_auto_cal(emc); 786 787 /* Read register to wait until programming has settled */ 788 readl(emc->regs + EMC_INTSTATUS); 789 790 return 0; 791 } 792 793 void tegra_emc_complete_timing_change(struct tegra_emc *emc, 794 unsigned long rate) 795 { 796 struct emc_timing *timing = tegra_emc_find_timing(emc, rate); 797 struct emc_timing *last = &emc->last_timing; 798 u32 val; 799 800 if (!timing) 801 return; 802 803 /* Wait until the state machine has settled */ 804 emc_seq_wait_clkchange(emc); 805 806 /* Restore AUTO_CAL */ 807 if (timing->emc_ctt_term_ctrl != last->emc_ctt_term_ctrl) 808 writel(timing->emc_auto_cal_interval, 809 emc->regs + EMC_AUTO_CAL_INTERVAL); 810 811 /* Restore dynamic self-refresh */ 812 if (timing->emc_cfg & EMC_CFG_PWR_MASK) 813 writel(timing->emc_cfg, emc->regs + EMC_CFG); 814 815 /* Set ZCAL wait count */ 816 writel(timing->emc_zcal_cnt_long, emc->regs + EMC_ZCAL_WAIT_CNT); 817 818 /* LPDDR3: Turn off BGBIAS if low frequency */ 819 if (emc->dram_type == DRAM_TYPE_LPDDR3 && 820 timing->emc_bgbias_ctl0 & 821 EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_RX) { 822 val = timing->emc_bgbias_ctl0; 823 val |= EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD_IBIAS_VTTGEN; 824 val |= EMC_BGBIAS_CTL0_BIAS0_DSC_E_PWRD; 825 writel(val, emc->regs + EMC_BGBIAS_CTL0); 826 } else { 827 if (emc->dram_type == DRAM_TYPE_DDR3 && 828 readl(emc->regs + EMC_BGBIAS_CTL0) != 829 timing->emc_bgbias_ctl0) { 830 writel(timing->emc_bgbias_ctl0, 831 emc->regs + EMC_BGBIAS_CTL0); 832 } 833 834 writel(timing->emc_auto_cal_interval, 835 emc->regs + EMC_AUTO_CAL_INTERVAL); 836 } 837 838 /* Wait for timing to settle */ 839 udelay(2); 840 841 /* Reprogram SEL_DPD_CTRL */ 842 writel(timing->emc_sel_dpd_ctrl, emc->regs + EMC_SEL_DPD_CTRL); 843 emc_seq_update_timing(emc); 844 845 emc->last_timing = *timing; 846 } 847 848 /* Initialization and deinitialization */ 849 850 static void emc_read_current_timing(struct tegra_emc *emc, 851 struct emc_timing *timing) 852 { 853 unsigned int i; 854 855 for (i = 0; i < ARRAY_SIZE(emc_burst_regs); ++i) 856 timing->emc_burst_data[i] = 857 readl(emc->regs + emc_burst_regs[i]); 858 859 timing->emc_cfg = readl(emc->regs + EMC_CFG); 860 861 timing->emc_auto_cal_interval = 0; 862 timing->emc_zcal_cnt_long = 0; 863 timing->emc_mode_1 = 0; 864 timing->emc_mode_2 = 0; 865 timing->emc_mode_4 = 0; 866 timing->emc_mode_reset = 0; 867 } 868 869 static int emc_init(struct tegra_emc *emc) 870 { 871 emc->dram_type = readl(emc->regs + EMC_FBIO_CFG5); 872 emc->dram_type &= EMC_FBIO_CFG5_DRAM_TYPE_MASK; 873 emc->dram_type >>= EMC_FBIO_CFG5_DRAM_TYPE_SHIFT; 874 875 emc->dram_num = tegra_mc_get_emem_device_count(emc->mc); 876 877 emc_read_current_timing(emc, &emc->last_timing); 878 879 return 0; 880 } 881 882 static int load_one_timing_from_dt(struct tegra_emc *emc, 883 struct emc_timing *timing, 884 struct device_node *node) 885 { 886 u32 value; 887 int err; 888 889 err = of_property_read_u32(node, "clock-frequency", &value); 890 if (err) { 891 dev_err(emc->dev, "timing %pOFn: failed to read rate: %d\n", 892 node, err); 893 return err; 894 } 895 896 timing->rate = value; 897 898 err = of_property_read_u32_array(node, "nvidia,emc-configuration", 899 timing->emc_burst_data, 900 ARRAY_SIZE(timing->emc_burst_data)); 901 if (err) { 902 dev_err(emc->dev, 903 "timing %pOFn: failed to read emc burst data: %d\n", 904 node, err); 905 return err; 906 } 907 908 #define EMC_READ_PROP(prop, dtprop) { \ 909 err = of_property_read_u32(node, dtprop, &timing->prop); \ 910 if (err) { \ 911 dev_err(emc->dev, "timing %pOFn: failed to read " #prop ": %d\n", \ 912 node, err); \ 913 return err; \ 914 } \ 915 } 916 917 EMC_READ_PROP(emc_auto_cal_config, "nvidia,emc-auto-cal-config") 918 EMC_READ_PROP(emc_auto_cal_config2, "nvidia,emc-auto-cal-config2") 919 EMC_READ_PROP(emc_auto_cal_config3, "nvidia,emc-auto-cal-config3") 920 EMC_READ_PROP(emc_auto_cal_interval, "nvidia,emc-auto-cal-interval") 921 EMC_READ_PROP(emc_bgbias_ctl0, "nvidia,emc-bgbias-ctl0") 922 EMC_READ_PROP(emc_cfg, "nvidia,emc-cfg") 923 EMC_READ_PROP(emc_cfg_2, "nvidia,emc-cfg-2") 924 EMC_READ_PROP(emc_ctt_term_ctrl, "nvidia,emc-ctt-term-ctrl") 925 EMC_READ_PROP(emc_mode_1, "nvidia,emc-mode-1") 926 EMC_READ_PROP(emc_mode_2, "nvidia,emc-mode-2") 927 EMC_READ_PROP(emc_mode_4, "nvidia,emc-mode-4") 928 EMC_READ_PROP(emc_mode_reset, "nvidia,emc-mode-reset") 929 EMC_READ_PROP(emc_mrs_wait_cnt, "nvidia,emc-mrs-wait-cnt") 930 EMC_READ_PROP(emc_sel_dpd_ctrl, "nvidia,emc-sel-dpd-ctrl") 931 EMC_READ_PROP(emc_xm2dqspadctrl2, "nvidia,emc-xm2dqspadctrl2") 932 EMC_READ_PROP(emc_zcal_cnt_long, "nvidia,emc-zcal-cnt-long") 933 EMC_READ_PROP(emc_zcal_interval, "nvidia,emc-zcal-interval") 934 935 #undef EMC_READ_PROP 936 937 return 0; 938 } 939 940 static int cmp_timings(const void *_a, const void *_b) 941 { 942 const struct emc_timing *a = _a; 943 const struct emc_timing *b = _b; 944 945 if (a->rate < b->rate) 946 return -1; 947 else if (a->rate == b->rate) 948 return 0; 949 else 950 return 1; 951 } 952 953 static int tegra_emc_load_timings_from_dt(struct tegra_emc *emc, 954 struct device_node *node) 955 { 956 int child_count = of_get_child_count(node); 957 struct device_node *child; 958 struct emc_timing *timing; 959 unsigned int i = 0; 960 int err; 961 962 emc->timings = devm_kcalloc(emc->dev, child_count, sizeof(*timing), 963 GFP_KERNEL); 964 if (!emc->timings) 965 return -ENOMEM; 966 967 emc->num_timings = child_count; 968 969 for_each_child_of_node(node, child) { 970 timing = &emc->timings[i++]; 971 972 err = load_one_timing_from_dt(emc, timing, child); 973 if (err) { 974 of_node_put(child); 975 return err; 976 } 977 } 978 979 sort(emc->timings, emc->num_timings, sizeof(*timing), cmp_timings, 980 NULL); 981 982 return 0; 983 } 984 985 static const struct of_device_id tegra_emc_of_match[] = { 986 { .compatible = "nvidia,tegra124-emc" }, 987 { .compatible = "nvidia,tegra132-emc" }, 988 {} 989 }; 990 991 static struct device_node * 992 tegra_emc_find_node_by_ram_code(struct device_node *node, u32 ram_code) 993 { 994 struct device_node *np; 995 int err; 996 997 for_each_child_of_node(node, np) { 998 u32 value; 999 1000 err = of_property_read_u32(np, "nvidia,ram-code", &value); 1001 if (err || (value != ram_code)) 1002 continue; 1003 1004 return np; 1005 } 1006 1007 return NULL; 1008 } 1009 1010 /* 1011 * debugfs interface 1012 * 1013 * The memory controller driver exposes some files in debugfs that can be used 1014 * to control the EMC frequency. The top-level directory can be found here: 1015 * 1016 * /sys/kernel/debug/emc 1017 * 1018 * It contains the following files: 1019 * 1020 * - available_rates: This file contains a list of valid, space-separated 1021 * EMC frequencies. 1022 * 1023 * - min_rate: Writing a value to this file sets the given frequency as the 1024 * floor of the permitted range. If this is higher than the currently 1025 * configured EMC frequency, this will cause the frequency to be 1026 * increased so that it stays within the valid range. 1027 * 1028 * - max_rate: Similarily to the min_rate file, writing a value to this file 1029 * sets the given frequency as the ceiling of the permitted range. If 1030 * the value is lower than the currently configured EMC frequency, this 1031 * will cause the frequency to be decreased so that it stays within the 1032 * valid range. 1033 */ 1034 1035 static bool tegra_emc_validate_rate(struct tegra_emc *emc, unsigned long rate) 1036 { 1037 unsigned int i; 1038 1039 for (i = 0; i < emc->num_timings; i++) 1040 if (rate == emc->timings[i].rate) 1041 return true; 1042 1043 return false; 1044 } 1045 1046 static int tegra_emc_debug_available_rates_show(struct seq_file *s, 1047 void *data) 1048 { 1049 struct tegra_emc *emc = s->private; 1050 const char *prefix = ""; 1051 unsigned int i; 1052 1053 for (i = 0; i < emc->num_timings; i++) { 1054 seq_printf(s, "%s%lu", prefix, emc->timings[i].rate); 1055 prefix = " "; 1056 } 1057 1058 seq_puts(s, "\n"); 1059 1060 return 0; 1061 } 1062 1063 DEFINE_SHOW_ATTRIBUTE(tegra_emc_debug_available_rates); 1064 1065 static int tegra_emc_debug_min_rate_get(void *data, u64 *rate) 1066 { 1067 struct tegra_emc *emc = data; 1068 1069 *rate = emc->debugfs.min_rate; 1070 1071 return 0; 1072 } 1073 1074 static int tegra_emc_debug_min_rate_set(void *data, u64 rate) 1075 { 1076 struct tegra_emc *emc = data; 1077 int err; 1078 1079 if (!tegra_emc_validate_rate(emc, rate)) 1080 return -EINVAL; 1081 1082 err = clk_set_min_rate(emc->clk, rate); 1083 if (err < 0) 1084 return err; 1085 1086 emc->debugfs.min_rate = rate; 1087 1088 return 0; 1089 } 1090 1091 DEFINE_SIMPLE_ATTRIBUTE(tegra_emc_debug_min_rate_fops, 1092 tegra_emc_debug_min_rate_get, 1093 tegra_emc_debug_min_rate_set, "%llu\n"); 1094 1095 static int tegra_emc_debug_max_rate_get(void *data, u64 *rate) 1096 { 1097 struct tegra_emc *emc = data; 1098 1099 *rate = emc->debugfs.max_rate; 1100 1101 return 0; 1102 } 1103 1104 static int tegra_emc_debug_max_rate_set(void *data, u64 rate) 1105 { 1106 struct tegra_emc *emc = data; 1107 int err; 1108 1109 if (!tegra_emc_validate_rate(emc, rate)) 1110 return -EINVAL; 1111 1112 err = clk_set_max_rate(emc->clk, rate); 1113 if (err < 0) 1114 return err; 1115 1116 emc->debugfs.max_rate = rate; 1117 1118 return 0; 1119 } 1120 1121 DEFINE_SIMPLE_ATTRIBUTE(tegra_emc_debug_max_rate_fops, 1122 tegra_emc_debug_max_rate_get, 1123 tegra_emc_debug_max_rate_set, "%llu\n"); 1124 1125 static void emc_debugfs_init(struct device *dev, struct tegra_emc *emc) 1126 { 1127 unsigned int i; 1128 int err; 1129 1130 emc->clk = devm_clk_get(dev, "emc"); 1131 if (IS_ERR(emc->clk)) { 1132 if (PTR_ERR(emc->clk) != -ENODEV) { 1133 dev_err(dev, "failed to get EMC clock: %ld\n", 1134 PTR_ERR(emc->clk)); 1135 return; 1136 } 1137 } 1138 1139 emc->debugfs.min_rate = ULONG_MAX; 1140 emc->debugfs.max_rate = 0; 1141 1142 for (i = 0; i < emc->num_timings; i++) { 1143 if (emc->timings[i].rate < emc->debugfs.min_rate) 1144 emc->debugfs.min_rate = emc->timings[i].rate; 1145 1146 if (emc->timings[i].rate > emc->debugfs.max_rate) 1147 emc->debugfs.max_rate = emc->timings[i].rate; 1148 } 1149 1150 if (!emc->num_timings) { 1151 emc->debugfs.min_rate = clk_get_rate(emc->clk); 1152 emc->debugfs.max_rate = emc->debugfs.min_rate; 1153 } 1154 1155 err = clk_set_rate_range(emc->clk, emc->debugfs.min_rate, 1156 emc->debugfs.max_rate); 1157 if (err < 0) { 1158 dev_err(dev, "failed to set rate range [%lu-%lu] for %pC\n", 1159 emc->debugfs.min_rate, emc->debugfs.max_rate, 1160 emc->clk); 1161 return; 1162 } 1163 1164 emc->debugfs.root = debugfs_create_dir("emc", NULL); 1165 if (!emc->debugfs.root) { 1166 dev_err(dev, "failed to create debugfs directory\n"); 1167 return; 1168 } 1169 1170 debugfs_create_file("available_rates", 0444, emc->debugfs.root, emc, 1171 &tegra_emc_debug_available_rates_fops); 1172 debugfs_create_file("min_rate", 0644, emc->debugfs.root, 1173 emc, &tegra_emc_debug_min_rate_fops); 1174 debugfs_create_file("max_rate", 0644, emc->debugfs.root, 1175 emc, &tegra_emc_debug_max_rate_fops); 1176 } 1177 1178 static int tegra_emc_probe(struct platform_device *pdev) 1179 { 1180 struct device_node *np; 1181 struct tegra_emc *emc; 1182 u32 ram_code; 1183 int err; 1184 1185 emc = devm_kzalloc(&pdev->dev, sizeof(*emc), GFP_KERNEL); 1186 if (!emc) 1187 return -ENOMEM; 1188 1189 emc->dev = &pdev->dev; 1190 1191 emc->regs = devm_platform_ioremap_resource(pdev, 0); 1192 if (IS_ERR(emc->regs)) 1193 return PTR_ERR(emc->regs); 1194 1195 emc->mc = devm_tegra_memory_controller_get(&pdev->dev); 1196 if (IS_ERR(emc->mc)) 1197 return PTR_ERR(emc->mc); 1198 1199 ram_code = tegra_read_ram_code(); 1200 1201 np = tegra_emc_find_node_by_ram_code(pdev->dev.of_node, ram_code); 1202 if (!np) { 1203 dev_err(&pdev->dev, 1204 "no memory timings for RAM code %u found in DT\n", 1205 ram_code); 1206 return -ENOENT; 1207 } 1208 1209 err = tegra_emc_load_timings_from_dt(emc, np); 1210 of_node_put(np); 1211 if (err) 1212 return err; 1213 1214 if (emc->num_timings == 0) { 1215 dev_err(&pdev->dev, 1216 "no memory timings for RAM code %u registered\n", 1217 ram_code); 1218 return -ENOENT; 1219 } 1220 1221 err = emc_init(emc); 1222 if (err) { 1223 dev_err(&pdev->dev, "EMC initialization failed: %d\n", err); 1224 return err; 1225 } 1226 1227 platform_set_drvdata(pdev, emc); 1228 1229 if (IS_ENABLED(CONFIG_DEBUG_FS)) 1230 emc_debugfs_init(&pdev->dev, emc); 1231 1232 return 0; 1233 }; 1234 1235 static struct platform_driver tegra_emc_driver = { 1236 .probe = tegra_emc_probe, 1237 .driver = { 1238 .name = "tegra-emc", 1239 .of_match_table = tegra_emc_of_match, 1240 .suppress_bind_attrs = true, 1241 }, 1242 }; 1243 1244 static int tegra_emc_init(void) 1245 { 1246 return platform_driver_register(&tegra_emc_driver); 1247 } 1248 subsys_initcall(tegra_emc_init); 1249