// SPDX-License-Identifier: GPL-2.0+
/*
 * ddrmc DDR3 calibration code for NXP's VF610
 *
 * Copyright (C) 2018 DENX Software Engineering
 * Lukasz Majewski, DENX Software Engineering, lukma@denx.de
 *
 */
/* #define DEBUG */
#include <common.h>
#include <asm/io.h>
#include <asm/arch/imx-regs.h>
#include <linux/bitmap.h>

#include "ddrmc-vf610-calibration.h"

/*
 * Documents:
 *
 * [1] "Vybrid: About DDR leveling feature on DDRMC."
 *     https://community.nxp.com/thread/395323
 *
 * [2] VFxxx Controller Reference Manual, Rev. 0, 10/2016
 *
 *
 * NOTE
 * ====
 *
 * NXP recommends setting 'fixed' parameters instead of performing the
 * training at each boot.
 *
 * Use these functions to determine the values on new HW: read the
 * calculated values from the registers and add them to the board-specific
 * struct ddrmc_cr_setting (see the example following this comment).
 *
 * SW leveling supported operations - CR93[SW_LVL_MODE]:
 *
 * - 0x0 (b'00) - No leveling
 *
 * - 0x1 (b'01) - WRLVL_DL_X - It is not recommended to perform this tuning
 *                on HW designs utilizing non-flyback topology
 *                (single DDR3 with x16).
 *                Instead, the WRLVL_DL_0/1 fields shall be set
 *                based on trace length differences from the
 *                layout.
 *                Mismatches of up to 25% of tCK (clock period) are
 *                allowed, so the value in the field does not have
 *                to be very accurate.
 *
 * - 0x2 (b'10) - RDLVL_DL_0/1 - refers to adjusting the DQS strobe in relation
 *                to the DQ signals so that the strobe edge is
 *                centered in the window of valid read data.
 *
 * - 0x3 (b'11) - RDLVL_GTDL_0/1 - refers to the delay the PHY uses to un-gate
 *                the Read DQS strobe pad from the time that the
 *                PHY enables the pad to input the strobe signal.
 *
 */
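/*
 * Example (a sketch with placeholder delay values, assuming the
 * { value, CR register number } layout of struct ddrmc_cr_setting used by
 * the existing VF610 board files):
 *
 * Once the calibration below has been run on new hardware, the reported
 * RDLVL_DL_0/1 delays can be frozen in the board-specific table instead of
 * re-training at each boot, e.g.:
 *
 *	static struct ddrmc_cr_setting board_cr_settings[] = {
 *		...
 *		{ DDRMC_CR105_RDLVL_DL_0(0x45), 105 },
 *		{ DDRMC_CR110_RDLVL_DL_1(0x43), 110 },
 *		...
 *		{ 0, -1 }
 *	};
 *
 * The { 0, -1 } entry terminates the table; the 0x45/0x43 delays are
 * placeholders - use the values printed by the debug() output below for the
 * given hardware.
 */
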
static int ddr_cal_get_first_edge_index(unsigned long *bmap, enum edge e,
					int samples, int start, int max)
{
	int i, ret = -1;

	/*
	 * We look only for the first matching index (and filter out
	 * spurious data)
	 */
	switch (e) {
	case RISING_EDGE:
		for (i = start; i <= max - samples; i++) {
			if (test_bit(i, bmap)) {
				if (!test_bit(i - 1, bmap) &&
				    test_bit(i + 1, bmap) &&
				    test_bit(i + 2, bmap) &&
				    test_bit(i + 3, bmap)) {
					return i;
				}
			}
		}
		break;
	case FALLING_EDGE:
		for (i = start; i <= max - samples; i++) {
			if (!test_bit(i, bmap)) {
				if (test_bit(i - 1, bmap) &&
				    test_bit(i - 2, bmap) &&
				    test_bit(i - 3, bmap)) {
					return i;
				}
			}
		}
	}

	return ret;
}

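/*
 * Worked example (hypothetical response data) of how the edge search above
 * and the window-midpoint computation below are used:
 *
 *	delay:    ...  8  9 10 11 12 13 ... 39 40 41 42 ...
 *	response: ...  0  0  1  1  1  1 ...  1  1  0  0 ...
 *
 * The RISING_EDGE search returns 10 (the first '1' preceded by a '0' and
 * followed by further '1's); the FALLING_EDGE search - started from that
 * index - returns 41 (the first '0' preceded by '1's). The delay written to
 * RDLVL_DL_x is then half of the detected window width, here
 * (41 - 10) / 2 = 15 delay taps.
 */
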
static void bitmap_print(unsigned long *bmap, int max)
{
	int i;

	debug("BITMAP [0x%p]:\n", bmap);
	for (i = 0; i <= max; i++) {
		debug("%d ", test_bit(i, bmap) ? 1 : 0);
		if (i && (i % 32) == (32 - 1))
			debug("\n");
	}
	debug("\n");
}

#define sw_leveling_op_done \
	while (!(readl(&ddrmr->cr[94]) & DDRMC_CR94_SWLVL_OP_DONE))

#define sw_leveling_load_value \
	do { clrsetbits_le32(&ddrmr->cr[93], DDRMC_CR93_SWLVL_LOAD, \
			     DDRMC_CR93_SWLVL_LOAD); } while (0)

#define sw_leveling_start \
	do { clrsetbits_le32(&ddrmr->cr[93], DDRMC_CR93_SWLVL_START, \
			     DDRMC_CR93_SWLVL_START); } while (0)

#define sw_leveling_exit \
	do { clrsetbits_le32(&ddrmr->cr[94], DDRMC_CR94_SWLVL_EXIT, \
			     DDRMC_CR94_SWLVL_EXIT); } while (0)

/*
 * RDLVL_DL calibration:
 *
 * NXP is _NOT_ recommending performing the leveling at each
 * boot. Instead, one shall run this procedure once on new boards
 * and then use the hardcoded values.
 *
 */
static int ddrmc_cal_dqs_to_dq(struct ddrmr_regs *ddrmr)
{
	DECLARE_BITMAP(rdlvl_rsp, DDRMC_DQS_DQ_MAX_DELAY + 1);
	int rdlvl_dl_0_min = -1, rdlvl_dl_0_max = -1;
	int rdlvl_dl_1_min = -1, rdlvl_dl_1_max = -1;
	int rdlvl_dl_0, rdlvl_dl_1;
	u8 swlvl_rsp;
	u32 tmp;
	int i;

	/* Read defaults */
	u16 rdlvl_dl_0_def =
		(readl(&ddrmr->cr[105]) >> DDRMC_CR105_RDLVL_DL_0_OFF) & 0xFFFF;
	u16 rdlvl_dl_1_def = readl(&ddrmr->cr[110]) & 0xFFFF;

	debug("\nRDLVL: ======================\n");
	debug("RDLVL: DQS to DQ (RDLVL)\n");

	debug("RDLVL: RDLVL_DL_0_DFL:\t 0x%x\n", rdlvl_dl_0_def);
	debug("RDLVL: RDLVL_DL_1_DFL:\t 0x%x\n", rdlvl_dl_1_def);

	/*
	 * Set/Read setup for calibration
	 *
	 * Values necessary for leveling from Vybrid RM [2] - page 1600
	 */
	writel(0x40703030, &ddrmr->cr[144]);
	writel(0x40, &ddrmr->cr[145]);
	writel(0x40, &ddrmr->cr[146]);

	tmp = readl(&ddrmr->cr[144]);
	debug("RDLVL: PHY_RDLVL_RES:\t 0x%x\n", (tmp >> 24) & 0xFF); // set 0x40
	debug("RDLVL: PHY_RDLV_LOAD:\t 0x%x\n", (tmp >> 16) & 0xFF); // set 0x70
	debug("RDLVL: PHY_RDLV_DLL:\t 0x%x\n", (tmp >> 8) & 0xFF);   // set 0x30
	debug("RDLVL: PHY_RDLV_EN:\t 0x%x\n", tmp & 0xFF);           // set 0x30

	tmp = readl(&ddrmr->cr[145]);
	debug("RDLVL: PHY_RDLV_RR:\t 0x%x\n", tmp & 0x3FF); // set 0x40

	tmp = readl(&ddrmr->cr[146]);
	debug("RDLVL: PHY_RDLV_RESP:\t 0x%x\n", tmp); // set 0x40

	/*
	 * Program/read the leveling edge RDLVL_EDGE = 0
	 *
	 * 0x00 is the correct output on SWLVL_RSP_X;
	 * if any 1s are visible, a wrong value has been read
	 */
	clrbits_le32(&ddrmr->cr[101], DDRMC_CR101_PHY_RDLVL_EDGE);

	tmp = readl(&ddrmr->cr[101]);
	debug("RDLVL: PHY_RDLVL_EDGE:\t 0x%x\n",
	      (tmp >> DDRMC_CR101_PHY_RDLVL_EDGE_OFF) & 0x1); // set 0

	/* Program Leveling mode - CR93[SW_LVL_MODE] to 'b10 */
	clrsetbits_le32(&ddrmr->cr[93], DDRMC_CR93_SW_LVL_MODE(0x3),
			DDRMC_CR93_SW_LVL_MODE(0x2));
	tmp = readl(&ddrmr->cr[93]);
	debug("RDLVL: SW_LVL_MODE:\t 0x%x\n",
	      (tmp >> DDRMC_CR93_SW_LVL_MODE_OFF) & 0x3);

	/* Start procedure - CR93[SWLVL_START] to 'b1 */
	sw_leveling_start;

	/* Poll CR94[SWLVL_OP_DONE] */
	sw_leveling_op_done;

	/*
	 * Program delays for RDLVL_DL_0
	 *
	 * The procedure is to increase the delay values from 0 to 0xFF
	 * and read the response from the DDRMC
	 */
	debug("\nRDLVL: ---> RDLVL_DL_0\n");
	bitmap_zero(rdlvl_rsp, DDRMC_DQS_DQ_MAX_DELAY + 1);

	for (i = 0; i <= DDRMC_DQS_DQ_MAX_DELAY; i++) {
		clrsetbits_le32(&ddrmr->cr[105],
				0xFFFF << DDRMC_CR105_RDLVL_DL_0_OFF,
				i << DDRMC_CR105_RDLVL_DL_0_OFF);

		/* Load values CR93[SWLVL_LOAD] to 'b1 */
		sw_leveling_load_value;

		/* Poll CR94[SWLVL_OP_DONE] */
		sw_leveling_op_done;

		/*
		 * Read Responses - SWLVL_RESP_0
		 *
		 * The 0x00 (correct response when PHY_RDLVL_EDGE = 0)
		 * -> 1 in the bit vector
		 */
		swlvl_rsp = (readl(&ddrmr->cr[94]) >>
			     DDRMC_CR94_SWLVL_RESP_0_OFF) & 0xF;
		if (swlvl_rsp == 0)
			generic_set_bit(i, rdlvl_rsp);
	}

	bitmap_print(rdlvl_rsp, DDRMC_DQS_DQ_MAX_DELAY);

	/*
	 * First test for rising edge 0x0 -> 0x1 in bitmap
	 */
	rdlvl_dl_0_min = ddr_cal_get_first_edge_index(rdlvl_rsp, RISING_EDGE,
						      N_SAMPLES, N_SAMPLES,
						      DDRMC_DQS_DQ_MAX_DELAY);

	/*
	 * Secondly test for falling edge 0x1 -> 0x0 in bitmap
	 */
	rdlvl_dl_0_max = ddr_cal_get_first_edge_index(rdlvl_rsp, FALLING_EDGE,
						      N_SAMPLES, rdlvl_dl_0_min,
						      DDRMC_DQS_DQ_MAX_DELAY);

	debug("RDLVL: DL_0 min: %d [0x%x] DL_0 max: %d [0x%x]\n",
	      rdlvl_dl_0_min, rdlvl_dl_0_min, rdlvl_dl_0_max, rdlvl_dl_0_max);
	rdlvl_dl_0 = (rdlvl_dl_0_max - rdlvl_dl_0_min) / 2;

	if (rdlvl_dl_0_max == -1 || rdlvl_dl_0_min == -1 || rdlvl_dl_0 <= 0) {
		debug("RDLVL: The DQS to DQ delay cannot be found!\n");
		debug("RDLVL: Using default - slice 0: %d!\n", rdlvl_dl_0_def);
		rdlvl_dl_0 = rdlvl_dl_0_def;
	}

	debug("\nRDLVL: ---> RDLVL_DL_1\n");
	bitmap_zero(rdlvl_rsp, DDRMC_DQS_DQ_MAX_DELAY + 1);

	for (i = 0; i <= DDRMC_DQS_DQ_MAX_DELAY; i++) {
		clrsetbits_le32(&ddrmr->cr[110],
				0xFFFF << DDRMC_CR110_RDLVL_DL_1_OFF,
				i << DDRMC_CR110_RDLVL_DL_1_OFF);

		/* Load values CR93[SWLVL_LOAD] to 'b1 */
		sw_leveling_load_value;

		/* Poll CR94[SWLVL_OP_DONE] */
		sw_leveling_op_done;

		/*
		 * Read Responses - SWLVL_RESP_1
		 *
		 * The 0x00 (correct response when PHY_RDLVL_EDGE = 0)
		 * -> 1 in the bit vector
		 */
		swlvl_rsp = (readl(&ddrmr->cr[95]) >>
			     DDRMC_CR95_SWLVL_RESP_1_OFF) & 0xF;
		if (swlvl_rsp == 0)
			generic_set_bit(i, rdlvl_rsp);
	}

	bitmap_print(rdlvl_rsp, DDRMC_DQS_DQ_MAX_DELAY);

	/*
	 * First test for rising edge 0x0 -> 0x1 in bitmap
	 */
	rdlvl_dl_1_min = ddr_cal_get_first_edge_index(rdlvl_rsp, RISING_EDGE,
						      N_SAMPLES, N_SAMPLES,
						      DDRMC_DQS_DQ_MAX_DELAY);

	/*
	 * Secondly test for falling edge 0x1 -> 0x0 in bitmap
	 */
	rdlvl_dl_1_max = ddr_cal_get_first_edge_index(rdlvl_rsp, FALLING_EDGE,
						      N_SAMPLES, rdlvl_dl_1_min,
						      DDRMC_DQS_DQ_MAX_DELAY);

	debug("RDLVL: DL_1 min: %d [0x%x] DL_1 max: %d [0x%x]\n",
	      rdlvl_dl_1_min, rdlvl_dl_1_min, rdlvl_dl_1_max, rdlvl_dl_1_max);
	rdlvl_dl_1 = (rdlvl_dl_1_max - rdlvl_dl_1_min) / 2;

	if (rdlvl_dl_1_max == -1 || rdlvl_dl_1_min == -1 || rdlvl_dl_1 <= 0) {
		debug("RDLVL: The DQS to DQ delay cannot be found!\n");
		debug("RDLVL: Using default - slice 1: %d!\n", rdlvl_dl_1_def);
		rdlvl_dl_1 = rdlvl_dl_1_def;
	}

	debug("RDLVL: CALIBRATED: rdlvl_dl_0: 0x%x\t rdlvl_dl_1: 0x%x\n",
	      rdlvl_dl_0, rdlvl_dl_1);

	/* Write new delay values */
	writel(DDRMC_CR105_RDLVL_DL_0(rdlvl_dl_0), &ddrmr->cr[105]);
	writel(DDRMC_CR110_RDLVL_DL_1(rdlvl_dl_1), &ddrmr->cr[110]);

	sw_leveling_load_value;
	sw_leveling_op_done;

	/* Exit procedure - CR94[SWLVL_EXIT] to 'b1 */
	sw_leveling_exit;

	/* Poll CR94[SWLVL_OP_DONE] */
	sw_leveling_op_done;

	return 0;
}

/*
 * WRLVL_DL calibration:
 *
 * For a non-flyback memory architecture - where one has a single DDR3 x16
 * memory - it is NOT necessary to perform "Write Leveling"
 * [3] 'Vybrid DDR3 write leveling' https://community.nxp.com/thread/429362
 *
 */

int ddrmc_calibration(struct ddrmr_regs *ddrmr)
{
	ddrmc_cal_dqs_to_dq(ddrmr);

	return 0;
}

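/*
 * Typical one-off usage (a sketch: DDR_BASE_ADDR is assumed to be the DDRMC
 * register base from <asm/arch/imx-regs.h>, and the exact call site in the
 * board or SoC DDR initialization code is up to the integrator):
 *
 *	struct ddrmr_regs *ddrmr = (struct ddrmr_regs *)DDR_BASE_ADDR;
 *
 *	... regular DDRMC / PHY setup ...
 *	ddrmc_calibration(ddrmr);
 *
 * With DEBUG enabled, the calibrated RDLVL_DL_0/1 values are printed; move
 * them into the board-specific ddrmc_cr_setting table (see the example after
 * the header comment above) and drop the runtime calibration call again.
 */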