/*
 * Copyright 2012 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "radeon.h"
#include "sumod.h"
#include "sumo_dpm.h"
#include "ppsmc.h"

#define SUMO_SMU_SERVICE_ROUTINE_PG_INIT		1
#define SUMO_SMU_SERVICE_ROUTINE_ALTVDDNB_NOTIFY	27
#define SUMO_SMU_SERVICE_ROUTINE_GFX_SRV_ID_20		20

struct sumo_power_info *sumo_get_pi(struct radeon_device *rdev);

static void sumo_send_msg_to_smu(struct radeon_device *rdev, u32 id)
{
	u32 gfx_int_req;
	int i;

	/* wait for any previously issued request to complete */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(GFX_INT_STATUS) & INT_DONE)
			break;
		udelay(1);
	}

	/* issue the service routine request */
	gfx_int_req = SERV_INDEX(id) | INT_REQ;
	WREG32(GFX_INT_REQ, gfx_int_req);

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(GFX_INT_REQ) & INT_REQ)
			break;
		udelay(1);
	}

	/* wait for the SMU to acknowledge and complete the request */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(GFX_INT_STATUS) & INT_ACK)
			break;
		udelay(1);
	}

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(GFX_INT_STATUS) & INT_DONE)
			break;
		udelay(1);
	}

	/* clear the request bit */
	gfx_int_req &= ~INT_REQ;
	WREG32(GFX_INT_REQ, gfx_int_req);
}

void sumo_initialize_m3_arb(struct radeon_device *rdev)
{
	struct sumo_power_info *pi = sumo_get_pi(rdev);
	u32 i;

	if (!pi->enable_dynamic_m3_arbiter)
		return;

	/* program the default, UVD, and FS3D M3 arbiter parameter sets */
	for (i = 0; i < NUMBER_OF_M3ARB_PARAM_SETS; i++)
		WREG32_RCU(MCU_M3ARB_PARAMS + (i * 4),
			   pi->sys_info.csr_m3_arb_cntl_default[i]);

	for (; i < NUMBER_OF_M3ARB_PARAM_SETS * 2; i++)
		WREG32_RCU(MCU_M3ARB_PARAMS + (i * 4),
			   pi->sys_info.csr_m3_arb_cntl_uvd[i % NUMBER_OF_M3ARB_PARAM_SETS]);

	for (; i < NUMBER_OF_M3ARB_PARAM_SETS * 3; i++)
		WREG32_RCU(MCU_M3ARB_PARAMS + (i * 4),
			   pi->sys_info.csr_m3_arb_cntl_fs3d[i % NUMBER_OF_M3ARB_PARAM_SETS]);
}

static bool sumo_is_alt_vddnb_supported(struct radeon_device *rdev)
{
	struct sumo_power_info *pi = sumo_get_pi(rdev);
	bool return_code = false;

	if (!pi->enable_alt_vddnb)
		return return_code;

	if ((rdev->family == CHIP_SUMO) || (rdev->family == CHIP_SUMO2)) {
		if (pi->fw_version >= 0x00010C00)
			return_code = true;
	}

	return return_code;
}

void sumo_smu_notify_alt_vddnb_change(struct radeon_device *rdev,
				      bool powersaving, bool force_nbps1)
{
	u32 param = 0;

	if (!sumo_is_alt_vddnb_supported(rdev))
		return;

	if (powersaving)
		param |= 1;

	if (force_nbps1)
		param |= 2;

	WREG32_RCU(RCU_ALTVDDNB_NOTIFY, param);

	sumo_send_msg_to_smu(rdev, SUMO_SMU_SERVICE_ROUTINE_ALTVDDNB_NOTIFY);
}

void sumo_smu_pg_init(struct radeon_device *rdev)
{
	sumo_send_msg_to_smu(rdev, SUMO_SMU_SERVICE_ROUTINE_PG_INIT);
}

/* returns 4^unit */
static u32 sumo_power_of_4(u32 unit)
{
	u32 ret = 1;
	u32 i;

	for (i = 0; i < unit; i++)
		ret *= 4;

	return ret;
}

void sumo_enable_boost_timer(struct radeon_device *rdev)
{
	struct sumo_power_info *pi = sumo_get_pi(rdev);
	u32 period, unit, timer_value;
	u32 xclk = radeon_get_xclk(rdev);

	unit = (RREG32_RCU(RCU_LCLK_SCALING_CNTL) & LCLK_SCALING_TIMER_PRESCALER_MASK)
		>> LCLK_SCALING_TIMER_PRESCALER_SHIFT;

	/* derive the power reporting timer period from xclk and the
	 * LCLK scaling timer prescaler (4^unit)
	 */
	period = 100 * (xclk / 100 / sumo_power_of_4(unit));

	timer_value = (period << 16) | (unit << 4);

	WREG32_RCU(RCU_GNB_PWR_REP_TIMER_CNTL, timer_value);
	WREG32_RCU(RCU_BOOST_MARGIN, pi->sys_info.sclk_dpm_boost_margin);
	WREG32_RCU(RCU_THROTTLE_MARGIN, pi->sys_info.sclk_dpm_throttle_margin);
	WREG32_RCU(GNB_TDP_LIMIT, pi->sys_info.gnb_tdp_limit);
	WREG32_RCU(RCU_SclkDpmTdpLimitPG, pi->sys_info.sclk_dpm_tdp_limit_pg);

	sumo_send_msg_to_smu(rdev, SUMO_SMU_SERVICE_ROUTINE_GFX_SRV_ID_20);
}

void sumo_set_tdp_limit(struct radeon_device *rdev, u32 index, u32 tdp_limit)
{
	u32 regoffset = 0;
	u32 shift = 0;
	u32 mask = 0xFFF;
	u32 sclk_dpm_tdp_limit;

	/* each limit register holds two 12-bit TDP limits,
	 * one at bit 16 and one at bit 0
	 */
	switch (index) {
	case 0:
		regoffset = RCU_SclkDpmTdpLimit01;
		shift = 16;
		break;
	case 1:
		regoffset = RCU_SclkDpmTdpLimit01;
		shift = 0;
		break;
	case 2:
		regoffset = RCU_SclkDpmTdpLimit23;
		shift = 16;
		break;
	case 3:
		regoffset = RCU_SclkDpmTdpLimit23;
		shift = 0;
		break;
	case 4:
		regoffset = RCU_SclkDpmTdpLimit47;
		shift = 16;
		break;
	case 7:
		regoffset = RCU_SclkDpmTdpLimit47;
		shift = 0;
		break;
	default:
		break;
	}

	sclk_dpm_tdp_limit = RREG32_RCU(regoffset);
	sclk_dpm_tdp_limit &= ~(mask << shift);
	sclk_dpm_tdp_limit |= (tdp_limit << shift);
	WREG32_RCU(regoffset, sclk_dpm_tdp_limit);
}

void sumo_boost_state_enable(struct radeon_device *rdev, bool enable)
{
	u32 boost_disable = RREG32_RCU(RCU_GPU_BOOST_DISABLE);

	/* bit 0 disables boost when set */
	boost_disable &= 0xFFFFFFFE;
	boost_disable |= (enable ? 0 : 1);
	WREG32_RCU(RCU_GPU_BOOST_DISABLE, boost_disable);
}

u32 sumo_get_running_fw_version(struct radeon_device *rdev)
{
	return RREG32_RCU(RCU_FW_VERSION);
}