Lines Matching +full:100 +full:ps (code-search results; the function names below indicate the radeon SI DPM code, drivers/gpu/drm/radeon/si_dpm.c)

266 100,
302 100,
320 100,
499 100,
507 100
548 100,
566 100,
584 100,
999 100
1526 100
1557 100
1708 100
1757 i_leakage = div64_s64(drm_int2fixp(ileakage), 100); in si_calculate_leakage_for_v_and_t_formula()
1793 i_leakage = div64_s64(drm_int2fixp(ileakage), 100); in si_calculate_leakage_for_v_formula()
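The two leakage helpers above (si_calculate_leakage_for_v_and_t_formula and si_calculate_leakage_for_v_formula) convert the leakage value to DRM's signed 64-bit fixed-point format before dividing by 100. A minimal standalone sketch of that divide-by-100 in a 32.32 fixed-point layout; the helper names below are stand-ins for this sketch, not the kernel's drm_int2fixp()/div64_s64():

#include <stdint.h>
#include <stdio.h>

/* 32.32 signed fixed point; assumed here to roughly mirror the layout
 * behind drm_int2fixp(). */
typedef int64_t fixp64_t;
#define FIXP_SHIFT 32

static fixp64_t int_to_fixp(int32_t v)              /* hypothetical stand-in */
{
	return (fixp64_t)v << FIXP_SHIFT;
}

static fixp64_t fixp_div_int(fixp64_t a, int32_t b) /* hypothetical stand-in */
{
	return a / b;
}

int main(void)
{
	int32_t ileakage = 1234;  /* illustrative leakage value in hundredths */
	fixp64_t i_leakage = fixp_div_int(int_to_fixp(ileakage), 100);

	/* Integer part: 1234 / 100 = 12; the .34 stays in the low 32 bits. */
	printf("%lld\n", (long long)(i_leakage >> FIXP_SHIFT));
	return 0;
}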
1833 (p_limit2 * (u32)100); in si_update_dte_from_pl2()
2092 wintime = (cac_window_size * 100) / xclk; in si_calculate_cac_wintime()
2113 max_tdp_limit = ((100 + 100) * rdev->pm.dpm.tdp_limit) / 100; in si_calculate_adjusted_tdp_limits()
2116 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; in si_calculate_adjusted_tdp_limits()
2119 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; in si_calculate_adjusted_tdp_limits()
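The three si_calculate_adjusted_tdp_limits() matches show plain integer percentage scaling: the adjusted limit is (100 +/- tdp_adjustment)% of the board's tdp_limit, and the ceiling is (100 + 100)%, i.e. twice the board limit. A small standalone sketch of that scaling; names are illustrative, and the real function takes an rdev and adjusts more than the single limit shown here:

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

static void adjust_tdp_limit(bool adjust_up, uint32_t tdp_adjustment,
			     uint32_t board_tdp_limit, uint32_t *tdp_limit)
{
	/* Hard ceiling: (100 + 100)% of the board limit, i.e. 2x. */
	uint32_t max_tdp_limit = ((100 + 100) * board_tdp_limit) / 100;

	if (adjust_up)
		*tdp_limit = ((100 + tdp_adjustment) * board_tdp_limit) / 100;
	else
		*tdp_limit = ((100 - tdp_adjustment) * board_tdp_limit) / 100;

	if (*tdp_limit > max_tdp_limit)
		*tdp_limit = max_tdp_limit;
}

int main(void)
{
	uint32_t limit;

	adjust_tdp_limit(true, 20, 150, &limit); /* +20% of a 150 W limit */
	printf("%u\n", limit);                   /* prints 180 */
	return 0;
}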
2168 …cpu_to_be32(si_scale_power_for_smc((near_tdp_limit * SISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, … in si_populate_smc_tdp_limits()
2216 …->pm.dpm.near_tdp_limit_adjusted * SISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, scaling_factor) * … in si_populate_smc_tdp_limits_2()
2320 min_sclk = (prev_sclk * (u32)max_ps_percent) / 100; in si_populate_power_containment_values()
2535 if (si_pi->powertune_data->lkge_lut_v0_percent > 100) in si_get_cac_std_voltage_max_min()
2538 v0_loadline = (*min) * (100 - si_pi->powertune_data->lkge_lut_v0_percent) / 100; in si_get_cac_std_voltage_max_min()
2641 u32 ticks_per_us = radeon_get_xclk(rdev) / 100; in si_initialize_smc_cac_tables()
2681 load_line_slope = ((u32)rdev->pm.dpm.load_line_slope << SMC_SISLANDS_SCALE_R) / 100; in si_initialize_smc_cac_tables()
2949 struct ni_ps *ps = ni_get_ps(rps); in si_apply_state_adjust_rules() local
3011 for (i = ps->performance_level_count - 2; i >= 0; i--) { in si_apply_state_adjust_rules()
3012 if (ps->performance_levels[i].vddc > ps->performance_levels[i+1].vddc) in si_apply_state_adjust_rules()
3013 ps->performance_levels[i].vddc = ps->performance_levels[i+1].vddc; in si_apply_state_adjust_rules()
3016 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3017 if (ps->performance_levels[i].mclk > max_limits->mclk) in si_apply_state_adjust_rules()
3018 ps->performance_levels[i].mclk = max_limits->mclk; in si_apply_state_adjust_rules()
3019 if (ps->performance_levels[i].sclk > max_limits->sclk) in si_apply_state_adjust_rules()
3020 ps->performance_levels[i].sclk = max_limits->sclk; in si_apply_state_adjust_rules()
3021 if (ps->performance_levels[i].vddc > max_limits->vddc) in si_apply_state_adjust_rules()
3022 ps->performance_levels[i].vddc = max_limits->vddc; in si_apply_state_adjust_rules()
3023 if (ps->performance_levels[i].vddci > max_limits->vddci) in si_apply_state_adjust_rules()
3024 ps->performance_levels[i].vddci = max_limits->vddci; in si_apply_state_adjust_rules()
3036 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3038 if (ps->performance_levels[i].sclk > max_sclk_vddc) in si_apply_state_adjust_rules()
3039 ps->performance_levels[i].sclk = max_sclk_vddc; in si_apply_state_adjust_rules()
3042 if (ps->performance_levels[i].mclk > max_mclk_vddci) in si_apply_state_adjust_rules()
3043 ps->performance_levels[i].mclk = max_mclk_vddci; in si_apply_state_adjust_rules()
3046 if (ps->performance_levels[i].mclk > max_mclk_vddc) in si_apply_state_adjust_rules()
3047 ps->performance_levels[i].mclk = max_mclk_vddc; in si_apply_state_adjust_rules()
3050 if (ps->performance_levels[i].mclk > max_mclk) in si_apply_state_adjust_rules()
3051 ps->performance_levels[i].mclk = max_mclk; in si_apply_state_adjust_rules()
3054 if (ps->performance_levels[i].sclk > max_sclk) in si_apply_state_adjust_rules()
3055 ps->performance_levels[i].sclk = max_sclk; in si_apply_state_adjust_rules()
3062 mclk = ps->performance_levels[ps->performance_level_count - 1].mclk; in si_apply_state_adjust_rules()
3063 vddci = ps->performance_levels[ps->performance_level_count - 1].vddci; in si_apply_state_adjust_rules()
3065 mclk = ps->performance_levels[0].mclk; in si_apply_state_adjust_rules()
3066 vddci = ps->performance_levels[0].vddci; in si_apply_state_adjust_rules()
3070 sclk = ps->performance_levels[ps->performance_level_count - 1].sclk; in si_apply_state_adjust_rules()
3071 vddc = ps->performance_levels[ps->performance_level_count - 1].vddc; in si_apply_state_adjust_rules()
3073 sclk = ps->performance_levels[0].sclk; in si_apply_state_adjust_rules()
3074 vddc = ps->performance_levels[0].vddc; in si_apply_state_adjust_rules()
3085 ps->performance_levels[0].sclk = sclk; in si_apply_state_adjust_rules()
3086 ps->performance_levels[0].mclk = mclk; in si_apply_state_adjust_rules()
3087 ps->performance_levels[0].vddc = vddc; in si_apply_state_adjust_rules()
3088 ps->performance_levels[0].vddci = vddci; in si_apply_state_adjust_rules()
3091 sclk = ps->performance_levels[0].sclk; in si_apply_state_adjust_rules()
3092 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3093 if (sclk < ps->performance_levels[i].sclk) in si_apply_state_adjust_rules()
3094 sclk = ps->performance_levels[i].sclk; in si_apply_state_adjust_rules()
3096 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3097 ps->performance_levels[i].sclk = sclk; in si_apply_state_adjust_rules()
3098 ps->performance_levels[i].vddc = vddc; in si_apply_state_adjust_rules()
3101 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3102 if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk) in si_apply_state_adjust_rules()
3103 ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk; in si_apply_state_adjust_rules()
3104 if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc) in si_apply_state_adjust_rules()
3105 ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc; in si_apply_state_adjust_rules()
3110 mclk = ps->performance_levels[0].mclk; in si_apply_state_adjust_rules()
3111 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3112 if (mclk < ps->performance_levels[i].mclk) in si_apply_state_adjust_rules()
3113 mclk = ps->performance_levels[i].mclk; in si_apply_state_adjust_rules()
3115 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3116 ps->performance_levels[i].mclk = mclk; in si_apply_state_adjust_rules()
3117 ps->performance_levels[i].vddci = vddci; in si_apply_state_adjust_rules()
3120 for (i = 1; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3121 if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk) in si_apply_state_adjust_rules()
3122 ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk; in si_apply_state_adjust_rules()
3123 if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci) in si_apply_state_adjust_rules()
3124 ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci; in si_apply_state_adjust_rules()
3128 for (i = 0; i < ps->performance_level_count; i++) in si_apply_state_adjust_rules()
3130 &ps->performance_levels[i]); in si_apply_state_adjust_rules()
3132 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3133 if (ps->performance_levels[i].vddc < min_vce_voltage) in si_apply_state_adjust_rules()
3134 ps->performance_levels[i].vddc = min_vce_voltage; in si_apply_state_adjust_rules()
3136 ps->performance_levels[i].sclk, in si_apply_state_adjust_rules()
3137 max_limits->vddc, &ps->performance_levels[i].vddc); in si_apply_state_adjust_rules()
3139 ps->performance_levels[i].mclk, in si_apply_state_adjust_rules()
3140 max_limits->vddci, &ps->performance_levels[i].vddci); in si_apply_state_adjust_rules()
3142 ps->performance_levels[i].mclk, in si_apply_state_adjust_rules()
3143 max_limits->vddc, &ps->performance_levels[i].vddc); in si_apply_state_adjust_rules()
3146 max_limits->vddc, &ps->performance_levels[i].vddc); in si_apply_state_adjust_rules()
3149 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3152 &ps->performance_levels[i].vddc, in si_apply_state_adjust_rules()
3153 &ps->performance_levels[i].vddci); in si_apply_state_adjust_rules()
3156 ps->dc_compatible = true; in si_apply_state_adjust_rules()
3157 for (i = 0; i < ps->performance_level_count; i++) { in si_apply_state_adjust_rules()
3158 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) in si_apply_state_adjust_rules()
3159 ps->dc_compatible = false; in si_apply_state_adjust_rules()
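Most of the si_apply_state_adjust_rules() matches fall into two patterns: clamp every performance level's clocks and voltages to the applicable maximum limits, then walk the levels again forcing sclk/mclk/vddc/vddci to be non-decreasing from one level to the next. A reduced, hypothetical sketch of that clamp-then-monotonic pass; the struct and field names are simplified stand-ins for the driver's ni_ps/rv7xx_pl:

#include <stdint.h>
#include <stdio.h>

struct perf_level {                     /* simplified stand-in for rv7xx_pl */
	uint32_t sclk, mclk;
	uint16_t vddc, vddci;
};

static void clamp_and_sort_levels(struct perf_level *lvl, int count,
				  const struct perf_level *max)
{
	int i;

	/* Clamp each level to the maximum limits. */
	for (i = 0; i < count; i++) {
		if (lvl[i].sclk > max->sclk)   lvl[i].sclk = max->sclk;
		if (lvl[i].mclk > max->mclk)   lvl[i].mclk = max->mclk;
		if (lvl[i].vddc > max->vddc)   lvl[i].vddc = max->vddc;
		if (lvl[i].vddci > max->vddci) lvl[i].vddci = max->vddci;
	}

	/* Force clocks and voltages to be non-decreasing across levels. */
	for (i = 1; i < count; i++) {
		if (lvl[i].sclk < lvl[i - 1].sclk)   lvl[i].sclk = lvl[i - 1].sclk;
		if (lvl[i].mclk < lvl[i - 1].mclk)   lvl[i].mclk = lvl[i - 1].mclk;
		if (lvl[i].vddc < lvl[i - 1].vddc)   lvl[i].vddc = lvl[i - 1].vddc;
		if (lvl[i].vddci < lvl[i - 1].vddci) lvl[i].vddci = lvl[i - 1].vddci;
	}
}

int main(void)
{
	struct perf_level levels[3] = {
		{ 30000, 40000,  900, 850 },
		{ 20000, 90000,  950, 900 },
		{ 80000, 50000, 1100, 950 },
	};
	struct perf_level max = { 60000, 60000, 1050, 900 };
	int i;

	clamp_and_sort_levels(levels, 3, &max);
	for (i = 0; i < 3; i++)
		printf("sclk=%u mclk=%u vddc=%u vddci=%u\n",
		       levels[i].sclk, levels[i].mclk,
		       (unsigned)levels[i].vddc, (unsigned)levels[i].vddci);
	return 0;
}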
3385 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_force_performance_level() local
3386 u32 levels = ps->performance_level_count; in si_dpm_force_performance_level()
3639 vddc_dly = (voltage_response_time * reference_clock) / 100; in si_program_response_times()
3640 acpi_dly = (acpi_delay_time * reference_clock) / 100; in si_program_response_times()
3641 vbi_dly = (vbi_time_out * reference_clock) / 100; in si_program_response_times()
4911 u32 clkv = (u32)((((131 * ss.percentage * ss.rate) / 100) * tmp) / freq_nom); in si_populate_mclk_value()
4947 struct ni_ps *ps = ni_get_ps(radeon_state); in si_populate_smc_sp() local
4951 for (i = 0; i < ps->performance_level_count - 1; i++) in si_populate_smc_sp()
4954 smc_state->levels[ps->performance_level_count - 1].bSP = in si_populate_smc_sp()
5091 (50 / SISLANDS_MAX_HARDWARE_POWERLEVELS) * 100 * (i + 1), in si_populate_smc_t()
5092 100 * R600_AH_DFLT, in si_populate_smc_t()
5185 threshold = state->performance_levels[state->performance_level_count-1].sclk * 100 / 100; in si_convert_power_state_to_smc()
6043 slope1 = (u16)((50 + ((16 * duty100 * pwm_diff1) / t_diff1)) / 100); in si_thermal_setup_fan_table()
6044 slope2 = (u16)((50 + ((16 * duty100 * pwm_diff2) / t_diff2)) / 100); in si_thermal_setup_fan_table()
6046 fan_table.temp_min = cpu_to_be16((50 + rdev->pm.dpm.fan.t_min) / 100); in si_thermal_setup_fan_table()
6047 fan_table.temp_med = cpu_to_be16((50 + rdev->pm.dpm.fan.t_med) / 100); in si_thermal_setup_fan_table()
6048 fan_table.temp_max = cpu_to_be16((50 + rdev->pm.dpm.fan.t_max) / 100); in si_thermal_setup_fan_table()
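In the si_thermal_setup_fan_table() matches the fan trip temperatures arrive in hundredths of a degree, so (50 + value) / 100 is a round-to-nearest division by 100, and the slope lines apply the same rounding after a small fixed-point multiply (the factor of 16). A tiny sketch of just the rounding-divide pattern, with an illustrative value:

#include <stdint.h>
#include <stdio.h>

/* Round-to-nearest integer division by 100, as used for the fan table's
 * temperature fields (inputs are in hundredths of a degree). */
static uint16_t div100_round(uint32_t v)
{
	return (uint16_t)((50 + v) / 100);
}

int main(void)
{
	printf("%u\n", (unsigned)div100_round(4549)); /* 45.49 C -> 45 */
	printf("%u\n", (unsigned)div100_round(4550)); /* 45.50 C -> 46 */
	return 0;
}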
6131 tmp64 = (u64)duty * 100; in si_fan_ctrl_get_fan_speed_percent()
6135 if (*speed > 100) in si_fan_ctrl_get_fan_speed_percent()
6136 *speed = 100; in si_fan_ctrl_get_fan_speed_percent()
6155 if (speed > 100) in si_fan_ctrl_set_fan_speed_percent()
6164 do_div(tmp64, 100); in si_fan_ctrl_set_fan_speed_percent()
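The fan speed helpers convert between a raw PWM duty and a percentage: reading computes duty * 100 / duty100 and clamps to 100, writing does the inverse with a 64-bit intermediate (the do_div() match) to avoid overflow. A standalone sketch of both directions, with hypothetical names and an assumed 8-bit full-scale duty:

#include <stdint.h>
#include <stdio.h>

/* Convert a raw PWM duty to a percentage of the full-scale duty100. */
static uint32_t duty_to_percent(uint32_t duty, uint32_t duty100)
{
	uint64_t tmp = (uint64_t)duty * 100;   /* 64-bit to avoid overflow */
	uint32_t speed = (uint32_t)(tmp / duty100);

	if (speed > 100)
		speed = 100;
	return speed;
}

/* Convert a percentage (0..100) back to a raw PWM duty. */
static uint32_t percent_to_duty(uint32_t speed, uint32_t duty100)
{
	uint64_t tmp;

	if (speed > 100)
		speed = 100;
	tmp = (uint64_t)speed * duty100;
	return (uint32_t)(tmp / 100);
}

int main(void)
{
	uint32_t duty100 = 255;                /* illustrative full-scale duty */
	uint32_t duty = percent_to_duty(40, duty100);

	printf("duty=%u percent=%u\n", duty, duty_to_percent(duty, duty100));
	return 0;
}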
6719 struct ni_ps *ps = ni_get_ps(rps); in si_parse_pplib_clock_info() local
6721 struct rv7xx_pl *pl = &ps->performance_levels[index]; in si_parse_pplib_clock_info()
6724 ps->performance_level_count = index + 1; in si_parse_pplib_clock_info()
6803 struct ni_ps *ps; in si_parse_power_table() local
6820 rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, in si_parse_power_table()
6823 if (!rdev->pm.dpm.ps) in si_parse_power_table()
6834 ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL); in si_parse_power_table()
6835 if (ps == NULL) { in si_parse_power_table()
6836 kfree(rdev->pm.dpm.ps); in si_parse_power_table()
6839 rdev->pm.dpm.ps[i].ps_priv = ps; in si_parse_power_table()
6840 si_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], in si_parse_power_table()
6855 &rdev->pm.dpm.ps[i], k, in si_parse_power_table()
7064 kfree(rdev->pm.dpm.ps[i].ps_priv); in si_dpm_fini()
7066 kfree(rdev->pm.dpm.ps); in si_dpm_fini()
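si_parse_power_table() allocates the rdev->pm.dpm.ps array with kcalloc and one private struct ni_ps per state with kzalloc, and si_dpm_fini() later frees each ps_priv and then the array; the visible error path frees the array when a per-state allocation fails. A userspace sketch of that ownership pattern with stand-in types (this sketch also frees the already-allocated private blocks on failure, which the matches above do not show):

#include <stdlib.h>
#include <stdio.h>

struct ni_ps_stub { int performance_level_count; }; /* stand-in for ni_ps */
struct radeon_ps_stub { void *ps_priv; };           /* stand-in for radeon_ps */

/* Allocate one entry per power state plus a private block per entry. */
static struct radeon_ps_stub *parse_power_table(int num_states)
{
	struct radeon_ps_stub *ps = calloc(num_states, sizeof(*ps));
	int i;

	if (!ps)
		return NULL;

	for (i = 0; i < num_states; i++) {
		struct ni_ps_stub *priv = calloc(1, sizeof(*priv));

		if (!priv) {            /* error path: release what exists */
			while (i--)
				free(ps[i].ps_priv);
			free(ps);
			return NULL;
		}
		ps[i].ps_priv = priv;
	}
	return ps;
}

static void dpm_fini(struct radeon_ps_stub *ps, int num_states)
{
	int i;

	for (i = 0; i < num_states; i++)
		free(ps[i].ps_priv);
	free(ps);
}

int main(void)
{
	struct radeon_ps_stub *ps = parse_power_table(4);

	if (!ps)
		return 1;
	dpm_fini(ps, 4);
	puts("ok");
	return 0;
}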
7077 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_debugfs_print_current_performance_level() local
7083 if (current_index >= ps->performance_level_count) { in si_dpm_debugfs_print_current_performance_level()
7086 pl = &ps->performance_levels[current_index]; in si_dpm_debugfs_print_current_performance_level()
7097 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_get_current_sclk() local
7103 if (current_index >= ps->performance_level_count) { in si_dpm_get_current_sclk()
7106 pl = &ps->performance_levels[current_index]; in si_dpm_get_current_sclk()
7115 struct ni_ps *ps = ni_get_ps(rps); in si_dpm_get_current_mclk() local
7121 if (current_index >= ps->performance_level_count) { in si_dpm_get_current_mclk()
7124 pl = &ps->performance_levels[current_index]; in si_dpm_get_current_mclk()
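The last three helpers (the debugfs print, get_current_sclk, and get_current_mclk) all guard the index reported by hardware with current_index >= ps->performance_level_count before dereferencing the level array. A tiny sketch of that bounds check, with hypothetical names:

#include <stdint.h>
#include <stdio.h>

struct level { uint32_t sclk, mclk; };

/* Return the sclk of the currently selected level, or 0 if the index
 * reported by (here simulated) hardware is out of range. */
static uint32_t current_sclk(const struct level *levels, uint32_t count,
			     uint32_t current_index)
{
	if (current_index >= count)
		return 0;               /* invalid index: report nothing */
	return levels[current_index].sclk;
}

int main(void)
{
	struct level levels[2] = { { 30000, 40000 }, { 80000, 90000 } };

	printf("%u\n", current_sclk(levels, 2, 1)); /* 80000 */
	printf("%u\n", current_sclk(levels, 2, 5)); /* 0: out of range */
	return 0;
}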