/openbmc/linux/drivers/gpu/drm/amd/amdgpu/ |
H A D | amdgpu_umc.c |
    28   struct ras_err_data *err_data, uint64_t err_addr, in amdgpu_umc_convert_error_address() argument
    34   err_data, err_addr, ch_inst, umc_inst); in amdgpu_umc_convert_error_address()
    48   struct ras_err_data err_data = {0, 0, 0, NULL}; in amdgpu_umc_page_retirement_mca() local
    51   err_data.err_addr = in amdgpu_umc_page_retirement_mca()
    54   if (!err_data.err_addr) { in amdgpu_umc_page_retirement_mca()
    63   ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr, in amdgpu_umc_page_retirement_mca()
    69   amdgpu_ras_add_bad_pages(adev, err_data.err_addr, in amdgpu_umc_page_retirement_mca()
    70   err_data.err_addr_cnt); in amdgpu_umc_page_retirement_mca()
    75   kfree(err_data.err_addr); in amdgpu_umc_page_retirement_mca()
    84   struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_umc_do_page_retirement() local
    [all …]
|
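The amdgpu_umc.c hits above outline a page-retirement flow: a ras_err_data is zero-initialized, its err_addr buffer is allocated, raw error addresses are converted into retired-page records, the records are passed to amdgpu_ras_add_bad_pages(), and the buffer is freed. Below is a minimal, self-contained C sketch of that accumulate-then-retire shape; struct err_record, convert_error_address() and add_bad_pages() are simplified stand-ins inferred from the hits, not the kernel's actual definitions.

```c
/*
 * Self-contained model of the retire flow seen in amdgpu_umc_page_retirement_mca().
 * The real kernel types live in amdgpu_ras.h/amdgpu_umc.h; everything here is a
 * simplified stand-in (assumption) so the sketch compiles on its own.
 */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct err_record {                 /* stand-in for the kernel's retired-page record */
    uint64_t retired_page;
    uint32_t channel;
    uint32_t umc_inst;
};

struct ras_err_data {               /* field names taken from the hits above */
    unsigned long ue_count;
    unsigned long ce_count;
    unsigned long err_addr_cnt;
    struct err_record *err_addr;
};

/* Hypothetical converter: turn a raw error address into one retired-page record. */
static void convert_error_address(struct ras_err_data *err_data, uint64_t err_addr,
                                  uint32_t ch_inst, uint32_t umc_inst)
{
    struct err_record *rec = &err_data->err_addr[err_data->err_addr_cnt++];

    rec->retired_page = err_addr >> 12;   /* page frame, purely illustrative */
    rec->channel = ch_inst;
    rec->umc_inst = umc_inst;
}

/* Hypothetical sink standing in for amdgpu_ras_add_bad_pages(). */
static void add_bad_pages(const struct err_record *recs, unsigned long cnt)
{
    for (unsigned long i = 0; i < cnt; i++)
        printf("retire page 0x%llx (ch %u, umc %u)\n",
               (unsigned long long)recs[i].retired_page,
               (unsigned)recs[i].channel, (unsigned)recs[i].umc_inst);
}

int main(void)
{
    struct ras_err_data err_data = {0, 0, 0, NULL};

    err_data.err_addr = calloc(1, sizeof(*err_data.err_addr));
    if (!err_data.err_addr)
        return 1;

    convert_error_address(&err_data, 0xdeadb000, 0, 1);

    if (err_data.err_addr_cnt)
        add_bad_pages(err_data.err_addr, err_data.err_addr_cnt);

    free(err_data.err_addr);
    return 0;
}
```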
H A D | umc_v8_7.c |
    93   struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v8_7_ecc_info_query_ras_error_count() local
    104  &(err_data->ce_count)); in umc_v8_7_ecc_info_query_ras_error_count()
    107  &(err_data->ue_count)); in umc_v8_7_ecc_info_query_ras_error_count()
    112  struct ras_err_data *err_data, uint64_t err_addr, in umc_v8_7_convert_error_address() argument
    126  amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v8_7_convert_error_address()
    131  struct ras_err_data *err_data, in umc_v8_7_ecc_info_query_error_address() argument
    145  if (!err_data->err_addr) in umc_v8_7_ecc_info_query_error_address()
    155  umc_v8_7_convert_error_address(adev, err_data, err_addr, in umc_v8_7_ecc_info_query_error_address()
    163  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v8_7_ecc_info_query_ras_error_address() local
    174  err_data, in umc_v8_7_ecc_info_query_ras_error_address()
    [all …]
|
H A D | umc_v8_10.c |
    147  struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v8_10_query_ecc_error_count() local
    153  &(err_data->ce_count)); in umc_v8_10_query_ecc_error_count()
    156  &(err_data->ue_count)); in umc_v8_10_query_ecc_error_count()
    206  struct ras_err_data *err_data, uint64_t err_addr, in umc_v8_10_convert_error_address() argument
    239  amdgpu_umc_fill_error_record(err_data, na_err_addr, in umc_v8_10_convert_error_address()
    251  struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v8_10_query_error_address() local
    262  if (!err_data->err_addr) { in umc_v8_10_query_error_address()
    277  umc_v8_10_convert_error_address(adev, err_data, err_addr, in umc_v8_10_query_error_address()
    385  struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v8_10_ecc_info_query_ecc_error_count() local
    389  &(err_data->ce_count)); in umc_v8_10_ecc_info_query_ecc_error_count()
    [all …]
|
H A D | umc_v6_7.c |
    167  struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v6_7_ecc_info_querry_ecc_error_count() local
    171  &(err_data->ce_count)); in umc_v6_7_ecc_info_querry_ecc_error_count()
    175  &(err_data->ue_count)); in umc_v6_7_ecc_info_querry_ecc_error_count()
    188  struct ras_err_data *err_data, uint64_t err_addr, in umc_v6_7_convert_error_address() argument
    211  amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_7_convert_error_address()
    217  amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_7_convert_error_address()
    229  struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v6_7_ecc_info_query_error_address() local
    237  if (!err_data->err_addr) in umc_v6_7_ecc_info_query_error_address()
    247  umc_v6_7_convert_error_address(adev, err_data, err_addr, in umc_v6_7_ecc_info_query_error_address()
    416  struct ras_err_data *err_data = (struct ras_err_data *)data; in umc_v6_7_query_ecc_error_count() local
    [all …]
|
H A D | nbio_v7_4.c |
    368  struct ras_err_data err_data = {0, 0, 0, NULL}; in nbio_v7_4_handle_ras_controller_intr_no_bifring() local
    393  nbio_v7_4_query_ras_error_count(adev, &err_data); in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    396  obj->err_data.ue_count += err_data.ue_count; in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    397  obj->err_data.ce_count += err_data.ce_count; in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    399  if (err_data.ce_count) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    403  obj->err_data.ce_count, in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    406  if (err_data.ue_count) in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    409  obj->err_data.ue_count, in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    592  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in nbio_v7_4_query_ras_error_count() local
    610  err_data->ce_count++; in nbio_v7_4_query_ras_error_count()
    [all …]
|
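The nbio_v7_4.c hits above (and the nbio_v7_9.c ones further down) show the RAS-controller interrupt path: a stack-local ras_err_data is filled by the block's query_ras_error_count callback, folded into the persistent per-object counters, and the fresh counts are reported. A minimal sketch of that accumulate-and-report step follows; struct ras_manager, query_ras_error_count() and the printed messages are hypothetical stand-ins, only the field names come from the hits.

```c
#include <stdio.h>

/* Simplified stand-in types; the real ones are in amdgpu_ras.h. */
struct ras_err_data {
    unsigned long ue_count;
    unsigned long ce_count;
};

struct ras_manager {                 /* hypothetical slimmed-down RAS object */
    struct ras_err_data err_data;    /* running totals, as in obj->err_data */
};

/* Pretend hardware query: normally reads error-count registers. */
static void query_ras_error_count(struct ras_err_data *err_data)
{
    err_data->ce_count += 1;         /* illustrative values only */
    err_data->ue_count += 0;
}

static void handle_ras_controller_intr(struct ras_manager *obj)
{
    struct ras_err_data err_data = {0, 0};

    query_ras_error_count(&err_data);

    /* Fold the fresh counts into the persistent totals. */
    obj->err_data.ue_count += err_data.ue_count;
    obj->err_data.ce_count += err_data.ce_count;

    if (err_data.ce_count)
        printf("%lu correctable errors detected (total %lu)\n",
               err_data.ce_count, obj->err_data.ce_count);
    if (err_data.ue_count)
        printf("%lu uncorrectable errors detected (total %lu)\n",
               err_data.ue_count, obj->err_data.ue_count);
}

int main(void)
{
    struct ras_manager obj = { {0, 0} };

    handle_ras_controller_intr(&obj);
    return 0;
}
```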
H A D | umc_v6_1.c |
    257  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v6_1_query_ras_error_count() local
    279  &(err_data->ce_count)); in umc_v6_1_query_ras_error_count()
    282  &(err_data->ue_count)); in umc_v6_1_query_ras_error_count()
    296  struct ras_err_data *err_data, in umc_v6_1_query_error_address() argument
    324  if (!err_data->err_addr) { in umc_v6_1_query_error_address()
    345  amdgpu_umc_fill_error_record(err_data, err_addr, in umc_v6_1_query_error_address()
    356  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in umc_v6_1_query_ras_error_address() local
    377  err_data, in umc_v6_1_query_ras_error_address()
|
H A D | nbio_v7_9.c |
    572  struct ras_err_data err_data = {0, 0, 0, NULL}; in nbio_v7_9_handle_ras_controller_intr_no_bifring() local
    591  nbio_v7_9_query_ras_error_count(adev, &err_data); in nbio_v7_9_handle_ras_controller_intr_no_bifring()
    594  obj->err_data.ue_count += err_data.ue_count; in nbio_v7_9_handle_ras_controller_intr_no_bifring()
    595  obj->err_data.ce_count += err_data.ce_count; in nbio_v7_9_handle_ras_controller_intr_no_bifring()
    597  if (err_data.ce_count) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
    601  obj->err_data.ce_count, in nbio_v7_9_handle_ras_controller_intr_no_bifring()
    604  if (err_data.ue_count) in nbio_v7_9_handle_ras_controller_intr_no_bifring()
    607  obj->err_data.ue_count, in nbio_v7_9_handle_ras_controller_intr_no_bifring()
|
H A D | hdp_v4_0.c |
    70   struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in hdp_v4_0_query_ras_error_count() local
    72   err_data->ue_count = 0; in hdp_v4_0_query_ras_error_count()
    73   err_data->ce_count = 0; in hdp_v4_0_query_ras_error_count()
    79   err_data->ue_count += RREG32_SOC15(HDP, 0, mmHDP_EDC_CNT); in hdp_v4_0_query_ras_error_count()
|
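The hdp_v4_0.c hits show the simplest query_ras_error_count shape: the callback receives the error status as a void pointer, casts it back to a ras_err_data, resets both counters, and adds the hardware EDC count to ue_count. A small self-contained sketch of that pattern, with a dummy read_edc_count() standing in for the RREG32_SOC15() register read:

```c
#include <stdio.h>

struct ras_err_data {
    unsigned long ue_count;
    unsigned long ce_count;
};

/* Hypothetical register read in place of RREG32_SOC15(HDP, 0, mmHDP_EDC_CNT). */
static unsigned int read_edc_count(void)
{
    return 3;   /* illustrative value */
}

/* The error status arrives as void * and is cast back, as in the hits above. */
static void query_ras_error_count(void *ras_error_status)
{
    struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status;

    err_data->ue_count = 0;
    err_data->ce_count = 0;

    err_data->ue_count += read_edc_count();
}

int main(void)
{
    struct ras_err_data err_data = {0, 0};

    query_ras_error_count(&err_data);
    printf("ue=%lu ce=%lu\n", err_data.ue_count, err_data.ce_count);
    return 0;
}
```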
H A D | gfx_v9_4.c |
    690  struct ras_err_data *err_data) in gfx_v9_4_query_utc_edc_status() argument
    716  err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
    724  err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
    738  err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
    747  err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
    760  err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
    768  err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
    782  err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
    791  err_data->ue_count += ded_count; in gfx_v9_4_query_utc_edc_status()
    805  err_data->ce_count += sec_count; in gfx_v9_4_query_utc_edc_status()
    [all …]
|
H A D | amdgpu_ras.c |
    155  struct ras_err_data err_data = {0, 0, 0, NULL}; in amdgpu_reserve_page_direct() local
    174  err_data.err_addr = &err_rec; in amdgpu_reserve_page_direct()
    175  amdgpu_umc_fill_error_record(&err_data, address, address, 0, 0); in amdgpu_reserve_page_direct()
    178  amdgpu_ras_add_bad_pages(adev, err_data.err_addr, in amdgpu_reserve_page_direct()
    179  err_data.err_addr_cnt); in amdgpu_reserve_page_direct()
    985  static void amdgpu_ras_get_ecc_info(struct amdgpu_device *adev, struct ras_err_data *err_data) in amdgpu_ras_get_ecc_info() argument
    998  adev->umc.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data); in amdgpu_ras_get_ecc_info()
    1005 adev->umc.ras->ras_block.hw_ops->query_ras_error_address(adev, err_data); in amdgpu_ras_get_ecc_info()
    1009 adev->umc.ras->ecc_info_query_ras_error_count(adev, err_data); in amdgpu_ras_get_ecc_info()
    1013 adev->umc.ras->ecc_info_query_ras_error_address(adev, err_data); in amdgpu_ras_get_ecc_info()
    [all …]
|
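The amdgpu_ras.c hits show amdgpu_reserve_page_direct() retiring a single page: err_data.err_addr is pointed at one stack record, amdgpu_umc_fill_error_record() fills it, and the record is handed to amdgpu_ras_add_bad_pages(). The sketch below models that single-record path; fill_error_record() and struct err_record are hypothetical stand-ins whose parameter order follows the call shape visible in the hits.

```c
#include <stdint.h>
#include <stdio.h>

struct err_record {                 /* stand-in for the kernel's retired-page record */
    uint64_t retired_page;
    uint32_t channel;
    uint32_t umc_inst;
};

struct ras_err_data {
    unsigned long ue_count;
    unsigned long ce_count;
    unsigned long err_addr_cnt;
    struct err_record *err_addr;
};

/* Hypothetical equivalent of amdgpu_umc_fill_error_record(): append one record. */
static void fill_error_record(struct ras_err_data *err_data, uint64_t err_addr,
                              uint64_t retired_page, uint32_t channel, uint32_t umc_inst)
{
    struct err_record *rec = &err_data->err_addr[err_data->err_addr_cnt++];

    (void)err_addr;                  /* raw address is only logged in the real driver */
    rec->retired_page = retired_page;
    rec->channel = channel;
    rec->umc_inst = umc_inst;
}

/* Single-page variant: err_addr points at one stack record, no allocation needed. */
static void reserve_page_direct(uint64_t address)
{
    struct err_record err_rec;
    struct ras_err_data err_data = {0, 0, 0, NULL};

    err_data.err_addr = &err_rec;
    fill_error_record(&err_data, address, address, 0, 0);

    printf("would retire %lu page(s), first at 0x%llx\n",
           err_data.err_addr_cnt,
           (unsigned long long)err_data.err_addr[0].retired_page);
}

int main(void)
{
    reserve_page_direct(0x1000);
    return 0;
}
```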
H A D | amdgpu_mca.c |
    66   struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_mca_query_ras_error_count() local
    68   amdgpu_mca_query_correctable_error_count(adev, mc_status_addr, &(err_data->ce_count)); in amdgpu_mca_query_ras_error_count()
    69   amdgpu_mca_query_uncorrectable_error_count(adev, mc_status_addr, &(err_data->ue_count)); in amdgpu_mca_query_ras_error_count()
|
H A D | sdma_v4_4.c |
    199  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in sdma_v4_4_query_ras_error_count_by_instance() local
    225  err_data->ue_count += sec_count; in sdma_v4_4_query_ras_error_count_by_instance()
    231  err_data->ce_count = 0; in sdma_v4_4_query_ras_error_count_by_instance()
|
H A D | umc_v6_7.h |
    75   struct ras_err_data *err_data, uint64_t err_addr,
|
H A D | mmhub_v1_0.c |
    748  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in mmhub_v1_0_query_ras_error_count() local
    753  err_data->ue_count = 0; in mmhub_v1_0_query_ras_error_count()
    754  err_data->ce_count = 0; in mmhub_v1_0_query_ras_error_count()
    765  err_data->ce_count += sec_count; in mmhub_v1_0_query_ras_error_count()
    766  err_data->ue_count += ded_count; in mmhub_v1_0_query_ras_error_count()
|
H A D | amdgpu_xgmi.c |
    988  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_xgmi_query_ras_error_count() local
    996  err_data->ue_count = 0; in amdgpu_xgmi_query_ras_error_count()
    997  err_data->ce_count = 0; in amdgpu_xgmi_query_ras_error_count()
    1059 err_data->ue_count += ue_cnt; in amdgpu_xgmi_query_ras_error_count()
    1060 err_data->ce_count += ce_cnt; in amdgpu_xgmi_query_ras_error_count()
|
H A D | amdgpu_umc.h |
    102  void amdgpu_umc_fill_error_record(struct ras_err_data *err_data,
|
H A D | amdgpu_gfx.c |
    866  void *err_data, in amdgpu_gfx_process_ras_data_cb() argument
    879  adev->gfx.ras->ras_block.hw_ops->query_ras_error_count(adev, err_data); in amdgpu_gfx_process_ras_data_cb()
    912  struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in amdgpu_gfx_ras_error_func() local
    914  if (err_data) { in amdgpu_gfx_ras_error_func()
    915  err_data->ue_count = 0; in amdgpu_gfx_ras_error_func()
    916  err_data->ce_count = 0; in amdgpu_gfx_ras_error_func()
|
H A D | amdgpu_sdma.h |
    163  void *err_data,
|
H A D | mmhub_v1_7.c |
    1246 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in mmhub_v1_7_query_ras_error_count() local
    1251 err_data->ue_count = 0; in mmhub_v1_7_query_ras_error_count()
    1252 err_data->ce_count = 0; in mmhub_v1_7_query_ras_error_count()
    1262 err_data->ce_count += sec_count; in mmhub_v1_7_query_ras_error_count()
    1263 err_data->ue_count += ded_count; in mmhub_v1_7_query_ras_error_count()
|
H A D | amdgpu_ras.h |
    460  void *err_data,
    496  struct ras_err_data err_data; member
|
H A D | gfx_v9_4_2.c |
    1650 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in gfx_v9_4_2_query_ras_error_count() local
    1656 err_data->ue_count = 0; in gfx_v9_4_2_query_ras_error_count()
    1657 err_data->ce_count = 0; in gfx_v9_4_2_query_ras_error_count()
    1660 err_data->ce_count += sec_count; in gfx_v9_4_2_query_ras_error_count()
    1661 err_data->ue_count += ded_count; in gfx_v9_4_2_query_ras_error_count()
    1664 err_data->ce_count += sec_count; in gfx_v9_4_2_query_ras_error_count()
    1665 err_data->ue_count += ded_count; in gfx_v9_4_2_query_ras_error_count()
|
H A D | mmhub_v1_8.c |
    628  struct ras_err_data *err_data = (struct ras_err_data *)ras_err_status; in mmhub_v1_8_inst_query_ras_error_count() local
    637  &err_data->ce_count); in mmhub_v1_8_inst_query_ras_error_count()
    645  &err_data->ue_count); in mmhub_v1_8_inst_query_ras_error_count()
|
H A D | mmhub_v9_4.c |
    1601 struct ras_err_data *err_data = (struct ras_err_data *)ras_error_status; in mmhub_v9_4_query_ras_error_count() local
    1606 err_data->ue_count = 0; in mmhub_v9_4_query_ras_error_count()
    1607 err_data->ce_count = 0; in mmhub_v9_4_query_ras_error_count()
    1617 err_data->ce_count += sec_count; in mmhub_v9_4_query_ras_error_count()
    1618 err_data->ue_count += ded_count; in mmhub_v9_4_query_ras_error_count()
|
/openbmc/linux/drivers/misc/ |
H A D | smpro-errmon.c |
    251  unsigned char err_data[MAX_READ_BLOCK_LENGTH]; in smpro_error_data_read() local
    271  memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH); in smpro_error_data_read()
    272  ret = regmap_noinc_read(errmon->regmap, err_info->data, err_data, err_length); in smpro_error_data_read()
    285  return sysfs_emit(buf, "%*phN\n", MAX_READ_BLOCK_LENGTH, err_data); in smpro_error_data_read()
|
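The smpro-errmon.c hit uses the name err_data differently: it is a local byte buffer that is zeroed, filled from the device with regmap_noinc_read(), and emitted as hex through sysfs_emit(). A self-contained sketch of that read-and-dump shape, with the regmap and sysfs calls replaced by plain C stand-ins and an illustrative buffer size:

```c
#include <stdio.h>
#include <string.h>

#define MAX_READ_BLOCK_LENGTH 48    /* illustrative; the driver defines its own value */

/* Stand-in for regmap_noinc_read(): fill the buffer from a pretend device. */
static int read_error_block(unsigned char *buf, size_t len)
{
    for (size_t i = 0; i < len; i++)
        buf[i] = (unsigned char)i;
    return 0;
}

int main(void)
{
    unsigned char err_data[MAX_READ_BLOCK_LENGTH];
    size_t err_length = 16;          /* would come from a length register */

    memset(err_data, 0x00, MAX_READ_BLOCK_LENGTH);
    if (read_error_block(err_data, err_length))
        return 1;

    /* The driver uses sysfs_emit(buf, "%*phN\n", ...); a plain hex dump here. */
    for (size_t i = 0; i < MAX_READ_BLOCK_LENGTH; i++)
        printf("%02x", (unsigned)err_data[i]);
    putchar('\n');
    return 0;
}
```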
/openbmc/linux/drivers/clk/hisilicon/ |
H A D | clk.c |
    78   goto err_data; in hisi_clk_init()
    84   err_data: in hisi_clk_init()
|
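The clk.c hit is unrelated to RAS: here err_data is simply a cleanup label in hisi_clk_init(), part of the usual goto-based unwind on allocation failure. A tiny self-contained sketch of that idiom, with hypothetical resources standing in for the clock data the driver actually sets up:

```c
#include <stdlib.h>

struct clk_data {                    /* hypothetical: two resources set up in order */
    void *base;
    void *table;
};

static struct clk_data *clk_init(void)
{
    struct clk_data *clk = malloc(sizeof(*clk));

    if (!clk)
        return NULL;

    clk->base = malloc(64);
    if (!clk->base)
        goto err_data;               /* unwind only what was already set up */

    clk->table = malloc(64);
    if (!clk->table)
        goto err_base;

    return clk;

err_base:
    free(clk->base);
err_data:
    free(clk);
    return NULL;
}

int main(void)
{
    struct clk_data *clk = clk_init();

    if (clk) {
        free(clk->table);
        free(clk->base);
        free(clk);
    }
    return clk ? 0 : 1;
}
```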