Lines matching refs: path (each entry gives the source line number, the matching line, and the enclosing function; "argument" and "local" note how path is used on that line)

151 	u8 path;  in _wait_rx_mode()  local
155 for (path = 0; path < RF_PATH_MAX; path++) { in _wait_rx_mode()
156 if (!(kpath & BIT(path))) in _wait_rx_mode()
160 2, 5000, false, rtwdev, path, 0x00, in _wait_rx_mode()
164 path, ret); in _wait_rx_mode()
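
A side note on the loop shape in the _wait_rx_mode() hits above: the function walks every RF path but only acts on the paths whose bit is set in the kpath mask. A minimal, self-contained sketch of that selection pattern follows; RF_PATH_MAX and BIT() are redefined here purely for the demo, and the actual register polling is replaced by a printout.

    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_RF_PATH_MAX 2            /* assumption: paths A and B */
    #define DEMO_BIT(n)      (1u << (n))

    static void for_each_kpath(uint8_t kpath)
    {
        uint8_t path;

        for (path = 0; path < DEMO_RF_PATH_MAX; path++) {
            if (!(kpath & DEMO_BIT(path)))
                continue;                 /* path not selected by the caller */
            printf("poll RF path %u until it settles in RX mode\n", path);
        }
    }
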
302 enum rtw89_rf_path path, u8 index) in _dack_reload_by_path() argument
310 path_offset = (path == RF_PATH_A ? 0 : 0x28); in _dack_reload_by_path()
318 val32 |= dack->msbk_d[path][index][i + 12] << (i * 8); in _dack_reload_by_path()
327 val32 |= dack->msbk_d[path][index][i + 8] << (i * 8); in _dack_reload_by_path()
336 val32 |= dack->msbk_d[path][index][i + 4] << (i * 8); in _dack_reload_by_path()
345 val32 |= dack->msbk_d[path][index][i] << (i * 8); in _dack_reload_by_path()
352 val32 = (dack->biask_d[path][index] << 22) | in _dack_reload_by_path()
353 (dack->dadck_d[path][index] << 14); in _dack_reload_by_path()
359 static void _dack_reload(struct rtw89_dev *rtwdev, enum rtw89_rf_path path) in _dack_reload() argument
364 _dack_reload_by_path(rtwdev, path, i); in _dack_reload()
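
The _dack_reload_by_path() lines above rebuild five 32-bit register images from the stored DACK results: sixteen MSBK bytes packed four per word (byte groups 12..15 first, then 8..11, 4..7, 0..3) and one word carrying BIASK at bit 22 and DADCK at bit 14. A self-contained sketch of just that packing, with the driver's register writes left out:

    #include <stdint.h>

    /* Pack 16 MSBK bytes plus the BIASK/DADCK pair the way the listing shows.
     * out[0..3] are the four MSBK words in descending byte-group order,
     * out[4] is the BIASK/DADCK word. */
    static void dack_pack(const uint8_t msbk[16], uint8_t biask, uint8_t dadck,
                          uint32_t out[5])
    {
        int word, i;

        for (word = 0; word < 4; word++) {
            int base = 12 - word * 4;     /* 12, 8, 4, 0 as in the listing */
            uint32_t val32 = 0;

            for (i = 0; i < 4; i++)
                val32 |= (uint32_t)msbk[base + i] << (i * 8);
            out[word] = val32;
        }
        out[4] = ((uint32_t)biask << 22) | ((uint32_t)dadck << 14);
    }
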
405 static void _dack_reset(struct rtw89_dev *rtwdev, u8 path) in _dack_reset() argument
407 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _dack_reset()
441 static void rtw8852c_txck_force(struct rtw89_dev *rtwdev, u8 path, bool force, in rtw8852c_txck_force() argument
444 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_TXCK_ON, 0x0); in rtw8852c_txck_force()
449 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_TXCK_VAL, ck); in rtw8852c_txck_force()
450 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_TXCK_ON, 0x1); in rtw8852c_txck_force()
453 static void rtw8852c_rxck_force(struct rtw89_dev *rtwdev, u8 path, bool force, in rtw8852c_rxck_force() argument
458 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_RXCK_ON, 0x0); in rtw8852c_rxck_force()
463 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_RXCK_VAL, ck); in rtw8852c_rxck_force()
464 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_RXCK_ON, 0x1); in rtw8852c_rxck_force()
479 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_CTL, def->ctl); in rtw8852c_rxck_force()
480 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_EN, def->en); in rtw8852c_rxck_force()
481 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, def->bw0); in rtw8852c_rxck_force()
482 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, def->bw1); in rtw8852c_rxck_force()
483 rtw89_phy_write32_mask(rtwdev, R_DRCK | (path << 8), B_DRCK_MUL, def->mul); in rtw8852c_rxck_force()
484 rtw89_phy_write32_mask(rtwdev, R_ADCMOD | (path << 8), B_ADCMOD_LP, def->lp); in rtw8852c_rxck_force()
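
Two address strides recur throughout these hits: registers in the AFE/clock block are duplicated per path 0x2000 apart, so the path B copy is reached with (path << 13), while the CFCH/CFIR-style blocks sit 0x100 apart and use (path << 8). The fragments mix "|" and "+" for the offset, which is equivalent here because the base address is aligned below the stride. A tiny demonstration of the arithmetic, with a placeholder base rather than a real rtw89 register:

    #include <stdint.h>

    /* Per-path copy of a register laid out with a fixed stride per RF path.
     * shift = 13 gives the 0x2000 stride (R_P0_RXCK/R_UPD_CLK style),
     * shift = 8 the 0x100 stride (R_P0_CFCH_BW0/R_CFIR_LUT style). */
    static uint32_t per_path_reg(uint32_t base, uint8_t path, unsigned int shift)
    {
        return base | ((uint32_t)path << shift);
    }

    /* per_path_reg(0x1000, 1, 13) == 0x3000; per_path_reg(0x1000, 1, 8) == 0x1100 */
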
645 static void rtw8852c_disable_rxagc(struct rtw89_dev *rtwdev, u8 path, u8 en_rxgac) in rtw8852c_disable_rxagc() argument
647 if (path == RF_PATH_A) in rtw8852c_disable_rxagc()
653 static void _iqk_rxk_setting(struct rtw89_dev *rtwdev, u8 path) in _iqk_rxk_setting() argument
657 if (path == RF_PATH_A) in _iqk_rxk_setting()
662 switch (iqk_info->iqk_bw[path]) { in _iqk_rxk_setting()
665 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_DPD_GDIS, 0x1); in _iqk_rxk_setting()
666 rtw8852c_rxck_force(rtwdev, path, true, ADC_480M); in _iqk_rxk_setting()
667 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_ACK_VAL, 0x0); in _iqk_rxk_setting()
668 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_CKT, 0x1); in _iqk_rxk_setting()
669 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), B_P0_NRBW_DBG, 0x1); in _iqk_rxk_setting()
672 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_DPD_GDIS, 0x1); in _iqk_rxk_setting()
673 rtw8852c_rxck_force(rtwdev, path, true, ADC_960M); in _iqk_rxk_setting()
674 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_ACK_VAL, 0x1); in _iqk_rxk_setting()
675 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_CKT, 0x1); in _iqk_rxk_setting()
676 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), B_P0_NRBW_DBG, 0x1); in _iqk_rxk_setting()
679 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_DPD_GDIS, 0x1); in _iqk_rxk_setting()
680 rtw8852c_rxck_force(rtwdev, path, true, ADC_1920M); in _iqk_rxk_setting()
681 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_ACK_VAL, 0x2); in _iqk_rxk_setting()
682 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_CKT, 0x1); in _iqk_rxk_setting()
683 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), B_P0_NRBW_DBG, 0x1); in _iqk_rxk_setting()
691 if (path == RF_PATH_A) in _iqk_rxk_setting()
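
In the _iqk_rxk_setting() branches above, the per-bandwidth cases differ only in the forced ADC clock and the value written to B_ACK_VAL (480M pairs with 0x0, 960M with 0x1, 1920M with 0x2); the RR_RXBB2 and B_P0_NRBW_DBG writes repeat unchanged. A small helper capturing just that visible mapping; the enum names mirror the listing, everything else is illustrative:

    enum demo_adc_ck { DEMO_ADC_480M, DEMO_ADC_960M, DEMO_ADC_1920M };

    /* B_ACK_VAL programmed alongside each forced ADC clock in the listing. */
    static unsigned int iqk_ack_val(enum demo_adc_ck ck)
    {
        switch (ck) {
        case DEMO_ADC_480M:
            return 0x0;
        case DEMO_ADC_960M:
            return 0x1;
        case DEMO_ADC_1920M:
        default:
            return 0x2;
        }
    }
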
697 static bool _iqk_check_cal(struct rtw89_dev *rtwdev, u8 path, u8 ktype) in _iqk_check_cal() argument
709 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, ret=%d\n", path, ret); in _iqk_check_cal()
712 "[IQK]S%x, type= %x, 0x8008 = 0x%x\n", path, ktype, tmp); in _iqk_check_cal()
718 enum rtw89_phy_idx phy_idx, u8 path, u8 ktype) in _iqk_one_shot() argument
721 u32 addr_rfc_ctl = R_UPD_CLK + (path << 13); in _iqk_one_shot()
727 iqk_cmd = 0x008 | (1 << (4 + path)) | (path << 1); in _iqk_one_shot()
731 iqk_cmd = 0x008 | (1 << (4 + path)); in _iqk_one_shot()
735 iqk_cmd = 0x108 | (1 << (4 + path)); in _iqk_one_shot()
739 iqk_cmd = 0x508 | (1 << (4 + path)); in _iqk_one_shot()
743 iqk_cmd = 0x208 | (1 << (4 + path)); in _iqk_one_shot()
747 iqk_cmd = 0x308 | (1 << (4 + path)); in _iqk_one_shot()
751 iqk_cmd = 0x008 | (1 << (4 + path)) | ((0x8 + iqk_info->iqk_bw[path]) << 8); in _iqk_one_shot()
754 iqk_cmd = 0x508 | (1 << (4 + path)) | (path << 1); in _iqk_one_shot()
758 iqk_cmd = 0x008 | (1 << (4 + path)) | ((0xc + iqk_info->iqk_bw[path]) << 8); in _iqk_one_shot()
762 iqk_cmd = 0x408 | (1 << (4 + path)); in _iqk_one_shot()
766 iqk_cmd = 0x608 | (1 << (4 + path)); in _iqk_one_shot()
774 fail = _iqk_check_cal(rtwdev, path, ktype); in _iqk_one_shot()
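
Every variant of iqk_cmd built in _iqk_one_shot() follows the same scheme: a base opcode in the low bits, a per-path trigger bit at BIT(4 + path), and for some calibrations either the path index shifted into bit 1 or a bandwidth-dependent nibble in bits 8..11. The listing does not say which opcode belongs to which ktype, so the helper below only reproduces the arithmetic:

    #include <stdint.h>

    /* Compose a one-shot IQK command word as the fragments do.  'base' is
     * one of 0x008/0x108/0x208/0x308/0x408/0x508/0x608; 'extra' is either
     * (path << 1) or ((0x8 + iqk_bw) << 8) / ((0xc + iqk_bw) << 8). */
    static uint16_t iqk_cmd(uint16_t base, uint8_t path, uint16_t extra)
    {
        return base | (uint16_t)(1u << (4 + path)) | extra;
    }
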
781 enum rtw89_phy_idx phy_idx, u8 path) in _rxk_group_sel() argument
789 bkrf0 = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_NBW); in _rxk_group_sel()
790 if (path == RF_PATH_B) { in _rxk_group_sel()
798 switch (iqk_info->iqk_band[path]) { in _rxk_group_sel()
801 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc); in _rxk_group_sel()
802 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0); in _rxk_group_sel()
803 rtw89_write_rf(rtwdev, path, RR_RXG, RR_RXG_IQKMOD, 0x9); in _rxk_group_sel()
806 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc); in _rxk_group_sel()
807 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0); in _rxk_group_sel()
808 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x8); in _rxk_group_sel()
811 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc); in _rxk_group_sel()
812 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0); in _rxk_group_sel()
813 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x9); in _rxk_group_sel()
820 switch (iqk_info->iqk_band[path]) { in _rxk_group_sel()
823 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, in _rxk_group_sel()
825 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_VOBUF, in _rxk_group_sel()
829 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, in _rxk_group_sel()
831 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT, in _rxk_group_sel()
835 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, in _rxk_group_sel()
837 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT, in _rxk_group_sel()
841 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), in _rxk_group_sel()
843 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), in _rxk_group_sel()
845 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), in _rxk_group_sel()
847 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_RXK); in _rxk_group_sel()
850 if (path == RF_PATH_B) in _rxk_group_sel()
851 rtw89_write_rf(rtwdev, path, RR_IQKPLL, RR_IQKPLL_MOD, 0x0); in _rxk_group_sel()
852 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, bkrf0); in _rxk_group_sel()
855 iqk_info->nb_rxcfir[path] = 0x40000002; in _rxk_group_sel()
856 iqk_info->is_wb_rxiqk[path] = false; in _rxk_group_sel()
858 iqk_info->nb_rxcfir[path] = 0x40000000; in _rxk_group_sel()
859 iqk_info->is_wb_rxiqk[path] = true; in _rxk_group_sel()
866 enum rtw89_phy_idx phy_idx, u8 path) in _iqk_nbrxk() argument
874 bkrf0 = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_NBW); in _iqk_nbrxk()
875 if (path == RF_PATH_B) { in _iqk_nbrxk()
883 switch (iqk_info->iqk_band[path]) { in _iqk_nbrxk()
886 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc); in _iqk_nbrxk()
887 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0); in _iqk_nbrxk()
888 rtw89_write_rf(rtwdev, path, RR_RXG, RR_RXG_IQKMOD, 0x9); in _iqk_nbrxk()
891 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc); in _iqk_nbrxk()
892 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0); in _iqk_nbrxk()
893 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x8); in _iqk_nbrxk()
896 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc); in _iqk_nbrxk()
897 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0); in _iqk_nbrxk()
898 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x9); in _iqk_nbrxk()
904 switch (iqk_info->iqk_band[path]) { in _iqk_nbrxk()
907 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, _rxk_g_idxrxgain[gp]); in _iqk_nbrxk()
908 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_VOBUF, _rxk_g_idxattc2[gp]); in _iqk_nbrxk()
911 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, _rxk_a_idxrxgain[gp]); in _iqk_nbrxk()
912 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT, _rxk_a_idxattc2[gp]); in _iqk_nbrxk()
915 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, _rxk_a6_idxrxgain[gp]); in _iqk_nbrxk()
916 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT, _rxk_a6_idxattc2[gp]); in _iqk_nbrxk()
920 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1); in _iqk_nbrxk()
921 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x0); in _iqk_nbrxk()
922 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP_V1, gp); in _iqk_nbrxk()
923 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_RXK); in _iqk_nbrxk()
925 if (path == RF_PATH_B) in _iqk_nbrxk()
926 rtw89_write_rf(rtwdev, path, RR_IQKPLL, RR_IQKPLL_MOD, 0x0); in _iqk_nbrxk()
928 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, bkrf0); in _iqk_nbrxk()
931 iqk_info->nb_rxcfir[path] = in _iqk_nbrxk()
932 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), in _iqk_nbrxk()
935 iqk_info->nb_rxcfir[path] = 0x40000002; in _iqk_nbrxk()
937 iqk_info->is_wb_rxiqk[path] = false; in _iqk_nbrxk()
942 enum rtw89_phy_idx phy_idx, u8 path) in _txk_group_sel() argument
949 switch (iqk_info->iqk_band[path]) { in _txk_group_sel()
951 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, in _txk_group_sel()
953 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, in _txk_group_sel()
955 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, in _txk_group_sel()
958 R_KIP_IQP + (path << 8), in _txk_group_sel()
962 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, in _txk_group_sel()
964 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, in _txk_group_sel()
966 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, in _txk_group_sel()
969 R_KIP_IQP + (path << 8), in _txk_group_sel()
973 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, in _txk_group_sel()
975 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, in _txk_group_sel()
977 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, in _txk_group_sel()
980 R_KIP_IQP + (path << 8), in _txk_group_sel()
986 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), in _txk_group_sel()
988 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), in _txk_group_sel()
990 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), in _txk_group_sel()
992 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), in _txk_group_sel()
996 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_TXK); in _txk_group_sel()
1000 iqk_info->nb_txcfir[path] = 0x40000002; in _txk_group_sel()
1001 iqk_info->is_wb_txiqk[path] = false; in _txk_group_sel()
1003 iqk_info->nb_txcfir[path] = 0x40000000; in _txk_group_sel()
1004 iqk_info->is_wb_txiqk[path] = true; in _txk_group_sel()
1011 enum rtw89_phy_idx phy_idx, u8 path) in _iqk_nbtxk() argument
1017 switch (iqk_info->iqk_band[path]) { in _iqk_nbtxk()
1019 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, _txk_g_power_range[gp]); in _iqk_nbtxk()
1020 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, _txk_g_track_range[gp]); in _iqk_nbtxk()
1021 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, _txk_g_gain_bb[gp]); in _iqk_nbtxk()
1022 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_nbtxk()
1026 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, _txk_a_power_range[gp]); in _iqk_nbtxk()
1027 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, _txk_a_track_range[gp]); in _iqk_nbtxk()
1028 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, _txk_a_gain_bb[gp]); in _iqk_nbtxk()
1029 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_nbtxk()
1033 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, _txk_a6_power_range[gp]); in _iqk_nbtxk()
1034 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, _txk_a6_track_range[gp]); in _iqk_nbtxk()
1035 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, _txk_a6_gain_bb[gp]); in _iqk_nbtxk()
1036 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_nbtxk()
1043 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1); in _iqk_nbtxk()
1044 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x1); in _iqk_nbtxk()
1045 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G2, 0x0); in _iqk_nbtxk()
1046 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP, gp + 1); in _iqk_nbtxk()
1049 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_NBTXK); in _iqk_nbtxk()
1052 iqk_info->nb_txcfir[path] = in _iqk_nbtxk()
1053 rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8), in _iqk_nbtxk()
1056 iqk_info->nb_txcfir[path] = 0x40000002; in _iqk_nbtxk()
1058 iqk_info->is_wb_txiqk[path] = false; in _iqk_nbtxk()
1063 static bool _lok_finetune_check(struct rtw89_dev *rtwdev, u8 path) in _lok_finetune_check() argument
1076 val = rtw89_read_rf(rtwdev, path, RR_TXMO, RFREG_MASK); in _lok_finetune_check()
1085 iqk_info->lok_idac[idx][path] = val; in _lok_finetune_check()
1087 val = rtw89_read_rf(rtwdev, path, RR_LOKVB, RFREG_MASK); in _lok_finetune_check()
1096 iqk_info->lok_vbuf[idx][path] = val; in _lok_finetune_check()
1102 enum rtw89_phy_idx phy_idx, u8 path) in _iqk_lok() argument
1113 switch (iqk_info->iqk_band[path]) { in _iqk_lok()
1115 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6); in _iqk_lok()
1116 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1121 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6); in _iqk_lok()
1122 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1127 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6); in _iqk_lok()
1128 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1135 tmp = _iqk_one_shot(rtwdev, phy_idx, path, tmp_id); in _iqk_lok()
1136 iqk_info->lok_cor_fail[0][path] = tmp; in _iqk_lok()
1139 switch (iqk_info->iqk_band[path]) { in _iqk_lok()
1141 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12); in _iqk_lok()
1142 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1146 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12); in _iqk_lok()
1147 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1151 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12); in _iqk_lok()
1152 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1158 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER); in _iqk_lok()
1161 switch (iqk_info->iqk_band[path]) { in _iqk_lok()
1163 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6); in _iqk_lok()
1164 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1169 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6); in _iqk_lok()
1170 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1175 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6); in _iqk_lok()
1176 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1183 tmp = _iqk_one_shot(rtwdev, phy_idx, path, tmp_id); in _iqk_lok()
1184 iqk_info->lok_fin_fail[0][path] = tmp; in _iqk_lok()
1187 switch (iqk_info->iqk_band[path]) { in _iqk_lok()
1190 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12); in _iqk_lok()
1191 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1195 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12); in _iqk_lok()
1196 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1200 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12); in _iqk_lok()
1201 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), in _iqk_lok()
1205 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER); in _iqk_lok()
1206 fail = _lok_finetune_check(rtwdev, path); in _iqk_lok()
1211 static void _iqk_txk_setting(struct rtw89_dev *rtwdev, u8 path) in _iqk_txk_setting() argument
1215 switch (iqk_info->iqk_band[path]) { in _iqk_txk_setting()
1218 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT2, 0x0); in _iqk_txk_setting()
1219 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT1, 0x0); in _iqk_txk_setting()
1220 rtw89_write_rf(rtwdev, path, RR_TXG2, RR_TXG2_ATT0, 0x1); in _iqk_txk_setting()
1221 rtw89_write_rf(rtwdev, path, RR_TXA2, RR_TXA2_LDO, 0xf); in _iqk_txk_setting()
1222 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0); in _iqk_txk_setting()
1223 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1); in _iqk_txk_setting()
1224 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, in _iqk_txk_setting()
1227 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0); in _iqk_txk_setting()
1228 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6); in _iqk_txk_setting()
1231 rtw89_write_rf(rtwdev, path, RR_TXATANK, RR_TXATANK_LBSW2, 0x0); in _iqk_txk_setting()
1232 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXAS, 0x1); in _iqk_txk_setting()
1233 rtw89_write_rf(rtwdev, path, RR_TXA2, RR_TXA2_LDO, 0xf); in _iqk_txk_setting()
1234 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0); in _iqk_txk_setting()
1235 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1); in _iqk_txk_setting()
1236 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, in _iqk_txk_setting()
1239 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0); in _iqk_txk_setting()
1240 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6); in _iqk_txk_setting()
1243 rtw89_write_rf(rtwdev, path, RR_TXATANK, RR_TXATANK_LBSW2, 0x0); in _iqk_txk_setting()
1244 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXAS, 0x1); in _iqk_txk_setting()
1245 rtw89_write_rf(rtwdev, path, RR_TXA2, RR_TXA2_LDO, 0xf); in _iqk_txk_setting()
1246 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0); in _iqk_txk_setting()
1247 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1); in _iqk_txk_setting()
1248 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, in _iqk_txk_setting()
1251 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0); in _iqk_txk_setting()
1252 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6); in _iqk_txk_setting()
1258 u8 path) in _iqk_info_iqk() argument
1264 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_thermal = %lu\n", path, in _iqk_info_iqk()
1265 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path])); in _iqk_info_iqk()
1266 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_LOK_COR_fail= %d\n", path, in _iqk_info_iqk()
1267 iqk_info->lok_cor_fail[0][path]); in _iqk_info_iqk()
1268 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_LOK_FIN_fail= %d\n", path, in _iqk_info_iqk()
1269 iqk_info->lok_fin_fail[0][path]); in _iqk_info_iqk()
1270 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_TXIQK_fail = %d\n", path, in _iqk_info_iqk()
1271 iqk_info->iqk_tx_fail[0][path]); in _iqk_info_iqk()
1272 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_RXIQK_fail= %d,\n", path, in _iqk_info_iqk()
1273 iqk_info->iqk_rx_fail[0][path]); in _iqk_info_iqk()
1275 flag = iqk_info->lok_cor_fail[0][path]; in _iqk_info_iqk()
1276 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FCOR << (path * 4), flag); in _iqk_info_iqk()
1277 flag = iqk_info->lok_fin_fail[0][path]; in _iqk_info_iqk()
1278 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FFIN << (path * 4), flag); in _iqk_info_iqk()
1279 flag = iqk_info->iqk_tx_fail[0][path]; in _iqk_info_iqk()
1280 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FTX << (path * 4), flag); in _iqk_info_iqk()
1281 flag = iqk_info->iqk_rx_fail[0][path]; in _iqk_info_iqk()
1282 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_F_RX << (path * 4), flag); in _iqk_info_iqk()
1284 tmp = rtw89_phy_read32_mask(rtwdev, R_IQK_RES + (path << 8), MASKDWORD); in _iqk_info_iqk()
1285 iqk_info->bp_iqkenable[path] = tmp; in _iqk_info_iqk()
1286 tmp = rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD); in _iqk_info_iqk()
1287 iqk_info->bp_txkresult[path] = tmp; in _iqk_info_iqk()
1288 tmp = rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD); in _iqk_info_iqk()
1289 iqk_info->bp_rxkresult[path] = tmp; in _iqk_info_iqk()
1294 tmp = rtw89_phy_read32_mask(rtwdev, R_IQKINF, B_IQKINF_FAIL << (path * 4)); in _iqk_info_iqk()
1297 rtw89_phy_write32_mask(rtwdev, R_IQKINF2, B_IQKINF2_FCNT << (path * 4), in _iqk_info_iqk()
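
The _iqk_info_iqk() lines publish each path's result flags by shifting a per-flag mask by (path * 4), so every path owns one 4-bit nibble of R_IQKINF (and of the fail counter in R_IQKINF2). The demo below shows only the nibble-selection arithmetic; the flag bits are placeholders, not the real B_IQKINF_* definitions:

    #include <stdint.h>

    #define DEMO_FLAG_FCOR 0x1u   /* placeholder bit positions */
    #define DEMO_FLAG_FFIN 0x2u
    #define DEMO_FLAG_FTX  0x4u
    #define DEMO_FLAG_FRX  0x8u

    /* Set or clear one flag inside the 4-bit nibble owned by 'path'. */
    static uint32_t iqkinf_update(uint32_t reg, uint8_t path, uint32_t flag,
                                  int fail)
    {
        uint32_t mask = flag << (path * 4);

        return fail ? (reg | mask) : (reg & ~mask);
    }
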
1301 static void _iqk_by_path(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path) in _iqk_by_path() argument
1305 _iqk_txk_setting(rtwdev, path); in _iqk_by_path()
1306 iqk_info->lok_fail[path] = _iqk_lok(rtwdev, phy_idx, path); in _iqk_by_path()
1309 iqk_info->iqk_tx_fail[0][path] = _iqk_nbtxk(rtwdev, phy_idx, path); in _iqk_by_path()
1311 iqk_info->iqk_tx_fail[0][path] = _txk_group_sel(rtwdev, phy_idx, path); in _iqk_by_path()
1313 _iqk_rxk_setting(rtwdev, path); in _iqk_by_path()
1315 iqk_info->iqk_rx_fail[0][path] = _iqk_nbrxk(rtwdev, phy_idx, path); in _iqk_by_path()
1317 iqk_info->iqk_rx_fail[0][path] = _rxk_group_sel(rtwdev, phy_idx, path); in _iqk_by_path()
1319 _iqk_info_iqk(rtwdev, phy_idx, path); in _iqk_by_path()
1323 enum rtw89_phy_idx phy, u8 path) in _iqk_get_ch_info() argument
1330 iqk_info->iqk_band[path] = chan->band_type; in _iqk_get_ch_info()
1331 iqk_info->iqk_bw[path] = chan->band_width; in _iqk_get_ch_info()
1332 iqk_info->iqk_ch[path] = chan->channel; in _iqk_get_ch_info()
1335 "[IQK]iqk_info->iqk_band[%x] = 0x%x\n", path, in _iqk_get_ch_info()
1336 iqk_info->iqk_band[path]); in _iqk_get_ch_info()
1338 path, iqk_info->iqk_bw[path]); in _iqk_get_ch_info()
1340 path, iqk_info->iqk_ch[path]); in _iqk_get_ch_info()
1342 "[IQK]S%d (PHY%d): / DBCC %s/ %s/ CH%d/ %s\n", path, phy, in _iqk_get_ch_info()
1344 iqk_info->iqk_band[path] == 0 ? "2G" : in _iqk_get_ch_info()
1345 iqk_info->iqk_band[path] == 1 ? "5G" : "6G", in _iqk_get_ch_info()
1346 iqk_info->iqk_ch[path], in _iqk_get_ch_info()
1347 iqk_info->iqk_bw[path] == 0 ? "20M" : in _iqk_get_ch_info()
1348 iqk_info->iqk_bw[path] == 1 ? "40M" : "80M"); in _iqk_get_ch_info()
1355 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BAND << (path * 16), in _iqk_get_ch_info()
1356 iqk_info->iqk_band[path]); in _iqk_get_ch_info()
1357 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BW << (path * 16), in _iqk_get_ch_info()
1358 iqk_info->iqk_bw[path]); in _iqk_get_ch_info()
1359 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_CH << (path * 16), in _iqk_get_ch_info()
1360 iqk_info->iqk_ch[path]); in _iqk_get_ch_info()
1366 u8 path) in _iqk_start_iqk() argument
1368 _iqk_by_path(rtwdev, phy_idx, path); in _iqk_start_iqk()
1371 static void _iqk_restore(struct rtw89_dev *rtwdev, u8 path) in _iqk_restore() argument
1376 rtw89_phy_write32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD, in _iqk_restore()
1377 iqk_info->nb_txcfir[path]); in _iqk_restore()
1378 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD, in _iqk_restore()
1379 iqk_info->nb_rxcfir[path]); in _iqk_restore()
1381 0x00001219 + (path << 4)); in _iqk_restore()
1383 fail = _iqk_check_cal(rtwdev, path, 0x12); in _iqk_restore()
1390 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x0); in _iqk_restore()
1391 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX); in _iqk_restore()
1392 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1); in _iqk_restore()
1396 enum rtw89_phy_idx phy_idx, u8 path) in _iqk_afebb_restore() argument
1398 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _iqk_afebb_restore()
1402 rtw8852c_disable_rxagc(rtwdev, path, 0x1); in _iqk_afebb_restore()
1405 static void _iqk_preset(struct rtw89_dev *rtwdev, u8 path) in _iqk_preset() argument
1411 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8), B_COEF_SEL_IQC, idx); in _iqk_preset()
1412 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G3, idx); in _iqk_preset()
1413 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0); in _iqk_preset()
1419 enum rtw89_phy_idx phy_idx, u8 path) in _iqk_macbb_setting() argument
1424 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0); in _iqk_macbb_setting()
1425 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x1); in _iqk_macbb_setting()
1426 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x0); in _iqk_macbb_setting()
1427 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x1); in _iqk_macbb_setting()
1428 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x0); in _iqk_macbb_setting()
1431 rtw8852c_disable_rxagc(rtwdev, path, 0x0); in _iqk_macbb_setting()
1432 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), MASKDWORD, 0xf801fffd); in _iqk_macbb_setting()
1433 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_DPD_DIS, 0x1); in _iqk_macbb_setting()
1434 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_DAC_VAL, 0x1); in _iqk_macbb_setting()
1436 rtw8852c_txck_force(rtwdev, path, true, DAC_960M); in _iqk_macbb_setting()
1437 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_DPD_GDIS, 0x1); in _iqk_macbb_setting()
1439 rtw8852c_rxck_force(rtwdev, path, true, ADC_1920M); in _iqk_macbb_setting()
1440 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_ACK_VAL, 0x2); in _iqk_macbb_setting()
1442 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW | (path << 13), B_P0_NRBW_DBG, 0x1); in _iqk_macbb_setting()
1447 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x1); in _iqk_macbb_setting()
1448 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x1); in _iqk_macbb_setting()
1451 static void _rck(struct rtw89_dev *rtwdev, enum rtw89_rf_path path) in _rck() argument
1457 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] ====== S%d RCK ======\n", path); in _rck()
1459 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK); in _rck()
1461 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0); in _rck()
1462 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX); in _rck()
1465 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK)); in _rck()
1468 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, 0x00240); in _rck()
1471 false, rtwdev, path, 0x1c, BIT(3)); in _rck()
1475 rck_val = rtw89_read_rf(rtwdev, path, RR_RCKC, RR_RCKC_CA); in _rck()
1476 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, rck_val); in _rck()
1478 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5); in _rck()
1482 rtw89_read_rf(rtwdev, path, RR_RCKC, RFREG_MASK), in _rck()
1483 rtw89_read_rf(rtwdev, path, RR_RCKS, RFREG_MASK)); in _rck()
1489 u8 ch, path; in _iqk_init() local
1506 for (path = 0; path < RTW8852C_IQK_SS; path++) { in _iqk_init()
1507 iqk_info->lok_cor_fail[ch][path] = false; in _iqk_init()
1508 iqk_info->lok_fin_fail[ch][path] = false; in _iqk_init()
1509 iqk_info->iqk_tx_fail[ch][path] = false; in _iqk_init()
1510 iqk_info->iqk_rx_fail[ch][path] = false; in _iqk_init()
1511 iqk_info->iqk_mcc_ch[ch][path] = 0x0; in _iqk_init()
1512 iqk_info->iqk_table_idx[path] = 0x0; in _iqk_init()
1518 enum rtw89_phy_idx phy_idx, u8 path) in _doiqk() argument
1533 _iqk_get_ch_info(rtwdev, phy_idx, path); in _doiqk()
1535 _rfk_backup_rf_reg(rtwdev, backup_rf_val[path], path); in _doiqk()
1536 _iqk_macbb_setting(rtwdev, phy_idx, path); in _doiqk()
1537 _iqk_preset(rtwdev, path); in _doiqk()
1538 _iqk_start_iqk(rtwdev, phy_idx, path); in _doiqk()
1539 _iqk_restore(rtwdev, path); in _doiqk()
1540 _iqk_afebb_restore(rtwdev, phy_idx, path); in _doiqk()
1542 _rfk_restore_rf_reg(rtwdev, backup_rf_val[path], path); in _doiqk()
1564 static void _rx_dck_value_rewrite(struct rtw89_dev *rtwdev, u8 path, u8 addr, in _rx_dck_value_rewrite() argument
1576 rtw89_write_rf(rtwdev, path, RR_LUTPLL, RR_CAL_RW, 0x1); in _rx_dck_value_rewrite()
1577 rtw89_write_rf(rtwdev, path, RR_RFC, RR_WCAL, 0x1); in _rx_dck_value_rewrite()
1578 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, 0x1); in _rx_dck_value_rewrite()
1579 rtw89_write_rf(rtwdev, path, RR_LUTWA, MASKBYTE0, addr); in _rx_dck_value_rewrite()
1580 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, ofst_val); in _rx_dck_value_rewrite()
1581 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, ofst_val); in _rx_dck_value_rewrite()
1582 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, 0x0); in _rx_dck_value_rewrite()
1583 rtw89_write_rf(rtwdev, path, RR_RFC, RR_WCAL, 0x0); in _rx_dck_value_rewrite()
1584 rtw89_write_rf(rtwdev, path, RR_LUTPLL, RR_CAL_RW, 0x0); in _rx_dck_value_rewrite()
1591 static bool _rx_dck_rek_check(struct rtw89_dev *rtwdev, u8 path) in _rx_dck_rek_check() argument
1601 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i]); in _rx_dck_rek_check()
1602 i_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_rek_check()
1603 q_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_rek_check()
1608 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i]); in _rx_dck_rek_check()
1609 i_even = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_rek_check()
1610 q_even = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_rek_check()
1618 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i] + 1); in _rx_dck_rek_check()
1619 i_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_rek_check()
1620 q_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_rek_check()
1625 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i] + 1); in _rx_dck_rek_check()
1626 i_odd = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_rek_check()
1627 q_odd = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_rek_check()
1639 static void _rx_dck_fix_if_need(struct rtw89_dev *rtwdev, u8 path, u8 addr, in _rx_dck_fix_if_need() argument
1661 _rx_dck_value_rewrite(rtwdev, path, addr, val_i, val_q); in _rx_dck_fix_if_need()
1664 static void _rx_dck_recover(struct rtw89_dev *rtwdev, u8 path) in _rx_dck_recover() argument
1675 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i]); in _rx_dck_recover()
1676 i_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_recover()
1677 q_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_recover()
1679 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i] + 1); in _rx_dck_recover()
1680 i_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_recover()
1681 q_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_recover()
1687 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i]); in _rx_dck_recover()
1688 i_even = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_recover()
1689 q_even = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_recover()
1694 _rx_dck_fix_if_need(rtwdev, path, _dck_addr[i], in _rx_dck_recover()
1701 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i] + 1); in _rx_dck_recover()
1702 i_odd = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA); in _rx_dck_recover()
1703 q_odd = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA); in _rx_dck_recover()
1708 _rx_dck_fix_if_need(rtwdev, path, _dck_addr[i] + 1, in _rx_dck_recover()
1713 static void _rx_dck_toggle(struct rtw89_dev *rtwdev, u8 path) in _rx_dck_toggle() argument
1718 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x0); in _rx_dck_toggle()
1719 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x1); in _rx_dck_toggle()
1722 2, 2000, false, rtwdev, path, in _rx_dck_toggle()
1725 rtw89_warn(rtwdev, "[RX_DCK] S%d RXDCK timeout\n", path); in _rx_dck_toggle()
1727 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RX_DCK] S%d RXDCK finish\n", path); in _rx_dck_toggle()
1729 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x0); in _rx_dck_toggle()
1732 static void _set_rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, u8 path, in _set_rx_dck() argument
1737 rtw89_write_rf(rtwdev, path, RR_DCK1, RR_DCK1_CLR, 0x0); in _set_rx_dck()
1739 _rx_dck_toggle(rtwdev, path); in _set_rx_dck()
1740 if (rtw89_read_rf(rtwdev, path, RR_DCKC, RR_DCKC_CHK) == 0) in _set_rx_dck()
1742 res = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_DONE); in _set_rx_dck()
1744 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_IDAC, res); in _set_rx_dck()
1745 _rx_dck_toggle(rtwdev, path); in _set_rx_dck()
1746 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_IDAC, 0x1); in _set_rx_dck()
1829 enum rtw89_rf_path path, bool is_bybb) in _rf_direct_cntrl() argument
1832 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1); in _rf_direct_cntrl()
1834 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0); in _rf_direct_cntrl()
1838 enum rtw89_rf_path path, bool off);
1841 u32 reg_bkup[][RTW8852C_DPK_KIP_REG_NUM], u8 path) in _dpk_bkup_kip() argument
1846 reg_bkup[path][i] = in _dpk_bkup_kip()
1847 rtw89_phy_read32_mask(rtwdev, reg[i] + (path << 8), MASKDWORD); in _dpk_bkup_kip()
1850 reg[i] + (path << 8), reg_bkup[path][i]); in _dpk_bkup_kip()
1855 u32 reg_bkup[][RTW8852C_DPK_KIP_REG_NUM], u8 path) in _dpk_reload_kip() argument
1860 rtw89_phy_write32_mask(rtwdev, reg[i] + (path << 8), in _dpk_reload_kip()
1861 MASKDWORD, reg_bkup[path][i]); in _dpk_reload_kip()
1863 reg[i] + (path << 8), reg_bkup[path][i]); in _dpk_reload_kip()
1868 enum rtw89_rf_path path, enum rtw8852c_dpk_id id) in _dpk_one_shot() argument
1874 dpk_cmd = (u16)((id << 8) | (0x19 + path * 0x12)); in _dpk_one_shot()
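
The single _dpk_one_shot() hit shows the whole DPK command encoding: the action id goes in the high byte, and the low byte is a per-path code, 0x19 for path A and 0x19 + 0x12 = 0x2b for path B. Reproduced on its own:

    #include <stdint.h>

    /* DPK one-shot command word as composed in the listing. */
    static uint16_t dpk_cmd(uint8_t id, uint8_t path)
    {
        return (uint16_t)((id << 8) | (0x19 + path * 0x12));
    }
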
1903 enum rtw89_rf_path path) in _dpk_information() argument
1908 u8 kidx = dpk->cur_idx[path]; in _dpk_information()
1910 dpk->bp[path][kidx].band = chan->band_type; in _dpk_information()
1911 dpk->bp[path][kidx].ch = chan->channel; in _dpk_information()
1912 dpk->bp[path][kidx].bw = chan->band_width; in _dpk_information()
1916 path, dpk->cur_idx[path], phy, in _dpk_information()
1917 rtwdev->is_tssi_mode[path] ? "on" : "off", in _dpk_information()
1919 dpk->bp[path][kidx].band == 0 ? "2G" : in _dpk_information()
1920 dpk->bp[path][kidx].band == 1 ? "5G" : "6G", in _dpk_information()
1921 dpk->bp[path][kidx].ch, in _dpk_information()
1922 dpk->bp[path][kidx].bw == 0 ? "20M" : in _dpk_information()
1923 dpk->bp[path][kidx].bw == 1 ? "40M" : "80M"); in _dpk_information()
1928 enum rtw89_rf_path path, u8 kpath) in _dpk_bb_afe_setting() argument
1931 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x1); in _dpk_bb_afe_setting()
1932 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x0); in _dpk_bb_afe_setting()
1933 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x1); in _dpk_bb_afe_setting()
1934 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x0); in _dpk_bb_afe_setting()
1937 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), MASKDWORD, 0xd801dffd); in _dpk_bb_afe_setting()
1940 rtw8852c_txck_force(rtwdev, path, true, DAC_960M); in _dpk_bb_afe_setting()
1943 rtw8852c_rxck_force(rtwdev, path, true, ADC_1920M); in _dpk_bb_afe_setting()
1944 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), in _dpk_bb_afe_setting()
1952 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x1); in _dpk_bb_afe_setting()
1953 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x1); in _dpk_bb_afe_setting()
1955 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d BB/AFE setting\n", path); in _dpk_bb_afe_setting()
1958 static void _dpk_bb_afe_restore(struct rtw89_dev *rtwdev, u8 path) in _dpk_bb_afe_restore() argument
1960 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), in _dpk_bb_afe_restore()
1962 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x1); in _dpk_bb_afe_restore()
1963 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x0); in _dpk_bb_afe_restore()
1964 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x1); in _dpk_bb_afe_restore()
1965 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x0); in _dpk_bb_afe_restore()
1966 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), MASKDWORD, 0x00000000); in _dpk_bb_afe_restore()
1967 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK + (path << 13), B_P0_TXCK_ALL, 0x00); in _dpk_bb_afe_restore()
1968 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x0); in _dpk_bb_afe_restore()
1969 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x0); in _dpk_bb_afe_restore()
1971 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d BB/AFE restore\n", path); in _dpk_bb_afe_restore()
1975 enum rtw89_rf_path path, bool is_pause) in _dpk_tssi_pause() argument
1977 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13), in _dpk_tssi_pause()
1980 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d TSSI %s\n", path, in _dpk_tssi_pause()
1984 static void _dpk_kip_control_rfc(struct rtw89_dev *rtwdev, u8 path, bool ctrl_by_kip) in _dpk_kip_control_rfc() argument
1986 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_IQK_RFC_ON, ctrl_by_kip); in _dpk_kip_control_rfc()
1991 static void _dpk_txpwr_bb_force(struct rtw89_dev *rtwdev, u8 path, bool force) in _dpk_txpwr_bb_force() argument
1993 rtw89_phy_write32_mask(rtwdev, R_TXPWRB + (path << 13), B_TXPWRB_ON, force); in _dpk_txpwr_bb_force()
1994 rtw89_phy_write32_mask(rtwdev, R_TXPWRB_H + (path << 13), B_TXPWRB_RDY, force); in _dpk_txpwr_bb_force()
1997 path, force ? "on" : "off"); in _dpk_txpwr_bb_force()
2001 enum rtw89_rf_path path) in _dpk_kip_restore() argument
2003 _dpk_one_shot(rtwdev, phy, path, D_KIP_RESTORE); in _dpk_kip_restore()
2004 _dpk_kip_control_rfc(rtwdev, path, false); in _dpk_kip_restore()
2005 _dpk_txpwr_bb_force(rtwdev, path, false); in _dpk_kip_restore()
2006 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d restore KIP\n", path); in _dpk_kip_restore()
2011 enum rtw89_rf_path path) in _dpk_lbk_rxiqk() argument
2017 rtw89_phy_write32_mask(rtwdev, R_DPD_V1 + (path << 8), B_DPD_LBK, 0x1); in _dpk_lbk_rxiqk()
2020 _dpk_kip_control_rfc(rtwdev, path, false); in _dpk_lbk_rxiqk()
2022 cur_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB); in _dpk_lbk_rxiqk()
2023 rf_11 = rtw89_read_rf(rtwdev, path, RR_TXIG, RFREG_MASK); in _dpk_lbk_rxiqk()
2024 reg_81cc = rtw89_phy_read32_mask(rtwdev, R_KIP_IQP + (path << 8), in _dpk_lbk_rxiqk()
2027 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0); in _dpk_lbk_rxiqk()
2028 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x3); in _dpk_lbk_rxiqk()
2029 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0xd); in _dpk_lbk_rxiqk()
2030 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB, 0x1f); in _dpk_lbk_rxiqk()
2032 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), B_KIP_IQP_IQSW, 0x12); in _dpk_lbk_rxiqk()
2033 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), B_KIP_IQP_SW, 0x3); in _dpk_lbk_rxiqk()
2035 _dpk_kip_control_rfc(rtwdev, path, true); in _dpk_lbk_rxiqk()
2039 _dpk_one_shot(rtwdev, phy, path, LBK_RXIQK); in _dpk_lbk_rxiqk()
2041 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d LBK RXIQC = 0x%x\n", path, in _dpk_lbk_rxiqk()
2042 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD)); in _dpk_lbk_rxiqk()
2044 _dpk_kip_control_rfc(rtwdev, path, false); in _dpk_lbk_rxiqk()
2046 rtw89_write_rf(rtwdev, path, RR_TXIG, RFREG_MASK, rf_11); in _dpk_lbk_rxiqk()
2047 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB, cur_rxbb); in _dpk_lbk_rxiqk()
2048 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), B_KIP_IQP_SW, reg_81cc); in _dpk_lbk_rxiqk()
2052 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_DI, 0x1); in _dpk_lbk_rxiqk()
2054 _dpk_kip_control_rfc(rtwdev, path, true); in _dpk_lbk_rxiqk()
2058 enum rtw89_rf_path path, u8 kidx) in _dpk_rf_setting() argument
2062 if (dpk->bp[path][kidx].band == RTW89_BAND_2G) { in _dpk_rf_setting()
2063 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, in _dpk_rf_setting()
2065 rtw89_write_rf(rtwdev, path, RR_MOD_V1, RR_MOD_MASK, RF_DPK); in _dpk_rf_setting()
2066 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_ATTC, 0x2); in _dpk_rf_setting()
2067 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_ATTR, 0x4); in _dpk_rf_setting()
2068 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1); in _dpk_rf_setting()
2069 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1); in _dpk_rf_setting()
2073 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK), in _dpk_rf_setting()
2074 rtw89_read_rf(rtwdev, path, RR_RXBB, RFREG_MASK), in _dpk_rf_setting()
2075 rtw89_read_rf(rtwdev, path, RR_TIA, RFREG_MASK), in _dpk_rf_setting()
2076 rtw89_read_rf(rtwdev, path, RR_BTC, RFREG_MASK), in _dpk_rf_setting()
2077 rtw89_read_rf(rtwdev, path, RR_LUTDBG, RFREG_MASK), in _dpk_rf_setting()
2078 rtw89_read_rf(rtwdev, path, 0x1001a, RFREG_MASK)); in _dpk_rf_setting()
2080 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, in _dpk_rf_setting()
2082 rtw89_write_rf(rtwdev, path, RR_MOD_V1, RR_MOD_MASK, RF_DPK); in _dpk_rf_setting()
2084 if (dpk->bp[path][kidx].band == RTW89_BAND_6G && dpk->bp[path][kidx].ch >= 161) in _dpk_rf_setting()
2085 rtw89_write_rf(rtwdev, path, RR_IQGEN, RR_IQGEN_BIAS, 0x8); in _dpk_rf_setting()
2087 rtw89_write_rf(rtwdev, path, RR_LOGEN, RR_LOGEN_RPT, 0xd); in _dpk_rf_setting()
2088 rtw89_write_rf(rtwdev, path, RR_TXAC, RR_TXAC_IQG, 0x8); in _dpk_rf_setting()
2090 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_ATT, 0x0); in _dpk_rf_setting()
2091 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT2, 0x3); in _dpk_rf_setting()
2092 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1); in _dpk_rf_setting()
2093 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1); in _dpk_rf_setting()
2095 if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_160) in _dpk_rf_setting()
2096 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_EBW, 0x0); in _dpk_rf_setting()
2100 static void _dpk_tpg_sel(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx) in _dpk_tpg_sel() argument
2104 if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_160) { in _dpk_tpg_sel()
2107 } else if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_80) { in _dpk_tpg_sel()
2110 } else if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_40) { in _dpk_tpg_sel()
2118 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_160 ? "160M" : in _dpk_tpg_sel()
2119 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_80 ? "80M" : in _dpk_tpg_sel()
2120 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_40 ? "40M" : "20M"); in _dpk_tpg_sel()
2123 static bool _dpk_sync_check(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx) in _dpk_sync_check() argument
2138 dpk->corr_idx[path][kidx] = corr_idx; in _dpk_sync_check()
2139 dpk->corr_val[path][kidx] = corr_val; in _dpk_sync_check()
2151 path, corr_idx, corr_val, dc_i, dc_q); in _dpk_sync_check()
2153 dpk->dc_i[path][kidx] = dc_i; in _dpk_sync_check()
2154 dpk->dc_q[path][kidx] = dc_q; in _dpk_sync_check()
2164 path, rxbb, in _dpk_sync_check()
2202 static void _dpk_kset_query(struct rtw89_dev *rtwdev, enum rtw89_rf_path path) in _dpk_kset_query() argument
2206 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT + (path << 8), B_KIP_RPT_SEL, 0x10); in _dpk_kset_query()
2208 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), 0xE0000000) - 1; in _dpk_kset_query()
2212 enum rtw89_rf_path path, u8 dbm, bool set_from_bb) in _dpk_kip_set_txagc() argument
2216 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] set S%d txagc to %ddBm\n", path, dbm); in _dpk_kip_set_txagc()
2217 rtw89_phy_write32_mask(rtwdev, R_TXPWRB + (path << 13), B_TXPWRB_VAL, dbm << 2); in _dpk_kip_set_txagc()
2219 _dpk_one_shot(rtwdev, phy, path, D_TXAGC); in _dpk_kip_set_txagc()
2220 _dpk_kset_query(rtwdev, path); in _dpk_kip_set_txagc()
2224 enum rtw89_rf_path path, u8 kidx) in _dpk_gainloss() argument
2226 _dpk_one_shot(rtwdev, phy, path, D_GAIN_LOSS); in _dpk_gainloss()
2227 _dpk_kip_set_txagc(rtwdev, phy, path, 0xff, false); in _dpk_gainloss()
2229 rtw89_phy_write32_mask(rtwdev, R_DPK_GL + (path << 8), B_DPK_GL_A1, 0x0); in _dpk_gainloss()
2230 rtw89_phy_write32_mask(rtwdev, R_DPK_GL + (path << 8), B_DPK_GL_A0, 0x0); in _dpk_gainloss()
2281 enum rtw89_rf_path path, u8 kidx) in _dpk_kip_set_rxagc() argument
2283 _dpk_kip_control_rfc(rtwdev, path, false); in _dpk_kip_set_rxagc()
2285 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK)); in _dpk_kip_set_rxagc()
2286 _dpk_kip_control_rfc(rtwdev, path, true); in _dpk_kip_set_rxagc()
2288 _dpk_one_shot(rtwdev, phy, path, D_RXAGC); in _dpk_kip_set_rxagc()
2290 return _dpk_sync_check(rtwdev, path, kidx); in _dpk_kip_set_rxagc()
2309 static void _dpk_bypass_rxiqc(struct rtw89_dev *rtwdev, enum rtw89_rf_path path) in _dpk_bypass_rxiqc() argument
2311 rtw89_phy_write32_mask(rtwdev, R_DPD_V1 + (path << 8), B_DPD_LBK, 0x1); in _dpk_bypass_rxiqc()
2312 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD, 0x40000002); in _dpk_bypass_rxiqc()
2318 enum rtw89_rf_path path, u8 kidx, u8 init_xdbm, u8 loss_only) in _dpk_agc() argument
2333 is_fail = _dpk_kip_set_rxagc(rtwdev, phy, path, kidx); in _dpk_agc()
2346 _dpk_one_shot(rtwdev, phy, path, D_SYNC); in _dpk_agc()
2351 if (dpk->bp[path][kidx].band == RTW89_BAND_2G) in _dpk_agc()
2352 _dpk_bypass_rxiqc(rtwdev, path); in _dpk_agc()
2354 _dpk_lbk_rxiqk(rtwdev, phy, path); in _dpk_agc()
2360 tmp_gl_idx = _dpk_gainloss(rtwdev, phy, path, kidx); in _dpk_agc()
2381 _dpk_kip_set_txagc(rtwdev, phy, path, tmp_dbm, true); in _dpk_agc()
2393 _dpk_kip_set_txagc(rtwdev, phy, path, tmp_dbm, true); in _dpk_agc()
2400 _dpk_kip_control_rfc(rtwdev, path, false); in _dpk_agc()
2401 tmp_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB); in _dpk_agc()
2407 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB, tmp_rxbb); in _dpk_agc()
2410 _dpk_kip_control_rfc(rtwdev, path, true); in _dpk_agc()
2448 enum rtw89_rf_path path, u8 kidx) in _dpk_idl_mpa() argument
2463 else if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_5 || in _dpk_idl_mpa()
2464 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_10 || in _dpk_idl_mpa()
2465 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_20) in _dpk_idl_mpa()
2467 else if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_40 || in _dpk_idl_mpa()
2468 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_80) in _dpk_idl_mpa()
2476 _dpk_one_shot(rtwdev, phy, path, D_MDPK_IDL); in _dpk_idl_mpa()
2485 _dpk_one_shot(rtwdev, phy, path, D_MDPK_IDL); in _dpk_idl_mpa()
2492 _dpk_one_shot(rtwdev, phy, path, D_MDPK_IDL); in _dpk_idl_mpa()
2497 enum rtw89_rf_path path) in _dpk_reload_check() argument
2508 if (cur_band != dpk->bp[path][idx].band || in _dpk_reload_check()
2509 cur_ch != dpk->bp[path][idx].ch) in _dpk_reload_check()
2512 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8), in _dpk_reload_check()
2514 dpk->cur_idx[path] = idx; in _dpk_reload_check()
2517 "[DPK] reload S%d[%d] success\n", path, idx); in _dpk_reload_check()
2530 enum rtw89_rf_path path, u8 kidx) in _dpk_kip_preset_8852c() argument
2533 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK)); in _dpk_kip_preset_8852c()
2537 R_DPD_CH0A + (path << 8) + (kidx << 2), in _dpk_kip_preset_8852c()
2541 R_DPD_CH0A + (path << 8) + (kidx << 2), in _dpk_kip_preset_8852c()
2544 _dpk_kip_control_rfc(rtwdev, path, true); in _dpk_kip_preset_8852c()
2545 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8), B_COEF_SEL_MDPD, kidx); in _dpk_kip_preset_8852c()
2547 _dpk_one_shot(rtwdev, phy, path, D_KIP_PRESET); in _dpk_kip_preset_8852c()
2550 static void _dpk_para_query(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx) in _dpk_para_query() argument
2557 para = rtw89_phy_read32_mask(rtwdev, dpk_par_regs[kidx][dpk->cur_k_set] + (path << 8), in _dpk_para_query()
2560 dpk->bp[path][kidx].txagc_dpk = FIELD_GET(_DPK_PARA_TXAGC, para); in _dpk_para_query()
2561 dpk->bp[path][kidx].ther_dpk = FIELD_GET(_DPK_PARA_THER, para); in _dpk_para_query()
2564 dpk->cur_k_set, dpk->bp[path][kidx].ther_dpk, dpk->bp[path][kidx].txagc_dpk); in _dpk_para_query()
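
_dpk_para_query() reads one packed parameter word per k-set and splits out the DPK-time TXAGC and thermal values with FIELD_GET(). The real _DPK_PARA_TXAGC/_DPK_PARA_THER layouts are not visible in the listing, so the masks below are stand-ins; only the extract-by-mask idiom is the point:

    #include <stdint.h>

    #define DEMO_PARA_TXAGC 0x0000003fu   /* placeholder field layouts */
    #define DEMO_PARA_THER  0x00003fc0u

    /* Minimal FIELD_GET(): shift the masked value down to bit 0.  Works for
     * any contiguous mask, mirroring what the kernel macro does. */
    static uint32_t demo_field_get(uint32_t mask, uint32_t val)
    {
        return (val & mask) / (mask & -mask);
    }

Under the placeholder layout, demo_field_get(DEMO_PARA_THER, para) would recover the thermal byte and demo_field_get(DEMO_PARA_TXAGC, para) the TXAGC code.
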
2568 enum rtw89_rf_path path, u8 kidx, bool is_execute) in _dpk_gain_normalize_8852c() argument
2573 rtw89_phy_write32_mask(rtwdev, R_DPK_GN + (path << 8), B_DPK_GN_AG, 0x200); in _dpk_gain_normalize_8852c()
2574 rtw89_phy_write32_mask(rtwdev, R_DPK_GN + (path << 8), B_DPK_GN_EN, 0x3); in _dpk_gain_normalize_8852c()
2576 _dpk_one_shot(rtwdev, phy, path, D_GAIN_NORM); in _dpk_gain_normalize_8852c()
2578 rtw89_phy_write32_mask(rtwdev, dpk_par_regs[kidx][dpk->cur_k_set] + (path << 8), in _dpk_gain_normalize_8852c()
2581 dpk->bp[path][kidx].gs = in _dpk_gain_normalize_8852c()
2582 rtw89_phy_read32_mask(rtwdev, dpk_par_regs[kidx][dpk->cur_k_set] + (path << 8), in _dpk_gain_normalize_8852c()
2615 enum rtw89_rf_path path, u8 kidx) in _dpk_on() argument
2619 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x1); in _dpk_on()
2620 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x0); in _dpk_on()
2621 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2), in _dpk_on()
2624 dpk->bp[path][kidx].mdpd_en = BIT(dpk->cur_k_set); in _dpk_on()
2625 dpk->bp[path][kidx].path_ok = true; in _dpk_on()
2628 path, kidx, dpk->bp[path][kidx].mdpd_en); in _dpk_on()
2630 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2), in _dpk_on()
2631 B_DPD_MEN, dpk->bp[path][kidx].mdpd_en); in _dpk_on()
2633 _dpk_gain_normalize_8852c(rtwdev, phy, path, kidx, false); in _dpk_on()
2637 enum rtw89_rf_path path, u8 gain) in _dpk_main() argument
2640 u8 kidx = dpk->cur_idx[path]; in _dpk_main()
2645 "[DPK] ========= S%d[%d] DPK Start =========\n", path, kidx); in _dpk_main()
2646 _dpk_kip_control_rfc(rtwdev, path, false); in _dpk_main()
2647 _rf_direct_cntrl(rtwdev, path, false); in _dpk_main()
2648 rtw89_write_rf(rtwdev, path, RR_BBDC, RFREG_MASK, 0x03ffd); in _dpk_main()
2649 _dpk_rf_setting(rtwdev, gain, path, kidx); in _dpk_main()
2650 _set_rx_dck(rtwdev, phy, path, false); in _dpk_main()
2652 _dpk_kip_preset_8852c(rtwdev, phy, path, kidx); in _dpk_main()
2653 _dpk_txpwr_bb_force(rtwdev, path, true); in _dpk_main()
2654 _dpk_kip_set_txagc(rtwdev, phy, path, init_xdbm, true); in _dpk_main()
2655 _dpk_tpg_sel(rtwdev, path, kidx); in _dpk_main()
2657 is_fail = _dpk_agc(rtwdev, phy, path, kidx, init_xdbm, false); in _dpk_main()
2661 _dpk_idl_mpa(rtwdev, phy, path, kidx); in _dpk_main()
2662 _dpk_para_query(rtwdev, path, kidx); in _dpk_main()
2663 _dpk_on(rtwdev, phy, path, kidx); in _dpk_main()
2666 _dpk_kip_control_rfc(rtwdev, path, false); in _dpk_main()
2667 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RF_RX); in _dpk_main()
2668 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d]_K%d %s\n", path, kidx, in _dpk_main()
2674 static void _dpk_init(struct rtw89_dev *rtwdev, u8 path) in _dpk_init() argument
2677 u8 kidx = dpk->cur_idx[path]; in _dpk_init()
2679 dpk->bp[path][kidx].path_ok = false; in _dpk_init()
2682 static void _dpk_drf_direct_cntrl(struct rtw89_dev *rtwdev, u8 path, bool is_bybb) in _dpk_drf_direct_cntrl() argument
2685 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x1); in _dpk_drf_direct_cntrl()
2687 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0); in _dpk_drf_direct_cntrl()
2697 u8 path; in _dpk_cal_select() local
2703 for (path = 0; path < RTW8852C_DPK_RF_PATH; path++) { in _dpk_cal_select()
2704 if (!(kpath & BIT(path))) in _dpk_cal_select()
2707 reloaded[path] = _dpk_reload_check(rtwdev, phy, path); in _dpk_cal_select()
2708 if (!reloaded[path] && dpk->bp[path][0].ch != 0) in _dpk_cal_select()
2709 dpk->cur_idx[path] = !dpk->cur_idx[path]; in _dpk_cal_select()
2711 _dpk_onoff(rtwdev, path, false); in _dpk_cal_select()
2714 for (path = 0; path < RTW8852C_DPK_RF_PATH; path++) in _dpk_cal_select()
2715 dpk->cur_idx[path] = 0; in _dpk_cal_select()
2718 for (path = 0; path < RTW8852C_DPK_RF_PATH; path++) { in _dpk_cal_select()
2721 path, dpk->cur_idx[path]); in _dpk_cal_select()
2722 _dpk_bkup_kip(rtwdev, kip_reg, kip_bkup, path); in _dpk_cal_select()
2723 _rfk_backup_rf_reg(rtwdev, backup_rf_val[path], path); in _dpk_cal_select()
2724 _dpk_information(rtwdev, phy, path); in _dpk_cal_select()
2725 _dpk_init(rtwdev, path); in _dpk_cal_select()
2726 if (rtwdev->is_tssi_mode[path]) in _dpk_cal_select()
2727 _dpk_tssi_pause(rtwdev, path, true); in _dpk_cal_select()
2730 for (path = 0; path < RTW8852C_DPK_RF_PATH; path++) { in _dpk_cal_select()
2733 path, dpk->cur_idx[path]); in _dpk_cal_select()
2734 rtw8852c_disable_rxagc(rtwdev, path, 0x0); in _dpk_cal_select()
2735 _dpk_drf_direct_cntrl(rtwdev, path, false); in _dpk_cal_select()
2736 _dpk_bb_afe_setting(rtwdev, phy, path, kpath); in _dpk_cal_select()
2737 is_fail = _dpk_main(rtwdev, phy, path, 1); in _dpk_cal_select()
2738 _dpk_onoff(rtwdev, path, is_fail); in _dpk_cal_select()
2741 for (path = 0; path < RTW8852C_DPK_RF_PATH; path++) { in _dpk_cal_select()
2744 path, dpk->cur_idx[path]); in _dpk_cal_select()
2745 _dpk_kip_restore(rtwdev, phy, path); in _dpk_cal_select()
2746 _dpk_reload_kip(rtwdev, kip_reg, kip_bkup, path); in _dpk_cal_select()
2747 _rfk_restore_rf_reg(rtwdev, backup_rf_val[path], path); in _dpk_cal_select()
2748 _dpk_bb_afe_restore(rtwdev, path); in _dpk_cal_select()
2749 rtw8852c_disable_rxagc(rtwdev, path, 0x1); in _dpk_cal_select()
2750 if (rtwdev->is_tssi_mode[path]) in _dpk_cal_select()
2751 _dpk_tssi_pause(rtwdev, path, false); in _dpk_cal_select()
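
Taken together, the _dpk_cal_select() hits trace a three-pass structure over the DPK-capable paths: a first pass backs up the KIP/RF registers, records channel information and pauses TSSI; a second pass sets up BB/AFE and runs _dpk_main(); a third restores everything and resumes TSSI. A compile-ready outline with the driver calls replaced by printouts:

    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_DPK_RF_PATH 2   /* assumption: two DPK-capable paths on 8852C */

    static void dpk_cal_select_outline(void)
    {
        uint8_t path;

        for (path = 0; path < DEMO_DPK_RF_PATH; path++)
            printf("S%u: backup KIP/RF regs, record channel, pause TSSI\n", path);
        for (path = 0; path < DEMO_DPK_RF_PATH; path++)
            printf("S%u: BB/AFE setup, run DPK main, apply on/off result\n", path);
        for (path = 0; path < DEMO_DPK_RF_PATH; path++)
            printf("S%u: restore KIP/RF and BB/AFE, resume TSSI\n", path);
    }
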
2782 u8 path, kpath; in _dpk_force_bypass() local
2786 for (path = 0; path < RTW8852C_DPK_RF_PATH; path++) { in _dpk_force_bypass()
2787 if (kpath & BIT(path)) in _dpk_force_bypass()
2788 _dpk_onoff(rtwdev, path, true); in _dpk_force_bypass()
2809 enum rtw89_rf_path path, bool off) in _dpk_onoff() argument
2812 u8 val, kidx = dpk->cur_idx[path]; in _dpk_onoff()
2814 val = dpk->is_dpk_enable && !off && dpk->bp[path][kidx].path_ok ? in _dpk_onoff()
2815 dpk->bp[path][kidx].mdpd_en : 0; in _dpk_onoff()
2817 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2), in _dpk_onoff()
2820 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d] DPK %s !!!\n", path, in _dpk_onoff()
2827 u8 path, kidx; in _dpk_track() local
2834 for (path = 0; path < RTW8852C_DPK_RF_PATH; path++) { in _dpk_track()
2835 kidx = dpk->cur_idx[path]; in _dpk_track()
2838 path, kidx, dpk->bp[path][kidx].ch); in _dpk_track()
2841 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13), 0x0000003f); in _dpk_track()
2843 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13), MASKBYTE2); in _dpk_track()
2845 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BTP + (path << 13), B_TXAGC_BTP); in _dpk_track()
2848 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT + (path << 8), B_KIP_RPT_SEL, 0xf); in _dpk_track()
2850 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), B_RPT_PER_TH); in _dpk_track()
2852 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), B_RPT_PER_OF); in _dpk_track()
2854 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), B_RPT_PER_TSSI); in _dpk_track()
2857 cur_ther = ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]); in _dpk_track()
2862 if (dpk->bp[path][kidx].ch != 0 && cur_ther != 0) in _dpk_track()
2863 delta_ther = dpk->bp[path][kidx].ther_dpk - cur_ther; in _dpk_track()
2869 delta_ther, cur_ther, dpk->bp[path][kidx].ther_dpk); in _dpk_track()
2872 txagc_rf - dpk->bp[path][kidx].txagc_dpk, txagc_rf, in _dpk_track()
2873 dpk->bp[path][kidx].txagc_dpk); in _dpk_track()
2886 rtw89_phy_write32_mask(rtwdev, R_DPD_BND + (path << 8) + (kidx << 2), in _dpk_track()
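
The _dpk_track() fragments show the thermal-tracking step: the delta is the calibration-time thermal value minus the current reading, taken only when the path has a calibrated channel and the sensor reports non-zero. How delta_ther is scaled afterwards is not in the listing, so the sketch stops at the subtraction:

    #include <stdint.h>

    static int dpk_ther_delta(uint8_t ther_dpk, uint8_t cur_ther, uint8_t cal_ch)
    {
        int delta_ther = 0;

        if (cal_ch != 0 && cur_ther != 0)
            delta_ther = (int)ther_dpk - (int)cur_ther;
        return delta_ther;
    }
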
2893 enum rtw89_rf_path path) in _tssi_set_sys() argument
2900 if (path == RF_PATH_A) in _tssi_set_sys()
2911 enum rtw89_rf_path path) in _tssi_ini_txpwr_ctrl_bb() argument
2913 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _tssi_ini_txpwr_ctrl_bb()
2920 enum rtw89_rf_path path) in _tssi_ini_txpwr_ctrl_bb_he_tb() argument
2922 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _tssi_ini_txpwr_ctrl_bb_he_tb()
2928 enum rtw89_rf_path path) in _tssi_set_dck() argument
2933 if (path == RF_PATH_A) { in _tssi_set_dck()
2947 enum rtw89_rf_path path) in _tssi_set_bbgain_split() argument
2949 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _tssi_set_bbgain_split()
2955 enum rtw89_rf_path path) in _tssi_set_tmeter_tbl() argument
3037 if (path == RF_PATH_A) { in _tssi_set_tmeter_tbl()
3141 enum rtw89_rf_path path) in _tssi_slope_cal_org() argument
3146 if (path == RF_PATH_A) { in _tssi_slope_cal_org()
3158 enum rtw89_rf_path path) in _tssi_set_aligk_default() argument
3164 if (path == RF_PATH_A) { in _tssi_set_aligk_default()
3184 enum rtw89_rf_path path) in _tssi_set_slope() argument
3186 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _tssi_set_slope()
3192 enum rtw89_rf_path path) in _tssi_run_slope() argument
3194 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _tssi_run_slope()
3200 enum rtw89_rf_path path) in _tssi_set_track() argument
3202 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _tssi_set_track()
3209 enum rtw89_rf_path path) in _tssi_set_txagc_offset_mv_avg() argument
3211 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, in _tssi_set_txagc_offset_mv_avg()
3219 u32 i, path = RF_PATH_A, path_max = RF_PATH_NUM_8852C; in _tssi_enable() local
3223 path = RF_PATH_A; in _tssi_enable()
3226 path = RF_PATH_B; in _tssi_enable()
3231 for (i = path; i < path_max; i++) { in _tssi_enable()
3247 u32 i, path = RF_PATH_A, path_max = RF_PATH_NUM_8852C; in _tssi_disable() local
3251 path = RF_PATH_A; in _tssi_disable()
3254 path = RF_PATH_B; in _tssi_disable()
3259 for (i = path; i < path_max; i++) { in _tssi_disable()
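Both _tssi_enable and _tssi_disable first translate the PHY index into a contiguous path range [path, path_max) before looping: with DBCC enabled, PHY0 is assumed to own path A only and PHY1 path B only, otherwise both paths are covered. A sketch of that mapping, assuming RF_PATH_A/B are 0/1 and RF_PATH_NUM_8852C is 2 (consistent with the two-path loops elsewhere in this listing):

    #include <stdint.h>

    enum { RF_PATH_A = 0, RF_PATH_B = 1, RF_PATH_NUM_8852C = 2 };
    enum { RTW89_PHY_0 = 0, RTW89_PHY_1 = 1 };

    /* Resolve the half-open path range [*path, *path_max) a PHY iterates over. */
    void tssi_path_range(int dbcc_en, int phy, uint32_t *path, uint32_t *path_max)
    {
            *path = RF_PATH_A;
            *path_max = RF_PATH_NUM_8852C;       /* default: both paths */

            if (!dbcc_en)
                    return;

            if (phy == RTW89_PHY_0) {            /* PHY0 drives path A only */
                    *path = RF_PATH_A;
                    *path_max = RF_PATH_B;
            } else if (phy == RTW89_PHY_1) {     /* PHY1 drives path B only */
                    *path = RF_PATH_B;
                    *path_max = RF_PATH_NUM_8852C;
            }
    }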
3569 enum rtw89_rf_path path) in _tssi_get_ofdm_de() argument
3585 path, gidx); in _tssi_get_ofdm_de()
3590 de_1st = tssi_info->tssi_mcs[path][gidx_1st]; in _tssi_get_ofdm_de()
3591 de_2nd = tssi_info->tssi_mcs[path][gidx_2nd]; in _tssi_get_ofdm_de()
3596 path, val, de_1st, de_2nd); in _tssi_get_ofdm_de()
3598 val = tssi_info->tssi_mcs[path][gidx]; in _tssi_get_ofdm_de()
3601 "[TSSI][TRIM]: path=%d mcs de=%d\n", path, val); in _tssi_get_ofdm_de()
3608 path, gidx); in _tssi_get_ofdm_de()
3613 de_1st = tssi_info->tssi_6g_mcs[path][gidx_1st]; in _tssi_get_ofdm_de()
3614 de_2nd = tssi_info->tssi_6g_mcs[path][gidx_2nd]; in _tssi_get_ofdm_de()
3619 path, val, de_1st, de_2nd); in _tssi_get_ofdm_de()
3621 val = tssi_info->tssi_6g_mcs[path][gidx]; in _tssi_get_ofdm_de()
3624 "[TSSI][TRIM]: path=%d mcs de=%d\n", path, val); in _tssi_get_ofdm_de()
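For channels that fall into an 'extra' TSSI group, _tssi_get_ofdm_de derives the DE value from the two neighbouring group entries instead of a single table slot; otherwise it is a direct lookup. A hedged sketch of that selection (averaging the two neighbours is an assumption based on how de_1st and de_2nd are reported together in the debug output):

    #include <stdint.h>

    /* Pick the OFDM DE value for a TSSI group index; "extra" (boundary) groups
     * take the mid-point of the two neighbouring entries (assumed), everything
     * else is a direct table lookup. */
    uint32_t tssi_ofdm_de(const uint8_t *tssi_mcs, uint8_t gidx,
                          int is_extra, uint8_t gidx_1st, uint8_t gidx_2nd)
    {
            if (is_extra) {
                    uint32_t de_1st = tssi_mcs[gidx_1st];
                    uint32_t de_2nd = tssi_mcs[gidx_2nd];

                    return (de_1st + de_2nd) / 2;
            }

            return tssi_mcs[gidx];
    }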
3633 enum rtw89_rf_path path) in _tssi_get_ofdm_trim_de() argument
3649 path, tgidx); in _tssi_get_ofdm_trim_de()
3654 tde_1st = tssi_info->tssi_trim[path][tgidx_1st]; in _tssi_get_ofdm_trim_de()
3655 tde_2nd = tssi_info->tssi_trim[path][tgidx_2nd]; in _tssi_get_ofdm_trim_de()
3660 path, val, tde_1st, tde_2nd); in _tssi_get_ofdm_trim_de()
3662 val = tssi_info->tssi_trim[path][tgidx]; in _tssi_get_ofdm_trim_de()
3666 path, val); in _tssi_get_ofdm_trim_de()
3673 path, tgidx); in _tssi_get_ofdm_trim_de()
3678 tde_1st = tssi_info->tssi_trim_6g[path][tgidx_1st]; in _tssi_get_ofdm_trim_de()
3679 tde_2nd = tssi_info->tssi_trim_6g[path][tgidx_2nd]; in _tssi_get_ofdm_trim_de()
3684 path, val, tde_1st, tde_2nd); in _tssi_get_ofdm_trim_de()
3686 val = tssi_info->tssi_trim_6g[path][tgidx]; in _tssi_get_ofdm_trim_de()
3690 path, val); in _tssi_get_ofdm_trim_de()
3707 u32 i, path = RF_PATH_A, path_max = RF_PATH_NUM_8852C; in _tssi_set_efuse_to_de() local
3714 path = RF_PATH_A; in _tssi_set_efuse_to_de()
3717 path = RF_PATH_B; in _tssi_set_efuse_to_de()
3722 for (i = path; i < path_max; i++) { in _tssi_set_efuse_to_de()
3764 enum rtw89_rf_path path) in rtw8852c_tssi_cont_en() argument
3770 rtw89_phy_write32_mask(rtwdev, tssi_trk[path], BIT(30), 0x0); in rtw8852c_tssi_cont_en()
3771 rtw89_phy_write32_mask(rtwdev, tssi_en[path], BIT(31), 0x0); in rtw8852c_tssi_cont_en()
3772 if (rtwdev->dbcc_en && path == RF_PATH_B) in rtw8852c_tssi_cont_en()
3777 rtw89_phy_write32_mask(rtwdev, tssi_trk[path], BIT(30), 0x1); in rtw8852c_tssi_cont_en()
3778 rtw89_phy_write32_mask(rtwdev, tssi_en[path], BIT(31), 0x1); in rtw8852c_tssi_cont_en()
3795 static void _bw_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, in _bw_setting() argument
3807 rf_reg18 = rtw89_read_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK); in _bw_setting()
3815 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x3); in _bw_setting()
3816 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xf); in _bw_setting()
3820 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x3); in _bw_setting()
3821 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xf); in _bw_setting()
3825 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x2); in _bw_setting()
3826 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xd); in _bw_setting()
3830 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x1); in _bw_setting()
3831 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xb); in _bw_setting()
3837 rtw89_write_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK, rf_reg18); in _bw_setting()
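Besides rewriting RF register 0x18, _bw_setting reprograms the per-path CFCH filter fields; the listing shows the (BW0, BW1) pair stepping through 0x3/0xf, 0x2/0xd and 0x1/0xb for the different bandwidth cases. The case labels themselves are not visible here, so the mapping below is an assumption (20 and 40 MHz sharing one setting, 80 and 160 MHz each getting their own):

    #include <stdint.h>

    enum bw { BW_20, BW_40, BW_80, BW_160 };

    struct cfch_pair { uint8_t bw0, bw1; };

    /* Assumed bandwidth -> (B_P0_CFCH_BW0, B_P0_CFCH_BW1) mapping; the value
     * pairs come from the listing, the case labels are guesses. */
    struct cfch_pair cfch_for_bw(enum bw bw)
    {
            switch (bw) {
            case BW_20:
            case BW_40:
                    return (struct cfch_pair){ .bw0 = 0x3, .bw1 = 0xf };
            case BW_80:
                    return (struct cfch_pair){ .bw0 = 0x2, .bw1 = 0xd };
            case BW_160:
            default:
                    return (struct cfch_pair){ .bw0 = 0x1, .bw1 = 0xb };
            }
    }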
3844 u8 kpath, path; in _ctrl_bw() local
3850 for (path = 0; path < 2; path++) { in _ctrl_bw()
3851 if (!(kpath & BIT(path))) in _ctrl_bw()
3855 _bw_setting(rtwdev, path, bw, is_dav); in _ctrl_bw()
3857 _bw_setting(rtwdev, path, bw, is_dav); in _ctrl_bw()
3861 if (path == RF_PATH_B && rtwdev->hal.cv == CHIP_CAV) { in _ctrl_bw()
3872 static void _ch_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, in _ch_setting() argument
3884 rf_reg18 = rtw89_read_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK); in _ch_setting()
3904 rtw89_write_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK, rf_reg18); in _ch_setting()
3911 u8 kpath, path; in _ctrl_ch() local
3926 for (path = 0; path < 2; path++) { in _ctrl_ch()
3927 if (kpath & BIT(path)) { in _ctrl_ch()
3928 _ch_setting(rtwdev, path, central_ch, band, true); in _ctrl_ch()
3929 _ch_setting(rtwdev, path, central_ch, band, false); in _ctrl_ch()
3938 u8 path; in _rxbb_bw() local
3942 for (path = 0; path < 2; path++) { in _rxbb_bw()
3943 if (!(kpath & BIT(path))) in _rxbb_bw()
3946 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x1); in _rxbb_bw()
3947 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M2, 0xa); in _rxbb_bw()
3963 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, val); in _rxbb_bw()
3964 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x0); in _rxbb_bw()
3971 int path; in _lck_keep_thermal() local
3973 for (path = 0; path < rtwdev->chip->rf_path_num; path++) { in _lck_keep_thermal()
3974 lck->thermal[path] = in _lck_keep_thermal()
3975 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]); in _lck_keep_thermal()
3977 "[LCK] path=%d thermal=0x%x", path, lck->thermal[path]); in _lck_keep_thermal()
3984 int path = rtwdev->dbcc_en ? 2 : 1; in _lck() local
3992 for (i = 0; i < path; i++) { in _lck()
4008 int path; in rtw8852c_lck_track() local
4010 for (path = 0; path < rtwdev->chip->rf_path_num; path++) { in rtw8852c_lck_track()
4012 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]); in rtw8852c_lck_track()
4013 delta = abs((int)cur_thermal - lck->thermal[path]); in rtw8852c_lck_track()
4017 path, cur_thermal, delta); in rtw8852c_lck_track()
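rtw8852c_lck_track is a thermal watchdog: it compares the current per-path EWMA thermal value with the one stored by _lck_keep_thermal and, once the absolute difference crosses a threshold, re-runs the LO calibration and refreshes the stored values. A sketch of the trigger test only, with the threshold left as a parameter since its value does not appear in this listing:

    #include <stdint.h>
    #include <stdlib.h>

    /* Non-zero when the thermal drift on a path is large enough to warrant
     * re-running LCK; mirrors the delta test in rtw8852c_lck_track(). */
    int lck_needs_rerun(uint8_t cur_thermal, uint8_t saved_thermal, uint8_t threshold)
    {
            int delta = abs((int)cur_thermal - (int)saved_thermal);

            return delta >= threshold;
    }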
4071 u8 path; in rtw8852c_rck() local
4073 for (path = 0; path < 2; path++) in rtw8852c_rck()
4074 _rck(rtwdev, path); in rtw8852c_rck()
4108 u8 path, kpath; in _rx_dck() local
4118 for (path = 0; path < 2; path++) { in _rx_dck()
4119 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK); in _rx_dck()
4120 if (!(kpath & BIT(path))) in _rx_dck()
4123 if (rtwdev->is_tssi_mode[path]) in _rx_dck()
4124 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13), in _rx_dck()
4126 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0); in _rx_dck()
4127 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX); in _rx_dck()
4128 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_LO_SEL, rtwdev->dbcc_en); in _rx_dck()
4131 _set_rx_dck(rtwdev, phy, path, is_afe); in _rx_dck()
4137 _rx_dck_recover(rtwdev, path); in _rx_dck()
4141 is_fail = _rx_dck_rek_check(rtwdev, path); in _rx_dck()
4147 path, rek_cnt); in _rx_dck()
4149 rx_dck->thermal[path] = ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]); in _rx_dck()
4150 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5); in _rx_dck()
4152 if (rtwdev->is_tssi_mode[path]) in _rx_dck()
4153 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13), in _rx_dck()
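Each pass of the per-path loop in _rx_dck is bracketed: RR_RSV1 is saved and TSSI tracking paused before the path is put into RX mode for the DC calibration, and both are restored afterwards along with a fresh thermal snapshot. A condensed sketch of that bracket (the helpers are placeholders; only the ordering is taken from the listing):

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder helpers standing in for the RF/PHY accessors in the listing. */
    static uint32_t rf_read_rsv1(uint8_t path)           { (void)path; return 0; }
    static void rf_write_rsv1(uint8_t path, uint32_t v)  { printf("S%u: restore RSV1=%#x\n", (unsigned)path, (unsigned)v); }
    static void tssi_trk_pause(uint8_t path, int pause)  { printf("S%u: TSSI trk %s\n", (unsigned)path, pause ? "pause" : "resume"); }
    static void do_rx_dck(uint8_t path)                  { printf("S%u: RX DCK\n", (unsigned)path); }
    static uint8_t read_thermal(uint8_t path)             { (void)path; return 0; }

    static uint8_t saved_thermal[2];

    static void rx_dck_one_path(uint8_t path, int tssi_mode)
    {
            uint32_t rf_reg5 = rf_read_rsv1(path);        /* save RR_RSV1 */

            if (tssi_mode)
                    tssi_trk_pause(path, 1);              /* stop TSSI tracking */

            do_rx_dck(path);                              /* calibrate with the path in RX mode */

            saved_thermal[path] = read_thermal(path);     /* snapshot for the tracker */
            rf_write_rsv1(path, rf_reg5);                 /* restore RR_RSV1 */
            if (tssi_mode)
                    tssi_trk_pause(path, 0);              /* resume TSSI tracking */
    }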
4175 int path; in rtw8852c_rx_dck_track() local
4183 for (path = 0; path < RF_PATH_NUM_8852C; path++) { in rtw8852c_rx_dck_track()
4185 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]); in rtw8852c_rx_dck_track()
4186 delta = abs((int)cur_thermal - rx_dck->thermal[path]); in rtw8852c_rx_dck_track()
4190 path, cur_thermal, delta); in rtw8852c_rx_dck_track()
4202 for (path = 0; path < RF_PATH_NUM_8852C; path++) { in rtw8852c_rx_dck_track()
4209 for (path = 0; path < RF_PATH_NUM_8852C; path++) in rtw8852c_rx_dck_track()
4240 u32 i, path = RF_PATH_A, path_max = RF_PATH_NUM_8852C; in rtw8852c_tssi() local
4246 path = RF_PATH_A; in rtw8852c_tssi()
4249 path = RF_PATH_B; in rtw8852c_tssi()
4256 for (i = path; i < path_max; i++) { in rtw8852c_tssi()
4275 u32 i, path = RF_PATH_A, path_max = RF_PATH_NUM_8852C; in rtw8852c_tssi_scan() local
4287 path = RF_PATH_A; in rtw8852c_tssi_scan()
4290 path = RF_PATH_B; in rtw8852c_tssi_scan()
4297 for (i = path; i < path_max; i++) { in rtw8852c_tssi_scan()