Lines matching refs: u32 (each entry: source line number, matching line, enclosing function)

185 					  u32 reg, u16 *value)  in ixgbe_read_pci_cfg_word_parent()
293 static u32 ixgbe_check_remove(struct ixgbe_hw *hw, u32 reg) in ixgbe_check_remove()
296 u32 value; in ixgbe_check_remove()
334 u32 ixgbe_read_reg(struct ixgbe_hw *hw, u32 reg) in ixgbe_read_reg()
337 u32 value; in ixgbe_read_reg()
380 u16 ixgbe_read_pci_cfg_word(struct ixgbe_hw *hw, u32 reg) in ixgbe_read_pci_cfg_word()
395 static u32 ixgbe_read_pci_cfg_dword(struct ixgbe_hw *hw, u32 reg) in ixgbe_read_pci_cfg_dword()
398 u32 value; in ixgbe_read_pci_cfg_dword()
410 void ixgbe_write_pci_cfg_word(struct ixgbe_hw *hw, u32 reg, u16 value) in ixgbe_write_pci_cfg_word()
429 u32 ofs;
473 u32 regs[64]; in ixgbe_regdump()
832 u32 ctrl_ext; in ixgbe_release_hw_control()
842 u32 ctrl_ext; in ixgbe_get_hw_control()
861 u32 ivar, index; in ixgbe_set_ivar()
906 u32 mask; in ixgbe_irq_rearm_queues()
933 u32 data; in ixgbe_update_xoff_rx_lfc()
965 u32 xoff[8] = {0}; in ixgbe_update_xoff_received()
980 u32 pxoffrxc; in ixgbe_update_xoff_received()
1030 u32 tx_done = ixgbe_get_tx_completed(tx_ring); in ixgbe_check_tx_hang()
1031 u32 tx_done_old = tx_ring->tx_stats.tx_done_old; in ixgbe_check_tx_hang()
1032 u32 tx_pending = ixgbe_get_tx_pending(tx_ring); in ixgbe_check_tx_hang()
1082 int queue_index, u32 maxrate) in ixgbe_tx_maxrate()
1086 u32 bcnrc_val = ixgbe_link_mbps(adapter); in ixgbe_tx_maxrate()
1272 u32 txctrl = 0; in ixgbe_update_tx_dca()
1309 u32 rxctrl = 0; in ixgbe_update_rx_dca()
1679 u32 flags = rx_ring->q_vector->adapter->flags; in ixgbe_process_skb_fields()
1730 u32 ntc = rx_ring->next_to_clean + 1; in ixgbe_is_non_eop()
1744 u32 rsc_cnt = le32_to_cpu(rsc_enabled); in ixgbe_is_non_eop()
2197 u32 act; in ixgbe_run_xdp()
2453 u32 mask; in ixgbe_configure_msix()
2457 u32 eitrsel = BIT(adapter->num_vfs - 32) - 1; in ixgbe_configure_msix()
2705 u32 itr_reg = q_vector->itr & IXGBE_MAX_EITR; in ixgbe_write_eitr()
2731 u32 new_itr; in ixgbe_set_itr()
2758 u32 eicr = adapter->interrupt_event; in ixgbe_check_overtemp_subtask()
2782 u32 speed; in ixgbe_check_overtemp_subtask()
2813 static void ixgbe_check_fan_failure(struct ixgbe_adapter *adapter, u32 eicr) in ixgbe_check_fan_failure()
2825 static void ixgbe_check_overtemp_event(struct ixgbe_adapter *adapter, u32 eicr) in ixgbe_check_overtemp_event()
2892 static void ixgbe_check_sfp_event(struct ixgbe_adapter *adapter, u32 eicr) in ixgbe_check_sfp_event()
2895 u32 eicr_mask = IXGBE_EICR_GPI_SDP2(hw); in ixgbe_check_sfp_event()
2943 u32 mask; in ixgbe_irq_enable_queues()
2978 u32 mask = (IXGBE_EIMS_ENABLE_MASK & ~IXGBE_EIMS_RTX_QUEUE); in ixgbe_irq_enable()
3037 u32 eicr; in ixgbe_msix_other()
3280 u32 eicr; in ixgbe_intr()
3465 u32 txdctl = IXGBE_TXDCTL_ENABLE; in ixgbe_configure_tx_ring()
3552 u32 rttdcs, mtqc; in ixgbe_setup_mtqc()
3594 u32 sectx = IXGBE_READ_REG(hw, IXGBE_SECTXMINIFG); in ixgbe_setup_mtqc()
3613 u32 dmatxctl; in ixgbe_configure_tx()
3614 u32 i; in ixgbe_configure_tx()
3637 u32 srrctl = IXGBE_READ_REG(hw, IXGBE_SRRCTL(reg_idx)); in ixgbe_enable_rx_drop()
3649 u32 srrctl = IXGBE_READ_REG(hw, IXGBE_SRRCTL(reg_idx)); in ixgbe_disable_rx_drop()
3693 u32 srrctl; in ixgbe_configure_srrctl()
3711 u32 xsk_buf_len = xsk_pool_get_rx_frame_size(rx_ring->xsk_pool); in ixgbe_configure_srrctl()
3745 u32 ixgbe_rss_indir_tbl_entries(struct ixgbe_adapter *adapter) in ixgbe_rss_indir_tbl_entries()
3778 u32 *rss_key; in ixgbe_init_rss_key()
3800 u32 i, reta_entries = ixgbe_rss_indir_tbl_entries(adapter); in ixgbe_store_reta()
3802 u32 reta = 0; in ixgbe_store_reta()
3803 u32 indices_multi; in ixgbe_store_reta()
3839 u32 i, reta_entries = ixgbe_rss_indir_tbl_entries(adapter); in ixgbe_store_vfreta()
3841 u32 vfreta = 0; in ixgbe_store_vfreta()
3847 vfreta |= (u32)adapter->rss_indir_tbl[i] << (i & 0x3) * 8; in ixgbe_store_vfreta()
3861 u32 i, j; in ixgbe_setup_reta()
3862 u32 reta_entries = ixgbe_rss_indir_tbl_entries(adapter); in ixgbe_setup_reta()
3918 u32 mrqc = 0, rss_field = 0, vfmrqc = 0; in ixgbe_setup_mrqc()
3919 u32 rxcsum; in ixgbe_setup_mrqc()
4002 u32 rscctrl; in ixgbe_configure_rscctl()
4025 u32 rxdctl; in ixgbe_rx_desc_queue_enable()
4052 u32 rxdctl; in ixgbe_configure_rx_ring()
4119 u32 xsk_buf_len = xsk_pool_get_rx_frame_size(ring->xsk_pool); in ixgbe_configure_rx_ring()
4154 u32 psrtype = IXGBE_PSRTYPE_TCPHDR | in ixgbe_setup_psrtype()
4176 u32 reg_offset, vf_shift, vmolr; in ixgbe_configure_virtualization()
4177 u32 gcr_ext, vmdctl; in ixgbe_configure_virtualization()
4250 u32 mhadd, hlreg0; in ixgbe_set_rx_buffer_len()
4313 u32 rdrxctl = IXGBE_READ_REG(hw, IXGBE_RDRXCTL); in ixgbe_setup_rdrxctl()
4363 u32 rxctrl, rfctl; in ixgbe_configure_rx()
4419 static int ixgbe_find_vlvf_entry(struct ixgbe_hw *hw, u32 vlan) in ixgbe_find_vlvf_entry()
4421 u32 vlvf; in ixgbe_find_vlvf_entry()
4438 void ixgbe_update_pf_promisc_vlvf(struct ixgbe_adapter *adapter, u32 vid) in ixgbe_update_pf_promisc_vlvf()
4441 u32 bits, word; in ixgbe_update_pf_promisc_vlvf()
4485 u32 vlnctrl; in ixgbe_vlan_strip_disable()
4523 u32 vlnctrl; in ixgbe_vlan_strip_enable()
4557 u32 vlnctrl, i; in ixgbe_vlan_promisc_enable()
4584 u32 reg_offset = IXGBE_VLVFB(i * 2 + VMDQ_P(0) / 32); in ixgbe_vlan_promisc_enable()
4585 u32 vlvfb = IXGBE_READ_REG(hw, reg_offset); in ixgbe_vlan_promisc_enable()
4597 static void ixgbe_scrub_vfta(struct ixgbe_adapter *adapter, u32 vfta_offset) in ixgbe_scrub_vfta()
4600 u32 vfta[VFTA_BLOCK_SIZE] = { 0 }; in ixgbe_scrub_vfta()
4601 u32 vid_start = vfta_offset * 32; in ixgbe_scrub_vfta()
4602 u32 vid_end = vid_start + (VFTA_BLOCK_SIZE * 32); in ixgbe_scrub_vfta()
4603 u32 i, vid, word, bits; in ixgbe_scrub_vfta()
4606 u32 vlvf = IXGBE_READ_REG(hw, IXGBE_VLVF(i)); in ixgbe_scrub_vfta()
4646 u32 vlnctrl, i; in ixgbe_vlan_promisc_disable()
4895 u32 fctrl, vmolr = IXGBE_VMOLR_BAM | IXGBE_VMOLR_AUPE; in ixgbe_set_rx_mode()
5075 u32 msb = 0; in ixgbe_configure_dcb()
5103 u32 dv_id, rx_pba; in ixgbe_hpbthresh()
5164 u32 dv_id; in ixgbe_lpbthresh()
5252 u32 ring = ethtool_get_flow_spec_ring(filter->action); in ixgbe_fdir_filter_restore()
5511 u32 speed; in ixgbe_non_sfp_link_config()
5586 u32 gpie = 0; in ixgbe_setup_gpie()
5669 u32 ctrl_ext; in ixgbe_up_complete()
5707 u32 esdp = IXGBE_READ_REG(hw, IXGBE_ESDP); in ixgbe_up_complete()
5800 u32 rxdctl; in ixgbe_disable_rx()
5872 u32 txdctl; in ixgbe_disable_tx()
6306 u32 fwsm; in ixgbe_sw_init()
6952 u32 err; in ixgbe_resume()
6989 u32 ctrl; in __ixgbe_shutdown()
6990 u32 wufc = adapter->wol; in __ixgbe_shutdown()
7005 u32 fctrl; in __ixgbe_shutdown()
7090 u32 i, missed_rx = 0, mpc, bprc, lxon, lxoff, xon_off_tot; in ixgbe_update_stats()
7443 u32 link_speed = adapter->link_speed; in ixgbe_watchdog_update_link()
7504 u32 link_speed = adapter->link_speed; in ixgbe_watchdog_link_is_up()
7516 u32 frctl = IXGBE_READ_REG(hw, IXGBE_FCTRL); in ixgbe_watchdog_link_is_up()
7517 u32 rmcs = IXGBE_READ_REG(hw, IXGBE_RMCS); in ixgbe_watchdog_link_is_up()
7527 u32 mflcn = IXGBE_READ_REG(hw, IXGBE_MFLCN); in ixgbe_watchdog_link_is_up()
7528 u32 fccfg = IXGBE_READ_REG(hw, IXGBE_FCCFG); in ixgbe_watchdog_link_is_up()
7641 u32 q_per_pool = __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_vf_tx_pending()
7654 u32 h, t; in ixgbe_vf_tx_pending()
7688 static void ixgbe_bad_vf_abort(struct ixgbe_adapter *adapter, u32 vf) in ixgbe_bad_vf_abort()
7714 u32 gpc; in ixgbe_check_for_bad_vf()
7749 u32 ssvpc; in ixgbe_spoof_check()
7885 u32 cap_speed; in ixgbe_sfp_link_config_subtask()
7886 u32 speed; in ixgbe_sfp_link_config_subtask()
7985 u32 fwsm; in ixgbe_check_fw_error()
8051 u32 vlan_macip_lens, type_tucmd, mss_l4len_idx; in ixgbe_tso()
8063 u32 paylen, l4_offset; in ixgbe_tso()
8064 u32 fceof_saidx = 0; in ixgbe_tso()
8157 u32 vlan_macip_lens = 0; in ixgbe_tx_csum()
8158 u32 fceof_saidx = 0; in ixgbe_tx_csum()
8159 u32 type_tucmd = 0; in ixgbe_tx_csum()
8204 ((u32)(_input & _flag) * (_result / _flag)) : \
8205 ((u32)(_input & _flag) / (_flag / _result)))
8207 static u32 ixgbe_tx_cmd_type(struct sk_buff *skb, u32 tx_flags) in ixgbe_tx_cmd_type()
8210 u32 cmd_type = IXGBE_ADVTXD_DTYP_DATA | in ixgbe_tx_cmd_type()
8233 u32 tx_flags, unsigned int paylen) in ixgbe_tx_olinfo_status()
8235 u32 olinfo_status = paylen << IXGBE_ADVTXD_PAYLEN_SHIFT; in ixgbe_tx_olinfo_status()
8291 u32 tx_flags = first->tx_flags; in ixgbe_tx_map()
8292 u32 cmd_type = ixgbe_tx_cmd_type(skb, tx_flags); in ixgbe_tx_map()
8638 u32 cmd_type, len = xdpf->len; in ixgbe_xmit_xdp_ring()
8717 u32 tx_flags = 0; in ixgbe_xmit_frame_ring()
9113 u32 reg, rsave; in ixgbe_validate_rtr()
9296 u32 hdl = cls->knode.handle; in ixgbe_delete_clsu32()
9297 u32 uhtid = TC_U32_USERHTID(cls->knode.handle); in ixgbe_delete_clsu32()
9298 u32 loc = cls->knode.handle & 0xfffff; in ixgbe_delete_clsu32()
9353 u32 uhtid = TC_U32_USERHTID(cls->hnode.handle); in ixgbe_configure_clsu32_add_hnode()
9371 u32 uhtid = TC_U32_USERHTID(cls->hnode.handle); in ixgbe_configure_clsu32_del_hnode()
9504 field_ptr[j].val(input, mask, (__force u32)val, in ixgbe_clsu32_build_input()
9505 (__force u32)m); in ixgbe_clsu32_build_input()
9515 (__force u32)cls->knode.sel->keys[i].val && in ixgbe_clsu32_build_input()
9517 (__force u32)cls->knode.sel->keys[i].mask) in ixgbe_clsu32_build_input()
9543 u32 loc = cls->knode.handle & 0xfffff; in ixgbe_configure_clsu32()
9551 u32 uhtid, link_uhtid; in ixgbe_configure_clsu32()
9622 (__force u32)cls->knode.sel->offmask) in ixgbe_configure_clsu32()
9957 u32 vmdctl; in ixgbe_configure_bridge_mode()
10052 static int ixgbe_ndo_bridge_getlink(struct sk_buff *skb, u32 pid, u32 seq, in ixgbe_ndo_bridge_getlink()
10054 u32 filter_mask, int nlflags) in ixgbe_ndo_bridge_getlink()
10348 struct xdp_frame **frames, u32 flags) in ixgbe_xdp_xmit()
10447 u32 txdctl; in ixgbe_disable_txr_hw()
10483 u32 rxdctl; in ixgbe_disable_rxr_hw()
10532 static void ixgbe_irq_disable_single(struct ixgbe_adapter *adapter, u32 ring) in ixgbe_irq_disable_single()
10536 u32 mask; in ixgbe_irq_disable_single()
10809 u32 eec; in ixgbe_probe()
10950 u32 esdp = IXGBE_READ_REG(hw, IXGBE_ESDP); in ixgbe_probe()
11377 u32 dw0, dw1, dw2, dw3; in ixgbe_io_error_detected()
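The listing above is a plain cross-reference dump: every use of the kernel's u32 type in the ixgbe driver's main source file, with the enclosing function for each hit. The dominant pattern is a u32 local holding a 32-bit register word or counter, as in "u32 value" in ixgbe_read_reg() and "u32 regs[64]" in ixgbe_regdump(). The following is only a minimal user-space sketch of that shape, not the driver's code: fake_read_reg() and the 64-entry register array are invented for illustration, and the kernel's u32 corresponds to uint32_t here.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for a 32-bit register read in the style of
 * ixgbe_read_reg(): registers are 4 bytes wide and addressed by byte
 * offset, so offset 0x04 is the second 32-bit word. */
static uint32_t fake_read_reg(const uint32_t *regs, uint32_t reg)
{
	return regs[reg / 4];
}

int main(void)
{
	uint32_t regs[64] = { 0 };	/* mirrors the "u32 regs[64]" pattern in ixgbe_regdump() */
	uint32_t value;			/* "u32 value", as in ixgbe_read_reg() */

	regs[1] = 0xdeadbeef;
	value = fake_read_reg(regs, 0x04);
	printf("reg 0x04 = 0x%08x\n", (unsigned int)value);
	return 0;
}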