Searched refs:accel_dev (Results 1 – 25 of 78) sorted by relevance

/openbmc/linux/drivers/crypto/intel/qat/qat_common/
adf_init.c
61 static int adf_dev_init(struct adf_accel_dev *accel_dev) in adf_dev_init() argument
65 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_dev_init()
69 dev_err(&GET_DEV(accel_dev), in adf_dev_init()
74 if (!test_bit(ADF_STATUS_CONFIGURED, &accel_dev->status) && in adf_dev_init()
75 !accel_dev->is_vf) { in adf_dev_init()
76 dev_err(&GET_DEV(accel_dev), "Device not configured\n"); in adf_dev_init()
80 if (adf_init_etr_data(accel_dev)) { in adf_dev_init()
81 dev_err(&GET_DEV(accel_dev), "Failed initialize etr\n"); in adf_dev_init()
85 if (hw_data->init_device && hw_data->init_device(accel_dev)) { in adf_dev_init()
86 dev_err(&GET_DEV(accel_dev), "Failed to initialize device\n"); in adf_dev_init()
[all …]
adf_vf_isr.c
27 struct adf_accel_dev *accel_dev; member
31 void adf_enable_pf2vf_interrupts(struct adf_accel_dev *accel_dev) in adf_enable_pf2vf_interrupts() argument
33 void __iomem *pmisc_addr = adf_get_pmisc_base(accel_dev); in adf_enable_pf2vf_interrupts()
38 void adf_disable_pf2vf_interrupts(struct adf_accel_dev *accel_dev) in adf_disable_pf2vf_interrupts() argument
40 void __iomem *pmisc_addr = adf_get_pmisc_base(accel_dev); in adf_disable_pf2vf_interrupts()
46 static int adf_enable_msi(struct adf_accel_dev *accel_dev) in adf_enable_msi() argument
48 struct adf_accel_pci *pci_dev_info = &accel_dev->accel_pci_dev; in adf_enable_msi()
52 dev_err(&GET_DEV(accel_dev), in adf_enable_msi()
60 static void adf_disable_msi(struct adf_accel_dev *accel_dev) in adf_disable_msi() argument
62 struct pci_dev *pdev = accel_to_pci_dev(accel_dev); in adf_disable_msi()
[all …]
adf_isr.c
21 static int adf_enable_msix(struct adf_accel_dev *accel_dev) in adf_enable_msix() argument
23 struct adf_accel_pci *pci_dev_info = &accel_dev->accel_pci_dev; in adf_enable_msix()
24 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_msix()
29 hw_data->set_msix_rttable(accel_dev); in adf_enable_msix()
34 dev_err(&GET_DEV(accel_dev), in adf_enable_msix()
50 struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(bank->accel_dev); in adf_msix_isr_bundle()
59 void adf_enable_vf2pf_interrupts(struct adf_accel_dev *accel_dev, u32 vf_mask) in adf_enable_vf2pf_interrupts() argument
61 void __iomem *pmisc_addr = adf_get_pmisc_base(accel_dev); in adf_enable_vf2pf_interrupts()
64 spin_lock_irqsave(&accel_dev->pf.vf2pf_ints_lock, flags); in adf_enable_vf2pf_interrupts()
65 GET_PFVF_OPS(accel_dev)->enable_vf2pf_interrupts(pmisc_addr, vf_mask); in adf_enable_vf2pf_interrupts()
[all …]
adf_accel_engine.c
10 static int adf_ae_fw_load_images(struct adf_accel_dev *accel_dev, void *fw_addr, in adf_ae_fw_load_images() argument
13 struct adf_fw_loader_data *loader_data = accel_dev->fw_loader; in adf_ae_fw_load_images()
14 struct adf_hw_device_data *hw_device = accel_dev->hw_device; in adf_ae_fw_load_images()
25 obj_name = hw_device->uof_get_name(accel_dev, i); in adf_ae_fw_load_images()
26 ae_mask = hw_device->uof_get_ae_mask(accel_dev, i); in adf_ae_fw_load_images()
28 dev_err(&GET_DEV(accel_dev), "Invalid UOF image\n"); in adf_ae_fw_load_images()
33 dev_err(&GET_DEV(accel_dev), in adf_ae_fw_load_images()
38 dev_err(&GET_DEV(accel_dev), in adf_ae_fw_load_images()
43 dev_err(&GET_DEV(accel_dev), in adf_ae_fw_load_images()
53 adf_ae_fw_release(accel_dev); in adf_ae_fw_load_images()
[all …]
adf_sriov.c
25 struct adf_accel_dev *accel_dev = vf_info->accel_dev; in adf_iov_send_resp() local
29 ret = adf_recv_and_handle_vf2pf_msg(accel_dev, vf_nr); in adf_iov_send_resp()
32 adf_enable_vf2pf_interrupts(accel_dev, 1 << vf_nr); in adf_iov_send_resp()
50 static int adf_enable_sriov(struct adf_accel_dev *accel_dev) in adf_enable_sriov() argument
52 struct pci_dev *pdev = accel_to_pci_dev(accel_dev); in adf_enable_sriov()
54 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_sriov()
58 for (i = 0, vf_info = accel_dev->pf.vf_info; i < totalvfs; in adf_enable_sriov()
61 vf_info->accel_dev = accel_dev; in adf_enable_sriov()
73 hw_data->configure_iov_threads(accel_dev, true); in adf_enable_sriov()
76 adf_enable_vf2pf_interrupts(accel_dev, BIT_ULL(totalvfs) - 1); in adf_enable_sriov()
[all …]
adf_common_drv.h
46 int (*event_hld)(struct adf_accel_dev *accel_dev,
57 int adf_dev_up(struct adf_accel_dev *accel_dev, bool init_config);
58 int adf_dev_down(struct adf_accel_dev *accel_dev, bool cache_config);
59 int adf_dev_restart(struct adf_accel_dev *accel_dev);
63 int adf_devmgr_add_dev(struct adf_accel_dev *accel_dev,
65 void adf_devmgr_rm_dev(struct adf_accel_dev *accel_dev,
73 int adf_devmgr_in_reset(struct adf_accel_dev *accel_dev);
74 int adf_dev_started(struct adf_accel_dev *accel_dev);
75 int adf_dev_restarting_notify(struct adf_accel_dev *accel_dev);
76 int adf_dev_restarted_notify(struct adf_accel_dev *accel_dev);
[all …]
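The adf_common_drv.h hits above are the device lifecycle entry points (adf_dev_up(), adf_dev_down(), adf_dev_restart()). A minimal sketch of how a caller might cycle a device using those prototypes; the meaning of the bool flags is assumed from the parameter names (init_config, cache_config) in the declarations:

/*
 * Sketch only: cycle a QAT device using the helpers declared above.
 * Assumes kernel context and a valid accel_dev reference held by the caller.
 */
#include "adf_common_drv.h"

static int example_cycle_device(struct adf_accel_dev *accel_dev)
{
	int ret;

	/* Stop the device; cache_config=true is assumed to preserve the current configuration. */
	ret = adf_dev_down(accel_dev, true);
	if (ret)
		return ret;

	/* Bring it back up; init_config=false is assumed to reuse the existing configuration. */
	return adf_dev_up(accel_dev, false);
}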
adf_sysfs.c
19 struct adf_accel_dev *accel_dev; in state_show() local
22 accel_dev = adf_devmgr_pci_to_accel_dev(to_pci_dev(dev)); in state_show()
23 if (!accel_dev) in state_show()
26 state = adf_dev_started(accel_dev) ? "up" : "down"; in state_show()
33 struct adf_accel_dev *accel_dev; in state_store() local
37 accel_dev = adf_devmgr_pci_to_accel_dev(to_pci_dev(dev)); in state_store()
38 if (!accel_dev) in state_store()
41 accel_id = accel_dev->accel_id; in state_store()
43 if (adf_devmgr_in_reset(accel_dev) || adf_dev_in_use(accel_dev)) { in state_store()
56 if (!adf_dev_started(accel_dev)) { in state_store()
[all …]
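The adf_sysfs.c hits above show the common pattern for resolving an accel_dev from a generic struct device: convert it to the underlying pci_dev and look it up through the device manager. A short sketch of that pattern in a sysfs show callback (attribute registration omitted):

/*
 * Sketch of the lookup used in state_show() above: map the struct device
 * back to its adf_accel_dev and report whether it has been started.
 */
#include <linux/device.h>
#include <linux/pci.h>
#include "adf_common_drv.h"

static ssize_t example_state_show(struct device *dev,
				  struct device_attribute *attr, char *buf)
{
	struct adf_accel_dev *accel_dev;

	accel_dev = adf_devmgr_pci_to_accel_dev(to_pci_dev(dev));
	if (!accel_dev)
		return -EINVAL;

	return sysfs_emit(buf, "%s\n",
			  adf_dev_started(accel_dev) ? "up" : "down");
}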
adf_heartbeat.c
31 static int adf_hb_check_polling_freq(struct adf_accel_dev *accel_dev) in adf_hb_check_polling_freq() argument
34 u64 polling_time = curr_time - accel_dev->heartbeat->last_hb_check_time; in adf_hb_check_polling_freq()
36 if (polling_time < accel_dev->heartbeat->hb_timer) { in adf_hb_check_polling_freq()
37 dev_warn(&GET_DEV(accel_dev), in adf_hb_check_polling_freq()
39 accel_dev->heartbeat->hb_timer); in adf_hb_check_polling_freq()
43 accel_dev->heartbeat->last_hb_check_time = curr_time; in adf_hb_check_polling_freq()
56 static bool validate_hb_ctrs_cnt(struct adf_accel_dev *accel_dev) in validate_hb_ctrs_cnt() argument
58 const size_t hb_ctrs = accel_dev->hw_device->num_hb_ctrs; in validate_hb_ctrs_cnt()
59 const size_t max_aes = accel_dev->hw_device->num_engines; in validate_hb_ctrs_cnt()
67 struct hb_cnt_pair *hb_stats = accel_dev->heartbeat->dma.virt_addr; in validate_hb_ctrs_cnt()
[all …]
adf_pfvf_vf_proto.c
33 int adf_send_vf2pf_msg(struct adf_accel_dev *accel_dev, struct pfvf_message msg) in adf_send_vf2pf_msg() argument
35 struct adf_pfvf_ops *pfvf_ops = GET_PFVF_OPS(accel_dev); in adf_send_vf2pf_msg()
38 return pfvf_ops->send_msg(accel_dev, msg, pfvf_offset, in adf_send_vf2pf_msg()
39 &accel_dev->vf.vf2pf_lock); in adf_send_vf2pf_msg()
50 static struct pfvf_message adf_recv_pf2vf_msg(struct adf_accel_dev *accel_dev) in adf_recv_pf2vf_msg() argument
52 struct adf_pfvf_ops *pfvf_ops = GET_PFVF_OPS(accel_dev); in adf_recv_pf2vf_msg()
55 return pfvf_ops->recv_msg(accel_dev, pfvf_offset, accel_dev->vf.pf_compat_ver); in adf_recv_pf2vf_msg()
69 int adf_send_vf2pf_req(struct adf_accel_dev *accel_dev, struct pfvf_message msg, in adf_send_vf2pf_req() argument
76 reinit_completion(&accel_dev->vf.msg_received); in adf_send_vf2pf_req()
80 ret = adf_send_vf2pf_msg(accel_dev, msg); in adf_send_vf2pf_req()
[all …]
adf_admin.c
112 static int adf_put_admin_msg_sync(struct adf_accel_dev *accel_dev, u32 ae, in adf_put_admin_msg_sync() argument
117 struct adf_admin_comms *admin = accel_dev->admin; in adf_put_admin_msg_sync()
139 dev_err(&GET_DEV(accel_dev), in adf_put_admin_msg_sync()
154 static int adf_send_admin(struct adf_accel_dev *accel_dev, in adf_send_admin() argument
162 if (adf_put_admin_msg_sync(accel_dev, ae, req, resp) || in adf_send_admin()
169 static int adf_init_ae(struct adf_accel_dev *accel_dev) in adf_init_ae() argument
173 struct adf_hw_device_data *hw_device = accel_dev->hw_device; in adf_init_ae()
180 return adf_send_admin(accel_dev, &req, &resp, ae_mask); in adf_init_ae()
183 static int adf_set_fw_constants(struct adf_accel_dev *accel_dev) in adf_set_fw_constants() argument
187 struct adf_hw_device_data *hw_device = accel_dev->hw_device; in adf_set_fw_constants()
[all …]
adf_pfvf_vf_msg.c
18 int adf_vf2pf_notify_init(struct adf_accel_dev *accel_dev) in adf_vf2pf_notify_init() argument
22 if (adf_send_vf2pf_msg(accel_dev, msg)) { in adf_vf2pf_notify_init()
23 dev_err(&GET_DEV(accel_dev), in adf_vf2pf_notify_init()
27 set_bit(ADF_STATUS_PF_RUNNING, &accel_dev->status); in adf_vf2pf_notify_init()
40 void adf_vf2pf_notify_shutdown(struct adf_accel_dev *accel_dev) in adf_vf2pf_notify_shutdown() argument
44 if (test_bit(ADF_STATUS_PF_RUNNING, &accel_dev->status)) in adf_vf2pf_notify_shutdown()
45 if (adf_send_vf2pf_msg(accel_dev, msg)) in adf_vf2pf_notify_shutdown()
46 dev_err(&GET_DEV(accel_dev), in adf_vf2pf_notify_shutdown()
51 int adf_vf2pf_request_version(struct adf_accel_dev *accel_dev) in adf_vf2pf_request_version() argument
64 ret = adf_send_vf2pf_req(accel_dev, msg, &resp); in adf_vf2pf_request_version()
[all …]
adf_aer.c
16 struct adf_accel_dev *accel_dev = adf_devmgr_pci_to_accel_dev(pdev); in adf_error_detected() local
19 if (!accel_dev) { in adf_error_detected()
35 struct adf_accel_dev *accel_dev; member
40 void adf_reset_sbr(struct adf_accel_dev *accel_dev) in adf_reset_sbr() argument
42 struct pci_dev *pdev = accel_to_pci_dev(accel_dev); in adf_reset_sbr()
50 dev_info(&GET_DEV(accel_dev), in adf_reset_sbr()
53 dev_info(&GET_DEV(accel_dev), "Secondary bus reset\n"); in adf_reset_sbr()
65 void adf_reset_flr(struct adf_accel_dev *accel_dev) in adf_reset_flr() argument
67 pcie_flr(accel_to_pci_dev(accel_dev)); in adf_reset_flr()
71 void adf_dev_restore(struct adf_accel_dev *accel_dev) in adf_dev_restore() argument
[all …]
adf_accel_devices.h
156 int (*enable_comms)(struct adf_accel_dev *accel_dev);
162 int (*send_msg)(struct adf_accel_dev *accel_dev, struct pfvf_message msg,
164 struct pfvf_message (*recv_msg)(struct adf_accel_dev *accel_dev,
176 u32 (*get_accel_cap)(struct adf_accel_dev *accel_dev);
185 u16 (*get_ring_to_svc_map)(struct adf_accel_dev *accel_dev);
186 int (*alloc_irq)(struct adf_accel_dev *accel_dev);
187 void (*free_irq)(struct adf_accel_dev *accel_dev);
188 void (*enable_error_correction)(struct adf_accel_dev *accel_dev);
189 int (*init_admin_comms)(struct adf_accel_dev *accel_dev);
190 void (*exit_admin_comms)(struct adf_accel_dev *accel_dev);
[all …]
qat_compression.c
21 adf_dev_put(inst->accel_dev); in qat_compression_put_instance()
24 static int qat_compression_free_instances(struct adf_accel_dev *accel_dev) in qat_compression_free_instances() argument
30 list_for_each_safe(list_ptr, tmp, &accel_dev->compression_list) { in qat_compression_free_instances()
52 struct adf_accel_dev *accel_dev = NULL; in qat_compression_get_instance_node() local
68 accel_dev = tmp_dev; in qat_compression_get_instance_node()
74 if (!accel_dev) { in qat_compression_get_instance_node()
83 accel_dev = tmp_dev; in qat_compression_get_instance_node()
89 if (!accel_dev) in qat_compression_get_instance_node()
93 list_for_each(itr, &accel_dev->compression_list) { in qat_compression_get_instance_node()
105 if (adf_dev_get(accel_dev)) { in qat_compression_get_instance_node()
[all …]
qat_crypto.c
21 adf_dev_put(inst->accel_dev); in qat_crypto_put_instance()
24 static int qat_crypto_free_instances(struct adf_accel_dev *accel_dev) in qat_crypto_free_instances() argument
29 list_for_each_entry_safe(inst, tmp, &accel_dev->crypto_list, list) { in qat_crypto_free_instances()
53 struct adf_accel_dev *accel_dev = NULL, *tmp_dev; in qat_crypto_get_instance_node() local
66 accel_dev = tmp_dev; in qat_crypto_get_instance_node()
72 if (!accel_dev) { in qat_crypto_get_instance_node()
78 accel_dev = tmp_dev; in qat_crypto_get_instance_node()
84 if (!accel_dev) in qat_crypto_get_instance_node()
88 list_for_each_entry(tmp_inst, &accel_dev->crypto_list, list) { in qat_crypto_get_instance_node()
98 if (adf_dev_get(accel_dev)) { in qat_crypto_get_instance_node()
[all …]
adf_gen2_config.c
13 static int adf_gen2_crypto_dev_config(struct adf_accel_dev *accel_dev) in adf_gen2_crypto_dev_config() argument
16 int banks = GET_MAX_BANKS(accel_dev); in adf_gen2_crypto_dev_config()
23 if (adf_hw_dev_has_crypto(accel_dev)) in adf_gen2_crypto_dev_config()
31 ret = adf_cfg_add_key_value_param(accel_dev, ADF_KERNEL_SEC, in adf_gen2_crypto_dev_config()
37 ret = adf_cfg_add_key_value_param(accel_dev, ADF_KERNEL_SEC, in adf_gen2_crypto_dev_config()
44 ret = adf_cfg_add_key_value_param(accel_dev, ADF_KERNEL_SEC, in adf_gen2_crypto_dev_config()
51 ret = adf_cfg_add_key_value_param(accel_dev, ADF_KERNEL_SEC, in adf_gen2_crypto_dev_config()
58 ret = adf_cfg_add_key_value_param(accel_dev, ADF_KERNEL_SEC, in adf_gen2_crypto_dev_config()
65 ret = adf_cfg_add_key_value_param(accel_dev, ADF_KERNEL_SEC, in adf_gen2_crypto_dev_config()
72 ret = adf_cfg_add_key_value_param(accel_dev, ADF_KERNEL_SEC, in adf_gen2_crypto_dev_config()
[all …]
adf_dbgfs.c
19 void adf_dbgfs_init(struct adf_accel_dev *accel_dev) in adf_dbgfs_init() argument
25 accel_dev->hw_device->dev_class->name, in adf_dbgfs_init()
26 pci_name(accel_dev->accel_pci_dev.pci_dev)); in adf_dbgfs_init()
28 accel_dev->debugfs_dir = debugfs_create_dir(name, NULL); in adf_dbgfs_init()
30 adf_cfg_dev_dbgfs_add(accel_dev); in adf_dbgfs_init()
38 void adf_dbgfs_exit(struct adf_accel_dev *accel_dev) in adf_dbgfs_exit() argument
40 adf_cfg_dev_dbgfs_rm(accel_dev); in adf_dbgfs_exit()
41 debugfs_remove(accel_dev->debugfs_dir); in adf_dbgfs_exit()
52 void adf_dbgfs_add(struct adf_accel_dev *accel_dev) in adf_dbgfs_add() argument
54 if (!accel_dev->is_vf) { in adf_dbgfs_add()
[all …]
adf_ctl_drv.c
111 static int adf_add_key_value_data(struct adf_accel_dev *accel_dev, in adf_add_key_value_data() argument
119 if (adf_cfg_add_key_value_param(accel_dev, section, in adf_add_key_value_data()
122 dev_err(&GET_DEV(accel_dev), in adf_add_key_value_data()
127 if (adf_cfg_add_key_value_param(accel_dev, section, in adf_add_key_value_data()
130 dev_err(&GET_DEV(accel_dev), in adf_add_key_value_data()
138 static int adf_copy_key_value_data(struct adf_accel_dev *accel_dev, in adf_copy_key_value_data() argument
151 dev_err(&GET_DEV(accel_dev), in adf_copy_key_value_data()
156 if (adf_cfg_section_add(accel_dev, section.name)) { in adf_copy_key_value_data()
157 dev_err(&GET_DEV(accel_dev), in adf_copy_key_value_data()
167 dev_err(&GET_DEV(accel_dev), in adf_copy_key_value_data()
[all …]
/openbmc/linux/drivers/crypto/intel/qat/qat_4xxx/
adf_drv.c
27 static void adf_cleanup_accel(struct adf_accel_dev *accel_dev) in adf_cleanup_accel() argument
29 if (accel_dev->hw_device) { in adf_cleanup_accel()
30 adf_clean_hw_data_4xxx(accel_dev->hw_device); in adf_cleanup_accel()
31 accel_dev->hw_device = NULL; in adf_cleanup_accel()
33 adf_dbgfs_exit(accel_dev); in adf_cleanup_accel()
34 adf_cfg_dev_remove(accel_dev); in adf_cleanup_accel()
35 adf_devmgr_rm_dev(accel_dev, NULL); in adf_cleanup_accel()
38 static int adf_cfg_dev_init(struct adf_accel_dev *accel_dev) in adf_cfg_dev_init() argument
43 config = accel_dev->accel_id % 2 ? ADF_CFG_DC : ADF_CFG_CY; in adf_cfg_dev_init()
45 ret = adf_cfg_section_add(accel_dev, ADF_GENERAL_SEC); in adf_cfg_dev_init()
[all …]
/openbmc/linux/drivers/crypto/intel/qat/qat_c3xxxvf/
adf_drv.c
38 static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev) in adf_cleanup_pci_dev() argument
40 pci_release_regions(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
41 pci_disable_device(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
44 static void adf_cleanup_accel(struct adf_accel_dev *accel_dev) in adf_cleanup_accel() argument
46 struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev; in adf_cleanup_accel()
57 if (accel_dev->hw_device) { in adf_cleanup_accel()
60 adf_clean_hw_data_c3xxxiov(accel_dev->hw_device); in adf_cleanup_accel()
65 kfree(accel_dev->hw_device); in adf_cleanup_accel()
66 accel_dev->hw_device = NULL; in adf_cleanup_accel()
68 adf_dbgfs_exit(accel_dev); in adf_cleanup_accel()
[all …]
/openbmc/linux/drivers/crypto/intel/qat/qat_c62xvf/
adf_drv.c
38 static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev) in adf_cleanup_pci_dev() argument
40 pci_release_regions(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
41 pci_disable_device(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
44 static void adf_cleanup_accel(struct adf_accel_dev *accel_dev) in adf_cleanup_accel() argument
46 struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev; in adf_cleanup_accel()
57 if (accel_dev->hw_device) { in adf_cleanup_accel()
60 adf_clean_hw_data_c62xiov(accel_dev->hw_device); in adf_cleanup_accel()
65 kfree(accel_dev->hw_device); in adf_cleanup_accel()
66 accel_dev->hw_device = NULL; in adf_cleanup_accel()
68 adf_dbgfs_exit(accel_dev); in adf_cleanup_accel()
[all …]
/openbmc/linux/drivers/crypto/intel/qat/qat_dh895xccvf/
adf_drv.c
38 static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev) in adf_cleanup_pci_dev() argument
40 pci_release_regions(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
41 pci_disable_device(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
44 static void adf_cleanup_accel(struct adf_accel_dev *accel_dev) in adf_cleanup_accel() argument
46 struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev; in adf_cleanup_accel()
57 if (accel_dev->hw_device) { in adf_cleanup_accel()
60 adf_clean_hw_data_dh895xcciov(accel_dev->hw_device); in adf_cleanup_accel()
65 kfree(accel_dev->hw_device); in adf_cleanup_accel()
66 accel_dev->hw_device = NULL; in adf_cleanup_accel()
68 adf_dbgfs_exit(accel_dev); in adf_cleanup_accel()
[all …]
/openbmc/linux/drivers/crypto/intel/qat/qat_c62x/
adf_drv.c
40 static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev) in adf_cleanup_pci_dev() argument
42 pci_release_regions(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
43 pci_disable_device(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
46 static void adf_cleanup_accel(struct adf_accel_dev *accel_dev) in adf_cleanup_accel() argument
48 struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev; in adf_cleanup_accel()
58 if (accel_dev->hw_device) { in adf_cleanup_accel()
61 adf_clean_hw_data_c62x(accel_dev->hw_device); in adf_cleanup_accel()
66 kfree(accel_dev->hw_device); in adf_cleanup_accel()
67 accel_dev->hw_device = NULL; in adf_cleanup_accel()
69 adf_dbgfs_exit(accel_dev); in adf_cleanup_accel()
[all …]
/openbmc/linux/drivers/crypto/intel/qat/qat_c3xxx/
adf_drv.c
40 static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev) in adf_cleanup_pci_dev() argument
42 pci_release_regions(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
43 pci_disable_device(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
46 static void adf_cleanup_accel(struct adf_accel_dev *accel_dev) in adf_cleanup_accel() argument
48 struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev; in adf_cleanup_accel()
58 if (accel_dev->hw_device) { in adf_cleanup_accel()
61 adf_clean_hw_data_c3xxx(accel_dev->hw_device); in adf_cleanup_accel()
66 kfree(accel_dev->hw_device); in adf_cleanup_accel()
67 accel_dev->hw_device = NULL; in adf_cleanup_accel()
69 adf_dbgfs_exit(accel_dev); in adf_cleanup_accel()
[all …]
/openbmc/linux/drivers/crypto/intel/qat/qat_dh895xcc/
adf_drv.c
40 static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev) in adf_cleanup_pci_dev() argument
42 pci_release_regions(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
43 pci_disable_device(accel_dev->accel_pci_dev.pci_dev); in adf_cleanup_pci_dev()
46 static void adf_cleanup_accel(struct adf_accel_dev *accel_dev) in adf_cleanup_accel() argument
48 struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev; in adf_cleanup_accel()
58 if (accel_dev->hw_device) { in adf_cleanup_accel()
61 adf_clean_hw_data_dh895xcc(accel_dev->hw_device); in adf_cleanup_accel()
66 kfree(accel_dev->hw_device); in adf_cleanup_accel()
67 accel_dev->hw_device = NULL; in adf_cleanup_accel()
69 adf_dbgfs_exit(accel_dev); in adf_cleanup_accel()
[all …]
