Lines Matching refs:m3_ipc

96 static unsigned long wkup_m3_copy_aux_data(struct wkup_m3_ipc *m3_ipc,  in wkup_m3_copy_aux_data()  argument
103 aux_data_addr = rproc_da_to_va(m3_ipc->rproc, in wkup_m3_copy_aux_data()
116 struct wkup_m3_ipc *m3_ipc = context; in wkup_m3_scale_data_fw_cb() local
117 struct device *dev = m3_ipc->dev; in wkup_m3_scale_data_fw_cb()
131 aux_base = wkup_m3_copy_aux_data(m3_ipc, fw->data + sizeof(hdr), in wkup_m3_scale_data_fw_cb()
137 m3_ipc->volt_scale_offsets = val; in wkup_m3_scale_data_fw_cb()
143 static int wkup_m3_init_scale_data(struct wkup_m3_ipc *m3_ipc, in wkup_m3_init_scale_data() argument
153 if (!m3_ipc->sd_fw_name) in wkup_m3_init_scale_data()
157 m3_ipc->sd_fw_name, dev, GFP_ATOMIC, in wkup_m3_init_scale_data()
158 m3_ipc, wkup_m3_scale_data_fw_cb); in wkup_m3_init_scale_data()
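The two request_firmware_nowait() argument lines above show the optional voltage-scale blob being loaded asynchronously so probe never blocks on user space. A minimal, self-contained sketch of that pattern follows; FW_ACTION_UEVENT (FW_ACTION_HOTPLUG on older kernels) and every "example_*" name are assumptions for illustration, not the driver's own code.

    #include <linux/device.h>
    #include <linux/firmware.h>
    #include <linux/module.h>

    /* Illustrative callback: runs whenever the loader resolves the request. */
    static void example_scale_data_cb(const struct firmware *fw, void *context)
    {
        struct device *dev = context;

        if (!fw) {
            dev_warn(dev, "scale data firmware not found, using defaults\n");
            return;
        }

        /* parse fw->data / fw->size here */
        release_firmware(fw);
    }

    static int example_request_scale_data(struct device *dev, const char *name)
    {
        /* Non-blocking request; the callback fires later from the loader. */
        return request_firmware_nowait(THIS_MODULE, FW_ACTION_UEVENT, name,
                                       dev, GFP_ATOMIC, dev,
                                       example_scale_data_cb);
    }
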
200 static int wkup_m3_ipc_dbg_init(struct wkup_m3_ipc *m3_ipc) in wkup_m3_ipc_dbg_init() argument
202 m3_ipc->dbg_path = debugfs_create_dir("wkup_m3_ipc", NULL); in wkup_m3_ipc_dbg_init()
204 if (IS_ERR(m3_ipc->dbg_path)) in wkup_m3_ipc_dbg_init()
208 m3_ipc->dbg_path, in wkup_m3_ipc_dbg_init()
209 &m3_ipc->halt, in wkup_m3_ipc_dbg_init()
215 static inline void wkup_m3_ipc_dbg_destroy(struct wkup_m3_ipc *m3_ipc) in wkup_m3_ipc_dbg_destroy() argument
217 debugfs_remove_recursive(m3_ipc->dbg_path); in wkup_m3_ipc_dbg_destroy()
220 static inline int wkup_m3_ipc_dbg_init(struct wkup_m3_ipc *m3_ipc) in wkup_m3_ipc_dbg_init() argument
225 static inline void wkup_m3_ipc_dbg_destroy(struct wkup_m3_ipc *m3_ipc) in wkup_m3_ipc_dbg_destroy() argument
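The debugfs lines above create a "wkup_m3_ipc" directory with an entry wired to m3_ipc->halt, and the inline stubs cover the !CONFIG_DEBUG_FS build. A rough sketch of that shape, with the caveat that the entry name, mode and the use of debugfs_create_u32() are assumptions, since the listing does not show the driver's actual file ops.

    #include <linux/debugfs.h>
    #include <linux/err.h>

    static struct dentry *example_dbg_init(u32 *halt)
    {
        struct dentry *dir = debugfs_create_dir("wkup_m3_ipc", NULL);

        if (IS_ERR(dir))
            return dir;

        /* Expose the halt flag; the real driver installs its own fops. */
        debugfs_create_u32("halt", 0644, dir, halt);
        return dir;
    }

    static void example_dbg_destroy(struct dentry *dir)
    {
        debugfs_remove_recursive(dir);
    }
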
230 static void am33xx_txev_eoi(struct wkup_m3_ipc *m3_ipc) in am33xx_txev_eoi() argument
233 m3_ipc->ipc_mem_base + AM33XX_CONTROL_M3_TXEV_EOI); in am33xx_txev_eoi()
236 static void am33xx_txev_enable(struct wkup_m3_ipc *m3_ipc) in am33xx_txev_enable() argument
239 m3_ipc->ipc_mem_base + AM33XX_CONTROL_M3_TXEV_EOI); in am33xx_txev_enable()
242 static void wkup_m3_ctrl_ipc_write(struct wkup_m3_ipc *m3_ipc, in wkup_m3_ctrl_ipc_write() argument
249 writel(val, m3_ipc->ipc_mem_base + in wkup_m3_ctrl_ipc_write()
253 static unsigned int wkup_m3_ctrl_ipc_read(struct wkup_m3_ipc *m3_ipc, in wkup_m3_ctrl_ipc_read() argument
260 return readl(m3_ipc->ipc_mem_base + in wkup_m3_ctrl_ipc_read()
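wkup_m3_ctrl_ipc_read()/_write() above are thin readl()/writel() wrappers over a small bank of 32-bit IPC message registers in the ioremapped control-module space. A sketch of that access pattern, assuming one register per message slot at a fixed stride; the offset macro is a stand-in, not the driver's real AM33XX_* constant.

    #include <linux/io.h>

    /* Hypothetical layout: message slot N is a 32-bit register at a
     * fixed stride from the ioremapped base.
     */
    #define EXAMPLE_IPC_MSG_REG(n)   (0x4 + 4 * (n))

    static void example_ipc_write(void __iomem *base, u32 val, int reg)
    {
        writel(val, base + EXAMPLE_IPC_MSG_REG(reg));
    }

    static u32 example_ipc_read(void __iomem *base, int reg)
    {
        return readl(base + EXAMPLE_IPC_MSG_REG(reg));
    }
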
264 static int wkup_m3_fw_version_read(struct wkup_m3_ipc *m3_ipc) in wkup_m3_fw_version_read() argument
268 val = wkup_m3_ctrl_ipc_read(m3_ipc, 2); in wkup_m3_fw_version_read()
275 struct wkup_m3_ipc *m3_ipc = ipc_data; in wkup_m3_txev_handler() local
276 struct device *dev = m3_ipc->dev; in wkup_m3_txev_handler()
279 am33xx_txev_eoi(m3_ipc); in wkup_m3_txev_handler()
281 switch (m3_ipc->state) { in wkup_m3_txev_handler()
283 ver = wkup_m3_fw_version_read(m3_ipc); in wkup_m3_txev_handler()
293 m3_ipc->state = M3_STATE_INITED; in wkup_m3_txev_handler()
294 wkup_m3_init_scale_data(m3_ipc, dev); in wkup_m3_txev_handler()
295 complete(&m3_ipc->sync_complete); in wkup_m3_txev_handler()
298 m3_ipc->state = M3_STATE_INITED; in wkup_m3_txev_handler()
299 complete(&m3_ipc->sync_complete); in wkup_m3_txev_handler()
302 complete(&m3_ipc->sync_complete); in wkup_m3_txev_handler()
308 am33xx_txev_enable(m3_ipc); in wkup_m3_txev_handler()
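Read together, the wkup_m3_txev_handler() lines above give the TXEV interrupt flow: acknowledge the event, advance the state machine (on the first event after reset the firmware version is read and the scale-data load is kicked off), wake whoever sleeps on sync_complete, then re-arm the event. A simplified skeleton of that flow, reusing the driver's own symbols from this listing (so it only makes sense inside the driver); the version print and error handling are omitted.

    static irqreturn_t example_txev_handler(int irq, void *ipc_data)
    {
        struct wkup_m3_ipc *m3_ipc = ipc_data;

        am33xx_txev_eoi(m3_ipc);                 /* ack the M3 event */

        switch (m3_ipc->state) {
        case M3_STATE_RESET:                     /* first event: firmware is up */
            wkup_m3_fw_version_read(m3_ipc);     /* version print omitted */
            wkup_m3_init_scale_data(m3_ipc, m3_ipc->dev);
            m3_ipc->state = M3_STATE_INITED;
            break;
        case M3_STATE_MSG_FOR_RESET:             /* ack for a reset request */
            m3_ipc->state = M3_STATE_INITED;
            break;
        case M3_STATE_MSG_FOR_LP:                /* ack for a low-power request */
        default:
            break;
        }

        complete(&m3_ipc->sync_complete);        /* unblock wkup_m3_ping() */
        am33xx_txev_enable(m3_ipc);              /* re-arm for the next event */

        return IRQ_HANDLED;
    }
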
313 static int wkup_m3_ping(struct wkup_m3_ipc *m3_ipc) in wkup_m3_ping() argument
315 struct device *dev = m3_ipc->dev; in wkup_m3_ping()
318 if (!m3_ipc->mbox) { in wkup_m3_ping()
331 ret = mbox_send_message(m3_ipc->mbox, NULL); in wkup_m3_ping()
338 ret = wait_for_completion_timeout(&m3_ipc->sync_complete, in wkup_m3_ping()
342 m3_ipc->state = M3_STATE_UNKNOWN; in wkup_m3_ping()
346 mbox_client_txdone(m3_ipc->mbox, 0); in wkup_m3_ping()
350 static int wkup_m3_ping_noirq(struct wkup_m3_ipc *m3_ipc) in wkup_m3_ping_noirq() argument
352 struct device *dev = m3_ipc->dev; in wkup_m3_ping_noirq()
355 if (!m3_ipc->mbox) { in wkup_m3_ping_noirq()
361 ret = mbox_send_message(m3_ipc->mbox, NULL); in wkup_m3_ping_noirq()
368 mbox_client_txdone(m3_ipc->mbox, 0); in wkup_m3_ping_noirq()
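wkup_m3_ping() above is the handshake primitive: send an empty mailbox message purely to raise the M3's mailbox interrupt, then sleep on sync_complete until the TXEV handler completes it; wkup_m3_ping_noirq() does the same send without the wait, for the suspend noirq phase. A self-contained sketch of the blocking variant, assuming a 500 ms timeout (the value is not visible in this listing); names are illustrative.

    #include <linux/completion.h>
    #include <linux/errno.h>
    #include <linux/jiffies.h>
    #include <linux/mailbox_client.h>

    static int example_ping(struct mbox_chan *mbox, struct completion *sync)
    {
        int ret;

        /* The payload is ignored; the interrupt itself is the message. */
        ret = mbox_send_message(mbox, NULL);
        if (ret < 0)
            return ret;

        /* Woken by the TXEV handler's complete() when the M3 answers. */
        ret = wait_for_completion_timeout(sync, msecs_to_jiffies(500));

        /* Flag the transfer as done by hand, as the driver does. */
        mbox_client_txdone(mbox, 0);

        return ret ? 0 : -ETIMEDOUT;
    }
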
372 static int wkup_m3_is_available(struct wkup_m3_ipc *m3_ipc) in wkup_m3_is_available() argument
374 return ((m3_ipc->state != M3_STATE_RESET) && in wkup_m3_is_available()
375 (m3_ipc->state != M3_STATE_UNKNOWN)); in wkup_m3_is_available()
378 static void wkup_m3_set_vtt_gpio(struct wkup_m3_ipc *m3_ipc, int gpio) in wkup_m3_set_vtt_gpio() argument
380 m3_ipc->vtt_conf = (1 << IPC_VTT_STAT_SHIFT) | in wkup_m3_set_vtt_gpio()
384 static void wkup_m3_set_io_isolation(struct wkup_m3_ipc *m3_ipc) in wkup_m3_set_io_isolation() argument
386 m3_ipc->isolation_conf = (1 << IPC_IO_ISOLATION_STAT_SHIFT); in wkup_m3_set_io_isolation()
398 static void wkup_m3_set_mem_type(struct wkup_m3_ipc *m3_ipc, int mem_type) in wkup_m3_set_mem_type() argument
400 m3_ipc->mem_type = mem_type; in wkup_m3_set_mem_type()
408 static void wkup_m3_set_resume_address(struct wkup_m3_ipc *m3_ipc, void *addr) in wkup_m3_set_resume_address() argument
410 m3_ipc->resume_addr = (unsigned long)addr; in wkup_m3_set_resume_address()
421 static int wkup_m3_request_pm_status(struct wkup_m3_ipc *m3_ipc) in wkup_m3_request_pm_status() argument
426 val = wkup_m3_ctrl_ipc_read(m3_ipc, 1); in wkup_m3_request_pm_status()
442 static int wkup_m3_prepare_low_power(struct wkup_m3_ipc *m3_ipc, int state) in wkup_m3_prepare_low_power() argument
444 struct device *dev = m3_ipc->dev; in wkup_m3_prepare_low_power()
448 if (!wkup_m3_is_available(m3_ipc)) in wkup_m3_prepare_low_power()
454 wkup_m3_ctrl_ipc_write(m3_ipc, m3_ipc->volt_scale_offsets, 5); in wkup_m3_prepare_low_power()
458 wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 5); in wkup_m3_prepare_low_power()
462 wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 5); in wkup_m3_prepare_low_power()
469 wkup_m3_ctrl_ipc_write(m3_ipc, m3_ipc->resume_addr, 0); in wkup_m3_prepare_low_power()
470 wkup_m3_ctrl_ipc_write(m3_ipc, m3_power_state, 1); in wkup_m3_prepare_low_power()
471 wkup_m3_ctrl_ipc_write(m3_ipc, m3_ipc->mem_type | in wkup_m3_prepare_low_power()
472 m3_ipc->vtt_conf | in wkup_m3_prepare_low_power()
473 m3_ipc->isolation_conf | in wkup_m3_prepare_low_power()
474 m3_ipc->halt, 4); in wkup_m3_prepare_low_power()
476 wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 2); in wkup_m3_prepare_low_power()
477 wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 3); in wkup_m3_prepare_low_power()
478 wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 6); in wkup_m3_prepare_low_power()
479 wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 7); in wkup_m3_prepare_low_power()
481 m3_ipc->state = M3_STATE_MSG_FOR_LP; in wkup_m3_prepare_low_power()
484 ret = wkup_m3_ping_noirq(m3_ipc); in wkup_m3_prepare_low_power()
486 ret = wkup_m3_ping(m3_ipc); in wkup_m3_prepare_low_power()
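The wkup_m3_prepare_low_power() lines above populate the IPC register bank before control is handed to the M3: the resume address goes in slot 0, the requested power state in slot 1, the combined memory-type/VTT/IO-isolation/halt configuration in slot 4, the voltage-scale offsets (or DS_IPC_DEFAULT) in slot 5, and DS_IPC_DEFAULT in the remaining slots; the state then moves to M3_STATE_MSG_FOR_LP and the M3 is pinged (the listing shows both the noirq and the normal ping being used, presumably depending on the target state). A condensed sketch reusing the driver's own helpers from this listing; the helper name and the cmd parameter are illustrative.

    static void example_fill_lp_message(struct wkup_m3_ipc *m3_ipc, u32 cmd)
    {
        wkup_m3_ctrl_ipc_write(m3_ipc, m3_ipc->resume_addr, 0);   /* resume address */
        wkup_m3_ctrl_ipc_write(m3_ipc, cmd, 1);                   /* requested power state */
        wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 2);
        wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 3);
        wkup_m3_ctrl_ipc_write(m3_ipc, m3_ipc->mem_type |
                                       m3_ipc->vtt_conf |
                                       m3_ipc->isolation_conf |
                                       m3_ipc->halt, 4);          /* board configuration */
        wkup_m3_ctrl_ipc_write(m3_ipc, m3_ipc->volt_scale_offsets, 5);
        wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 6);
        wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 7);

        m3_ipc->state = M3_STATE_MSG_FOR_LP;    /* next TXEV acknowledges this message */
    }
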
502 static int wkup_m3_finish_low_power(struct wkup_m3_ipc *m3_ipc) in wkup_m3_finish_low_power() argument
504 struct device *dev = m3_ipc->dev; in wkup_m3_finish_low_power()
507 if (!wkup_m3_is_available(m3_ipc)) in wkup_m3_finish_low_power()
510 wkup_m3_ctrl_ipc_write(m3_ipc, IPC_CMD_RESET, 1); in wkup_m3_finish_low_power()
511 wkup_m3_ctrl_ipc_write(m3_ipc, DS_IPC_DEFAULT, 2); in wkup_m3_finish_low_power()
513 m3_ipc->state = M3_STATE_MSG_FOR_RESET; in wkup_m3_finish_low_power()
515 ret = wkup_m3_ping(m3_ipc); in wkup_m3_finish_low_power()
528 static const char *wkup_m3_request_wake_src(struct wkup_m3_ipc *m3_ipc) in wkup_m3_request_wake_src() argument
533 val = wkup_m3_ctrl_ipc_read(m3_ipc, 6); in wkup_m3_request_wake_src()
548 static void wkup_m3_set_rtc_only(struct wkup_m3_ipc *m3_ipc) in wkup_m3_set_rtc_only() argument
585 void wkup_m3_ipc_put(struct wkup_m3_ipc *m3_ipc) in wkup_m3_ipc_put() argument
594 struct wkup_m3_ipc *m3_ipc = arg; in wkup_m3_rproc_boot_thread() local
595 struct device *dev = m3_ipc->dev; in wkup_m3_rproc_boot_thread()
598 init_completion(&m3_ipc->sync_complete); in wkup_m3_rproc_boot_thread()
600 ret = rproc_boot(m3_ipc->rproc); in wkup_m3_rproc_boot_thread()
604 m3_ipc_state = m3_ipc; in wkup_m3_rproc_boot_thread()
616 struct wkup_m3_ipc *m3_ipc; in wkup_m3_ipc_probe() local
619 m3_ipc = devm_kzalloc(dev, sizeof(*m3_ipc), GFP_KERNEL); in wkup_m3_ipc_probe()
620 if (!m3_ipc) in wkup_m3_ipc_probe()
623 m3_ipc->ipc_mem_base = devm_platform_ioremap_resource(pdev, 0); in wkup_m3_ipc_probe()
624 if (IS_ERR(m3_ipc->ipc_mem_base)) in wkup_m3_ipc_probe()
625 return PTR_ERR(m3_ipc->ipc_mem_base); in wkup_m3_ipc_probe()
632 0, "wkup_m3_txev", m3_ipc); in wkup_m3_ipc_probe()
638 m3_ipc->mbox_client.dev = dev; in wkup_m3_ipc_probe()
639 m3_ipc->mbox_client.tx_done = NULL; in wkup_m3_ipc_probe()
640 m3_ipc->mbox_client.tx_prepare = NULL; in wkup_m3_ipc_probe()
641 m3_ipc->mbox_client.rx_callback = NULL; in wkup_m3_ipc_probe()
642 m3_ipc->mbox_client.tx_block = false; in wkup_m3_ipc_probe()
643 m3_ipc->mbox_client.knows_txdone = false; in wkup_m3_ipc_probe()
645 m3_ipc->mbox = mbox_request_channel(&m3_ipc->mbox_client, 0); in wkup_m3_ipc_probe()
647 if (IS_ERR(m3_ipc->mbox)) { in wkup_m3_ipc_probe()
649 PTR_ERR(m3_ipc->mbox)); in wkup_m3_ipc_probe()
650 return PTR_ERR(m3_ipc->mbox); in wkup_m3_ipc_probe()
666 m3_ipc->rproc = m3_rproc; in wkup_m3_ipc_probe()
667 m3_ipc->dev = dev; in wkup_m3_ipc_probe()
668 m3_ipc->state = M3_STATE_RESET; in wkup_m3_ipc_probe()
670 m3_ipc->ops = &ipc_ops; in wkup_m3_ipc_probe()
674 wkup_m3_set_vtt_gpio(m3_ipc, temp); in wkup_m3_ipc_probe()
680 wkup_m3_set_io_isolation(m3_ipc); in wkup_m3_ipc_probe()
683 &m3_ipc->sd_fw_name); in wkup_m3_ipc_probe()
693 task = kthread_run(wkup_m3_rproc_boot_thread, m3_ipc, in wkup_m3_ipc_probe()
702 wkup_m3_ipc_dbg_init(m3_ipc); in wkup_m3_ipc_probe()
709 mbox_free_channel(m3_ipc->mbox); in wkup_m3_ipc_probe()
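For probe, the listing shows the IPC register space being ioremapped, the "wkup_m3_txev" interrupt being requested, a fully passive mailbox client being configured (no callbacks, non-blocking TX), channel 0 being requested, the rproc/dev/state/ops fields being filled in, optional VTT GPIO and IO-isolation configuration being applied, and a kthread booting the M3 remoteproc so probe does not wait for firmware. A minimal sketch of just the mailbox-client portion, with illustrative names; the field values mirror the lines above.

    #include <linux/device.h>
    #include <linux/mailbox_client.h>

    static struct mbox_chan *example_request_m3_channel(struct device *dev,
                                                        struct mbox_client *cl)
    {
        cl->dev          = dev;
        cl->tx_done      = NULL;
        cl->tx_prepare   = NULL;
        cl->rx_callback  = NULL;    /* replies arrive via the TXEV IRQ instead */
        cl->tx_block     = false;
        cl->knows_txdone = false;

        /* Index 0 of the node's "mboxes" phandle list. */
        return mbox_request_channel(cl, 0);
    }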