Lines matching refs: __smem (drivers/soc/qcom/smem.c)
357 static struct qcom_smem *__smem; variable
384 return hwspin_lock_bust(__smem->hwlock, SMEM_HOST_ID_TO_HWSPINLOCK_ID(host)); in qcom_smem_bust_hwspin_lock_by_host()
395 return !!__smem; in qcom_smem_is_available()
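
The three lines above are the public face of the singleton: __smem is set once at probe time, qcom_smem_is_available() merely reports whether that has happened, and qcom_smem_bust_hwspin_lock_by_host() reaches into it to recover the hardware spinlock after a remote host crashes. A minimal sketch of a consumer deferring until SMEM is up, assuming only the exported qcom_smem_is_available() helper (the driver and probe names are hypothetical):

#include <linux/errno.h>
#include <linux/platform_device.h>
#include <linux/soc/qcom/smem.h>

static int example_probe(struct platform_device *pdev)
{
	/*
	 * __smem stays NULL until qcom_smem_probe() publishes it, so a
	 * consumer that races with SMEM simply defers and retries later.
	 */
	if (!qcom_smem_is_available())
		return -EPROBE_DEFER;

	dev_info(&pdev->dev, "SMEM is ready\n");
	return 0;
}
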
509 if (!__smem) in qcom_smem_alloc()
513 dev_err(__smem->dev, in qcom_smem_alloc()
518 if (WARN_ON(item >= __smem->item_count)) in qcom_smem_alloc()
521 ret = hwspin_lock_timeout_irqsave(__smem->hwlock, in qcom_smem_alloc()
527 if (host < SMEM_HOST_COUNT && __smem->partitions[host].virt_base) { in qcom_smem_alloc()
528 part = &__smem->partitions[host]; in qcom_smem_alloc()
529 ret = qcom_smem_alloc_private(__smem, part, item, size); in qcom_smem_alloc()
530 } else if (__smem->global_partition.virt_base) { in qcom_smem_alloc()
531 part = &__smem->global_partition; in qcom_smem_alloc()
532 ret = qcom_smem_alloc_private(__smem, part, item, size); in qcom_smem_alloc()
534 ret = qcom_smem_alloc_global(__smem, item, size); in qcom_smem_alloc()
537 hwspin_unlock_irqrestore(__smem->hwlock, &flags); in qcom_smem_alloc()
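
The hunk above is the allocator's guard-and-dispatch pattern: take the hardware spinlock, prefer the requesting host's private partition, fall back to the global partition, and finally to the global heap. A client never sees any of that; a hedged sketch of the call side, assuming only the exported qcom_smem_alloc() (the item number is made up for illustration):

#include <linux/errno.h>
#include <linux/soc/qcom/smem.h>

#define EXAMPLE_SMEM_ITEM	499	/* hypothetical item number */

static int example_reserve_item(size_t size)
{
	int ret;

	/*
	 * Reserve the item; -EEXIST just means another host (or an
	 * earlier boot stage) already allocated it, which is fine.
	 */
	ret = qcom_smem_alloc(QCOM_SMEM_HOST_ANY, EXAMPLE_SMEM_ITEM, size);
	if (ret < 0 && ret != -EEXIST)
		return ret;

	return 0;
}

Treating -EEXIST as success matches how in-tree SMEM clients typically handle items the remote side may have allocated first.
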
688 if (!__smem) in qcom_smem_get()
691 if (WARN_ON(item >= __smem->item_count)) in qcom_smem_get()
694 ret = hwspin_lock_timeout_irqsave(__smem->hwlock, in qcom_smem_get()
700 if (host < SMEM_HOST_COUNT && __smem->partitions[host].virt_base) { in qcom_smem_get()
701 part = &__smem->partitions[host]; in qcom_smem_get()
702 ptr = qcom_smem_get_private(__smem, part, item, size); in qcom_smem_get()
703 } else if (__smem->global_partition.virt_base) { in qcom_smem_get()
704 part = &__smem->global_partition; in qcom_smem_get()
705 ptr = qcom_smem_get_private(__smem, part, item, size); in qcom_smem_get()
707 ptr = qcom_smem_get_global(__smem, item, size); in qcom_smem_get()
710 hwspin_unlock_irqrestore(__smem->hwlock, &flags); in qcom_smem_get()
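
qcom_smem_get() runs the same host/global dispatch but returns a mapped pointer plus the item's recorded size. A sketch of the retrieval side, assuming the item was reserved as above; the struct layout is purely hypothetical:

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/soc/qcom/smem.h>

struct example_shared {			/* hypothetical item layout */
	__le32 version;
	__le32 flags;
};

static struct example_shared *example_map_item(unsigned int item)
{
	struct example_shared *shared;
	size_t size;

	shared = qcom_smem_get(QCOM_SMEM_HOST_ANY, item, &size);
	if (IS_ERR(shared))
		return shared;		/* e.g. -EPROBE_DEFER before SMEM probes */

	/* The remote side dictates the item size; validate before use. */
	if (size < sizeof(*shared))
		return ERR_PTR(-EINVAL);

	return shared;
}
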
731 if (!__smem) in qcom_smem_get_free_space()
734 if (host < SMEM_HOST_COUNT && __smem->partitions[host].virt_base) { in qcom_smem_get_free_space()
735 part = &__smem->partitions[host]; in qcom_smem_get_free_space()
742 } else if (__smem->global_partition.virt_base) { in qcom_smem_get_free_space()
743 part = &__smem->global_partition; in qcom_smem_get_free_space()
751 header = __smem->regions[0].virt_base; in qcom_smem_get_free_space()
754 if (ret > __smem->regions[0].size) in qcom_smem_get_free_space()
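
qcom_smem_get_free_space() reports the remaining bytes in the caller's private partition, the global partition, or the raw global heap, with the final check above guarding against a corrupt header that claims more free space than the region holds. A small pre-flight helper, sketched on the assumption that the exported signature is int qcom_smem_get_free_space(unsigned host):

#include <linux/types.h>
#include <linux/soc/qcom/smem.h>

static bool example_room_for(size_t size)
{
	int avail;

	avail = qcom_smem_get_free_space(QCOM_SMEM_HOST_ANY);
	if (avail < 0)
		return false;	/* propagated error, e.g. -EINVAL on a bad header */

	return (size_t)avail >= size;
}
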
782 part = &__smem->partitions[i]; in qcom_smem_virt_to_phys()
791 part = &__smem->global_partition; in qcom_smem_virt_to_phys()
799 for (i = 0; i < __smem->num_regions; i++) { in qcom_smem_virt_to_phys()
800 area = &__smem->regions[i]; in qcom_smem_virt_to_phys()
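
qcom_smem_virt_to_phys() does the reverse walk over the partitions and regions, translating a pointer obtained from qcom_smem_get() back into a physical address, which is the form remote processors and DMA engines want. A sketch combining the two exported calls (item number again hypothetical):

#include <linux/err.h>
#include <linux/types.h>
#include <linux/soc/qcom/smem.h>

static int example_item_phys(unsigned int item, phys_addr_t *phys)
{
	size_t size;
	void *ptr;

	ptr = qcom_smem_get(QCOM_SMEM_HOST_ANY, item, &size);
	if (IS_ERR(ptr))
		return PTR_ERR(ptr);

	/* Hand the physical address to firmware/remote-side consumers. */
	*phys = qcom_smem_virt_to_phys(ptr);
	return 0;
}
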
1205 __smem = smem; in qcom_smem_probe()
1218 platform_device_unregister(__smem->socinfo); in qcom_smem_remove()
1220 hwspin_lock_free(__smem->hwlock); in qcom_smem_remove()
1221 __smem = NULL; in qcom_smem_remove()
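
probe and remove bracket the singleton's lifetime: __smem is published only after the regions, partitions and hardware spinlock are set up, and it is cleared again before those resources are released so the NULL checks earlier in the file fail cleanly. A condensed illustration of that publish/unpublish pattern as it would sit inside smem.c itself (struct qcom_smem and its fields are the driver's private state); this is not the actual qcom_smem_probe()/qcom_smem_remove() bodies:

/* Condensed sketch; error handling and most of the real setup omitted. */
static int example_smem_probe(struct platform_device *pdev)
{
	struct qcom_smem *smem;

	smem = devm_kzalloc(&pdev->dev, sizeof(*smem), GFP_KERNEL);
	if (!smem)
		return -ENOMEM;

	/* ... map regions, parse partitions, request the hwspinlock ... */

	__smem = smem;		/* publish: the qcom_smem_*() API now works */
	return 0;
}

static void example_smem_remove(struct platform_device *pdev)
{
	platform_device_unregister(__smem->socinfo);
	hwspin_lock_free(__smem->hwlock);
	__smem = NULL;		/* unpublish before the mappings go away */
}
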