Lines Matching refs:rcd

88 struct hfi1_ctxtdata *rcd; in hfi1_create_kctxt() local
94 ret = hfi1_create_ctxtdata(ppd, dd->node, &rcd); in hfi1_create_kctxt()
105 rcd->flags = HFI1_CAP_KGET(MULTI_PKT_EGR) | in hfi1_create_kctxt()
111 if (rcd->ctxt == HFI1_CTRL_CTXT) in hfi1_create_kctxt()
112 rcd->flags |= HFI1_CAP_DMA_RTAIL; in hfi1_create_kctxt()
113 rcd->fast_handler = get_dma_rtail_setting(rcd) ? in hfi1_create_kctxt()
117 hfi1_set_seq_cnt(rcd, 1); in hfi1_create_kctxt()
119 rcd->sc = sc_alloc(dd, SC_ACK, rcd->rcvhdrqentsize, dd->node); in hfi1_create_kctxt()
120 if (!rcd->sc) { in hfi1_create_kctxt()
124 hfi1_init_ctxt(rcd->sc); in hfi1_create_kctxt()
137 dd->rcd = kcalloc_node(dd->num_rcv_contexts, sizeof(*dd->rcd), in hfi1_create_kctxts()
139 if (!dd->rcd) in hfi1_create_kctxts()
150 for (i = 0; dd->rcd && i < dd->first_dyn_alloc_ctxt; ++i) in hfi1_create_kctxts()
151 hfi1_free_ctxt(dd->rcd[i]); in hfi1_create_kctxts()
154 kfree(dd->rcd); in hfi1_create_kctxts()
155 dd->rcd = NULL; in hfi1_create_kctxts()
162 static void hfi1_rcd_init(struct hfi1_ctxtdata *rcd) in hfi1_rcd_init() argument
164 kref_init(&rcd->kref); in hfi1_rcd_init()
175 struct hfi1_ctxtdata *rcd = in hfi1_rcd_free() local
178 spin_lock_irqsave(&rcd->dd->uctxt_lock, flags); in hfi1_rcd_free()
179 rcd->dd->rcd[rcd->ctxt] = NULL; in hfi1_rcd_free()
180 spin_unlock_irqrestore(&rcd->dd->uctxt_lock, flags); in hfi1_rcd_free()
182 hfi1_free_ctxtdata(rcd->dd, rcd); in hfi1_rcd_free()
184 kfree(rcd); in hfi1_rcd_free()
193 int hfi1_rcd_put(struct hfi1_ctxtdata *rcd) in hfi1_rcd_put() argument
195 if (rcd) in hfi1_rcd_put()
196 return kref_put(&rcd->kref, hfi1_rcd_free); in hfi1_rcd_put()
210 int hfi1_rcd_get(struct hfi1_ctxtdata *rcd) in hfi1_rcd_get() argument
212 return kref_get_unless_zero(&rcd->kref); in hfi1_rcd_get()
226 struct hfi1_ctxtdata *rcd, u16 *index) in allocate_rcd_index() argument
233 if (!dd->rcd[ctxt]) in allocate_rcd_index()
237 rcd->ctxt = ctxt; in allocate_rcd_index()
238 dd->rcd[ctxt] = rcd; in allocate_rcd_index()
239 hfi1_rcd_init(rcd); in allocate_rcd_index()
287 struct hfi1_ctxtdata *rcd = NULL; in hfi1_rcd_get_by_index() local
290 if (dd->rcd[ctxt]) { in hfi1_rcd_get_by_index()
291 rcd = dd->rcd[ctxt]; in hfi1_rcd_get_by_index()
292 if (!hfi1_rcd_get(rcd)) in hfi1_rcd_get_by_index()
293 rcd = NULL; in hfi1_rcd_get_by_index()
297 return rcd; in hfi1_rcd_get_by_index()
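
The hfi1_rcd_get()/hfi1_rcd_put() wrappers and hfi1_rcd_get_by_index() above follow the usual kref lookup pattern: the dd->rcd[] slot is checked under the device's uctxt_lock (the same lock hfi1_rcd_free() takes before clearing the slot), and a context is returned only if its reference count can still be raised from a non-zero value, so a context that is mid-teardown is never handed out. Below is a minimal user-space sketch of that pattern; the names (ctxt, table, table_lock, ctxt_get_by_index) are illustrative stand-ins, not the driver's API.

#include <pthread.h>
#include <stdatomic.h>

struct ctxt {
	atomic_int refs;			/* plays the role of rcd->kref */
};

static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;
static struct ctxt *table[16];			/* plays the role of dd->rcd[] */

/* Raise a non-zero refcount; fail if the object is already being torn down. */
static int ctxt_get_unless_zero(struct ctxt *c)
{
	int old = atomic_load(&c->refs);

	while (old > 0) {
		if (atomic_compare_exchange_weak(&c->refs, &old, old + 1))
			return 1;
	}
	return 0;
}

/* Look up a slot and return it with a reference held, or NULL. */
static struct ctxt *ctxt_get_by_index(unsigned int idx)
{
	struct ctxt *c = NULL;

	pthread_mutex_lock(&table_lock);
	if (idx < 16 && table[idx] && ctxt_get_unless_zero(table[idx]))
		c = table[idx];
	pthread_mutex_unlock(&table_lock);
	return c;				/* caller must drop the reference when done */
}

A successful lookup must be balanced by a put, which is the hfi1_rcd_get_by_index()/hfi1_rcd_put() pairing visible in init_after_reset(), enable_chip(), hfi1_init() and shutdown_device() further down in this listing.
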
308 struct hfi1_ctxtdata *rcd; in hfi1_create_ctxtdata() local
316 rcd = kzalloc_node(sizeof(*rcd), GFP_KERNEL, numa); in hfi1_create_ctxtdata()
317 if (rcd) { in hfi1_create_ctxtdata()
322 ret = allocate_rcd_index(dd, rcd, &ctxt); in hfi1_create_ctxtdata()
325 kfree(rcd); in hfi1_create_ctxtdata()
329 INIT_LIST_HEAD(&rcd->qp_wait_list); in hfi1_create_ctxtdata()
330 hfi1_exp_tid_group_init(rcd); in hfi1_create_ctxtdata()
331 rcd->ppd = ppd; in hfi1_create_ctxtdata()
332 rcd->dd = dd; in hfi1_create_ctxtdata()
333 rcd->numa_id = numa; in hfi1_create_ctxtdata()
334 rcd->rcv_array_groups = dd->rcv_entries.ngroups; in hfi1_create_ctxtdata()
335 rcd->rhf_rcv_function_map = normal_rhf_rcv_functions; in hfi1_create_ctxtdata()
336 rcd->slow_handler = handle_receive_interrupt; in hfi1_create_ctxtdata()
337 rcd->do_interrupt = rcd->slow_handler; in hfi1_create_ctxtdata()
338 rcd->msix_intr = CCE_NUM_MSIX_VECTORS; in hfi1_create_ctxtdata()
340 mutex_init(&rcd->exp_mutex); in hfi1_create_ctxtdata()
341 spin_lock_init(&rcd->exp_lock); in hfi1_create_ctxtdata()
342 INIT_LIST_HEAD(&rcd->flow_queue.queue_head); in hfi1_create_ctxtdata()
343 INIT_LIST_HEAD(&rcd->rarr_queue.queue_head); in hfi1_create_ctxtdata()
345 hfi1_cdbg(PROC, "setting up context %u", rcd->ctxt); in hfi1_create_ctxtdata()
357 rcd->rcv_array_groups++; in hfi1_create_ctxtdata()
369 rcd->rcv_array_groups++; in hfi1_create_ctxtdata()
375 rcd->eager_base = base * dd->rcv_entries.group_size; in hfi1_create_ctxtdata()
377 rcd->rcvhdrq_cnt = rcvhdrcnt; in hfi1_create_ctxtdata()
378 rcd->rcvhdrqentsize = hfi1_hdrq_entsize; in hfi1_create_ctxtdata()
379 rcd->rhf_offset = in hfi1_create_ctxtdata()
380 rcd->rcvhdrqentsize - sizeof(u64) / sizeof(u32); in hfi1_create_ctxtdata()
392 max_entries = rcd->rcv_array_groups * in hfi1_create_ctxtdata()
395 rcd->egrbufs.count = round_down(rcvtids, in hfi1_create_ctxtdata()
397 if (rcd->egrbufs.count > MAX_EAGER_ENTRIES) { in hfi1_create_ctxtdata()
399 rcd->ctxt); in hfi1_create_ctxtdata()
400 rcd->egrbufs.count = MAX_EAGER_ENTRIES; in hfi1_create_ctxtdata()
404 rcd->ctxt, rcd->egrbufs.count); in hfi1_create_ctxtdata()
414 rcd->egrbufs.buffers = in hfi1_create_ctxtdata()
415 kcalloc_node(rcd->egrbufs.count, in hfi1_create_ctxtdata()
416 sizeof(*rcd->egrbufs.buffers), in hfi1_create_ctxtdata()
418 if (!rcd->egrbufs.buffers) in hfi1_create_ctxtdata()
420 rcd->egrbufs.rcvtids = in hfi1_create_ctxtdata()
421 kcalloc_node(rcd->egrbufs.count, in hfi1_create_ctxtdata()
422 sizeof(*rcd->egrbufs.rcvtids), in hfi1_create_ctxtdata()
424 if (!rcd->egrbufs.rcvtids) in hfi1_create_ctxtdata()
426 rcd->egrbufs.size = eager_buffer_size; in hfi1_create_ctxtdata()
432 if (rcd->egrbufs.size < hfi1_max_mtu) { in hfi1_create_ctxtdata()
433 rcd->egrbufs.size = __roundup_pow_of_two(hfi1_max_mtu); in hfi1_create_ctxtdata()
436 rcd->ctxt, rcd->egrbufs.size); in hfi1_create_ctxtdata()
438 rcd->egrbufs.rcvtid_size = HFI1_MAX_EAGER_BUFFER_SIZE; in hfi1_create_ctxtdata()
442 rcd->opstats = kzalloc_node(sizeof(*rcd->opstats), in hfi1_create_ctxtdata()
444 if (!rcd->opstats) in hfi1_create_ctxtdata()
448 hfi1_kern_init_ctxt_generations(rcd); in hfi1_create_ctxtdata()
451 *context = rcd; in hfi1_create_ctxtdata()
457 hfi1_free_ctxt(rcd); in hfi1_create_ctxtdata()
471 void hfi1_free_ctxt(struct hfi1_ctxtdata *rcd) in hfi1_free_ctxt() argument
473 hfi1_rcd_put(rcd); in hfi1_free_ctxt()
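
hfi1_free_ctxt() is a thin wrapper around hfi1_rcd_put(), which explains the two different error paths in hfi1_create_ctxtdata() above: before allocate_rcd_index() has published the context in dd->rcd[] and initialized its kref, a failure is unwound with a plain kfree(); after that point every failure goes through hfi1_free_ctxt(), so the kref release callback (hfi1_rcd_free()) is the single place that unpublishes and frees the context. A hypothetical, simplified sketch of that split, using stand-in names rather than the driver's API (and without the locking the driver uses):

#include <stdlib.h>

struct ctxt {
	int refs;			/* stands in for the kref */
	int slot;
};

static struct ctxt *table[16];		/* stands in for dd->rcd[] */

/* Publish the context in a free slot and start its refcount at 1. */
static int publish(struct ctxt *c)
{
	for (int i = 0; i < 16; i++) {
		if (!table[i]) {
			table[i] = c;
			c->slot = i;
			c->refs = 1;
			return 0;
		}
	}
	return -1;
}

/* Drop a reference; the last put unpublishes and frees the object. */
static void ctxt_put(struct ctxt *c)
{
	if (--c->refs == 0) {
		table[c->slot] = NULL;
		free(c);
	}
}

static struct ctxt *ctxt_create(void)
{
	struct ctxt *c = calloc(1, sizeof(*c));

	if (!c)
		return NULL;
	if (publish(c) < 0) {
		free(c);		/* not yet refcounted: plain free */
		return NULL;
	}
	/* ... further setup; any failure from here on unwinds via ctxt_put() ... */
	return c;
}

The same rule shows up in cleanup_device_data() below, which tears contexts down with hfi1_free_ctxt() rather than freeing dd->rcd[ctxt] directly.
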
680 struct hfi1_ctxtdata *rcd; in init_after_reset() local
687 rcd = hfi1_rcd_get_by_index(dd, i); in init_after_reset()
690 HFI1_RCVCTRL_TAILUPD_DIS, rcd); in init_after_reset()
691 hfi1_rcd_put(rcd); in init_after_reset()
702 struct hfi1_ctxtdata *rcd; in enable_chip() local
714 rcd = hfi1_rcd_get_by_index(dd, i); in enable_chip()
715 if (!rcd) in enable_chip()
718 rcvmask |= HFI1_CAP_KGET_MASK(rcd->flags, DMA_RTAIL) ? in enable_chip()
720 if (!HFI1_CAP_KGET_MASK(rcd->flags, MULTI_PKT_EGR)) in enable_chip()
722 if (HFI1_CAP_KGET_MASK(rcd->flags, NODROP_RHQ_FULL)) in enable_chip()
724 if (HFI1_CAP_KGET_MASK(rcd->flags, NODROP_EGR_FULL)) in enable_chip()
728 hfi1_rcvctrl(dd, rcvmask, rcd); in enable_chip()
729 sc_enable(rcd->sc); in enable_chip()
730 hfi1_rcd_put(rcd); in enable_chip()
848 struct hfi1_ctxtdata *rcd; in hfi1_init() local
879 for (i = 0; dd->rcd && i < dd->first_dyn_alloc_ctxt; ++i) { in hfi1_init()
886 rcd = hfi1_rcd_get_by_index(dd, i); in hfi1_init()
887 if (!rcd) in hfi1_init()
890 lastfail = hfi1_create_rcvhdrq(dd, rcd); in hfi1_init()
892 lastfail = hfi1_setup_eagerbufs(rcd); in hfi1_init()
894 lastfail = hfi1_kern_exp_rcv_init(rcd, reinit); in hfi1_init()
901 hfi1_rcd_put(rcd); in hfi1_init()
1007 struct hfi1_ctxtdata *rcd; in shutdown_device() local
1032 rcd = hfi1_rcd_get_by_index(dd, i); in shutdown_device()
1037 HFI1_RCVCTRL_ONE_PKT_EGR_DIS, rcd); in shutdown_device()
1038 hfi1_rcd_put(rcd); in shutdown_device()
1086 void hfi1_free_ctxtdata(struct hfi1_devdata *dd, struct hfi1_ctxtdata *rcd) in hfi1_free_ctxtdata() argument
1090 if (!rcd) in hfi1_free_ctxtdata()
1093 if (rcd->rcvhdrq) { in hfi1_free_ctxtdata()
1094 dma_free_coherent(&dd->pcidev->dev, rcvhdrq_size(rcd), in hfi1_free_ctxtdata()
1095 rcd->rcvhdrq, rcd->rcvhdrq_dma); in hfi1_free_ctxtdata()
1096 rcd->rcvhdrq = NULL; in hfi1_free_ctxtdata()
1097 if (hfi1_rcvhdrtail_kvaddr(rcd)) { in hfi1_free_ctxtdata()
1099 (void *)hfi1_rcvhdrtail_kvaddr(rcd), in hfi1_free_ctxtdata()
1100 rcd->rcvhdrqtailaddr_dma); in hfi1_free_ctxtdata()
1101 rcd->rcvhdrtail_kvaddr = NULL; in hfi1_free_ctxtdata()
1106 kfree(rcd->egrbufs.rcvtids); in hfi1_free_ctxtdata()
1107 rcd->egrbufs.rcvtids = NULL; in hfi1_free_ctxtdata()
1109 for (e = 0; e < rcd->egrbufs.alloced; e++) { in hfi1_free_ctxtdata()
1110 if (rcd->egrbufs.buffers[e].addr) in hfi1_free_ctxtdata()
1112 rcd->egrbufs.buffers[e].len, in hfi1_free_ctxtdata()
1113 rcd->egrbufs.buffers[e].addr, in hfi1_free_ctxtdata()
1114 rcd->egrbufs.buffers[e].dma); in hfi1_free_ctxtdata()
1116 kfree(rcd->egrbufs.buffers); in hfi1_free_ctxtdata()
1117 rcd->egrbufs.alloced = 0; in hfi1_free_ctxtdata()
1118 rcd->egrbufs.buffers = NULL; in hfi1_free_ctxtdata()
1120 sc_free(rcd->sc); in hfi1_free_ctxtdata()
1121 rcd->sc = NULL; in hfi1_free_ctxtdata()
1123 vfree(rcd->subctxt_uregbase); in hfi1_free_ctxtdata()
1124 vfree(rcd->subctxt_rcvegrbuf); in hfi1_free_ctxtdata()
1125 vfree(rcd->subctxt_rcvhdr_base); in hfi1_free_ctxtdata()
1126 kfree(rcd->opstats); in hfi1_free_ctxtdata()
1128 rcd->subctxt_uregbase = NULL; in hfi1_free_ctxtdata()
1129 rcd->subctxt_rcvegrbuf = NULL; in hfi1_free_ctxtdata()
1130 rcd->subctxt_rcvhdr_base = NULL; in hfi1_free_ctxtdata()
1131 rcd->opstats = NULL; in hfi1_free_ctxtdata()
1513 for (ctxt = 0; dd->rcd && ctxt < dd->num_rcv_contexts; ctxt++) { in cleanup_device_data()
1514 struct hfi1_ctxtdata *rcd = dd->rcd[ctxt]; in cleanup_device_data() local
1516 if (rcd) { in cleanup_device_data()
1517 hfi1_free_ctxt_rcv_groups(rcd); in cleanup_device_data()
1518 hfi1_free_ctxt(rcd); in cleanup_device_data()
1522 kfree(dd->rcd); in cleanup_device_data()
1523 dd->rcd = NULL; in cleanup_device_data()
1759 int hfi1_create_rcvhdrq(struct hfi1_devdata *dd, struct hfi1_ctxtdata *rcd) in hfi1_create_rcvhdrq() argument
1763 if (!rcd->rcvhdrq) { in hfi1_create_rcvhdrq()
1764 amt = rcvhdrq_size(rcd); in hfi1_create_rcvhdrq()
1766 rcd->rcvhdrq = dma_alloc_coherent(&dd->pcidev->dev, amt, in hfi1_create_rcvhdrq()
1767 &rcd->rcvhdrq_dma, in hfi1_create_rcvhdrq()
1770 if (!rcd->rcvhdrq) { in hfi1_create_rcvhdrq()
1773 amt, rcd->ctxt); in hfi1_create_rcvhdrq()
1777 if (HFI1_CAP_KGET_MASK(rcd->flags, DMA_RTAIL) || in hfi1_create_rcvhdrq()
1778 HFI1_CAP_UGET_MASK(rcd->flags, DMA_RTAIL)) { in hfi1_create_rcvhdrq()
1779 rcd->rcvhdrtail_kvaddr = dma_alloc_coherent(&dd->pcidev->dev, in hfi1_create_rcvhdrq()
1781 &rcd->rcvhdrqtailaddr_dma, in hfi1_create_rcvhdrq()
1783 if (!rcd->rcvhdrtail_kvaddr) in hfi1_create_rcvhdrq()
1788 set_hdrq_regs(rcd->dd, rcd->ctxt, rcd->rcvhdrqentsize, in hfi1_create_rcvhdrq()
1789 rcd->rcvhdrq_cnt); in hfi1_create_rcvhdrq()
1796 rcd->ctxt); in hfi1_create_rcvhdrq()
1797 dma_free_coherent(&dd->pcidev->dev, amt, rcd->rcvhdrq, in hfi1_create_rcvhdrq()
1798 rcd->rcvhdrq_dma); in hfi1_create_rcvhdrq()
1799 rcd->rcvhdrq = NULL; in hfi1_create_rcvhdrq()
1814 int hfi1_setup_eagerbufs(struct hfi1_ctxtdata *rcd) in hfi1_setup_eagerbufs() argument
1816 struct hfi1_devdata *dd = rcd->dd; in hfi1_setup_eagerbufs()
1829 if (rcd->egrbufs.size < (round_mtu * dd->rcv_entries.group_size)) in hfi1_setup_eagerbufs()
1830 rcd->egrbufs.size = round_mtu * dd->rcv_entries.group_size; in hfi1_setup_eagerbufs()
1835 if (!HFI1_CAP_KGET_MASK(rcd->flags, MULTI_PKT_EGR)) in hfi1_setup_eagerbufs()
1836 rcd->egrbufs.rcvtid_size = round_mtu; in hfi1_setup_eagerbufs()
1842 if (rcd->egrbufs.size <= (1 << 20)) in hfi1_setup_eagerbufs()
1843 rcd->egrbufs.rcvtid_size = max((unsigned long)round_mtu, in hfi1_setup_eagerbufs()
1844 rounddown_pow_of_two(rcd->egrbufs.size / 8)); in hfi1_setup_eagerbufs()
1846 while (alloced_bytes < rcd->egrbufs.size && in hfi1_setup_eagerbufs()
1847 rcd->egrbufs.alloced < rcd->egrbufs.count) { in hfi1_setup_eagerbufs()
1848 rcd->egrbufs.buffers[idx].addr = in hfi1_setup_eagerbufs()
1850 rcd->egrbufs.rcvtid_size, in hfi1_setup_eagerbufs()
1851 &rcd->egrbufs.buffers[idx].dma, in hfi1_setup_eagerbufs()
1853 if (rcd->egrbufs.buffers[idx].addr) { in hfi1_setup_eagerbufs()
1854 rcd->egrbufs.buffers[idx].len = in hfi1_setup_eagerbufs()
1855 rcd->egrbufs.rcvtid_size; in hfi1_setup_eagerbufs()
1856 rcd->egrbufs.rcvtids[rcd->egrbufs.alloced].addr = in hfi1_setup_eagerbufs()
1857 rcd->egrbufs.buffers[idx].addr; in hfi1_setup_eagerbufs()
1858 rcd->egrbufs.rcvtids[rcd->egrbufs.alloced].dma = in hfi1_setup_eagerbufs()
1859 rcd->egrbufs.buffers[idx].dma; in hfi1_setup_eagerbufs()
1860 rcd->egrbufs.alloced++; in hfi1_setup_eagerbufs()
1861 alloced_bytes += rcd->egrbufs.rcvtid_size; in hfi1_setup_eagerbufs()
1873 if (rcd->egrbufs.rcvtid_size == round_mtu || in hfi1_setup_eagerbufs()
1874 !HFI1_CAP_KGET_MASK(rcd->flags, MULTI_PKT_EGR)) { in hfi1_setup_eagerbufs()
1876 rcd->ctxt); in hfi1_setup_eagerbufs()
1881 new_size = rcd->egrbufs.rcvtid_size / 2; in hfi1_setup_eagerbufs()
1889 rcd->egrbufs.rcvtid_size = new_size; in hfi1_setup_eagerbufs()
1897 rcd->egrbufs.alloced = 0; in hfi1_setup_eagerbufs()
1899 if (i >= rcd->egrbufs.count) in hfi1_setup_eagerbufs()
1901 rcd->egrbufs.rcvtids[i].dma = in hfi1_setup_eagerbufs()
1902 rcd->egrbufs.buffers[j].dma + offset; in hfi1_setup_eagerbufs()
1903 rcd->egrbufs.rcvtids[i].addr = in hfi1_setup_eagerbufs()
1904 rcd->egrbufs.buffers[j].addr + offset; in hfi1_setup_eagerbufs()
1905 rcd->egrbufs.alloced++; in hfi1_setup_eagerbufs()
1906 if ((rcd->egrbufs.buffers[j].dma + offset + in hfi1_setup_eagerbufs()
1908 (rcd->egrbufs.buffers[j].dma + in hfi1_setup_eagerbufs()
1909 rcd->egrbufs.buffers[j].len)) { in hfi1_setup_eagerbufs()
1916 rcd->egrbufs.rcvtid_size = new_size; in hfi1_setup_eagerbufs()
1919 rcd->egrbufs.numbufs = idx; in hfi1_setup_eagerbufs()
1920 rcd->egrbufs.size = alloced_bytes; in hfi1_setup_eagerbufs()
1924 rcd->ctxt, rcd->egrbufs.alloced, in hfi1_setup_eagerbufs()
1925 rcd->egrbufs.rcvtid_size / 1024, rcd->egrbufs.size / 1024); in hfi1_setup_eagerbufs()
1932 rcd->egrbufs.threshold = in hfi1_setup_eagerbufs()
1933 rounddown_pow_of_two(rcd->egrbufs.alloced / 2); in hfi1_setup_eagerbufs()
1939 max_entries = rcd->rcv_array_groups * dd->rcv_entries.group_size; in hfi1_setup_eagerbufs()
1940 egrtop = roundup(rcd->egrbufs.alloced, dd->rcv_entries.group_size); in hfi1_setup_eagerbufs()
1941 rcd->expected_count = max_entries - egrtop; in hfi1_setup_eagerbufs()
1942 if (rcd->expected_count > MAX_TID_PAIR_ENTRIES * 2) in hfi1_setup_eagerbufs()
1943 rcd->expected_count = MAX_TID_PAIR_ENTRIES * 2; in hfi1_setup_eagerbufs()
1945 rcd->expected_base = rcd->eager_base + egrtop; in hfi1_setup_eagerbufs()
1947 rcd->ctxt, rcd->egrbufs.alloced, rcd->expected_count, in hfi1_setup_eagerbufs()
1948 rcd->eager_base, rcd->expected_base); in hfi1_setup_eagerbufs()
1950 if (!hfi1_rcvbuf_validate(rcd->egrbufs.rcvtid_size, PT_EAGER, &order)) { in hfi1_setup_eagerbufs()
1953 rcd->ctxt, rcd->egrbufs.rcvtid_size); in hfi1_setup_eagerbufs()
1958 for (idx = 0; idx < rcd->egrbufs.alloced; idx++) { in hfi1_setup_eagerbufs()
1959 hfi1_put_tid(dd, rcd->eager_base + idx, PT_EAGER, in hfi1_setup_eagerbufs()
1960 rcd->egrbufs.rcvtids[idx].dma, order); in hfi1_setup_eagerbufs()
1967 for (idx = 0; idx < rcd->egrbufs.alloced && in hfi1_setup_eagerbufs()
1968 rcd->egrbufs.buffers[idx].addr; in hfi1_setup_eagerbufs()
1971 rcd->egrbufs.buffers[idx].len, in hfi1_setup_eagerbufs()
1972 rcd->egrbufs.buffers[idx].addr, in hfi1_setup_eagerbufs()
1973 rcd->egrbufs.buffers[idx].dma); in hfi1_setup_eagerbufs()
1974 rcd->egrbufs.buffers[idx].addr = NULL; in hfi1_setup_eagerbufs()
1975 rcd->egrbufs.buffers[idx].dma = 0; in hfi1_setup_eagerbufs()
1976 rcd->egrbufs.buffers[idx].len = 0; in hfi1_setup_eagerbufs()
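
The hfi1_setup_eagerbufs() lines above capture the fallback strategy: eager buffers are allocated in rcvtid_size chunks, and when an allocation fails the chunk size is halved (unless it is already down to the rounded MTU or multi-packet eager buffers are disabled), after which the buffers already obtained are re-sliced into the smaller size by stepping through each one at fixed offsets, filling rcd->egrbufs.rcvtids[] with per-entry addr/dma pairs. A simplified, hypothetical sketch of that re-slicing step, using stand-in types rather than the driver's rcd->egrbufs structures:

#include <stddef.h>
#include <stdint.h>

struct buf   { void *addr; uint64_t dma; size_t len; };
struct entry { void *addr; uint64_t dma; };

/* Carve 'nbufs' large buffers into 'entry_size' pieces, up to 'max' entries. */
static size_t slice_buffers(const struct buf *bufs, size_t nbufs,
			    size_t entry_size,
			    struct entry *out, size_t max)
{
	size_t n = 0;

	for (size_t j = 0; j < nbufs; j++) {
		for (size_t off = 0;
		     off + entry_size <= bufs[j].len && n < max;
		     off += entry_size) {
			out[n].addr = (char *)bufs[j].addr + off;
			out[n].dma  = bufs[j].dma + off;
			n++;
		}
	}
	return n;	/* number of receive-TID entries produced */
}

In the driver, the resulting entry count then feeds the eager/expected split (expected_count, expected_base) and each entry is programmed into the receive array via hfi1_put_tid(), as the later lines in this listing show.
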