xref: /openbmc/linux/drivers/net/wireless/ath/ath12k/hal.c (revision d8899132)
1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3  * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
4  * Copyright (c) 2021-2022 Qualcomm Innovation Center, Inc. All rights reserved.
5  */
6 #include <linux/dma-mapping.h>
7 #include "hal_tx.h"
8 #include "hal_rx.h"
9 #include "debug.h"
10 #include "hal_desc.h"
11 #include "hif.h"
12 
13 static const struct hal_srng_config hw_srng_config_template[] = {
14 	/* TODO: max_rings can be populated by querying HW capabilities */
15 	[HAL_REO_DST] = {
16 		.start_ring_id = HAL_SRNG_RING_ID_REO2SW1,
17 		.max_rings = 8,
18 		.entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
19 		.mac_type = ATH12K_HAL_SRNG_UMAC,
20 		.ring_dir = HAL_SRNG_DIR_DST,
21 		.max_size = HAL_REO_REO2SW1_RING_BASE_MSB_RING_SIZE,
22 	},
23 	[HAL_REO_EXCEPTION] = {
24 		/* Designating REO2SW0 ring as exception ring.
25 		 * Any of the REO2SW rings can be used as the exception ring.
26 		 */
27 		.start_ring_id = HAL_SRNG_RING_ID_REO2SW0,
28 		.max_rings = 1,
29 		.entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
30 		.mac_type = ATH12K_HAL_SRNG_UMAC,
31 		.ring_dir = HAL_SRNG_DIR_DST,
32 		.max_size = HAL_REO_REO2SW0_RING_BASE_MSB_RING_SIZE,
33 	},
34 	[HAL_REO_REINJECT] = {
35 		.start_ring_id = HAL_SRNG_RING_ID_SW2REO,
36 		.max_rings = 4,
37 		.entry_size = sizeof(struct hal_reo_entrance_ring) >> 2,
38 		.mac_type = ATH12K_HAL_SRNG_UMAC,
39 		.ring_dir = HAL_SRNG_DIR_SRC,
40 		.max_size = HAL_REO_SW2REO_RING_BASE_MSB_RING_SIZE,
41 	},
42 	[HAL_REO_CMD] = {
43 		.start_ring_id = HAL_SRNG_RING_ID_REO_CMD,
44 		.max_rings = 1,
45 		.entry_size = (sizeof(struct hal_tlv_64_hdr) +
46 			sizeof(struct hal_reo_get_queue_stats)) >> 2,
47 		.mac_type = ATH12K_HAL_SRNG_UMAC,
48 		.ring_dir = HAL_SRNG_DIR_SRC,
49 		.max_size = HAL_REO_CMD_RING_BASE_MSB_RING_SIZE,
50 	},
51 	[HAL_REO_STATUS] = {
52 		.start_ring_id = HAL_SRNG_RING_ID_REO_STATUS,
53 		.max_rings = 1,
54 		.entry_size = (sizeof(struct hal_tlv_64_hdr) +
55 			sizeof(struct hal_reo_get_queue_stats_status)) >> 2,
56 		.mac_type = ATH12K_HAL_SRNG_UMAC,
57 		.ring_dir = HAL_SRNG_DIR_DST,
58 		.max_size = HAL_REO_STATUS_RING_BASE_MSB_RING_SIZE,
59 	},
60 	[HAL_TCL_DATA] = {
61 		.start_ring_id = HAL_SRNG_RING_ID_SW2TCL1,
62 		.max_rings = 6,
63 		.entry_size = sizeof(struct hal_tcl_data_cmd) >> 2,
64 		.mac_type = ATH12K_HAL_SRNG_UMAC,
65 		.ring_dir = HAL_SRNG_DIR_SRC,
66 		.max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
67 	},
68 	[HAL_TCL_CMD] = {
69 		.start_ring_id = HAL_SRNG_RING_ID_SW2TCL_CMD,
70 		.max_rings = 1,
71 		.entry_size = sizeof(struct hal_tcl_gse_cmd) >> 2,
72 		.mac_type = ATH12K_HAL_SRNG_UMAC,
73 		.ring_dir = HAL_SRNG_DIR_SRC,
74 		.max_size = HAL_SW2TCL1_CMD_RING_BASE_MSB_RING_SIZE,
75 	},
76 	[HAL_TCL_STATUS] = {
77 		.start_ring_id = HAL_SRNG_RING_ID_TCL_STATUS,
78 		.max_rings = 1,
79 		.entry_size = (sizeof(struct hal_tlv_hdr) +
80 			     sizeof(struct hal_tcl_status_ring)) >> 2,
81 		.mac_type = ATH12K_HAL_SRNG_UMAC,
82 		.ring_dir = HAL_SRNG_DIR_DST,
83 		.max_size = HAL_TCL_STATUS_RING_BASE_MSB_RING_SIZE,
84 	},
85 	[HAL_CE_SRC] = {
86 		.start_ring_id = HAL_SRNG_RING_ID_CE0_SRC,
87 		.max_rings = 16,
88 		.entry_size = sizeof(struct hal_ce_srng_src_desc) >> 2,
89 		.mac_type = ATH12K_HAL_SRNG_UMAC,
90 		.ring_dir = HAL_SRNG_DIR_SRC,
91 		.max_size = HAL_CE_SRC_RING_BASE_MSB_RING_SIZE,
92 	},
93 	[HAL_CE_DST] = {
94 		.start_ring_id = HAL_SRNG_RING_ID_CE0_DST,
95 		.max_rings = 16,
96 		.entry_size = sizeof(struct hal_ce_srng_dest_desc) >> 2,
97 		.mac_type = ATH12K_HAL_SRNG_UMAC,
98 		.ring_dir = HAL_SRNG_DIR_SRC,
99 		.max_size = HAL_CE_DST_RING_BASE_MSB_RING_SIZE,
100 	},
101 	[HAL_CE_DST_STATUS] = {
102 		.start_ring_id = HAL_SRNG_RING_ID_CE0_DST_STATUS,
103 		.max_rings = 16,
104 		.entry_size = sizeof(struct hal_ce_srng_dst_status_desc) >> 2,
105 		.mac_type = ATH12K_HAL_SRNG_UMAC,
106 		.ring_dir = HAL_SRNG_DIR_DST,
107 		.max_size = HAL_CE_DST_STATUS_RING_BASE_MSB_RING_SIZE,
108 	},
109 	[HAL_WBM_IDLE_LINK] = {
110 		.start_ring_id = HAL_SRNG_RING_ID_WBM_IDLE_LINK,
111 		.max_rings = 1,
112 		.entry_size = sizeof(struct hal_wbm_link_desc) >> 2,
113 		.mac_type = ATH12K_HAL_SRNG_UMAC,
114 		.ring_dir = HAL_SRNG_DIR_SRC,
115 		.max_size = HAL_WBM_IDLE_LINK_RING_BASE_MSB_RING_SIZE,
116 	},
117 	[HAL_SW2WBM_RELEASE] = {
118 		.start_ring_id = HAL_SRNG_RING_ID_WBM_SW0_RELEASE,
119 		.max_rings = 2,
120 		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
121 		.mac_type = ATH12K_HAL_SRNG_UMAC,
122 		.ring_dir = HAL_SRNG_DIR_SRC,
123 		.max_size = HAL_SW2WBM_RELEASE_RING_BASE_MSB_RING_SIZE,
124 	},
125 	[HAL_WBM2SW_RELEASE] = {
126 		.start_ring_id = HAL_SRNG_RING_ID_WBM2SW0_RELEASE,
127 		.max_rings = 8,
128 		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
129 		.mac_type = ATH12K_HAL_SRNG_UMAC,
130 		.ring_dir = HAL_SRNG_DIR_DST,
131 		.max_size = HAL_WBM2SW_RELEASE_RING_BASE_MSB_RING_SIZE,
132 	},
133 	[HAL_RXDMA_BUF] = {
134 		.start_ring_id = HAL_SRNG_SW2RXDMA_BUF0,
135 		.max_rings = 1,
136 		.entry_size = sizeof(struct hal_wbm_buffer_ring) >> 2,
137 		.mac_type = ATH12K_HAL_SRNG_DMAC,
138 		.ring_dir = HAL_SRNG_DIR_SRC,
139 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
140 	},
141 	[HAL_RXDMA_DST] = {
142 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_RXDMA2SW0,
143 		.max_rings = 0,
144 		.entry_size = 0,
145 		.mac_type = ATH12K_HAL_SRNG_PMAC,
146 		.ring_dir = HAL_SRNG_DIR_DST,
147 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
148 	},
149 	[HAL_RXDMA_MONITOR_BUF] = {
150 		.start_ring_id = HAL_SRNG_SW2RXMON_BUF0,
151 		.max_rings = 1,
152 		.entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
153 		.mac_type = ATH12K_HAL_SRNG_PMAC,
154 		.ring_dir = HAL_SRNG_DIR_SRC,
155 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
156 	},
157 	[HAL_RXDMA_MONITOR_STATUS] = { 0, },
158 	[HAL_RXDMA_MONITOR_DESC] = { 0, },
159 	[HAL_RXDMA_DIR_BUF] = {
160 		.start_ring_id = HAL_SRNG_RING_ID_RXDMA_DIR_BUF,
161 		.max_rings = 2,
162 		.entry_size = 8 >> 2, /* TODO: Define the struct */
163 		.mac_type = ATH12K_HAL_SRNG_PMAC,
164 		.ring_dir = HAL_SRNG_DIR_SRC,
165 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
166 	},
167 	[HAL_PPE2TCL] = {
168 		.start_ring_id = HAL_SRNG_RING_ID_PPE2TCL1,
169 		.max_rings = 1,
170 		.entry_size = sizeof(struct hal_tcl_entrance_from_ppe_ring) >> 2,
171 		.mac_type = ATH12K_HAL_SRNG_PMAC,
172 		.ring_dir = HAL_SRNG_DIR_SRC,
173 		.max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
174 	},
175 	[HAL_PPE_RELEASE] = {
176 		.start_ring_id = HAL_SRNG_RING_ID_WBM_PPE_RELEASE,
177 		.max_rings = 1,
178 		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
179 		.mac_type = ATH12K_HAL_SRNG_PMAC,
180 		.ring_dir = HAL_SRNG_DIR_SRC,
181 		.max_size = HAL_WBM2PPE_RELEASE_RING_BASE_MSB_RING_SIZE,
182 	},
183 	[HAL_TX_MONITOR_BUF] = {
184 		.start_ring_id = HAL_SRNG_SW2TXMON_BUF0,
185 		.max_rings = 1,
186 		.entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
187 		.mac_type = ATH12K_HAL_SRNG_PMAC,
188 		.ring_dir = HAL_SRNG_DIR_SRC,
189 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
190 	},
191 	[HAL_RXDMA_MONITOR_DST] = {
192 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_SW2RXMON_BUF0,
193 		.max_rings = 1,
194 		.entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
195 		.mac_type = ATH12K_HAL_SRNG_PMAC,
196 		.ring_dir = HAL_SRNG_DIR_DST,
197 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
198 	},
199 	[HAL_TX_MONITOR_DST] = {
200 		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_TXMON2SW0_BUF0,
201 		.max_rings = 1,
202 		.entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
203 		.mac_type = ATH12K_HAL_SRNG_PMAC,
204 		.ring_dir = HAL_SRNG_DIR_DST,
205 		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
206 	}
207 };
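
/*
 * Note on units in the template above: entry_size and max_size are both
 * expressed in 32-bit words, which is why every entry_size is computed as
 * sizeof(<descriptor struct>) >> 2.  Illustrative example for the REO
 * destination ring:
 *
 *	.entry_size = sizeof(struct hal_reo_dest_ring) >> 2,	// words
 *	bytes = entry_size << 2;				// back to bytes
 *
 * ath12k_hal_srng_get_entrysize() below converts entry_size back to bytes,
 * and ath12k_hal_srng_get_max_entries() derives the entry count as
 * max_size / entry_size.
 */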
208 
209 static const struct ath12k_hal_tcl_to_wbm_rbm_map
210 ath12k_hal_qcn9274_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
211 	{
212 		.wbm_ring_num = 0,
213 		.rbm_id = HAL_RX_BUF_RBM_SW0_BM,
214 	},
215 	{
216 		.wbm_ring_num = 1,
217 		.rbm_id = HAL_RX_BUF_RBM_SW1_BM,
218 	},
219 	{
220 		.wbm_ring_num = 2,
221 		.rbm_id = HAL_RX_BUF_RBM_SW2_BM,
222 	},
223 	{
224 		.wbm_ring_num = 4,
225 		.rbm_id = HAL_RX_BUF_RBM_SW4_BM,
226 	}
227 };
228 
229 static const struct ath12k_hal_tcl_to_wbm_rbm_map
230 ath12k_hal_wcn7850_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
231 	{
232 		.wbm_ring_num = 0,
233 		.rbm_id = HAL_RX_BUF_RBM_SW0_BM,
234 	},
235 	{
236 		.wbm_ring_num = 2,
237 		.rbm_id = HAL_RX_BUF_RBM_SW2_BM,
238 	},
239 	{
240 		.wbm_ring_num = 4,
241 		.rbm_id = HAL_RX_BUF_RBM_SW4_BM,
242 	},
243 };
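
/*
 * The two maps above pair each TCL data ring with the WBM completion ring
 * number and return buffer manager (RBM) ID used on the TX completion path:
 * QCN9274 spreads across WBM rings 0, 1, 2 and 4, while WCN7850 uses only
 * 0, 2 and 4.  Minimal lookup sketch (ring_idx is a hypothetical TCL ring
 * index):
 *
 *	const struct ath12k_hal_tcl_to_wbm_rbm_map *map =
 *		&ath12k_hal_qcn9274_tcl_to_wbm_rbm_map[ring_idx];
 *
 *	// map->wbm_ring_num selects the WBM2SW ring, map->rbm_id the RBM
 */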
244 
245 static bool ath12k_hw_qcn9274_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
246 {
247 	return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
248 			       RX_MSDU_END_INFO5_FIRST_MSDU);
249 }
250 
251 static bool ath12k_hw_qcn9274_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
252 {
253 	return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
254 			       RX_MSDU_END_INFO5_LAST_MSDU);
255 }
256 
257 static u8 ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
258 {
259 	return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
260 			     RX_MSDU_END_INFO5_L3_HDR_PADDING);
261 }
262 
263 static bool ath12k_hw_qcn9274_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
264 {
265 	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
266 			       RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
267 }
268 
269 static u32 ath12k_hw_qcn9274_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
270 {
271 	return le32_get_bits(desc->u.qcn9274.mpdu_start.info2,
272 			     RX_MPDU_START_INFO2_ENC_TYPE);
273 }
274 
275 static u8 ath12k_hw_qcn9274_rx_desc_get_decap_type(struct hal_rx_desc *desc)
276 {
277 	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
278 			     RX_MSDU_END_INFO11_DECAP_FORMAT);
279 }
280 
281 static u8 ath12k_hw_qcn9274_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
282 {
283 	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
284 			     RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
285 }
286 
287 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
288 {
289 	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
290 			       RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
291 }
292 
293 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
294 {
295 	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
296 			       RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
297 }
298 
299 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
300 {
301 	return le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
302 			     RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
303 }
304 
305 static u16 ath12k_hw_qcn9274_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
306 {
307 	return le32_get_bits(desc->u.qcn9274.msdu_end.info10,
308 			     RX_MSDU_END_INFO10_MSDU_LENGTH);
309 }
310 
311 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
312 {
313 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
314 			     RX_MSDU_END_INFO12_SGI);
315 }
316 
317 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
318 {
319 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
320 			     RX_MSDU_END_INFO12_RATE_MCS);
321 }
322 
323 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
324 {
325 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
326 			     RX_MSDU_END_INFO12_RECV_BW);
327 }
328 
329 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
330 {
331 	return __le32_to_cpu(desc->u.qcn9274.msdu_end.phy_meta_data);
332 }
333 
334 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
335 {
336 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
337 			     RX_MSDU_END_INFO12_PKT_TYPE);
338 }
339 
340 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
341 {
342 	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
343 			     RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
344 }
345 
346 static u8 ath12k_hw_qcn9274_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
347 {
348 	return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
349 			    RX_MSDU_END_INFO5_TID);
350 }
351 
352 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
353 {
354 	return __le16_to_cpu(desc->u.qcn9274.mpdu_start.sw_peer_id);
355 }
356 
357 static void ath12k_hw_qcn9274_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
358 						   struct hal_rx_desc *ldesc)
359 {
360 	memcpy(&fdesc->u.qcn9274.msdu_end, &ldesc->u.qcn9274.msdu_end,
361 	       sizeof(struct rx_msdu_end_qcn9274));
362 }
363 
364 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
365 {
366 	return __le16_to_cpu(desc->u.qcn9274.mpdu_start.phy_ppdu_id);
367 }
368 
369 static void ath12k_hw_qcn9274_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
370 {
371 	u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info10);
372 
373 	info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
374 	info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
375 
376 	desc->u.qcn9274.msdu_end.info10 = __cpu_to_le32(info);
377 }
378 
379 static u8 *ath12k_hw_qcn9274_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
380 {
381 	return &desc->u.qcn9274.msdu_payload[0];
382 }
383 
384 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset(void)
385 {
386 	return offsetof(struct hal_rx_desc_qcn9274, mpdu_start);
387 }
388 
389 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset(void)
390 {
391 	return offsetof(struct hal_rx_desc_qcn9274, msdu_end);
392 }
393 
394 static bool ath12k_hw_qcn9274_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
395 {
396 	return __le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
397 	       RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
398 }
399 
400 static u8 *ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
401 {
402 	return desc->u.qcn9274.mpdu_start.addr2;
403 }
404 
405 static bool ath12k_hw_qcn9274_rx_desc_is_mcbc(struct hal_rx_desc *desc)
406 {
407 	return __le32_to_cpu(desc->u.qcn9274.mpdu_start.info6) &
408 	       RX_MPDU_START_INFO6_MCAST_BCAST;
409 }
410 
411 static void ath12k_hw_qcn9274_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
412 						    struct ieee80211_hdr *hdr)
413 {
414 	hdr->frame_control = desc->u.qcn9274.mpdu_start.frame_ctrl;
415 	hdr->duration_id = desc->u.qcn9274.mpdu_start.duration;
416 	ether_addr_copy(hdr->addr1, desc->u.qcn9274.mpdu_start.addr1);
417 	ether_addr_copy(hdr->addr2, desc->u.qcn9274.mpdu_start.addr2);
418 	ether_addr_copy(hdr->addr3, desc->u.qcn9274.mpdu_start.addr3);
419 	if (__le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
420 			RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
421 		ether_addr_copy(hdr->addr4, desc->u.qcn9274.mpdu_start.addr4);
422 	}
423 	hdr->seq_ctrl = desc->u.qcn9274.mpdu_start.seq_ctrl;
424 }
425 
426 static void ath12k_hw_qcn9274_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
427 						     u8 *crypto_hdr,
428 						     enum hal_encrypt_type enctype)
429 {
430 	unsigned int key_id;
431 
432 	switch (enctype) {
433 	case HAL_ENCRYPT_TYPE_OPEN:
434 		return;
435 	case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
436 	case HAL_ENCRYPT_TYPE_TKIP_MIC:
437 		crypto_hdr[0] =
438 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
439 		crypto_hdr[1] = 0;
440 		crypto_hdr[2] =
441 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
442 		break;
443 	case HAL_ENCRYPT_TYPE_CCMP_128:
444 	case HAL_ENCRYPT_TYPE_CCMP_256:
445 	case HAL_ENCRYPT_TYPE_GCMP_128:
446 	case HAL_ENCRYPT_TYPE_AES_GCMP_256:
447 		crypto_hdr[0] =
448 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
449 		crypto_hdr[1] =
450 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
451 		crypto_hdr[2] = 0;
452 		break;
453 	case HAL_ENCRYPT_TYPE_WEP_40:
454 	case HAL_ENCRYPT_TYPE_WEP_104:
455 	case HAL_ENCRYPT_TYPE_WEP_128:
456 	case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
457 	case HAL_ENCRYPT_TYPE_WAPI:
458 		return;
459 	}
460 	key_id = le32_get_bits(desc->u.qcn9274.mpdu_start.info5,
461 			       RX_MPDU_START_INFO5_KEY_ID);
462 	crypto_hdr[3] = 0x20 | (key_id << 6);
463 	crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274.mpdu_start.pn[0]);
464 	crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274.mpdu_start.pn[0]);
465 	crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[1]);
466 	crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[1]);
467 }
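
/*
 * The helper above reconstructs the 8-byte security IV from the packet
 * number held in the MPDU start TLV.  Assuming the PN_GET_BYTEn macros
 * extract consecutive PN bytes (least significant first), the CCMP/GCMP
 * branch produces the standard layout:
 *
 *	crypto_hdr[0..1] = PN0, PN1
 *	crypto_hdr[2]    = 0 (reserved)
 *	crypto_hdr[3]    = 0x20 | (key_id << 6)	// ExtIV + key index
 *	crypto_hdr[4..7] = PN2..PN5
 *
 * The TKIP branch instead stores PN1 in byte 0 and PN0 in byte 2, with
 * byte 1 zeroed.  Open, WEP and WAPI frames are left untouched.
 */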
468 
469 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_frame_ctl(struct hal_rx_desc *desc)
470 {
471 	return __le16_to_cpu(desc->u.qcn9274.mpdu_start.frame_ctrl);
472 }
473 
474 static int ath12k_hal_srng_create_config_qcn9274(struct ath12k_base *ab)
475 {
476 	struct ath12k_hal *hal = &ab->hal;
477 	struct hal_srng_config *s;
478 
479 	hal->srng_config = kmemdup(hw_srng_config_template,
480 				   sizeof(hw_srng_config_template),
481 				   GFP_KERNEL);
482 	if (!hal->srng_config)
483 		return -ENOMEM;
484 
485 	s = &hal->srng_config[HAL_REO_DST];
486 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
487 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
488 	s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
489 	s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
490 
491 	s = &hal->srng_config[HAL_REO_EXCEPTION];
492 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
493 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
494 
495 	s = &hal->srng_config[HAL_REO_REINJECT];
496 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
497 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
498 	s->reg_size[0] = HAL_SW2REO1_RING_BASE_LSB(ab) - HAL_SW2REO_RING_BASE_LSB(ab);
499 	s->reg_size[1] = HAL_SW2REO1_RING_HP - HAL_SW2REO_RING_HP;
500 
501 	s = &hal->srng_config[HAL_REO_CMD];
502 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
503 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
504 
505 	s = &hal->srng_config[HAL_REO_STATUS];
506 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
507 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
508 
509 	s = &hal->srng_config[HAL_TCL_DATA];
510 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB;
511 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
512 	s->reg_size[0] = HAL_TCL2_RING_BASE_LSB - HAL_TCL1_RING_BASE_LSB;
513 	s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
514 
515 	s = &hal->srng_config[HAL_TCL_CMD];
516 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
517 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
518 
519 	s = &hal->srng_config[HAL_TCL_STATUS];
520 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
521 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
522 
523 	s = &hal->srng_config[HAL_CE_SRC];
524 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_BASE_LSB;
525 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_HP;
526 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
527 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
528 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
529 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
530 
531 	s = &hal->srng_config[HAL_CE_DST];
532 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_BASE_LSB;
533 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_HP;
534 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
535 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
536 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
537 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
538 
539 	s = &hal->srng_config[HAL_CE_DST_STATUS];
540 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG +
541 		HAL_CE_DST_STATUS_RING_BASE_LSB;
542 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_STATUS_RING_HP;
543 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
544 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
545 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
546 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
547 
548 	s = &hal->srng_config[HAL_WBM_IDLE_LINK];
549 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
550 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
551 
552 	s = &hal->srng_config[HAL_SW2WBM_RELEASE];
553 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
554 		HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
555 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
556 	s->reg_size[0] = HAL_WBM_SW1_RELEASE_RING_BASE_LSB(ab) -
557 			 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
558 	s->reg_size[1] = HAL_WBM_SW1_RELEASE_RING_HP - HAL_WBM_SW_RELEASE_RING_HP;
559 
560 	s = &hal->srng_config[HAL_WBM2SW_RELEASE];
561 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
562 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
563 	s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
564 		HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
565 	s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
566 
567 	/* Some LMAC rings are not accessed from the host:
568 	 * RXDMA_BUF, RXDMA_DST, RXDMA_MONITOR_BUF, RXDMA_MONITOR_STATUS,
569 	 * RXDMA_MONITOR_DST, RXDMA_MONITOR_DESC, RXDMA_DIR_BUF_SRC,
570 	 * RXDMA_RX_MONITOR_BUF, TX_MONITOR_BUF, TX_MONITOR_DST, SW2RXDMA
571 	 */
572 	s = &hal->srng_config[HAL_PPE2TCL];
573 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_BASE_LSB;
574 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_HP;
575 
576 	s = &hal->srng_config[HAL_PPE_RELEASE];
577 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
578 				HAL_WBM_PPE_RELEASE_RING_BASE_LSB(ab);
579 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_PPE_RELEASE_RING_HP;
580 
581 	return 0;
582 }
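
/*
 * The function above duplicates the common SRNG template and then fills in
 * the QCN9274-specific register offsets: reg_start[0]/reg_size[0] cover the
 * R0 (configuration) register group and reg_start[1]/reg_size[1] the R2
 * (head/tail pointer) group.  A minimal sketch of how the per-chip hook is
 * expected to be invoked (call path assumed; the hook is installed via
 * hal_qcn9274_ops below):
 *
 *	ret = ab->hw_params->hal_ops->create_srng_config(ab);
 *	if (ret)
 *		return ret;	// hal->srng_config was not allocated
 */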
583 
584 static bool ath12k_hw_qcn9274_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
585 {
586 	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info14,
587 			       RX_MSDU_END_INFO14_MSDU_DONE);
588 }
589 
590 static bool ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
591 {
592 	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
593 			       RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
594 }
595 
596 static bool ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
597 {
598 	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
599 			       RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
600 }
601 
602 static bool ath12k_hw_qcn9274_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
603 {
604 	return (le32_get_bits(desc->u.qcn9274.msdu_end.info14,
605 			      RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
606 			      RX_DESC_DECRYPT_STATUS_CODE_OK);
607 }
608 
609 static u32 ath12k_hw_qcn9274_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
610 {
611 	u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info13);
612 	u32 errmap = 0;
613 
614 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
615 		errmap |= HAL_RX_MPDU_ERR_FCS;
616 
617 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
618 		errmap |= HAL_RX_MPDU_ERR_DECRYPT;
619 
620 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
621 		errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
622 
623 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
624 		errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
625 
626 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
627 		errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
628 
629 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
630 		errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
631 
632 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
633 		errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
634 
635 	return errmap;
636 }
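
/*
 * The bitmap returned above folds the per-MPDU error flags of msdu_end
 * info13 into generic HAL_RX_MPDU_ERR_* bits, so a caller only needs bit
 * tests (sketch only; errmap obtained through the hal_ops hook):
 *
 *	if (errmap & HAL_RX_MPDU_ERR_FCS)
 *		;	// drop or flag the frame as FCS-failed
 *	if (errmap & HAL_RX_MPDU_ERR_TKIP_MIC)
 *		;	// report a Michael MIC failure
 */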
637 
638 const struct hal_ops hal_qcn9274_ops = {
639 	.rx_desc_get_first_msdu = ath12k_hw_qcn9274_rx_desc_get_first_msdu,
640 	.rx_desc_get_last_msdu = ath12k_hw_qcn9274_rx_desc_get_last_msdu,
641 	.rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes,
642 	.rx_desc_encrypt_valid = ath12k_hw_qcn9274_rx_desc_encrypt_valid,
643 	.rx_desc_get_encrypt_type = ath12k_hw_qcn9274_rx_desc_get_encrypt_type,
644 	.rx_desc_get_decap_type = ath12k_hw_qcn9274_rx_desc_get_decap_type,
645 	.rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_rx_desc_get_mesh_ctl,
646 	.rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld,
647 	.rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid,
648 	.rx_desc_get_mpdu_start_seq_no = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no,
649 	.rx_desc_get_msdu_len = ath12k_hw_qcn9274_rx_desc_get_msdu_len,
650 	.rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_rx_desc_get_msdu_sgi,
651 	.rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs,
652 	.rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw,
653 	.rx_desc_get_msdu_freq = ath12k_hw_qcn9274_rx_desc_get_msdu_freq,
654 	.rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type,
655 	.rx_desc_get_msdu_nss = ath12k_hw_qcn9274_rx_desc_get_msdu_nss,
656 	.rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_rx_desc_get_mpdu_tid,
657 	.rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id,
658 	.rx_desc_copy_end_tlv = ath12k_hw_qcn9274_rx_desc_copy_end_tlv,
659 	.rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id,
660 	.rx_desc_set_msdu_len = ath12k_hw_qcn9274_rx_desc_set_msdu_len,
661 	.rx_desc_get_msdu_payload = ath12k_hw_qcn9274_rx_desc_get_msdu_payload,
662 	.rx_desc_get_mpdu_start_offset = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset,
663 	.rx_desc_get_msdu_end_offset = ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset,
664 	.rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_rx_desc_mac_addr2_valid,
665 	.rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2,
666 	.rx_desc_is_mcbc = ath12k_hw_qcn9274_rx_desc_is_mcbc,
667 	.rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_rx_desc_get_dot11_hdr,
668 	.rx_desc_get_crypto_header = ath12k_hw_qcn9274_rx_desc_get_crypto_hdr,
669 	.rx_desc_get_mpdu_frame_ctl = ath12k_hw_qcn9274_rx_desc_get_mpdu_frame_ctl,
670 	.create_srng_config = ath12k_hal_srng_create_config_qcn9274,
671 	.tcl_to_wbm_rbm_map = ath12k_hal_qcn9274_tcl_to_wbm_rbm_map,
672 	.dp_rx_h_msdu_done = ath12k_hw_qcn9274_dp_rx_h_msdu_done,
673 	.dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail,
674 	.dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail,
675 	.dp_rx_h_is_decrypted = ath12k_hw_qcn9274_dp_rx_h_is_decrypted,
676 	.dp_rx_h_mpdu_err = ath12k_hw_qcn9274_dp_rx_h_mpdu_err,
677 };
678 
679 static bool ath12k_hw_wcn7850_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
680 {
681 	return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
682 			       RX_MSDU_END_INFO5_FIRST_MSDU);
683 }
684 
685 static bool ath12k_hw_wcn7850_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
686 {
687 	return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
688 			       RX_MSDU_END_INFO5_LAST_MSDU);
689 }
690 
691 static u8 ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
692 {
693 	return le16_get_bits(desc->u.wcn7850.msdu_end.info5,
694 			    RX_MSDU_END_INFO5_L3_HDR_PADDING);
695 }
696 
697 static bool ath12k_hw_wcn7850_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
698 {
699 	return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
700 			       RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
701 }
702 
703 static u32 ath12k_hw_wcn7850_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
704 {
705 	return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
706 			     RX_MPDU_START_INFO2_ENC_TYPE);
707 }
708 
709 static u8 ath12k_hw_wcn7850_rx_desc_get_decap_type(struct hal_rx_desc *desc)
710 {
711 	return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
712 			     RX_MSDU_END_INFO11_DECAP_FORMAT);
713 }
714 
715 static u8 ath12k_hw_wcn7850_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
716 {
717 	return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
718 			     RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
719 }
720 
721 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
722 {
723 	return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
724 			       RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
725 }
726 
727 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
728 {
729 	return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
730 			       RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
731 }
732 
733 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
734 {
735 	return le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
736 			     RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
737 }
738 
739 static u16 ath12k_hw_wcn7850_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
740 {
741 	return le32_get_bits(desc->u.wcn7850.msdu_end.info10,
742 			     RX_MSDU_END_INFO10_MSDU_LENGTH);
743 }
744 
745 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
746 {
747 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
748 			     RX_MSDU_END_INFO12_SGI);
749 }
750 
751 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
752 {
753 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
754 			     RX_MSDU_END_INFO12_RATE_MCS);
755 }
756 
757 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
758 {
759 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
760 			     RX_MSDU_END_INFO12_RECV_BW);
761 }
762 
763 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
764 {
765 	return __le32_to_cpu(desc->u.wcn7850.msdu_end.phy_meta_data);
766 }
767 
768 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
769 {
770 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
771 			     RX_MSDU_END_INFO12_PKT_TYPE);
772 }
773 
774 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
775 {
776 	return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
777 			     RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
778 }
779 
780 static u8 ath12k_hw_wcn7850_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
781 {
782 	return le16_get_bits(desc->u.wcn7850.msdu_end.info5,
783 			     RX_MSDU_END_INFO5_TID);
784 }
785 
786 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
787 {
788 	return __le16_to_cpu(desc->u.wcn7850.mpdu_start.sw_peer_id);
789 }
790 
791 static void ath12k_hw_wcn7850_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
792 						   struct hal_rx_desc *ldesc)
793 {
794 	memcpy(&fdesc->u.wcn7850.msdu_end, &ldesc->u.wcn7850.msdu_end,
795 	       sizeof(struct rx_msdu_end_qcn9274));
796 }
797 
798 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag(struct hal_rx_desc *desc)
799 {
800 	return le64_get_bits(desc->u.wcn7850.mpdu_start_tag,
801 			    HAL_TLV_HDR_TAG);
802 }
803 
804 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
805 {
806 	return __le16_to_cpu(desc->u.wcn7850.mpdu_start.phy_ppdu_id);
807 }
808 
809 static void ath12k_hw_wcn7850_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
810 {
811 	u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info10);
812 
813 	info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
814 	info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
815 
816 	desc->u.wcn7850.msdu_end.info10 = __cpu_to_le32(info);
817 }
818 
819 static u8 *ath12k_hw_wcn7850_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
820 {
821 	return &desc->u.wcn7850.msdu_payload[0];
822 }
823 
824 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset(void)
825 {
826 	return offsetof(struct hal_rx_desc_wcn7850, mpdu_start_tag);
827 }
828 
829 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset(void)
830 {
831 	return offsetof(struct hal_rx_desc_wcn7850, msdu_end_tag);
832 }
833 
834 static bool ath12k_hw_wcn7850_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
835 {
836 	return __le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
837 	       RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
838 }
839 
840 static u8 *ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
841 {
842 	return desc->u.wcn7850.mpdu_start.addr2;
843 }
844 
845 static bool ath12k_hw_wcn7850_rx_desc_is_mcbc(struct hal_rx_desc *desc)
846 {
847 	return __le32_to_cpu(desc->u.wcn7850.mpdu_start.info6) &
848 	       RX_MPDU_START_INFO6_MCAST_BCAST;
849 }
850 
851 static void ath12k_hw_wcn7850_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
852 						    struct ieee80211_hdr *hdr)
853 {
854 	hdr->frame_control = desc->u.wcn7850.mpdu_start.frame_ctrl;
855 	hdr->duration_id = desc->u.wcn7850.mpdu_start.duration;
856 	ether_addr_copy(hdr->addr1, desc->u.wcn7850.mpdu_start.addr1);
857 	ether_addr_copy(hdr->addr2, desc->u.wcn7850.mpdu_start.addr2);
858 	ether_addr_copy(hdr->addr3, desc->u.wcn7850.mpdu_start.addr3);
859 	if (__le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
860 			RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
861 		ether_addr_copy(hdr->addr4, desc->u.wcn7850.mpdu_start.addr4);
862 	}
863 	hdr->seq_ctrl = desc->u.wcn7850.mpdu_start.seq_ctrl;
864 }
865 
866 static void ath12k_hw_wcn7850_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
867 						     u8 *crypto_hdr,
868 						     enum hal_encrypt_type enctype)
869 {
870 	unsigned int key_id;
871 
872 	switch (enctype) {
873 	case HAL_ENCRYPT_TYPE_OPEN:
874 		return;
875 	case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
876 	case HAL_ENCRYPT_TYPE_TKIP_MIC:
877 		crypto_hdr[0] =
878 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
879 		crypto_hdr[1] = 0;
880 		crypto_hdr[2] =
881 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
882 		break;
883 	case HAL_ENCRYPT_TYPE_CCMP_128:
884 	case HAL_ENCRYPT_TYPE_CCMP_256:
885 	case HAL_ENCRYPT_TYPE_GCMP_128:
886 	case HAL_ENCRYPT_TYPE_AES_GCMP_256:
887 		crypto_hdr[0] =
888 			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
889 		crypto_hdr[1] =
890 			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
891 		crypto_hdr[2] = 0;
892 		break;
893 	case HAL_ENCRYPT_TYPE_WEP_40:
894 	case HAL_ENCRYPT_TYPE_WEP_104:
895 	case HAL_ENCRYPT_TYPE_WEP_128:
896 	case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
897 	case HAL_ENCRYPT_TYPE_WAPI:
898 		return;
899 	}
900 	key_id = u32_get_bits(__le32_to_cpu(desc->u.wcn7850.mpdu_start.info5),
901 			      RX_MPDU_START_INFO5_KEY_ID);
902 	crypto_hdr[3] = 0x20 | (key_id << 6);
903 	crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.wcn7850.mpdu_start.pn[0]);
904 	crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.wcn7850.mpdu_start.pn[0]);
905 	crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[1]);
906 	crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[1]);
907 }
908 
909 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_frame_ctl(struct hal_rx_desc *desc)
910 {
911 	return __le16_to_cpu(desc->u.wcn7850.mpdu_start.frame_ctrl);
912 }
913 
914 static int ath12k_hal_srng_create_config_wcn7850(struct ath12k_base *ab)
915 {
916 	struct ath12k_hal *hal = &ab->hal;
917 	struct hal_srng_config *s;
918 
919 	hal->srng_config = kmemdup(hw_srng_config_template,
920 				   sizeof(hw_srng_config_template),
921 				   GFP_KERNEL);
922 	if (!hal->srng_config)
923 		return -ENOMEM;
924 
925 	s = &hal->srng_config[HAL_REO_DST];
926 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
927 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
928 	s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
929 	s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
930 
931 	s = &hal->srng_config[HAL_REO_EXCEPTION];
932 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
933 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
934 
935 	s = &hal->srng_config[HAL_REO_REINJECT];
936 	s->max_rings = 1;
937 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
938 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
939 
940 	s = &hal->srng_config[HAL_REO_CMD];
941 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
942 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
943 
944 	s = &hal->srng_config[HAL_REO_STATUS];
945 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
946 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
947 
948 	s = &hal->srng_config[HAL_TCL_DATA];
949 	s->max_rings = 5;
950 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB;
951 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
952 	s->reg_size[0] = HAL_TCL2_RING_BASE_LSB - HAL_TCL1_RING_BASE_LSB;
953 	s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
954 
955 	s = &hal->srng_config[HAL_TCL_CMD];
956 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
957 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
958 
959 	s = &hal->srng_config[HAL_TCL_STATUS];
960 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
961 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
962 
963 	s = &hal->srng_config[HAL_CE_SRC];
964 	s->max_rings = 12;
965 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_BASE_LSB;
966 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_HP;
967 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
968 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
969 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
970 		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
971 
972 	s = &hal->srng_config[HAL_CE_DST];
973 	s->max_rings = 12;
974 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_BASE_LSB;
975 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_HP;
976 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
977 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
978 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
979 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
980 
981 	s = &hal->srng_config[HAL_CE_DST_STATUS];
982 	s->max_rings = 12;
983 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG +
984 		HAL_CE_DST_STATUS_RING_BASE_LSB;
985 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_STATUS_RING_HP;
986 	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
987 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
988 	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
989 		HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
990 
991 	s = &hal->srng_config[HAL_WBM_IDLE_LINK];
992 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
993 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
994 
995 	s = &hal->srng_config[HAL_SW2WBM_RELEASE];
996 	s->max_rings = 1;
997 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
998 		HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
999 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
1000 
1001 	s = &hal->srng_config[HAL_WBM2SW_RELEASE];
1002 	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1003 	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
1004 	s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
1005 		HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1006 	s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
1007 
1008 	s = &hal->srng_config[HAL_RXDMA_BUF];
1009 	s->max_rings = 2;
1010 	s->mac_type = ATH12K_HAL_SRNG_PMAC;
1011 
1012 	s = &hal->srng_config[HAL_RXDMA_DST];
1013 	s->max_rings = 1;
1014 	s->entry_size = sizeof(struct hal_reo_entrance_ring) >> 2;
1015 
1016 	/* below rings are not used */
1017 	s = &hal->srng_config[HAL_RXDMA_DIR_BUF];
1018 	s->max_rings = 0;
1019 
1020 	s = &hal->srng_config[HAL_PPE2TCL];
1021 	s->max_rings = 0;
1022 
1023 	s = &hal->srng_config[HAL_PPE_RELEASE];
1024 	s->max_rings = 0;
1025 
1026 	s = &hal->srng_config[HAL_TX_MONITOR_BUF];
1027 	s->max_rings = 0;
1028 
1029 	s = &hal->srng_config[HAL_TX_MONITOR_DST];
1030 	s->max_rings = 0;
1031 
1035 	return 0;
1036 }
1037 
1038 static bool ath12k_hw_wcn7850_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
1039 {
1040 	return !!le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1041 			       RX_MSDU_END_INFO14_MSDU_DONE);
1042 }
1043 
1044 static bool ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
1045 {
1046 	return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1047 			       RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
1048 }
1049 
1050 static bool ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
1051 {
1052 	return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1053 			      RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
1054 }
1055 
1056 static bool ath12k_hw_wcn7850_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
1057 {
1058 	return (le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1059 			      RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
1060 			      RX_DESC_DECRYPT_STATUS_CODE_OK);
1061 }
1062 
1063 static u32 ath12k_hw_wcn7850_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
1064 {
1065 	u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info13);
1066 	u32 errmap = 0;
1067 
1068 	if (info & RX_MSDU_END_INFO13_FCS_ERR)
1069 		errmap |= HAL_RX_MPDU_ERR_FCS;
1070 
1071 	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1072 		errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1073 
1074 	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1075 		errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1076 
1077 	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1078 		errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1079 
1080 	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1081 		errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1082 
1083 	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1084 		errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1085 
1086 	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1087 		errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1088 
1089 	return errmap;
1090 }
1091 
1092 const struct hal_ops hal_wcn7850_ops = {
1093 	.rx_desc_get_first_msdu = ath12k_hw_wcn7850_rx_desc_get_first_msdu,
1094 	.rx_desc_get_last_msdu = ath12k_hw_wcn7850_rx_desc_get_last_msdu,
1095 	.rx_desc_get_l3_pad_bytes = ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes,
1096 	.rx_desc_encrypt_valid = ath12k_hw_wcn7850_rx_desc_encrypt_valid,
1097 	.rx_desc_get_encrypt_type = ath12k_hw_wcn7850_rx_desc_get_encrypt_type,
1098 	.rx_desc_get_decap_type = ath12k_hw_wcn7850_rx_desc_get_decap_type,
1099 	.rx_desc_get_mesh_ctl = ath12k_hw_wcn7850_rx_desc_get_mesh_ctl,
1100 	.rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld,
1101 	.rx_desc_get_mpdu_fc_valid = ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid,
1102 	.rx_desc_get_mpdu_start_seq_no = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no,
1103 	.rx_desc_get_msdu_len = ath12k_hw_wcn7850_rx_desc_get_msdu_len,
1104 	.rx_desc_get_msdu_sgi = ath12k_hw_wcn7850_rx_desc_get_msdu_sgi,
1105 	.rx_desc_get_msdu_rate_mcs = ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs,
1106 	.rx_desc_get_msdu_rx_bw = ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw,
1107 	.rx_desc_get_msdu_freq = ath12k_hw_wcn7850_rx_desc_get_msdu_freq,
1108 	.rx_desc_get_msdu_pkt_type = ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type,
1109 	.rx_desc_get_msdu_nss = ath12k_hw_wcn7850_rx_desc_get_msdu_nss,
1110 	.rx_desc_get_mpdu_tid = ath12k_hw_wcn7850_rx_desc_get_mpdu_tid,
1111 	.rx_desc_get_mpdu_peer_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id,
1112 	.rx_desc_copy_end_tlv = ath12k_hw_wcn7850_rx_desc_copy_end_tlv,
1113 	.rx_desc_get_mpdu_start_tag = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag,
1114 	.rx_desc_get_mpdu_ppdu_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id,
1115 	.rx_desc_set_msdu_len = ath12k_hw_wcn7850_rx_desc_set_msdu_len,
1116 	.rx_desc_get_msdu_payload = ath12k_hw_wcn7850_rx_desc_get_msdu_payload,
1117 	.rx_desc_get_mpdu_start_offset = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset,
1118 	.rx_desc_get_msdu_end_offset = ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset,
1119 	.rx_desc_mac_addr2_valid = ath12k_hw_wcn7850_rx_desc_mac_addr2_valid,
1120 	.rx_desc_mpdu_start_addr2 = ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2,
1121 	.rx_desc_is_mcbc = ath12k_hw_wcn7850_rx_desc_is_mcbc,
1122 	.rx_desc_get_dot11_hdr = ath12k_hw_wcn7850_rx_desc_get_dot11_hdr,
1123 	.rx_desc_get_crypto_header = ath12k_hw_wcn7850_rx_desc_get_crypto_hdr,
1124 	.rx_desc_get_mpdu_frame_ctl = ath12k_hw_wcn7850_rx_desc_get_mpdu_frame_ctl,
1125 	.create_srng_config = ath12k_hal_srng_create_config_wcn7850,
1126 	.tcl_to_wbm_rbm_map = ath12k_hal_wcn7850_tcl_to_wbm_rbm_map,
1127 	.dp_rx_h_msdu_done = ath12k_hw_wcn7850_dp_rx_h_msdu_done,
1128 	.dp_rx_h_l4_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail,
1129 	.dp_rx_h_ip_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail,
1130 	.dp_rx_h_is_decrypted = ath12k_hw_wcn7850_dp_rx_h_is_decrypted,
1131 	.dp_rx_h_mpdu_err = ath12k_hw_wcn7850_dp_rx_h_mpdu_err,
1132 };
1133 
1134 static int ath12k_hal_alloc_cont_rdp(struct ath12k_base *ab)
1135 {
1136 	struct ath12k_hal *hal = &ab->hal;
1137 	size_t size;
1138 
1139 	size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1140 	hal->rdp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->rdp.paddr,
1141 					    GFP_KERNEL);
1142 	if (!hal->rdp.vaddr)
1143 		return -ENOMEM;
1144 
1145 	return 0;
1146 }
1147 
1148 static void ath12k_hal_free_cont_rdp(struct ath12k_base *ab)
1149 {
1150 	struct ath12k_hal *hal = &ab->hal;
1151 	size_t size;
1152 
1153 	if (!hal->rdp.vaddr)
1154 		return;
1155 
1156 	size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1157 	dma_free_coherent(ab->dev, size,
1158 			  hal->rdp.vaddr, hal->rdp.paddr);
1159 	hal->rdp.vaddr = NULL;
1160 }
1161 
1162 static int ath12k_hal_alloc_cont_wrp(struct ath12k_base *ab)
1163 {
1164 	struct ath12k_hal *hal = &ab->hal;
1165 	size_t size;
1166 
1167 	size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1168 	hal->wrp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->wrp.paddr,
1169 					    GFP_KERNEL);
1170 	if (!hal->wrp.vaddr)
1171 		return -ENOMEM;
1172 
1173 	return 0;
1174 }
1175 
1176 static void ath12k_hal_free_cont_wrp(struct ath12k_base *ab)
1177 {
1178 	struct ath12k_hal *hal = &ab->hal;
1179 	size_t size;
1180 
1181 	if (!hal->wrp.vaddr)
1182 		return;
1183 
1184 	size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1185 	dma_free_coherent(ab->dev, size,
1186 			  hal->wrp.vaddr, hal->wrp.paddr);
1187 	hal->wrp.vaddr = NULL;
1188 }
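
/*
 * rdp and wrp above are small DMA-coherent arrays holding one 32-bit shadow
 * pointer per ring: rdp receives the pointers the hardware publishes for the
 * host to read (destination head, source tail), while wrp holds the pointers
 * the host publishes for LMAC rings (source head, destination tail), as the
 * address helpers below show.  Sketch for a ring with
 * HAL_SRNG_FLAGS_LMAC_RING set (srng assumed already set up):
 *
 *	dma_addr_t hp_paddr = ath12k_hal_srng_get_hp_addr(ab, srng);
 *	dma_addr_t tp_paddr = ath12k_hal_srng_get_tp_addr(ab, srng);
 */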
1189 
1190 static void ath12k_hal_ce_dst_setup(struct ath12k_base *ab,
1191 				    struct hal_srng *srng, int ring_num)
1192 {
1193 	struct hal_srng_config *srng_config = &ab->hal.srng_config[HAL_CE_DST];
1194 	u32 addr;
1195 	u32 val;
1196 
1197 	addr = HAL_CE_DST_RING_CTRL +
1198 	       srng_config->reg_start[HAL_SRNG_REG_GRP_R0] +
1199 	       ring_num * srng_config->reg_size[HAL_SRNG_REG_GRP_R0];
1200 
1201 	val = ath12k_hif_read32(ab, addr);
1202 	val &= ~HAL_CE_DST_R0_DEST_CTRL_MAX_LEN;
1203 	val |= u32_encode_bits(srng->u.dst_ring.max_buffer_length,
1204 			       HAL_CE_DST_R0_DEST_CTRL_MAX_LEN);
1205 	ath12k_hif_write32(ab, addr, val);
1206 }
1207 
1208 static void ath12k_hal_srng_dst_hw_init(struct ath12k_base *ab,
1209 					struct hal_srng *srng)
1210 {
1211 	struct ath12k_hal *hal = &ab->hal;
1212 	u32 val;
1213 	u64 hp_addr;
1214 	u32 reg_base;
1215 
1216 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1217 
1218 	if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1219 		ath12k_hif_write32(ab, reg_base +
1220 				   HAL_REO1_RING_MSI1_BASE_LSB_OFFSET,
1221 				   srng->msi_addr);
1222 
1223 		val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1224 				      HAL_REO1_RING_MSI1_BASE_MSB_ADDR) |
1225 				      HAL_REO1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1226 		ath12k_hif_write32(ab, reg_base +
1227 				       HAL_REO1_RING_MSI1_BASE_MSB_OFFSET, val);
1228 
1229 		ath12k_hif_write32(ab,
1230 				   reg_base + HAL_REO1_RING_MSI1_DATA_OFFSET,
1231 				   srng->msi_data);
1232 	}
1233 
1234 	ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1235 
1236 	val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1237 			      HAL_REO1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1238 	      u32_encode_bits((srng->entry_size * srng->num_entries),
1239 			      HAL_REO1_RING_BASE_MSB_RING_SIZE);
1240 	ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_BASE_MSB_OFFSET, val);
1241 
1242 	val = u32_encode_bits(srng->ring_id, HAL_REO1_RING_ID_RING_ID) |
1243 	      u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1244 	ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_ID_OFFSET(ab), val);
1245 
1246 	/* interrupt setup */
1247 	val = u32_encode_bits((srng->intr_timer_thres_us >> 3),
1248 			      HAL_REO1_RING_PRDR_INT_SETUP_INTR_TMR_THOLD);
1249 
1250 	val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1251 				HAL_REO1_RING_PRDR_INT_SETUP_BATCH_COUNTER_THOLD);
1252 
1253 	ath12k_hif_write32(ab,
1254 			   reg_base + HAL_REO1_RING_PRODUCER_INT_SETUP_OFFSET,
1255 			   val);
1256 
1257 	hp_addr = hal->rdp.paddr +
1258 		  ((unsigned long)srng->u.dst_ring.hp_addr -
1259 		   (unsigned long)hal->rdp.vaddr);
1260 	ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_HP_ADDR_LSB_OFFSET,
1261 			   hp_addr & HAL_ADDR_LSB_REG_MASK);
1262 	ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_HP_ADDR_MSB_OFFSET,
1263 			   hp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1264 
1265 	/* Initialize head and tail pointers to indicate ring is empty */
1266 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1267 	ath12k_hif_write32(ab, reg_base, 0);
1268 	ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_TP_OFFSET, 0);
1269 	*srng->u.dst_ring.hp_addr = 0;
1270 
1271 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1272 	val = 0;
1273 	if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1274 		val |= HAL_REO1_RING_MISC_DATA_TLV_SWAP;
1275 	if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1276 		val |= HAL_REO1_RING_MISC_HOST_FW_SWAP;
1277 	if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1278 		val |= HAL_REO1_RING_MISC_MSI_SWAP;
1279 	val |= HAL_REO1_RING_MISC_SRNG_ENABLE;
1280 
1281 	ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_MISC_OFFSET, val);
1282 }
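
/*
 * Order of operations in the destination-ring init above: optional MSI
 * address/data, ring base address (LSB plus MSB/size word), ring id and
 * entry size, producer interrupt thresholds, the rdp address where the
 * hardware mirrors its head pointer, zeroing HP/TP so the ring starts
 * empty, and finally the MISC register with the swap flags and the SRNG
 * enable bit.
 */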
1283 
1284 static void ath12k_hal_srng_src_hw_init(struct ath12k_base *ab,
1285 					struct hal_srng *srng)
1286 {
1287 	struct ath12k_hal *hal = &ab->hal;
1288 	u32 val;
1289 	u64 tp_addr;
1290 	u32 reg_base;
1291 
1292 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1293 
1294 	if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1295 		ath12k_hif_write32(ab, reg_base +
1296 				   HAL_TCL1_RING_MSI1_BASE_LSB_OFFSET(ab),
1297 				   srng->msi_addr);
1298 
1299 		val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1300 				      HAL_TCL1_RING_MSI1_BASE_MSB_ADDR) |
1301 				      HAL_TCL1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1302 		ath12k_hif_write32(ab, reg_base +
1303 				       HAL_TCL1_RING_MSI1_BASE_MSB_OFFSET(ab),
1304 				   val);
1305 
1306 		ath12k_hif_write32(ab, reg_base +
1307 				       HAL_TCL1_RING_MSI1_DATA_OFFSET(ab),
1308 				   srng->msi_data);
1309 	}
1310 
1311 	ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1312 
1313 	val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1314 			      HAL_TCL1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1315 	      u32_encode_bits((srng->entry_size * srng->num_entries),
1316 			      HAL_TCL1_RING_BASE_MSB_RING_SIZE);
1317 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_BASE_MSB_OFFSET, val);
1318 
1319 	val = u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1320 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_ID_OFFSET(ab), val);
1321 
1322 	val = u32_encode_bits(srng->intr_timer_thres_us,
1323 			      HAL_TCL1_RING_CONSR_INT_SETUP_IX0_INTR_TMR_THOLD);
1324 
1325 	val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1326 			       HAL_TCL1_RING_CONSR_INT_SETUP_IX0_BATCH_COUNTER_THOLD);
1327 
1328 	ath12k_hif_write32(ab,
1329 			   reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX0_OFFSET(ab),
1330 			   val);
1331 
1332 	val = 0;
1333 	if (srng->flags & HAL_SRNG_FLAGS_LOW_THRESH_INTR_EN) {
1334 		val |= u32_encode_bits(srng->u.src_ring.low_threshold,
1335 				       HAL_TCL1_RING_CONSR_INT_SETUP_IX1_LOW_THOLD);
1336 	}
1337 	ath12k_hif_write32(ab,
1338 			   reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX1_OFFSET(ab),
1339 			   val);
1340 
1341 	if (srng->ring_id != HAL_SRNG_RING_ID_WBM_IDLE_LINK) {
1342 		tp_addr = hal->rdp.paddr +
1343 			  ((unsigned long)srng->u.src_ring.tp_addr -
1344 			   (unsigned long)hal->rdp.vaddr);
1345 		ath12k_hif_write32(ab,
1346 				   reg_base + HAL_TCL1_RING_TP_ADDR_LSB_OFFSET(ab),
1347 				   tp_addr & HAL_ADDR_LSB_REG_MASK);
1348 		ath12k_hif_write32(ab,
1349 				   reg_base + HAL_TCL1_RING_TP_ADDR_MSB_OFFSET(ab),
1350 				   tp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1351 	}
1352 
1353 	/* Initialize head and tail pointers to indicate ring is empty */
1354 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1355 	ath12k_hif_write32(ab, reg_base, 0);
1356 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_TP_OFFSET, 0);
1357 	*srng->u.src_ring.tp_addr = 0;
1358 
1359 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1360 	val = 0;
1361 	if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1362 		val |= HAL_TCL1_RING_MISC_DATA_TLV_SWAP;
1363 	if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1364 		val |= HAL_TCL1_RING_MISC_HOST_FW_SWAP;
1365 	if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1366 		val |= HAL_TCL1_RING_MISC_MSI_SWAP;
1367 
1368 	/* Loop count is not used for SRC rings */
1369 	val |= HAL_TCL1_RING_MISC_MSI_LOOPCNT_DISABLE;
1370 
1371 	val |= HAL_TCL1_RING_MISC_SRNG_ENABLE;
1372 
1373 	if (srng->ring_id == HAL_SRNG_RING_ID_WBM_IDLE_LINK)
1374 		val |= HAL_TCL1_RING_MISC_MSI_RING_ID_DISABLE;
1375 
1376 	ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_MISC_OFFSET(ab), val);
1377 }
1378 
1379 static void ath12k_hal_srng_hw_init(struct ath12k_base *ab,
1380 				    struct hal_srng *srng)
1381 {
1382 	if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1383 		ath12k_hal_srng_src_hw_init(ab, srng);
1384 	else
1385 		ath12k_hal_srng_dst_hw_init(ab, srng);
1386 }
1387 
1388 static int ath12k_hal_srng_get_ring_id(struct ath12k_base *ab,
1389 				       enum hal_ring_type type,
1390 				       int ring_num, int mac_id)
1391 {
1392 	struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
1393 	int ring_id;
1394 
1395 	if (ring_num >= srng_config->max_rings) {
1396 		ath12k_warn(ab, "invalid ring number: %d\n", ring_num);
1397 		return -EINVAL;
1398 	}
1399 
1400 	ring_id = srng_config->start_ring_id + ring_num;
1401 	if (srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
1402 		ring_id += mac_id * HAL_SRNG_RINGS_PER_PMAC;
1403 
1404 	if (WARN_ON(ring_id >= HAL_SRNG_RING_ID_MAX))
1405 		return -EINVAL;
1406 
1407 	return ring_id;
1408 }
1409 
1410 int ath12k_hal_srng_get_entrysize(struct ath12k_base *ab, u32 ring_type)
1411 {
1412 	struct hal_srng_config *srng_config;
1413 
1414 	if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1415 		return -EINVAL;
1416 
1417 	srng_config = &ab->hal.srng_config[ring_type];
1418 
1419 	return (srng_config->entry_size << 2);
1420 }
1421 
1422 int ath12k_hal_srng_get_max_entries(struct ath12k_base *ab, u32 ring_type)
1423 {
1424 	struct hal_srng_config *srng_config;
1425 
1426 	if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1427 		return -EINVAL;
1428 
1429 	srng_config = &ab->hal.srng_config[ring_type];
1430 
1431 	return (srng_config->max_size / srng_config->entry_size);
1432 }
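
/*
 * Typical sizing use of the two helpers above when allocating ring memory
 * (ring type and num_entries are hypothetical):
 *
 *	int entry_sz = ath12k_hal_srng_get_entrysize(ab, HAL_WBM2SW_RELEASE);
 *	int max_entries = ath12k_hal_srng_get_max_entries(ab, HAL_WBM2SW_RELEASE);
 *
 *	num_entries = min(num_entries, max_entries);
 *	ring_size = num_entries * entry_sz;	// bytes of DMA memory needed
 */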
1433 
1434 void ath12k_hal_srng_get_params(struct ath12k_base *ab, struct hal_srng *srng,
1435 				struct hal_srng_params *params)
1436 {
1437 	params->ring_base_paddr = srng->ring_base_paddr;
1438 	params->ring_base_vaddr = srng->ring_base_vaddr;
1439 	params->num_entries = srng->num_entries;
1440 	params->intr_timer_thres_us = srng->intr_timer_thres_us;
1441 	params->intr_batch_cntr_thres_entries =
1442 		srng->intr_batch_cntr_thres_entries;
1443 	params->low_threshold = srng->u.src_ring.low_threshold;
1444 	params->msi_addr = srng->msi_addr;
1445 	params->msi2_addr = srng->msi2_addr;
1446 	params->msi_data = srng->msi_data;
1447 	params->msi2_data = srng->msi2_data;
1448 	params->flags = srng->flags;
1449 }
1450 
1451 dma_addr_t ath12k_hal_srng_get_hp_addr(struct ath12k_base *ab,
1452 				       struct hal_srng *srng)
1453 {
1454 	if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1455 		return 0;
1456 
1457 	if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1458 		return ab->hal.wrp.paddr +
1459 		       ((unsigned long)srng->u.src_ring.hp_addr -
1460 			(unsigned long)ab->hal.wrp.vaddr);
1461 	else
1462 		return ab->hal.rdp.paddr +
1463 		       ((unsigned long)srng->u.dst_ring.hp_addr -
1464 			 (unsigned long)ab->hal.rdp.vaddr);
1465 }
1466 
1467 dma_addr_t ath12k_hal_srng_get_tp_addr(struct ath12k_base *ab,
1468 				       struct hal_srng *srng)
1469 {
1470 	if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1471 		return 0;
1472 
1473 	if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1474 		return ab->hal.rdp.paddr +
1475 		       ((unsigned long)srng->u.src_ring.tp_addr -
1476 			(unsigned long)ab->hal.rdp.vaddr);
1477 	else
1478 		return ab->hal.wrp.paddr +
1479 		       ((unsigned long)srng->u.dst_ring.tp_addr -
1480 			(unsigned long)ab->hal.wrp.vaddr);
1481 }
1482 
1483 u32 ath12k_hal_ce_get_desc_size(enum hal_ce_desc type)
1484 {
1485 	switch (type) {
1486 	case HAL_CE_DESC_SRC:
1487 		return sizeof(struct hal_ce_srng_src_desc);
1488 	case HAL_CE_DESC_DST:
1489 		return sizeof(struct hal_ce_srng_dest_desc);
1490 	case HAL_CE_DESC_DST_STATUS:
1491 		return sizeof(struct hal_ce_srng_dst_status_desc);
1492 	}
1493 
1494 	return 0;
1495 }
1496 
1497 void ath12k_hal_ce_src_set_desc(struct hal_ce_srng_src_desc *desc, dma_addr_t paddr,
1498 				u32 len, u32 id, u8 byte_swap_data)
1499 {
1500 	desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1501 	desc->buffer_addr_info =
1502 		le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1503 				 HAL_CE_SRC_DESC_ADDR_INFO_ADDR_HI) |
1504 		le32_encode_bits(byte_swap_data,
1505 				 HAL_CE_SRC_DESC_ADDR_INFO_BYTE_SWAP) |
1506 		le32_encode_bits(0, HAL_CE_SRC_DESC_ADDR_INFO_GATHER) |
1507 		le32_encode_bits(len, HAL_CE_SRC_DESC_ADDR_INFO_LEN);
1508 	desc->meta_info = le32_encode_bits(id, HAL_CE_SRC_DESC_META_INFO_DATA);
1509 }
1510 
1511 void ath12k_hal_ce_dst_set_desc(struct hal_ce_srng_dest_desc *desc, dma_addr_t paddr)
1512 {
1513 	desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1514 	desc->buffer_addr_info =
1515 		le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1516 				 HAL_CE_DEST_DESC_ADDR_INFO_ADDR_HI);
1517 }
1518 
1519 u32 ath12k_hal_ce_dst_status_get_length(struct hal_ce_srng_dst_status_desc *desc)
1520 {
1521 	u32 len;
1522 
1523 	len = le32_get_bits(desc->flags, HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1524 	desc->flags &= ~cpu_to_le32(HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1525 
1526 	return len;
1527 }
1528 
1529 void ath12k_hal_set_link_desc_addr(struct hal_wbm_link_desc *desc, u32 cookie,
1530 				   dma_addr_t paddr)
1531 {
1532 	desc->buf_addr_info.info0 = le32_encode_bits((paddr & HAL_ADDR_LSB_REG_MASK),
1533 						     BUFFER_ADDR_INFO0_ADDR);
1534 	desc->buf_addr_info.info1 =
1535 			le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1536 					 BUFFER_ADDR_INFO1_ADDR) |
1537 			le32_encode_bits(1, BUFFER_ADDR_INFO1_RET_BUF_MGR) |
1538 			le32_encode_bits(cookie, BUFFER_ADDR_INFO1_SW_COOKIE);
1539 }
1540 
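/* Returns the next unprocessed entry of a destination ring without moving
 * the tail pointer, or NULL if the ring is empty.
 */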
1541 void *ath12k_hal_srng_dst_peek(struct ath12k_base *ab, struct hal_srng *srng)
1542 {
1543 	lockdep_assert_held(&srng->lock);
1544 
1545 	if (srng->u.dst_ring.tp != srng->u.dst_ring.cached_hp)
1546 		return (srng->ring_base_vaddr + srng->u.dst_ring.tp);
1547 
1548 	return NULL;
1549 }
1550 
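/* Returns the entry at the tail pointer of a destination ring and advances
 * the tail pointer, or NULL if no unprocessed entries remain.
 */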
1551 void *ath12k_hal_srng_dst_get_next_entry(struct ath12k_base *ab,
1552 					 struct hal_srng *srng)
1553 {
1554 	void *desc;
1555 
1556 	lockdep_assert_held(&srng->lock);
1557 
1558 	if (srng->u.dst_ring.tp == srng->u.dst_ring.cached_hp)
1559 		return NULL;
1560 
1561 	desc = srng->ring_base_vaddr + srng->u.dst_ring.tp;
1562 
1563 	srng->u.dst_ring.tp = (srng->u.dst_ring.tp + srng->entry_size) %
1564 			      srng->ring_size;
1565 
1566 	return desc;
1567 }
1568 
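/* Returns the number of entries available for SW to process in a
 * destination ring (the distance from the tail pointer to the head
 * pointer). When sync_hw_ptr is set, the cached head pointer is refreshed
 * from the HW-updated location first.
 */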
1569 int ath12k_hal_srng_dst_num_free(struct ath12k_base *ab, struct hal_srng *srng,
1570 				 bool sync_hw_ptr)
1571 {
1572 	u32 tp, hp;
1573 
1574 	lockdep_assert_held(&srng->lock);
1575 
1576 	tp = srng->u.dst_ring.tp;
1577 
1578 	if (sync_hw_ptr) {
1579 		hp = *srng->u.dst_ring.hp_addr;
1580 		srng->u.dst_ring.cached_hp = hp;
1581 	} else {
1582 		hp = srng->u.dst_ring.cached_hp;
1583 	}
1584 
1585 	if (hp >= tp)
1586 		return (hp - tp) / srng->entry_size;
1587 	else
1588 		return (srng->ring_size - tp + hp) / srng->entry_size;
1589 }
1590 
1591 /* Returns number of available entries in src ring */
1592 int ath12k_hal_srng_src_num_free(struct ath12k_base *ab, struct hal_srng *srng,
1593 				 bool sync_hw_ptr)
1594 {
1595 	u32 tp, hp;
1596 
1597 	lockdep_assert_held(&srng->lock);
1598 
1599 	hp = srng->u.src_ring.hp;
1600 
1601 	if (sync_hw_ptr) {
1602 		tp = *srng->u.src_ring.tp_addr;
1603 		srng->u.src_ring.cached_tp = tp;
1604 	} else {
1605 		tp = srng->u.src_ring.cached_tp;
1606 	}
1607 
1608 	if (tp > hp)
1609 		return ((tp - hp) / srng->entry_size) - 1;
1610 	else
1611 		return ((srng->ring_size - hp + tp) / srng->entry_size) - 1;
1612 }
1613 
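/* Returns the next free entry of a source ring and advances the head
 * pointer (and reap_hp), or NULL if the ring is full.
 */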
1614 void *ath12k_hal_srng_src_get_next_entry(struct ath12k_base *ab,
1615 					 struct hal_srng *srng)
1616 {
1617 	void *desc;
1618 	u32 next_hp;
1619 
1620 	lockdep_assert_held(&srng->lock);
1621 
1622 	/* TODO: Using % is expensive, but we have to do this since the size of
1623 	 * some SRNG rings is not a power of 2 (due to descriptor sizes). See
1624 	 * whether a separate function can be defined for rings whose size is a
1625 	 * power of 2 (TCL2SW, REO2SW, SW2RXDMA and CE rings) so that the
1626 	 * overhead of % can be avoided by using a mask (with &).
1627 	 */
1628 	next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;
1629 
1630 	if (next_hp == srng->u.src_ring.cached_tp)
1631 		return NULL;
1632 
1633 	desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
1634 	srng->u.src_ring.hp = next_hp;
1635 
1636 	/* TODO: Reap functionality is not used by all rings. If a particular
1637 	 * ring does not use reap functionality, reap_hp need not be updated
1638 	 * with the next_hp pointer. Make sure a separate function is used for
1639 	 * such rings before doing any optimization that removes the reap_hp
1640 	 * update below.
1641 	 */
1642 	srng->u.src_ring.reap_hp = next_hp;
1643 
1644 	return desc;
1645 }
1646 
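/* Advances reap_hp and returns the corresponding entry without moving the
 * head pointer, or NULL if the ring is full. Reaped entries are handed out
 * later by ath12k_hal_srng_src_get_next_reaped().
 */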
1647 void *ath12k_hal_srng_src_reap_next(struct ath12k_base *ab,
1648 				    struct hal_srng *srng)
1649 {
1650 	void *desc;
1651 	u32 next_reap_hp;
1652 
1653 	lockdep_assert_held(&srng->lock);
1654 
1655 	next_reap_hp = (srng->u.src_ring.reap_hp + srng->entry_size) %
1656 		       srng->ring_size;
1657 
1658 	if (next_reap_hp == srng->u.src_ring.cached_tp)
1659 		return NULL;
1660 
1661 	desc = srng->ring_base_vaddr + next_reap_hp;
1662 	srng->u.src_ring.reap_hp = next_reap_hp;
1663 
1664 	return desc;
1665 }
1666 
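/* Returns the next entry previously reaped with
 * ath12k_hal_srng_src_reap_next() and advances the head pointer, or NULL
 * if no reaped entries are pending.
 */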
1667 void *ath12k_hal_srng_src_get_next_reaped(struct ath12k_base *ab,
1668 					  struct hal_srng *srng)
1669 {
1670 	void *desc;
1671 
1672 	lockdep_assert_held(&srng->lock);
1673 
1674 	if (srng->u.src_ring.hp == srng->u.src_ring.reap_hp)
1675 		return NULL;
1676 
1677 	desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
1678 	srng->u.src_ring.hp = (srng->u.src_ring.hp + srng->entry_size) %
1679 			      srng->ring_size;
1680 
1681 	return desc;
1682 }
1683 
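/* Snapshot the HW-updated pointer (tail pointer for source rings, head
 * pointer for destination rings) into the SW cached copy before accessing
 * ring entries. Must be called with srng->lock held.
 */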
1684 void ath12k_hal_srng_access_begin(struct ath12k_base *ab, struct hal_srng *srng)
1685 {
1686 	lockdep_assert_held(&srng->lock);
1687 
1688 	if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1689 		srng->u.src_ring.cached_tp =
1690 			*(volatile u32 *)srng->u.src_ring.tp_addr;
1691 	else
1692 		srng->u.dst_ring.cached_hp = *srng->u.dst_ring.hp_addr;
1693 }
1694 
1695 /* Update cached ring head/tail pointers to HW. ath12k_hal_srng_access_begin()
1696  * should have been called before this.
1697  */
1698 void ath12k_hal_srng_access_end(struct ath12k_base *ab, struct hal_srng *srng)
1699 {
1700 	lockdep_assert_held(&srng->lock);
1701 
1702 	/* TODO: See if we need a write memory barrier here */
1703 	if (srng->flags & HAL_SRNG_FLAGS_LMAC_RING) {
1704 		/* For LMAC rings, ring pointer updates are done through FW and
1705 		 * hence written to a shared memory location that is read by FW
1706 		 */
1707 		if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
1708 			srng->u.src_ring.last_tp =
1709 				*(volatile u32 *)srng->u.src_ring.tp_addr;
1710 			*srng->u.src_ring.hp_addr = srng->u.src_ring.hp;
1711 		} else {
1712 			srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
1713 			*srng->u.dst_ring.tp_addr = srng->u.dst_ring.tp;
1714 		}
1715 	} else {
1716 		if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
1717 			srng->u.src_ring.last_tp =
1718 				*(volatile u32 *)srng->u.src_ring.tp_addr;
1719 			ath12k_hif_write32(ab,
1720 					   (unsigned long)srng->u.src_ring.hp_addr -
1721 					   (unsigned long)ab->mem,
1722 					   srng->u.src_ring.hp);
1723 		} else {
1724 			srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
1725 			ath12k_hif_write32(ab,
1726 					   (unsigned long)srng->u.dst_ring.tp_addr -
1727 					   (unsigned long)ab->mem,
1728 					   srng->u.dst_ring.tp);
1729 		}
1730 	}
1731 
1732 	srng->timestamp = jiffies;
1733 }
1734 
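/* Chain the WBM idle link scatter buffers together and program the scatter
 * list base, size and head/tail pointers into the WBM registers, then
 * enable the idle link ring.
 */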
1735 void ath12k_hal_setup_link_idle_list(struct ath12k_base *ab,
1736 				     struct hal_wbm_idle_scatter_list *sbuf,
1737 				     u32 nsbufs, u32 tot_link_desc,
1738 				     u32 end_offset)
1739 {
1740 	struct ath12k_buffer_addr *link_addr;
1741 	int i;
1742 	u32 reg_scatter_buf_sz = HAL_WBM_IDLE_SCATTER_BUF_SIZE / 64;
1743 	u32 val;
1744 
1745 	link_addr = (void *)sbuf[0].vaddr + HAL_WBM_IDLE_SCATTER_BUF_SIZE;
1746 
1747 	for (i = 1; i < nsbufs; i++) {
1748 		link_addr->info0 = cpu_to_le32(sbuf[i].paddr & HAL_ADDR_LSB_REG_MASK);
1749 
1750 		link_addr->info1 =
1751 			le32_encode_bits((u64)sbuf[i].paddr >> HAL_ADDR_MSB_REG_SHIFT,
1752 					 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
1753 			le32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
1754 					 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG);
1755 
1756 		link_addr = (void *)sbuf[i].vaddr +
1757 			     HAL_WBM_IDLE_SCATTER_BUF_SIZE;
1758 	}
1759 
1760 	val = u32_encode_bits(reg_scatter_buf_sz, HAL_WBM_SCATTER_BUFFER_SIZE) |
1761 	      u32_encode_bits(0x1, HAL_WBM_LINK_DESC_IDLE_LIST_MODE);
1762 
1763 	ath12k_hif_write32(ab,
1764 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1765 			   HAL_WBM_R0_IDLE_LIST_CONTROL_ADDR(ab),
1766 			   val);
1767 
1768 	val = u32_encode_bits(reg_scatter_buf_sz * nsbufs,
1769 			      HAL_WBM_SCATTER_RING_SIZE_OF_IDLE_LINK_DESC_LIST);
1770 	ath12k_hif_write32(ab,
1771 			   HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_R0_IDLE_LIST_SIZE_ADDR(ab),
1772 			   val);
1773 
1774 	val = u32_encode_bits(sbuf[0].paddr & HAL_ADDR_LSB_REG_MASK,
1775 			      BUFFER_ADDR_INFO0_ADDR);
1776 	ath12k_hif_write32(ab,
1777 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1778 			   HAL_WBM_SCATTERED_RING_BASE_LSB(ab),
1779 			   val);
1780 
1781 	val = u32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
1782 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG) |
1783 	      u32_encode_bits((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT,
1784 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32);
1785 	ath12k_hif_write32(ab,
1786 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1787 			   HAL_WBM_SCATTERED_RING_BASE_MSB(ab),
1788 			   val);
1789 
1790 	/* Set up head and tail pointers for the idle list */
1791 	val = u32_encode_bits(sbuf[nsbufs - 1].paddr, BUFFER_ADDR_INFO0_ADDR);
1792 	ath12k_hif_write32(ab,
1793 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1794 			   HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
1795 			   val);
1796 
1797 	val = u32_encode_bits(((u64)sbuf[nsbufs - 1].paddr >> HAL_ADDR_MSB_REG_SHIFT),
1798 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
1799 	      u32_encode_bits((end_offset >> 2),
1800 			      HAL_WBM_SCATTERED_DESC_HEAD_P_OFFSET_IX1);
1801 	ath12k_hif_write32(ab,
1802 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1803 			   HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX1(ab),
1804 			   val);
1805 
1806 	val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
1807 	ath12k_hif_write32(ab,
1808 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1809 			   HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
1810 			   val);
1811 
1812 	val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
1813 	ath12k_hif_write32(ab,
1814 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1815 			   HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX0(ab),
1816 			   val);
1817 
1818 	val = u32_encode_bits(((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT),
1819 			      HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
1820 	      u32_encode_bits(0, HAL_WBM_SCATTERED_DESC_TAIL_P_OFFSET_IX1);
1821 	ath12k_hif_write32(ab,
1822 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1823 			   HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX1(ab),
1824 			   val);
1825 
1826 	val = 2 * tot_link_desc;
1827 	ath12k_hif_write32(ab,
1828 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1829 			   HAL_WBM_SCATTERED_DESC_PTR_HP_ADDR(ab),
1830 			   val);
1831 
1832 	/* Enable the SRNG */
1833 	val = u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_SRNG_ENABLE) |
1834 	      u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_RIND_ID_DISABLE);
1835 	ath12k_hif_write32(ab,
1836 			   HAL_SEQ_WCSS_UMAC_WBM_REG +
1837 			   HAL_WBM_IDLE_LINK_RING_MISC_ADDR(ab),
1838 			   val);
1839 }
1840 
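/* Initialize the SW state of the ring identified by (type, ring_num,
 * mac_id) from @params, program the ring HW registers for UMAC rings and
 * return the global ring id (negative errno on failure).
 */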
1841 int ath12k_hal_srng_setup(struct ath12k_base *ab, enum hal_ring_type type,
1842 			  int ring_num, int mac_id,
1843 			  struct hal_srng_params *params)
1844 {
1845 	struct ath12k_hal *hal = &ab->hal;
1846 	struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
1847 	struct hal_srng *srng;
1848 	int ring_id;
1849 	u32 idx;
1850 	int i;
1851 	u32 reg_base;
1852 
1853 	ring_id = ath12k_hal_srng_get_ring_id(ab, type, ring_num, mac_id);
1854 	if (ring_id < 0)
1855 		return ring_id;
1856 
1857 	srng = &hal->srng_list[ring_id];
1858 
1859 	srng->ring_id = ring_id;
1860 	srng->ring_dir = srng_config->ring_dir;
1861 	srng->ring_base_paddr = params->ring_base_paddr;
1862 	srng->ring_base_vaddr = params->ring_base_vaddr;
1863 	srng->entry_size = srng_config->entry_size;
1864 	srng->num_entries = params->num_entries;
1865 	srng->ring_size = srng->entry_size * srng->num_entries;
1866 	srng->intr_batch_cntr_thres_entries =
1867 				params->intr_batch_cntr_thres_entries;
1868 	srng->intr_timer_thres_us = params->intr_timer_thres_us;
1869 	srng->flags = params->flags;
1870 	srng->msi_addr = params->msi_addr;
1871 	srng->msi2_addr = params->msi2_addr;
1872 	srng->msi_data = params->msi_data;
1873 	srng->msi2_data = params->msi2_data;
1874 	srng->initialized = 1;
1875 	spin_lock_init(&srng->lock);
1876 	lockdep_set_class(&srng->lock, &srng->lock_key);
1877 
1878 	for (i = 0; i < HAL_SRNG_NUM_REG_GRP; i++) {
1879 		srng->hwreg_base[i] = srng_config->reg_start[i] +
1880 				      (ring_num * srng_config->reg_size[i]);
1881 	}
1882 
1883 	memset(srng->ring_base_vaddr, 0,
1884 	       (srng->entry_size * srng->num_entries) << 2);
1885 
1886 	reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1887 
1888 	if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
1889 		srng->u.src_ring.hp = 0;
1890 		srng->u.src_ring.cached_tp = 0;
1891 		srng->u.src_ring.reap_hp = srng->ring_size - srng->entry_size;
1892 		srng->u.src_ring.tp_addr = (void *)(hal->rdp.vaddr + ring_id);
1893 		srng->u.src_ring.low_threshold = params->low_threshold *
1894 						 srng->entry_size;
1895 		if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
1896 			if (!ab->hw_params->supports_shadow_regs)
1897 				srng->u.src_ring.hp_addr =
1898 					(u32 *)((unsigned long)ab->mem + reg_base);
1899 			else
1900 				ath12k_dbg(ab, ATH12K_DBG_HAL,
1901 					   "hal type %d ring_num %d reg_base 0x%x shadow 0x%lx\n",
1902 					   type, ring_num,
1903 					   reg_base,
1904 					   (unsigned long)srng->u.src_ring.hp_addr -
1905 					   (unsigned long)ab->mem);
1906 		} else {
1907 			idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
1908 			srng->u.src_ring.hp_addr = (void *)(hal->wrp.vaddr +
1909 						   idx);
1910 			srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
1911 		}
1912 	} else {
1913 		/* During initialization the loop count in all the descriptors
1914 		 * will be set to zero, and HW will set it to 1 on completing
1915 		 * the descriptor update in the first loop, then increment it
1916 		 * by 1 on subsequent loops (the loop count wraps around after
1917 		 * reaching 0xffff). The 'loop_cnt' in the SW ring state is the
1918 		 * expected loop count in descriptors updated by HW (to be
1919 		 * processed by SW).
1920 		 */
1921 		srng->u.dst_ring.loop_cnt = 1;
1922 		srng->u.dst_ring.tp = 0;
1923 		srng->u.dst_ring.cached_hp = 0;
1924 		srng->u.dst_ring.hp_addr = (void *)(hal->rdp.vaddr + ring_id);
1925 		if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
1926 			if (!ab->hw_params->supports_shadow_regs)
1927 				srng->u.dst_ring.tp_addr =
1928 					(u32 *)((unsigned long)ab->mem + reg_base +
1929 					(HAL_REO1_RING_TP - HAL_REO1_RING_HP));
1930 			else
1931 				ath12k_dbg(ab, ATH12K_DBG_HAL,
1932 					   "type %d ring_num %d target_reg 0x%x shadow 0x%lx\n",
1933 					   type, ring_num,
1934 					   reg_base + HAL_REO1_RING_TP - HAL_REO1_RING_HP,
1935 					   (unsigned long)srng->u.dst_ring.tp_addr -
1936 					   (unsigned long)ab->mem);
1937 		} else {
1938 			/* For PMAC & DMAC rings, tail pointer updates will be done
1939 			 * through FW by writing to a shared memory location
1940 			 */
1941 			idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
1942 			srng->u.dst_ring.tp_addr = (void *)(hal->wrp.vaddr +
1943 						   idx);
1944 			srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
1945 		}
1946 	}
1947 
1948 	if (srng_config->mac_type != ATH12K_HAL_SRNG_UMAC)
1949 		return ring_id;
1950 
1951 	ath12k_hal_srng_hw_init(ab, srng);
1952 
1953 	if (type == HAL_CE_DST) {
1954 		srng->u.dst_ring.max_buffer_length = params->max_buffer_len;
1955 		ath12k_hal_ce_dst_setup(ab, srng, ring_num);
1956 	}
1957 
1958 	return ring_id;
1959 }
1960 
1961 static void ath12k_hal_srng_update_hp_tp_addr(struct ath12k_base *ab,
1962 					      int shadow_cfg_idx,
1963 					      enum hal_ring_type ring_type,
1964 					      int ring_num)
1965 {
1966 	struct hal_srng *srng;
1967 	struct ath12k_hal *hal = &ab->hal;
1968 	int ring_id;
1969 	struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
1970 
1971 	ring_id = ath12k_hal_srng_get_ring_id(ab, ring_type, ring_num, 0);
1972 	if (ring_id < 0)
1973 		return;
1974 
1975 	srng = &hal->srng_list[ring_id];
1976 
1977 	if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
1978 		srng->u.dst_ring.tp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
1979 						   (unsigned long)ab->mem);
1980 	else
1981 		srng->u.src_ring.hp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
1982 						   (unsigned long)ab->mem);
1983 }
1984 
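/* Assign the next free shadow register to the ring's HP (source ring) or
 * TP (destination ring) register and repoint the ring's hp/tp address at
 * the shadow location.
 */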
1985 int ath12k_hal_srng_update_shadow_config(struct ath12k_base *ab,
1986 					 enum hal_ring_type ring_type,
1987 					 int ring_num)
1988 {
1989 	struct ath12k_hal *hal = &ab->hal;
1990 	struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
1991 	int shadow_cfg_idx = hal->num_shadow_reg_configured;
1992 	u32 target_reg;
1993 
1994 	if (shadow_cfg_idx >= HAL_SHADOW_NUM_REGS)
1995 		return -EINVAL;
1996 
1997 	hal->num_shadow_reg_configured++;
1998 
1999 	target_reg = srng_config->reg_start[HAL_HP_OFFSET_IN_REG_START];
2000 	target_reg += srng_config->reg_size[HAL_HP_OFFSET_IN_REG_START] *
2001 		ring_num;
2002 
2003 	/* For destination ring, shadow the TP */
2004 	if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2005 		target_reg += HAL_OFFSET_FROM_HP_TO_TP;
2006 
2007 	hal->shadow_reg_addr[shadow_cfg_idx] = target_reg;
2008 
2009 	/* update hp/tp addr in the hal structure */
2010 	ath12k_hal_srng_update_hp_tp_addr(ab, shadow_cfg_idx, ring_type,
2011 					  ring_num);
2012 
2013 	ath12k_dbg(ab, ATH12K_DBG_HAL,
2014 		   "target_reg 0x%x shadow reg 0x%x shadow_idx %d ring_type %d ring_num %d",
2015 		   target_reg,
2016 		   HAL_SHADOW_REG(shadow_cfg_idx),
2017 		   shadow_cfg_idx,
2018 		   ring_type, ring_num);
2019 
2020 	return 0;
2021 }
2022 
2023 void ath12k_hal_srng_shadow_config(struct ath12k_base *ab)
2024 {
2025 	struct ath12k_hal *hal = &ab->hal;
2026 	int ring_type, ring_num;
2027 
2028 	/* update the shadow config for all UMAC, non-CE srngs */
2029 	for (ring_type = 0; ring_type < HAL_MAX_RING_TYPES; ring_type++) {
2030 		struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2031 
2032 		if (ring_type == HAL_CE_SRC ||
2033 		    ring_type == HAL_CE_DST ||
2034 		    ring_type == HAL_CE_DST_STATUS)
2035 			continue;
2036 
2037 		if (srng_config->mac_type == ATH12K_HAL_SRNG_DMAC ||
2038 		    srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
2039 			continue;
2040 
2041 		for (ring_num = 0; ring_num < srng_config->max_rings; ring_num++)
2042 			ath12k_hal_srng_update_shadow_config(ab, ring_type, ring_num);
2043 	}
2044 }
2045 
2046 void ath12k_hal_srng_get_shadow_config(struct ath12k_base *ab,
2047 				       u32 **cfg, u32 *len)
2048 {
2049 	struct ath12k_hal *hal = &ab->hal;
2050 
2051 	*len = hal->num_shadow_reg_configured;
2052 	*cfg = hal->shadow_reg_addr;
2053 }
2054 
2055 void ath12k_hal_srng_shadow_update_hp_tp(struct ath12k_base *ab,
2056 					 struct hal_srng *srng)
2057 {
2058 	lockdep_assert_held(&srng->lock);
2059 
2060 	/* check whether the ring is empty. Update the shadow
2061 	 * HP only when the ring isn't empty.
2062 	 */
2063 	if (srng->ring_dir == HAL_SRNG_DIR_SRC &&
2064 	    *srng->u.src_ring.tp_addr != srng->u.src_ring.hp)
2065 		ath12k_hal_srng_access_end(ab, srng);
2066 }
2067 
2068 static void ath12k_hal_register_srng_lock_keys(struct ath12k_base *ab)
2069 {
2070 	struct ath12k_hal *hal = &ab->hal;
2071 	u32 ring_id;
2072 
2073 	for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2074 		lockdep_register_key(&hal->srng_list[ring_id].lock_key);
2075 }
2076 
2077 static void ath12k_hal_unregister_srng_lock_keys(struct ath12k_base *ab)
2078 {
2079 	struct ath12k_hal *hal = &ab->hal;
2080 	u32 ring_id;
2081 
2082 	for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2083 		lockdep_unregister_key(&hal->srng_list[ring_id].lock_key);
2084 }
2085 
2086 int ath12k_hal_srng_init(struct ath12k_base *ab)
2087 {
2088 	struct ath12k_hal *hal = &ab->hal;
2089 	int ret;
2090 
2091 	memset(hal, 0, sizeof(*hal));
2092 
2093 	ret = ab->hw_params->hal_ops->create_srng_config(ab);
2094 	if (ret)
2095 		goto err_hal;
2096 
2097 	ret = ath12k_hal_alloc_cont_rdp(ab);
2098 	if (ret)
2099 		goto err_hal;
2100 
2101 	ret = ath12k_hal_alloc_cont_wrp(ab);
2102 	if (ret)
2103 		goto err_free_cont_rdp;
2104 
2105 	ath12k_hal_register_srng_lock_keys(ab);
2106 
2107 	return 0;
2108 
2109 err_free_cont_rdp:
2110 	ath12k_hal_free_cont_rdp(ab);
2111 
2112 err_hal:
2113 	return ret;
2114 }
2115 
2116 void ath12k_hal_srng_deinit(struct ath12k_base *ab)
2117 {
2118 	struct ath12k_hal *hal = &ab->hal;
2119 
2120 	ath12k_hal_unregister_srng_lock_keys(ab);
2121 	ath12k_hal_free_cont_rdp(ab);
2122 	ath12k_hal_free_cont_wrp(ab);
2123 	kfree(hal->srng_config);
2124 	hal->srng_config = NULL;
2125 }
2126 
2127 void ath12k_hal_dump_srng_stats(struct ath12k_base *ab)
2128 {
2129 	struct hal_srng *srng;
2130 	struct ath12k_ext_irq_grp *irq_grp;
2131 	struct ath12k_ce_pipe *ce_pipe;
2132 	int i;
2133 
2134 	ath12k_err(ab, "Last interrupt received for each CE:\n");
2135 	for (i = 0; i < ab->hw_params->ce_count; i++) {
2136 		ce_pipe = &ab->ce.ce_pipe[i];
2137 
2138 		if (ath12k_ce_get_attr_flags(ab, i) & CE_ATTR_DIS_INTR)
2139 			continue;
2140 
2141 		ath12k_err(ab, "CE_id %d pipe_num %d %ums before\n",
2142 			   i, ce_pipe->pipe_num,
2143 			   jiffies_to_msecs(jiffies - ce_pipe->timestamp));
2144 	}
2145 
2146 	ath12k_err(ab, "\nLast interrupt received for each group:\n");
2147 	for (i = 0; i < ATH12K_EXT_IRQ_GRP_NUM_MAX; i++) {
2148 		irq_grp = &ab->ext_irq_grp[i];
2149 		ath12k_err(ab, "group_id %d %ums before\n",
2150 			   irq_grp->grp_id,
2151 			   jiffies_to_msecs(jiffies - irq_grp->timestamp));
2152 	}
2153 
2154 	for (i = 0; i < HAL_SRNG_RING_ID_MAX; i++) {
2155 		srng = &ab->hal.srng_list[i];
2156 
2157 		if (!srng->initialized)
2158 			continue;
2159 
2160 		if (srng->ring_dir == HAL_SRNG_DIR_SRC)
2161 			ath12k_err(ab,
2162 				   "src srng id %u hp %u, reap_hp %u, cur tp %u, cached tp %u last tp %u napi processed before %ums\n",
2163 				   srng->ring_id, srng->u.src_ring.hp,
2164 				   srng->u.src_ring.reap_hp,
2165 				   *srng->u.src_ring.tp_addr, srng->u.src_ring.cached_tp,
2166 				   srng->u.src_ring.last_tp,
2167 				   jiffies_to_msecs(jiffies - srng->timestamp));
2168 		else if (srng->ring_dir == HAL_SRNG_DIR_DST)
2169 			ath12k_err(ab,
2170 				   "dst srng id %u tp %u, cur hp %u, cached hp %u last hp %u napi processed before %ums\n",
2171 				   srng->ring_id, srng->u.dst_ring.tp,
2172 				   *srng->u.dst_ring.hp_addr,
2173 				   srng->u.dst_ring.cached_hp,
2174 				   srng->u.dst_ring.last_hp,
2175 				   jiffies_to_msecs(jiffies - srng->timestamp));
2176 	}
2177 }
2178