/* xref: /openbmc/linux/sound/pci/asihpi/hpifunc.c (revision 4800cd83) */
1 
2 #include "hpi_internal.h"
3 #include "hpimsginit.h"
4 
5 #include "hpidebug.h"
6 
7 struct hpi_handle {
8 	unsigned int obj_index:12;
9 	unsigned int obj_type:4;
10 	unsigned int adapter_index:14;
11 	unsigned int spare:1;
12 	unsigned int read_only:1;
13 };
14 
15 union handle_word {
16 	struct hpi_handle h;
17 	u32 w;
18 };
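
/*
 * An HPI object handle packs an object index, an object type and an
 * adapter index into a single u32 via the bit-fields above.  Bit-field
 * ordering is implementation defined, so a handle is only meaningful to
 * code built with the same compiler; decode it with
 * hpi_handle_to_indexes()/hpi_handle_object() below rather than by
 * shifting and masking.
 *
 * Illustrative use (a sketch, not part of the driver):
 *	u32 h = hpi_indexes_to_handle(HPI_OBJ_OSTREAM, 0, 1);
 *	u16 adapter, stream;
 *	hpi_handle_to_indexes(h, &adapter, &stream);  => adapter == 0, stream == 1
 */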
19 
20 u32 hpi_indexes_to_handle(const char c_object, const u16 adapter_index,
21 	const u16 object_index)
22 {
23 	union handle_word handle;
24 
25 	handle.h.adapter_index = adapter_index;
26 	handle.h.spare = 0;
27 	handle.h.read_only = 0;
28 	handle.h.obj_type = c_object;
29 	handle.h.obj_index = object_index;
30 	return handle.w;
31 }
32 
33 void hpi_handle_to_indexes(const u32 handle, u16 *pw_adapter_index,
34 	u16 *pw_object_index)
35 {
36 	union handle_word uhandle;
37 	uhandle.w = handle;
38 
39 	if (pw_adapter_index)
40 		*pw_adapter_index = (u16)uhandle.h.adapter_index;
41 	if (pw_object_index)
42 		*pw_object_index = (u16)uhandle.h.obj_index;
43 }
44 
45 char hpi_handle_object(const u32 handle)
46 {
47 	union handle_word uhandle;
48 	uhandle.w = handle;
49 	return (char)uhandle.h.obj_type;
50 }
51 
52 #define u32TOINDEX(h, i1) \
53 do {\
54 	if (h == 0) \
55 		return HPI_ERROR_INVALID_OBJ; \
56 	else \
57 		hpi_handle_to_indexes(h, i1, NULL); \
58 } while (0)
59 
60 #define u32TOINDEXES(h, i1, i2) \
61 do {\
62 	if (h == 0) \
63 		return HPI_ERROR_INVALID_OBJ; \
64 	else \
65 		hpi_handle_to_indexes(h, i1, i2);\
66 } while (0)
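
/*
 * u32TOINDEX()/u32TOINDEXES() validate a handle and unpack it into the
 * adapter_index/obj_index fields of an hpi_message.  On a zero (invalid)
 * handle they expand to "return HPI_ERROR_INVALID_OBJ;" in the *calling*
 * function, which is why every handle-based function below passes its
 * handle through one of these macros before sending the message.
 */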
67 
68 void hpi_format_to_msg(struct hpi_msg_format *pMF,
69 	const struct hpi_format *pF)
70 {
71 	pMF->sample_rate = pF->sample_rate;
72 	pMF->bit_rate = pF->bit_rate;
73 	pMF->attributes = pF->attributes;
74 	pMF->channels = pF->channels;
75 	pMF->format = pF->format;
76 }
77 
78 static void hpi_msg_to_format(struct hpi_format *pF,
79 	struct hpi_msg_format *pMF)
80 {
81 	pF->sample_rate = pMF->sample_rate;
82 	pF->bit_rate = pMF->bit_rate;
83 	pF->attributes = pMF->attributes;
84 	pF->channels = pMF->channels;
85 	pF->format = pMF->format;
86 	pF->mode_legacy = 0;
87 	pF->unused = 0;
88 }
89 
90 void hpi_stream_response_to_legacy(struct hpi_stream_res *pSR)
91 {
92 	pSR->u.legacy_stream_info.auxiliary_data_available =
93 		pSR->u.stream_info.auxiliary_data_available;
94 	pSR->u.legacy_stream_info.state = pSR->u.stream_info.state;
95 }
96 
97 static struct hpi_hsubsys gh_subsys;
98 
99 struct hpi_hsubsys *hpi_subsys_create(void)
100 {
101 	struct hpi_message hm;
102 	struct hpi_response hr;
103 
104 	memset(&gh_subsys, 0, sizeof(struct hpi_hsubsys));
105 
106 	{
107 		hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
108 			HPI_SUBSYS_OPEN);
109 		hpi_send_recv(&hm, &hr);
110 
111 		if (hr.error == 0)
112 			return &gh_subsys;
113 
114 	}
115 	return NULL;
116 }
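
/*
 * hpi_subsys_create()/hpi_subsys_free() bracket all other HPI calls.  The
 * subsystem object is a single static instance (gh_subsys), so the pointer
 * returned here is really just a token showing that HPI_SUBSYS_OPEN
 * succeeded.  A minimal usage sketch (illustrative only):
 *
 *	struct hpi_hsubsys *ss = hpi_subsys_create();
 *	if (ss) {
 *		u32 version;
 *		hpi_subsys_get_version(ss, &version);
 *		hpi_subsys_free(ss);
 *	}
 */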
117 
118 void hpi_subsys_free(const struct hpi_hsubsys *ph_subsys)
119 {
120 	struct hpi_message hm;
121 	struct hpi_response hr;
122 
123 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
124 		HPI_SUBSYS_CLOSE);
125 	hpi_send_recv(&hm, &hr);
126 
127 }
128 
129 u16 hpi_subsys_get_version(const struct hpi_hsubsys *ph_subsys, u32 *pversion)
130 {
131 	struct hpi_message hm;
132 	struct hpi_response hr;
133 
134 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
135 		HPI_SUBSYS_GET_VERSION);
136 	hpi_send_recv(&hm, &hr);
137 	*pversion = hr.u.s.version;
138 	return hr.error;
139 }
140 
141 u16 hpi_subsys_get_version_ex(const struct hpi_hsubsys *ph_subsys,
142 	u32 *pversion_ex)
143 {
144 	struct hpi_message hm;
145 	struct hpi_response hr;
146 
147 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
148 		HPI_SUBSYS_GET_VERSION);
149 	hpi_send_recv(&hm, &hr);
150 	*pversion_ex = hr.u.s.data;
151 	return hr.error;
152 }
153 
154 u16 hpi_subsys_get_info(const struct hpi_hsubsys *ph_subsys, u32 *pversion,
155 	u16 *pw_num_adapters, u16 aw_adapter_list[], u16 list_length)
156 {
157 	struct hpi_message hm;
158 	struct hpi_response hr;
159 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
160 		HPI_SUBSYS_GET_INFO);
161 
162 	hpi_send_recv(&hm, &hr);
163 
164 	*pversion = hr.u.s.version;
165 	if (list_length > HPI_MAX_ADAPTERS)
166 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
167 			HPI_MAX_ADAPTERS * sizeof(u16));
168 	else
169 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list, list_length * sizeof(u16));
170 	*pw_num_adapters = hr.u.s.num_adapters;
171 	return hr.error;
172 }
173 
174 u16 hpi_subsys_find_adapters(const struct hpi_hsubsys *ph_subsys,
175 	u16 *pw_num_adapters, u16 aw_adapter_list[], u16 list_length)
176 {
177 	struct hpi_message hm;
178 	struct hpi_response hr;
179 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
180 		HPI_SUBSYS_FIND_ADAPTERS);
181 
182 	hpi_send_recv(&hm, &hr);
183 
184 	if (list_length > HPI_MAX_ADAPTERS) {
185 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
186 			HPI_MAX_ADAPTERS * sizeof(u16));
187 		memset(&aw_adapter_list[HPI_MAX_ADAPTERS], 0,
188 			(list_length - HPI_MAX_ADAPTERS) * sizeof(u16));
189 	} else
190 		memcpy(aw_adapter_list, &hr.u.s.aw_adapter_list,
191 			list_length * sizeof(u16));
192 	*pw_num_adapters = hr.u.s.num_adapters;
193 
194 	return hr.error;
195 }
196 
197 u16 hpi_subsys_create_adapter(const struct hpi_hsubsys *ph_subsys,
198 	const struct hpi_resource *p_resource, u16 *pw_adapter_index)
199 {
200 	struct hpi_message hm;
201 	struct hpi_response hr;
202 
203 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
204 		HPI_SUBSYS_CREATE_ADAPTER);
205 	hm.u.s.resource = *p_resource;
206 
207 	hpi_send_recv(&hm, &hr);
208 
209 	*pw_adapter_index = hr.u.s.adapter_index;
210 	return hr.error;
211 }
212 
213 u16 hpi_subsys_delete_adapter(const struct hpi_hsubsys *ph_subsys,
214 	u16 adapter_index)
215 {
216 	struct hpi_message hm;
217 	struct hpi_response hr;
218 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
219 		HPI_SUBSYS_DELETE_ADAPTER);
220 	hm.adapter_index = adapter_index;
221 	hpi_send_recv(&hm, &hr);
222 	return hr.error;
223 }
224 
225 u16 hpi_subsys_get_num_adapters(const struct hpi_hsubsys *ph_subsys,
226 	int *pn_num_adapters)
227 {
228 	struct hpi_message hm;
229 	struct hpi_response hr;
230 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
231 		HPI_SUBSYS_GET_NUM_ADAPTERS);
232 	hpi_send_recv(&hm, &hr);
233 	*pn_num_adapters = (int)hr.u.s.num_adapters;
234 	return hr.error;
235 }
236 
237 u16 hpi_subsys_get_adapter(const struct hpi_hsubsys *ph_subsys, int iterator,
238 	u32 *padapter_index, u16 *pw_adapter_type)
239 {
240 	struct hpi_message hm;
241 	struct hpi_response hr;
242 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
243 		HPI_SUBSYS_GET_ADAPTER);
244 	hm.adapter_index = (u16)iterator;
245 	hpi_send_recv(&hm, &hr);
246 	*padapter_index = hr.u.s.adapter_index;
247 	*pw_adapter_type = hr.u.s.aw_adapter_list[0];
248 	return hr.error;
249 }
250 
251 u16 hpi_subsys_set_host_network_interface(const struct hpi_hsubsys *ph_subsys,
252 	const char *sz_interface)
253 {
254 	struct hpi_message hm;
255 	struct hpi_response hr;
256 	hpi_init_message_response(&hm, &hr, HPI_OBJ_SUBSYSTEM,
257 		HPI_SUBSYS_SET_NETWORK_INTERFACE);
258 	if (sz_interface == NULL)
259 		return HPI_ERROR_INVALID_RESOURCE;
260 	hm.u.s.resource.r.net_if = sz_interface;
261 	hpi_send_recv(&hm, &hr);
262 	return hr.error;
263 }
264 
265 u16 hpi_adapter_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index)
266 {
267 	struct hpi_message hm;
268 	struct hpi_response hr;
269 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
270 		HPI_ADAPTER_OPEN);
271 	hm.adapter_index = adapter_index;
272 
273 	hpi_send_recv(&hm, &hr);
274 
275 	return hr.error;
276 
277 }
278 
279 u16 hpi_adapter_close(const struct hpi_hsubsys *ph_subsys, u16 adapter_index)
280 {
281 	struct hpi_message hm;
282 	struct hpi_response hr;
283 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
284 		HPI_ADAPTER_CLOSE);
285 	hm.adapter_index = adapter_index;
286 
287 	hpi_send_recv(&hm, &hr);
288 
289 	return hr.error;
290 }
291 
292 u16 hpi_adapter_set_mode(const struct hpi_hsubsys *ph_subsys,
293 	u16 adapter_index, u32 adapter_mode)
294 {
295 	return hpi_adapter_set_mode_ex(ph_subsys, adapter_index, adapter_mode,
296 		HPI_ADAPTER_MODE_SET);
297 }
298 
299 u16 hpi_adapter_set_mode_ex(const struct hpi_hsubsys *ph_subsys,
300 	u16 adapter_index, u32 adapter_mode, u16 query_or_set)
301 {
302 	struct hpi_message hm;
303 	struct hpi_response hr;
304 
305 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
306 		HPI_ADAPTER_SET_MODE);
307 	hm.adapter_index = adapter_index;
308 	hm.u.a.adapter_mode = adapter_mode;
309 	hm.u.a.assert_id = query_or_set;
310 	hpi_send_recv(&hm, &hr);
311 	return hr.error;
312 }
313 
314 u16 hpi_adapter_get_mode(const struct hpi_hsubsys *ph_subsys,
315 	u16 adapter_index, u32 *padapter_mode)
316 {
317 	struct hpi_message hm;
318 	struct hpi_response hr;
319 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
320 		HPI_ADAPTER_GET_MODE);
321 	hm.adapter_index = adapter_index;
322 	hpi_send_recv(&hm, &hr);
323 	if (padapter_mode)
324 		*padapter_mode = hr.u.a.serial_number;
325 	return hr.error;
326 }
327 
328 u16 hpi_adapter_get_info(const struct hpi_hsubsys *ph_subsys,
329 	u16 adapter_index, u16 *pw_num_outstreams, u16 *pw_num_instreams,
330 	u16 *pw_version, u32 *pserial_number, u16 *pw_adapter_type)
331 {
332 	struct hpi_message hm;
333 	struct hpi_response hr;
334 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
335 		HPI_ADAPTER_GET_INFO);
336 	hm.adapter_index = adapter_index;
337 
338 	hpi_send_recv(&hm, &hr);
339 
340 	*pw_adapter_type = hr.u.a.adapter_type;
341 	*pw_num_outstreams = hr.u.a.num_outstreams;
342 	*pw_num_instreams = hr.u.a.num_instreams;
343 	*pw_version = hr.u.a.version;
344 	*pserial_number = hr.u.a.serial_number;
345 	return hr.error;
346 }
347 
348 u16 hpi_adapter_get_module_by_index(const struct hpi_hsubsys *ph_subsys,
349 	u16 adapter_index, u16 module_index, u16 *pw_num_outputs,
350 	u16 *pw_num_inputs, u16 *pw_version, u32 *pserial_number,
351 	u16 *pw_module_type, u32 *ph_module)
352 {
353 	struct hpi_message hm;
354 	struct hpi_response hr;
355 
356 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
357 		HPI_ADAPTER_MODULE_INFO);
358 	hm.adapter_index = adapter_index;
359 	hm.u.ax.module_info.index = module_index;
360 
361 	hpi_send_recv(&hm, &hr);
362 
363 	*pw_module_type = hr.u.a.adapter_type;
364 	*pw_num_outputs = hr.u.a.num_outstreams;
365 	*pw_num_inputs = hr.u.a.num_instreams;
366 	*pw_version = hr.u.a.version;
367 	*pserial_number = hr.u.a.serial_number;
368 	*ph_module = 0;
369 
370 	return hr.error;
371 }
372 
373 u16 hpi_adapter_get_assert(const struct hpi_hsubsys *ph_subsys,
374 	u16 adapter_index, u16 *assert_present, char *psz_assert,
375 	u16 *pw_line_number)
376 {
377 	struct hpi_message hm;
378 	struct hpi_response hr;
379 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
380 		HPI_ADAPTER_GET_ASSERT);
381 	hm.adapter_index = adapter_index;
382 	hpi_send_recv(&hm, &hr);
383 
384 	*assert_present = 0;
385 
386 	if (!hr.error) {
387 
388 		*pw_line_number = (u16)hr.u.a.serial_number;
389 		if (*pw_line_number) {
390 
391 			int i;
392 			char *src = (char *)hr.u.a.sz_adapter_assert;
393 			char *dst = psz_assert;
394 
395 			*assert_present = 1;
396 
397 			for (i = 0; i < HPI_STRING_LEN; i++) {
398 				char c;
399 				c = *src++;
400 				*dst++ = c;
401 				if (c == 0)
402 					break;
403 			}
404 
405 		}
406 	}
407 	return hr.error;
408 }
409 
410 u16 hpi_adapter_get_assert_ex(const struct hpi_hsubsys *ph_subsys,
411 	u16 adapter_index, u16 *assert_present, char *psz_assert,
412 	u32 *pline_number, u16 *pw_assert_on_dsp)
413 {
414 	struct hpi_message hm;
415 	struct hpi_response hr;
416 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
417 		HPI_ADAPTER_GET_ASSERT);
418 	hm.adapter_index = adapter_index;
419 
420 	hpi_send_recv(&hm, &hr);
421 
422 	*assert_present = 0;
423 
424 	if (!hr.error) {
425 
426 		*pline_number = hr.u.a.serial_number;
427 
428 		*assert_present = hr.u.a.adapter_type;
429 
430 		*pw_assert_on_dsp = hr.u.a.adapter_index;
431 
432 		if (!*assert_present && *pline_number)
433 
434 			*assert_present = 1;
435 
436 		if (*assert_present) {
437 
438 			int i;
439 			char *src = (char *)hr.u.a.sz_adapter_assert;
440 			char *dst = psz_assert;
441 
442 			for (i = 0; i < HPI_STRING_LEN; i++) {
443 				char c;
444 				c = *src++;
445 				*dst++ = c;
446 				if (c == 0)
447 					break;
448 			}
449 
450 		} else {
451 			*psz_assert = 0;
452 		}
453 	}
454 	return hr.error;
455 }
456 
457 u16 hpi_adapter_test_assert(const struct hpi_hsubsys *ph_subsys,
458 	u16 adapter_index, u16 assert_id)
459 {
460 	struct hpi_message hm;
461 	struct hpi_response hr;
462 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
463 		HPI_ADAPTER_TEST_ASSERT);
464 	hm.adapter_index = adapter_index;
465 	hm.u.a.assert_id = assert_id;
466 
467 	hpi_send_recv(&hm, &hr);
468 
469 	return hr.error;
470 }
471 
472 u16 hpi_adapter_enable_capability(const struct hpi_hsubsys *ph_subsys,
473 	u16 adapter_index, u16 capability, u32 key)
474 {
475 	struct hpi_message hm;
476 	struct hpi_response hr;
477 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
478 		HPI_ADAPTER_ENABLE_CAPABILITY);
479 	hm.adapter_index = adapter_index;
480 	hm.u.a.assert_id = capability;
481 	hm.u.a.adapter_mode = key;
482 
483 	hpi_send_recv(&hm, &hr);
484 
485 	return hr.error;
486 }
487 
488 u16 hpi_adapter_self_test(const struct hpi_hsubsys *ph_subsys,
489 	u16 adapter_index)
490 {
491 	struct hpi_message hm;
492 	struct hpi_response hr;
493 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
494 		HPI_ADAPTER_SELFTEST);
495 	hm.adapter_index = adapter_index;
496 	hpi_send_recv(&hm, &hr);
497 	return hr.error;
498 }
499 
500 u16 hpi_adapter_debug_read(const struct hpi_hsubsys *ph_subsys,
501 	u16 adapter_index, u32 dsp_address, char *p_buffer, int *count_bytes)
502 {
503 	struct hpi_message hm;
504 	struct hpi_response hr;
505 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
506 		HPI_ADAPTER_DEBUG_READ);
507 
508 	hr.size = sizeof(hr);
509 
510 	hm.adapter_index = adapter_index;
511 	hm.u.ax.debug_read.dsp_address = dsp_address;
512 
513 	if (*count_bytes > (int)sizeof(hr.u.bytes))
514 		*count_bytes = sizeof(hr.u.bytes);
515 
516 	hm.u.ax.debug_read.count_bytes = *count_bytes;
517 
518 	hpi_send_recv(&hm, &hr);
519 
520 	if (!hr.error) {
521 		*count_bytes = hr.size - 12;
522 		memcpy(p_buffer, &hr.u.bytes, *count_bytes);
523 	} else
524 		*count_bytes = 0;
525 	return hr.error;
526 }
527 
528 u16 hpi_adapter_set_property(const struct hpi_hsubsys *ph_subsys,
529 	u16 adapter_index, u16 property, u16 parameter1, u16 parameter2)
530 {
531 	struct hpi_message hm;
532 	struct hpi_response hr;
533 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
534 		HPI_ADAPTER_SET_PROPERTY);
535 	hm.adapter_index = adapter_index;
536 	hm.u.ax.property_set.property = property;
537 	hm.u.ax.property_set.parameter1 = parameter1;
538 	hm.u.ax.property_set.parameter2 = parameter2;
539 
540 	hpi_send_recv(&hm, &hr);
541 
542 	return hr.error;
543 }
544 
545 u16 hpi_adapter_get_property(const struct hpi_hsubsys *ph_subsys,
546 	u16 adapter_index, u16 property, u16 *pw_parameter1,
547 	u16 *pw_parameter2)
548 {
549 	struct hpi_message hm;
550 	struct hpi_response hr;
551 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ADAPTER,
552 		HPI_ADAPTER_GET_PROPERTY);
553 	hm.adapter_index = adapter_index;
554 	hm.u.ax.property_set.property = property;
555 
556 	hpi_send_recv(&hm, &hr);
557 	if (!hr.error) {
558 		if (pw_parameter1)
559 			*pw_parameter1 = hr.u.ax.property_get.parameter1;
560 		if (pw_parameter2)
561 			*pw_parameter2 = hr.u.ax.property_get.parameter2;
562 	}
563 
564 	return hr.error;
565 }
566 
567 u16 hpi_adapter_enumerate_property(const struct hpi_hsubsys *ph_subsys,
568 	u16 adapter_index, u16 index, u16 what_to_enumerate,
569 	u16 property_index, u32 *psetting)
570 {
571 	return 0;
572 }
573 
574 u16 hpi_format_create(struct hpi_format *p_format, u16 channels, u16 format,
575 	u32 sample_rate, u32 bit_rate, u32 attributes)
576 {
577 	u16 error = 0;
578 	struct hpi_msg_format fmt;
579 
580 	switch (channels) {
581 	case 1:
582 	case 2:
583 	case 4:
584 	case 6:
585 	case 8:
586 	case 16:
587 		break;
588 	default:
589 		error = HPI_ERROR_INVALID_CHANNELS;
590 		return error;
591 	}
592 	fmt.channels = channels;
593 
594 	switch (format) {
595 	case HPI_FORMAT_PCM16_SIGNED:
596 	case HPI_FORMAT_PCM24_SIGNED:
597 	case HPI_FORMAT_PCM32_SIGNED:
598 	case HPI_FORMAT_PCM32_FLOAT:
599 	case HPI_FORMAT_PCM16_BIGENDIAN:
600 	case HPI_FORMAT_PCM8_UNSIGNED:
601 	case HPI_FORMAT_MPEG_L1:
602 	case HPI_FORMAT_MPEG_L2:
603 	case HPI_FORMAT_MPEG_L3:
604 	case HPI_FORMAT_DOLBY_AC2:
605 	case HPI_FORMAT_AA_TAGIT1_HITS:
606 	case HPI_FORMAT_AA_TAGIT1_INSERTS:
607 	case HPI_FORMAT_RAW_BITSTREAM:
608 	case HPI_FORMAT_AA_TAGIT1_HITS_EX1:
609 	case HPI_FORMAT_OEM1:
610 	case HPI_FORMAT_OEM2:
611 		break;
612 	default:
613 		error = HPI_ERROR_INVALID_FORMAT;
614 		return error;
615 	}
616 	fmt.format = format;
617 
618 	if (sample_rate < 8000L) {
619 		error = HPI_ERROR_INCOMPATIBLE_SAMPLERATE;
620 		sample_rate = 8000L;
621 	}
622 	if (sample_rate > 200000L) {
623 		error = HPI_ERROR_INCOMPATIBLE_SAMPLERATE;
624 		sample_rate = 200000L;
625 	}
626 	fmt.sample_rate = sample_rate;
627 
628 	switch (format) {
629 	case HPI_FORMAT_MPEG_L1:
630 	case HPI_FORMAT_MPEG_L2:
631 	case HPI_FORMAT_MPEG_L3:
632 		fmt.bit_rate = bit_rate;
633 		break;
634 	case HPI_FORMAT_PCM16_SIGNED:
635 	case HPI_FORMAT_PCM16_BIGENDIAN:
636 		fmt.bit_rate = channels * sample_rate * 2;
637 		break;
638 	case HPI_FORMAT_PCM32_SIGNED:
639 	case HPI_FORMAT_PCM32_FLOAT:
640 		fmt.bit_rate = channels * sample_rate * 4;
641 		break;
642 	case HPI_FORMAT_PCM8_UNSIGNED:
643 		fmt.bit_rate = channels * sample_rate;
644 		break;
645 	default:
646 		fmt.bit_rate = 0;
647 	}
648 
649 	switch (format) {
650 	case HPI_FORMAT_MPEG_L2:
651 		if ((channels == 1)
652 			&& (attributes != HPI_MPEG_MODE_DEFAULT)) {
653 			attributes = HPI_MPEG_MODE_DEFAULT;
654 			error = HPI_ERROR_INVALID_FORMAT;
655 		} else if (attributes > HPI_MPEG_MODE_DUALCHANNEL) {
656 			attributes = HPI_MPEG_MODE_DEFAULT;
657 			error = HPI_ERROR_INVALID_FORMAT;
658 		}
659 		fmt.attributes = attributes;
660 		break;
661 	default:
662 		fmt.attributes = attributes;
663 	}
664 
665 	hpi_msg_to_format(p_format, &fmt);
666 	return error;
667 }
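
/*
 * hpi_format_create() validates the channel count and format code, clamps
 * the sample rate to 8 kHz..200 kHz (flagging
 * HPI_ERROR_INCOMPATIBLE_SAMPLERATE if it had to) and derives a nominal
 * bit rate for the PCM formats.  Building a stereo 16-bit 48 kHz PCM
 * format, for example (illustrative only):
 *
 *	struct hpi_format fmt;
 *	u16 err = hpi_format_create(&fmt, 2, HPI_FORMAT_PCM16_SIGNED,
 *		48000, 0, 0);
 *
 * bit_rate is only significant for the MPEG formats (it is recomputed for
 * PCM), and attributes are only validated for HPI_FORMAT_MPEG_L2.
 */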
668 
669 u16 hpi_stream_estimate_buffer_size(struct hpi_format *p_format,
670 	u32 host_polling_rate_in_milli_seconds, u32 *recommended_buffer_size)
671 {
672 
673 	u32 bytes_per_second;
674 	u32 size;
675 	u16 channels;
676 	struct hpi_format *pF = p_format;
677 
678 	channels = pF->channels;
679 
680 	switch (pF->format) {
681 	case HPI_FORMAT_PCM16_BIGENDIAN:
682 	case HPI_FORMAT_PCM16_SIGNED:
683 		bytes_per_second = pF->sample_rate * 2L * channels;
684 		break;
685 	case HPI_FORMAT_PCM24_SIGNED:
686 		bytes_per_second = pF->sample_rate * 3L * channels;
687 		break;
688 	case HPI_FORMAT_PCM32_SIGNED:
689 	case HPI_FORMAT_PCM32_FLOAT:
690 		bytes_per_second = pF->sample_rate * 4L * channels;
691 		break;
692 	case HPI_FORMAT_PCM8_UNSIGNED:
693 		bytes_per_second = pF->sample_rate * 1L * channels;
694 		break;
695 	case HPI_FORMAT_MPEG_L1:
696 	case HPI_FORMAT_MPEG_L2:
697 	case HPI_FORMAT_MPEG_L3:
698 		bytes_per_second = pF->bit_rate / 8L;
699 		break;
700 	case HPI_FORMAT_DOLBY_AC2:
701 
702 		bytes_per_second = 256000L / 8L;
703 		break;
704 	default:
705 		return HPI_ERROR_INVALID_FORMAT;
706 	}
707 	size = (bytes_per_second * host_polling_rate_in_milli_seconds * 2) /
708 		1000L;
709 
710 	*recommended_buffer_size =
711 		roundup_pow_of_two(((size + 4095L) & ~4095L));
712 	return 0;
713 }
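
/*
 * Worked example for the estimate above: 48 kHz stereo PCM16 gives
 * bytes_per_second = 48000 * 2 * 2 = 192000.  With a 10 ms host polling
 * interval, size = 192000 * 10 * 2 / 1000 = 3840 bytes, which the
 * rounding in the function turns into 4096 (already a power of two), so
 * *recommended_buffer_size becomes 4096.
 */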
714 
715 u16 hpi_outstream_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
716 	u16 outstream_index, u32 *ph_outstream)
717 {
718 	struct hpi_message hm;
719 	struct hpi_response hr;
720 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
721 		HPI_OSTREAM_OPEN);
722 	hm.adapter_index = adapter_index;
723 	hm.obj_index = outstream_index;
724 
725 	hpi_send_recv(&hm, &hr);
726 
727 	if (hr.error == 0)
728 		*ph_outstream =
729 			hpi_indexes_to_handle(HPI_OBJ_OSTREAM, adapter_index,
730 			outstream_index);
731 	else
732 		*ph_outstream = 0;
733 	return hr.error;
734 }
735 
736 u16 hpi_outstream_close(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
737 {
738 	struct hpi_message hm;
739 	struct hpi_response hr;
740 
741 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
742 		HPI_OSTREAM_HOSTBUFFER_FREE);
743 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
744 	hpi_send_recv(&hm, &hr);
745 
746 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
747 		HPI_OSTREAM_GROUP_RESET);
748 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
749 	hpi_send_recv(&hm, &hr);
750 
751 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
752 		HPI_OSTREAM_CLOSE);
753 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
754 	hpi_send_recv(&hm, &hr);
755 
756 	return hr.error;
757 }
758 
759 u16 hpi_outstream_get_info_ex(const struct hpi_hsubsys *ph_subsys,
760 	u32 h_outstream, u16 *pw_state, u32 *pbuffer_size, u32 *pdata_to_play,
761 	u32 *psamples_played, u32 *pauxiliary_data_to_play)
762 {
763 	struct hpi_message hm;
764 	struct hpi_response hr;
765 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
766 		HPI_OSTREAM_GET_INFO);
767 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
768 
769 	hpi_send_recv(&hm, &hr);
770 
771 	if (pw_state)
772 		*pw_state = hr.u.d.u.stream_info.state;
773 	if (pbuffer_size)
774 		*pbuffer_size = hr.u.d.u.stream_info.buffer_size;
775 	if (pdata_to_play)
776 		*pdata_to_play = hr.u.d.u.stream_info.data_available;
777 	if (psamples_played)
778 		*psamples_played = hr.u.d.u.stream_info.samples_transferred;
779 	if (pauxiliary_data_to_play)
780 		*pauxiliary_data_to_play =
781 			hr.u.d.u.stream_info.auxiliary_data_available;
782 	return hr.error;
783 }
784 
785 u16 hpi_outstream_write_buf(const struct hpi_hsubsys *ph_subsys,
786 	u32 h_outstream, const u8 *pb_data, u32 bytes_to_write,
787 	const struct hpi_format *p_format)
788 {
789 	struct hpi_message hm;
790 	struct hpi_response hr;
791 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
792 		HPI_OSTREAM_WRITE);
793 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
794 	hm.u.d.u.data.pb_data = (u8 *)pb_data;
795 	hm.u.d.u.data.data_size = bytes_to_write;
796 
797 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
798 
799 	hpi_send_recv(&hm, &hr);
800 
801 	return hr.error;
802 }
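
/*
 * Typical playback sequence built from the outstream calls in this file
 * (a sketch only: ss, adapter, buf, nbytes and fmt are caller-supplied,
 * error handling is omitted, and data is normally pre-written before
 * starting):
 *
 *	u32 h_out;
 *	hpi_outstream_open(ss, adapter, 0, &h_out);
 *	hpi_outstream_write_buf(ss, h_out, buf, nbytes, &fmt);
 *	hpi_outstream_start(ss, h_out);
 *	... poll hpi_outstream_get_info_ex() and keep writing ...
 *	hpi_outstream_stop(ss, h_out);
 *	hpi_outstream_close(ss, h_out);
 */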
803 
804 u16 hpi_outstream_start(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
805 {
806 	struct hpi_message hm;
807 	struct hpi_response hr;
808 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
809 		HPI_OSTREAM_START);
810 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
811 
812 	hpi_send_recv(&hm, &hr);
813 
814 	return hr.error;
815 }
816 
817 u16 hpi_outstream_wait_start(const struct hpi_hsubsys *ph_subsys,
818 	u32 h_outstream)
819 {
820 	struct hpi_message hm;
821 	struct hpi_response hr;
822 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
823 		HPI_OSTREAM_WAIT_START);
824 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
825 
826 	hpi_send_recv(&hm, &hr);
827 
828 	return hr.error;
829 }
830 
831 u16 hpi_outstream_stop(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
832 {
833 	struct hpi_message hm;
834 	struct hpi_response hr;
835 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
836 		HPI_OSTREAM_STOP);
837 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
838 
839 	hpi_send_recv(&hm, &hr);
840 
841 	return hr.error;
842 }
843 
844 u16 hpi_outstream_sinegen(const struct hpi_hsubsys *ph_subsys,
845 	u32 h_outstream)
846 {
847 	struct hpi_message hm;
848 	struct hpi_response hr;
849 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
850 		HPI_OSTREAM_SINEGEN);
851 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
852 
853 	hpi_send_recv(&hm, &hr);
854 
855 	return hr.error;
856 }
857 
858 u16 hpi_outstream_reset(const struct hpi_hsubsys *ph_subsys, u32 h_outstream)
859 {
860 	struct hpi_message hm;
861 	struct hpi_response hr;
862 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
863 		HPI_OSTREAM_RESET);
864 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
865 
866 	hpi_send_recv(&hm, &hr);
867 
868 	return hr.error;
869 }
870 
871 u16 hpi_outstream_query_format(const struct hpi_hsubsys *ph_subsys,
872 	u32 h_outstream, struct hpi_format *p_format)
873 {
874 	struct hpi_message hm;
875 	struct hpi_response hr;
876 
877 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
878 		HPI_OSTREAM_QUERY_FORMAT);
879 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
880 
881 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
882 
883 	hpi_send_recv(&hm, &hr);
884 
885 	return hr.error;
886 }
887 
888 u16 hpi_outstream_set_format(const struct hpi_hsubsys *ph_subsys,
889 	u32 h_outstream, struct hpi_format *p_format)
890 {
891 	struct hpi_message hm;
892 	struct hpi_response hr;
893 
894 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
895 		HPI_OSTREAM_SET_FORMAT);
896 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
897 
898 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
899 
900 	hpi_send_recv(&hm, &hr);
901 
902 	return hr.error;
903 }
904 
905 u16 hpi_outstream_set_velocity(const struct hpi_hsubsys *ph_subsys,
906 	u32 h_outstream, short velocity)
907 {
908 	struct hpi_message hm;
909 	struct hpi_response hr;
910 
911 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
912 		HPI_OSTREAM_SET_VELOCITY);
913 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
914 	hm.u.d.u.velocity = velocity;
915 
916 	hpi_send_recv(&hm, &hr);
917 
918 	return hr.error;
919 }
920 
921 u16 hpi_outstream_set_punch_in_out(const struct hpi_hsubsys *ph_subsys,
922 	u32 h_outstream, u32 punch_in_sample, u32 punch_out_sample)
923 {
924 	struct hpi_message hm;
925 	struct hpi_response hr;
926 
927 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
928 		HPI_OSTREAM_SET_PUNCHINOUT);
929 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
930 
931 	hm.u.d.u.pio.punch_in_sample = punch_in_sample;
932 	hm.u.d.u.pio.punch_out_sample = punch_out_sample;
933 
934 	hpi_send_recv(&hm, &hr);
935 
936 	return hr.error;
937 }
938 
939 u16 hpi_outstream_ancillary_reset(const struct hpi_hsubsys *ph_subsys,
940 	u32 h_outstream, u16 mode)
941 {
942 	struct hpi_message hm;
943 	struct hpi_response hr;
944 
945 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
946 		HPI_OSTREAM_ANC_RESET);
947 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
948 	hm.u.d.u.data.format.channels = mode;
949 	hpi_send_recv(&hm, &hr);
950 	return hr.error;
951 }
952 
953 u16 hpi_outstream_ancillary_get_info(const struct hpi_hsubsys *ph_subsys,
954 	u32 h_outstream, u32 *pframes_available)
955 {
956 	struct hpi_message hm;
957 	struct hpi_response hr;
958 
959 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
960 		HPI_OSTREAM_ANC_GET_INFO);
961 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
962 	hpi_send_recv(&hm, &hr);
963 	if (hr.error == 0) {
964 		if (pframes_available)
965 			*pframes_available =
966 				hr.u.d.u.stream_info.data_available /
967 				sizeof(struct hpi_anc_frame);
968 	}
969 	return hr.error;
970 }
971 
972 u16 hpi_outstream_ancillary_read(const struct hpi_hsubsys *ph_subsys,
973 	u32 h_outstream, struct hpi_anc_frame *p_anc_frame_buffer,
974 	u32 anc_frame_buffer_size_in_bytes,
975 	u32 number_of_ancillary_frames_to_read)
976 {
977 	struct hpi_message hm;
978 	struct hpi_response hr;
979 
980 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
981 		HPI_OSTREAM_ANC_READ);
982 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
983 	hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
984 	hm.u.d.u.data.data_size =
985 		number_of_ancillary_frames_to_read *
986 		sizeof(struct hpi_anc_frame);
987 	if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
988 		hpi_send_recv(&hm, &hr);
989 	else
990 		hr.error = HPI_ERROR_INVALID_DATA_TRANSFER;
991 	return hr.error;
992 }
993 
994 u16 hpi_outstream_set_time_scale(const struct hpi_hsubsys *ph_subsys,
995 	u32 h_outstream, u32 time_scale)
996 {
997 	struct hpi_message hm;
998 	struct hpi_response hr;
999 
1000 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1001 		HPI_OSTREAM_SET_TIMESCALE);
1002 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1003 
1004 	hm.u.d.u.time_scale = time_scale;
1005 
1006 	hpi_send_recv(&hm, &hr);
1007 
1008 	return hr.error;
1009 }
1010 
1011 u16 hpi_outstream_host_buffer_allocate(const struct hpi_hsubsys *ph_subsys,
1012 	u32 h_outstream, u32 size_in_bytes)
1013 {
1014 	struct hpi_message hm;
1015 	struct hpi_response hr;
1016 
1017 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1018 		HPI_OSTREAM_HOSTBUFFER_ALLOC);
1019 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1020 	hm.u.d.u.data.data_size = size_in_bytes;
1021 	hpi_send_recv(&hm, &hr);
1022 	return hr.error;
1023 }
1024 
1025 u16 hpi_outstream_host_buffer_get_info(const struct hpi_hsubsys *ph_subsys,
1026 	u32 h_outstream, u8 **pp_buffer,
1027 	struct hpi_hostbuffer_status **pp_status)
1028 {
1029 	struct hpi_message hm;
1030 	struct hpi_response hr;
1031 
1032 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1033 		HPI_OSTREAM_HOSTBUFFER_GET_INFO);
1034 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1035 	hpi_send_recv(&hm, &hr);
1036 
1037 	if (hr.error == 0) {
1038 		if (pp_buffer)
1039 			*pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
1040 		if (pp_status)
1041 			*pp_status = hr.u.d.u.hostbuffer_info.p_status;
1042 	}
1043 	return hr.error;
1044 }
1045 
1046 u16 hpi_outstream_host_buffer_free(const struct hpi_hsubsys *ph_subsys,
1047 	u32 h_outstream)
1048 {
1049 	struct hpi_message hm;
1050 	struct hpi_response hr;
1051 
1052 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1053 		HPI_OSTREAM_HOSTBUFFER_FREE);
1054 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1055 	hpi_send_recv(&hm, &hr);
1056 	return hr.error;
1057 }
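
/*
 * The three host buffer calls above manage the optional bus-mastering
 * buffer between host memory and the adapter:
 * hpi_outstream_host_buffer_allocate() requests a buffer of the given
 * size, hpi_outstream_host_buffer_get_info() exposes the buffer pointer
 * and its hpi_hostbuffer_status so data can be streamed into it directly,
 * and hpi_outstream_host_buffer_free() releases it (it is also sent
 * unconditionally from hpi_outstream_close()).
 */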
1058 
1059 u16 hpi_outstream_group_add(const struct hpi_hsubsys *ph_subsys,
1060 	u32 h_outstream, u32 h_stream)
1061 {
1062 	struct hpi_message hm;
1063 	struct hpi_response hr;
1064 	u16 adapter;
1065 	char c_obj_type;
1066 
1067 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1068 		HPI_OSTREAM_GROUP_ADD);
1069 	hr.error = 0;
1070 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1071 	c_obj_type = hpi_handle_object(h_stream);
1072 	switch (c_obj_type) {
1073 	case HPI_OBJ_OSTREAM:
1074 		hm.u.d.u.stream.object_type = HPI_OBJ_OSTREAM;
1075 		u32TOINDEXES(h_stream, &adapter,
1076 			&hm.u.d.u.stream.stream_index);
1077 		break;
1078 	case HPI_OBJ_ISTREAM:
1079 		hm.u.d.u.stream.object_type = HPI_OBJ_ISTREAM;
1080 		u32TOINDEXES(h_stream, &adapter,
1081 			&hm.u.d.u.stream.stream_index);
1082 		break;
1083 	default:
1084 		return HPI_ERROR_INVALID_STREAM;
1085 	}
1086 	if (adapter != hm.adapter_index)
1087 		return HPI_ERROR_NO_INTERADAPTER_GROUPS;
1088 
1089 	hpi_send_recv(&hm, &hr);
1090 	return hr.error;
1091 }
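
/*
 * Stream groups tie several streams on the same adapter together so they
 * can be started and stopped as a unit.  hpi_outstream_group_add() accepts
 * either an outstream or an instream handle but refuses handles from a
 * different adapter (HPI_ERROR_NO_INTERADAPTER_GROUPS);
 * hpi_outstream_group_get_map() below reports the current membership
 * bitmaps and hpi_outstream_group_reset() dissolves the group.
 */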
1092 
1093 u16 hpi_outstream_group_get_map(const struct hpi_hsubsys *ph_subsys,
1094 	u32 h_outstream, u32 *poutstream_map, u32 *pinstream_map)
1095 {
1096 	struct hpi_message hm;
1097 	struct hpi_response hr;
1098 
1099 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1100 		HPI_OSTREAM_GROUP_GETMAP);
1101 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1102 	hpi_send_recv(&hm, &hr);
1103 
1104 	if (poutstream_map)
1105 		*poutstream_map = hr.u.d.u.group_info.outstream_group_map;
1106 	if (pinstream_map)
1107 		*pinstream_map = hr.u.d.u.group_info.instream_group_map;
1108 
1109 	return hr.error;
1110 }
1111 
1112 u16 hpi_outstream_group_reset(const struct hpi_hsubsys *ph_subsys,
1113 	u32 h_outstream)
1114 {
1115 	struct hpi_message hm;
1116 	struct hpi_response hr;
1117 
1118 	hpi_init_message_response(&hm, &hr, HPI_OBJ_OSTREAM,
1119 		HPI_OSTREAM_GROUP_RESET);
1120 	u32TOINDEXES(h_outstream, &hm.adapter_index, &hm.obj_index);
1121 	hpi_send_recv(&hm, &hr);
1122 	return hr.error;
1123 }
1124 
1125 u16 hpi_instream_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
1126 	u16 instream_index, u32 *ph_instream)
1127 {
1128 	struct hpi_message hm;
1129 	struct hpi_response hr;
1130 
1131 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1132 		HPI_ISTREAM_OPEN);
1133 	hm.adapter_index = adapter_index;
1134 	hm.obj_index = instream_index;
1135 
1136 	hpi_send_recv(&hm, &hr);
1137 
1138 	if (hr.error == 0)
1139 		*ph_instream =
1140 			hpi_indexes_to_handle(HPI_OBJ_ISTREAM, adapter_index,
1141 			instream_index);
1142 	else
1143 		*ph_instream = 0;
1144 
1145 	return hr.error;
1146 }
1147 
1148 u16 hpi_instream_close(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1149 {
1150 	struct hpi_message hm;
1151 	struct hpi_response hr;
1152 
1153 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1154 		HPI_ISTREAM_HOSTBUFFER_FREE);
1155 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1156 	hpi_send_recv(&hm, &hr);
1157 
1158 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1159 		HPI_ISTREAM_GROUP_RESET);
1160 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1161 	hpi_send_recv(&hm, &hr);
1162 
1163 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1164 		HPI_ISTREAM_CLOSE);
1165 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1166 	hpi_send_recv(&hm, &hr);
1167 
1168 	return hr.error;
1169 }
1170 
1171 u16 hpi_instream_query_format(const struct hpi_hsubsys *ph_subsys,
1172 	u32 h_instream, const struct hpi_format *p_format)
1173 {
1174 	struct hpi_message hm;
1175 	struct hpi_response hr;
1176 
1177 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1178 		HPI_ISTREAM_QUERY_FORMAT);
1179 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1180 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
1181 
1182 	hpi_send_recv(&hm, &hr);
1183 
1184 	return hr.error;
1185 }
1186 
1187 u16 hpi_instream_set_format(const struct hpi_hsubsys *ph_subsys,
1188 	u32 h_instream, const struct hpi_format *p_format)
1189 {
1190 	struct hpi_message hm;
1191 	struct hpi_response hr;
1192 
1193 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1194 		HPI_ISTREAM_SET_FORMAT);
1195 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1196 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
1197 
1198 	hpi_send_recv(&hm, &hr);
1199 
1200 	return hr.error;
1201 }
1202 
1203 u16 hpi_instream_read_buf(const struct hpi_hsubsys *ph_subsys, u32 h_instream,
1204 	u8 *pb_data, u32 bytes_to_read)
1205 {
1206 	struct hpi_message hm;
1207 	struct hpi_response hr;
1208 
1209 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1210 		HPI_ISTREAM_READ);
1211 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1212 	hm.u.d.u.data.data_size = bytes_to_read;
1213 	hm.u.d.u.data.pb_data = pb_data;
1214 
1215 	hpi_send_recv(&hm, &hr);
1216 
1217 	return hr.error;
1218 }
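
/*
 * Typical capture sequence (a sketch only: ss, adapter, buf, nbytes and
 * fmt are caller-supplied and error handling is omitted):
 *
 *	u32 h_in;
 *	hpi_instream_open(ss, adapter, 0, &h_in);
 *	hpi_instream_set_format(ss, h_in, &fmt);
 *	hpi_instream_start(ss, h_in);
 *	... poll hpi_instream_get_info_ex() and call
 *	    hpi_instream_read_buf(ss, h_in, buf, nbytes) as data arrives ...
 *	hpi_instream_stop(ss, h_in);
 *	hpi_instream_close(ss, h_in);
 */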
1219 
1220 u16 hpi_instream_start(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1221 {
1222 	struct hpi_message hm;
1223 	struct hpi_response hr;
1224 
1225 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1226 		HPI_ISTREAM_START);
1227 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1228 
1229 	hpi_send_recv(&hm, &hr);
1230 
1231 	return hr.error;
1232 }
1233 
1234 u16 hpi_instream_wait_start(const struct hpi_hsubsys *ph_subsys,
1235 	u32 h_instream)
1236 {
1237 	struct hpi_message hm;
1238 	struct hpi_response hr;
1239 
1240 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1241 		HPI_ISTREAM_WAIT_START);
1242 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1243 
1244 	hpi_send_recv(&hm, &hr);
1245 
1246 	return hr.error;
1247 }
1248 
1249 u16 hpi_instream_stop(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1250 {
1251 	struct hpi_message hm;
1252 	struct hpi_response hr;
1253 
1254 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1255 		HPI_ISTREAM_STOP);
1256 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1257 
1258 	hpi_send_recv(&hm, &hr);
1259 
1260 	return hr.error;
1261 }
1262 
1263 u16 hpi_instream_reset(const struct hpi_hsubsys *ph_subsys, u32 h_instream)
1264 {
1265 	struct hpi_message hm;
1266 	struct hpi_response hr;
1267 
1268 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1269 		HPI_ISTREAM_RESET);
1270 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1271 
1272 	hpi_send_recv(&hm, &hr);
1273 
1274 	return hr.error;
1275 }
1276 
1277 u16 hpi_instream_get_info_ex(const struct hpi_hsubsys *ph_subsys,
1278 	u32 h_instream, u16 *pw_state, u32 *pbuffer_size, u32 *pdata_recorded,
1279 	u32 *psamples_recorded, u32 *pauxiliary_data_recorded)
1280 {
1281 	struct hpi_message hm;
1282 	struct hpi_response hr;
1283 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1284 		HPI_ISTREAM_GET_INFO);
1285 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1286 
1287 	hpi_send_recv(&hm, &hr);
1288 
1289 	if (pw_state)
1290 		*pw_state = hr.u.d.u.stream_info.state;
1291 	if (pbuffer_size)
1292 		*pbuffer_size = hr.u.d.u.stream_info.buffer_size;
1293 	if (pdata_recorded)
1294 		*pdata_recorded = hr.u.d.u.stream_info.data_available;
1295 	if (psamples_recorded)
1296 		*psamples_recorded = hr.u.d.u.stream_info.samples_transferred;
1297 	if (pauxiliary_data_recorded)
1298 		*pauxiliary_data_recorded =
1299 			hr.u.d.u.stream_info.auxiliary_data_available;
1300 	return hr.error;
1301 }
1302 
1303 u16 hpi_instream_ancillary_reset(const struct hpi_hsubsys *ph_subsys,
1304 	u32 h_instream, u16 bytes_per_frame, u16 mode, u16 alignment,
1305 	u16 idle_bit)
1306 {
1307 	struct hpi_message hm;
1308 	struct hpi_response hr;
1309 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1310 		HPI_ISTREAM_ANC_RESET);
1311 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1312 	hm.u.d.u.data.format.attributes = bytes_per_frame;
1313 	hm.u.d.u.data.format.format = (mode << 8) | (alignment & 0xff);
1314 	hm.u.d.u.data.format.channels = idle_bit;
1315 	hpi_send_recv(&hm, &hr);
1316 	return hr.error;
1317 }
1318 
1319 u16 hpi_instream_ancillary_get_info(const struct hpi_hsubsys *ph_subsys,
1320 	u32 h_instream, u32 *pframe_space)
1321 {
1322 	struct hpi_message hm;
1323 	struct hpi_response hr;
1324 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1325 		HPI_ISTREAM_ANC_GET_INFO);
1326 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1327 	hpi_send_recv(&hm, &hr);
1328 	if (pframe_space)
1329 		*pframe_space =
1330 			(hr.u.d.u.stream_info.buffer_size -
1331 			hr.u.d.u.stream_info.data_available) /
1332 			sizeof(struct hpi_anc_frame);
1333 	return hr.error;
1334 }
1335 
1336 u16 hpi_instream_ancillary_write(const struct hpi_hsubsys *ph_subsys,
1337 	u32 h_instream, const struct hpi_anc_frame *p_anc_frame_buffer,
1338 	u32 anc_frame_buffer_size_in_bytes,
1339 	u32 number_of_ancillary_frames_to_write)
1340 {
1341 	struct hpi_message hm;
1342 	struct hpi_response hr;
1343 
1344 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1345 		HPI_ISTREAM_ANC_WRITE);
1346 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1347 	hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
1348 	hm.u.d.u.data.data_size =
1349 		number_of_ancillary_frames_to_write *
1350 		sizeof(struct hpi_anc_frame);
1351 	if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
1352 		hpi_send_recv(&hm, &hr);
1353 	else
1354 		hr.error = HPI_ERROR_INVALID_DATA_TRANSFER;
1355 	return hr.error;
1356 }
1357 
1358 u16 hpi_instream_host_buffer_allocate(const struct hpi_hsubsys *ph_subsys,
1359 	u32 h_instream, u32 size_in_bytes)
1360 {
1361 
1362 	struct hpi_message hm;
1363 	struct hpi_response hr;
1364 
1365 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1366 		HPI_ISTREAM_HOSTBUFFER_ALLOC);
1367 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1368 	hm.u.d.u.data.data_size = size_in_bytes;
1369 	hpi_send_recv(&hm, &hr);
1370 	return hr.error;
1371 }
1372 
1373 u16 hpi_instream_host_buffer_get_info(const struct hpi_hsubsys *ph_subsys,
1374 	u32 h_instream, u8 **pp_buffer,
1375 	struct hpi_hostbuffer_status **pp_status)
1376 {
1377 	struct hpi_message hm;
1378 	struct hpi_response hr;
1379 
1380 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1381 		HPI_ISTREAM_HOSTBUFFER_GET_INFO);
1382 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1383 	hpi_send_recv(&hm, &hr);
1384 
1385 	if (hr.error == 0) {
1386 		if (pp_buffer)
1387 			*pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
1388 		if (pp_status)
1389 			*pp_status = hr.u.d.u.hostbuffer_info.p_status;
1390 	}
1391 	return hr.error;
1392 }
1393 
1394 u16 hpi_instream_host_buffer_free(const struct hpi_hsubsys *ph_subsys,
1395 	u32 h_instream)
1396 {
1397 
1398 	struct hpi_message hm;
1399 	struct hpi_response hr;
1400 
1401 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1402 		HPI_ISTREAM_HOSTBUFFER_FREE);
1403 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1404 	hpi_send_recv(&hm, &hr);
1405 	return hr.error;
1406 }
1407 
1408 u16 hpi_instream_group_add(const struct hpi_hsubsys *ph_subsys,
1409 	u32 h_instream, u32 h_stream)
1410 {
1411 	struct hpi_message hm;
1412 	struct hpi_response hr;
1413 	u16 adapter;
1414 	char c_obj_type;
1415 
1416 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1417 		HPI_ISTREAM_GROUP_ADD);
1418 	hr.error = 0;
1419 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1420 	c_obj_type = hpi_handle_object(h_stream);
1421 
1422 	switch (c_obj_type) {
1423 	case HPI_OBJ_OSTREAM:
1424 		hm.u.d.u.stream.object_type = HPI_OBJ_OSTREAM;
1425 		u32TOINDEXES(h_stream, &adapter,
1426 			&hm.u.d.u.stream.stream_index);
1427 		break;
1428 	case HPI_OBJ_ISTREAM:
1429 		hm.u.d.u.stream.object_type = HPI_OBJ_ISTREAM;
1430 		u32TOINDEXES(h_stream, &adapter,
1431 			&hm.u.d.u.stream.stream_index);
1432 		break;
1433 	default:
1434 		return HPI_ERROR_INVALID_STREAM;
1435 	}
1436 
1437 	if (adapter != hm.adapter_index)
1438 		return HPI_ERROR_NO_INTERADAPTER_GROUPS;
1439 
1440 	hpi_send_recv(&hm, &hr);
1441 	return hr.error;
1442 }
1443 
1444 u16 hpi_instream_group_get_map(const struct hpi_hsubsys *ph_subsys,
1445 	u32 h_instream, u32 *poutstream_map, u32 *pinstream_map)
1446 {
1447 	struct hpi_message hm;
1448 	struct hpi_response hr;
1449 
1450 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1451 		HPI_ISTREAM_GROUP_GETMAP);
1452 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1453 	hpi_send_recv(&hm, &hr);
1454 
1455 	if (poutstream_map)
1456 		*poutstream_map = hr.u.d.u.group_info.outstream_group_map;
1457 	if (pinstream_map)
1458 		*pinstream_map = hr.u.d.u.group_info.instream_group_map;
1459 
1460 	return hr.error;
1461 }
1462 
1463 u16 hpi_instream_group_reset(const struct hpi_hsubsys *ph_subsys,
1464 	u32 h_instream)
1465 {
1466 	struct hpi_message hm;
1467 	struct hpi_response hr;
1468 
1469 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ISTREAM,
1470 		HPI_ISTREAM_GROUP_RESET);
1471 	u32TOINDEXES(h_instream, &hm.adapter_index, &hm.obj_index);
1472 	hpi_send_recv(&hm, &hr);
1473 	return hr.error;
1474 }
1475 
1476 u16 hpi_mixer_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
1477 	u32 *ph_mixer)
1478 {
1479 	struct hpi_message hm;
1480 	struct hpi_response hr;
1481 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_OPEN);
1482 	hm.adapter_index = adapter_index;
1483 
1484 	hpi_send_recv(&hm, &hr);
1485 
1486 	if (hr.error == 0)
1487 		*ph_mixer =
1488 			hpi_indexes_to_handle(HPI_OBJ_MIXER, adapter_index,
1489 			0);
1490 	else
1491 		*ph_mixer = 0;
1492 	return hr.error;
1493 }
1494 
1495 u16 hpi_mixer_close(const struct hpi_hsubsys *ph_subsys, u32 h_mixer)
1496 {
1497 	struct hpi_message hm;
1498 	struct hpi_response hr;
1499 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_CLOSE);
1500 	u32TOINDEX(h_mixer, &hm.adapter_index);
1501 	hpi_send_recv(&hm, &hr);
1502 	return hr.error;
1503 }
1504 
1505 u16 hpi_mixer_get_control(const struct hpi_hsubsys *ph_subsys, u32 h_mixer,
1506 	u16 src_node_type, u16 src_node_type_index, u16 dst_node_type,
1507 	u16 dst_node_type_index, u16 control_type, u32 *ph_control)
1508 {
1509 	struct hpi_message hm;
1510 	struct hpi_response hr;
1511 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER,
1512 		HPI_MIXER_GET_CONTROL);
1513 	u32TOINDEX(h_mixer, &hm.adapter_index);
1514 	hm.u.m.node_type1 = src_node_type;
1515 	hm.u.m.node_index1 = src_node_type_index;
1516 	hm.u.m.node_type2 = dst_node_type;
1517 	hm.u.m.node_index2 = dst_node_type_index;
1518 	hm.u.m.control_type = control_type;
1519 
1520 	hpi_send_recv(&hm, &hr);
1521 
1522 	if (hr.error == 0)
1523 		*ph_control =
1524 			hpi_indexes_to_handle(HPI_OBJ_CONTROL,
1525 			hm.adapter_index, hr.u.m.control_index);
1526 	else
1527 		*ph_control = 0;
1528 	return hr.error;
1529 }
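
/*
 * Mixer controls are addressed by the (source node, destination node)
 * pair they sit between.  An illustrative lookup of a volume control
 * between outstream 0 and line out 0 (node and control type constants
 * come from hpi.h; whether such a control exists depends on the adapter):
 *
 *	u32 h_vol;
 *	err = hpi_mixer_get_control(ss, h_mixer,
 *		HPI_SOURCENODE_OSTREAM, 0, HPI_DESTNODE_LINEOUT, 0,
 *		HPI_CONTROL_VOLUME, &h_vol);
 */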
1530 
1531 u16 hpi_mixer_get_control_by_index(const struct hpi_hsubsys *ph_subsys,
1532 	u32 h_mixer, u16 control_index, u16 *pw_src_node_type,
1533 	u16 *pw_src_node_index, u16 *pw_dst_node_type, u16 *pw_dst_node_index,
1534 	u16 *pw_control_type, u32 *ph_control)
1535 {
1536 	struct hpi_message hm;
1537 	struct hpi_response hr;
1538 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER,
1539 		HPI_MIXER_GET_CONTROL_BY_INDEX);
1540 	u32TOINDEX(h_mixer, &hm.adapter_index);
1541 	hm.u.m.control_index = control_index;
1542 	hpi_send_recv(&hm, &hr);
1543 
1544 	if (pw_src_node_type) {
1545 		*pw_src_node_type =
1546 			hr.u.m.src_node_type + HPI_SOURCENODE_NONE;
1547 		*pw_src_node_index = hr.u.m.src_node_index;
1548 		*pw_dst_node_type = hr.u.m.dst_node_type + HPI_DESTNODE_NONE;
1549 		*pw_dst_node_index = hr.u.m.dst_node_index;
1550 	}
1551 	if (pw_control_type)
1552 		*pw_control_type = hr.u.m.control_index;
1553 
1554 	if (ph_control) {
1555 		if (hr.error == 0)
1556 			*ph_control =
1557 				hpi_indexes_to_handle(HPI_OBJ_CONTROL,
1558 				hm.adapter_index, control_index);
1559 		else
1560 			*ph_control = 0;
1561 	}
1562 	return hr.error;
1563 }
1564 
1565 u16 hpi_mixer_store(const struct hpi_hsubsys *ph_subsys, u32 h_mixer,
1566 	enum HPI_MIXER_STORE_COMMAND command, u16 index)
1567 {
1568 	struct hpi_message hm;
1569 	struct hpi_response hr;
1570 	hpi_init_message_response(&hm, &hr, HPI_OBJ_MIXER, HPI_MIXER_STORE);
1571 	u32TOINDEX(h_mixer, &hm.adapter_index);
1572 	hm.u.mx.store.command = command;
1573 	hm.u.mx.store.index = index;
1574 	hpi_send_recv(&hm, &hr);
1575 	return hr.error;
1576 }
1577 
1578 static
1579 u16 hpi_control_param_set(const struct hpi_hsubsys *ph_subsys,
1580 	const u32 h_control, const u16 attrib, const u32 param1,
1581 	const u32 param2)
1582 {
1583 	struct hpi_message hm;
1584 	struct hpi_response hr;
1585 
1586 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1587 		HPI_CONTROL_SET_STATE);
1588 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1589 	hm.u.c.attribute = attrib;
1590 	hm.u.c.param1 = param1;
1591 	hm.u.c.param2 = param2;
1592 	hpi_send_recv(&hm, &hr);
1593 	return hr.error;
1594 }
1595 
1596 static u16 hpi_control_log_set2(u32 h_control, u16 attrib, short sv0,
1597 	short sv1)
1598 {
1599 	struct hpi_message hm;
1600 	struct hpi_response hr;
1601 
1602 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1603 		HPI_CONTROL_SET_STATE);
1604 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1605 	hm.u.c.attribute = attrib;
1606 	hm.u.c.an_log_value[0] = sv0;
1607 	hm.u.c.an_log_value[1] = sv1;
1608 	hpi_send_recv(&hm, &hr);
1609 	return hr.error;
1610 }
1611 
1612 static
1613 u16 hpi_control_param_get(const struct hpi_hsubsys *ph_subsys,
1614 	const u32 h_control, const u16 attrib, u32 param1, u32 param2,
1615 	u32 *pparam1, u32 *pparam2)
1616 {
1617 	struct hpi_message hm;
1618 	struct hpi_response hr;
1619 
1620 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1621 		HPI_CONTROL_GET_STATE);
1622 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1623 	hm.u.c.attribute = attrib;
1624 	hm.u.c.param1 = param1;
1625 	hm.u.c.param2 = param2;
1626 	hpi_send_recv(&hm, &hr);
1627 
1628 	*pparam1 = hr.u.c.param1;
1629 	if (pparam2)
1630 		*pparam2 = hr.u.c.param2;
1631 
1632 	return hr.error;
1633 }
1634 
1635 #define hpi_control_param1_get(s, h, a, p1) \
1636 		hpi_control_param_get(s, h, a, 0, 0, p1, NULL)
1637 #define hpi_control_param2_get(s, h, a, p1, p2) \
1638 		hpi_control_param_get(s, h, a, 0, 0, p1, p2)
1639 
1640 static u16 hpi_control_log_get2(const struct hpi_hsubsys *ph_subsys,
1641 	u32 h_control, u16 attrib, short *sv0, short *sv1)
1642 {
1643 	struct hpi_message hm;
1644 	struct hpi_response hr;
1645 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1646 		HPI_CONTROL_GET_STATE);
1647 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1648 	hm.u.c.attribute = attrib;
1649 
1650 	hpi_send_recv(&hm, &hr);
1651 	*sv0 = hr.u.c.an_log_value[0];
1652 	if (sv1)
1653 		*sv1 = hr.u.c.an_log_value[1];
1654 	return hr.error;
1655 }
1656 
1657 static
1658 u16 hpi_control_query(const struct hpi_hsubsys *ph_subsys,
1659 	const u32 h_control, const u16 attrib, const u32 index,
1660 	const u32 param, u32 *psetting)
1661 {
1662 	struct hpi_message hm;
1663 	struct hpi_response hr;
1664 
1665 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1666 		HPI_CONTROL_GET_INFO);
1667 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1668 
1669 	hm.u.c.attribute = attrib;
1670 	hm.u.c.param1 = index;
1671 	hm.u.c.param2 = param;
1672 
1673 	hpi_send_recv(&hm, &hr);
1674 	*psetting = hr.u.c.param1;
1675 
1676 	return hr.error;
1677 }
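
/*
 * The static helpers above (hpi_control_param_set()/_get(), the *_log_*
 * variants and hpi_control_query()) implement the generic control
 * protocol: an attribute code plus up to two u32 parameters, or a pair of
 * logarithmic gain values, carried in HPI_CONTROL_SET_STATE,
 * HPI_CONTROL_GET_STATE and HPI_CONTROL_GET_INFO messages.  Most public
 * control functions later in this file are one-line wrappers around them;
 * HPI_AESEBU__receiver_set_format(), for instance, is just
 * hpi_control_param_set(..., HPI_AESEBURX_FORMAT, format, 0).
 */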
1678 
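/*
 * hpi_control_get_string() fetches a string-valued control attribute
 * eight characters per message (the chars8 sub-response), using
 * remaining_chars to detect truncation: if the first chunk reports more
 * characters than the caller's buffer can hold, or the buffer fills
 * before a terminating NUL arrives, the string is NUL-terminated at the
 * end of the buffer and HPI_ERROR_INVALID_CONTROL_VALUE is returned.
 */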
1679 static u16 hpi_control_get_string(const u32 h_control, const u16 attribute,
1680 	char *psz_string, const u32 string_length)
1681 {
1682 	unsigned int sub_string_index = 0, j = 0;
1683 	char c = 0;
1684 	unsigned int n = 0;
1685 	u16 hE = 0;
1686 
1687 	if ((string_length < 1) || (string_length > 256))
1688 		return HPI_ERROR_INVALID_CONTROL_VALUE;
1689 	for (sub_string_index = 0; sub_string_index < string_length;
1690 		sub_string_index += 8) {
1691 		struct hpi_message hm;
1692 		struct hpi_response hr;
1693 
1694 		hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1695 			HPI_CONTROL_GET_STATE);
1696 		u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1697 		hm.u.c.attribute = attribute;
1698 		hm.u.c.param1 = sub_string_index;
1699 		hm.u.c.param2 = 0;
1700 		hpi_send_recv(&hm, &hr);
1701 
1702 		if (sub_string_index == 0
1703 			&& (hr.u.cu.chars8.remaining_chars + 8) >
1704 			string_length)
1705 			return HPI_ERROR_INVALID_CONTROL_VALUE;
1706 
1707 		if (hr.error) {
1708 			hE = hr.error;
1709 			break;
1710 		}
1711 		for (j = 0; j < 8; j++) {
1712 			c = hr.u.cu.chars8.sz_data[j];
1713 			psz_string[sub_string_index + j] = c;
1714 			n++;
1715 			if (n >= string_length) {
1716 				psz_string[string_length - 1] = 0;
1717 				hE = HPI_ERROR_INVALID_CONTROL_VALUE;
1718 				break;
1719 			}
1720 			if (c == 0)
1721 				break;
1722 		}
1723 
1724 		if ((hr.u.cu.chars8.remaining_chars == 0)
1725 			&& ((sub_string_index + j) < string_length)
1726 			&& (c != 0)) {
1727 			c = 0;
1728 			psz_string[sub_string_index + j] = c;
1729 		}
1730 		if (c == 0)
1731 			break;
1732 	}
1733 	return hE;
1734 }
1735 
1736 u16 HPI_AESEBU__receiver_query_format(const struct hpi_hsubsys *ph_subsys,
1737 	const u32 h_aes_rx, const u32 index, u16 *pw_format)
1738 {
1739 	u32 qr;
1740 	u16 err;
1741 
1742 	err = hpi_control_query(ph_subsys, h_aes_rx, HPI_AESEBURX_FORMAT,
1743 		index, 0, &qr);
1744 	*pw_format = (u16)qr;
1745 	return err;
1746 }
1747 
1748 u16 HPI_AESEBU__receiver_set_format(const struct hpi_hsubsys *ph_subsys,
1749 	u32 h_control, u16 format)
1750 {
1751 	return hpi_control_param_set(ph_subsys, h_control,
1752 		HPI_AESEBURX_FORMAT, format, 0);
1753 }
1754 
1755 u16 HPI_AESEBU__receiver_get_format(const struct hpi_hsubsys *ph_subsys,
1756 	u32 h_control, u16 *pw_format)
1757 {
1758 	u16 err;
1759 	u32 param;
1760 
1761 	err = hpi_control_param1_get(ph_subsys, h_control,
1762 		HPI_AESEBURX_FORMAT, &param);
1763 	if (!err && pw_format)
1764 		*pw_format = (u16)param;
1765 
1766 	return err;
1767 }
1768 
1769 u16 HPI_AESEBU__receiver_get_sample_rate(const struct hpi_hsubsys *ph_subsys,
1770 	u32 h_control, u32 *psample_rate)
1771 {
1772 	return hpi_control_param1_get(ph_subsys, h_control,
1773 		HPI_AESEBURX_SAMPLERATE, psample_rate);
1774 }
1775 
1776 u16 HPI_AESEBU__receiver_get_user_data(const struct hpi_hsubsys *ph_subsys,
1777 	u32 h_control, u16 index, u16 *pw_data)
1778 {
1779 	struct hpi_message hm;
1780 	struct hpi_response hr;
1781 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1782 		HPI_CONTROL_GET_STATE);
1783 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1784 	hm.u.c.attribute = HPI_AESEBURX_USERDATA;
1785 	hm.u.c.param1 = index;
1786 
1787 	hpi_send_recv(&hm, &hr);
1788 
1789 	if (pw_data)
1790 		*pw_data = (u16)hr.u.c.param2;
1791 	return hr.error;
1792 }
1793 
1794 u16 HPI_AESEBU__receiver_get_channel_status(const struct hpi_hsubsys
1795 	*ph_subsys, u32 h_control, u16 index, u16 *pw_data)
1796 {
1797 	struct hpi_message hm;
1798 	struct hpi_response hr;
1799 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1800 		HPI_CONTROL_GET_STATE);
1801 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1802 	hm.u.c.attribute = HPI_AESEBURX_CHANNELSTATUS;
1803 	hm.u.c.param1 = index;
1804 
1805 	hpi_send_recv(&hm, &hr);
1806 
1807 	if (pw_data)
1808 		*pw_data = (u16)hr.u.c.param2;
1809 	return hr.error;
1810 }
1811 
1812 u16 HPI_AESEBU__receiver_get_error_status(const struct hpi_hsubsys *ph_subsys,
1813 	u32 h_control, u16 *pw_error_data)
1814 {
1815 	u32 error_data = 0;
1816 	u16 error = 0;
1817 
1818 	error = hpi_control_param1_get(ph_subsys, h_control,
1819 		HPI_AESEBURX_ERRORSTATUS, &error_data);
1820 	if (pw_error_data)
1821 		*pw_error_data = (u16)error_data;
1822 	return error;
1823 }
1824 
1825 u16 HPI_AESEBU__transmitter_set_sample_rate(const struct hpi_hsubsys
1826 	*ph_subsys, u32 h_control, u32 sample_rate)
1827 {
1828 	return hpi_control_param_set(ph_subsys, h_control,
1829 		HPI_AESEBUTX_SAMPLERATE, sample_rate, 0);
1830 }
1831 
1832 u16 HPI_AESEBU__transmitter_set_user_data(const struct hpi_hsubsys *ph_subsys,
1833 	u32 h_control, u16 index, u16 data)
1834 {
1835 	return hpi_control_param_set(ph_subsys, h_control,
1836 		HPI_AESEBUTX_USERDATA, index, data);
1837 }
1838 
1839 u16 HPI_AESEBU__transmitter_set_channel_status(const struct hpi_hsubsys
1840 	*ph_subsys, u32 h_control, u16 index, u16 data)
1841 {
1842 	return hpi_control_param_set(ph_subsys, h_control,
1843 		HPI_AESEBUTX_CHANNELSTATUS, index, data);
1844 }
1845 
1846 u16 HPI_AESEBU__transmitter_get_channel_status(const struct hpi_hsubsys
1847 	*ph_subsys, u32 h_control, u16 index, u16 *pw_data)
1848 {
1849 	return HPI_ERROR_INVALID_OPERATION;
1850 }
1851 
1852 u16 HPI_AESEBU__transmitter_query_format(const struct hpi_hsubsys *ph_subsys,
1853 	const u32 h_aes_tx, const u32 index, u16 *pw_format)
1854 {
1855 	u32 qr;
1856 	u16 err;
1857 
1858 	err = hpi_control_query(ph_subsys, h_aes_tx, HPI_AESEBUTX_FORMAT,
1859 		index, 0, &qr);
1860 	*pw_format = (u16)qr;
1861 	return err;
1862 }
1863 
1864 u16 HPI_AESEBU__transmitter_set_format(const struct hpi_hsubsys *ph_subsys,
1865 	u32 h_control, u16 output_format)
1866 {
1867 	return hpi_control_param_set(ph_subsys, h_control,
1868 		HPI_AESEBUTX_FORMAT, output_format, 0);
1869 }
1870 
1871 u16 HPI_AESEBU__transmitter_get_format(const struct hpi_hsubsys *ph_subsys,
1872 	u32 h_control, u16 *pw_output_format)
1873 {
1874 	u16 err;
1875 	u32 param;
1876 
1877 	err = hpi_control_param1_get(ph_subsys, h_control,
1878 		HPI_AESEBUTX_FORMAT, &param);
1879 	if (!err && pw_output_format)
1880 		*pw_output_format = (u16)param;
1881 
1882 	return err;
1883 }
1884 
1885 u16 hpi_bitstream_set_clock_edge(const struct hpi_hsubsys *ph_subsys,
1886 	u32 h_control, u16 edge_type)
1887 {
1888 	return hpi_control_param_set(ph_subsys, h_control,
1889 		HPI_BITSTREAM_CLOCK_EDGE, edge_type, 0);
1890 }
1891 
1892 u16 hpi_bitstream_set_data_polarity(const struct hpi_hsubsys *ph_subsys,
1893 	u32 h_control, u16 polarity)
1894 {
1895 	return hpi_control_param_set(ph_subsys, h_control,
1896 		HPI_BITSTREAM_DATA_POLARITY, polarity, 0);
1897 }
1898 
1899 u16 hpi_bitstream_get_activity(const struct hpi_hsubsys *ph_subsys,
1900 	u32 h_control, u16 *pw_clk_activity, u16 *pw_data_activity)
1901 {
1902 	struct hpi_message hm;
1903 	struct hpi_response hr;
1904 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
1905 		HPI_CONTROL_GET_STATE);
1906 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1907 	hm.u.c.attribute = HPI_BITSTREAM_ACTIVITY;
1908 	hpi_send_recv(&hm, &hr);
1909 	if (pw_clk_activity)
1910 		*pw_clk_activity = (u16)hr.u.c.param1;
1911 	if (pw_data_activity)
1912 		*pw_data_activity = (u16)hr.u.c.param2;
1913 	return hr.error;
1914 }
1915 
1916 u16 hpi_channel_mode_query_mode(const struct hpi_hsubsys *ph_subsys,
1917 	const u32 h_mode, const u32 index, u16 *pw_mode)
1918 {
1919 	u32 qr;
1920 	u16 err;
1921 
1922 	err = hpi_control_query(ph_subsys, h_mode, HPI_CHANNEL_MODE_MODE,
1923 		index, 0, &qr);
1924 	*pw_mode = (u16)qr;
1925 	return err;
1926 }
1927 
1928 u16 hpi_channel_mode_set(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1929 	u16 mode)
1930 {
1931 	return hpi_control_param_set(ph_subsys, h_control,
1932 		HPI_CHANNEL_MODE_MODE, mode, 0);
1933 }
1934 
1935 u16 hpi_channel_mode_get(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1936 	u16 *mode)
1937 {
1938 	u32 mode32 = 0;
1939 	u16 error = hpi_control_param1_get(ph_subsys, h_control,
1940 		HPI_CHANNEL_MODE_MODE, &mode32);
1941 	if (mode)
1942 		*mode = (u16)mode32;
1943 	return error;
1944 }
1945 
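/* Write 'byte_count' bytes to a CobraNet HMI variable.  Payloads of up
 * to 8 bytes travel inside the message itself (HPI_COBRANET_SET);
 * larger buffers are passed by pointer (HPI_COBRANET_SET_DATA).
 */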
1946 u16 hpi_cobranet_hmi_write(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1947 	u32 hmi_address, u32 byte_count, u8 *pb_data)
1948 {
1949 	struct hpi_message hm;
1950 	struct hpi_response hr;
1951 
1952 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1953 		HPI_CONTROL_SET_STATE);
1954 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1955 
1956 	hm.u.cx.u.cobranet_data.byte_count = byte_count;
1957 	hm.u.cx.u.cobranet_data.hmi_address = hmi_address;
1958 
1959 	if (byte_count <= 8) {
1960 		memcpy(hm.u.cx.u.cobranet_data.data, pb_data, byte_count);
1961 		hm.u.cx.attribute = HPI_COBRANET_SET;
1962 	} else {
1963 		hm.u.cx.u.cobranet_bigdata.pb_data = pb_data;
1964 		hm.u.cx.attribute = HPI_COBRANET_SET_DATA;
1965 	}
1966 
1967 	hpi_send_recv(&hm, &hr);
1968 
1969 	return hr.error;
1970 }
1971 
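/* Read up to 'max_byte_count' bytes from a CobraNet HMI variable.
 * Small reads (<= 8 bytes, HPI_COBRANET_GET) come back inside the
 * response and are copied out here; larger reads (HPI_COBRANET_GET_DATA)
 * pass the caller's buffer pointer in the message and are presumably
 * filled in place by the lower layers, so only the byte count is
 * copied back.
 */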
1972 u16 hpi_cobranet_hmi_read(const struct hpi_hsubsys *ph_subsys, u32 h_control,
1973 	u32 hmi_address, u32 max_byte_count, u32 *pbyte_count, u8 *pb_data)
1974 {
1975 	struct hpi_message hm;
1976 	struct hpi_response hr;
1977 
1978 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
1979 		HPI_CONTROL_GET_STATE);
1980 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
1981 
1982 	hm.u.cx.u.cobranet_data.byte_count = max_byte_count;
1983 	hm.u.cx.u.cobranet_data.hmi_address = hmi_address;
1984 
1985 	if (max_byte_count <= 8) {
1986 		hm.u.cx.attribute = HPI_COBRANET_GET;
1987 	} else {
1988 		hm.u.cx.u.cobranet_bigdata.pb_data = pb_data;
1989 		hm.u.cx.attribute = HPI_COBRANET_GET_DATA;
1990 	}
1991 
1992 	hpi_send_recv(&hm, &hr);
1993 	if (!hr.error && pb_data) {
1994 
1995 		*pbyte_count = hr.u.cx.u.cobranet_data.byte_count;
1996 
1997 		if (*pbyte_count < max_byte_count)
1998 			max_byte_count = *pbyte_count;
1999 
2000 		if (hm.u.cx.attribute == HPI_COBRANET_GET) {
2001 			memcpy(pb_data, hr.u.cx.u.cobranet_data.data,
2002 				max_byte_count);
2003 		}

2006 
2007 	}
2008 	return hr.error;
2009 }
2010 
2011 u16 hpi_cobranet_hmi_get_status(const struct hpi_hsubsys *ph_subsys,
2012 	u32 h_control, u32 *pstatus, u32 *preadable_size,
2013 	u32 *pwriteable_size)
2014 {
2015 	struct hpi_message hm;
2016 	struct hpi_response hr;
2017 
2018 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROLEX,
2019 		HPI_CONTROL_GET_STATE);
2020 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2021 
2022 	hm.u.cx.attribute = HPI_COBRANET_GET_STATUS;
2023 
2024 	hpi_send_recv(&hm, &hr);
2025 	if (!hr.error) {
2026 		if (pstatus)
2027 			*pstatus = hr.u.cx.u.cobranet_status.status;
2028 		if (preadable_size)
2029 			*preadable_size =
2030 				hr.u.cx.u.cobranet_status.readable_size;
2031 		if (pwriteable_size)
2032 			*pwriteable_size =
2033 				hr.u.cx.u.cobranet_status.writeable_size;
2034 	}
2035 	return hr.error;
2036 }
2037 
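/* The CobraNet HMI stores IP and MAC addresses with the bytes of each
 * 16-bit half swapped relative to the value handed to or from the
 * caller; the shift/mask expressions in the helpers below convert
 * between the two layouts.
 */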
2038 u16 hpi_cobranet_getI_paddress(const struct hpi_hsubsys *ph_subsys,
2039 	u32 h_control, u32 *pi_paddress)
2040 {
2041 	u32 byte_count;
2042 	u32 iP;
2043 	u16 error;
2044 
2045 	error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2046 		HPI_COBRANET_HMI_cobra_ip_mon_currentIP, 4, &byte_count,
2047 		(u8 *)&iP);
2048 
2049 	*pi_paddress =
2050 		((iP & 0xff000000) >> 8) | ((iP & 0x00ff0000) << 8) | ((iP &
2051 			0x0000ff00) >> 8) | ((iP & 0x000000ff) << 8);
2052 
2053 	if (error)
2054 		*pi_paddress = 0;
2055 
2056 	return error;
2057 
2058 }
2059 
2060 u16 hpi_cobranet_setI_paddress(const struct hpi_hsubsys *ph_subsys,
2061 	u32 h_control, u32 i_paddress)
2062 {
2063 	u32 iP;
2064 	u16 error;
2065 
2066 	iP = ((i_paddress & 0xff000000) >> 8) | ((i_paddress & 0x00ff0000) <<
2067 		8) | ((i_paddress & 0x0000ff00) >> 8) | ((i_paddress &
2068 			0x000000ff) << 8);
2069 
2070 	error = hpi_cobranet_hmi_write(ph_subsys, h_control,
2071 		HPI_COBRANET_HMI_cobra_ip_mon_currentIP, 4, (u8 *)&iP);
2072 
2073 	return error;
2074 
2075 }
2076 
2077 u16 hpi_cobranet_get_staticI_paddress(const struct hpi_hsubsys *ph_subsys,
2078 	u32 h_control, u32 *pi_paddress)
2079 {
2080 	u32 byte_count;
2081 	u32 iP;
2082 	u16 error;
2083 	error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2084 		HPI_COBRANET_HMI_cobra_ip_mon_staticIP, 4, &byte_count,
2085 		(u8 *)&iP);
2086 
2087 	*pi_paddress =
2088 		((iP & 0xff000000) >> 8) | ((iP & 0x00ff0000) << 8) | ((iP &
2089 			0x0000ff00) >> 8) | ((iP & 0x000000ff) << 8);
2090 
2091 	if (error)
2092 		*pi_paddress = 0;
2093 
2094 	return error;
2095 
2096 }
2097 
2098 u16 hpi_cobranet_set_staticI_paddress(const struct hpi_hsubsys *ph_subsys,
2099 	u32 h_control, u32 i_paddress)
2100 {
2101 	u32 iP;
2102 	u16 error;
2103 
2104 	iP = ((i_paddress & 0xff000000) >> 8) | ((i_paddress & 0x00ff0000) <<
2105 		8) | ((i_paddress & 0x0000ff00) >> 8) | ((i_paddress &
2106 			0x000000ff) << 8);
2107 
2108 	error = hpi_cobranet_hmi_write(ph_subsys, h_control,
2109 		HPI_COBRANET_HMI_cobra_ip_mon_staticIP, 4, (u8 *)&iP);
2110 
2111 	return error;
2112 
2113 }
2114 
2115 u16 hpi_cobranet_getMA_caddress(const struct hpi_hsubsys *ph_subsys,
2116 	u32 h_control, u32 *pmAC_MS_bs, u32 *pmAC_LS_bs)
2117 {
2118 	u32 byte_count;
2119 	u16 error;
2120 	u32 mAC;
2121 
2122 	error = hpi_cobranet_hmi_read(ph_subsys, h_control,
2123 		HPI_COBRANET_HMI_cobra_if_phy_address, 4, &byte_count,
2124 		(u8 *)&mAC);
2125 	*pmAC_MS_bs =
2126 		((mAC & 0xff000000) >> 8) | ((mAC & 0x00ff0000) << 8) | ((mAC
2127 			& 0x0000ff00) >> 8) | ((mAC & 0x000000ff) << 8);
2128 	error += hpi_cobranet_hmi_read(ph_subsys, h_control,
2129 		HPI_COBRANET_HMI_cobra_if_phy_address + 1, 4, &byte_count,
2130 		(u8 *)&mAC);
2131 	*pmAC_LS_bs =
2132 		((mAC & 0xff000000) >> 8) | ((mAC & 0x00ff0000) << 8) | ((mAC
2133 			& 0x0000ff00) >> 8) | ((mAC & 0x000000ff) << 8);
2134 
2135 	if (error) {
2136 		*pmAC_MS_bs = 0;
2137 		*pmAC_LS_bs = 0;
2138 	}
2139 
2140 	return error;
2141 }
2142 
2143 u16 hpi_compander_set_enable(const struct hpi_hsubsys *ph_subsys,
2144 	u32 h_control, u32 enable)
2145 {
2146 	return hpi_control_param_set(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2147 		enable, 0);
2148 }
2149 
2150 u16 hpi_compander_get_enable(const struct hpi_hsubsys *ph_subsys,
2151 	u32 h_control, u32 *enable)
2152 {
2153 	return hpi_control_param1_get(ph_subsys, h_control,
2154 		HPI_GENERIC_ENABLE, enable);
2155 }
2156 
2157 u16 hpi_compander_set_makeup_gain(const struct hpi_hsubsys *ph_subsys,
2158 	u32 h_control, short makeup_gain0_01dB)
2159 {
2160 	return hpi_control_log_set2(h_control, HPI_COMPANDER_MAKEUPGAIN,
2161 		makeup_gain0_01dB, 0);
2162 }
2163 
2164 u16 hpi_compander_get_makeup_gain(const struct hpi_hsubsys *ph_subsys,
2165 	u32 h_control, short *makeup_gain0_01dB)
2166 {
2167 	return hpi_control_log_get2(ph_subsys, h_control,
2168 		HPI_COMPANDER_MAKEUPGAIN, makeup_gain0_01dB, NULL);
2169 }
2170 
2171 u16 hpi_compander_set_attack_time_constant(const struct hpi_hsubsys
2172 	*ph_subsys, u32 h_control, unsigned int index, u32 attack)
2173 {
2174 	return hpi_control_param_set(ph_subsys, h_control,
2175 		HPI_COMPANDER_ATTACK, attack, index);
2176 }
2177 
2178 u16 hpi_compander_get_attack_time_constant(const struct hpi_hsubsys
2179 	*ph_subsys, u32 h_control, unsigned int index, u32 *attack)
2180 {
2181 	return hpi_control_param_get(ph_subsys, h_control,
2182 		HPI_COMPANDER_ATTACK, 0, index, attack, NULL);
2183 }
2184 
2185 u16 hpi_compander_set_decay_time_constant(const struct hpi_hsubsys *ph_subsys,
2186 	u32 h_control, unsigned int index, u32 decay)
2187 {
2188 	return hpi_control_param_set(ph_subsys, h_control,
2189 		HPI_COMPANDER_DECAY, decay, index);
2190 }
2191 
2192 u16 hpi_compander_get_decay_time_constant(const struct hpi_hsubsys *ph_subsys,
2193 	u32 h_control, unsigned int index, u32 *decay)
2194 {
2195 	return hpi_control_param_get(ph_subsys, h_control,
2196 		HPI_COMPANDER_DECAY, 0, index, decay, NULL);
2197 
2198 }
2199 
2200 u16 hpi_compander_set_threshold(const struct hpi_hsubsys *ph_subsys,
2201 	u32 h_control, unsigned int index, short threshold0_01dB)
2202 {
2203 	struct hpi_message hm;
2204 	struct hpi_response hr;
2205 
2206 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2207 		HPI_CONTROL_SET_STATE);
2208 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2209 	hm.u.c.attribute = HPI_COMPANDER_THRESHOLD;
2210 	hm.u.c.param2 = index;
2211 	hm.u.c.an_log_value[0] = threshold0_01dB;
2212 
2213 	hpi_send_recv(&hm, &hr);
2214 
2215 	return hr.error;
2216 }
2217 
2218 u16 hpi_compander_get_threshold(const struct hpi_hsubsys *ph_subsys,
2219 	u32 h_control, unsigned int index, short *threshold0_01dB)
2220 {
2221 	struct hpi_message hm;
2222 	struct hpi_response hr;
2223 
2224 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2225 		HPI_CONTROL_GET_STATE);
2226 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2227 	hm.u.c.attribute = HPI_COMPANDER_THRESHOLD;
2228 	hm.u.c.param2 = index;
2229 
2230 	hpi_send_recv(&hm, &hr);
2231 	*threshold0_01dB = hr.u.c.an_log_value[0];
2232 
2233 	return hr.error;
2234 }
2235 
2236 u16 hpi_compander_set_ratio(const struct hpi_hsubsys *ph_subsys,
2237 	u32 h_control, u32 index, u32 ratio100)
2238 {
2239 	return hpi_control_param_set(ph_subsys, h_control,
2240 		HPI_COMPANDER_RATIO, ratio100, index);
2241 }
2242 
2243 u16 hpi_compander_get_ratio(const struct hpi_hsubsys *ph_subsys,
2244 	u32 h_control, u32 index, u32 *ratio100)
2245 {
2246 	return hpi_control_param_get(ph_subsys, h_control,
2247 		HPI_COMPANDER_RATIO, 0, index, ratio100, NULL);
2248 }
2249 
2250 u16 hpi_level_query_range(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2251 	short *min_gain_01dB, short *max_gain_01dB, short *step_gain_01dB)
2252 {
2253 	struct hpi_message hm;
2254 	struct hpi_response hr;
2255 
2256 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2257 		HPI_CONTROL_GET_STATE);
2258 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2259 	hm.u.c.attribute = HPI_LEVEL_RANGE;
2260 
2261 	hpi_send_recv(&hm, &hr);
2262 	if (hr.error) {
2263 		hr.u.c.an_log_value[0] = 0;
2264 		hr.u.c.an_log_value[1] = 0;
2265 		hr.u.c.param1 = 0;
2266 	}
2267 	if (min_gain_01dB)
2268 		*min_gain_01dB = hr.u.c.an_log_value[0];
2269 	if (max_gain_01dB)
2270 		*max_gain_01dB = hr.u.c.an_log_value[1];
2271 	if (step_gain_01dB)
2272 		*step_gain_01dB = (short)hr.u.c.param1;
2273 	return hr.error;
2274 }
2275 
2276 u16 hpi_level_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2277 	short an_gain0_01dB[HPI_MAX_CHANNELS]
2278 	)
2279 {
2280 	return hpi_control_log_set2(h_control, HPI_LEVEL_GAIN,
2281 		an_gain0_01dB[0], an_gain0_01dB[1]);
2282 }
2283 
2284 u16 hpi_level_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2285 	short an_gain0_01dB[HPI_MAX_CHANNELS]
2286 	)
2287 {
2288 	return hpi_control_log_get2(ph_subsys, h_control, HPI_LEVEL_GAIN,
2289 		&an_gain0_01dB[0], &an_gain0_01dB[1]);
2290 }
2291 
2292 u16 hpi_meter_query_channels(const struct hpi_hsubsys *ph_subsys,
2293 	const u32 h_meter, u32 *p_channels)
2294 {
2295 	return hpi_control_query(ph_subsys, h_meter, HPI_METER_NUM_CHANNELS,
2296 		0, 0, p_channels);
2297 }
2298 
2299 u16 hpi_meter_get_peak(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2300 	short an_peakdB[HPI_MAX_CHANNELS]
2301 	)
2302 {
2303 	short i = 0;
2304 
2305 	struct hpi_message hm;
2306 	struct hpi_response hr;
2307 
2308 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2309 		HPI_CONTROL_GET_STATE);
2310 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2312 	hm.u.c.attribute = HPI_METER_PEAK;
2313 
2314 	hpi_send_recv(&hm, &hr);
2315 
2316 	if (!hr.error)
2317 		memcpy(an_peakdB, hr.u.c.an_log_value,
2318 			sizeof(short) * HPI_MAX_CHANNELS);
2319 	else
2320 		for (i = 0; i < HPI_MAX_CHANNELS; i++)
2321 			an_peakdB[i] = HPI_METER_MINIMUM;
2322 	return hr.error;
2323 }
2324 
2325 u16 hpi_meter_get_rms(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2326 	short an_rmsdB[HPI_MAX_CHANNELS]
2327 	)
2328 {
2329 	short i = 0;
2330 
2331 	struct hpi_message hm;
2332 	struct hpi_response hr;
2333 
2334 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2335 		HPI_CONTROL_GET_STATE);
2336 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2337 	hm.u.c.attribute = HPI_METER_RMS;
2338 
2339 	hpi_send_recv(&hm, &hr);
2340 
2341 	if (!hr.error)
2342 		memcpy(an_rmsdB, hr.u.c.an_log_value,
2343 			sizeof(short) * HPI_MAX_CHANNELS);
2344 	else
2345 		for (i = 0; i < HPI_MAX_CHANNELS; i++)
2346 			an_rmsdB[i] = HPI_METER_MINIMUM;
2347 
2348 	return hr.error;
2349 }
2350 
2351 u16 hpi_meter_set_rms_ballistics(const struct hpi_hsubsys *ph_subsys,
2352 	u32 h_control, u16 attack, u16 decay)
2353 {
2354 	return hpi_control_param_set(ph_subsys, h_control,
2355 		HPI_METER_RMS_BALLISTICS, attack, decay);
2356 }
2357 
2358 u16 hpi_meter_get_rms_ballistics(const struct hpi_hsubsys *ph_subsys,
2359 	u32 h_control, u16 *pn_attack, u16 *pn_decay)
2360 {
2361 	u32 attack;
2362 	u32 decay;
2363 	u16 error;
2364 
2365 	error = hpi_control_param2_get(ph_subsys, h_control,
2366 		HPI_METER_RMS_BALLISTICS, &attack, &decay);
2367 
2368 	if (pn_attack)
2369 		*pn_attack = (unsigned short)attack;
2370 	if (pn_decay)
2371 		*pn_decay = (unsigned short)decay;
2372 
2373 	return error;
2374 }
2375 
2376 u16 hpi_meter_set_peak_ballistics(const struct hpi_hsubsys *ph_subsys,
2377 	u32 h_control, u16 attack, u16 decay)
2378 {
2379 	return hpi_control_param_set(ph_subsys, h_control,
2380 		HPI_METER_PEAK_BALLISTICS, attack, decay);
2381 }
2382 
2383 u16 hpi_meter_get_peak_ballistics(const struct hpi_hsubsys *ph_subsys,
2384 	u32 h_control, u16 *pn_attack, u16 *pn_decay)
2385 {
2386 	u32 attack;
2387 	u32 decay;
2388 	u16 error;
2389 
2390 	error = hpi_control_param2_get(ph_subsys, h_control,
2391 		HPI_METER_PEAK_BALLISTICS, &attack, &decay);
2392 
2393 	if (pn_attack)
2394 		*pn_attack = (u16)attack;
2395 	if (pn_decay)
2396 		*pn_decay = (u16)decay;
2397 
2398 	return error;
2399 }
2400 
2401 u16 hpi_microphone_set_phantom_power(const struct hpi_hsubsys *ph_subsys,
2402 	u32 h_control, u16 on_off)
2403 {
2404 	return hpi_control_param_set(ph_subsys, h_control,
2405 		HPI_MICROPHONE_PHANTOM_POWER, (u32)on_off, 0);
2406 }
2407 
2408 u16 hpi_microphone_get_phantom_power(const struct hpi_hsubsys *ph_subsys,
2409 	u32 h_control, u16 *pw_on_off)
2410 {
2411 	u16 error = 0;
2412 	u32 on_off = 0;
2413 	error = hpi_control_param1_get(ph_subsys, h_control,
2414 		HPI_MICROPHONE_PHANTOM_POWER, &on_off);
2415 	if (pw_on_off)
2416 		*pw_on_off = (u16)on_off;
2417 	return error;
2418 }
2419 
2420 u16 hpi_multiplexer_set_source(const struct hpi_hsubsys *ph_subsys,
2421 	u32 h_control, u16 source_node_type, u16 source_node_index)
2422 {
2423 	return hpi_control_param_set(ph_subsys, h_control,
2424 		HPI_MULTIPLEXER_SOURCE, source_node_type, source_node_index);
2425 }
2426 
2427 u16 hpi_multiplexer_get_source(const struct hpi_hsubsys *ph_subsys,
2428 	u32 h_control, u16 *source_node_type, u16 *source_node_index)
2429 {
2430 	u32 node, index;
2431 	u16 error = hpi_control_param2_get(ph_subsys, h_control,
2432 		HPI_MULTIPLEXER_SOURCE, &node,
2433 		&index);
2434 	if (source_node_type)
2435 		*source_node_type = (u16)node;
2436 	if (source_node_index)
2437 		*source_node_index = (u16)index;
2438 	return error;
2439 }
2440 
2441 u16 hpi_multiplexer_query_source(const struct hpi_hsubsys *ph_subsys,
2442 	u32 h_control, u16 index, u16 *source_node_type,
2443 	u16 *source_node_index)
2444 {
2445 	struct hpi_message hm;
2446 	struct hpi_response hr;
2447 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2448 		HPI_CONTROL_GET_STATE);
2449 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2450 	hm.u.c.attribute = HPI_MULTIPLEXER_QUERYSOURCE;
2451 	hm.u.c.param1 = index;
2452 
2453 	hpi_send_recv(&hm, &hr);
2454 
2455 	if (source_node_type)
2456 		*source_node_type = (u16)hr.u.c.param1;
2457 	if (source_node_index)
2458 		*source_node_index = (u16)hr.u.c.param2;
2459 	return hr.error;
2460 }
2461 
2462 u16 hpi_parametricEQ__get_info(const struct hpi_hsubsys *ph_subsys,
2463 	u32 h_control, u16 *pw_number_of_bands, u16 *pw_on_off)
2464 {
2465 	u32 oB = 0;
2466 	u32 oO = 0;
2467 	u16 error = 0;
2468 
2469 	error = hpi_control_param2_get(ph_subsys, h_control,
2470 		HPI_EQUALIZER_NUM_FILTERS, &oO, &oB);
2471 	if (pw_number_of_bands)
2472 		*pw_number_of_bands = (u16)oB;
2473 	if (pw_on_off)
2474 		*pw_on_off = (u16)oO;
2475 	return error;
2476 }
2477 
2478 u16 hpi_parametricEQ__set_state(const struct hpi_hsubsys *ph_subsys,
2479 	u32 h_control, u16 on_off)
2480 {
2481 	return hpi_control_param_set(ph_subsys, h_control,
2482 		HPI_EQUALIZER_NUM_FILTERS, on_off, 0);
2483 }
2484 
2485 u16 hpi_parametricEQ__get_band(const struct hpi_hsubsys *ph_subsys,
2486 	u32 h_control, u16 index, u16 *pn_type, u32 *pfrequency_hz,
2487 	short *pnQ100, short *pn_gain0_01dB)
2488 {
2489 	struct hpi_message hm;
2490 	struct hpi_response hr;
2491 
2492 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2493 		HPI_CONTROL_GET_STATE);
2494 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2495 	hm.u.c.attribute = HPI_EQUALIZER_FILTER;
2496 	hm.u.c.param2 = index;
2497 
2498 	hpi_send_recv(&hm, &hr);
2499 
2500 	if (pfrequency_hz)
2501 		*pfrequency_hz = hr.u.c.param1;
2502 	if (pn_type)
2503 		*pn_type = (u16)(hr.u.c.param2 >> 16);
2504 	if (pnQ100)
2505 		*pnQ100 = hr.u.c.an_log_value[1];
2506 	if (pn_gain0_01dB)
2507 		*pn_gain0_01dB = hr.u.c.an_log_value[0];
2508 
2509 	return hr.error;
2510 }
2511 
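/* Program one EQ band: param1 carries the centre frequency, param2
 * packs the band index in its low 16 bits and the filter type in its
 * high 16 bits, and gain and Q travel in the two an_log_value slots.
 */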
2512 u16 hpi_parametricEQ__set_band(const struct hpi_hsubsys *ph_subsys,
2513 	u32 h_control, u16 index, u16 type, u32 frequency_hz, short q100,
2514 	short gain0_01dB)
2515 {
2516 	struct hpi_message hm;
2517 	struct hpi_response hr;
2518 
2519 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2520 		HPI_CONTROL_SET_STATE);
2521 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2522 
2523 	hm.u.c.param1 = frequency_hz;
2524 	hm.u.c.param2 = (index & 0xFFFFL) + ((u32)type << 16);
2525 	hm.u.c.an_log_value[0] = gain0_01dB;
2526 	hm.u.c.an_log_value[1] = q100;
2527 	hm.u.c.attribute = HPI_EQUALIZER_FILTER;
2528 
2529 	hpi_send_recv(&hm, &hr);
2530 
2531 	return hr.error;
2532 }
2533 
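/* Unpack the five filter coefficients for band 'index' (presumably the
 * biquad terms): two from an_log_value[], two packed into param1 and
 * one in param2.
 */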
2534 u16 hpi_parametricEQ__get_coeffs(const struct hpi_hsubsys *ph_subsys,
2535 	u32 h_control, u16 index, short coeffs[5]
2536 	)
2537 {
2538 	struct hpi_message hm;
2539 	struct hpi_response hr;
2540 
2541 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2542 		HPI_CONTROL_GET_STATE);
2543 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2544 	hm.u.c.attribute = HPI_EQUALIZER_COEFFICIENTS;
2545 	hm.u.c.param2 = index;
2546 
2547 	hpi_send_recv(&hm, &hr);
2548 
2549 	coeffs[0] = (short)hr.u.c.an_log_value[0];
2550 	coeffs[1] = (short)hr.u.c.an_log_value[1];
2551 	coeffs[2] = (short)hr.u.c.param1;
2552 	coeffs[3] = (short)(hr.u.c.param1 >> 16);
2553 	coeffs[4] = (short)hr.u.c.param2;
2554 
2555 	return hr.error;
2556 }
2557 
2558 u16 hpi_sample_clock_query_source(const struct hpi_hsubsys *ph_subsys,
2559 	const u32 h_clock, const u32 index, u16 *pw_source)
2560 {
2561 	u32 qr;
2562 	u16 err;
2563 
2564 	err = hpi_control_query(ph_subsys, h_clock, HPI_SAMPLECLOCK_SOURCE,
2565 		index, 0, &qr);
2566 	*pw_source = (u16)qr;
2567 	return err;
2568 }
2569 
2570 u16 hpi_sample_clock_set_source(const struct hpi_hsubsys *ph_subsys,
2571 	u32 h_control, u16 source)
2572 {
2573 	return hpi_control_param_set(ph_subsys, h_control,
2574 		HPI_SAMPLECLOCK_SOURCE, source, 0);
2575 }
2576 
2577 u16 hpi_sample_clock_get_source(const struct hpi_hsubsys *ph_subsys,
2578 	u32 h_control, u16 *pw_source)
2579 {
2580 	u16 error = 0;
2581 	u32 source = 0;
2582 	error = hpi_control_param1_get(ph_subsys, h_control,
2583 		HPI_SAMPLECLOCK_SOURCE, &source);
2584 	if (!error)
2585 		if (pw_source)
2586 			*pw_source = (u16)source;
2587 	return error;
2588 }
2589 
2590 u16 hpi_sample_clock_query_source_index(const struct hpi_hsubsys *ph_subsys,
2591 	const u32 h_clock, const u32 index, const u32 source,
2592 	u16 *pw_source_index)
2593 {
2594 	u32 qr;
2595 	u16 err;
2596 
2597 	err = hpi_control_query(ph_subsys, h_clock,
2598 		HPI_SAMPLECLOCK_SOURCE_INDEX, index, source, &qr);
2599 	*pw_source_index = (u16)qr;
2600 	return err;
2601 }
2602 
2603 u16 hpi_sample_clock_set_source_index(const struct hpi_hsubsys *ph_subsys,
2604 	u32 h_control, u16 source_index)
2605 {
2606 	return hpi_control_param_set(ph_subsys, h_control,
2607 		HPI_SAMPLECLOCK_SOURCE_INDEX, source_index, 0);
2608 }
2609 
2610 u16 hpi_sample_clock_get_source_index(const struct hpi_hsubsys *ph_subsys,
2611 	u32 h_control, u16 *pw_source_index)
2612 {
2613 	u16 error = 0;
2614 	u32 source_index = 0;
2615 	error = hpi_control_param1_get(ph_subsys, h_control,
2616 		HPI_SAMPLECLOCK_SOURCE_INDEX, &source_index);
2617 	if (!error)
2618 		if (pw_source_index)
2619 			*pw_source_index = (u16)source_index;
2620 	return error;
2621 }
2622 
2623 u16 hpi_sample_clock_query_local_rate(const struct hpi_hsubsys *ph_subsys,
2624 	const u32 h_clock, const u32 index, u32 *prate)
2625 {
2626 	u16 err;
2627 	err = hpi_control_query(ph_subsys, h_clock,
2628 		HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, index, 0, prate);
2629 
2630 	return err;
2631 }
2632 
2633 u16 hpi_sample_clock_set_local_rate(const struct hpi_hsubsys *ph_subsys,
2634 	u32 h_control, u32 sample_rate)
2635 {
2636 	return hpi_control_param_set(ph_subsys, h_control,
2637 		HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, sample_rate, 0);
2638 }
2639 
2640 u16 hpi_sample_clock_get_local_rate(const struct hpi_hsubsys *ph_subsys,
2641 	u32 h_control, u32 *psample_rate)
2642 {
2643 	u16 error = 0;
2644 	u32 sample_rate = 0;
2645 	error = hpi_control_param1_get(ph_subsys, h_control,
2646 		HPI_SAMPLECLOCK_LOCAL_SAMPLERATE, &sample_rate);
2647 	if (!error)
2648 		if (psample_rate)
2649 			*psample_rate = sample_rate;
2650 	return error;
2651 }
2652 
2653 u16 hpi_sample_clock_get_sample_rate(const struct hpi_hsubsys *ph_subsys,
2654 	u32 h_control, u32 *psample_rate)
2655 {
2656 	u16 error = 0;
2657 	u32 sample_rate = 0;
2658 	error = hpi_control_param1_get(ph_subsys, h_control,
2659 		HPI_SAMPLECLOCK_SAMPLERATE, &sample_rate);
2660 	if (!error)
2661 		if (psample_rate)
2662 			*psample_rate = sample_rate;
2663 	return error;
2664 }
2665 
2666 u16 hpi_sample_clock_set_auto(const struct hpi_hsubsys *ph_subsys,
2667 	u32 h_control, u32 enable)
2668 {
2669 	return hpi_control_param_set(ph_subsys, h_control,
2670 		HPI_SAMPLECLOCK_AUTO, enable, 0);
2671 }
2672 
2673 u16 hpi_sample_clock_get_auto(const struct hpi_hsubsys *ph_subsys,
2674 	u32 h_control, u32 *penable)
2675 {
2676 	return hpi_control_param1_get(ph_subsys, h_control,
2677 		HPI_SAMPLECLOCK_AUTO, penable);
2678 }
2679 
2680 u16 hpi_sample_clock_set_local_rate_lock(const struct hpi_hsubsys *ph_subsys,
2681 	u32 h_control, u32 lock)
2682 {
2683 	return hpi_control_param_set(ph_subsys, h_control,
2684 		HPI_SAMPLECLOCK_LOCAL_LOCK, lock, 0);
2685 }
2686 
2687 u16 hpi_sample_clock_get_local_rate_lock(const struct hpi_hsubsys *ph_subsys,
2688 	u32 h_control, u32 *plock)
2689 {
2690 	return hpi_control_param1_get(ph_subsys, h_control,
2691 		HPI_SAMPLECLOCK_LOCAL_LOCK, plock);
2692 }
2693 
2694 u16 hpi_tone_detector_get_frequency(const struct hpi_hsubsys *ph_subsys,
2695 	u32 h_control, u32 index, u32 *frequency)
2696 {
2697 	return hpi_control_param_get(ph_subsys, h_control,
2698 		HPI_TONEDETECTOR_FREQUENCY, index, 0, frequency, NULL);
2699 }
2700 
2701 u16 hpi_tone_detector_get_state(const struct hpi_hsubsys *ph_subsys,
2702 	u32 h_control, u32 *state)
2703 {
2704 	return hpi_control_param1_get(ph_subsys, h_control,
2705 		HPI_TONEDETECTOR_STATE, state);
2706 }
2707 
2708 u16 hpi_tone_detector_set_enable(const struct hpi_hsubsys *ph_subsys,
2709 	u32 h_control, u32 enable)
2710 {
2711 	return hpi_control_param_set(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2712 		(u32)enable, 0);
2713 }
2714 
2715 u16 hpi_tone_detector_get_enable(const struct hpi_hsubsys *ph_subsys,
2716 	u32 h_control, u32 *enable)
2717 {
2718 	return hpi_control_param1_get(ph_subsys, h_control,
2719 		HPI_GENERIC_ENABLE, enable);
2720 }
2721 
2722 u16 hpi_tone_detector_set_event_enable(const struct hpi_hsubsys *ph_subsys,
2723 	u32 h_control, u32 event_enable)
2724 {
2725 	return hpi_control_param_set(ph_subsys, h_control,
2726 		HPI_GENERIC_EVENT_ENABLE, (u32)event_enable, 0);
2727 }
2728 
2729 u16 hpi_tone_detector_get_event_enable(const struct hpi_hsubsys *ph_subsys,
2730 	u32 h_control, u32 *event_enable)
2731 {
2732 	return hpi_control_param1_get(ph_subsys, h_control,
2733 		HPI_GENERIC_EVENT_ENABLE, event_enable);
2734 }
2735 
2736 u16 hpi_tone_detector_set_threshold(const struct hpi_hsubsys *ph_subsys,
2737 	u32 h_control, int threshold)
2738 {
2739 	return hpi_control_param_set(ph_subsys, h_control,
2740 		HPI_TONEDETECTOR_THRESHOLD, (u32)threshold, 0);
2741 }
2742 
2743 u16 hpi_tone_detector_get_threshold(const struct hpi_hsubsys *ph_subsys,
2744 	u32 h_control, int *threshold)
2745 {
2746 	return hpi_control_param1_get(ph_subsys, h_control,
2747 		HPI_TONEDETECTOR_THRESHOLD, (u32 *)threshold);
2748 }
2749 
2750 u16 hpi_silence_detector_get_state(const struct hpi_hsubsys *ph_subsys,
2751 	u32 h_control, u32 *state)
2752 {
2753 	return hpi_control_param1_get(ph_subsys, h_control,
2754 		HPI_SILENCEDETECTOR_STATE, state);
2755 }
2756 
2757 u16 hpi_silence_detector_set_enable(const struct hpi_hsubsys *ph_subsys,
2758 	u32 h_control, u32 enable)
2759 {
2760 	return hpi_control_param_set(ph_subsys, h_control, HPI_GENERIC_ENABLE,
2761 		(u32)enable, 0);
2762 }
2763 
2764 u16 hpi_silence_detector_get_enable(const struct hpi_hsubsys *ph_subsys,
2765 	u32 h_control, u32 *enable)
2766 {
2767 	return hpi_control_param1_get(ph_subsys, h_control,
2768 		HPI_GENERIC_ENABLE, enable);
2769 }
2770 
2771 u16 hpi_silence_detector_set_event_enable(const struct hpi_hsubsys *ph_subsys,
2772 	u32 h_control, u32 event_enable)
2773 {
2774 	return hpi_control_param_set(ph_subsys, h_control,
2775 		HPI_GENERIC_EVENT_ENABLE, event_enable, 0);
2776 }
2777 
2778 u16 hpi_silence_detector_get_event_enable(const struct hpi_hsubsys *ph_subsys,
2779 	u32 h_control, u32 *event_enable)
2780 {
2781 	return hpi_control_param1_get(ph_subsys, h_control,
2782 		HPI_GENERIC_EVENT_ENABLE, event_enable);
2783 }
2784 
2785 u16 hpi_silence_detector_set_delay(const struct hpi_hsubsys *ph_subsys,
2786 	u32 h_control, u32 delay)
2787 {
2788 	return hpi_control_param_set(ph_subsys, h_control,
2789 		HPI_SILENCEDETECTOR_DELAY, delay, 0);
2790 }
2791 
2792 u16 hpi_silence_detector_get_delay(const struct hpi_hsubsys *ph_subsys,
2793 	u32 h_control, u32 *delay)
2794 {
2795 	return hpi_control_param1_get(ph_subsys, h_control,
2796 		HPI_SILENCEDETECTOR_DELAY, delay);
2797 }
2798 
2799 u16 hpi_silence_detector_set_threshold(const struct hpi_hsubsys *ph_subsys,
2800 	u32 h_control, int threshold)
2801 {
2802 	return hpi_control_param_set(ph_subsys, h_control,
2803 		HPI_SILENCEDETECTOR_THRESHOLD, threshold, 0);
2804 }
2805 
2806 u16 hpi_silence_detector_get_threshold(const struct hpi_hsubsys *ph_subsys,
2807 	u32 h_control, int *threshold)
2808 {
2809 	return hpi_control_param1_get(ph_subsys, h_control,
2810 		HPI_SILENCEDETECTOR_THRESHOLD, (u32 *)threshold);
2811 }
2812 
2813 u16 hpi_tuner_query_band(const struct hpi_hsubsys *ph_subsys,
2814 	const u32 h_tuner, const u32 index, u16 *pw_band)
2815 {
2816 	u32 qr;
2817 	u16 err;
2818 
2819 	err = hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_BAND, index, 0,
2820 		&qr);
2821 	*pw_band = (u16)qr;
2822 	return err;
2823 }
2824 
2825 u16 hpi_tuner_set_band(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2826 	u16 band)
2827 {
2828 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_BAND,
2829 		band, 0);
2830 }
2831 
2832 u16 hpi_tuner_get_band(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2833 	u16 *pw_band)
2834 {
2835 	u32 band = 0;
2836 	u16 error = 0;
2837 
2838 	error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_BAND,
2839 		&band);
2840 	if (pw_band)
2841 		*pw_band = (u16)band;
2842 	return error;
2843 }
2844 
2845 u16 hpi_tuner_query_frequency(const struct hpi_hsubsys *ph_subsys,
2846 	const u32 h_tuner, const u32 index, const u16 band, u32 *pfreq)
2847 {
2848 	return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_FREQ, index,
2849 		band, pfreq);
2850 }
2851 
2852 u16 hpi_tuner_set_frequency(const struct hpi_hsubsys *ph_subsys,
2853 	u32 h_control, u32 freq_ink_hz)
2854 {
2855 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_FREQ,
2856 		freq_ink_hz, 0);
2857 }
2858 
2859 u16 hpi_tuner_get_frequency(const struct hpi_hsubsys *ph_subsys,
2860 	u32 h_control, u32 *pw_freq_ink_hz)
2861 {
2862 	return hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_FREQ,
2863 		pw_freq_ink_hz);
2864 }
2865 
2866 u16 hpi_tuner_query_gain(const struct hpi_hsubsys *ph_subsys,
2867 	const u32 h_tuner, const u32 index, u16 *pw_gain)
2868 {
2869 	u32 qr;
2870 	u16 err;
2871 
2872 	err = hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_GAIN, index, 0,
2873 		&qr);
2874 	*pw_gain = (u16)qr;
2875 	return err;
2876 }
2877 
2878 u16 hpi_tuner_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2879 	short gain)
2880 {
2881 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_GAIN,
2882 		gain, 0);
2883 }
2884 
2885 u16 hpi_tuner_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2886 	short *pn_gain)
2887 {
2888 	u32 gain = 0;
2889 	u16 error = 0;
2890 
2891 	error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_GAIN,
2892 		&gain);
2893 	if (pn_gain)
2894 		*pn_gain = (short)gain;
2895 	return error;
2896 }
2897 
2898 u16 hpi_tuner_getRF_level(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2899 	short *pw_level)
2900 {
2901 	struct hpi_message hm;
2902 	struct hpi_response hr;
2903 
2904 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2905 		HPI_CONTROL_GET_STATE);
2906 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2907 	hm.u.c.attribute = HPI_TUNER_LEVEL;
2908 	hm.u.c.param1 = HPI_TUNER_LEVEL_AVERAGE;
2909 	hpi_send_recv(&hm, &hr);
2910 	if (pw_level)
2911 		*pw_level = (short)hr.u.c.param1;
2912 	return hr.error;
2913 }
2914 
2915 u16 hpi_tuner_get_rawRF_level(const struct hpi_hsubsys *ph_subsys,
2916 	u32 h_control, short *pw_level)
2917 {
2918 	struct hpi_message hm;
2919 	struct hpi_response hr;
2920 
2921 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
2922 		HPI_CONTROL_GET_STATE);
2923 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
2924 	hm.u.c.attribute = HPI_TUNER_LEVEL;
2925 	hm.u.c.param1 = HPI_TUNER_LEVEL_RAW;
2926 	hpi_send_recv(&hm, &hr);
2927 	if (pw_level)
2928 		*pw_level = (short)hr.u.c.param1;
2929 	return hr.error;
2930 }
2931 
2932 u16 hpi_tuner_query_deemphasis(const struct hpi_hsubsys *ph_subsys,
2933 	const u32 h_tuner, const u32 index, const u16 band, u32 *pdeemphasis)
2934 {
2935 	return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_DEEMPHASIS,
2936 		index, band, pdeemphasis);
2937 }
2938 
2939 u16 hpi_tuner_set_deemphasis(const struct hpi_hsubsys *ph_subsys,
2940 	u32 h_control, u32 deemphasis)
2941 {
2942 	return hpi_control_param_set(ph_subsys, h_control,
2943 		HPI_TUNER_DEEMPHASIS, deemphasis, 0);
2944 }
2945 
2946 u16 hpi_tuner_get_deemphasis(const struct hpi_hsubsys *ph_subsys,
2947 	u32 h_control, u32 *pdeemphasis)
2948 {
2949 	return hpi_control_param1_get(ph_subsys, h_control,
2950 		HPI_TUNER_DEEMPHASIS, pdeemphasis);
2951 }
2952 
2953 u16 hpi_tuner_query_program(const struct hpi_hsubsys *ph_subsys,
2954 	const u32 h_tuner, u32 *pbitmap_program)
2955 {
2956 	return hpi_control_query(ph_subsys, h_tuner, HPI_TUNER_PROGRAM, 0, 0,
2957 		pbitmap_program);
2958 }
2959 
2960 u16 hpi_tuner_set_program(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2961 	u32 program)
2962 {
2963 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_PROGRAM,
2964 		program, 0);
2965 }
2966 
2967 u16 hpi_tuner_get_program(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2968 	u32 *pprogram)
2969 {
2970 	return hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_PROGRAM,
2971 		pprogram);
2972 }
2973 
2974 u16 hpi_tuner_get_hd_radio_dsp_version(const struct hpi_hsubsys *ph_subsys,
2975 	u32 h_control, char *psz_dsp_version, const u32 string_size)
2976 {
2977 	return hpi_control_get_string(h_control,
2978 		HPI_TUNER_HDRADIO_DSP_VERSION, psz_dsp_version, string_size);
2979 }
2980 
2981 u16 hpi_tuner_get_hd_radio_sdk_version(const struct hpi_hsubsys *ph_subsys,
2982 	u32 h_control, char *psz_sdk_version, const u32 string_size)
2983 {
2984 	return hpi_control_get_string(h_control,
2985 		HPI_TUNER_HDRADIO_SDK_VERSION, psz_sdk_version, string_size);
2986 }
2987 
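/* The tuner status word packs two 16-bit fields: the upper half is the
 * status mask (presumably which status bits are valid) and the lower
 * half the status bits themselves.
 */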
2988 u16 hpi_tuner_get_status(const struct hpi_hsubsys *ph_subsys, u32 h_control,
2989 	u16 *pw_status_mask, u16 *pw_status)
2990 {
2991 	u32 status = 0;
2992 	u16 error = 0;
2993 
2994 	error = hpi_control_param1_get(ph_subsys, h_control, HPI_TUNER_STATUS,
2995 		&status);
2996 	if (pw_status && pw_status_mask) {
2997 		if (!error) {
2998 			*pw_status_mask = (u16)(status >> 16);
2999 			*pw_status = (u16)(status & 0xFFFF);
3000 		} else {
3001 			*pw_status_mask = 0;
3002 			*pw_status = 0;
3003 		}
3004 	}
3005 	return error;
3006 }
3007 
3008 u16 hpi_tuner_set_mode(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3009 	u32 mode, u32 value)
3010 {
3011 	return hpi_control_param_set(ph_subsys, h_control, HPI_TUNER_MODE,
3012 		mode, value);
3013 }
3014 
3015 u16 hpi_tuner_get_mode(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3016 	u32 mode, u32 *pn_value)
3017 {
3018 	return hpi_control_param_get(ph_subsys, h_control, HPI_TUNER_MODE,
3019 		mode, 0, pn_value, NULL);
3020 }
3021 
3022 u16 hpi_tuner_get_hd_radio_signal_quality(const struct hpi_hsubsys *ph_subsys,
3023 	u32 h_control, u32 *pquality)
3024 {
3025 	return hpi_control_param1_get(ph_subsys, h_control,
3026 		HPI_TUNER_HDRADIO_SIGNAL_QUALITY, pquality);
3027 }
3028 
3029 u16 hpi_tuner_get_hd_radio_signal_blend(const struct hpi_hsubsys *ph_subsys,
3030 	u32 h_control, u32 *pblend)
3031 {
3032 	return hpi_control_param1_get(ph_subsys, h_control,
3033 		HPI_TUNER_HDRADIO_BLEND, pblend);
3034 }
3035 
3036 u16 hpi_tuner_set_hd_radio_signal_blend(const struct hpi_hsubsys *ph_subsys,
3037 	u32 h_control, const u32 blend)
3038 {
3039 	return hpi_control_param_set(ph_subsys, h_control,
3040 		HPI_TUNER_HDRADIO_BLEND, blend, 0);
3041 }
3042 
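/* Copy the latest RDS data into the caller's buffer: two 32-bit data
 * words followed by the 32-bit bLER field.  p_data must therefore be
 * at least 12 bytes long.
 */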
3043 u16 hpi_tuner_getRDS(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3044 	char *p_data)
3045 {
3046 	struct hpi_message hm;
3047 	struct hpi_response hr;
3048 
3049 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3050 		HPI_CONTROL_GET_STATE);
3051 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3052 	hm.u.c.attribute = HPI_TUNER_RDS;
3053 	hpi_send_recv(&hm, &hr);
3054 	if (p_data) {
3055 		*(u32 *)&p_data[0] = hr.u.cu.tuner.rds.data[0];
3056 		*(u32 *)&p_data[4] = hr.u.cu.tuner.rds.data[1];
3057 		*(u32 *)&p_data[8] = hr.u.cu.tuner.rds.bLER;
3058 	}
3059 	return hr.error;
3060 }
3061 
3062 u16 HPI_PAD__get_channel_name(const struct hpi_hsubsys *ph_subsys,
3063 	u32 h_control, char *psz_string, const u32 data_length)
3064 {
3065 	return hpi_control_get_string(h_control, HPI_PAD_CHANNEL_NAME,
3066 		psz_string, data_length);
3067 }
3068 
3069 u16 HPI_PAD__get_artist(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3070 	char *psz_string, const u32 data_length)
3071 {
3072 	return hpi_control_get_string(h_control, HPI_PAD_ARTIST, psz_string,
3073 		data_length);
3074 }
3075 
3076 u16 HPI_PAD__get_title(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3077 	char *psz_string, const u32 data_length)
3078 {
3079 	return hpi_control_get_string(h_control, HPI_PAD_TITLE, psz_string,
3080 		data_length);
3081 }
3082 
3083 u16 HPI_PAD__get_comment(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3084 	char *psz_string, const u32 data_length)
3085 {
3086 	return hpi_control_get_string(h_control, HPI_PAD_COMMENT, psz_string,
3087 		data_length);
3088 }
3089 
3090 u16 HPI_PAD__get_program_type(const struct hpi_hsubsys *ph_subsys,
3091 	u32 h_control, u32 *ppTY)
3092 {
3093 	return hpi_control_param1_get(ph_subsys, h_control,
3094 		HPI_PAD_PROGRAM_TYPE, ppTY);
3095 }
3096 
3097 u16 HPI_PAD__get_rdsPI(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3098 	u32 *ppI)
3099 {
3100 	return hpi_control_param1_get(ph_subsys, h_control,
3101 		HPI_PAD_PROGRAM_ID, ppI);
3102 }
3103 
3104 u16 hpi_volume_query_channels(const struct hpi_hsubsys *ph_subsys,
3105 	const u32 h_volume, u32 *p_channels)
3106 {
3107 	return hpi_control_query(ph_subsys, h_volume, HPI_VOLUME_NUM_CHANNELS,
3108 		0, 0, p_channels);
3109 }
3110 
3111 u16 hpi_volume_set_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3112 	short an_log_gain[HPI_MAX_CHANNELS]
3113 	)
3114 {
3115 	return hpi_control_log_set2(h_control, HPI_VOLUME_GAIN,
3116 		an_log_gain[0], an_log_gain[1]);
3117 }
3118 
3119 u16 hpi_volume_get_gain(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3120 	short an_log_gain[HPI_MAX_CHANNELS]
3121 	)
3122 {
3123 	return hpi_control_log_get2(ph_subsys, h_control, HPI_VOLUME_GAIN,
3124 		&an_log_gain[0], &an_log_gain[1]);
3125 }
3126 
3127 u16 hpi_volume_query_range(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3128 	short *min_gain_01dB, short *max_gain_01dB, short *step_gain_01dB)
3129 {
3130 	struct hpi_message hm;
3131 	struct hpi_response hr;
3132 
3133 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3134 		HPI_CONTROL_GET_STATE);
3135 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3136 	hm.u.c.attribute = HPI_VOLUME_RANGE;
3137 
3138 	hpi_send_recv(&hm, &hr);
3139 	if (hr.error) {
3140 		hr.u.c.an_log_value[0] = 0;
3141 		hr.u.c.an_log_value[1] = 0;
3142 		hr.u.c.param1 = 0;
3143 	}
3144 	if (min_gain_01dB)
3145 		*min_gain_01dB = hr.u.c.an_log_value[0];
3146 	if (max_gain_01dB)
3147 		*max_gain_01dB = hr.u.c.an_log_value[1];
3148 	if (step_gain_01dB)
3149 		*step_gain_01dB = (short)hr.u.c.param1;
3150 	return hr.error;
3151 }
3152 
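/* Start an automatic fade from the current gains to an_stop_gain0_01dB
 * over duration_ms milliseconds, using the given fade profile (e.g.
 * HPI_VOLUME_AUTOFADE_LOG, as used by hpi_volume_auto_fade() below).
 */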
3153 u16 hpi_volume_auto_fade_profile(const struct hpi_hsubsys *ph_subsys,
3154 	u32 h_control, short an_stop_gain0_01dB[HPI_MAX_CHANNELS],
3155 	u32 duration_ms, u16 profile)
3156 {
3157 	struct hpi_message hm;
3158 	struct hpi_response hr;
3159 
3160 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3161 		HPI_CONTROL_SET_STATE);
3162 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3163 
3164 	memcpy(hm.u.c.an_log_value, an_stop_gain0_01dB,
3165 		sizeof(short) * HPI_MAX_CHANNELS);
3166 
3167 	hm.u.c.attribute = HPI_VOLUME_AUTOFADE;
3168 	hm.u.c.param1 = duration_ms;
3169 	hm.u.c.param2 = profile;
3170 
3171 	hpi_send_recv(&hm, &hr);
3172 
3173 	return hr.error;
3174 }
3175 
3176 u16 hpi_volume_auto_fade(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3177 	short an_stop_gain0_01dB[HPI_MAX_CHANNELS], u32 duration_ms)
3178 {
3179 	return hpi_volume_auto_fade_profile(ph_subsys, h_control,
3180 		an_stop_gain0_01dB, duration_ms, HPI_VOLUME_AUTOFADE_LOG);
3181 }
3182 
3183 u16 hpi_vox_set_threshold(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3184 	short an_gain0_01dB)
3185 {
3186 	struct hpi_message hm;
3187 	struct hpi_response hr;
3188 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3189 		HPI_CONTROL_SET_STATE);
3190 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3191 	hm.u.c.attribute = HPI_VOX_THRESHOLD;
3192 
3193 	hm.u.c.an_log_value[0] = an_gain0_01dB;
3194 
3195 	hpi_send_recv(&hm, &hr);
3196 
3197 	return hr.error;
3198 }
3199 
3200 u16 hpi_vox_get_threshold(const struct hpi_hsubsys *ph_subsys, u32 h_control,
3201 	short *an_gain0_01dB)
3202 {
3203 	struct hpi_message hm;
3204 	struct hpi_response hr;
3205 	hpi_init_message_response(&hm, &hr, HPI_OBJ_CONTROL,
3206 		HPI_CONTROL_GET_STATE);
3207 	u32TOINDEXES(h_control, &hm.adapter_index, &hm.obj_index);
3208 	hm.u.c.attribute = HPI_VOX_THRESHOLD;
3209 
3210 	hpi_send_recv(&hm, &hr);
3211 
3212 	*an_gain0_01dB = hr.u.c.an_log_value[0];
3213 
3214 	return hr.error;
3215 }
3216 
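/* Cached response-packet size for hpi_universal_get(); starts at
 * MIN_STRV_PACKET_SIZE and is enlarged whenever an adapter reports
 * HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL.
 */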
3217 static size_t strv_packet_size = MIN_STRV_PACKET_SIZE;
3218 
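/* Size in bytes of one element of each entity type, indexed by
 * enum e_entity_type.  The 4-, 16- and 6-char entries presumably
 * correspond to IPv4, IPv6 and MAC address types.
 */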
3219 static size_t entity_type_to_size[LAST_ENTITY_TYPE] = {
3220 	0,
3221 	sizeof(struct hpi_entity),
3222 	sizeof(void *),
3223 
3224 	sizeof(int),
3225 	sizeof(float),
3226 	sizeof(double),
3227 
3228 	sizeof(char),
3229 	sizeof(char),
3230 
3231 	4 * sizeof(char),
3232 	16 * sizeof(char),
3233 	6 * sizeof(char),
3234 };
3235 
3236 static inline size_t hpi_entity_size(struct hpi_entity *entity_ptr)
3237 {
3238 	return entity_ptr->header.size;
3239 }
3240 
3241 static inline size_t hpi_entity_header_size(struct hpi_entity *entity_ptr)
3242 {
3243 	return sizeof(entity_ptr->header);
3244 }
3245 
3246 static inline size_t hpi_entity_value_size(struct hpi_entity *entity_ptr)
3247 {
3248 	return hpi_entity_size(entity_ptr) -
3249 		hpi_entity_header_size(entity_ptr);
3250 }
3251 
3252 static inline size_t hpi_entity_item_count(struct hpi_entity *entity_ptr)
3253 {
3254 	return hpi_entity_value_size(entity_ptr) /
3255 		entity_type_to_size[entity_ptr->header.type];
3256 }
3257 
3258 static inline struct hpi_entity *hpi_entity_ptr_to_next(struct hpi_entity
3259 	*entity_ptr)
3260 {
3261 	return (void *)(((u8 *)entity_ptr) + hpi_entity_size(entity_ptr));
3262 }
3263 
3264 static inline u16 hpi_entity_check_type(const enum e_entity_type t)
3265 {
3266 	if (t >= 0 && t < STR_TYPE_FIELD_MAX)
3267 		return 0;
3268 	return HPI_ERROR_ENTITY_TYPE_INVALID;
3269 }
3270 
3271 static inline u16 hpi_entity_check_role(const enum e_entity_role r)
3272 {
3273 	if (r >= 0 && r < STR_ROLE_FIELD_MAX)
3274 		return 0;
3275 	return HPI_ERROR_ENTITY_ROLE_INVALID;
3276 }
3277 
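/* Step to the entity following 'entity'.  guard_p points just past the
 * end of the containing entity; when the walk reaches it, *next is set
 * to NULL.  With recursive_flag set, a sequence entity is entered by
 * descending into its value rather than being skipped over.
 */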
3278 static u16 hpi_entity_get_next(struct hpi_entity *entity, int recursive_flag,
3279 	void *guard_p, struct hpi_entity **next)
3280 {
3281 	HPI_DEBUG_ASSERT(entity != NULL);
3282 	HPI_DEBUG_ASSERT(next != NULL);
3283 	HPI_DEBUG_ASSERT(hpi_entity_size(entity) != 0);
3284 
3285 	if (guard_p <= (void *)entity) {
3286 		*next = NULL;
3287 		return 0;
3288 	}
3289 
3290 	if (recursive_flag && entity->header.type == entity_type_sequence)
3291 		*next = (struct hpi_entity *)entity->value;
3292 	else
3293 		*next = (struct hpi_entity *)hpi_entity_ptr_to_next(entity);
3294 
3295 	if (guard_p <= (void *)*next) {
3296 		*next = NULL;
3297 		return 0;
3298 	}
3299 
3300 	HPI_DEBUG_ASSERT(guard_p >= (void *)hpi_entity_ptr_to_next(*next));
3301 	return 0;
3302 }
3303 
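/* Find the next sub-entity of container_entity matching 'type' and
 * 'role' (0 acts as a wildcard), starting after *current_match or from
 * the beginning when *current_match is NULL.  *current_match is set to
 * NULL when no further match exists.
 */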
3304 u16 hpi_entity_find_next(struct hpi_entity *container_entity,
3305 	enum e_entity_type type, enum e_entity_role role, int recursive_flag,
3306 	struct hpi_entity **current_match)
3307 {
3308 	struct hpi_entity *tmp = NULL;
3309 	void *guard_p = NULL;
3310 
3311 	HPI_DEBUG_ASSERT(container_entity != NULL);
3312 	guard_p = hpi_entity_ptr_to_next(container_entity);
3313 
3314 	if (*current_match != NULL)
3315 		hpi_entity_get_next(*current_match, recursive_flag, guard_p,
3316 			&tmp);
3317 	else
3318 		hpi_entity_get_next(container_entity, 1, guard_p, &tmp);
3319 
3320 	while (tmp) {
3321 		u16 err;
3322 
3323 		HPI_DEBUG_ASSERT((void *)tmp >= (void *)container_entity);
3324 
3325 		if ((!type || tmp->header.type == type) && (!role
3326 				|| tmp->header.role == role)) {
3327 			*current_match = tmp;
3328 			return 0;
3329 		}
3330 
3331 		err = hpi_entity_get_next(tmp, recursive_flag, guard_p,
3332 			current_match);
3333 		if (err)
3334 			return err;
3335 
3336 		tmp = *current_match;
3337 	}
3338 
3339 	*current_match = NULL;
3340 	return 0;
3341 }
3342 
3343 void hpi_entity_free(struct hpi_entity *entity)
3344 {
3345 	kfree(entity);
3346 }
3347 
3348 static u16 hpi_entity_alloc_and_copy(struct hpi_entity *src,
3349 	struct hpi_entity **dst)
3350 {
3351 	size_t buf_size;
3352 	HPI_DEBUG_ASSERT(dst != NULL);
3353 	HPI_DEBUG_ASSERT(src != NULL);
3354 
3355 	buf_size = hpi_entity_size(src);
3356 	*dst = kmalloc(buf_size, GFP_KERNEL);
3357 	if (*dst == NULL)
3358 		return HPI_ERROR_MEMORY_ALLOC;
3359 	memcpy(*dst, src, buf_size);
3360 	return 0;
3361 }
3362 
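/* Fetch the control's information entity.  The response buffer starts
 * at 1024 bytes; if the adapter answers with
 * HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL, specific_error carries the size
 * actually needed and the request is retried once with that size.
 */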
3363 u16 hpi_universal_info(const struct hpi_hsubsys *ph_subsys, u32 hC,
3364 	struct hpi_entity **info)
3365 {
3366 	struct hpi_msg_strv hm;
3367 	struct hpi_res_strv *phr;
3368 	u16 hpi_err;
3369 	int remaining_attempts = 2;
3370 	size_t resp_packet_size = 1024;
3371 
3372 	*info = NULL;
3373 
3374 	while (remaining_attempts--) {
3375 		phr = kmalloc(resp_packet_size, GFP_KERNEL);
3376 		if (!phr)
			return HPI_ERROR_MEMORY_ALLOC;
3377 
3378 		hpi_init_message_responseV1(&hm.h, (u16)sizeof(hm), &phr->h,
3379 			(u16)resp_packet_size, HPI_OBJ_CONTROL,
3380 			HPI_CONTROL_GET_INFO);
3381 		u32TOINDEXES(hC, &hm.h.adapter_index, &hm.h.obj_index);
3382 
3383 		hm.strv.header.size = sizeof(hm.strv);
3384 		phr->strv.header.size = resp_packet_size - sizeof(phr->h);
3385 
3386 		hpi_send_recv((struct hpi_message *)&hm.h,
3387 			(struct hpi_response *)&phr->h);
3388 		if (phr->h.error == HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL) {
3389 
3390 			HPI_DEBUG_ASSERT(phr->h.specific_error >
3391 				MIN_STRV_PACKET_SIZE
3392 				&& phr->h.specific_error < 1500);
3393 			resp_packet_size = phr->h.specific_error;
3394 		} else {
3395 			remaining_attempts = 0;
3396 			if (!phr->h.error)
3397 				hpi_entity_alloc_and_copy(&phr->strv, info);
3398 		}
3399 
3400 		hpi_err = phr->h.error;
3401 		kfree(phr);
3402 	}
3403 
3404 	return hpi_err;
3405 }
3406 
3407 u16 hpi_universal_get(const struct hpi_hsubsys *ph_subsys, u32 hC,
3408 	struct hpi_entity **value)
3409 {
3410 	struct hpi_msg_strv hm;
3411 	struct hpi_res_strv *phr;
3412 	u16 hpi_err;
3413 	int remaining_attempts = 2;
3414 
3415 	*value = NULL;
3416 
3417 	while (remaining_attempts--) {
3418 		phr = kmalloc(strv_packet_size, GFP_KERNEL);
3419 		if (!phr)
3420 			return HPI_ERROR_MEMORY_ALLOC;
3421 
3422 		hpi_init_message_responseV1(&hm.h, (u16)sizeof(hm), &phr->h,
3423 			(u16)strv_packet_size, HPI_OBJ_CONTROL,
3424 			HPI_CONTROL_GET_STATE);
3425 		u32TOINDEXES(hC, &hm.h.adapter_index, &hm.h.obj_index);
3426 
3427 		hm.strv.header.size = sizeof(hm.strv);
3428 		phr->strv.header.size = strv_packet_size - sizeof(phr->h);
3429 
3430 		hpi_send_recv((struct hpi_message *)&hm.h,
3431 			(struct hpi_response *)&phr->h);
3432 		if (phr->h.error == HPI_ERROR_RESPONSE_BUFFER_TOO_SMALL) {
3433 
3434 			HPI_DEBUG_ASSERT(phr->h.specific_error >
3435 				MIN_STRV_PACKET_SIZE
3436 				&& phr->h.specific_error < 1000);
3437 			strv_packet_size = phr->h.specific_error;
3438 		} else {
3439 			remaining_attempts = 0;
3440 			if (!phr->h.error)
3441 				hpi_entity_alloc_and_copy(&phr->strv, value);
3442 		}
3443 
3444 		hpi_err = phr->h.error;
3445 		kfree(phr);
3446 	}
3447 
3448 	return hpi_err;
3449 }
3450 
3451 u16 hpi_universal_set(const struct hpi_hsubsys *ph_subsys, u32 hC,
3452 	struct hpi_entity *value)
3453 {
3454 	struct hpi_msg_strv *phm;
3455 	struct hpi_res_strv hr;
3456 
3457 	phm = kmalloc(sizeof(phm->h) + value->header.size, GFP_KERNEL);
3458 	if (!phm)
		return HPI_ERROR_MEMORY_ALLOC;
3459 
3460 	hpi_init_message_responseV1(&phm->h,
3461 		sizeof(phm->h) + value->header.size, &hr.h, sizeof(hr),
3462 		HPI_OBJ_CONTROL, HPI_CONTROL_SET_STATE);
3463 	u32TOINDEXES(hC, &phm->h.adapter_index, &phm->h.obj_index);
3464 	hr.strv.header.size = sizeof(hr.strv);
3465 
3466 	memcpy(&phm->strv, value, value->header.size);
3467 	hpi_send_recv((struct hpi_message *)&phm->h,
3468 		(struct hpi_response *)&hr.h);
3469 
3470 	return hr.h.error;
3471 }
3472 
3473 u16 hpi_entity_alloc_and_pack(const enum e_entity_type type,
3474 	const size_t item_count, const enum e_entity_role role, void *value,
3475 	struct hpi_entity **entity)
3476 {
3477 	size_t bytes_to_copy, total_size;
3478 	u16 hE = 0;
3479 	*entity = NULL;
3480 
3481 	hE = hpi_entity_check_type(type);
3482 	if (hE)
3483 		return hE;
3484 
3485 	HPI_DEBUG_ASSERT(role > entity_role_null && type < LAST_ENTITY_TYPE);
3486 
3487 	bytes_to_copy = entity_type_to_size[type] * item_count;
3488 	total_size = hpi_entity_header_size(*entity) + bytes_to_copy;
3489 
3490 	HPI_DEBUG_ASSERT(total_size >= hpi_entity_header_size(*entity)
3491 		&& total_size < STR_SIZE_FIELD_MAX);
3492 
3493 	*entity = kmalloc(total_size, GFP_KERNEL);
3494 	if (*entity == NULL)
3495 		return HPI_ERROR_MEMORY_ALLOC;
3496 	memcpy((*entity)->value, value, bytes_to_copy);
3497 	(*entity)->header.size =
3498 		hpi_entity_header_size(*entity) + bytes_to_copy;
3499 	(*entity)->header.type = type;
3500 	(*entity)->header.role = role;
3501 	return 0;
3502 }
3503 
3504 u16 hpi_entity_copy_value_from(struct hpi_entity *entity,
3505 	enum e_entity_type type, size_t item_count, void *value_dst_p)
3506 {
3507 	size_t bytes_to_copy;
3508 
3509 	if (entity->header.type != type)
3510 		return HPI_ERROR_ENTITY_TYPE_MISMATCH;
3511 
3512 	if (hpi_entity_item_count(entity) != item_count)
3513 		return HPI_ERROR_ENTITY_ITEM_COUNT;
3514 
3515 	bytes_to_copy = entity_type_to_size[type] * item_count;
3516 	memcpy(value_dst_p, entity->value, bytes_to_copy);
3517 	return 0;
3518 }
3519 
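/* Expose an entity's header fields and value pointer.  For a sequence
 * entity the item count is obtained by walking its sub-entities; for
 * all other types it is derived from the value size and the per-type
 * element size.
 */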
3520 u16 hpi_entity_unpack(struct hpi_entity *entity, enum e_entity_type *type,
3521 	size_t *item_count, enum e_entity_role *role, void **value)
3522 {
3523 	u16 err = 0;
3524 	HPI_DEBUG_ASSERT(entity != NULL);
3525 
3526 	if (type)
3527 		*type = entity->header.type;
3528 
3529 	if (role)
3530 		*role = entity->header.role;
3531 
3532 	if (value)
3533 		*value = entity->value;
3534 
3535 	if (item_count != NULL) {
3536 		if (entity->header.type == entity_type_sequence) {
3537 			void *guard_p = hpi_entity_ptr_to_next(entity);
3538 			struct hpi_entity *next = NULL;
3539 			void *contents = entity->value;
3540 
3541 			*item_count = 0;
3542 			while (contents < guard_p) {
3543 				(*item_count)++;
3544 				err = hpi_entity_get_next(contents, 0,
3545 					guard_p, &next);
3546 				if (next == NULL || err)
3547 					break;
3548 				contents = next;
3549 			}
3550 		} else {
3551 			*item_count = hpi_entity_item_count(entity);
3552 		}
3553 	}
3554 	return err;
3555 }
3556 
3557 u16 hpi_gpio_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3558 	u32 *ph_gpio, u16 *pw_number_input_bits, u16 *pw_number_output_bits)
3559 {
3560 	struct hpi_message hm;
3561 	struct hpi_response hr;
3562 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_OPEN);
3563 	hm.adapter_index = adapter_index;
3564 
3565 	hpi_send_recv(&hm, &hr);
3566 
3567 	if (hr.error == 0) {
3568 		*ph_gpio =
3569 			hpi_indexes_to_handle(HPI_OBJ_GPIO, adapter_index, 0);
3570 		if (pw_number_input_bits)
3571 			*pw_number_input_bits = hr.u.l.number_input_bits;
3572 		if (pw_number_output_bits)
3573 			*pw_number_output_bits = hr.u.l.number_output_bits;
3574 	} else
3575 		*ph_gpio = 0;
3576 	return hr.error;
3577 }
3578 
3579 u16 hpi_gpio_read_bit(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3580 	u16 bit_index, u16 *pw_bit_data)
3581 {
3582 	struct hpi_message hm;
3583 	struct hpi_response hr;
3584 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_READ_BIT);
3585 	u32TOINDEX(h_gpio, &hm.adapter_index);
3586 	hm.u.l.bit_index = bit_index;
3587 
3588 	hpi_send_recv(&hm, &hr);
3589 
3590 	*pw_bit_data = hr.u.l.bit_data[0];
3591 	return hr.error;
3592 }
3593 
3594 u16 hpi_gpio_read_all_bits(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3595 	u16 aw_all_bit_data[4]
3596 	)
3597 {
3598 	struct hpi_message hm;
3599 	struct hpi_response hr;
3600 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_READ_ALL);
3601 	u32TOINDEX(h_gpio, &hm.adapter_index);
3602 
3603 	hpi_send_recv(&hm, &hr);
3604 
3605 	if (aw_all_bit_data) {
3606 		aw_all_bit_data[0] = hr.u.l.bit_data[0];
3607 		aw_all_bit_data[1] = hr.u.l.bit_data[1];
3608 		aw_all_bit_data[2] = hr.u.l.bit_data[2];
3609 		aw_all_bit_data[3] = hr.u.l.bit_data[3];
3610 	}
3611 	return hr.error;
3612 }
3613 
3614 u16 hpi_gpio_write_bit(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3615 	u16 bit_index, u16 bit_data)
3616 {
3617 	struct hpi_message hm;
3618 	struct hpi_response hr;
3619 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO, HPI_GPIO_WRITE_BIT);
3620 	u32TOINDEX(h_gpio, &hm.adapter_index);
3621 	hm.u.l.bit_index = bit_index;
3622 	hm.u.l.bit_data = bit_data;
3623 
3624 	hpi_send_recv(&hm, &hr);
3625 
3626 	return hr.error;
3627 }
3628 
3629 u16 hpi_gpio_write_status(const struct hpi_hsubsys *ph_subsys, u32 h_gpio,
3630 	u16 aw_all_bit_data[4]
3631 	)
3632 {
3633 	struct hpi_message hm;
3634 	struct hpi_response hr;
3635 	hpi_init_message_response(&hm, &hr, HPI_OBJ_GPIO,
3636 		HPI_GPIO_WRITE_STATUS);
3637 	u32TOINDEX(h_gpio, &hm.adapter_index);
3638 
3639 	hpi_send_recv(&hm, &hr);
3640 
3641 	if (aw_all_bit_data) {
3642 		aw_all_bit_data[0] = hr.u.l.bit_data[0];
3643 		aw_all_bit_data[1] = hr.u.l.bit_data[1];
3644 		aw_all_bit_data[2] = hr.u.l.bit_data[2];
3645 		aw_all_bit_data[3] = hr.u.l.bit_data[3];
3646 	}
3647 	return hr.error;
3648 }
3649 
3650 u16 hpi_async_event_open(const struct hpi_hsubsys *ph_subsys,
3651 	u16 adapter_index, u32 *ph_async)
3652 {
3653 	struct hpi_message hm;
3654 	struct hpi_response hr;
3655 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3656 		HPI_ASYNCEVENT_OPEN);
3657 	hm.adapter_index = adapter_index;
3658 
3659 	hpi_send_recv(&hm, &hr);
3660 
3661 	if (hr.error == 0)
3663 		*ph_async =
3664 			hpi_indexes_to_handle(HPI_OBJ_ASYNCEVENT,
3665 			adapter_index, 0);
3666 	else
3667 		*ph_async = 0;
3668 	return hr.error;
3669 
3670 }
3671 
3672 u16 hpi_async_event_close(const struct hpi_hsubsys *ph_subsys, u32 h_async)
3673 {
3674 	struct hpi_message hm;
3675 	struct hpi_response hr;
3676 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3677 		HPI_ASYNCEVENT_OPEN);
3678 	u32TOINDEX(h_async, &hm.adapter_index);
3679 
3680 	hpi_send_recv(&hm, &hr);
3681 
3682 	return hr.error;
3683 }
3684 
3685 u16 hpi_async_event_wait(const struct hpi_hsubsys *ph_subsys, u32 h_async,
3686 	u16 maximum_events, struct hpi_async_event *p_events,
3687 	u16 *pw_number_returned)
3688 {
3689 
3690 	return 0;
3691 }
3692 
3693 u16 hpi_async_event_get_count(const struct hpi_hsubsys *ph_subsys,
3694 	u32 h_async, u16 *pw_count)
3695 {
3696 	struct hpi_message hm;
3697 	struct hpi_response hr;
3698 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3699 		HPI_ASYNCEVENT_GETCOUNT);
3700 	u32TOINDEX(h_async, &hm.adapter_index);
3701 
3702 	hpi_send_recv(&hm, &hr);
3703 
3704 	if (hr.error == 0)
3705 		if (pw_count)
3706 			*pw_count = hr.u.as.u.count.count;
3707 
3708 	return hr.error;
3709 }
3710 
3711 u16 hpi_async_event_get(const struct hpi_hsubsys *ph_subsys, u32 h_async,
3712 	u16 maximum_events, struct hpi_async_event *p_events,
3713 	u16 *pw_number_returned)
3714 {
3715 	struct hpi_message hm;
3716 	struct hpi_response hr;
3717 	hpi_init_message_response(&hm, &hr, HPI_OBJ_ASYNCEVENT,
3718 		HPI_ASYNCEVENT_GET);
3719 	u32TOINDEX(h_async, &hm.adapter_index);
3720 
3721 	hpi_send_recv(&hm, &hr);
3722 	if (!hr.error) {
3723 		memcpy(p_events, &hr.u.as.u.event,
3724 			sizeof(struct hpi_async_event));
3725 		*pw_number_returned = 1;
3726 	}
3727 
3728 	return hr.error;
3729 }
3730 
3731 u16 hpi_nv_memory_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3732 	u32 *ph_nv_memory, u16 *pw_size_in_bytes)
3733 {
3734 	struct hpi_message hm;
3735 	struct hpi_response hr;
3736 	hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3737 		HPI_NVMEMORY_OPEN);
3738 	hm.adapter_index = adapter_index;
3739 
3740 	hpi_send_recv(&hm, &hr);
3741 
3742 	if (hr.error == 0) {
3743 		*ph_nv_memory =
3744 			hpi_indexes_to_handle(HPI_OBJ_NVMEMORY, adapter_index,
3745 			0);
3746 		if (pw_size_in_bytes)
3747 			*pw_size_in_bytes = hr.u.n.size_in_bytes;
3748 	} else
3749 		*ph_nv_memory = 0;
3750 	return hr.error;
3751 }
3752 
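/* Read or write a single byte of adapter non-volatile memory at the
 * given index.
 */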
3753 u16 hpi_nv_memory_read_byte(const struct hpi_hsubsys *ph_subsys,
3754 	u32 h_nv_memory, u16 index, u16 *pw_data)
3755 {
3756 	struct hpi_message hm;
3757 	struct hpi_response hr;
3758 	hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3759 		HPI_NVMEMORY_READ_BYTE);
3760 	u32TOINDEX(h_nv_memory, &hm.adapter_index);
3761 	hm.u.n.address = index;
3762 
3763 	hpi_send_recv(&hm, &hr);
3764 
3765 	*pw_data = hr.u.n.data;
3766 	return hr.error;
3767 }
3768 
3769 u16 hpi_nv_memory_write_byte(const struct hpi_hsubsys *ph_subsys,
3770 	u32 h_nv_memory, u16 index, u16 data)
3771 {
3772 	struct hpi_message hm;
3773 	struct hpi_response hr;
3774 	hpi_init_message_response(&hm, &hr, HPI_OBJ_NVMEMORY,
3775 		HPI_NVMEMORY_WRITE_BYTE);
3776 	u32TOINDEX(h_nv_memory, &hm.adapter_index);
3777 	hm.u.n.address = index;
3778 	hm.u.n.data = data;
3779 
3780 	hpi_send_recv(&hm, &hr);
3781 
3782 	return hr.error;
3783 }
3784 
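/* Open a profile object for the given profile index. The number of
 * available profiles is always written to *pw_max_profiles; *ph_profile
 * is 0 on failure.
 */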
3785 u16 hpi_profile_open_all(const struct hpi_hsubsys *ph_subsys,
3786 	u16 adapter_index, u16 profile_index, u32 *ph_profile,
3787 	u16 *pw_max_profiles)
3788 {
3789 	struct hpi_message hm;
3790 	struct hpi_response hr;
3791 	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3792 		HPI_PROFILE_OPEN_ALL);
3793 	hm.adapter_index = adapter_index;
3794 	hm.obj_index = profile_index;
3795 	hpi_send_recv(&hm, &hr);
3796 
3797 	*pw_max_profiles = hr.u.p.u.o.max_profiles;
3798 	if (hr.error == 0)
3799 		*ph_profile =
3800 			hpi_indexes_to_handle(HPI_OBJ_PROFILE, adapter_index,
3801 			profile_index);
3802 	else
3803 		*ph_profile = 0;
3804 	return hr.error;
3805 }
3806 
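/* Fetch timing statistics for one profile bin: elapsed seconds and
 * microseconds, call count, and maximum/minimum execution times.
 */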
3807 u16 hpi_profile_get(const struct hpi_hsubsys *ph_subsys, u32 h_profile,
3808 	u16 bin_index, u16 *pw_seconds, u32 *pmicro_seconds, u32 *pcall_count,
3809 	u32 *pmax_micro_seconds, u32 *pmin_micro_seconds)
3810 {
3811 	struct hpi_message hm;
3812 	struct hpi_response hr;
3813 	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE, HPI_PROFILE_GET);
3814 	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3815 	hm.u.p.bin_index = bin_index;
3816 	hpi_send_recv(&hm, &hr);
3817 	if (pw_seconds)
3818 		*pw_seconds = hr.u.p.u.t.seconds;
3819 	if (pmicro_seconds)
3820 		*pmicro_seconds = hr.u.p.u.t.micro_seconds;
3821 	if (pcall_count)
3822 		*pcall_count = hr.u.p.u.t.call_count;
3823 	if (pmax_micro_seconds)
3824 		*pmax_micro_seconds = hr.u.p.u.t.max_micro_seconds;
3825 	if (pmin_micro_seconds)
3826 		*pmin_micro_seconds = hr.u.p.u.t.min_micro_seconds;
3827 	return hr.error;
3828 }
3829 
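/* The utilization figure is carried in the call_count field of the
 * profile time response; it is reported as 0 when the request fails.
 */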
3830 u16 hpi_profile_get_utilization(const struct hpi_hsubsys *ph_subsys,
3831 	u32 h_profile, u32 *putilization)
3832 {
3833 	struct hpi_message hm;
3834 	struct hpi_response hr;
3835 	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3836 		HPI_PROFILE_GET_UTILIZATION);
3837 	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3838 	hpi_send_recv(&hm, &hr);
3839 	if (hr.error) {
3840 		if (putilization)
3841 			*putilization = 0;
3842 	} else {
3843 		if (putilization)
3844 			*putilization = hr.u.p.u.t.call_count;
3845 	}
3846 	return hr.error;
3847 }
3848 
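/* Copy the profile bin name into sz_name (at most name_length bytes);
 * "??" is returned when the request fails.
 */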
3849 u16 hpi_profile_get_name(const struct hpi_hsubsys *ph_subsys, u32 h_profile,
3850 	u16 bin_index, char *sz_name, u16 name_length)
3851 {
3852 	struct hpi_message hm;
3853 	struct hpi_response hr;
3854 	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3855 		HPI_PROFILE_GET_NAME);
3856 	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3857 	hm.u.p.bin_index = bin_index;
3858 	hpi_send_recv(&hm, &hr);
3859 	if (hr.error) {
3860 		if (sz_name)
3861 			strlcpy(sz_name, "??", name_length);
3862 	} else {
3863 		if (sz_name)
3864 			memcpy(sz_name, (char *)hr.u.p.u.n.sz_name,
3865 				name_length);
3866 	}
3867 	return hr.error;
3868 }
3869 
3870 u16 hpi_profile_start_all(const struct hpi_hsubsys *ph_subsys, u32 h_profile)
3871 {
3872 	struct hpi_message hm;
3873 	struct hpi_response hr;
3874 	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3875 		HPI_PROFILE_START_ALL);
3876 	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3877 	hpi_send_recv(&hm, &hr);
3878 
3879 	return hr.error;
3880 }
3881 
3882 u16 hpi_profile_stop_all(const struct hpi_hsubsys *ph_subsys, u32 h_profile)
3883 {
3884 	struct hpi_message hm;
3885 	struct hpi_response hr;
3886 	hpi_init_message_response(&hm, &hr, HPI_OBJ_PROFILE,
3887 		HPI_PROFILE_STOP_ALL);
3888 	u32TOINDEXES(h_profile, &hm.adapter_index, &hm.obj_index);
3889 	hpi_send_recv(&hm, &hr);
3890 
3891 	return hr.error;
3892 }
3893 
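/* Watchdog control: hpi_watchdog_open obtains a handle,
 * hpi_watchdog_set_time programs the timeout in milliseconds and
 * hpi_watchdog_ping sends a ping message to the adapter.
 */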
3894 u16 hpi_watchdog_open(const struct hpi_hsubsys *ph_subsys, u16 adapter_index,
3895 	u32 *ph_watchdog)
3896 {
3897 	struct hpi_message hm;
3898 	struct hpi_response hr;
3899 	hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
3900 		HPI_WATCHDOG_OPEN);
3901 	hm.adapter_index = adapter_index;
3902 
3903 	hpi_send_recv(&hm, &hr);
3904 
3905 	if (hr.error == 0)
3906 		*ph_watchdog =
3907 			hpi_indexes_to_handle(HPI_OBJ_WATCHDOG, adapter_index,
3908 			0);
3909 	else
3910 		*ph_watchdog = 0;
3911 	return hr.error;
3912 }
3913 
3914 u16 hpi_watchdog_set_time(const struct hpi_hsubsys *ph_subsys, u32 h_watchdog,
3915 	u32 time_millisec)
3916 {
3917 	struct hpi_message hm;
3918 	struct hpi_response hr;
3919 	hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
3920 		HPI_WATCHDOG_SET_TIME);
3921 	u32TOINDEX(h_watchdog, &hm.adapter_index);
3922 	hm.u.w.time_ms = time_millisec;
3923 
3924 	hpi_send_recv(&hm, &hr);
3925 
3926 	return hr.error;
3927 }
3928 
3929 u16 hpi_watchdog_ping(const struct hpi_hsubsys *ph_subsys, u32 h_watchdog)
3930 {
3931 	struct hpi_message hm;
3932 	struct hpi_response hr;
3933 	hpi_init_message_response(&hm, &hr, HPI_OBJ_WATCHDOG,
3934 		HPI_WATCHDOG_PING);
3935 	u32TOINDEX(h_watchdog, &hm.adapter_index);
3936 
3937 	hpi_send_recv(&hm, &hr);
3938 
3939 	return hr.error;
3940 }
3941