// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * V4L2 controls framework core implementation.
 *
 * Copyright (C) 2010-2021 Hans Verkuil <hverkuil-cisco@xs4all.nl>
 */

#include <linux/export.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/v4l2-fwnode.h>

#include "v4l2-ctrls-priv.h"

static const union v4l2_ctrl_ptr ptr_null;

static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl,
		       u32 changes)
{
	memset(ev, 0, sizeof(*ev));
	ev->type = V4L2_EVENT_CTRL;
	ev->id = ctrl->id;
	ev->u.ctrl.changes = changes;
	ev->u.ctrl.type = ctrl->type;
	ev->u.ctrl.flags = user_flags(ctrl);
	if (ctrl->is_ptr)
		ev->u.ctrl.value64 = 0;
	else
		ev->u.ctrl.value64 = *ctrl->p_cur.p_s64;
	ev->u.ctrl.minimum = ctrl->minimum;
	ev->u.ctrl.maximum = ctrl->maximum;
	if (ctrl->type == V4L2_CTRL_TYPE_MENU
	    || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU)
		ev->u.ctrl.step = 1;
	else
		ev->u.ctrl.step = ctrl->step;
	ev->u.ctrl.default_value = ctrl->default_value;
}

void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl)
{
	struct v4l2_event ev;
	u32 changes = V4L2_EVENT_CTRL_CH_FLAGS;

	if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY))
		changes |= V4L2_EVENT_CTRL_CH_VALUE;
	fill_event(&ev, ctrl, changes);
	v4l2_event_queue_fh(fh, &ev);
}

void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes)
{
	struct v4l2_event ev;
	struct v4l2_subscribed_event *sev;

	if (list_empty(&ctrl->ev_subs))
		return;
	fill_event(&ev, ctrl, changes);

	list_for_each_entry(sev, &ctrl->ev_subs, node)
		if (sev->fh != fh ||
		    (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK))
			v4l2_event_queue_fh(sev->fh, &ev);
}

bool v4l2_ctrl_type_op_equal(const struct v4l2_ctrl *ctrl,
			     union v4l2_ctrl_ptr ptr1, union v4l2_ctrl_ptr ptr2)
{
	unsigned int i;

	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_BUTTON:
		return false;
	case V4L2_CTRL_TYPE_STRING:
		for (i = 0; i < ctrl->elems; i++) {
			unsigned int idx = i * ctrl->elem_size;

			/* strings are always 0-terminated */
			if (strcmp(ptr1.p_char + idx, ptr2.p_char + idx))
				return false;
		}
		return true;
	default:
		return !memcmp(ptr1.p_const, ptr2.p_const,
			       ctrl->elems * ctrl->elem_size);
	}
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_equal);
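/*
 * For reference, a purely illustrative userspace sketch that subscribes to
 * the events queued above. V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK asks for events
 * even for changes made through the same file handle, which is exactly what
 * send_event() filters on:
 *
 *	struct v4l2_event_subscription sub = {
 *		.type = V4L2_EVENT_CTRL,
 *		.id = V4L2_CID_BRIGHTNESS,
 *		.flags = V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK,
 *	};
 *
 *	ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
 *
 * Each subsequent VIDIOC_DQEVENT then returns a struct v4l2_event whose
 * u.ctrl member was filled in by fill_event() above.
 */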
/* Default intra MPEG-2 quantisation coefficients, from the specification. */
static const u8 mpeg2_intra_quant_matrix[64] = {
	8,  16, 16, 19, 16, 19, 22, 22,
	22, 22, 22, 22, 26, 24, 26, 27,
	27, 27, 26, 26, 26, 26, 27, 27,
	27, 29, 29, 29, 34, 34, 34, 29,
	29, 29, 27, 27, 29, 29, 32, 32,
	34, 34, 37, 38, 37, 35, 35, 34,
	35, 38, 38, 40, 40, 40, 48, 48,
	46, 46, 56, 56, 58, 69, 69, 83
};

static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx,
			      union v4l2_ctrl_ptr ptr)
{
	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
	struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant;
	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
	struct v4l2_ctrl_vp9_frame *p_vp9_frame;
	struct v4l2_ctrl_fwht_params *p_fwht_params;
	struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix;
	void *p = ptr.p + idx * ctrl->elem_size;

	if (ctrl->p_def.p_const)
		memcpy(p, ctrl->p_def.p_const, ctrl->elem_size);
	else
		memset(p, 0, ctrl->elem_size);

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		p_mpeg2_sequence = p;

		/* 4:2:0 */
		p_mpeg2_sequence->chroma_format = 1;
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		p_mpeg2_picture = p;

		/* interlaced top field */
		p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD;
		p_mpeg2_picture->picture_coding_type =
					V4L2_MPEG2_PIC_CODING_TYPE_I;
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		p_mpeg2_quant = p;

		memcpy(p_mpeg2_quant->intra_quantiser_matrix,
		       mpeg2_intra_quant_matrix,
		       ARRAY_SIZE(mpeg2_intra_quant_matrix));
		/*
		 * The default non-intra MPEG-2 quantisation
		 * coefficients are all 16, as per the specification.
		 */
		memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16,
		       sizeof(p_mpeg2_quant->non_intra_quantiser_matrix));
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		p_vp8_frame = p;
		p_vp8_frame->num_dct_parts = 1;
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		p_vp9_frame = p;
		p_vp9_frame->profile = 0;
		p_vp9_frame->bit_depth = 8;
		p_vp9_frame->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
			V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING;
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		p_fwht_params = p;
		p_fwht_params->version = V4L2_FWHT_VERSION;
		p_fwht_params->width = 1280;
		p_fwht_params->height = 720;
		p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV |
			(2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET);
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		p_h264_scaling_matrix = p;
		/*
		 * The default (flat) H.264 scaling matrix when none are
		 * specified in the bitstream, this is according to formulas
		 * (7-8) and (7-9) of the specification.
		 */
		memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix));
		break;
	}
}

void v4l2_ctrl_type_op_init(const struct v4l2_ctrl *ctrl, u32 from_idx,
			    union v4l2_ctrl_ptr ptr)
{
	unsigned int i;
	u32 tot_elems = ctrl->elems;
	u32 elems = tot_elems - from_idx;

	if (from_idx >= tot_elems)
		return;

	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_STRING:
		for (i = from_idx; i < tot_elems; i++) {
			unsigned int offset = i * ctrl->elem_size;

			memset(ptr.p_char + offset, ' ', ctrl->minimum);
			ptr.p_char[offset + ctrl->minimum] = '\0';
		}
		break;
	case V4L2_CTRL_TYPE_INTEGER64:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_s64[i] = ctrl->default_value;
		} else {
			memset(ptr.p_s64 + from_idx, 0, elems * sizeof(s64));
		}
		break;
	case V4L2_CTRL_TYPE_INTEGER:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_BITMASK:
	case V4L2_CTRL_TYPE_BOOLEAN:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_s32[i] = ctrl->default_value;
		} else {
			memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32));
		}
		break;
	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32));
		break;
	case V4L2_CTRL_TYPE_U8:
		memset(ptr.p_u8 + from_idx, ctrl->default_value, elems);
		break;
	case V4L2_CTRL_TYPE_U16:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_u16[i] = ctrl->default_value;
		} else {
			memset(ptr.p_u16 + from_idx, 0, elems * sizeof(u16));
		}
		break;
	case V4L2_CTRL_TYPE_U32:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_u32[i] = ctrl->default_value;
		} else {
			memset(ptr.p_u32 + from_idx, 0, elems * sizeof(u32));
		}
		break;
	default:
		for (i = from_idx; i < tot_elems; i++)
			std_init_compound(ctrl, i, ptr);
		break;
	}
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_init);

void v4l2_ctrl_type_op_log(const struct v4l2_ctrl *ctrl)
{
	union v4l2_ctrl_ptr ptr = ctrl->p_cur;

	if (ctrl->is_array) {
		unsigned i;

		for (i = 0; i < ctrl->nr_of_dims; i++)
			pr_cont("[%u]", ctrl->dims[i]);
		pr_cont(" ");
	}

	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_INTEGER:
		pr_cont("%d", *ptr.p_s32);
		break;
	case V4L2_CTRL_TYPE_BOOLEAN:
		pr_cont("%s", *ptr.p_s32 ? "true" : "false");
		break;
	case V4L2_CTRL_TYPE_MENU:
		pr_cont("%s", ctrl->qmenu[*ptr.p_s32]);
		break;
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]);
		break;
	case V4L2_CTRL_TYPE_BITMASK:
		pr_cont("0x%08x", *ptr.p_s32);
		break;
	case V4L2_CTRL_TYPE_INTEGER64:
		pr_cont("%lld", *ptr.p_s64);
		break;
	case V4L2_CTRL_TYPE_STRING:
		pr_cont("%s", ptr.p_char);
		break;
	case V4L2_CTRL_TYPE_U8:
		pr_cont("%u", (unsigned)*ptr.p_u8);
		break;
	case V4L2_CTRL_TYPE_U16:
		pr_cont("%u", (unsigned)*ptr.p_u16);
		break;
	case V4L2_CTRL_TYPE_U32:
		pr_cont("%u", (unsigned)*ptr.p_u32);
		break;
	case V4L2_CTRL_TYPE_H264_SPS:
		pr_cont("H264_SPS");
		break;
	case V4L2_CTRL_TYPE_H264_PPS:
		pr_cont("H264_PPS");
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		pr_cont("H264_SCALING_MATRIX");
		break;
	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		pr_cont("H264_SLICE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		pr_cont("H264_DECODE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		pr_cont("H264_PRED_WEIGHTS");
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		pr_cont("FWHT_PARAMS");
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		pr_cont("VP8_FRAME");
		break;
	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		pr_cont("HDR10_CLL_INFO");
		break;
	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		pr_cont("HDR10_MASTERING_DISPLAY");
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		pr_cont("MPEG2_QUANTISATION");
		break;
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		pr_cont("MPEG2_SEQUENCE");
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		pr_cont("MPEG2_PICTURE");
		break;
	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		pr_cont("VP9_COMPRESSED_HDR");
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		pr_cont("VP9_FRAME");
		break;
	case V4L2_CTRL_TYPE_HEVC_SPS:
		pr_cont("HEVC_SPS");
		break;
	case V4L2_CTRL_TYPE_HEVC_PPS:
		pr_cont("HEVC_PPS");
		break;
	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		pr_cont("HEVC_SLICE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		pr_cont("HEVC_SCALING_MATRIX");
		break;
	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		pr_cont("HEVC_DECODE_PARAMS");
		break;
	default:
		pr_cont("unknown type %d", ctrl->type);
		break;
	}
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_log);

/*
 * Round towards the closest legal value. Be careful when we are
 * close to the maximum range of the control type to prevent
 * wrap-arounds.
 */
#define ROUND_TO_RANGE(val, offset_type, ctrl)			\
({								\
	offset_type offset;					\
	if ((ctrl)->maximum >= 0 &&				\
	    val >= (ctrl)->maximum - (s32)((ctrl)->step / 2))	\
		val = (ctrl)->maximum;				\
	else							\
		val += (s32)((ctrl)->step / 2);			\
	val = clamp_t(typeof(val), val,				\
		      (ctrl)->minimum, (ctrl)->maximum);	\
	offset = (val) - (ctrl)->minimum;			\
	offset = (ctrl)->step * (offset / (u32)(ctrl)->step);	\
	val = (ctrl)->minimum + offset;				\
	0;							\
})
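/*
 * A worked example of the rounding above, assuming a control with minimum 0,
 * maximum 255 and step 16: a new value of 37 becomes 37 + 8 = 45, which is
 * then snapped down to the step grid as 16 * (45 / 16) = 32; a value of 250
 * lies within step/2 of the maximum and is clamped straight to 255, even
 * though 255 is not itself on the step grid.
 */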
/* Validate a new control */

#define zero_padding(s) \
	memset(&(s).padding, 0, sizeof((s).padding))
#define zero_reserved(s) \
	memset(&(s).reserved, 0, sizeof((s).reserved))

static int
validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf)
{
	unsigned int i;

	if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED |
			  V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE))
		return -EINVAL;

	/* Check that all values are in the accepted range. */
	if (lf->level > GENMASK(5, 0))
		return -EINVAL;

	if (lf->sharpness > GENMASK(2, 0))
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++)
		if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63)
			return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++)
		if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63)
			return -EINVAL;

	zero_reserved(*lf);
	return 0;
}

static int
validate_vp9_quant_params(struct v4l2_vp9_quantization *quant)
{
	if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 ||
	    quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 ||
	    quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15)
		return -EINVAL;

	zero_reserved(*quant);
	return 0;
}

static int
validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg)
{
	unsigned int i, j;

	if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED |
			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP |
			   V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE |
			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA |
			   V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) {
		if (seg->feature_enabled[i] &
		    ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK)
			return -EINVAL;
	}

	for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) {
		static const int range[] = { 255, 63, 3, 0 };

		for (j = 0; j < ARRAY_SIZE(seg->feature_data[i]); j++) {
			if (seg->feature_data[i][j] < -range[j] ||
			    seg->feature_data[i][j] > range[j])
				return -EINVAL;
		}
	}

	zero_reserved(*seg);
	return 0;
}

static int
validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr)
{
	if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT)
		return -EINVAL;

	return 0;
}

static int
validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame)
{
	int ret;

	/* Make sure we're not passed invalid flags. */
	if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			     V4L2_VP9_FRAME_FLAG_SHOW_FRAME |
			     V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT |
			     V4L2_VP9_FRAME_FLAG_INTRA_ONLY |
			     V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV |
			     V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX |
			     V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE |
			     V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
			     V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING |
			     V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING))
		return -EINVAL;

	if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT &&
	    frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX)
		return -EINVAL;

	if (frame->profile > V4L2_VP9_PROFILE_MAX)
		return -EINVAL;

	if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL)
		return -EINVAL;

	if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX)
		return -EINVAL;

	/*
	 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10
	 * and 12 bit depths.
	 */
	if ((frame->profile < 2 && frame->bit_depth != 8) ||
	    (frame->profile >= 2 &&
	     (frame->bit_depth != 10 && frame->bit_depth != 12)))
		return -EINVAL;

	/* Profile 0 and 2 only accept YUV 4:2:0. */
	if ((frame->profile == 0 || frame->profile == 2) &&
	    (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) ||
	     !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
		return -EINVAL;

	/* Profile 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */
	if ((frame->profile == 1 || frame->profile == 3) &&
	    ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) &&
	     (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
		return -EINVAL;

	if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE)
		return -EINVAL;

	/*
	 * According to the spec, tile_cols_log2 shall be less than or equal
	 * to 6.
	 */
	if (frame->tile_cols_log2 > 6)
		return -EINVAL;

	if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT)
		return -EINVAL;

	ret = validate_vp9_lf_params(&frame->lf);
	if (ret)
		return ret;

	ret = validate_vp9_quant_params(&frame->quant);
	if (ret)
		return ret;

	ret = validate_vp9_seg_params(&frame->seg);
	if (ret)
		return ret;

	zero_reserved(*frame);
	return 0;
}
/*
 * Compound controls validation requires setting unused fields/flags to zero
 * in order to properly detect unchanged controls with v4l2_ctrl_type_op_equal's
 * memcmp.
 */
static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx,
				 union v4l2_ctrl_ptr ptr)
{
	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
	struct v4l2_ctrl_fwht_params *p_fwht_params;
	struct v4l2_ctrl_h264_sps *p_h264_sps;
	struct v4l2_ctrl_h264_pps *p_h264_pps;
	struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights;
	struct v4l2_ctrl_h264_slice_params *p_h264_slice_params;
	struct v4l2_ctrl_h264_decode_params *p_h264_dec_params;
	struct v4l2_ctrl_hevc_sps *p_hevc_sps;
	struct v4l2_ctrl_hevc_pps *p_hevc_pps;
	struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering;
	struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params;
	struct v4l2_area *area;
	void *p = ptr.p + idx * ctrl->elem_size;
	unsigned int i;

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		p_mpeg2_sequence = p;

		switch (p_mpeg2_sequence->chroma_format) {
		case 1: /* 4:2:0 */
		case 2: /* 4:2:2 */
		case 3: /* 4:4:4 */
			break;
		default:
			return -EINVAL;
		}
		break;

	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		p_mpeg2_picture = p;

		switch (p_mpeg2_picture->intra_dc_precision) {
		case 0: /* 8 bits */
		case 1: /* 9 bits */
		case 2: /* 10 bits */
		case 3: /* 11 bits */
			break;
		default:
			return -EINVAL;
		}

		switch (p_mpeg2_picture->picture_structure) {
		case V4L2_MPEG2_PIC_TOP_FIELD:
		case V4L2_MPEG2_PIC_BOTTOM_FIELD:
		case V4L2_MPEG2_PIC_FRAME:
			break;
		default:
			return -EINVAL;
		}

		switch (p_mpeg2_picture->picture_coding_type) {
		case V4L2_MPEG2_PIC_CODING_TYPE_I:
		case V4L2_MPEG2_PIC_CODING_TYPE_P:
		case V4L2_MPEG2_PIC_CODING_TYPE_B:
			break;
		default:
			return -EINVAL;
		}
		zero_reserved(*p_mpeg2_picture);
		break;

	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		break;

	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		p_fwht_params = p;
		if (p_fwht_params->version < V4L2_FWHT_VERSION)
			return -EINVAL;
		if (!p_fwht_params->width || !p_fwht_params->height)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SPS:
		p_h264_sps = p;

		/* Some syntax elements are only conditionally valid */
		if (p_h264_sps->pic_order_cnt_type != 0) {
			p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
		} else if (p_h264_sps->pic_order_cnt_type != 1) {
			p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0;
			p_h264_sps->offset_for_non_ref_pic = 0;
			p_h264_sps->offset_for_top_to_bottom_field = 0;
			memset(&p_h264_sps->offset_for_ref_frame, 0,
			       sizeof(p_h264_sps->offset_for_ref_frame));
		}

		if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) {
			p_h264_sps->chroma_format_idc = 1;
			p_h264_sps->bit_depth_luma_minus8 = 0;
			p_h264_sps->bit_depth_chroma_minus8 = 0;

			p_h264_sps->flags &=
				~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;

			if (p_h264_sps->chroma_format_idc < 3)
				p_h264_sps->flags &=
					~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
		}

		if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY)
			p_h264_sps->flags &=
				~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;

		/*
		 * Chroma 4:2:2 format requires at least the High 4:2:2 profile.
		 *
		 * The H264 specification and well-known parser implementations
		 * use profile-idc values directly, as that is clearer and
		 * less ambiguous. We do the same here.
		 */
		if (p_h264_sps->profile_idc < 122 &&
		    p_h264_sps->chroma_format_idc > 1)
			return -EINVAL;
		/* Chroma 4:4:4 format requires at least the High 4:4:4 profile */
		if (p_h264_sps->profile_idc < 244 &&
		    p_h264_sps->chroma_format_idc > 2)
			return -EINVAL;
		if (p_h264_sps->chroma_format_idc > 3)
			return -EINVAL;

		if (p_h264_sps->bit_depth_luma_minus8 > 6)
			return -EINVAL;
		if (p_h264_sps->bit_depth_chroma_minus8 > 6)
			return -EINVAL;
		if (p_h264_sps->log2_max_frame_num_minus4 > 12)
			return -EINVAL;
		if (p_h264_sps->pic_order_cnt_type > 2)
			return -EINVAL;
		if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12)
			return -EINVAL;
		if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_PPS:
		p_h264_pps = p;

		if (p_h264_pps->num_slice_groups_minus1 > 7)
			return -EINVAL;
		if (p_h264_pps->num_ref_idx_l0_default_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_pps->num_ref_idx_l1_default_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_pps->weighted_bipred_idc > 2)
			return -EINVAL;
		/*
		 * pic_init_qp_minus26 shall be in the range of
		 * -(26 + QpBdOffset_y) to +25, inclusive,
		 * where QpBdOffset_y is 6 * bit_depth_luma_minus8
		 */
		if (p_h264_pps->pic_init_qp_minus26 < -62 ||
		    p_h264_pps->pic_init_qp_minus26 > 25)
			return -EINVAL;
		if (p_h264_pps->pic_init_qs_minus26 < -26 ||
		    p_h264_pps->pic_init_qs_minus26 > 25)
			return -EINVAL;
		if (p_h264_pps->chroma_qp_index_offset < -12 ||
		    p_h264_pps->chroma_qp_index_offset > 12)
			return -EINVAL;
		if (p_h264_pps->second_chroma_qp_index_offset < -12 ||
		    p_h264_pps->second_chroma_qp_index_offset > 12)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		break;

	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		p_h264_pred_weights = p;

		if (p_h264_pred_weights->luma_log2_weight_denom > 7)
			return -EINVAL;
		if (p_h264_pred_weights->chroma_log2_weight_denom > 7)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		p_h264_slice_params = p;

		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
			p_h264_slice_params->flags &=
				~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;

		if (p_h264_slice_params->colour_plane_id > 2)
			return -EINVAL;
		if (p_h264_slice_params->cabac_init_idc > 2)
			return -EINVAL;
		if (p_h264_slice_params->disable_deblocking_filter_idc > 2)
			return -EINVAL;
		if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 ||
		    p_h264_slice_params->slice_alpha_c0_offset_div2 > 6)
			return -EINVAL;
		if (p_h264_slice_params->slice_beta_offset_div2 < -6 ||
		    p_h264_slice_params->slice_beta_offset_div2 > 6)
			return -EINVAL;

		if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I ||
		    p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI)
			p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0;
		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
			p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0;

		if (p_h264_slice_params->num_ref_idx_l0_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_slice_params->num_ref_idx_l1_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		zero_reserved(*p_h264_slice_params);
		break;

	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		p_h264_dec_params = p;

		if (p_h264_dec_params->nal_ref_idc > 3)
			return -EINVAL;
		for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) {
			struct v4l2_h264_dpb_entry *dpb_entry =
				&p_h264_dec_params->dpb[i];

			zero_reserved(*dpb_entry);
		}
		zero_reserved(*p_h264_dec_params);
		break;

	case V4L2_CTRL_TYPE_VP8_FRAME:
		p_vp8_frame = p;

		switch (p_vp8_frame->num_dct_parts) {
		case 1:
		case 2:
		case 4:
		case 8:
			break;
		default:
			return -EINVAL;
		}
		zero_padding(p_vp8_frame->segment);
		zero_padding(p_vp8_frame->lf);
		zero_padding(p_vp8_frame->quant);
		zero_padding(p_vp8_frame->entropy);
		zero_padding(p_vp8_frame->coder_state);
		break;

	case V4L2_CTRL_TYPE_HEVC_SPS:
		p_hevc_sps = p;

		if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) {
			p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0;
			p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0;
			p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0;
			p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0;
		}

		if (!(p_hevc_sps->flags &
		      V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT))
			p_hevc_sps->num_long_term_ref_pics_sps = 0;
		break;

	case V4L2_CTRL_TYPE_HEVC_PPS:
		p_hevc_pps = p;

		if (!(p_hevc_pps->flags &
		      V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED))
			p_hevc_pps->diff_cu_qp_delta_depth = 0;

		if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) {
			p_hevc_pps->num_tile_columns_minus1 = 0;
			p_hevc_pps->num_tile_rows_minus1 = 0;
			memset(&p_hevc_pps->column_width_minus1, 0,
			       sizeof(p_hevc_pps->column_width_minus1));
			memset(&p_hevc_pps->row_height_minus1, 0,
			       sizeof(p_hevc_pps->row_height_minus1));

			p_hevc_pps->flags &=
				~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
		}

		if (p_hevc_pps->flags &
		    V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) {
			p_hevc_pps->pps_beta_offset_div2 = 0;
			p_hevc_pps->pps_tc_offset_div2 = 0;
		}
		break;

	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		p_hevc_decode_params = p;

		if (p_hevc_decode_params->num_active_dpb_entries >
		    V4L2_HEVC_DPB_ENTRIES_NUM_MAX)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		break;

	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		break;

	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		p_hdr10_mastering = p;

		for (i = 0; i < 3; ++i) {
			if (p_hdr10_mastering->display_primaries_x[i] <
				V4L2_HDR10_MASTERING_PRIMARIES_X_LOW ||
			    p_hdr10_mastering->display_primaries_x[i] >
				V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH ||
			    p_hdr10_mastering->display_primaries_y[i] <
				V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW ||
			    p_hdr10_mastering->display_primaries_y[i] >
				V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH)
				return -EINVAL;
		}

		if (p_hdr10_mastering->white_point_x <
			V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW ||
		    p_hdr10_mastering->white_point_x >
			V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH ||
		    p_hdr10_mastering->white_point_y <
			V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW ||
		    p_hdr10_mastering->white_point_y >
			V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH)
			return -EINVAL;

		if (p_hdr10_mastering->max_display_mastering_luminance <
			V4L2_HDR10_MASTERING_MAX_LUMA_LOW ||
		    p_hdr10_mastering->max_display_mastering_luminance >
			V4L2_HDR10_MASTERING_MAX_LUMA_HIGH ||
		    p_hdr10_mastering->min_display_mastering_luminance <
			V4L2_HDR10_MASTERING_MIN_LUMA_LOW ||
		    p_hdr10_mastering->min_display_mastering_luminance >
			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
			return -EINVAL;

		/* The following restriction comes from ITU-T Rec. H.265 spec */
		if (p_hdr10_mastering->max_display_mastering_luminance ==
			V4L2_HDR10_MASTERING_MAX_LUMA_LOW &&
		    p_hdr10_mastering->min_display_mastering_luminance ==
			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
			return -EINVAL;

		break;

	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		break;

	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		return validate_vp9_compressed_hdr(p);

	case V4L2_CTRL_TYPE_VP9_FRAME:
		return validate_vp9_frame(p);

	case V4L2_CTRL_TYPE_AREA:
		area = p;
		if (!area->width || !area->height)
			return -EINVAL;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}
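/*
 * For context, compound controls such as the ones validated above are
 * normally filled in by userspace and passed via VIDIOC_S_EXT_CTRLS. A rough,
 * illustrative sketch for a stateless H.264 decoder follows (in practice the
 * control is usually attached to a media request rather than set directly):
 *
 *	struct v4l2_ctrl_h264_sps sps = { ... parsed from the bitstream ... };
 *	struct v4l2_ext_control ctrl = {
 *		.id = V4L2_CID_STATELESS_H264_SPS,
 *		.ptr = &sps,
 *		.size = sizeof(sps),
 *	};
 *	struct v4l2_ext_controls ctrls = {
 *		.which = V4L2_CTRL_WHICH_CUR_VAL,
 *		.count = 1,
 *		.controls = &ctrl,
 *	};
 *
 *	ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
 */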
static int std_validate_elem(const struct v4l2_ctrl *ctrl, u32 idx,
			     union v4l2_ctrl_ptr ptr)
{
	size_t len;
	u64 offset;
	s64 val;

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_INTEGER:
		return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl);
	case V4L2_CTRL_TYPE_INTEGER64:
		/*
		 * We can't use the ROUND_TO_RANGE define here due to
		 * the u64 divide that needs special care.
		 */
		val = ptr.p_s64[idx];
		if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2))
			val = ctrl->maximum;
		else
			val += (s64)(ctrl->step / 2);
		val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum);
		offset = val - ctrl->minimum;
		do_div(offset, ctrl->step);
		ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step;
		return 0;
	case V4L2_CTRL_TYPE_U8:
		return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl);
	case V4L2_CTRL_TYPE_U16:
		return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl);
	case V4L2_CTRL_TYPE_U32:
		return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl);

	case V4L2_CTRL_TYPE_BOOLEAN:
		ptr.p_s32[idx] = !!ptr.p_s32[idx];
		return 0;

	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum)
			return -ERANGE;
		if (ptr.p_s32[idx] < BITS_PER_LONG_LONG &&
		    (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx])))
			return -EINVAL;
		if (ctrl->type == V4L2_CTRL_TYPE_MENU &&
		    ctrl->qmenu[ptr.p_s32[idx]][0] == '\0')
			return -EINVAL;
		return 0;

	case V4L2_CTRL_TYPE_BITMASK:
		ptr.p_s32[idx] &= ctrl->maximum;
		return 0;

	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		ptr.p_s32[idx] = 0;
		return 0;

	case V4L2_CTRL_TYPE_STRING:
		idx *= ctrl->elem_size;
		len = strlen(ptr.p_char + idx);
		if (len < ctrl->minimum)
			return -ERANGE;
		if ((len - (u32)ctrl->minimum) % (u32)ctrl->step)
			return -ERANGE;
		return 0;

	default:
		return std_validate_compound(ctrl, idx, ptr);
	}
}

int v4l2_ctrl_type_op_validate(const struct v4l2_ctrl *ctrl,
			       union v4l2_ctrl_ptr ptr)
{
	unsigned int i;
	int ret = 0;

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_U8:
		if (ctrl->maximum == 0xff && ctrl->minimum == 0 && ctrl->step == 1)
			return 0;
		break;
	case V4L2_CTRL_TYPE_U16:
		if (ctrl->maximum == 0xffff && ctrl->minimum == 0 && ctrl->step == 1)
			return 0;
		break;
	case V4L2_CTRL_TYPE_U32:
		if (ctrl->maximum == 0xffffffff && ctrl->minimum == 0 && ctrl->step == 1)
			return 0;
		break;

	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		memset(ptr.p_s32, 0, ctrl->new_elems * sizeof(s32));
		return 0;
	}

	for (i = 0; !ret && i < ctrl->new_elems; i++)
		ret = std_validate_elem(ctrl, i, ptr);
	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_validate);
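/*
 * The four exported type operations above are also the building blocks for
 * drivers that only want to override part of the behaviour. A hypothetical
 * example that keeps the standard ops except for initialization (my_type_init
 * is made up):
 *
 *	static const struct v4l2_ctrl_type_ops my_type_ops = {
 *		.equal = v4l2_ctrl_type_op_equal,
 *		.init = my_type_init,
 *		.log = v4l2_ctrl_type_op_log,
 *		.validate = v4l2_ctrl_type_op_validate,
 *	};
 *
 * Such ops are hooked up through the .type_ops field of
 * struct v4l2_ctrl_config when calling v4l2_ctrl_new_custom().
 */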
static const struct v4l2_ctrl_type_ops std_type_ops = {
	.equal = v4l2_ctrl_type_op_equal,
	.init = v4l2_ctrl_type_op_init,
	.log = v4l2_ctrl_type_op_log,
	.validate = v4l2_ctrl_type_op_validate,
};

void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv)
{
	if (!ctrl)
		return;
	if (!notify) {
		ctrl->call_notify = 0;
		return;
	}
	if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify))
		return;
	ctrl->handler->notify = notify;
	ctrl->handler->notify_priv = priv;
	ctrl->call_notify = 1;
}
EXPORT_SYMBOL(v4l2_ctrl_notify);
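/*
 * A short usage sketch of the notify mechanism (the callback and priv
 * pointer are hypothetical):
 *
 *	static void my_ctrl_notify(struct v4l2_ctrl *ctrl, void *priv)
 *	{
 *		// react to a successfully applied control change
 *	}
 *
 *	v4l2_ctrl_notify(ctrl, my_ctrl_notify, my_priv);
 *
 * Note that only a single notify callback can be installed per handler;
 * the WARN_ON() above catches attempts to install a second, different one.
 */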
/* Copy one value to another. */
static void ptr_to_ptr(struct v4l2_ctrl *ctrl,
		       union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to,
		       unsigned int elems)
{
	if (ctrl == NULL)
		return;
	memcpy(to.p, from.p_const, elems * ctrl->elem_size);
}

/* Copy the new value to the current value. */
void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags)
{
	bool changed;

	if (ctrl == NULL)
		return;

	/* has_changed is set by cluster_changed */
	changed = ctrl->has_changed;
	if (changed) {
		if (ctrl->is_dyn_array)
			ctrl->elems = ctrl->new_elems;
		ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur, ctrl->elems);
	}

	if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) {
		/* Note: CH_FLAGS is only set for auto clusters. */
		ctrl->flags &=
			~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE);
		if (!is_cur_manual(ctrl->cluster[0])) {
			ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE;
			if (ctrl->cluster[0]->has_volatiles)
				ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE;
		}
		fh = NULL;
	}
	if (changed || ch_flags) {
		/* If a control was changed that was not one of the controls
		   modified by the application, then send the event to all. */
		if (!ctrl->is_new)
			fh = NULL;
		send_event(fh, ctrl,
			   (changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags);
		if (ctrl->call_notify && changed && ctrl->handler->notify)
			ctrl->handler->notify(ctrl, ctrl->handler->notify_priv);
	}
}

/* Copy the current value to the new value */
void cur_to_new(struct v4l2_ctrl *ctrl)
{
	if (ctrl == NULL)
		return;
	if (ctrl->is_dyn_array)
		ctrl->new_elems = ctrl->elems;
	ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems);
}

static bool req_alloc_array(struct v4l2_ctrl_ref *ref, u32 elems)
{
	void *tmp;

	if (elems == ref->p_req_array_alloc_elems)
		return true;
	if (ref->ctrl->is_dyn_array &&
	    elems < ref->p_req_array_alloc_elems)
		return true;

	tmp = kvmalloc(elems * ref->ctrl->elem_size, GFP_KERNEL);

	if (!tmp) {
		ref->p_req_array_enomem = true;
		return false;
	}
	ref->p_req_array_enomem = false;
	kvfree(ref->p_req.p);
	ref->p_req.p = tmp;
	ref->p_req_array_alloc_elems = elems;
	return true;
}

/* Copy the new value to the request value */
void new_to_req(struct v4l2_ctrl_ref *ref)
{
	struct v4l2_ctrl *ctrl;

	if (!ref)
		return;

	ctrl = ref->ctrl;
	if (ctrl->is_array && !req_alloc_array(ref, ctrl->new_elems))
		return;

	ref->p_req_elems = ctrl->new_elems;
	ptr_to_ptr(ctrl, ctrl->p_new, ref->p_req, ref->p_req_elems);
	ref->p_req_valid = true;
}

/* Copy the current value to the request value */
void cur_to_req(struct v4l2_ctrl_ref *ref)
{
	struct v4l2_ctrl *ctrl;

	if (!ref)
		return;

	ctrl = ref->ctrl;
	if (ctrl->is_array && !req_alloc_array(ref, ctrl->elems))
		return;

	ref->p_req_elems = ctrl->elems;
	ptr_to_ptr(ctrl, ctrl->p_cur, ref->p_req, ctrl->elems);
	ref->p_req_valid = true;
}

/* Copy the request value to the new value */
int req_to_new(struct v4l2_ctrl_ref *ref)
{
	struct v4l2_ctrl *ctrl;

	if (!ref)
		return 0;

	ctrl = ref->ctrl;

	/*
	 * This control was never set in the request, so just use the current
	 * value.
	 */
	if (!ref->p_req_valid) {
		if (ctrl->is_dyn_array)
			ctrl->new_elems = ctrl->elems;
		ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems);
		return 0;
	}

	/* Not an array, so just copy the request value */
	if (!ctrl->is_array) {
		ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems);
		return 0;
	}

	/* Sanity check, should never happen */
	if (WARN_ON(!ref->p_req_array_alloc_elems))
		return -ENOMEM;

	if (!ctrl->is_dyn_array &&
	    ref->p_req_elems != ctrl->p_array_alloc_elems)
		return -ENOMEM;

	/*
	 * Check if the number of elements in the request is more than the
	 * elements in ctrl->p_array. If so, attempt to realloc ctrl->p_array.
	 * Note that p_array is allocated with twice the number of elements
	 * in the dynamic array since it has to store both the current and
	 * new value of such a control.
	 */
	if (ref->p_req_elems > ctrl->p_array_alloc_elems) {
		unsigned int sz = ref->p_req_elems * ctrl->elem_size;
		void *old = ctrl->p_array;
		void *tmp = kvzalloc(2 * sz, GFP_KERNEL);

		if (!tmp)
			return -ENOMEM;
		memcpy(tmp, ctrl->p_new.p, ctrl->elems * ctrl->elem_size);
		memcpy(tmp + sz, ctrl->p_cur.p, ctrl->elems * ctrl->elem_size);
		ctrl->p_new.p = tmp;
		ctrl->p_cur.p = tmp + sz;
		ctrl->p_array = tmp;
		ctrl->p_array_alloc_elems = ref->p_req_elems;
		kvfree(old);
	}

	ctrl->new_elems = ref->p_req_elems;
	ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems);
	return 0;
}

/* Control range checking */
int check_range(enum v4l2_ctrl_type type,
		s64 min, s64 max, u64 step, s64 def)
{
	switch (type) {
	case V4L2_CTRL_TYPE_BOOLEAN:
		if (step != 1 || max > 1 || min < 0)
			return -ERANGE;
		fallthrough;
	case V4L2_CTRL_TYPE_U8:
	case V4L2_CTRL_TYPE_U16:
	case V4L2_CTRL_TYPE_U32:
	case V4L2_CTRL_TYPE_INTEGER:
	case V4L2_CTRL_TYPE_INTEGER64:
		if (step == 0 || min > max || def < min || def > max)
			return -ERANGE;
		return 0;
	case V4L2_CTRL_TYPE_BITMASK:
		if (step || min || !max || (def & ~max))
			return -ERANGE;
		return 0;
	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		if (min > max || def < min || def > max)
			return -ERANGE;
		/* Note: step == menu_skip_mask for menu controls.
		   So here we check if the default value is masked out. */
		if (step && ((1 << def) & step))
			return -EINVAL;
		return 0;
	case V4L2_CTRL_TYPE_STRING:
		if (min > max || min < 0 || step < 1 || def)
			return -ERANGE;
		return 0;
	default:
		return 0;
	}
}

/* Set the handler's error code if it wasn't set earlier already */
static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err)
{
	if (hdl->error == 0)
		hdl->error = err;
	return err;
}
/* Initialize the handler */
int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl,
				 unsigned nr_of_controls_hint,
				 struct lock_class_key *key, const char *name)
{
	mutex_init(&hdl->_lock);
	hdl->lock = &hdl->_lock;
	lockdep_set_class_and_name(hdl->lock, key, name);
	INIT_LIST_HEAD(&hdl->ctrls);
	INIT_LIST_HEAD(&hdl->ctrl_refs);
	hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8;
	hdl->buckets = kvcalloc(hdl->nr_of_buckets, sizeof(hdl->buckets[0]),
				GFP_KERNEL);
	hdl->error = hdl->buckets ? 0 : -ENOMEM;
	v4l2_ctrl_handler_init_request(hdl);
	return hdl->error;
}
EXPORT_SYMBOL(v4l2_ctrl_handler_init_class);

/* Free all controls and control refs */
void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl)
{
	struct v4l2_ctrl_ref *ref, *next_ref;
	struct v4l2_ctrl *ctrl, *next_ctrl;
	struct v4l2_subscribed_event *sev, *next_sev;

	if (hdl == NULL || hdl->buckets == NULL)
		return;

	v4l2_ctrl_handler_free_request(hdl);

	mutex_lock(hdl->lock);
	/* Free all nodes */
	list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) {
		list_del(&ref->node);
		if (ref->p_req_array_alloc_elems)
			kvfree(ref->p_req.p);
		kfree(ref);
	}
	/* Free all controls owned by the handler */
	list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) {
		list_del(&ctrl->node);
		list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node)
			list_del(&sev->node);
		kvfree(ctrl->p_array);
		kvfree(ctrl);
	}
	kvfree(hdl->buckets);
	hdl->buckets = NULL;
	hdl->cached = NULL;
	hdl->error = 0;
	mutex_unlock(hdl->lock);
	mutex_destroy(&hdl->_lock);
}
EXPORT_SYMBOL(v4l2_ctrl_handler_free);

/* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer
   be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing
   with applications that do not use the NEXT_CTRL flag.

   We just find the n-th private user control. It's O(N), but that should not
   be an issue in this particular case. */
static struct v4l2_ctrl_ref *find_private_ref(
		struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref;

	id -= V4L2_CID_PRIVATE_BASE;
	list_for_each_entry(ref, &hdl->ctrl_refs, node) {
		/* Search for private user controls that are compatible with
		   VIDIOC_G/S_CTRL. */
		if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER &&
		    V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) {
			if (!ref->ctrl->is_int)
				continue;
			if (id == 0)
				return ref;
			id--;
		}
	}
	return NULL;
}

/* Find a control with the given ID. */
struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref;
	int bucket;

	id &= V4L2_CTRL_ID_MASK;

	/* Old-style private controls need special handling */
	if (id >= V4L2_CID_PRIVATE_BASE)
		return find_private_ref(hdl, id);
	bucket = id % hdl->nr_of_buckets;

	/* Simple optimization: cache the last control found */
	if (hdl->cached && hdl->cached->ctrl->id == id)
		return hdl->cached;

	/* Not in cache, search the hash */
	ref = hdl->buckets ? hdl->buckets[bucket] : NULL;
	while (ref && ref->ctrl->id != id)
		ref = ref->next;

	if (ref)
		hdl->cached = ref; /* cache it! */
	return ref;
}

/* Find a control with the given ID. Take the handler's lock first. */
struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref = NULL;

	if (hdl) {
		mutex_lock(hdl->lock);
		ref = find_ref(hdl, id);
		mutex_unlock(hdl->lock);
	}
	return ref;
}

/* Find a control with the given ID. */
struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id)
{
	struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id);

	return ref ? ref->ctrl : NULL;
}
EXPORT_SYMBOL(v4l2_ctrl_find);
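/*
 * A minimal sketch of how a driver typically uses the handler API above
 * (my_ctrl_ops is a driver-provided struct v4l2_ctrl_ops; names are
 * illustrative only):
 *
 *	struct v4l2_ctrl_handler hdl;
 *
 *	v4l2_ctrl_handler_init(&hdl, 2);
 *	v4l2_ctrl_new_std(&hdl, &my_ctrl_ops, V4L2_CID_BRIGHTNESS, 0, 255, 1, 128);
 *	v4l2_ctrl_new_std(&hdl, &my_ctrl_ops, V4L2_CID_CONTRAST, 0, 255, 1, 16);
 *	if (hdl.error) {
 *		int err = hdl.error;
 *
 *		v4l2_ctrl_handler_free(&hdl);
 *		return err;
 *	}
 *	sd.ctrl_handler = &hdl;		// or vdev->ctrl_handler
 *
 * The error check only needs to be done once after all controls were added,
 * since handler_set_err() keeps the first error that occurred.
 */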
/* Allocate a new v4l2_ctrl_ref and hook it into the handler. */
int handler_new_ref(struct v4l2_ctrl_handler *hdl,
		    struct v4l2_ctrl *ctrl,
		    struct v4l2_ctrl_ref **ctrl_ref,
		    bool from_other_dev, bool allocate_req)
{
	struct v4l2_ctrl_ref *ref;
	struct v4l2_ctrl_ref *new_ref;
	u32 id = ctrl->id;
	u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1;
	int bucket = id % hdl->nr_of_buckets;	/* which bucket to use */
	unsigned int size_extra_req = 0;

	if (ctrl_ref)
		*ctrl_ref = NULL;

	/*
	 * Automatically add the control class if it is not yet present and
	 * the new control is not a compound control.
	 */
	if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES &&
	    id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL)
		if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0))
			return hdl->error;

	if (hdl->error)
		return hdl->error;

	if (allocate_req && !ctrl->is_array)
		size_extra_req = ctrl->elems * ctrl->elem_size;
	new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL);
	if (!new_ref)
		return handler_set_err(hdl, -ENOMEM);
	new_ref->ctrl = ctrl;
	new_ref->from_other_dev = from_other_dev;
	if (size_extra_req)
		new_ref->p_req.p = &new_ref[1];

	INIT_LIST_HEAD(&new_ref->node);

	mutex_lock(hdl->lock);

	/* Add immediately at the end of the list if the list is empty, or if
	   the last element in the list has a lower ID.
	   This ensures that when elements are added in ascending order the
	   insertion is an O(1) operation. */
	if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) {
		list_add_tail(&new_ref->node, &hdl->ctrl_refs);
		goto insert_in_hash;
	}

	/* Find insert position in sorted list */
	list_for_each_entry(ref, &hdl->ctrl_refs, node) {
		if (ref->ctrl->id < id)
			continue;
		/* Don't add duplicates */
		if (ref->ctrl->id == id) {
			kfree(new_ref);
			goto unlock;
		}
		list_add(&new_ref->node, ref->node.prev);
		break;
	}

insert_in_hash:
	/* Insert the control node in the hash */
	new_ref->next = hdl->buckets[bucket];
	hdl->buckets[bucket] = new_ref;
	if (ctrl_ref)
		*ctrl_ref = new_ref;
	if (ctrl->handler == hdl) {
		/* By default each control starts in a cluster of its own.
		 * new_ref->ctrl is basically a cluster array with one
		 * element, so that's perfect to use as the cluster pointer.
		 * But only do this for the handler that owns the control.
		 */
		ctrl->cluster = &new_ref->ctrl;
		ctrl->ncontrols = 1;
	}

unlock:
	mutex_unlock(hdl->lock);
	return 0;
}
/* Add a new control */
static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops,
			const struct v4l2_ctrl_type_ops *type_ops,
			u32 id, const char *name, enum v4l2_ctrl_type type,
			s64 min, s64 max, u64 step, s64 def,
			const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size,
			u32 flags, const char * const *qmenu,
			const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def,
			void *priv)
{
	struct v4l2_ctrl *ctrl;
	unsigned sz_extra;
	unsigned nr_of_dims = 0;
	unsigned elems = 1;
	bool is_array;
	unsigned tot_ctrl_size;
	void *data;
	int err;

	if (hdl->error)
		return NULL;

	while (dims && dims[nr_of_dims]) {
		elems *= dims[nr_of_dims];
		nr_of_dims++;
		if (nr_of_dims == V4L2_CTRL_MAX_DIMS)
			break;
	}
	is_array = nr_of_dims > 0;

	/* Prefill elem_size for all types handled by std_type_ops */
	switch ((u32)type) {
	case V4L2_CTRL_TYPE_INTEGER64:
		elem_size = sizeof(s64);
		break;
	case V4L2_CTRL_TYPE_STRING:
		elem_size = max + 1;
		break;
	case V4L2_CTRL_TYPE_U8:
		elem_size = sizeof(u8);
		break;
	case V4L2_CTRL_TYPE_U16:
		elem_size = sizeof(u16);
		break;
	case V4L2_CTRL_TYPE_U32:
		elem_size = sizeof(u32);
		break;
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence);
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture);
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation);
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_fwht_params);
		break;
	case V4L2_CTRL_TYPE_H264_SPS:
		elem_size = sizeof(struct v4l2_ctrl_h264_sps);
		break;
	case V4L2_CTRL_TYPE_H264_PPS:
		elem_size = sizeof(struct v4l2_ctrl_h264_pps);
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix);
		break;
	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_h264_slice_params);
		break;
	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_h264_decode_params);
		break;
	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights);
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		elem_size = sizeof(struct v4l2_ctrl_vp8_frame);
		break;
	case V4L2_CTRL_TYPE_HEVC_SPS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_sps);
		break;
	case V4L2_CTRL_TYPE_HEVC_PPS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_pps);
		break;
	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params);
		break;
	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix);
		break;
	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params);
		break;
	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info);
		break;
	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display);
		break;
	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr);
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		elem_size = sizeof(struct v4l2_ctrl_vp9_frame);
		break;
	case V4L2_CTRL_TYPE_AREA:
		elem_size = sizeof(struct v4l2_area);
		break;
	default:
		if (type < V4L2_CTRL_COMPOUND_TYPES)
			elem_size = sizeof(s32);
		break;
	}

	/* Sanity checks */
	if (id == 0 || name == NULL || !elem_size ||
	    id >= V4L2_CID_PRIVATE_BASE ||
	    (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) ||
	    (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) {
		handler_set_err(hdl, -ERANGE);
		return NULL;
	}
	err = check_range(type, min, max, step, def);
	if (err) {
		handler_set_err(hdl, err);
		return NULL;
	}
	if (is_array &&
	    (type == V4L2_CTRL_TYPE_BUTTON ||
	     type == V4L2_CTRL_TYPE_CTRL_CLASS)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	if (flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY) {
		/*
		 * For now only support this for one-dimensional arrays.
		 *
		 * This can be relaxed in the future, but this will
		 * require more effort.
		 */
		if (nr_of_dims != 1) {
			handler_set_err(hdl, -EINVAL);
			return NULL;
		}
		/* Start with just 1 element */
		elems = 1;
	}

	tot_ctrl_size = elem_size * elems;
	sz_extra = 0;
	if (type == V4L2_CTRL_TYPE_BUTTON)
		flags |= V4L2_CTRL_FLAG_WRITE_ONLY |
			 V4L2_CTRL_FLAG_EXECUTE_ON_WRITE;
	else if (type == V4L2_CTRL_TYPE_CTRL_CLASS)
		flags |= V4L2_CTRL_FLAG_READ_ONLY;
	else if (!is_array &&
		 (type == V4L2_CTRL_TYPE_INTEGER64 ||
		  type == V4L2_CTRL_TYPE_STRING ||
		  type >= V4L2_CTRL_COMPOUND_TYPES))
		sz_extra += 2 * tot_ctrl_size;

	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const)
		sz_extra += elem_size;

	ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL);
	if (ctrl == NULL) {
		handler_set_err(hdl, -ENOMEM);
		return NULL;
	}

	INIT_LIST_HEAD(&ctrl->node);
	INIT_LIST_HEAD(&ctrl->ev_subs);
	ctrl->handler = hdl;
	ctrl->ops = ops;
	ctrl->type_ops = type_ops ? type_ops : &std_type_ops;
	ctrl->id = id;
	ctrl->name = name;
	ctrl->type = type;
	ctrl->flags = flags;
	ctrl->minimum = min;
	ctrl->maximum = max;
	ctrl->step = step;
	ctrl->default_value = def;
	ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING;
	ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string;
	ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64;
	ctrl->is_array = is_array;
	ctrl->is_dyn_array = !!(flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY);
	ctrl->elems = elems;
	ctrl->new_elems = elems;
	ctrl->nr_of_dims = nr_of_dims;
	if (nr_of_dims)
		memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0]));
	ctrl->elem_size = elem_size;
	if (type == V4L2_CTRL_TYPE_MENU)
		ctrl->qmenu = qmenu;
	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
		ctrl->qmenu_int = qmenu_int;
	ctrl->priv = priv;
	ctrl->cur.val = ctrl->val = def;
	data = &ctrl[1];

	if (ctrl->is_array) {
		ctrl->p_array_alloc_elems = elems;
		ctrl->p_array = kvzalloc(2 * elems * elem_size, GFP_KERNEL);
		if (!ctrl->p_array) {
			kvfree(ctrl);
			return NULL;
		}
		data = ctrl->p_array;
	}

	if (!ctrl->is_int) {
		ctrl->p_new.p = data;
		ctrl->p_cur.p = data + tot_ctrl_size;
	} else {
		ctrl->p_new.p = &ctrl->val;
		ctrl->p_cur.p = &ctrl->cur.val;
	}

	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) {
		if (ctrl->is_array)
			ctrl->p_def.p = &ctrl[1];
		else
			ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size;
		memcpy(ctrl->p_def.p, p_def.p_const, elem_size);
	}

	ctrl->type_ops->init(ctrl, 0, ctrl->p_cur);
	cur_to_new(ctrl);

	if (handler_new_ref(hdl, ctrl, NULL, false, false)) {
		kvfree(ctrl->p_array);
		kvfree(ctrl);
		return NULL;
	}
	mutex_lock(hdl->lock);
	list_add_tail(&ctrl->node, &hdl->ctrls);
	mutex_unlock(hdl->lock);
	return ctrl;
}
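/*
 * A sketch of a driver-defined dynamic array control created through
 * v4l2_ctrl_new_custom() below (the CID, name and ops are hypothetical).
 * With V4L2_CTRL_FLAG_DYNAMIC_ARRAY the control starts out with a single
 * element and dims[] only bounds the maximum number of elements:
 *
 *	static const struct v4l2_ctrl_config my_coeffs_cfg = {
 *		.ops = &my_ctrl_ops,
 *		.id = MY_CID_COEFFS,
 *		.name = "My Coefficients",
 *		.type = V4L2_CTRL_TYPE_U32,
 *		.flags = V4L2_CTRL_FLAG_DYNAMIC_ARRAY,
 *		.min = 0,
 *		.max = 0xffffffff,
 *		.step = 1,
 *		.def = 0,
 *		.dims = { 64 },
 *	};
 *
 *	v4l2_ctrl_new_custom(hdl, &my_coeffs_cfg, NULL);
 */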
struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl,
				       const struct v4l2_ctrl_config *cfg, void *priv)
{
	bool is_menu;
	struct v4l2_ctrl *ctrl;
	const char *name = cfg->name;
	const char * const *qmenu = cfg->qmenu;
	const s64 *qmenu_int = cfg->qmenu_int;
	enum v4l2_ctrl_type type = cfg->type;
	u32 flags = cfg->flags;
	s64 min = cfg->min;
	s64 max = cfg->max;
	u64 step = cfg->step;
	s64 def = cfg->def;

	if (name == NULL)
		v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step,
			       &def, &flags);

	is_menu = (type == V4L2_CTRL_TYPE_MENU ||
		   type == V4L2_CTRL_TYPE_INTEGER_MENU);
	if (is_menu)
		WARN_ON(step);
	else
		WARN_ON(cfg->menu_skip_mask);
	if (type == V4L2_CTRL_TYPE_MENU && !qmenu) {
		qmenu = v4l2_ctrl_get_menu(cfg->id);
	} else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}

	ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name,
			     type, min, max,
			     is_menu ? cfg->menu_skip_mask : step, def,
			     cfg->dims, cfg->elem_size,
			     flags, qmenu, qmenu_int, cfg->p_def, priv);
	if (ctrl)
		ctrl->is_private = cfg->is_private;
	return ctrl;
}
EXPORT_SYMBOL(v4l2_ctrl_new_custom);
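/*
 * For a simple (non-array) driver-private control the same entry point is
 * used; a hypothetical read-only, volatile status control could look like:
 *
 *	static const struct v4l2_ctrl_config my_status_cfg = {
 *		.ops = &my_ctrl_ops,
 *		.id = MY_CID_STATUS,
 *		.name = "My Status",
 *		.type = V4L2_CTRL_TYPE_INTEGER,
 *		.flags = V4L2_CTRL_FLAG_VOLATILE | V4L2_CTRL_FLAG_READ_ONLY,
 *		.min = 0,
 *		.max = 100,
 *		.step = 1,
 *		.def = 0,
 *	};
 *
 *	struct v4l2_ctrl *ctrl = v4l2_ctrl_new_custom(hdl, &my_status_cfg, NULL);
 *
 * Volatile controls rely on the driver's g_volatile_ctrl() op to produce the
 * current value on every read.
 */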
/* Helper function for standard non-menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl,
				    const struct v4l2_ctrl_ops *ops,
				    u32 id, s64 min, s64 max, u64 step, s64 def)
{
	const char *name;
	enum v4l2_ctrl_type type;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type == V4L2_CTRL_TYPE_MENU ||
	    type == V4L2_CTRL_TYPE_INTEGER_MENU ||
	    type >= V4L2_CTRL_COMPOUND_TYPES) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     min, max, step, def, NULL, 0,
			     flags, NULL, NULL, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std);

/* Helper function for standard menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl,
					 const struct v4l2_ctrl_ops *ops,
					 u32 id, u8 _max, u64 mask, u8 _def)
{
	const char * const *qmenu = NULL;
	const s64 *qmenu_int = NULL;
	unsigned int qmenu_int_len = 0;
	const char *name;
	enum v4l2_ctrl_type type;
	s64 min;
	s64 max = _max;
	s64 def = _def;
	u64 step;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);

	if (type == V4L2_CTRL_TYPE_MENU)
		qmenu = v4l2_ctrl_get_menu(id);
	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
		qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len);

	if ((!qmenu && !qmenu_int) || (qmenu_int && max > qmenu_int_len)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, mask, def, NULL, 0,
			     flags, qmenu, qmenu_int, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_menu);

/* Helper function for standard menu controls with driver defined menu */
struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops, u32 id, u8 _max,
			u64 mask, u8 _def, const char * const *qmenu)
{
	enum v4l2_ctrl_type type;
	const char *name;
	u32 flags;
	u64 step;
	s64 min;
	s64 max = _max;
	s64 def = _def;

	/* v4l2_ctrl_new_std_menu_items() should only be called for
	 * standard controls without a standard menu.
	 */
	if (v4l2_ctrl_get_menu(id)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, mask, def, NULL, 0,
			     flags, qmenu, NULL, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items);

/* Helper function for standard compound controls */
struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops, u32 id,
			const union v4l2_ctrl_ptr p_def)
{
	const char *name;
	enum v4l2_ctrl_type type;
	u32 flags;
	s64 min, max, def;
	u64 step;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type < V4L2_CTRL_COMPOUND_TYPES) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     min, max, step, def, NULL, 0,
			     flags, NULL, NULL, p_def, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_compound);

/* Helper function for standard integer menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops,
			u32 id, u8 _max, u8 _def, const s64 *qmenu_int)
{
	const char *name;
	enum v4l2_ctrl_type type;
	s64 min;
	u64 step;
	s64 max = _max;
	s64 def = _def;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type != V4L2_CTRL_TYPE_INTEGER_MENU) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, 0, def, NULL, 0,
			     flags, NULL, qmenu_int, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_int_menu);
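/*
 * Two illustrative calls for the menu helpers above (my_ctrl_ops is a
 * driver-provided struct v4l2_ctrl_ops; the ISO values are just examples):
 *
 *	// Standard menu, no skipped items, default 50 Hz:
 *	v4l2_ctrl_new_std_menu(hdl, &my_ctrl_ops,
 *			       V4L2_CID_POWER_LINE_FREQUENCY,
 *			       V4L2_CID_POWER_LINE_FREQUENCY_60HZ, 0,
 *			       V4L2_CID_POWER_LINE_FREQUENCY_50HZ);
 *
 *	// Standard integer menu with driver-defined values:
 *	static const s64 my_iso_menu[] = { 100, 200, 400, 800 };
 *
 *	v4l2_ctrl_new_int_menu(hdl, &my_ctrl_ops, V4L2_CID_ISO_SENSITIVITY,
 *			       ARRAY_SIZE(my_iso_menu) - 1, 0, my_iso_menu);
 */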
/* Add the controls from another handler to our own. */
int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl,
			  struct v4l2_ctrl_handler *add,
			  bool (*filter)(const struct v4l2_ctrl *ctrl),
			  bool from_other_dev)
{
	struct v4l2_ctrl_ref *ref;
	int ret = 0;

	/* Do nothing if either handler is NULL or if they are the same */
	if (!hdl || !add || hdl == add)
		return 0;
	if (hdl->error)
		return hdl->error;
	mutex_lock(add->lock);
	list_for_each_entry(ref, &add->ctrl_refs, node) {
		struct v4l2_ctrl *ctrl = ref->ctrl;

		/* Skip handler-private controls. */
		if (ctrl->is_private)
			continue;
		/* And control classes */
		if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
			continue;
		/* Filter any unwanted controls */
		if (filter && !filter(ctrl))
			continue;
		ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false);
		if (ret)
			break;
	}
	mutex_unlock(add->lock);
	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_add_handler);

bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl)
{
	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX)
		return true;
	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX)
		return true;
	switch (ctrl->id) {
	case V4L2_CID_AUDIO_MUTE:
	case V4L2_CID_AUDIO_VOLUME:
	case V4L2_CID_AUDIO_BALANCE:
	case V4L2_CID_AUDIO_BASS:
	case V4L2_CID_AUDIO_TREBLE:
	case V4L2_CID_AUDIO_LOUDNESS:
		return true;
	default:
		break;
	}
	return false;
}
EXPORT_SYMBOL(v4l2_ctrl_radio_filter);

/* Cluster controls */
void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls)
{
	bool has_volatiles = false;
	int i;

	/* The first control is the master control and it must not be NULL */
	if (WARN_ON(ncontrols == 0 || controls[0] == NULL))
		return;

	for (i = 0; i < ncontrols; i++) {
		if (controls[i]) {
			controls[i]->cluster = controls;
			controls[i]->ncontrols = ncontrols;
			if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE)
				has_volatiles = true;
		}
	}
	controls[0]->has_volatiles = has_volatiles;
}
EXPORT_SYMBOL(v4l2_ctrl_cluster);

void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls,
			    u8 manual_val, bool set_volatile)
{
	struct v4l2_ctrl *master = controls[0];
	u32 flag = 0;
	int i;

	v4l2_ctrl_cluster(ncontrols, controls);
	WARN_ON(ncontrols <= 1);
	WARN_ON(manual_val < master->minimum || manual_val > master->maximum);
	WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl));
	master->is_auto = true;
	master->has_volatiles = set_volatile;
	master->manual_mode_value = manual_val;
	master->flags |= V4L2_CTRL_FLAG_UPDATE;

	if (!is_cur_manual(master))
		flag = V4L2_CTRL_FLAG_INACTIVE |
			(set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0);

	for (i = 1; i < ncontrols; i++)
		if (controls[i])
			controls[i]->flags |= flag;
}
EXPORT_SYMBOL(v4l2_ctrl_auto_cluster);

/*
 * Obtain the current volatile values of an autocluster and mark them
 * as new.
 */
void update_from_auto_cluster(struct v4l2_ctrl *master)
{
	int i;

	for (i = 1; i < master->ncontrols; i++)
		cur_to_new(master->cluster[i]);
	if (!call_op(master, g_volatile_ctrl))
		for (i = 1; i < master->ncontrols; i++)
			if (master->cluster[i])
				master->cluster[i]->is_new = 1;
}
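
/*
 * Illustrative sketch (not part of this file): a typical autocluster pairs an
 * auto control with the manual control(s) it overrides, e.g. auto-exposure
 * with absolute exposure. The "foo" names and ranges are hypothetical; the
 * two control pointers must be adjacent fields in the driver state so that
 * &foo->auto_exp can be treated as an array of two controls.
 *
 *	foo->auto_exp = v4l2_ctrl_new_std_menu(&foo->hdl, &foo_ctrl_ops,
 *			V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_MANUAL, 0,
 *			V4L2_EXPOSURE_AUTO);
 *	foo->exposure = v4l2_ctrl_new_std(&foo->hdl, &foo_ctrl_ops,
 *			V4L2_CID_EXPOSURE_ABSOLUTE, 1, 1000, 1, 100);
 *	v4l2_ctrl_auto_cluster(2, &foo->auto_exp, V4L2_EXPOSURE_MANUAL, true);
 */
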
/*
 * Return non-zero if one or more of the controls in the cluster has a new
 * value that differs from the current value.
 */
static int cluster_changed(struct v4l2_ctrl *master)
{
	bool changed = false;
	int i;

	for (i = 0; i < master->ncontrols; i++) {
		struct v4l2_ctrl *ctrl = master->cluster[i];
		bool ctrl_changed = false;

		if (!ctrl)
			continue;

		if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) {
			changed = true;
			ctrl_changed = true;
		}

		/*
		 * Set has_changed to false to avoid generating
		 * the event V4L2_EVENT_CTRL_CH_VALUE
		 */
		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) {
			ctrl->has_changed = false;
			continue;
		}

		if (ctrl->elems != ctrl->new_elems)
			ctrl_changed = true;
		if (!ctrl_changed)
			ctrl_changed = !ctrl->type_ops->equal(ctrl,
				ctrl->p_cur, ctrl->p_new);
		ctrl->has_changed = ctrl_changed;
		changed |= ctrl->has_changed;
	}
	return changed;
}

/*
 * Core function that calls try/s_ctrl and ensures that the new value is
 * copied to the current value on a set.
 * Must be called with ctrl->handler->lock held.
 */
int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master,
		       bool set, u32 ch_flags)
{
	bool update_flag;
	int ret;
	int i;

	/*
	 * Go through the cluster and either validate the new value or
	 * (if no new value was set), copy the current value to the new
	 * value, ensuring a consistent view for the control ops when
	 * called.
	 */
	for (i = 0; i < master->ncontrols; i++) {
		struct v4l2_ctrl *ctrl = master->cluster[i];

		if (!ctrl)
			continue;

		if (!ctrl->is_new) {
			cur_to_new(ctrl);
			continue;
		}
		/*
		 * Check again: it may have changed since the
		 * previous check in try_or_set_ext_ctrls().
		 */
		if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED))
			return -EBUSY;
	}

	ret = call_op(master, try_ctrl);

	/* Don't set if there is no change */
	if (ret || !set || !cluster_changed(master))
		return ret;
	ret = call_op(master, s_ctrl);
	if (ret)
		return ret;

	/* If OK, then make the new values permanent. */
	update_flag = is_cur_manual(master) != is_new_manual(master);

	for (i = 0; i < master->ncontrols; i++) {
		/*
		 * If we switch from auto to manual mode, and this cluster
		 * contains volatile controls, then all non-master controls
		 * have to be marked as changed. The 'new' value contains
		 * the volatile value (obtained by update_from_auto_cluster),
		 * which now has to become the current value.
		 */
		if (i && update_flag && is_new_manual(master) &&
		    master->has_volatiles && master->cluster[i])
			master->cluster[i]->has_changed = true;

		new_to_cur(fh, master->cluster[i], ch_flags |
			((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0));
	}
	return 0;
}
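
/*
 * Illustrative sketch (not part of this file): when try_or_set_cluster()
 * calls s_ctrl, only the cluster master is passed to the driver; the other
 * members already hold consistent new values and are reached through the
 * driver's own control pointers (or ctrl->cluster[]). The "foo" driver and
 * foo_write_gain() below are hypothetical.
 *
 *	static int foo_s_ctrl(struct v4l2_ctrl *ctrl)
 *	{
 *		struct foo *foo = container_of(ctrl->handler, struct foo, hdl);
 *
 *		switch (ctrl->id) {
 *		case V4L2_CID_AUTOGAIN:
 *			return foo_write_gain(foo, foo->autogain->val,
 *					      foo->gain->val);
 *		}
 *		return -EINVAL;
 *	}
 */
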
/* Activate/deactivate a control. */
void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active)
{
	/* invert since the actual flag is called 'inactive' */
	bool inactive = !active;
	bool old;

	if (ctrl == NULL)
		return;

	if (inactive)
		/* set V4L2_CTRL_FLAG_INACTIVE */
		old = test_and_set_bit(4, &ctrl->flags);
	else
		/* clear V4L2_CTRL_FLAG_INACTIVE */
		old = test_and_clear_bit(4, &ctrl->flags);
	if (old != inactive)
		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
}
EXPORT_SYMBOL(v4l2_ctrl_activate);

void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed)
{
	bool old;

	if (ctrl == NULL)
		return;

	lockdep_assert_held(ctrl->handler->lock);

	if (grabbed)
		/* set V4L2_CTRL_FLAG_GRABBED */
		old = test_and_set_bit(1, &ctrl->flags);
	else
		/* clear V4L2_CTRL_FLAG_GRABBED */
		old = test_and_clear_bit(1, &ctrl->flags);
	if (old != grabbed)
		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
}
EXPORT_SYMBOL(__v4l2_ctrl_grab);

/* Call s_ctrl for all controls owned by the handler */
int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
{
	struct v4l2_ctrl *ctrl;
	int ret = 0;

	if (hdl == NULL)
		return 0;

	lockdep_assert_held(hdl->lock);

	list_for_each_entry(ctrl, &hdl->ctrls, node)
		ctrl->done = false;

	list_for_each_entry(ctrl, &hdl->ctrls, node) {
		struct v4l2_ctrl *master = ctrl->cluster[0];
		int i;

		/* Skip if this control was already handled by a cluster. */
		/* Skip button controls and read-only controls. */
		if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON ||
		    (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY))
			continue;

		for (i = 0; i < master->ncontrols; i++) {
			if (master->cluster[i]) {
				cur_to_new(master->cluster[i]);
				master->cluster[i]->is_new = 1;
				master->cluster[i]->done = true;
			}
		}
		ret = call_op(master, s_ctrl);
		if (ret)
			break;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup);

int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
{
	int ret;

	if (hdl == NULL)
		return 0;

	mutex_lock(hdl->lock);
	ret = __v4l2_ctrl_handler_setup(hdl);
	mutex_unlock(hdl->lock);

	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_handler_setup);
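
/*
 * Illustrative sketch (not part of this file): once all controls have been
 * added at probe time, a sub-device driver typically hooks the handler up
 * and pushes the initial values to the hardware. The "foo" names are
 * hypothetical.
 *
 *	foo->sd.ctrl_handler = &foo->hdl;
 *	ret = v4l2_ctrl_handler_setup(&foo->hdl);
 *	if (ret) {
 *		v4l2_ctrl_handler_free(&foo->hdl);
 *		return ret;
 *	}
 */
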
/* Log the control name and value */
static void log_ctrl(const struct v4l2_ctrl *ctrl,
		     const char *prefix, const char *colon)
{
	if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY))
		return;
	if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
		return;

	pr_info("%s%s%s: ", prefix, colon, ctrl->name);

	ctrl->type_ops->log(ctrl);

	if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE |
			   V4L2_CTRL_FLAG_GRABBED |
			   V4L2_CTRL_FLAG_VOLATILE)) {
		if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE)
			pr_cont(" inactive");
		if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)
			pr_cont(" grabbed");
		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE)
			pr_cont(" volatile");
	}
	pr_cont("\n");
}

/* Log all controls owned by the handler */
void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl,
				  const char *prefix)
{
	struct v4l2_ctrl *ctrl;
	const char *colon = "";
	int len;

	if (!hdl)
		return;
	if (!prefix)
		prefix = "";
	len = strlen(prefix);
	if (len && prefix[len - 1] != ' ')
		colon = ": ";
	mutex_lock(hdl->lock);
	list_for_each_entry(ctrl, &hdl->ctrls, node)
		if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED))
			log_ctrl(ctrl, prefix, colon);
	mutex_unlock(hdl->lock);
}
EXPORT_SYMBOL(v4l2_ctrl_handler_log_status);

int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl,
				    const struct v4l2_ctrl_ops *ctrl_ops,
				    const struct v4l2_fwnode_device_properties *p)
{
	if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) {
		u32 orientation_ctrl;

		switch (p->orientation) {
		case V4L2_FWNODE_ORIENTATION_FRONT:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT;
			break;
		case V4L2_FWNODE_ORIENTATION_BACK:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK;
			break;
		case V4L2_FWNODE_ORIENTATION_EXTERNAL:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL;
			break;
		default:
			return -EINVAL;
		}
		if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops,
					    V4L2_CID_CAMERA_ORIENTATION,
					    V4L2_CAMERA_ORIENTATION_EXTERNAL, 0,
					    orientation_ctrl))
			return hdl->error;
	}

	if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) {
		if (!v4l2_ctrl_new_std(hdl, ctrl_ops,
				       V4L2_CID_CAMERA_SENSOR_ROTATION,
				       p->rotation, p->rotation, 1,
				       p->rotation))
			return hdl->error;
	}

	return hdl->error;
}
EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties);
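
/*
 * Illustrative sketch (not part of this file): a sensor driver usually parses
 * the standard firmware properties once and registers the matching controls
 * in one call. The "foo" and "client" names are hypothetical.
 *
 *	struct v4l2_fwnode_device_properties props;
 *	int ret;
 *
 *	ret = v4l2_fwnode_device_parse(&client->dev, &props);
 *	if (ret)
 *		return ret;
 *	ret = v4l2_ctrl_new_fwnode_properties(&foo->hdl, &foo_ctrl_ops, &props);
 *	if (ret)
 *		return ret;
 */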