// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * V4L2 controls framework core implementation.
 *
 * Copyright (C) 2010-2021 Hans Verkuil <hverkuil-cisco@xs4all.nl>
 */

#include <linux/export.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/v4l2-fwnode.h>

#include "v4l2-ctrls-priv.h"

/*
 * All-zeroes control value pointer; presumably used elsewhere in the
 * framework as a "no value" sentinel (not referenced in this chunk).
 */
static const union v4l2_ctrl_ptr ptr_null;

/*
 * Fill in a V4L2_EVENT_CTRL event for @ctrl, with @changes holding the
 * V4L2_EVENT_CTRL_CH_* bitmask describing what changed.
 */
static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl,
		       u32 changes)
{
	memset(ev, 0, sizeof(*ev));
	ev->type = V4L2_EVENT_CTRL;
	ev->id = ctrl->id;
	ev->u.ctrl.changes = changes;
	ev->u.ctrl.type = ctrl->type;
	ev->u.ctrl.flags = user_flags(ctrl);
	if (ctrl->is_ptr)
		/* Pointer-type controls carry no scalar value in the event */
		ev->u.ctrl.value64 = 0;
	else
		ev->u.ctrl.value64 = *ctrl->p_cur.p_s64;
	ev->u.ctrl.minimum = ctrl->minimum;
	ev->u.ctrl.maximum = ctrl->maximum;
	if (ctrl->type == V4L2_CTRL_TYPE_MENU
	    || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU)
		/* Menu controls step through indices, so report step 1 */
		ev->u.ctrl.step = 1;
	else
		ev->u.ctrl.step = ctrl->step;
	ev->u.ctrl.default_value = ctrl->default_value;
}

/*
 * Queue the initial control event for a filehandle that just subscribed
 * to @ctrl. The current value is only reported if the control is not
 * write-only.
 */
void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl)
{
	struct v4l2_event ev;
	u32 changes = V4L2_EVENT_CTRL_CH_FLAGS;

	if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY))
		changes |= V4L2_EVENT_CTRL_CH_VALUE;
	fill_event(&ev, ctrl, changes);
	v4l2_event_queue_fh(fh, &ev);
}

/*
 * Queue a control change event to all subscribers of @ctrl. The
 * filehandle @fh that caused the change is skipped unless it opted in
 * to feedback via V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK.
 */
void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes)
{
	struct v4l2_event ev;
	struct v4l2_subscribed_event *sev;

	if (list_empty(&ctrl->ev_subs))
		return;
	fill_event(&ev, ctrl, changes);

	list_for_each_entry(sev, &ctrl->ev_subs, node)
		if (sev->fh != fh ||
		    (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK))
			v4l2_event_queue_fh(sev->fh, &ev);
}

/*
 * Default .equal type operation: return true if the @elems control
 * values in @ptr1 and @ptr2 compare equal.
 */
bool v4l2_ctrl_type_op_equal(const struct v4l2_ctrl *ctrl, u32 elems,
			     union v4l2_ctrl_ptr ptr1, union v4l2_ctrl_ptr ptr2)
{
	unsigned int i;

	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_BUTTON:
		/* Buttons carry no state, so they never compare equal */
		return false;
	case V4L2_CTRL_TYPE_STRING:
		for (i = 0; i < elems; i++) {
			unsigned int idx = i * ctrl->elem_size;

			/* strings are always 0-terminated */
			if (strcmp(ptr1.p_char + idx, ptr2.p_char + idx))
				return false;
		}
		return true;
	default:
		/* Everything else is compared as raw bytes */
		return !memcmp(ptr1.p_const, ptr2.p_const,
			       elems * ctrl->elem_size);
	}
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_equal);

/* Default intra MPEG-2 quantisation coefficients, from the specification. */
static const u8 mpeg2_intra_quant_matrix[64] = {
	8,  16, 16, 19, 16, 19, 22, 22,
	22, 22, 22, 22, 26, 24, 26, 27,
	27, 27, 26, 26, 26, 26, 27, 27,
	27, 29, 29, 29, 34, 34, 34, 29,
	29, 29, 27, 27, 29, 29, 32, 32,
	34, 34, 37, 38, 37, 35, 35, 34,
	35, 38, 38, 40, 40, 40, 48, 48,
	46, 46, 56, 56, 58, 69, 69, 83
};

/*
 * Initialize element @idx of a compound control to its default value:
 * the driver-supplied default (p_def) if one exists, otherwise zeroes
 * plus the per-type defaults filled in below.
 */
static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx,
			      union v4l2_ctrl_ptr ptr)
{
	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
	struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant;
	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
	struct v4l2_ctrl_vp9_frame *p_vp9_frame;
	struct v4l2_ctrl_fwht_params *p_fwht_params;
	struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix;
	void *p = ptr.p + idx * ctrl->elem_size;

	if (ctrl->p_def.p_const)
		memcpy(p, ctrl->p_def.p_const, ctrl->elem_size);
	else
		memset(p, 0, ctrl->elem_size);

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		p_mpeg2_sequence = p;

		/* 4:2:0 */
		p_mpeg2_sequence->chroma_format = 1;
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		p_mpeg2_picture = p;

		/* interlaced top field */
		p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD;
		p_mpeg2_picture->picture_coding_type =
					V4L2_MPEG2_PIC_CODING_TYPE_I;
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		p_mpeg2_quant = p;

		/*
		 * NOTE(review): ARRAY_SIZE() is used here as a byte count;
		 * correct only because the matrix elements are u8 — confirm
		 * if the element type ever changes.
		 */
		memcpy(p_mpeg2_quant->intra_quantiser_matrix,
		       mpeg2_intra_quant_matrix,
		       ARRAY_SIZE(mpeg2_intra_quant_matrix));
		/*
		 * The default non-intra MPEG-2 quantisation
		 * coefficients are all 16, as per the specification.
		 */
		memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16,
		       sizeof(p_mpeg2_quant->non_intra_quantiser_matrix));
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		p_vp8_frame = p;
		p_vp8_frame->num_dct_parts = 1;
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		p_vp9_frame = p;
		/* Profile 0, 8-bit, 4:2:0 subsampling */
		p_vp9_frame->profile = 0;
		p_vp9_frame->bit_depth = 8;
		p_vp9_frame->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
			V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING;
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		p_fwht_params = p;
		p_fwht_params->version = V4L2_FWHT_VERSION;
		p_fwht_params->width = 1280;
		p_fwht_params->height = 720;
		p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV |
			(2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET);
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		p_h264_scaling_matrix = p;
		/*
		 * The default (flat) H.264 scaling matrix when none are
		 * specified in the bitstream, this is according to formulas
		 * (7-8) and (7-9) of the specification.
		 */
		memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix));
		break;
	}
}

/*
 * Default .init type operation: initialize elements [@from_idx,
 * @tot_elems) of @ptr to the control's default value.
 */
void v4l2_ctrl_type_op_init(const struct v4l2_ctrl *ctrl, u32 from_idx,
			    u32 tot_elems, union v4l2_ctrl_ptr ptr)
{
	unsigned int i;
	u32 elems = tot_elems - from_idx;

	if (from_idx >= tot_elems)
		return;

	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_STRING:
		/* minimum is the minimum string length: pad with spaces */
		for (i = from_idx; i < tot_elems; i++) {
			unsigned int offset = i * ctrl->elem_size;

			memset(ptr.p_char + offset, ' ', ctrl->minimum);
			ptr.p_char[offset + ctrl->minimum] = '\0';
		}
		break;
	case V4L2_CTRL_TYPE_INTEGER64:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_s64[i] = ctrl->default_value;
		} else {
			/* memset is cheaper than a loop for a zero default */
			memset(ptr.p_s64 + from_idx, 0, elems * sizeof(s64));
		}
		break;
	case V4L2_CTRL_TYPE_INTEGER:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_BITMASK:
	case V4L2_CTRL_TYPE_BOOLEAN:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_s32[i] = ctrl->default_value;
		} else {
			memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32));
		}
		break;
	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		/* These types have no meaningful value: always zero */
		memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32));
		break;
	case V4L2_CTRL_TYPE_U8:
		memset(ptr.p_u8 + from_idx, ctrl->default_value, elems);
		break;
	case V4L2_CTRL_TYPE_U16:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_u16[i] = ctrl->default_value;
		} else {
			memset(ptr.p_u16 + from_idx, 0, elems * sizeof(u16));
		}
		break;
	case V4L2_CTRL_TYPE_U32:
		if (ctrl->default_value) {
			for (i = from_idx; i < tot_elems; i++)
				ptr.p_u32[i] = ctrl->default_value;
		} else {
			memset(ptr.p_u32 + from_idx, 0, elems * sizeof(u32));
		}
		break;
	default:
		/* Compound types get their per-type defaults */
		for (i = from_idx; i < tot_elems; i++)
			std_init_compound(ctrl, i, ptr);
		break;
	}
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_init);

/*
 * Default .log type operation: print the current value of @ctrl,
 * continuing a log line started by the caller (hence pr_cont).
 */
void v4l2_ctrl_type_op_log(const struct v4l2_ctrl *ctrl)
{
	union v4l2_ctrl_ptr ptr = ctrl->p_cur;

	if (ctrl->is_array) {
		unsigned i;

		for (i = 0; i < ctrl->nr_of_dims; i++)
			pr_cont("[%u]", ctrl->dims[i]);
		pr_cont(" ");
	}

	switch (ctrl->type) {
	case V4L2_CTRL_TYPE_INTEGER:
		pr_cont("%d", *ptr.p_s32);
		break;
	case V4L2_CTRL_TYPE_BOOLEAN:
		pr_cont("%s", *ptr.p_s32 ? "true" : "false");
		break;
	case V4L2_CTRL_TYPE_MENU:
		pr_cont("%s", ctrl->qmenu[*ptr.p_s32]);
		break;
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]);
		break;
	case V4L2_CTRL_TYPE_BITMASK:
		pr_cont("0x%08x", *ptr.p_s32);
		break;
	case V4L2_CTRL_TYPE_INTEGER64:
		pr_cont("%lld", *ptr.p_s64);
		break;
	case V4L2_CTRL_TYPE_STRING:
		pr_cont("%s", ptr.p_char);
		break;
	case V4L2_CTRL_TYPE_U8:
		pr_cont("%u", (unsigned)*ptr.p_u8);
		break;
	case V4L2_CTRL_TYPE_U16:
		pr_cont("%u", (unsigned)*ptr.p_u16);
		break;
	case V4L2_CTRL_TYPE_U32:
		pr_cont("%u", (unsigned)*ptr.p_u32);
		break;
	/* Compound types only log their type name, not the payload */
	case V4L2_CTRL_TYPE_H264_SPS:
		pr_cont("H264_SPS");
		break;
	case V4L2_CTRL_TYPE_H264_PPS:
		pr_cont("H264_PPS");
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		pr_cont("H264_SCALING_MATRIX");
		break;
	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		pr_cont("H264_SLICE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		pr_cont("H264_DECODE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		pr_cont("H264_PRED_WEIGHTS");
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		pr_cont("FWHT_PARAMS");
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		pr_cont("VP8_FRAME");
		break;
	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		pr_cont("HDR10_CLL_INFO");
		break;
	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		pr_cont("HDR10_MASTERING_DISPLAY");
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		pr_cont("MPEG2_QUANTISATION");
		break;
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		pr_cont("MPEG2_SEQUENCE");
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		pr_cont("MPEG2_PICTURE");
		break;
	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		pr_cont("VP9_COMPRESSED_HDR");
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		pr_cont("VP9_FRAME");
		break;
	case V4L2_CTRL_TYPE_HEVC_SPS:
		pr_cont("HEVC_SPS");
		break;
	case V4L2_CTRL_TYPE_HEVC_PPS:
		pr_cont("HEVC_PPS");
		break;
	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		pr_cont("HEVC_SLICE_PARAMS");
		break;
	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		pr_cont("HEVC_SCALING_MATRIX");
		break;
	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		pr_cont("HEVC_DECODE_PARAMS");
		break;
	default:
		pr_cont("unknown type %d", ctrl->type);
		break;
	}
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_log);

/*
 * Round towards the closest legal value. Be careful when we are
 * close to the maximum range of the control type to prevent
 * wrap-arounds.
 */
#define ROUND_TO_RANGE(val, offset_type, ctrl)			\
({								\
	offset_type offset;					\
	if ((ctrl)->maximum >= 0 &&				\
	    val >= (ctrl)->maximum - (s32)((ctrl)->step / 2))	\
		val = (ctrl)->maximum;				\
	else							\
		val += (s32)((ctrl)->step / 2);			\
	val = clamp_t(typeof(val), val,				\
		      (ctrl)->minimum, (ctrl)->maximum);	\
	offset = (val) - (ctrl)->minimum;			\
	offset = (ctrl)->step * (offset / (u32)(ctrl)->step);	\
	val = (ctrl)->minimum + offset;				\
	0;							\
})

/* Validate a new control */

/* Helpers to clear the padding/reserved fields of a compound struct. */
#define zero_padding(s) \
	memset(&(s).padding, 0, sizeof((s).padding))
#define zero_reserved(s) \
	memset(&(s).reserved, 0, sizeof((s).reserved))

/* Validate the VP9 loop filter parameters against the VP9 spec ranges. */
static int
validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf)
{
	unsigned int i;

	if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED |
			  V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE))
		return -EINVAL;

	/* That all values are in the accepted range. */
	if (lf->level > GENMASK(5, 0))
		return -EINVAL;

	if (lf->sharpness > GENMASK(2, 0))
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++)
		if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63)
			return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++)
		if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63)
			return -EINVAL;

	zero_reserved(*lf);
	return 0;
}

/* Validate the VP9 quantization deltas (each must be in [-15, 15]). */
static int
validate_vp9_quant_params(struct v4l2_vp9_quantization *quant)
{
	if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 ||
	    quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 ||
	    quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15)
		return -EINVAL;

	zero_reserved(*quant);
	return 0;
}

/* Validate the VP9 segmentation parameters. */
static int
validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg)
{
	unsigned int i, j;

	if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED |
			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP |
			   V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE |
			   V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA |
			   V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) {
		if (seg->feature_enabled[i] &
		    ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK)
			return -EINVAL;
	}

	for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) {
		/* Valid magnitude per feature: ALT_Q, ALT_L, REF_FRAME, SKIP */
		static const int range[] = { 255, 63, 3, 0 };

		/*
		 * NOTE(review): ARRAY_SIZE(seg->feature_data[j]) indexes with
		 * the inner loop variable; [i] reads more naturally, but the
		 * inner dimension is the same for every index so the bound is
		 * identical either way (sizeof is not evaluated at runtime).
		 */
		for (j = 0; j < ARRAY_SIZE(seg->feature_data[j]); j++) {
			if (seg->feature_data[i][j] < -range[j] ||
			    seg->feature_data[i][j] > range[j])
				return -EINVAL;
		}
	}

	zero_reserved(*seg);
	return 0;
}

/* Validate a VP9 compressed header control value. */
static int
validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr)
{
	if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT)
		return -EINVAL;

	return 0;
}

/* Validate a VP9 frame control value against the VP9 specification. */
static int
validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame)
{
	int ret;

	/* Make sure we're not passed invalid flags. */
	if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
		  V4L2_VP9_FRAME_FLAG_SHOW_FRAME |
		  V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT |
		  V4L2_VP9_FRAME_FLAG_INTRA_ONLY |
		  V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV |
		  V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX |
		  V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE |
		  V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING |
		  V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING |
		  V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING))
		return -EINVAL;

	/* Error-resilient mode implies the frame context is not refreshed */
	if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT &&
	    frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX)
		return -EINVAL;

	if (frame->profile > V4L2_VP9_PROFILE_MAX)
		return -EINVAL;

	if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL)
		return -EINVAL;

	if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX)
		return -EINVAL;

	/*
	 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10
	 * and 12 bit depths.
	 */
	if ((frame->profile < 2 && frame->bit_depth != 8) ||
	    (frame->profile >= 2 &&
	     (frame->bit_depth != 10 && frame->bit_depth != 12)))
		return -EINVAL;

	/* Profile 0 and 2 only accept YUV 4:2:0. */
	if ((frame->profile == 0 || frame->profile == 2) &&
	    (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) ||
	     !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
		return -EINVAL;

	/* Profile 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */
	if ((frame->profile == 1 || frame->profile == 3) &&
	    ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) &&
	     (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING)))
		return -EINVAL;

	if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE)
		return -EINVAL;

	/*
	 * According to the spec, tile_cols_log2 shall be less than or equal
	 * to 6.
	 */
	if (frame->tile_cols_log2 > 6)
		return -EINVAL;

	if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT)
		return -EINVAL;

	ret = validate_vp9_lf_params(&frame->lf);
	if (ret)
		return ret;

	ret = validate_vp9_quant_params(&frame->quant);
	if (ret)
		return ret;

	ret = validate_vp9_seg_params(&frame->seg);
	if (ret)
		return ret;

	zero_reserved(*frame);
	return 0;
}

/*
 * Compound controls validation requires setting unused fields/flags to zero
 * in order to properly detect unchanged controls with v4l2_ctrl_type_op_equal's
 * memcmp.
 */
static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx,
				 union v4l2_ctrl_ptr ptr)
{
	struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
	struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
	struct v4l2_ctrl_vp8_frame *p_vp8_frame;
	struct v4l2_ctrl_fwht_params *p_fwht_params;
	struct v4l2_ctrl_h264_sps *p_h264_sps;
	struct v4l2_ctrl_h264_pps *p_h264_pps;
	struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights;
	struct v4l2_ctrl_h264_slice_params *p_h264_slice_params;
	struct v4l2_ctrl_h264_decode_params *p_h264_dec_params;
	struct v4l2_ctrl_hevc_sps *p_hevc_sps;
	struct v4l2_ctrl_hevc_pps *p_hevc_pps;
	struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering;
	struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params;
	struct v4l2_area *area;
	void *p = ptr.p + idx * ctrl->elem_size;
	unsigned int i;

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		p_mpeg2_sequence = p;

		switch (p_mpeg2_sequence->chroma_format) {
		case 1: /* 4:2:0 */
		case 2: /* 4:2:2 */
		case 3: /* 4:4:4 */
			break;
		default:
			return -EINVAL;
		}
		break;

	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		p_mpeg2_picture = p;

		switch (p_mpeg2_picture->intra_dc_precision) {
		case 0: /* 8 bits */
		case 1: /* 9 bits */
		case 2: /* 10 bits */
		case 3: /* 11 bits */
			break;
		default:
			return -EINVAL;
		}

		switch (p_mpeg2_picture->picture_structure) {
		case V4L2_MPEG2_PIC_TOP_FIELD:
		case V4L2_MPEG2_PIC_BOTTOM_FIELD:
		case V4L2_MPEG2_PIC_FRAME:
			break;
		default:
			return -EINVAL;
		}

		switch (p_mpeg2_picture->picture_coding_type) {
		case V4L2_MPEG2_PIC_CODING_TYPE_I:
		case V4L2_MPEG2_PIC_CODING_TYPE_P:
		case V4L2_MPEG2_PIC_CODING_TYPE_B:
			break;
		default:
			return -EINVAL;
		}
		zero_reserved(*p_mpeg2_picture);
		break;

	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		/* All quantisation matrix values are valid */
		break;

	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		p_fwht_params = p;
		if (p_fwht_params->version < V4L2_FWHT_VERSION)
			return -EINVAL;
		if (!p_fwht_params->width || !p_fwht_params->height)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SPS:
		p_h264_sps = p;

		/*
		 * Some syntax elements are only conditionally valid:
		 * zero out the fields that the current pic_order_cnt_type
		 * does not use so memcmp-based change detection works.
		 */
		if (p_h264_sps->pic_order_cnt_type != 0) {
			p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
		} else if (p_h264_sps->pic_order_cnt_type != 1) {
			/*
			 * NOTE(review): inside this else branch
			 * pic_order_cnt_type == 0, so the != 1 test is always
			 * true; type-1-only fields are not cleared when
			 * pic_order_cnt_type == 2. Confirm against upstream
			 * intent before changing.
			 */
			p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0;
			p_h264_sps->offset_for_non_ref_pic = 0;
			p_h264_sps->offset_for_top_to_bottom_field = 0;
			memset(&p_h264_sps->offset_for_ref_frame, 0,
			       sizeof(p_h264_sps->offset_for_ref_frame));
		}

		if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) {
			/* Profiles without a chroma format imply 4:2:0 */
			p_h264_sps->chroma_format_idc = 1;
			p_h264_sps->bit_depth_luma_minus8 = 0;
			p_h264_sps->bit_depth_chroma_minus8 = 0;

			p_h264_sps->flags &=
				~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS;

			if (p_h264_sps->chroma_format_idc < 3)
				p_h264_sps->flags &=
					~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE;
		}

		if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY)
			p_h264_sps->flags &=
				~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD;

		/*
		 * Chroma 4:2:2 format require at least High 4:2:2 profile.
		 *
		 * The H264 specification and well-known parser implementations
		 * use profile-idc values directly, as that is clearer and
		 * less ambiguous. We do the same here.
		 */
		if (p_h264_sps->profile_idc < 122 &&
		    p_h264_sps->chroma_format_idc > 1)
			return -EINVAL;
		/* Chroma 4:4:4 format require at least High 4:2:2 profile */
		if (p_h264_sps->profile_idc < 244 &&
		    p_h264_sps->chroma_format_idc > 2)
			return -EINVAL;
		if (p_h264_sps->chroma_format_idc > 3)
			return -EINVAL;

		if (p_h264_sps->bit_depth_luma_minus8 > 6)
			return -EINVAL;
		if (p_h264_sps->bit_depth_chroma_minus8 > 6)
			return -EINVAL;
		if (p_h264_sps->log2_max_frame_num_minus4 > 12)
			return -EINVAL;
		if (p_h264_sps->pic_order_cnt_type > 2)
			return -EINVAL;
		if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12)
			return -EINVAL;
		if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_PPS:
		p_h264_pps = p;

		if (p_h264_pps->num_slice_groups_minus1 > 7)
			return -EINVAL;
		if (p_h264_pps->num_ref_idx_l0_default_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_pps->num_ref_idx_l1_default_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_pps->weighted_bipred_idc > 2)
			return -EINVAL;
		/*
		 * pic_init_qp_minus26 shall be in the range of
		 * -(26 + QpBdOffset_y) to +25, inclusive,
		 *  where QpBdOffset_y is 6 * bit_depth_luma_minus8
		 */
		if (p_h264_pps->pic_init_qp_minus26 < -62 ||
		    p_h264_pps->pic_init_qp_minus26 > 25)
			return -EINVAL;
		if (p_h264_pps->pic_init_qs_minus26 < -26 ||
		    p_h264_pps->pic_init_qs_minus26 > 25)
			return -EINVAL;
		if (p_h264_pps->chroma_qp_index_offset < -12 ||
		    p_h264_pps->chroma_qp_index_offset > 12)
			return -EINVAL;
		if (p_h264_pps->second_chroma_qp_index_offset < -12 ||
		    p_h264_pps->second_chroma_qp_index_offset > 12)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		/* All scaling matrix values are valid */
		break;

	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		p_h264_pred_weights = p;

		if (p_h264_pred_weights->luma_log2_weight_denom > 7)
			return -EINVAL;
		if (p_h264_pred_weights->chroma_log2_weight_denom > 7)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		p_h264_slice_params = p;

		/* Spatial MV prediction only applies to B slices */
		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
			p_h264_slice_params->flags &=
				~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED;

		if (p_h264_slice_params->colour_plane_id > 2)
			return -EINVAL;
		if (p_h264_slice_params->cabac_init_idc > 2)
			return -EINVAL;
		if (p_h264_slice_params->disable_deblocking_filter_idc > 2)
			return -EINVAL;
		if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 ||
		    p_h264_slice_params->slice_alpha_c0_offset_div2 > 6)
			return -EINVAL;
		if (p_h264_slice_params->slice_beta_offset_div2 < -6 ||
		    p_h264_slice_params->slice_beta_offset_div2 > 6)
			return -EINVAL;

		/* Reference list sizes only apply to slice types that use them */
		if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I ||
		    p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI)
			p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0;
		if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B)
			p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0;

		if (p_h264_slice_params->num_ref_idx_l0_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		if (p_h264_slice_params->num_ref_idx_l1_active_minus1 >
		    (V4L2_H264_REF_LIST_LEN - 1))
			return -EINVAL;
		zero_reserved(*p_h264_slice_params);
		break;

	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		p_h264_dec_params = p;

		if (p_h264_dec_params->nal_ref_idc > 3)
			return -EINVAL;
		for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) {
			struct v4l2_h264_dpb_entry *dpb_entry =
				&p_h264_dec_params->dpb[i];

			zero_reserved(*dpb_entry);
		}
		zero_reserved(*p_h264_dec_params);
		break;

	case V4L2_CTRL_TYPE_VP8_FRAME:
		p_vp8_frame = p;

		/* num_dct_parts must be a power of two in [1, 8] */
		switch (p_vp8_frame->num_dct_parts) {
		case 1:
		case 2:
		case 4:
		case 8:
			break;
		default:
			return -EINVAL;
		}
		zero_padding(p_vp8_frame->segment);
		zero_padding(p_vp8_frame->lf);
		zero_padding(p_vp8_frame->quant);
		zero_padding(p_vp8_frame->entropy);
		zero_padding(p_vp8_frame->coder_state);
		break;

	case V4L2_CTRL_TYPE_HEVC_SPS:
		p_hevc_sps = p;

		/* Clear fields that are only valid when their flag is set */
		if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) {
			p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0;
			p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0;
			p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0;
			p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0;
		}

		if (!(p_hevc_sps->flags &
		      V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT))
			p_hevc_sps->num_long_term_ref_pics_sps = 0;
		break;

	case V4L2_CTRL_TYPE_HEVC_PPS:
		p_hevc_pps = p;

		if (!(p_hevc_pps->flags &
		      V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED))
			p_hevc_pps->diff_cu_qp_delta_depth = 0;

		if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) {
			p_hevc_pps->num_tile_columns_minus1 = 0;
			p_hevc_pps->num_tile_rows_minus1 = 0;
			memset(&p_hevc_pps->column_width_minus1, 0,
			       sizeof(p_hevc_pps->column_width_minus1));
			memset(&p_hevc_pps->row_height_minus1, 0,
			       sizeof(p_hevc_pps->row_height_minus1));

			p_hevc_pps->flags &=
				~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED;
		}

		if (p_hevc_pps->flags &
		    V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) {
			p_hevc_pps->pps_beta_offset_div2 = 0;
			p_hevc_pps->pps_tc_offset_div2 = 0;
		}
		break;

	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		p_hevc_decode_params = p;

		if (p_hevc_decode_params->num_active_dpb_entries >
		    V4L2_HEVC_DPB_ENTRIES_NUM_MAX)
			return -EINVAL;
		break;

	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		/* No field constraints to enforce */
		break;

	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		/* No field constraints to enforce */
		break;

	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		p_hdr10_mastering = p;

		for (i = 0; i < 3; ++i) {
			if (p_hdr10_mastering->display_primaries_x[i] <
				V4L2_HDR10_MASTERING_PRIMARIES_X_LOW ||
			    p_hdr10_mastering->display_primaries_x[i] >
				V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH ||
			    p_hdr10_mastering->display_primaries_y[i] <
				V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW ||
			    p_hdr10_mastering->display_primaries_y[i] >
				V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH)
				return -EINVAL;
		}

		if (p_hdr10_mastering->white_point_x <
			V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW ||
		    p_hdr10_mastering->white_point_x >
			V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH ||
		    p_hdr10_mastering->white_point_y <
			V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW ||
		    p_hdr10_mastering->white_point_y >
			V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH)
			return -EINVAL;

		if (p_hdr10_mastering->max_display_mastering_luminance <
			V4L2_HDR10_MASTERING_MAX_LUMA_LOW ||
		    p_hdr10_mastering->max_display_mastering_luminance >
			V4L2_HDR10_MASTERING_MAX_LUMA_HIGH ||
		    p_hdr10_mastering->min_display_mastering_luminance <
			V4L2_HDR10_MASTERING_MIN_LUMA_LOW ||
		    p_hdr10_mastering->min_display_mastering_luminance >
			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
			return -EINVAL;

		/* The following restriction comes from ITU-T Rec. H.265 spec */
		if (p_hdr10_mastering->max_display_mastering_luminance ==
			V4L2_HDR10_MASTERING_MAX_LUMA_LOW &&
		    p_hdr10_mastering->min_display_mastering_luminance ==
			V4L2_HDR10_MASTERING_MIN_LUMA_HIGH)
			return -EINVAL;

		break;

	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		/* All scaling matrix values are valid */
		break;

	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		return validate_vp9_compressed_hdr(p);

	case V4L2_CTRL_TYPE_VP9_FRAME:
		return validate_vp9_frame(p);

	case V4L2_CTRL_TYPE_AREA:
		area = p;
		if (!area->width || !area->height)
			return -EINVAL;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

/*
 * Validate (and, where allowed, round/clamp in place) element @idx of
 * the control value in @ptr. Returns 0 on success or a negative errno.
 */
static int std_validate_elem(const struct v4l2_ctrl *ctrl, u32 idx,
			     union v4l2_ctrl_ptr ptr)
{
	size_t len;
	u64 offset;
	s64 val;

	switch ((u32)ctrl->type) {
	case V4L2_CTRL_TYPE_INTEGER:
		return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl);
	case V4L2_CTRL_TYPE_INTEGER64:
		/*
		 * We can't use the ROUND_TO_RANGE define here due to
		 * the u64 divide that needs special care.
		 */
		val = ptr.p_s64[idx];
		if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2))
			val = ctrl->maximum;
		else
			val += (s64)(ctrl->step / 2);
		val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum);
		offset = val - ctrl->minimum;
		do_div(offset, ctrl->step);
		ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step;
		return 0;
	case V4L2_CTRL_TYPE_U8:
		return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl);
	case V4L2_CTRL_TYPE_U16:
		return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl);
	case V4L2_CTRL_TYPE_U32:
		return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl);

	case V4L2_CTRL_TYPE_BOOLEAN:
		/* Normalize any non-zero value to 1 */
		ptr.p_s32[idx] = !!ptr.p_s32[idx];
		return 0;

	case V4L2_CTRL_TYPE_MENU:
	case V4L2_CTRL_TYPE_INTEGER_MENU:
		if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum)
			return -ERANGE;
		/* Reject menu items the driver marked as skipped */
		if (ptr.p_s32[idx] < BITS_PER_LONG_LONG &&
		    (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx])))
			return -EINVAL;
		/* An empty menu string means the item is invalid */
		if (ctrl->type == V4L2_CTRL_TYPE_MENU &&
		    ctrl->qmenu[ptr.p_s32[idx]][0] == '\0')
			return -EINVAL;
		return 0;

	case V4L2_CTRL_TYPE_BITMASK:
		/* maximum doubles as the mask of valid bits */
		ptr.p_s32[idx] &= ctrl->maximum;
		return 0;

	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		ptr.p_s32[idx] = 0;
		return 0;

	case V4L2_CTRL_TYPE_STRING:
		idx *= ctrl->elem_size;
		len = strlen(ptr.p_char + idx);
		if (len < ctrl->minimum)
			return -ERANGE;
		/* Length must be minimum plus a multiple of step */
		if ((len - (u32)ctrl->minimum) % (u32)ctrl->step)
			return -ERANGE;
		return 0;

	default:
		return std_validate_compound(ctrl, idx, ptr);
	}
}

/*
 * Default .validate type operation: validate all @elems elements of
 * @ptr, with fast paths for full-range unsigned array types.
 */
int v4l2_ctrl_type_op_validate(const struct v4l2_ctrl *ctrl, u32 elems,
			       union v4l2_ctrl_ptr ptr)
{
	unsigned int i;
	int ret = 0;

	switch ((u32)ctrl->type) {
	/* Full-range types need no per-element validation */
	case V4L2_CTRL_TYPE_U8:
		if (ctrl->maximum == 0xff && ctrl->minimum == 0 && ctrl->step == 1)
			return 0;
		break;
	case V4L2_CTRL_TYPE_U16:
		if (ctrl->maximum == 0xffff && ctrl->minimum == 0 && ctrl->step == 1)
			return 0;
		break;
	case V4L2_CTRL_TYPE_U32:
		if (ctrl->maximum == 0xffffffff && ctrl->minimum == 0 && ctrl->step == 1)
			return 0;
		break;

	case V4L2_CTRL_TYPE_BUTTON:
	case V4L2_CTRL_TYPE_CTRL_CLASS:
		memset(ptr.p_s32, 0, elems * sizeof(s32));
		return 0;
	}

	for (i = 0; !ret && i < elems; i++)
		ret = std_validate_elem(ctrl, i, ptr);
	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_type_op_validate);

/* Default type operations used when a driver supplies none. */
static const struct v4l2_ctrl_type_ops std_type_ops = {
	.equal = v4l2_ctrl_type_op_equal,
	.init = v4l2_ctrl_type_op_init,
	.log = v4l2_ctrl_type_op_log,
	.validate = v4l2_ctrl_type_op_validate,
};

/*
 * Install (or, with a NULL @notify, remove) a notify callback for
 * @ctrl. The handler supports only a single notify callback; a second,
 * different callback triggers a WARN and is ignored.
 */
void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv)
{
	if (!ctrl)
		return;
	if (!notify) {
		ctrl->call_notify = 0;
		return;
	}
	if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify))
		return;
	ctrl->handler->notify = notify;
	ctrl->handler->notify_priv = priv;
	ctrl->call_notify = 1;
}
EXPORT_SYMBOL(v4l2_ctrl_notify);

/* Copy the one value to another. */
static void ptr_to_ptr(struct v4l2_ctrl *ctrl,
		       union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to,
		       unsigned int elems)
{
	if (ctrl == NULL)
		return;
	memcpy(to.p, from.p_const, elems * ctrl->elem_size);
}

/*
 * Copy the new value to the current value, sending change events and
 * updating auto-cluster flags as needed. @ch_flags carries extra
 * V4L2_EVENT_CTRL_CH_* bits to report (only set for auto clusters).
 */
void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags)
{
	bool changed;

	if (ctrl == NULL)
		return;

	/* has_changed is set by cluster_changed */
	changed = ctrl->has_changed;
	if (changed) {
		if (ctrl->is_dyn_array)
			ctrl->elems = ctrl->new_elems;
		ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur, ctrl->elems);
	}

	if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) {
		/* Note: CH_FLAGS is only set for auto clusters. */
		ctrl->flags &=
			~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE);
		if (!is_cur_manual(ctrl->cluster[0])) {
			ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE;
			if (ctrl->cluster[0]->has_volatiles)
				ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE;
		}
		/* Flag changes go to every subscriber, including @fh */
		fh = NULL;
	}
	if (changed || ch_flags) {
		/* If a control was changed that was not one of the controls
		   modified by the application, then send the event to all. */
		if (!ctrl->is_new)
			fh = NULL;
		send_event(fh, ctrl,
			   (changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags);
		if (ctrl->call_notify && changed && ctrl->handler->notify)
			ctrl->handler->notify(ctrl, ctrl->handler->notify_priv);
	}
}

/* Copy the current value to the new value */
void cur_to_new(struct v4l2_ctrl *ctrl)
{
	if (ctrl == NULL)
		return;
	if (ctrl->is_dyn_array)
		ctrl->new_elems = ctrl->elems;
	ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems);
}

/*
 * Make sure the request value buffer of @ref can hold @elems elements.
 * Returns true on success; on allocation failure the old buffer is kept
 * and p_req_array_enomem is set so the error can be reported later.
 */
static bool req_alloc_array(struct v4l2_ctrl_ref *ref, u32 elems)
{
	void *tmp;

	if (elems == ref->p_req_array_alloc_elems)
		return true;
	/* Dynamic arrays may shrink without reallocating */
	if (ref->ctrl->is_dyn_array &&
	    elems < ref->p_req_array_alloc_elems)
		return true;

	tmp = kvmalloc(elems * ref->ctrl->elem_size, GFP_KERNEL);

	if (!tmp) {
		ref->p_req_array_enomem = true;
		return false;
	}
	ref->p_req_array_enomem = false;
	kvfree(ref->p_req.p);
	ref->p_req.p = tmp;
	ref->p_req_array_alloc_elems = elems;
	return true;
}

/* Copy the new value to the request value */
void new_to_req(struct v4l2_ctrl_ref *ref)
{
	struct v4l2_ctrl *ctrl;

	if (!ref)
		return;

	ctrl = ref->ctrl;
	if (ctrl->is_array && !req_alloc_array(ref, ctrl->new_elems))
		return;

	ref->p_req_elems = ctrl->new_elems;
	ptr_to_ptr(ctrl, ctrl->p_new, ref->p_req, ref->p_req_elems);
	ref->p_req_valid = true;
}

/* Copy the current value to the request value */
void cur_to_req(struct v4l2_ctrl_ref *ref)
{
	struct v4l2_ctrl *ctrl;

	if (!ref)
		return;

	ctrl = ref->ctrl;
	if (ctrl->is_array && !req_alloc_array(ref, ctrl->elems))
		return;

	ref->p_req_elems = ctrl->elems;
	ptr_to_ptr(ctrl, ctrl->p_cur, ref->p_req, ctrl->elems);
	ref->p_req_valid = true;
}

/* Copy the request value to the new value */
int req_to_new(struct v4l2_ctrl_ref *ref)
1171 { 1172 struct v4l2_ctrl *ctrl; 1173 1174 if (!ref) 1175 return 0; 1176 1177 ctrl = ref->ctrl; 1178 1179 /* 1180 * This control was never set in the request, so just use the current 1181 * value. 1182 */ 1183 if (!ref->p_req_valid) { 1184 if (ctrl->is_dyn_array) 1185 ctrl->new_elems = ctrl->elems; 1186 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems); 1187 return 0; 1188 } 1189 1190 /* Not an array, so just copy the request value */ 1191 if (!ctrl->is_array) { 1192 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems); 1193 return 0; 1194 } 1195 1196 /* Sanity check, should never happen */ 1197 if (WARN_ON(!ref->p_req_array_alloc_elems)) 1198 return -ENOMEM; 1199 1200 if (!ctrl->is_dyn_array && 1201 ref->p_req_elems != ctrl->p_array_alloc_elems) 1202 return -ENOMEM; 1203 1204 /* 1205 * Check if the number of elements in the request is more than the 1206 * elements in ctrl->p_array. If so, attempt to realloc ctrl->p_array. 1207 * Note that p_array is allocated with twice the number of elements 1208 * in the dynamic array since it has to store both the current and 1209 * new value of such a control. 
1210 */ 1211 if (ref->p_req_elems > ctrl->p_array_alloc_elems) { 1212 unsigned int sz = ref->p_req_elems * ctrl->elem_size; 1213 void *old = ctrl->p_array; 1214 void *tmp = kvzalloc(2 * sz, GFP_KERNEL); 1215 1216 if (!tmp) 1217 return -ENOMEM; 1218 memcpy(tmp, ctrl->p_new.p, ctrl->elems * ctrl->elem_size); 1219 memcpy(tmp + sz, ctrl->p_cur.p, ctrl->elems * ctrl->elem_size); 1220 ctrl->p_new.p = tmp; 1221 ctrl->p_cur.p = tmp + sz; 1222 ctrl->p_array = tmp; 1223 ctrl->p_array_alloc_elems = ref->p_req_elems; 1224 kvfree(old); 1225 } 1226 1227 ctrl->new_elems = ref->p_req_elems; 1228 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems); 1229 return 0; 1230 } 1231 1232 /* Control range checking */ 1233 int check_range(enum v4l2_ctrl_type type, 1234 s64 min, s64 max, u64 step, s64 def) 1235 { 1236 switch (type) { 1237 case V4L2_CTRL_TYPE_BOOLEAN: 1238 if (step != 1 || max > 1 || min < 0) 1239 return -ERANGE; 1240 fallthrough; 1241 case V4L2_CTRL_TYPE_U8: 1242 case V4L2_CTRL_TYPE_U16: 1243 case V4L2_CTRL_TYPE_U32: 1244 case V4L2_CTRL_TYPE_INTEGER: 1245 case V4L2_CTRL_TYPE_INTEGER64: 1246 if (step == 0 || min > max || def < min || def > max) 1247 return -ERANGE; 1248 return 0; 1249 case V4L2_CTRL_TYPE_BITMASK: 1250 if (step || min || !max || (def & ~max)) 1251 return -ERANGE; 1252 return 0; 1253 case V4L2_CTRL_TYPE_MENU: 1254 case V4L2_CTRL_TYPE_INTEGER_MENU: 1255 if (min > max || def < min || def > max) 1256 return -ERANGE; 1257 /* Note: step == menu_skip_mask for menu controls. 1258 So here we check if the default value is masked out. 
*/ 1259 if (step && ((1 << def) & step)) 1260 return -EINVAL; 1261 return 0; 1262 case V4L2_CTRL_TYPE_STRING: 1263 if (min > max || min < 0 || step < 1 || def) 1264 return -ERANGE; 1265 return 0; 1266 default: 1267 return 0; 1268 } 1269 } 1270 1271 /* Set the handler's error code if it wasn't set earlier already */ 1272 static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err) 1273 { 1274 if (hdl->error == 0) 1275 hdl->error = err; 1276 return err; 1277 } 1278 1279 /* Initialize the handler */ 1280 int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl, 1281 unsigned nr_of_controls_hint, 1282 struct lock_class_key *key, const char *name) 1283 { 1284 mutex_init(&hdl->_lock); 1285 hdl->lock = &hdl->_lock; 1286 lockdep_set_class_and_name(hdl->lock, key, name); 1287 INIT_LIST_HEAD(&hdl->ctrls); 1288 INIT_LIST_HEAD(&hdl->ctrl_refs); 1289 hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8; 1290 hdl->buckets = kvcalloc(hdl->nr_of_buckets, sizeof(hdl->buckets[0]), 1291 GFP_KERNEL); 1292 hdl->error = hdl->buckets ? 
0 : -ENOMEM; 1293 v4l2_ctrl_handler_init_request(hdl); 1294 return hdl->error; 1295 } 1296 EXPORT_SYMBOL(v4l2_ctrl_handler_init_class); 1297 1298 /* Free all controls and control refs */ 1299 void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl) 1300 { 1301 struct v4l2_ctrl_ref *ref, *next_ref; 1302 struct v4l2_ctrl *ctrl, *next_ctrl; 1303 struct v4l2_subscribed_event *sev, *next_sev; 1304 1305 if (hdl == NULL || hdl->buckets == NULL) 1306 return; 1307 1308 v4l2_ctrl_handler_free_request(hdl); 1309 1310 mutex_lock(hdl->lock); 1311 /* Free all nodes */ 1312 list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) { 1313 list_del(&ref->node); 1314 if (ref->p_req_array_alloc_elems) 1315 kvfree(ref->p_req.p); 1316 kfree(ref); 1317 } 1318 /* Free all controls owned by the handler */ 1319 list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) { 1320 list_del(&ctrl->node); 1321 list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node) 1322 list_del(&sev->node); 1323 kvfree(ctrl->p_array); 1324 kvfree(ctrl); 1325 } 1326 kvfree(hdl->buckets); 1327 hdl->buckets = NULL; 1328 hdl->cached = NULL; 1329 hdl->error = 0; 1330 mutex_unlock(hdl->lock); 1331 mutex_destroy(&hdl->_lock); 1332 } 1333 EXPORT_SYMBOL(v4l2_ctrl_handler_free); 1334 1335 /* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer 1336 be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing 1337 with applications that do not use the NEXT_CTRL flag. 1338 1339 We just find the n-th private user control. It's O(N), but that should not 1340 be an issue in this particular case. */ 1341 static struct v4l2_ctrl_ref *find_private_ref( 1342 struct v4l2_ctrl_handler *hdl, u32 id) 1343 { 1344 struct v4l2_ctrl_ref *ref; 1345 1346 id -= V4L2_CID_PRIVATE_BASE; 1347 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 1348 /* Search for private user controls that are compatible with 1349 VIDIOC_G/S_CTRL. 
*/ 1350 if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER && 1351 V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) { 1352 if (!ref->ctrl->is_int) 1353 continue; 1354 if (id == 0) 1355 return ref; 1356 id--; 1357 } 1358 } 1359 return NULL; 1360 } 1361 1362 /* Find a control with the given ID. */ 1363 struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id) 1364 { 1365 struct v4l2_ctrl_ref *ref; 1366 int bucket; 1367 1368 id &= V4L2_CTRL_ID_MASK; 1369 1370 /* Old-style private controls need special handling */ 1371 if (id >= V4L2_CID_PRIVATE_BASE) 1372 return find_private_ref(hdl, id); 1373 bucket = id % hdl->nr_of_buckets; 1374 1375 /* Simple optimization: cache the last control found */ 1376 if (hdl->cached && hdl->cached->ctrl->id == id) 1377 return hdl->cached; 1378 1379 /* Not in cache, search the hash */ 1380 ref = hdl->buckets ? hdl->buckets[bucket] : NULL; 1381 while (ref && ref->ctrl->id != id) 1382 ref = ref->next; 1383 1384 if (ref) 1385 hdl->cached = ref; /* cache it! */ 1386 return ref; 1387 } 1388 1389 /* Find a control with the given ID. Take the handler's lock first. */ 1390 struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id) 1391 { 1392 struct v4l2_ctrl_ref *ref = NULL; 1393 1394 if (hdl) { 1395 mutex_lock(hdl->lock); 1396 ref = find_ref(hdl, id); 1397 mutex_unlock(hdl->lock); 1398 } 1399 return ref; 1400 } 1401 1402 /* Find a control with the given ID. */ 1403 struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id) 1404 { 1405 struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id); 1406 1407 return ref ? ref->ctrl : NULL; 1408 } 1409 EXPORT_SYMBOL(v4l2_ctrl_find); 1410 1411 /* Allocate a new v4l2_ctrl_ref and hook it into the handler. 
 */
int handler_new_ref(struct v4l2_ctrl_handler *hdl,
		    struct v4l2_ctrl *ctrl,
		    struct v4l2_ctrl_ref **ctrl_ref,
		    bool from_other_dev, bool allocate_req)
{
	struct v4l2_ctrl_ref *ref;
	struct v4l2_ctrl_ref *new_ref;
	u32 id = ctrl->id;
	u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1;
	int bucket = id % hdl->nr_of_buckets; /* which bucket to use */
	unsigned int size_extra_req = 0;

	if (ctrl_ref)
		*ctrl_ref = NULL;

	/*
	 * Automatically add the control class if it is not yet present and
	 * the new control is not a compound control.
	 */
	if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES &&
	    id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL)
		if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0))
			return hdl->error;

	if (hdl->error)
		return hdl->error;

	/* Non-array request values are stored inline after the ref itself */
	if (allocate_req && !ctrl->is_array)
		size_extra_req = ctrl->elems * ctrl->elem_size;
	new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL);
	if (!new_ref)
		return handler_set_err(hdl, -ENOMEM);
	new_ref->ctrl = ctrl;
	new_ref->from_other_dev = from_other_dev;
	if (size_extra_req)
		new_ref->p_req.p = &new_ref[1];

	INIT_LIST_HEAD(&new_ref->node);

	mutex_lock(hdl->lock);

	/* Add immediately at the end of the list if the list is empty, or if
	   the last element in the list has a lower ID.
	   This ensures that when elements are added in ascending order the
	   insertion is an O(1) operation. */
	if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) {
		list_add_tail(&new_ref->node, &hdl->ctrl_refs);
		goto insert_in_hash;
	}

	/* Find insert position in sorted list */
	list_for_each_entry(ref, &hdl->ctrl_refs, node) {
		if (ref->ctrl->id < id)
			continue;
		/* Don't add duplicates */
		if (ref->ctrl->id == id) {
			kfree(new_ref);
			goto unlock;
		}
		list_add(&new_ref->node, ref->node.prev);
		break;
	}

insert_in_hash:
	/* Insert the control node in the hash (new head of the chain) */
	new_ref->next = hdl->buckets[bucket];
	hdl->buckets[bucket] = new_ref;
	if (ctrl_ref)
		*ctrl_ref = new_ref;
	if (ctrl->handler == hdl) {
		/* By default each control starts in a cluster of its own.
		 * new_ref->ctrl is basically a cluster array with one
		 * element, so that's perfect to use as the cluster pointer.
		 * But only do this for the handler that owns the control.
		 */
		ctrl->cluster = &new_ref->ctrl;
		ctrl->ncontrols = 1;
	}

unlock:
	mutex_unlock(hdl->lock);
	return 0;
}

/* Add a new control */
static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops,
			const struct v4l2_ctrl_type_ops *type_ops,
			u32 id, const char *name, enum v4l2_ctrl_type type,
			s64 min, s64 max, u64 step, s64 def,
			const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size,
			u32 flags, const char * const *qmenu,
			const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def,
			void *priv)
{
	struct v4l2_ctrl *ctrl;
	unsigned sz_extra;
	unsigned nr_of_dims = 0;
	unsigned elems = 1;
	bool is_array;
	unsigned tot_ctrl_size;
	void *data;
	int err;

	if (hdl->error)
		return NULL;

	/* elems = product of all dimensions; dims is 0-terminated */
	while (dims && dims[nr_of_dims]) {
		elems *= dims[nr_of_dims];
		nr_of_dims++;
		if (nr_of_dims == V4L2_CTRL_MAX_DIMS)
			break;
	}

	is_array = nr_of_dims > 0;

	/* Prefill elem_size for all types handled by std_type_ops */
	switch ((u32)type) {
	case V4L2_CTRL_TYPE_INTEGER64:
		elem_size = sizeof(s64);
		break;
	case V4L2_CTRL_TYPE_STRING:
		/* max is the maximum string length; +1 for the terminator */
		elem_size = max + 1;
		break;
	case V4L2_CTRL_TYPE_U8:
		elem_size = sizeof(u8);
		break;
	case V4L2_CTRL_TYPE_U16:
		elem_size = sizeof(u16);
		break;
	case V4L2_CTRL_TYPE_U32:
		elem_size = sizeof(u32);
		break;
	case V4L2_CTRL_TYPE_MPEG2_SEQUENCE:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence);
		break;
	case V4L2_CTRL_TYPE_MPEG2_PICTURE:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture);
		break;
	case V4L2_CTRL_TYPE_MPEG2_QUANTISATION:
		elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation);
		break;
	case V4L2_CTRL_TYPE_FWHT_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_fwht_params);
		break;
	case V4L2_CTRL_TYPE_H264_SPS:
		elem_size = sizeof(struct v4l2_ctrl_h264_sps);
		break;
	case V4L2_CTRL_TYPE_H264_PPS:
		elem_size = sizeof(struct v4l2_ctrl_h264_pps);
		break;
	case V4L2_CTRL_TYPE_H264_SCALING_MATRIX:
		elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix);
		break;
	case V4L2_CTRL_TYPE_H264_SLICE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_h264_slice_params);
		break;
	case V4L2_CTRL_TYPE_H264_DECODE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_h264_decode_params);
		break;
	case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS:
		elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights);
		break;
	case V4L2_CTRL_TYPE_VP8_FRAME:
		elem_size = sizeof(struct v4l2_ctrl_vp8_frame);
		break;
	case V4L2_CTRL_TYPE_HEVC_SPS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_sps);
		break;
	case V4L2_CTRL_TYPE_HEVC_PPS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_pps);
		break;
	case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params);
		break;
	case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX:
		elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix);
		break;
	case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS:
		elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params);
		break;
	case V4L2_CTRL_TYPE_HDR10_CLL_INFO:
		elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info);
		break;
	case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY:
		elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display);
		break;
	case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR:
		elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr);
		break;
	case V4L2_CTRL_TYPE_VP9_FRAME:
		elem_size = sizeof(struct v4l2_ctrl_vp9_frame);
		break;
	case V4L2_CTRL_TYPE_AREA:
		elem_size = sizeof(struct v4l2_area);
		break;
	default:
		if (type < V4L2_CTRL_COMPOUND_TYPES)
			elem_size = sizeof(s32);
		break;
	}

	/* Sanity checks */
	if (id == 0 || name == NULL || !elem_size ||
	    id >= V4L2_CID_PRIVATE_BASE ||
	    (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) ||
	    (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) {
		handler_set_err(hdl, -ERANGE);
		return NULL;
	}
	err = check_range(type, min, max, step, def);
	if (err) {
		handler_set_err(hdl, err);
		return NULL;
	}
	if (is_array &&
	    (type == V4L2_CTRL_TYPE_BUTTON ||
	     type == V4L2_CTRL_TYPE_CTRL_CLASS)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	if (flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY) {
		/*
		 * For now only support this for one-dimensional arrays only.
		 *
		 * This can be relaxed in the future, but this will
		 * require more effort.
		 */
		if (nr_of_dims != 1) {
			handler_set_err(hdl, -EINVAL);
			return NULL;
		}
		/* Start with just 1 element */
		elems = 1;
	}

	tot_ctrl_size = elem_size * elems;
	sz_extra = 0;
	if (type == V4L2_CTRL_TYPE_BUTTON)
		flags |= V4L2_CTRL_FLAG_WRITE_ONLY |
			V4L2_CTRL_FLAG_EXECUTE_ON_WRITE;
	else if (type == V4L2_CTRL_TYPE_CTRL_CLASS)
		flags |= V4L2_CTRL_FLAG_READ_ONLY;
	else if (!is_array &&
		 (type == V4L2_CTRL_TYPE_INTEGER64 ||
		  type == V4L2_CTRL_TYPE_STRING ||
		  type >= V4L2_CTRL_COMPOUND_TYPES))
		/* Room after the struct for both the 'new' and 'cur' values */
		sz_extra += 2 * tot_ctrl_size;

	/* Plus room for one default value for compound controls */
	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const)
		sz_extra += elem_size;

	ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL);
	if (ctrl == NULL) {
		handler_set_err(hdl, -ENOMEM);
		return NULL;
	}

	INIT_LIST_HEAD(&ctrl->node);
	INIT_LIST_HEAD(&ctrl->ev_subs);
	ctrl->handler = hdl;
	ctrl->ops = ops;
	ctrl->type_ops = type_ops ? type_ops : &std_type_ops;
	ctrl->id = id;
	ctrl->name = name;
	ctrl->type = type;
	ctrl->flags = flags;
	ctrl->minimum = min;
	ctrl->maximum = max;
	ctrl->step = step;
	ctrl->default_value = def;
	ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING;
	ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string;
	ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64;
	ctrl->is_array = is_array;
	ctrl->is_dyn_array = !!(flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY);
	ctrl->elems = elems;
	ctrl->new_elems = elems;
	ctrl->nr_of_dims = nr_of_dims;
	if (nr_of_dims)
		memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0]));
	ctrl->elem_size = elem_size;
	if (type == V4L2_CTRL_TYPE_MENU)
		ctrl->qmenu = qmenu;
	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
		ctrl->qmenu_int = qmenu_int;
	ctrl->priv = priv;
	ctrl->cur.val = ctrl->val = def;
	data = &ctrl[1];

	if (ctrl->is_array) {
		/* Arrays store new+cur values in a separate 2x allocation */
		ctrl->p_array_alloc_elems = elems;
		ctrl->p_array = kvzalloc(2 * elems * elem_size, GFP_KERNEL);
		if (!ctrl->p_array) {
			/*
			 * NOTE(review): this OOM path returns NULL without
			 * calling handler_set_err(), unlike the kvzalloc
			 * failure above — confirm this is intended.
			 */
			kvfree(ctrl);
			return NULL;
		}
		data = ctrl->p_array;
	}

	if (!ctrl->is_int) {
		ctrl->p_new.p = data;
		ctrl->p_cur.p = data + tot_ctrl_size;
	} else {
		/* Plain integers live directly in val/cur.val */
		ctrl->p_new.p = &ctrl->val;
		ctrl->p_cur.p = &ctrl->cur.val;
	}

	if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) {
		if (ctrl->is_array)
			ctrl->p_def.p = &ctrl[1];
		else
			ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size;
		memcpy(ctrl->p_def.p, p_def.p_const, elem_size);
	}

	ctrl->type_ops->init(ctrl, 0, elems, ctrl->p_cur);
	cur_to_new(ctrl);

	if (handler_new_ref(hdl, ctrl, NULL, false, false)) {
		kvfree(ctrl->p_array);
		kvfree(ctrl);
		return NULL;
	}
	mutex_lock(hdl->lock);
	list_add_tail(&ctrl->node, &hdl->ctrls);
	mutex_unlock(hdl->lock);
	return ctrl;
}

/* Create a control from a driver-supplied v4l2_ctrl_config description */
struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_config *cfg, void *priv)
{
	bool is_menu;
	struct v4l2_ctrl *ctrl;
	const char *name = cfg->name;
	const char * const *qmenu = cfg->qmenu;
	const s64 *qmenu_int = cfg->qmenu_int;
	enum v4l2_ctrl_type type = cfg->type;
	u32 flags = cfg->flags;
	s64 min = cfg->min;
	s64 max = cfg->max;
	u64 step = cfg->step;
	s64 def = cfg->def;

	/* No name given: this must be a standard control, fill in defaults */
	if (name == NULL)
		v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step,
			       &def, &flags);

	is_menu = (type == V4L2_CTRL_TYPE_MENU ||
		   type == V4L2_CTRL_TYPE_INTEGER_MENU);
	/* Menus carry their skip mask in 'step', so cfg->step must be 0 */
	if (is_menu)
		WARN_ON(step);
	else
		WARN_ON(cfg->menu_skip_mask);
	if (type == V4L2_CTRL_TYPE_MENU && !qmenu) {
		qmenu = v4l2_ctrl_get_menu(cfg->id);
	} else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}

	ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name,
			type, min, max,
			is_menu ? cfg->menu_skip_mask : step, def,
			cfg->dims, cfg->elem_size,
			flags, qmenu, qmenu_int, cfg->p_def, priv);
	if (ctrl)
		ctrl->is_private = cfg->is_private;
	return ctrl;
}
EXPORT_SYMBOL(v4l2_ctrl_new_custom);

/* Helper function for standard non-menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops,
			u32 id, s64 min, s64 max, u64 step, s64 def)
{
	const char *name;
	enum v4l2_ctrl_type type;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type == V4L2_CTRL_TYPE_MENU ||
	    type == V4L2_CTRL_TYPE_INTEGER_MENU ||
	    type >= V4L2_CTRL_COMPOUND_TYPES) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     min, max, step, def, NULL, 0,
			     flags, NULL, NULL, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std);

/* Helper function for standard menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops,
			u32 id, u8 _max, u64 mask, u8 _def)
{
	const char * const *qmenu = NULL;
	const s64 *qmenu_int = NULL;
	unsigned int qmenu_int_len = 0;
	const char *name;
	enum v4l2_ctrl_type type;
	s64 min;
	s64 max = _max;
	s64 def = _def;
	u64 step;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);

	if (type == V4L2_CTRL_TYPE_MENU)
		qmenu = v4l2_ctrl_get_menu(id);
	else if (type == V4L2_CTRL_TYPE_INTEGER_MENU)
		qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len);

	if ((!qmenu && !qmenu_int) || (qmenu_int && max > qmenu_int_len)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	/* Menus start at 0; 'mask' travels in the step parameter */
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, mask, def, NULL, 0,
			     flags, qmenu, qmenu_int, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_menu);

/* Helper function for standard menu controls with driver defined menu */
struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops, u32 id, u8 _max,
			u64 mask, u8 _def, const char * const *qmenu)
{
	enum v4l2_ctrl_type type;
	const char *name;
	u32 flags;
	u64 step;
	s64 min;
	s64 max = _max;
	s64 def = _def;

	/* v4l2_ctrl_new_std_menu_items() should only be called for
	 * standard controls without a standard menu.
	 */
	if (v4l2_ctrl_get_menu(id)) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, mask, def, NULL, 0,
			     flags, qmenu, NULL, ptr_null, NULL);

}
EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items);

/* Helper function for standard compound controls */
struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl,
				const struct v4l2_ctrl_ops *ops, u32 id,
				const union v4l2_ctrl_ptr p_def)
{
	const char *name;
	enum v4l2_ctrl_type type;
	u32 flags;
	s64 min, max, step, def;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type < V4L2_CTRL_COMPOUND_TYPES) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     min, max, step, def, NULL, 0,
			     flags, NULL, NULL, p_def, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_std_compound);

/* Helper function for standard integer menu controls */
struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl,
			const struct v4l2_ctrl_ops *ops,
			u32 id, u8 _max, u8 _def, const s64 *qmenu_int)
{
	const char *name;
	enum v4l2_ctrl_type type;
	s64 min;
	u64 step;
	s64 max = _max;
	s64 def = _def;
	u32 flags;

	v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags);
	if (type != V4L2_CTRL_TYPE_INTEGER_MENU) {
		handler_set_err(hdl, -EINVAL);
		return NULL;
	}
	/* No skip mask for driver-supplied integer menus (step == 0) */
	return v4l2_ctrl_new(hdl, ops, NULL, id, name, type,
			     0, max, 0, def, NULL, 0,
			     flags, NULL, qmenu_int, ptr_null, NULL);
}
EXPORT_SYMBOL(v4l2_ctrl_new_int_menu);

/* Add the controls from another handler to our own. */
int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl,
			  struct v4l2_ctrl_handler *add,
			  bool (*filter)(const struct v4l2_ctrl *ctrl),
			  bool from_other_dev)
{
	struct v4l2_ctrl_ref *ref;
	int ret = 0;

	/* Do nothing if either handler is NULL or if they are the same */
	if (!hdl || !add || hdl == add)
		return 0;
	if (hdl->error)
		return hdl->error;
	mutex_lock(add->lock);
	list_for_each_entry(ref, &add->ctrl_refs, node) {
		struct v4l2_ctrl *ctrl = ref->ctrl;

		/* Skip handler-private controls. */
		if (ctrl->is_private)
			continue;
		/* And control classes */
		if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
			continue;
		/* Filter any unwanted controls */
		if (filter && !filter(ctrl))
			continue;
		ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false);
		if (ret)
			break;
	}
	mutex_unlock(add->lock);
	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_add_handler);

/* Filter for v4l2_ctrl_add_handler(): keep only radio-relevant controls */
bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl)
{
	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX)
		return true;
	if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX)
		return true;
	switch (ctrl->id) {
	case V4L2_CID_AUDIO_MUTE:
	case V4L2_CID_AUDIO_VOLUME:
	case V4L2_CID_AUDIO_BALANCE:
	case V4L2_CID_AUDIO_BASS:
	case V4L2_CID_AUDIO_TREBLE:
	case V4L2_CID_AUDIO_LOUDNESS:
		return true;
	default:
		break;
	}
	return false;
}
EXPORT_SYMBOL(v4l2_ctrl_radio_filter);

/* Cluster controls */
void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls)
{
	bool has_volatiles = false;
	int i;

	/* The first control is the master control and it must not be NULL */
	if (WARN_ON(ncontrols == 0 || controls[0] == NULL))
		return;

	for (i = 0; i < ncontrols; i++) {
		if (controls[i]) {
			controls[i]->cluster = controls;
			controls[i]->ncontrols = ncontrols;
			if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE)
				has_volatiles = true;
		}
	}
	controls[0]->has_volatiles = has_volatiles;
}
EXPORT_SYMBOL(v4l2_ctrl_cluster);

/*
 * Cluster controls with controls[0] as the auto/manual selector.
 * 'manual_val' is the selector value that means manual mode; when in auto
 * mode the other controls become inactive (and volatile, if requested).
 */
void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls,
			    u8 manual_val, bool set_volatile)
{
	struct v4l2_ctrl *master = controls[0];
	u32 flag = 0;
	int i;

	v4l2_ctrl_cluster(ncontrols, controls);
	WARN_ON(ncontrols <= 1);
	WARN_ON(manual_val < master->minimum || manual_val > master->maximum);
	WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl));
	master->is_auto = true;
	master->has_volatiles = set_volatile;
	master->manual_mode_value = manual_val;
	master->flags |= V4L2_CTRL_FLAG_UPDATE;

	if (!is_cur_manual(master))
		flag = V4L2_CTRL_FLAG_INACTIVE |
			(set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0);

	/* Apply to all slaves (index 1..), never to the master itself */
	for (i = 1; i < ncontrols; i++)
		if (controls[i])
			controls[i]->flags |= flag;
}
EXPORT_SYMBOL(v4l2_ctrl_auto_cluster);

/*
 * Obtain the current volatile values of an autocluster and mark them
 * as new.
 */
void update_from_auto_cluster(struct v4l2_ctrl *master)
{
	int i;

	for (i = 1; i < master->ncontrols; i++)
		cur_to_new(master->cluster[i]);
	/* Only mark slaves as new if g_volatile_ctrl succeeded */
	if (!call_op(master, g_volatile_ctrl))
		for (i = 1; i < master->ncontrols; i++)
			if (master->cluster[i])
				master->cluster[i]->is_new = 1;
}

/*
 * Return non-zero if one or more of the controls in the cluster has a new
 * value that differs from the current value.
 */
static int cluster_changed(struct v4l2_ctrl *master)
{
	bool changed = false;
	int i;

	for (i = 0; i < master->ncontrols; i++) {
		struct v4l2_ctrl *ctrl = master->cluster[i];
		bool ctrl_changed = false;

		if (!ctrl)
			continue;

		/* EXECUTE_ON_WRITE controls always count as changed */
		if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) {
			changed = true;
			ctrl_changed = true;
		}

		/*
		 * Set has_changed to false to avoid generating
		 * the event V4L2_EVENT_CTRL_CH_VALUE
		 */
		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) {
			ctrl->has_changed = false;
			continue;
		}

		/* A resized dynamic array is by definition changed */
		if (ctrl->elems != ctrl->new_elems)
			ctrl_changed = true;
		if (!ctrl_changed)
			ctrl_changed = !ctrl->type_ops->equal(ctrl,
				ctrl->elems, ctrl->p_cur, ctrl->p_new);
		/* has_changed is consumed later by new_to_cur() */
		ctrl->has_changed = ctrl_changed;
		changed |= ctrl->has_changed;
	}
	return changed;
}

/*
 * Core function that calls try/s_ctrl and ensures that the new value is
 * copied to the current value on a set.
 * Must be called with ctrl->handler->lock held.
 */
int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master,
		       bool set, u32 ch_flags)
{
	bool update_flag;
	int ret;
	int i;

	/*
	 * Go through the cluster and either validate the new value or
	 * (if no new value was set), copy the current value to the new
	 * value, ensuring a consistent view for the control ops when
	 * called.
	 */
	for (i = 0; i < master->ncontrols; i++) {
		struct v4l2_ctrl *ctrl = master->cluster[i];

		if (!ctrl)
			continue;

		if (!ctrl->is_new) {
			cur_to_new(ctrl);
			continue;
		}
		/*
		 * Check again: it may have changed since the
		 * previous check in try_or_set_ext_ctrls().
		 */
		if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED))
			return -EBUSY;
	}

	ret = call_op(master, try_ctrl);

	/* Don't set if there is no change */
	if (ret || !set || !cluster_changed(master))
		return ret;
	ret = call_op(master, s_ctrl);
	if (ret)
		return ret;

	/* If OK, then make the new values permanent. */
	update_flag = is_cur_manual(master) != is_new_manual(master);

	for (i = 0; i < master->ncontrols; i++) {
		/*
		 * If we switch from auto to manual mode, and this cluster
		 * contains volatile controls, then all non-master controls
		 * have to be marked as changed. The 'new' value contains
		 * the volatile value (obtained by update_from_auto_cluster),
		 * which now has to become the current value.
		 */
		if (i && update_flag && is_new_manual(master) &&
		    master->has_volatiles && master->cluster[i])
			master->cluster[i]->has_changed = true;

		/* Slaves additionally report a flags change on mode switch */
		new_to_cur(fh, master->cluster[i], ch_flags |
			((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0));
	}
	return 0;
}

/* Activate/deactivate a control.
 */
void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active)
{
	/* invert since the actual flag is called 'inactive' */
	bool inactive = !active;
	bool old;

	if (ctrl == NULL)
		return;

	if (inactive)
		/* set V4L2_CTRL_FLAG_INACTIVE (bit 4 of ctrl->flags) */
		old = test_and_set_bit(4, &ctrl->flags);
	else
		/* clear V4L2_CTRL_FLAG_INACTIVE */
		old = test_and_clear_bit(4, &ctrl->flags);
	/* Only notify subscribers if the flag actually toggled. */
	if (old != inactive)
		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
}
EXPORT_SYMBOL(v4l2_ctrl_activate);

/*
 * Mark a control as grabbed/ungrabbed and send a FLAGS-change event if
 * the state actually changed. Caller must hold ctrl->handler->lock.
 */
void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed)
{
	bool old;

	if (ctrl == NULL)
		return;

	lockdep_assert_held(ctrl->handler->lock);

	if (grabbed)
		/* set V4L2_CTRL_FLAG_GRABBED (bit 1 of ctrl->flags) */
		old = test_and_set_bit(1, &ctrl->flags);
	else
		/* clear V4L2_CTRL_FLAG_GRABBED */
		old = test_and_clear_bit(1, &ctrl->flags);
	/* Only notify subscribers if the flag actually toggled. */
	if (old != grabbed)
		send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS);
}
EXPORT_SYMBOL(__v4l2_ctrl_grab);

/*
 * Call s_ctrl for all controls owned by the handler.
 * Caller must hold hdl->lock (see the unlocked wrapper below).
 * Returns 0 on success or the first error returned by an s_ctrl op.
 */
int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
{
	struct v4l2_ctrl *ctrl;
	int ret = 0;

	if (hdl == NULL)
		return 0;

	lockdep_assert_held(hdl->lock);

	/* First pass: mark every control as not yet handled. */
	list_for_each_entry(ctrl, &hdl->ctrls, node)
		ctrl->done = false;

	list_for_each_entry(ctrl, &hdl->ctrls, node) {
		struct v4l2_ctrl *master = ctrl->cluster[0];
		int i;

		/* Skip if this control was already handled by a cluster. */
		/* Skip button controls and read-only controls. */
		if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON ||
		    (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY))
			continue;

		/*
		 * Load the current values as the new values for the whole
		 * cluster and mark every member as handled, so that s_ctrl
		 * is called exactly once per cluster.
		 */
		for (i = 0; i < master->ncontrols; i++) {
			if (master->cluster[i]) {
				cur_to_new(master->cluster[i]);
				master->cluster[i]->is_new = 1;
				master->cluster[i]->done = true;
			}
		}
		ret = call_op(master, s_ctrl);
		if (ret)
			break;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup);

/* Locking wrapper around __v4l2_ctrl_handler_setup(). */
int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl)
{
	int ret;

	if (hdl == NULL)
		return 0;

	mutex_lock(hdl->lock);
	ret = __v4l2_ctrl_handler_setup(hdl);
	mutex_unlock(hdl->lock);

	return ret;
}
EXPORT_SYMBOL(v4l2_ctrl_handler_setup);

/* Log the control name and value */
static void log_ctrl(const struct v4l2_ctrl *ctrl,
		     const char *prefix, const char *colon)
{
	/* Skip controls whose value is not meaningful to print. */
	if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY))
		return;
	if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS)
		return;

	pr_info("%s%s%s: ", prefix, colon, ctrl->name);

	/* Value formatting is type-specific. */
	ctrl->type_ops->log(ctrl);

	/* Annotate state flags that affect how the value should be read. */
	if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE |
			   V4L2_CTRL_FLAG_GRABBED |
			   V4L2_CTRL_FLAG_VOLATILE)) {
		if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE)
			pr_cont(" inactive");
		if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)
			pr_cont(" grabbed");
		if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE)
			pr_cont(" volatile");
	}
	pr_cont("\n");
}

/* Log all controls owned by the handler */
void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl,
				  const char *prefix)
{
	struct v4l2_ctrl *ctrl;
	const char *colon = "";
	int len;

	if (!hdl)
		return;
	if (!prefix)
		prefix = "";
	len = strlen(prefix);
	/* Separate prefix and name with ": " unless prefix ends in a space. */
	if (len && prefix[len - 1] != ' ')
		colon = ": ";
	mutex_lock(hdl->lock);
	/* Log every control that is not disabled. */
	list_for_each_entry(ctrl, &hdl->ctrls, node)
		if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED))
			log_ctrl(ctrl, prefix, colon);
	mutex_unlock(hdl->lock);
}
EXPORT_SYMBOL(v4l2_ctrl_handler_log_status);

/*
 * Register the standard camera orientation and rotation controls from the
 * fwnode device properties in @p. Properties left at
 * V4L2_FWNODE_PROPERTY_UNSET are skipped. Returns hdl->error, i.e. 0 on
 * success or the handler's first recorded error; -EINVAL is returned for
 * an unknown orientation value.
 */
int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl,
				    const struct v4l2_ctrl_ops *ctrl_ops,
				    const struct v4l2_fwnode_device_properties *p)
{
	if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) {
		u32 orientation_ctrl;

		/* Map the fwnode orientation onto the V4L2 menu item. */
		switch (p->orientation) {
		case V4L2_FWNODE_ORIENTATION_FRONT:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT;
			break;
		case V4L2_FWNODE_ORIENTATION_BACK:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK;
			break;
		case V4L2_FWNODE_ORIENTATION_EXTERNAL:
			orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL;
			break;
		default:
			return -EINVAL;
		}
		if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops,
					    V4L2_CID_CAMERA_ORIENTATION,
					    V4L2_CAMERA_ORIENTATION_EXTERNAL, 0,
					    orientation_ctrl))
			return hdl->error;
	}

	if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) {
		/* Rotation is fixed for the device: min == max == default. */
		if (!v4l2_ctrl_new_std(hdl, ctrl_ops,
				       V4L2_CID_CAMERA_SENSOR_ROTATION,
				       p->rotation, p->rotation, 1,
				       p->rotation))
			return hdl->error;
	}

	return hdl->error;
}
EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties);