1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * V4L2 controls framework core implementation. 4 * 5 * Copyright (C) 2010-2021 Hans Verkuil <hverkuil-cisco@xs4all.nl> 6 */ 7 8 #include <linux/export.h> 9 #include <linux/mm.h> 10 #include <linux/slab.h> 11 #include <media/v4l2-ctrls.h> 12 #include <media/v4l2-event.h> 13 #include <media/v4l2-fwnode.h> 14 15 #include "v4l2-ctrls-priv.h" 16 17 static const union v4l2_ctrl_ptr ptr_null; 18 19 static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl, 20 u32 changes) 21 { 22 memset(ev, 0, sizeof(*ev)); 23 ev->type = V4L2_EVENT_CTRL; 24 ev->id = ctrl->id; 25 ev->u.ctrl.changes = changes; 26 ev->u.ctrl.type = ctrl->type; 27 ev->u.ctrl.flags = user_flags(ctrl); 28 if (ctrl->is_ptr) 29 ev->u.ctrl.value64 = 0; 30 else 31 ev->u.ctrl.value64 = *ctrl->p_cur.p_s64; 32 ev->u.ctrl.minimum = ctrl->minimum; 33 ev->u.ctrl.maximum = ctrl->maximum; 34 if (ctrl->type == V4L2_CTRL_TYPE_MENU 35 || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU) 36 ev->u.ctrl.step = 1; 37 else 38 ev->u.ctrl.step = ctrl->step; 39 ev->u.ctrl.default_value = ctrl->default_value; 40 } 41 42 void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl) 43 { 44 struct v4l2_event ev; 45 u32 changes = V4L2_EVENT_CTRL_CH_FLAGS; 46 47 if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY)) 48 changes |= V4L2_EVENT_CTRL_CH_VALUE; 49 fill_event(&ev, ctrl, changes); 50 v4l2_event_queue_fh(fh, &ev); 51 } 52 53 void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes) 54 { 55 struct v4l2_event ev; 56 struct v4l2_subscribed_event *sev; 57 58 if (list_empty(&ctrl->ev_subs)) 59 return; 60 fill_event(&ev, ctrl, changes); 61 62 list_for_each_entry(sev, &ctrl->ev_subs, node) 63 if (sev->fh != fh || 64 (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK)) 65 v4l2_event_queue_fh(sev->fh, &ev); 66 } 67 68 static bool std_equal(const struct v4l2_ctrl *ctrl, u32 idx, 69 union v4l2_ctrl_ptr ptr1, 70 union v4l2_ctrl_ptr ptr2) 71 { 72 switch (ctrl->type) { 73 case V4L2_CTRL_TYPE_BUTTON: 74 return false; 75 case V4L2_CTRL_TYPE_STRING: 76 idx *= ctrl->elem_size; 77 /* strings are always 0-terminated */ 78 return !strcmp(ptr1.p_char + idx, ptr2.p_char + idx); 79 case V4L2_CTRL_TYPE_INTEGER64: 80 return ptr1.p_s64[idx] == ptr2.p_s64[idx]; 81 case V4L2_CTRL_TYPE_U8: 82 return ptr1.p_u8[idx] == ptr2.p_u8[idx]; 83 case V4L2_CTRL_TYPE_U16: 84 return ptr1.p_u16[idx] == ptr2.p_u16[idx]; 85 case V4L2_CTRL_TYPE_U32: 86 return ptr1.p_u32[idx] == ptr2.p_u32[idx]; 87 default: 88 if (ctrl->is_int) 89 return ptr1.p_s32[idx] == ptr2.p_s32[idx]; 90 idx *= ctrl->elem_size; 91 return !memcmp(ptr1.p_const + idx, ptr2.p_const + idx, 92 ctrl->elem_size); 93 } 94 } 95 96 /* Default intra MPEG-2 quantisation coefficients, from the specification. */ 97 static const u8 mpeg2_intra_quant_matrix[64] = { 98 8, 16, 16, 19, 16, 19, 22, 22, 99 22, 22, 22, 22, 26, 24, 26, 27, 100 27, 27, 26, 26, 26, 26, 27, 27, 101 27, 29, 29, 29, 34, 34, 34, 29, 102 29, 29, 27, 27, 29, 29, 32, 32, 103 34, 34, 37, 38, 37, 35, 35, 34, 104 35, 38, 38, 40, 40, 40, 48, 48, 105 46, 46, 56, 56, 58, 69, 69, 83 106 }; 107 108 static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx, 109 union v4l2_ctrl_ptr ptr) 110 { 111 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence; 112 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture; 113 struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant; 114 struct v4l2_ctrl_vp8_frame *p_vp8_frame; 115 struct v4l2_ctrl_vp9_frame *p_vp9_frame; 116 struct v4l2_ctrl_fwht_params *p_fwht_params; 117 struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix; 118 void *p = ptr.p + idx * ctrl->elem_size; 119 120 if (ctrl->p_def.p_const) 121 memcpy(p, ctrl->p_def.p_const, ctrl->elem_size); 122 else 123 memset(p, 0, ctrl->elem_size); 124 125 switch ((u32)ctrl->type) { 126 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 127 p_mpeg2_sequence = p; 128 129 /* 4:2:0 */ 130 p_mpeg2_sequence->chroma_format = 1; 131 break; 132 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 133 p_mpeg2_picture = p; 134 135 /* interlaced top field */ 136 p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD; 137 p_mpeg2_picture->picture_coding_type = 138 V4L2_MPEG2_PIC_CODING_TYPE_I; 139 break; 140 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 141 p_mpeg2_quant = p; 142 143 memcpy(p_mpeg2_quant->intra_quantiser_matrix, 144 mpeg2_intra_quant_matrix, 145 ARRAY_SIZE(mpeg2_intra_quant_matrix)); 146 /* 147 * The default non-intra MPEG-2 quantisation 148 * coefficients are all 16, as per the specification. 149 */ 150 memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16, 151 sizeof(p_mpeg2_quant->non_intra_quantiser_matrix)); 152 break; 153 case V4L2_CTRL_TYPE_VP8_FRAME: 154 p_vp8_frame = p; 155 p_vp8_frame->num_dct_parts = 1; 156 break; 157 case V4L2_CTRL_TYPE_VP9_FRAME: 158 p_vp9_frame = p; 159 p_vp9_frame->profile = 0; 160 p_vp9_frame->bit_depth = 8; 161 p_vp9_frame->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING | 162 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING; 163 break; 164 case V4L2_CTRL_TYPE_FWHT_PARAMS: 165 p_fwht_params = p; 166 p_fwht_params->version = V4L2_FWHT_VERSION; 167 p_fwht_params->width = 1280; 168 p_fwht_params->height = 720; 169 p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV | 170 (2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET); 171 break; 172 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 173 p_h264_scaling_matrix = p; 174 /* 175 * The default (flat) H.264 scaling matrix when none are 176 * specified in the bitstream, this is according to formulas 177 * (7-8) and (7-9) of the specification. 178 */ 179 memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix)); 180 break; 181 } 182 } 183 184 static void std_init(const struct v4l2_ctrl *ctrl, u32 idx, 185 union v4l2_ctrl_ptr ptr) 186 { 187 switch (ctrl->type) { 188 case V4L2_CTRL_TYPE_STRING: 189 idx *= ctrl->elem_size; 190 memset(ptr.p_char + idx, ' ', ctrl->minimum); 191 ptr.p_char[idx + ctrl->minimum] = '\0'; 192 break; 193 case V4L2_CTRL_TYPE_INTEGER64: 194 ptr.p_s64[idx] = ctrl->default_value; 195 break; 196 case V4L2_CTRL_TYPE_INTEGER: 197 case V4L2_CTRL_TYPE_INTEGER_MENU: 198 case V4L2_CTRL_TYPE_MENU: 199 case V4L2_CTRL_TYPE_BITMASK: 200 case V4L2_CTRL_TYPE_BOOLEAN: 201 ptr.p_s32[idx] = ctrl->default_value; 202 break; 203 case V4L2_CTRL_TYPE_BUTTON: 204 case V4L2_CTRL_TYPE_CTRL_CLASS: 205 ptr.p_s32[idx] = 0; 206 break; 207 case V4L2_CTRL_TYPE_U8: 208 ptr.p_u8[idx] = ctrl->default_value; 209 break; 210 case V4L2_CTRL_TYPE_U16: 211 ptr.p_u16[idx] = ctrl->default_value; 212 break; 213 case V4L2_CTRL_TYPE_U32: 214 ptr.p_u32[idx] = ctrl->default_value; 215 break; 216 default: 217 std_init_compound(ctrl, idx, ptr); 218 break; 219 } 220 } 221 222 static void std_log(const struct v4l2_ctrl *ctrl) 223 { 224 union v4l2_ctrl_ptr ptr = ctrl->p_cur; 225 226 if (ctrl->is_array) { 227 unsigned i; 228 229 for (i = 0; i < ctrl->nr_of_dims; i++) 230 pr_cont("[%u]", ctrl->dims[i]); 231 pr_cont(" "); 232 } 233 234 switch (ctrl->type) { 235 case V4L2_CTRL_TYPE_INTEGER: 236 pr_cont("%d", *ptr.p_s32); 237 break; 238 case V4L2_CTRL_TYPE_BOOLEAN: 239 pr_cont("%s", *ptr.p_s32 ? "true" : "false"); 240 break; 241 case V4L2_CTRL_TYPE_MENU: 242 pr_cont("%s", ctrl->qmenu[*ptr.p_s32]); 243 break; 244 case V4L2_CTRL_TYPE_INTEGER_MENU: 245 pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]); 246 break; 247 case V4L2_CTRL_TYPE_BITMASK: 248 pr_cont("0x%08x", *ptr.p_s32); 249 break; 250 case V4L2_CTRL_TYPE_INTEGER64: 251 pr_cont("%lld", *ptr.p_s64); 252 break; 253 case V4L2_CTRL_TYPE_STRING: 254 pr_cont("%s", ptr.p_char); 255 break; 256 case V4L2_CTRL_TYPE_U8: 257 pr_cont("%u", (unsigned)*ptr.p_u8); 258 break; 259 case V4L2_CTRL_TYPE_U16: 260 pr_cont("%u", (unsigned)*ptr.p_u16); 261 break; 262 case V4L2_CTRL_TYPE_U32: 263 pr_cont("%u", (unsigned)*ptr.p_u32); 264 break; 265 case V4L2_CTRL_TYPE_H264_SPS: 266 pr_cont("H264_SPS"); 267 break; 268 case V4L2_CTRL_TYPE_H264_PPS: 269 pr_cont("H264_PPS"); 270 break; 271 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 272 pr_cont("H264_SCALING_MATRIX"); 273 break; 274 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 275 pr_cont("H264_SLICE_PARAMS"); 276 break; 277 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 278 pr_cont("H264_DECODE_PARAMS"); 279 break; 280 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 281 pr_cont("H264_PRED_WEIGHTS"); 282 break; 283 case V4L2_CTRL_TYPE_FWHT_PARAMS: 284 pr_cont("FWHT_PARAMS"); 285 break; 286 case V4L2_CTRL_TYPE_VP8_FRAME: 287 pr_cont("VP8_FRAME"); 288 break; 289 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 290 pr_cont("HDR10_CLL_INFO"); 291 break; 292 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 293 pr_cont("HDR10_MASTERING_DISPLAY"); 294 break; 295 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 296 pr_cont("MPEG2_QUANTISATION"); 297 break; 298 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 299 pr_cont("MPEG2_SEQUENCE"); 300 break; 301 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 302 pr_cont("MPEG2_PICTURE"); 303 break; 304 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 305 pr_cont("VP9_COMPRESSED_HDR"); 306 break; 307 case V4L2_CTRL_TYPE_VP9_FRAME: 308 pr_cont("VP9_FRAME"); 309 break; 310 default: 311 pr_cont("unknown type %d", ctrl->type); 312 break; 313 } 314 } 315 316 /* 317 * Round towards the closest legal value. Be careful when we are 318 * close to the maximum range of the control type to prevent 319 * wrap-arounds. 320 */ 321 #define ROUND_TO_RANGE(val, offset_type, ctrl) \ 322 ({ \ 323 offset_type offset; \ 324 if ((ctrl)->maximum >= 0 && \ 325 val >= (ctrl)->maximum - (s32)((ctrl)->step / 2)) \ 326 val = (ctrl)->maximum; \ 327 else \ 328 val += (s32)((ctrl)->step / 2); \ 329 val = clamp_t(typeof(val), val, \ 330 (ctrl)->minimum, (ctrl)->maximum); \ 331 offset = (val) - (ctrl)->minimum; \ 332 offset = (ctrl)->step * (offset / (u32)(ctrl)->step); \ 333 val = (ctrl)->minimum + offset; \ 334 0; \ 335 }) 336 337 /* Validate a new control */ 338 339 #define zero_padding(s) \ 340 memset(&(s).padding, 0, sizeof((s).padding)) 341 #define zero_reserved(s) \ 342 memset(&(s).reserved, 0, sizeof((s).reserved)) 343 344 static int 345 validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf) 346 { 347 unsigned int i; 348 349 if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED | 350 V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE)) 351 return -EINVAL; 352 353 /* That all values are in the accepted range. */ 354 if (lf->level > GENMASK(5, 0)) 355 return -EINVAL; 356 357 if (lf->sharpness > GENMASK(2, 0)) 358 return -EINVAL; 359 360 for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) 361 if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63) 362 return -EINVAL; 363 364 for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) 365 if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63) 366 return -EINVAL; 367 368 zero_reserved(*lf); 369 return 0; 370 } 371 372 static int 373 validate_vp9_quant_params(struct v4l2_vp9_quantization *quant) 374 { 375 if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 || 376 quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 || 377 quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15) 378 return -EINVAL; 379 380 zero_reserved(*quant); 381 return 0; 382 } 383 384 static int 385 validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg) 386 { 387 unsigned int i, j; 388 389 if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED | 390 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP | 391 V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE | 392 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA | 393 V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE)) 394 return -EINVAL; 395 396 for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) { 397 if (seg->feature_enabled[i] & 398 ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK) 399 return -EINVAL; 400 } 401 402 for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) { 403 static const int range[] = { 255, 63, 3, 0 }; 404 405 for (j = 0; j < ARRAY_SIZE(seg->feature_data[j]); j++) { 406 if (seg->feature_data[i][j] < -range[j] || 407 seg->feature_data[i][j] > range[j]) 408 return -EINVAL; 409 } 410 } 411 412 zero_reserved(*seg); 413 return 0; 414 } 415 416 static int 417 validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr) 418 { 419 if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT) 420 return -EINVAL; 421 422 return 0; 423 } 424 425 static int 426 validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame) 427 { 428 int ret; 429 430 /* Make sure we're not passed invalid flags. */ 431 if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME | 432 V4L2_VP9_FRAME_FLAG_SHOW_FRAME | 433 V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT | 434 V4L2_VP9_FRAME_FLAG_INTRA_ONLY | 435 V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV | 436 V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX | 437 V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE | 438 V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING | 439 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING | 440 V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING)) 441 return -EINVAL; 442 443 if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT && 444 frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX) 445 return -EINVAL; 446 447 if (frame->profile > V4L2_VP9_PROFILE_MAX) 448 return -EINVAL; 449 450 if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL) 451 return -EINVAL; 452 453 if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX) 454 return -EINVAL; 455 456 /* 457 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10 458 * and 12 bit depths. 459 */ 460 if ((frame->profile < 2 && frame->bit_depth != 8) || 461 (frame->profile >= 2 && 462 (frame->bit_depth != 10 && frame->bit_depth != 12))) 463 return -EINVAL; 464 465 /* Profile 0 and 2 only accept YUV 4:2:0. */ 466 if ((frame->profile == 0 || frame->profile == 2) && 467 (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) || 468 !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING))) 469 return -EINVAL; 470 471 /* Profile 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */ 472 if ((frame->profile == 1 || frame->profile == 3) && 473 ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) && 474 (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING))) 475 return -EINVAL; 476 477 if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE) 478 return -EINVAL; 479 480 /* 481 * According to the spec, tile_cols_log2 shall be less than or equal 482 * to 6. 483 */ 484 if (frame->tile_cols_log2 > 6) 485 return -EINVAL; 486 487 if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT) 488 return -EINVAL; 489 490 ret = validate_vp9_lf_params(&frame->lf); 491 if (ret) 492 return ret; 493 494 ret = validate_vp9_quant_params(&frame->quant); 495 if (ret) 496 return ret; 497 498 ret = validate_vp9_seg_params(&frame->seg); 499 if (ret) 500 return ret; 501 502 zero_reserved(*frame); 503 return 0; 504 } 505 506 /* 507 * Compound controls validation requires setting unused fields/flags to zero 508 * in order to properly detect unchanged controls with std_equal's memcmp. 509 */ 510 static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx, 511 union v4l2_ctrl_ptr ptr) 512 { 513 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence; 514 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture; 515 struct v4l2_ctrl_vp8_frame *p_vp8_frame; 516 struct v4l2_ctrl_fwht_params *p_fwht_params; 517 struct v4l2_ctrl_h264_sps *p_h264_sps; 518 struct v4l2_ctrl_h264_pps *p_h264_pps; 519 struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights; 520 struct v4l2_ctrl_h264_slice_params *p_h264_slice_params; 521 struct v4l2_ctrl_h264_decode_params *p_h264_dec_params; 522 struct v4l2_ctrl_hevc_sps *p_hevc_sps; 523 struct v4l2_ctrl_hevc_pps *p_hevc_pps; 524 struct v4l2_ctrl_hevc_slice_params *p_hevc_slice_params; 525 struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering; 526 struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params; 527 struct v4l2_area *area; 528 void *p = ptr.p + idx * ctrl->elem_size; 529 unsigned int i; 530 531 switch ((u32)ctrl->type) { 532 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 533 p_mpeg2_sequence = p; 534 535 switch (p_mpeg2_sequence->chroma_format) { 536 case 1: /* 4:2:0 */ 537 case 2: /* 4:2:2 */ 538 case 3: /* 4:4:4 */ 539 break; 540 default: 541 return -EINVAL; 542 } 543 break; 544 545 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 546 p_mpeg2_picture = p; 547 548 switch (p_mpeg2_picture->intra_dc_precision) { 549 case 0: /* 8 bits */ 550 case 1: /* 9 bits */ 551 case 2: /* 10 bits */ 552 case 3: /* 11 bits */ 553 break; 554 default: 555 return -EINVAL; 556 } 557 558 switch (p_mpeg2_picture->picture_structure) { 559 case V4L2_MPEG2_PIC_TOP_FIELD: 560 case V4L2_MPEG2_PIC_BOTTOM_FIELD: 561 case V4L2_MPEG2_PIC_FRAME: 562 break; 563 default: 564 return -EINVAL; 565 } 566 567 switch (p_mpeg2_picture->picture_coding_type) { 568 case V4L2_MPEG2_PIC_CODING_TYPE_I: 569 case V4L2_MPEG2_PIC_CODING_TYPE_P: 570 case V4L2_MPEG2_PIC_CODING_TYPE_B: 571 break; 572 default: 573 return -EINVAL; 574 } 575 zero_reserved(*p_mpeg2_picture); 576 break; 577 578 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 579 break; 580 581 case V4L2_CTRL_TYPE_FWHT_PARAMS: 582 p_fwht_params = p; 583 if (p_fwht_params->version < V4L2_FWHT_VERSION) 584 return -EINVAL; 585 if (!p_fwht_params->width || !p_fwht_params->height) 586 return -EINVAL; 587 break; 588 589 case V4L2_CTRL_TYPE_H264_SPS: 590 p_h264_sps = p; 591 592 /* Some syntax elements are only conditionally valid */ 593 if (p_h264_sps->pic_order_cnt_type != 0) { 594 p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0; 595 } else if (p_h264_sps->pic_order_cnt_type != 1) { 596 p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0; 597 p_h264_sps->offset_for_non_ref_pic = 0; 598 p_h264_sps->offset_for_top_to_bottom_field = 0; 599 memset(&p_h264_sps->offset_for_ref_frame, 0, 600 sizeof(p_h264_sps->offset_for_ref_frame)); 601 } 602 603 if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) { 604 p_h264_sps->chroma_format_idc = 1; 605 p_h264_sps->bit_depth_luma_minus8 = 0; 606 p_h264_sps->bit_depth_chroma_minus8 = 0; 607 608 p_h264_sps->flags &= 609 ~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS; 610 611 if (p_h264_sps->chroma_format_idc < 3) 612 p_h264_sps->flags &= 613 ~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE; 614 } 615 616 if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY) 617 p_h264_sps->flags &= 618 ~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD; 619 620 /* 621 * Chroma 4:2:2 format require at least High 4:2:2 profile. 622 * 623 * The H264 specification and well-known parser implementations 624 * use profile-idc values directly, as that is clearer and 625 * less ambiguous. We do the same here. 626 */ 627 if (p_h264_sps->profile_idc < 122 && 628 p_h264_sps->chroma_format_idc > 1) 629 return -EINVAL; 630 /* Chroma 4:4:4 format require at least High 4:2:2 profile */ 631 if (p_h264_sps->profile_idc < 244 && 632 p_h264_sps->chroma_format_idc > 2) 633 return -EINVAL; 634 if (p_h264_sps->chroma_format_idc > 3) 635 return -EINVAL; 636 637 if (p_h264_sps->bit_depth_luma_minus8 > 6) 638 return -EINVAL; 639 if (p_h264_sps->bit_depth_chroma_minus8 > 6) 640 return -EINVAL; 641 if (p_h264_sps->log2_max_frame_num_minus4 > 12) 642 return -EINVAL; 643 if (p_h264_sps->pic_order_cnt_type > 2) 644 return -EINVAL; 645 if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12) 646 return -EINVAL; 647 if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN) 648 return -EINVAL; 649 break; 650 651 case V4L2_CTRL_TYPE_H264_PPS: 652 p_h264_pps = p; 653 654 if (p_h264_pps->num_slice_groups_minus1 > 7) 655 return -EINVAL; 656 if (p_h264_pps->num_ref_idx_l0_default_active_minus1 > 657 (V4L2_H264_REF_LIST_LEN - 1)) 658 return -EINVAL; 659 if (p_h264_pps->num_ref_idx_l1_default_active_minus1 > 660 (V4L2_H264_REF_LIST_LEN - 1)) 661 return -EINVAL; 662 if (p_h264_pps->weighted_bipred_idc > 2) 663 return -EINVAL; 664 /* 665 * pic_init_qp_minus26 shall be in the range of 666 * -(26 + QpBdOffset_y) to +25, inclusive, 667 * where QpBdOffset_y is 6 * bit_depth_luma_minus8 668 */ 669 if (p_h264_pps->pic_init_qp_minus26 < -62 || 670 p_h264_pps->pic_init_qp_minus26 > 25) 671 return -EINVAL; 672 if (p_h264_pps->pic_init_qs_minus26 < -26 || 673 p_h264_pps->pic_init_qs_minus26 > 25) 674 return -EINVAL; 675 if (p_h264_pps->chroma_qp_index_offset < -12 || 676 p_h264_pps->chroma_qp_index_offset > 12) 677 return -EINVAL; 678 if (p_h264_pps->second_chroma_qp_index_offset < -12 || 679 p_h264_pps->second_chroma_qp_index_offset > 12) 680 return -EINVAL; 681 break; 682 683 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 684 break; 685 686 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 687 p_h264_pred_weights = p; 688 689 if (p_h264_pred_weights->luma_log2_weight_denom > 7) 690 return -EINVAL; 691 if (p_h264_pred_weights->chroma_log2_weight_denom > 7) 692 return -EINVAL; 693 break; 694 695 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 696 p_h264_slice_params = p; 697 698 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B) 699 p_h264_slice_params->flags &= 700 ~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED; 701 702 if (p_h264_slice_params->colour_plane_id > 2) 703 return -EINVAL; 704 if (p_h264_slice_params->cabac_init_idc > 2) 705 return -EINVAL; 706 if (p_h264_slice_params->disable_deblocking_filter_idc > 2) 707 return -EINVAL; 708 if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 || 709 p_h264_slice_params->slice_alpha_c0_offset_div2 > 6) 710 return -EINVAL; 711 if (p_h264_slice_params->slice_beta_offset_div2 < -6 || 712 p_h264_slice_params->slice_beta_offset_div2 > 6) 713 return -EINVAL; 714 715 if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I || 716 p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI) 717 p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0; 718 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B) 719 p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0; 720 721 if (p_h264_slice_params->num_ref_idx_l0_active_minus1 > 722 (V4L2_H264_REF_LIST_LEN - 1)) 723 return -EINVAL; 724 if (p_h264_slice_params->num_ref_idx_l1_active_minus1 > 725 (V4L2_H264_REF_LIST_LEN - 1)) 726 return -EINVAL; 727 zero_reserved(*p_h264_slice_params); 728 break; 729 730 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 731 p_h264_dec_params = p; 732 733 if (p_h264_dec_params->nal_ref_idc > 3) 734 return -EINVAL; 735 for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) { 736 struct v4l2_h264_dpb_entry *dpb_entry = 737 &p_h264_dec_params->dpb[i]; 738 739 zero_reserved(*dpb_entry); 740 } 741 zero_reserved(*p_h264_dec_params); 742 break; 743 744 case V4L2_CTRL_TYPE_VP8_FRAME: 745 p_vp8_frame = p; 746 747 switch (p_vp8_frame->num_dct_parts) { 748 case 1: 749 case 2: 750 case 4: 751 case 8: 752 break; 753 default: 754 return -EINVAL; 755 } 756 zero_padding(p_vp8_frame->segment); 757 zero_padding(p_vp8_frame->lf); 758 zero_padding(p_vp8_frame->quant); 759 zero_padding(p_vp8_frame->entropy); 760 zero_padding(p_vp8_frame->coder_state); 761 break; 762 763 case V4L2_CTRL_TYPE_HEVC_SPS: 764 p_hevc_sps = p; 765 766 if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) { 767 p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0; 768 p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0; 769 p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0; 770 p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0; 771 } 772 773 if (!(p_hevc_sps->flags & 774 V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT)) 775 p_hevc_sps->num_long_term_ref_pics_sps = 0; 776 break; 777 778 case V4L2_CTRL_TYPE_HEVC_PPS: 779 p_hevc_pps = p; 780 781 if (!(p_hevc_pps->flags & 782 V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED)) 783 p_hevc_pps->diff_cu_qp_delta_depth = 0; 784 785 if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) { 786 p_hevc_pps->num_tile_columns_minus1 = 0; 787 p_hevc_pps->num_tile_rows_minus1 = 0; 788 memset(&p_hevc_pps->column_width_minus1, 0, 789 sizeof(p_hevc_pps->column_width_minus1)); 790 memset(&p_hevc_pps->row_height_minus1, 0, 791 sizeof(p_hevc_pps->row_height_minus1)); 792 793 p_hevc_pps->flags &= 794 ~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED; 795 } 796 797 if (p_hevc_pps->flags & 798 V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) { 799 p_hevc_pps->pps_beta_offset_div2 = 0; 800 p_hevc_pps->pps_tc_offset_div2 = 0; 801 } 802 803 zero_padding(*p_hevc_pps); 804 break; 805 806 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 807 p_hevc_decode_params = p; 808 809 if (p_hevc_decode_params->num_active_dpb_entries > 810 V4L2_HEVC_DPB_ENTRIES_NUM_MAX) 811 return -EINVAL; 812 813 for (i = 0; i < p_hevc_decode_params->num_active_dpb_entries; 814 i++) { 815 struct v4l2_hevc_dpb_entry *dpb_entry = 816 &p_hevc_decode_params->dpb[i]; 817 818 zero_padding(*dpb_entry); 819 } 820 break; 821 822 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 823 p_hevc_slice_params = p; 824 825 zero_padding(p_hevc_slice_params->pred_weight_table); 826 zero_padding(*p_hevc_slice_params); 827 break; 828 829 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 830 break; 831 832 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 833 p_hdr10_mastering = p; 834 835 for (i = 0; i < 3; ++i) { 836 if (p_hdr10_mastering->display_primaries_x[i] < 837 V4L2_HDR10_MASTERING_PRIMARIES_X_LOW || 838 p_hdr10_mastering->display_primaries_x[i] > 839 V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH || 840 p_hdr10_mastering->display_primaries_y[i] < 841 V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW || 842 p_hdr10_mastering->display_primaries_y[i] > 843 V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH) 844 return -EINVAL; 845 } 846 847 if (p_hdr10_mastering->white_point_x < 848 V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW || 849 p_hdr10_mastering->white_point_x > 850 V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH || 851 p_hdr10_mastering->white_point_y < 852 V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW || 853 p_hdr10_mastering->white_point_y > 854 V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH) 855 return -EINVAL; 856 857 if (p_hdr10_mastering->max_display_mastering_luminance < 858 V4L2_HDR10_MASTERING_MAX_LUMA_LOW || 859 p_hdr10_mastering->max_display_mastering_luminance > 860 V4L2_HDR10_MASTERING_MAX_LUMA_HIGH || 861 p_hdr10_mastering->min_display_mastering_luminance < 862 V4L2_HDR10_MASTERING_MIN_LUMA_LOW || 863 p_hdr10_mastering->min_display_mastering_luminance > 864 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH) 865 return -EINVAL; 866 867 /* The following restriction comes from ITU-T Rec. H.265 spec */ 868 if (p_hdr10_mastering->max_display_mastering_luminance == 869 V4L2_HDR10_MASTERING_MAX_LUMA_LOW && 870 p_hdr10_mastering->min_display_mastering_luminance == 871 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH) 872 return -EINVAL; 873 874 break; 875 876 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 877 break; 878 879 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 880 return validate_vp9_compressed_hdr(p); 881 882 case V4L2_CTRL_TYPE_VP9_FRAME: 883 return validate_vp9_frame(p); 884 885 case V4L2_CTRL_TYPE_AREA: 886 area = p; 887 if (!area->width || !area->height) 888 return -EINVAL; 889 break; 890 891 default: 892 return -EINVAL; 893 } 894 895 return 0; 896 } 897 898 static int std_validate(const struct v4l2_ctrl *ctrl, u32 idx, 899 union v4l2_ctrl_ptr ptr) 900 { 901 size_t len; 902 u64 offset; 903 s64 val; 904 905 switch ((u32)ctrl->type) { 906 case V4L2_CTRL_TYPE_INTEGER: 907 return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl); 908 case V4L2_CTRL_TYPE_INTEGER64: 909 /* 910 * We can't use the ROUND_TO_RANGE define here due to 911 * the u64 divide that needs special care. 912 */ 913 val = ptr.p_s64[idx]; 914 if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2)) 915 val = ctrl->maximum; 916 else 917 val += (s64)(ctrl->step / 2); 918 val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum); 919 offset = val - ctrl->minimum; 920 do_div(offset, ctrl->step); 921 ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step; 922 return 0; 923 case V4L2_CTRL_TYPE_U8: 924 return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl); 925 case V4L2_CTRL_TYPE_U16: 926 return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl); 927 case V4L2_CTRL_TYPE_U32: 928 return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl); 929 930 case V4L2_CTRL_TYPE_BOOLEAN: 931 ptr.p_s32[idx] = !!ptr.p_s32[idx]; 932 return 0; 933 934 case V4L2_CTRL_TYPE_MENU: 935 case V4L2_CTRL_TYPE_INTEGER_MENU: 936 if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum) 937 return -ERANGE; 938 if (ptr.p_s32[idx] < BITS_PER_LONG_LONG && 939 (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx]))) 940 return -EINVAL; 941 if (ctrl->type == V4L2_CTRL_TYPE_MENU && 942 ctrl->qmenu[ptr.p_s32[idx]][0] == '\0') 943 return -EINVAL; 944 return 0; 945 946 case V4L2_CTRL_TYPE_BITMASK: 947 ptr.p_s32[idx] &= ctrl->maximum; 948 return 0; 949 950 case V4L2_CTRL_TYPE_BUTTON: 951 case V4L2_CTRL_TYPE_CTRL_CLASS: 952 ptr.p_s32[idx] = 0; 953 return 0; 954 955 case V4L2_CTRL_TYPE_STRING: 956 idx *= ctrl->elem_size; 957 len = strlen(ptr.p_char + idx); 958 if (len < ctrl->minimum) 959 return -ERANGE; 960 if ((len - (u32)ctrl->minimum) % (u32)ctrl->step) 961 return -ERANGE; 962 return 0; 963 964 default: 965 return std_validate_compound(ctrl, idx, ptr); 966 } 967 } 968 969 static const struct v4l2_ctrl_type_ops std_type_ops = { 970 .equal = std_equal, 971 .init = std_init, 972 .log = std_log, 973 .validate = std_validate, 974 }; 975 976 void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv) 977 { 978 if (!ctrl) 979 return; 980 if (!notify) { 981 ctrl->call_notify = 0; 982 return; 983 } 984 if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify)) 985 return; 986 ctrl->handler->notify = notify; 987 ctrl->handler->notify_priv = priv; 988 ctrl->call_notify = 1; 989 } 990 EXPORT_SYMBOL(v4l2_ctrl_notify); 991 992 /* Copy the one value to another. */ 993 static void ptr_to_ptr(struct v4l2_ctrl *ctrl, 994 union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to) 995 { 996 if (ctrl == NULL) 997 return; 998 memcpy(to.p, from.p_const, ctrl->elems * ctrl->elem_size); 999 } 1000 1001 /* Copy the new value to the current value. */ 1002 void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags) 1003 { 1004 bool changed; 1005 1006 if (ctrl == NULL) 1007 return; 1008 1009 /* has_changed is set by cluster_changed */ 1010 changed = ctrl->has_changed; 1011 if (changed) 1012 ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur); 1013 1014 if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) { 1015 /* Note: CH_FLAGS is only set for auto clusters. */ 1016 ctrl->flags &= 1017 ~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE); 1018 if (!is_cur_manual(ctrl->cluster[0])) { 1019 ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE; 1020 if (ctrl->cluster[0]->has_volatiles) 1021 ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE; 1022 } 1023 fh = NULL; 1024 } 1025 if (changed || ch_flags) { 1026 /* If a control was changed that was not one of the controls 1027 modified by the application, then send the event to all. */ 1028 if (!ctrl->is_new) 1029 fh = NULL; 1030 send_event(fh, ctrl, 1031 (changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags); 1032 if (ctrl->call_notify && changed && ctrl->handler->notify) 1033 ctrl->handler->notify(ctrl, ctrl->handler->notify_priv); 1034 } 1035 } 1036 1037 /* Copy the current value to the new value */ 1038 void cur_to_new(struct v4l2_ctrl *ctrl) 1039 { 1040 if (ctrl == NULL) 1041 return; 1042 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new); 1043 } 1044 1045 /* Copy the new value to the request value */ 1046 void new_to_req(struct v4l2_ctrl_ref *ref) 1047 { 1048 if (!ref) 1049 return; 1050 ptr_to_ptr(ref->ctrl, ref->ctrl->p_new, ref->p_req); 1051 ref->valid_p_req = true; 1052 } 1053 1054 /* Copy the current value to the request value */ 1055 void cur_to_req(struct v4l2_ctrl_ref *ref) 1056 { 1057 if (!ref) 1058 return; 1059 ptr_to_ptr(ref->ctrl, ref->ctrl->p_cur, ref->p_req); 1060 ref->valid_p_req = true; 1061 } 1062 1063 /* Copy the request value to the new value */ 1064 void req_to_new(struct v4l2_ctrl_ref *ref) 1065 { 1066 if (!ref) 1067 return; 1068 if (ref->valid_p_req) 1069 ptr_to_ptr(ref->ctrl, ref->p_req, ref->ctrl->p_new); 1070 else 1071 ptr_to_ptr(ref->ctrl, ref->ctrl->p_cur, ref->ctrl->p_new); 1072 } 1073 1074 /* Control range checking */ 1075 int check_range(enum v4l2_ctrl_type type, 1076 s64 min, s64 max, u64 step, s64 def) 1077 { 1078 switch (type) { 1079 case V4L2_CTRL_TYPE_BOOLEAN: 1080 if (step != 1 || max > 1 || min < 0) 1081 return -ERANGE; 1082 fallthrough; 1083 case V4L2_CTRL_TYPE_U8: 1084 case V4L2_CTRL_TYPE_U16: 1085 case V4L2_CTRL_TYPE_U32: 1086 case V4L2_CTRL_TYPE_INTEGER: 1087 case V4L2_CTRL_TYPE_INTEGER64: 1088 if (step == 0 || min > max || def < min || def > max) 1089 return -ERANGE; 1090 return 0; 1091 case V4L2_CTRL_TYPE_BITMASK: 1092 if (step || min || !max || (def & ~max)) 1093 return -ERANGE; 1094 return 0; 1095 case V4L2_CTRL_TYPE_MENU: 1096 case V4L2_CTRL_TYPE_INTEGER_MENU: 1097 if (min > max || def < min || def > max) 1098 return -ERANGE; 1099 /* Note: step == menu_skip_mask for menu controls. 1100 So here we check if the default value is masked out. */ 1101 if (step && ((1 << def) & step)) 1102 return -EINVAL; 1103 return 0; 1104 case V4L2_CTRL_TYPE_STRING: 1105 if (min > max || min < 0 || step < 1 || def) 1106 return -ERANGE; 1107 return 0; 1108 default: 1109 return 0; 1110 } 1111 } 1112 1113 /* Validate a new control */ 1114 int validate_new(const struct v4l2_ctrl *ctrl, union v4l2_ctrl_ptr p_new) 1115 { 1116 unsigned idx; 1117 int err = 0; 1118 1119 for (idx = 0; !err && idx < ctrl->elems; idx++) 1120 err = ctrl->type_ops->validate(ctrl, idx, p_new); 1121 return err; 1122 } 1123 1124 /* Set the handler's error code if it wasn't set earlier already */ 1125 static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err) 1126 { 1127 if (hdl->error == 0) 1128 hdl->error = err; 1129 return err; 1130 } 1131 1132 /* Initialize the handler */ 1133 int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl, 1134 unsigned nr_of_controls_hint, 1135 struct lock_class_key *key, const char *name) 1136 { 1137 mutex_init(&hdl->_lock); 1138 hdl->lock = &hdl->_lock; 1139 lockdep_set_class_and_name(hdl->lock, key, name); 1140 INIT_LIST_HEAD(&hdl->ctrls); 1141 INIT_LIST_HEAD(&hdl->ctrl_refs); 1142 hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8; 1143 hdl->buckets = kvmalloc_array(hdl->nr_of_buckets, 1144 sizeof(hdl->buckets[0]), 1145 GFP_KERNEL | __GFP_ZERO); 1146 hdl->error = hdl->buckets ? 0 : -ENOMEM; 1147 v4l2_ctrl_handler_init_request(hdl); 1148 return hdl->error; 1149 } 1150 EXPORT_SYMBOL(v4l2_ctrl_handler_init_class); 1151 1152 /* Free all controls and control refs */ 1153 void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl) 1154 { 1155 struct v4l2_ctrl_ref *ref, *next_ref; 1156 struct v4l2_ctrl *ctrl, *next_ctrl; 1157 struct v4l2_subscribed_event *sev, *next_sev; 1158 1159 if (hdl == NULL || hdl->buckets == NULL) 1160 return; 1161 1162 v4l2_ctrl_handler_free_request(hdl); 1163 1164 mutex_lock(hdl->lock); 1165 /* Free all nodes */ 1166 list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) { 1167 list_del(&ref->node); 1168 kfree(ref); 1169 } 1170 /* Free all controls owned by the handler */ 1171 list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) { 1172 list_del(&ctrl->node); 1173 list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node) 1174 list_del(&sev->node); 1175 kvfree(ctrl); 1176 } 1177 kvfree(hdl->buckets); 1178 hdl->buckets = NULL; 1179 hdl->cached = NULL; 1180 hdl->error = 0; 1181 mutex_unlock(hdl->lock); 1182 mutex_destroy(&hdl->_lock); 1183 } 1184 EXPORT_SYMBOL(v4l2_ctrl_handler_free); 1185 1186 /* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer 1187 be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing 1188 with applications that do not use the NEXT_CTRL flag. 1189 1190 We just find the n-th private user control. It's O(N), but that should not 1191 be an issue in this particular case. */ 1192 static struct v4l2_ctrl_ref *find_private_ref( 1193 struct v4l2_ctrl_handler *hdl, u32 id) 1194 { 1195 struct v4l2_ctrl_ref *ref; 1196 1197 id -= V4L2_CID_PRIVATE_BASE; 1198 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 1199 /* Search for private user controls that are compatible with 1200 VIDIOC_G/S_CTRL. */ 1201 if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER && 1202 V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) { 1203 if (!ref->ctrl->is_int) 1204 continue; 1205 if (id == 0) 1206 return ref; 1207 id--; 1208 } 1209 } 1210 return NULL; 1211 } 1212 1213 /* Find a control with the given ID. */ 1214 struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id) 1215 { 1216 struct v4l2_ctrl_ref *ref; 1217 int bucket; 1218 1219 id &= V4L2_CTRL_ID_MASK; 1220 1221 /* Old-style private controls need special handling */ 1222 if (id >= V4L2_CID_PRIVATE_BASE) 1223 return find_private_ref(hdl, id); 1224 bucket = id % hdl->nr_of_buckets; 1225 1226 /* Simple optimization: cache the last control found */ 1227 if (hdl->cached && hdl->cached->ctrl->id == id) 1228 return hdl->cached; 1229 1230 /* Not in cache, search the hash */ 1231 ref = hdl->buckets ? hdl->buckets[bucket] : NULL; 1232 while (ref && ref->ctrl->id != id) 1233 ref = ref->next; 1234 1235 if (ref) 1236 hdl->cached = ref; /* cache it! */ 1237 return ref; 1238 } 1239 1240 /* Find a control with the given ID. Take the handler's lock first. */ 1241 struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id) 1242 { 1243 struct v4l2_ctrl_ref *ref = NULL; 1244 1245 if (hdl) { 1246 mutex_lock(hdl->lock); 1247 ref = find_ref(hdl, id); 1248 mutex_unlock(hdl->lock); 1249 } 1250 return ref; 1251 } 1252 1253 /* Find a control with the given ID. */ 1254 struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id) 1255 { 1256 struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id); 1257 1258 return ref ? ref->ctrl : NULL; 1259 } 1260 EXPORT_SYMBOL(v4l2_ctrl_find); 1261 1262 /* Allocate a new v4l2_ctrl_ref and hook it into the handler. */ 1263 int handler_new_ref(struct v4l2_ctrl_handler *hdl, 1264 struct v4l2_ctrl *ctrl, 1265 struct v4l2_ctrl_ref **ctrl_ref, 1266 bool from_other_dev, bool allocate_req) 1267 { 1268 struct v4l2_ctrl_ref *ref; 1269 struct v4l2_ctrl_ref *new_ref; 1270 u32 id = ctrl->id; 1271 u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1; 1272 int bucket = id % hdl->nr_of_buckets; /* which bucket to use */ 1273 unsigned int size_extra_req = 0; 1274 1275 if (ctrl_ref) 1276 *ctrl_ref = NULL; 1277 1278 /* 1279 * Automatically add the control class if it is not yet present and 1280 * the new control is not a compound control. 1281 */ 1282 if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES && 1283 id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL) 1284 if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0)) 1285 return hdl->error; 1286 1287 if (hdl->error) 1288 return hdl->error; 1289 1290 if (allocate_req) 1291 size_extra_req = ctrl->elems * ctrl->elem_size; 1292 new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL); 1293 if (!new_ref) 1294 return handler_set_err(hdl, -ENOMEM); 1295 new_ref->ctrl = ctrl; 1296 new_ref->from_other_dev = from_other_dev; 1297 if (size_extra_req) 1298 new_ref->p_req.p = &new_ref[1]; 1299 1300 INIT_LIST_HEAD(&new_ref->node); 1301 1302 mutex_lock(hdl->lock); 1303 1304 /* Add immediately at the end of the list if the list is empty, or if 1305 the last element in the list has a lower ID. 1306 This ensures that when elements are added in ascending order the 1307 insertion is an O(1) operation. */ 1308 if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) { 1309 list_add_tail(&new_ref->node, &hdl->ctrl_refs); 1310 goto insert_in_hash; 1311 } 1312 1313 /* Find insert position in sorted list */ 1314 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 1315 if (ref->ctrl->id < id) 1316 continue; 1317 /* Don't add duplicates */ 1318 if (ref->ctrl->id == id) { 1319 kfree(new_ref); 1320 goto unlock; 1321 } 1322 list_add(&new_ref->node, ref->node.prev); 1323 break; 1324 } 1325 1326 insert_in_hash: 1327 /* Insert the control node in the hash */ 1328 new_ref->next = hdl->buckets[bucket]; 1329 hdl->buckets[bucket] = new_ref; 1330 if (ctrl_ref) 1331 *ctrl_ref = new_ref; 1332 if (ctrl->handler == hdl) { 1333 /* By default each control starts in a cluster of its own. 1334 * new_ref->ctrl is basically a cluster array with one 1335 * element, so that's perfect to use as the cluster pointer. 1336 * But only do this for the handler that owns the control. 1337 */ 1338 ctrl->cluster = &new_ref->ctrl; 1339 ctrl->ncontrols = 1; 1340 } 1341 1342 unlock: 1343 mutex_unlock(hdl->lock); 1344 return 0; 1345 } 1346 1347 /* Add a new control */ 1348 static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl, 1349 const struct v4l2_ctrl_ops *ops, 1350 const struct v4l2_ctrl_type_ops *type_ops, 1351 u32 id, const char *name, enum v4l2_ctrl_type type, 1352 s64 min, s64 max, u64 step, s64 def, 1353 const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size, 1354 u32 flags, const char * const *qmenu, 1355 const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def, 1356 void *priv) 1357 { 1358 struct v4l2_ctrl *ctrl; 1359 unsigned sz_extra; 1360 unsigned nr_of_dims = 0; 1361 unsigned elems = 1; 1362 bool is_array; 1363 unsigned tot_ctrl_size; 1364 unsigned idx; 1365 void *data; 1366 int err; 1367 1368 if (hdl->error) 1369 return NULL; 1370 1371 while (dims && dims[nr_of_dims]) { 1372 elems *= dims[nr_of_dims]; 1373 nr_of_dims++; 1374 if (nr_of_dims == V4L2_CTRL_MAX_DIMS) 1375 break; 1376 } 1377 is_array = nr_of_dims > 0; 1378 1379 /* Prefill elem_size for all types handled by std_type_ops */ 1380 switch ((u32)type) { 1381 case V4L2_CTRL_TYPE_INTEGER64: 1382 elem_size = sizeof(s64); 1383 break; 1384 case V4L2_CTRL_TYPE_STRING: 1385 elem_size = max + 1; 1386 break; 1387 case V4L2_CTRL_TYPE_U8: 1388 elem_size = sizeof(u8); 1389 break; 1390 case V4L2_CTRL_TYPE_U16: 1391 elem_size = sizeof(u16); 1392 break; 1393 case V4L2_CTRL_TYPE_U32: 1394 elem_size = sizeof(u32); 1395 break; 1396 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 1397 elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence); 1398 break; 1399 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 1400 elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture); 1401 break; 1402 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 1403 elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation); 1404 break; 1405 case V4L2_CTRL_TYPE_FWHT_PARAMS: 1406 elem_size = sizeof(struct v4l2_ctrl_fwht_params); 1407 break; 1408 case V4L2_CTRL_TYPE_H264_SPS: 1409 elem_size = sizeof(struct v4l2_ctrl_h264_sps); 1410 break; 1411 case V4L2_CTRL_TYPE_H264_PPS: 1412 elem_size = sizeof(struct v4l2_ctrl_h264_pps); 1413 break; 1414 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 1415 elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix); 1416 break; 1417 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 1418 elem_size = sizeof(struct v4l2_ctrl_h264_slice_params); 1419 break; 1420 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 1421 elem_size = sizeof(struct v4l2_ctrl_h264_decode_params); 1422 break; 1423 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 1424 elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights); 1425 break; 1426 case V4L2_CTRL_TYPE_VP8_FRAME: 1427 elem_size = sizeof(struct v4l2_ctrl_vp8_frame); 1428 break; 1429 case V4L2_CTRL_TYPE_HEVC_SPS: 1430 elem_size = sizeof(struct v4l2_ctrl_hevc_sps); 1431 break; 1432 case V4L2_CTRL_TYPE_HEVC_PPS: 1433 elem_size = sizeof(struct v4l2_ctrl_hevc_pps); 1434 break; 1435 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 1436 elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params); 1437 break; 1438 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 1439 elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix); 1440 break; 1441 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 1442 elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params); 1443 break; 1444 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 1445 elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info); 1446 break; 1447 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 1448 elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display); 1449 break; 1450 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 1451 elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr); 1452 break; 1453 case V4L2_CTRL_TYPE_VP9_FRAME: 1454 elem_size = sizeof(struct v4l2_ctrl_vp9_frame); 1455 break; 1456 case V4L2_CTRL_TYPE_AREA: 1457 elem_size = sizeof(struct v4l2_area); 1458 break; 1459 default: 1460 if (type < V4L2_CTRL_COMPOUND_TYPES) 1461 elem_size = sizeof(s32); 1462 break; 1463 } 1464 tot_ctrl_size = elem_size * elems; 1465 1466 /* Sanity checks */ 1467 if (id == 0 || name == NULL || !elem_size || 1468 id >= V4L2_CID_PRIVATE_BASE || 1469 (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) || 1470 (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) { 1471 handler_set_err(hdl, -ERANGE); 1472 return NULL; 1473 } 1474 err = check_range(type, min, max, step, def); 1475 if (err) { 1476 handler_set_err(hdl, err); 1477 return NULL; 1478 } 1479 if (is_array && 1480 (type == V4L2_CTRL_TYPE_BUTTON || 1481 type == V4L2_CTRL_TYPE_CTRL_CLASS)) { 1482 handler_set_err(hdl, -EINVAL); 1483 return NULL; 1484 } 1485 1486 sz_extra = 0; 1487 if (type == V4L2_CTRL_TYPE_BUTTON) 1488 flags |= V4L2_CTRL_FLAG_WRITE_ONLY | 1489 V4L2_CTRL_FLAG_EXECUTE_ON_WRITE; 1490 else if (type == V4L2_CTRL_TYPE_CTRL_CLASS) 1491 flags |= V4L2_CTRL_FLAG_READ_ONLY; 1492 else if (type == V4L2_CTRL_TYPE_INTEGER64 || 1493 type == V4L2_CTRL_TYPE_STRING || 1494 type >= V4L2_CTRL_COMPOUND_TYPES || 1495 is_array) 1496 sz_extra += 2 * tot_ctrl_size; 1497 1498 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) 1499 sz_extra += elem_size; 1500 1501 ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL); 1502 if (ctrl == NULL) { 1503 handler_set_err(hdl, -ENOMEM); 1504 return NULL; 1505 } 1506 1507 INIT_LIST_HEAD(&ctrl->node); 1508 INIT_LIST_HEAD(&ctrl->ev_subs); 1509 ctrl->handler = hdl; 1510 ctrl->ops = ops; 1511 ctrl->type_ops = type_ops ? type_ops : &std_type_ops; 1512 ctrl->id = id; 1513 ctrl->name = name; 1514 ctrl->type = type; 1515 ctrl->flags = flags; 1516 ctrl->minimum = min; 1517 ctrl->maximum = max; 1518 ctrl->step = step; 1519 ctrl->default_value = def; 1520 ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING; 1521 ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string; 1522 ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64; 1523 ctrl->is_array = is_array; 1524 ctrl->elems = elems; 1525 ctrl->nr_of_dims = nr_of_dims; 1526 if (nr_of_dims) 1527 memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0])); 1528 ctrl->elem_size = elem_size; 1529 if (type == V4L2_CTRL_TYPE_MENU) 1530 ctrl->qmenu = qmenu; 1531 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU) 1532 ctrl->qmenu_int = qmenu_int; 1533 ctrl->priv = priv; 1534 ctrl->cur.val = ctrl->val = def; 1535 data = &ctrl[1]; 1536 1537 if (!ctrl->is_int) { 1538 ctrl->p_new.p = data; 1539 ctrl->p_cur.p = data + tot_ctrl_size; 1540 } else { 1541 ctrl->p_new.p = &ctrl->val; 1542 ctrl->p_cur.p = &ctrl->cur.val; 1543 } 1544 1545 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) { 1546 ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size; 1547 memcpy(ctrl->p_def.p, p_def.p_const, elem_size); 1548 } 1549 1550 for (idx = 0; idx < elems; idx++) { 1551 ctrl->type_ops->init(ctrl, idx, ctrl->p_cur); 1552 ctrl->type_ops->init(ctrl, idx, ctrl->p_new); 1553 } 1554 1555 if (handler_new_ref(hdl, ctrl, NULL, false, false)) { 1556 kvfree(ctrl); 1557 return NULL; 1558 } 1559 mutex_lock(hdl->lock); 1560 list_add_tail(&ctrl->node, &hdl->ctrls); 1561 mutex_unlock(hdl->lock); 1562 return ctrl; 1563 } 1564 1565 struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl, 1566 const struct v4l2_ctrl_config *cfg, void *priv) 1567 { 1568 bool is_menu; 1569 struct v4l2_ctrl *ctrl; 1570 const char *name = cfg->name; 1571 const char * const *qmenu = cfg->qmenu; 1572 const s64 *qmenu_int = cfg->qmenu_int; 1573 enum v4l2_ctrl_type type = cfg->type; 1574 u32 flags = cfg->flags; 1575 s64 min = cfg->min; 1576 s64 max = cfg->max; 1577 u64 step = cfg->step; 1578 s64 def = cfg->def; 1579 1580 if (name == NULL) 1581 v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step, 1582 &def, &flags); 1583 1584 is_menu = (type == V4L2_CTRL_TYPE_MENU || 1585 type == V4L2_CTRL_TYPE_INTEGER_MENU); 1586 if (is_menu) 1587 WARN_ON(step); 1588 else 1589 WARN_ON(cfg->menu_skip_mask); 1590 if (type == V4L2_CTRL_TYPE_MENU && !qmenu) { 1591 qmenu = v4l2_ctrl_get_menu(cfg->id); 1592 } else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) { 1593 handler_set_err(hdl, -EINVAL); 1594 return NULL; 1595 } 1596 1597 ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name, 1598 type, min, max, 1599 is_menu ? cfg->menu_skip_mask : step, def, 1600 cfg->dims, cfg->elem_size, 1601 flags, qmenu, qmenu_int, cfg->p_def, priv); 1602 if (ctrl) 1603 ctrl->is_private = cfg->is_private; 1604 return ctrl; 1605 } 1606 EXPORT_SYMBOL(v4l2_ctrl_new_custom); 1607 1608 /* Helper function for standard non-menu controls */ 1609 struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl, 1610 const struct v4l2_ctrl_ops *ops, 1611 u32 id, s64 min, s64 max, u64 step, s64 def) 1612 { 1613 const char *name; 1614 enum v4l2_ctrl_type type; 1615 u32 flags; 1616 1617 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 1618 if (type == V4L2_CTRL_TYPE_MENU || 1619 type == V4L2_CTRL_TYPE_INTEGER_MENU || 1620 type >= V4L2_CTRL_COMPOUND_TYPES) { 1621 handler_set_err(hdl, -EINVAL); 1622 return NULL; 1623 } 1624 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 1625 min, max, step, def, NULL, 0, 1626 flags, NULL, NULL, ptr_null, NULL); 1627 } 1628 EXPORT_SYMBOL(v4l2_ctrl_new_std); 1629 1630 /* Helper function for standard menu controls */ 1631 struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl, 1632 const struct v4l2_ctrl_ops *ops, 1633 u32 id, u8 _max, u64 mask, u8 _def) 1634 { 1635 const char * const *qmenu = NULL; 1636 const s64 *qmenu_int = NULL; 1637 unsigned int qmenu_int_len = 0; 1638 const char *name; 1639 enum v4l2_ctrl_type type; 1640 s64 min; 1641 s64 max = _max; 1642 s64 def = _def; 1643 u64 step; 1644 u32 flags; 1645 1646 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 1647 1648 if (type == V4L2_CTRL_TYPE_MENU) 1649 qmenu = v4l2_ctrl_get_menu(id); 1650 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU) 1651 qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len); 1652 1653 if ((!qmenu && !qmenu_int) || (qmenu_int && max > qmenu_int_len)) { 1654 handler_set_err(hdl, -EINVAL); 1655 return NULL; 1656 } 1657 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 1658 0, max, mask, def, NULL, 0, 1659 flags, qmenu, qmenu_int, ptr_null, NULL); 1660 } 1661 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu); 1662 1663 /* Helper function for standard menu controls with driver defined menu */ 1664 struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl, 1665 const struct v4l2_ctrl_ops *ops, u32 id, u8 _max, 1666 u64 mask, u8 _def, const char * const *qmenu) 1667 { 1668 enum v4l2_ctrl_type type; 1669 const char *name; 1670 u32 flags; 1671 u64 step; 1672 s64 min; 1673 s64 max = _max; 1674 s64 def = _def; 1675 1676 /* v4l2_ctrl_new_std_menu_items() should only be called for 1677 * standard controls without a standard menu. 1678 */ 1679 if (v4l2_ctrl_get_menu(id)) { 1680 handler_set_err(hdl, -EINVAL); 1681 return NULL; 1682 } 1683 1684 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 1685 if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) { 1686 handler_set_err(hdl, -EINVAL); 1687 return NULL; 1688 } 1689 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 1690 0, max, mask, def, NULL, 0, 1691 flags, qmenu, NULL, ptr_null, NULL); 1692 1693 } 1694 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items); 1695 1696 /* Helper function for standard compound controls */ 1697 struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl, 1698 const struct v4l2_ctrl_ops *ops, u32 id, 1699 const union v4l2_ctrl_ptr p_def) 1700 { 1701 const char *name; 1702 enum v4l2_ctrl_type type; 1703 u32 flags; 1704 s64 min, max, step, def; 1705 1706 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 1707 if (type < V4L2_CTRL_COMPOUND_TYPES) { 1708 handler_set_err(hdl, -EINVAL); 1709 return NULL; 1710 } 1711 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 1712 min, max, step, def, NULL, 0, 1713 flags, NULL, NULL, p_def, NULL); 1714 } 1715 EXPORT_SYMBOL(v4l2_ctrl_new_std_compound); 1716 1717 /* Helper function for standard integer menu controls */ 1718 struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl, 1719 const struct v4l2_ctrl_ops *ops, 1720 u32 id, u8 _max, u8 _def, const s64 *qmenu_int) 1721 { 1722 const char *name; 1723 enum v4l2_ctrl_type type; 1724 s64 min; 1725 u64 step; 1726 s64 max = _max; 1727 s64 def = _def; 1728 u32 flags; 1729 1730 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 1731 if (type != V4L2_CTRL_TYPE_INTEGER_MENU) { 1732 handler_set_err(hdl, -EINVAL); 1733 return NULL; 1734 } 1735 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 1736 0, max, 0, def, NULL, 0, 1737 flags, NULL, qmenu_int, ptr_null, NULL); 1738 } 1739 EXPORT_SYMBOL(v4l2_ctrl_new_int_menu); 1740 1741 /* Add the controls from another handler to our own. */ 1742 int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl, 1743 struct v4l2_ctrl_handler *add, 1744 bool (*filter)(const struct v4l2_ctrl *ctrl), 1745 bool from_other_dev) 1746 { 1747 struct v4l2_ctrl_ref *ref; 1748 int ret = 0; 1749 1750 /* Do nothing if either handler is NULL or if they are the same */ 1751 if (!hdl || !add || hdl == add) 1752 return 0; 1753 if (hdl->error) 1754 return hdl->error; 1755 mutex_lock(add->lock); 1756 list_for_each_entry(ref, &add->ctrl_refs, node) { 1757 struct v4l2_ctrl *ctrl = ref->ctrl; 1758 1759 /* Skip handler-private controls. */ 1760 if (ctrl->is_private) 1761 continue; 1762 /* And control classes */ 1763 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS) 1764 continue; 1765 /* Filter any unwanted controls */ 1766 if (filter && !filter(ctrl)) 1767 continue; 1768 ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false); 1769 if (ret) 1770 break; 1771 } 1772 mutex_unlock(add->lock); 1773 return ret; 1774 } 1775 EXPORT_SYMBOL(v4l2_ctrl_add_handler); 1776 1777 bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl) 1778 { 1779 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX) 1780 return true; 1781 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX) 1782 return true; 1783 switch (ctrl->id) { 1784 case V4L2_CID_AUDIO_MUTE: 1785 case V4L2_CID_AUDIO_VOLUME: 1786 case V4L2_CID_AUDIO_BALANCE: 1787 case V4L2_CID_AUDIO_BASS: 1788 case V4L2_CID_AUDIO_TREBLE: 1789 case V4L2_CID_AUDIO_LOUDNESS: 1790 return true; 1791 default: 1792 break; 1793 } 1794 return false; 1795 } 1796 EXPORT_SYMBOL(v4l2_ctrl_radio_filter); 1797 1798 /* Cluster controls */ 1799 void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls) 1800 { 1801 bool has_volatiles = false; 1802 int i; 1803 1804 /* The first control is the master control and it must not be NULL */ 1805 if (WARN_ON(ncontrols == 0 || controls[0] == NULL)) 1806 return; 1807 1808 for (i = 0; i < ncontrols; i++) { 1809 if (controls[i]) { 1810 controls[i]->cluster = controls; 1811 controls[i]->ncontrols = ncontrols; 1812 if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE) 1813 has_volatiles = true; 1814 } 1815 } 1816 controls[0]->has_volatiles = has_volatiles; 1817 } 1818 EXPORT_SYMBOL(v4l2_ctrl_cluster); 1819 1820 void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls, 1821 u8 manual_val, bool set_volatile) 1822 { 1823 struct v4l2_ctrl *master = controls[0]; 1824 u32 flag = 0; 1825 int i; 1826 1827 v4l2_ctrl_cluster(ncontrols, controls); 1828 WARN_ON(ncontrols <= 1); 1829 WARN_ON(manual_val < master->minimum || manual_val > master->maximum); 1830 WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl)); 1831 master->is_auto = true; 1832 master->has_volatiles = set_volatile; 1833 master->manual_mode_value = manual_val; 1834 master->flags |= V4L2_CTRL_FLAG_UPDATE; 1835 1836 if (!is_cur_manual(master)) 1837 flag = V4L2_CTRL_FLAG_INACTIVE | 1838 (set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0); 1839 1840 for (i = 1; i < ncontrols; i++) 1841 if (controls[i]) 1842 controls[i]->flags |= flag; 1843 } 1844 EXPORT_SYMBOL(v4l2_ctrl_auto_cluster); 1845 1846 /* 1847 * Obtain the current volatile values of an autocluster and mark them 1848 * as new. 1849 */ 1850 void update_from_auto_cluster(struct v4l2_ctrl *master) 1851 { 1852 int i; 1853 1854 for (i = 1; i < master->ncontrols; i++) 1855 cur_to_new(master->cluster[i]); 1856 if (!call_op(master, g_volatile_ctrl)) 1857 for (i = 1; i < master->ncontrols; i++) 1858 if (master->cluster[i]) 1859 master->cluster[i]->is_new = 1; 1860 } 1861 1862 /* 1863 * Return non-zero if one or more of the controls in the cluster has a new 1864 * value that differs from the current value. 1865 */ 1866 static int cluster_changed(struct v4l2_ctrl *master) 1867 { 1868 bool changed = false; 1869 unsigned int idx; 1870 int i; 1871 1872 for (i = 0; i < master->ncontrols; i++) { 1873 struct v4l2_ctrl *ctrl = master->cluster[i]; 1874 bool ctrl_changed = false; 1875 1876 if (!ctrl) 1877 continue; 1878 1879 if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) { 1880 changed = true; 1881 ctrl_changed = true; 1882 } 1883 1884 /* 1885 * Set has_changed to false to avoid generating 1886 * the event V4L2_EVENT_CTRL_CH_VALUE 1887 */ 1888 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) { 1889 ctrl->has_changed = false; 1890 continue; 1891 } 1892 1893 for (idx = 0; !ctrl_changed && idx < ctrl->elems; idx++) 1894 ctrl_changed = !ctrl->type_ops->equal(ctrl, idx, 1895 ctrl->p_cur, ctrl->p_new); 1896 ctrl->has_changed = ctrl_changed; 1897 changed |= ctrl->has_changed; 1898 } 1899 return changed; 1900 } 1901 1902 /* 1903 * Core function that calls try/s_ctrl and ensures that the new value is 1904 * copied to the current value on a set. 1905 * Must be called with ctrl->handler->lock held. 1906 */ 1907 int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master, 1908 bool set, u32 ch_flags) 1909 { 1910 bool update_flag; 1911 int ret; 1912 int i; 1913 1914 /* 1915 * Go through the cluster and either validate the new value or 1916 * (if no new value was set), copy the current value to the new 1917 * value, ensuring a consistent view for the control ops when 1918 * called. 1919 */ 1920 for (i = 0; i < master->ncontrols; i++) { 1921 struct v4l2_ctrl *ctrl = master->cluster[i]; 1922 1923 if (!ctrl) 1924 continue; 1925 1926 if (!ctrl->is_new) { 1927 cur_to_new(ctrl); 1928 continue; 1929 } 1930 /* 1931 * Check again: it may have changed since the 1932 * previous check in try_or_set_ext_ctrls(). 1933 */ 1934 if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)) 1935 return -EBUSY; 1936 } 1937 1938 ret = call_op(master, try_ctrl); 1939 1940 /* Don't set if there is no change */ 1941 if (ret || !set || !cluster_changed(master)) 1942 return ret; 1943 ret = call_op(master, s_ctrl); 1944 if (ret) 1945 return ret; 1946 1947 /* If OK, then make the new values permanent. */ 1948 update_flag = is_cur_manual(master) != is_new_manual(master); 1949 1950 for (i = 0; i < master->ncontrols; i++) { 1951 /* 1952 * If we switch from auto to manual mode, and this cluster 1953 * contains volatile controls, then all non-master controls 1954 * have to be marked as changed. The 'new' value contains 1955 * the volatile value (obtained by update_from_auto_cluster), 1956 * which now has to become the current value. 1957 */ 1958 if (i && update_flag && is_new_manual(master) && 1959 master->has_volatiles && master->cluster[i]) 1960 master->cluster[i]->has_changed = true; 1961 1962 new_to_cur(fh, master->cluster[i], ch_flags | 1963 ((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0)); 1964 } 1965 return 0; 1966 } 1967 1968 /* Activate/deactivate a control. */ 1969 void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active) 1970 { 1971 /* invert since the actual flag is called 'inactive' */ 1972 bool inactive = !active; 1973 bool old; 1974 1975 if (ctrl == NULL) 1976 return; 1977 1978 if (inactive) 1979 /* set V4L2_CTRL_FLAG_INACTIVE */ 1980 old = test_and_set_bit(4, &ctrl->flags); 1981 else 1982 /* clear V4L2_CTRL_FLAG_INACTIVE */ 1983 old = test_and_clear_bit(4, &ctrl->flags); 1984 if (old != inactive) 1985 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS); 1986 } 1987 EXPORT_SYMBOL(v4l2_ctrl_activate); 1988 1989 void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed) 1990 { 1991 bool old; 1992 1993 if (ctrl == NULL) 1994 return; 1995 1996 lockdep_assert_held(ctrl->handler->lock); 1997 1998 if (grabbed) 1999 /* set V4L2_CTRL_FLAG_GRABBED */ 2000 old = test_and_set_bit(1, &ctrl->flags); 2001 else 2002 /* clear V4L2_CTRL_FLAG_GRABBED */ 2003 old = test_and_clear_bit(1, &ctrl->flags); 2004 if (old != grabbed) 2005 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS); 2006 } 2007 EXPORT_SYMBOL(__v4l2_ctrl_grab); 2008 2009 /* Call s_ctrl for all controls owned by the handler */ 2010 int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl) 2011 { 2012 struct v4l2_ctrl *ctrl; 2013 int ret = 0; 2014 2015 if (hdl == NULL) 2016 return 0; 2017 2018 lockdep_assert_held(hdl->lock); 2019 2020 list_for_each_entry(ctrl, &hdl->ctrls, node) 2021 ctrl->done = false; 2022 2023 list_for_each_entry(ctrl, &hdl->ctrls, node) { 2024 struct v4l2_ctrl *master = ctrl->cluster[0]; 2025 int i; 2026 2027 /* Skip if this control was already handled by a cluster. */ 2028 /* Skip button controls and read-only controls. */ 2029 if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON || 2030 (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY)) 2031 continue; 2032 2033 for (i = 0; i < master->ncontrols; i++) { 2034 if (master->cluster[i]) { 2035 cur_to_new(master->cluster[i]); 2036 master->cluster[i]->is_new = 1; 2037 master->cluster[i]->done = true; 2038 } 2039 } 2040 ret = call_op(master, s_ctrl); 2041 if (ret) 2042 break; 2043 } 2044 2045 return ret; 2046 } 2047 EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup); 2048 2049 int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl) 2050 { 2051 int ret; 2052 2053 if (hdl == NULL) 2054 return 0; 2055 2056 mutex_lock(hdl->lock); 2057 ret = __v4l2_ctrl_handler_setup(hdl); 2058 mutex_unlock(hdl->lock); 2059 2060 return ret; 2061 } 2062 EXPORT_SYMBOL(v4l2_ctrl_handler_setup); 2063 2064 /* Log the control name and value */ 2065 static void log_ctrl(const struct v4l2_ctrl *ctrl, 2066 const char *prefix, const char *colon) 2067 { 2068 if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY)) 2069 return; 2070 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS) 2071 return; 2072 2073 pr_info("%s%s%s: ", prefix, colon, ctrl->name); 2074 2075 ctrl->type_ops->log(ctrl); 2076 2077 if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE | 2078 V4L2_CTRL_FLAG_GRABBED | 2079 V4L2_CTRL_FLAG_VOLATILE)) { 2080 if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE) 2081 pr_cont(" inactive"); 2082 if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED) 2083 pr_cont(" grabbed"); 2084 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) 2085 pr_cont(" volatile"); 2086 } 2087 pr_cont("\n"); 2088 } 2089 2090 /* Log all controls owned by the handler */ 2091 void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl, 2092 const char *prefix) 2093 { 2094 struct v4l2_ctrl *ctrl; 2095 const char *colon = ""; 2096 int len; 2097 2098 if (!hdl) 2099 return; 2100 if (!prefix) 2101 prefix = ""; 2102 len = strlen(prefix); 2103 if (len && prefix[len - 1] != ' ') 2104 colon = ": "; 2105 mutex_lock(hdl->lock); 2106 list_for_each_entry(ctrl, &hdl->ctrls, node) 2107 if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED)) 2108 log_ctrl(ctrl, prefix, colon); 2109 mutex_unlock(hdl->lock); 2110 } 2111 EXPORT_SYMBOL(v4l2_ctrl_handler_log_status); 2112 2113 int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl, 2114 const struct v4l2_ctrl_ops *ctrl_ops, 2115 const struct v4l2_fwnode_device_properties *p) 2116 { 2117 if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) { 2118 u32 orientation_ctrl; 2119 2120 switch (p->orientation) { 2121 case V4L2_FWNODE_ORIENTATION_FRONT: 2122 orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT; 2123 break; 2124 case V4L2_FWNODE_ORIENTATION_BACK: 2125 orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK; 2126 break; 2127 case V4L2_FWNODE_ORIENTATION_EXTERNAL: 2128 orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL; 2129 break; 2130 default: 2131 return -EINVAL; 2132 } 2133 if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops, 2134 V4L2_CID_CAMERA_ORIENTATION, 2135 V4L2_CAMERA_ORIENTATION_EXTERNAL, 0, 2136 orientation_ctrl)) 2137 return hdl->error; 2138 } 2139 2140 if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) { 2141 if (!v4l2_ctrl_new_std(hdl, ctrl_ops, 2142 V4L2_CID_CAMERA_SENSOR_ROTATION, 2143 p->rotation, p->rotation, 1, 2144 p->rotation)) 2145 return hdl->error; 2146 } 2147 2148 return hdl->error; 2149 } 2150 EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties); 2151