1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * V4L2 controls framework core implementation. 4 * 5 * Copyright (C) 2010-2021 Hans Verkuil <hverkuil-cisco@xs4all.nl> 6 */ 7 8 #include <linux/export.h> 9 #include <linux/mm.h> 10 #include <linux/slab.h> 11 #include <media/v4l2-ctrls.h> 12 #include <media/v4l2-event.h> 13 #include <media/v4l2-fwnode.h> 14 15 #include "v4l2-ctrls-priv.h" 16 17 static const union v4l2_ctrl_ptr ptr_null; 18 19 static void fill_event(struct v4l2_event *ev, struct v4l2_ctrl *ctrl, 20 u32 changes) 21 { 22 memset(ev, 0, sizeof(*ev)); 23 ev->type = V4L2_EVENT_CTRL; 24 ev->id = ctrl->id; 25 ev->u.ctrl.changes = changes; 26 ev->u.ctrl.type = ctrl->type; 27 ev->u.ctrl.flags = user_flags(ctrl); 28 if (ctrl->is_ptr) 29 ev->u.ctrl.value64 = 0; 30 else 31 ev->u.ctrl.value64 = *ctrl->p_cur.p_s64; 32 ev->u.ctrl.minimum = ctrl->minimum; 33 ev->u.ctrl.maximum = ctrl->maximum; 34 if (ctrl->type == V4L2_CTRL_TYPE_MENU 35 || ctrl->type == V4L2_CTRL_TYPE_INTEGER_MENU) 36 ev->u.ctrl.step = 1; 37 else 38 ev->u.ctrl.step = ctrl->step; 39 ev->u.ctrl.default_value = ctrl->default_value; 40 } 41 42 void send_initial_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl) 43 { 44 struct v4l2_event ev; 45 u32 changes = V4L2_EVENT_CTRL_CH_FLAGS; 46 47 if (!(ctrl->flags & V4L2_CTRL_FLAG_WRITE_ONLY)) 48 changes |= V4L2_EVENT_CTRL_CH_VALUE; 49 fill_event(&ev, ctrl, changes); 50 v4l2_event_queue_fh(fh, &ev); 51 } 52 53 void send_event(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 changes) 54 { 55 struct v4l2_event ev; 56 struct v4l2_subscribed_event *sev; 57 58 if (list_empty(&ctrl->ev_subs)) 59 return; 60 fill_event(&ev, ctrl, changes); 61 62 list_for_each_entry(sev, &ctrl->ev_subs, node) 63 if (sev->fh != fh || 64 (sev->flags & V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK)) 65 v4l2_event_queue_fh(sev->fh, &ev); 66 } 67 68 bool v4l2_ctrl_type_op_equal(const struct v4l2_ctrl *ctrl, 69 union v4l2_ctrl_ptr ptr1, union v4l2_ctrl_ptr ptr2) 70 { 71 unsigned int i; 72 73 switch (ctrl->type) { 74 case V4L2_CTRL_TYPE_BUTTON: 75 return false; 76 case V4L2_CTRL_TYPE_STRING: 77 for (i = 0; i < ctrl->elems; i++) { 78 unsigned int idx = i * ctrl->elem_size; 79 80 /* strings are always 0-terminated */ 81 if (strcmp(ptr1.p_char + idx, ptr2.p_char + idx)) 82 return false; 83 } 84 return true; 85 default: 86 return !memcmp(ptr1.p_const, ptr2.p_const, 87 ctrl->elems * ctrl->elem_size); 88 } 89 } 90 EXPORT_SYMBOL(v4l2_ctrl_type_op_equal); 91 92 /* Default intra MPEG-2 quantisation coefficients, from the specification. */ 93 static const u8 mpeg2_intra_quant_matrix[64] = { 94 8, 16, 16, 19, 16, 19, 22, 22, 95 22, 22, 22, 22, 26, 24, 26, 27, 96 27, 27, 26, 26, 26, 26, 27, 27, 97 27, 29, 29, 29, 34, 34, 34, 29, 98 29, 29, 27, 27, 29, 29, 32, 32, 99 34, 34, 37, 38, 37, 35, 35, 34, 100 35, 38, 38, 40, 40, 40, 48, 48, 101 46, 46, 56, 56, 58, 69, 69, 83 102 }; 103 104 static void std_init_compound(const struct v4l2_ctrl *ctrl, u32 idx, 105 union v4l2_ctrl_ptr ptr) 106 { 107 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence; 108 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture; 109 struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quant; 110 struct v4l2_ctrl_vp8_frame *p_vp8_frame; 111 struct v4l2_ctrl_vp9_frame *p_vp9_frame; 112 struct v4l2_ctrl_fwht_params *p_fwht_params; 113 struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix; 114 struct v4l2_ctrl_av1_sequence *p_av1_sequence; 115 void *p = ptr.p + idx * ctrl->elem_size; 116 117 if (ctrl->p_def.p_const) 118 memcpy(p, ctrl->p_def.p_const, ctrl->elem_size); 119 else 120 memset(p, 0, ctrl->elem_size); 121 122 switch ((u32)ctrl->type) { 123 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 124 p_mpeg2_sequence = p; 125 126 /* 4:2:0 */ 127 p_mpeg2_sequence->chroma_format = 1; 128 break; 129 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 130 p_mpeg2_picture = p; 131 132 /* interlaced top field */ 133 p_mpeg2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD; 134 p_mpeg2_picture->picture_coding_type = 135 V4L2_MPEG2_PIC_CODING_TYPE_I; 136 break; 137 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 138 p_mpeg2_quant = p; 139 140 memcpy(p_mpeg2_quant->intra_quantiser_matrix, 141 mpeg2_intra_quant_matrix, 142 ARRAY_SIZE(mpeg2_intra_quant_matrix)); 143 /* 144 * The default non-intra MPEG-2 quantisation 145 * coefficients are all 16, as per the specification. 146 */ 147 memset(p_mpeg2_quant->non_intra_quantiser_matrix, 16, 148 sizeof(p_mpeg2_quant->non_intra_quantiser_matrix)); 149 break; 150 case V4L2_CTRL_TYPE_VP8_FRAME: 151 p_vp8_frame = p; 152 p_vp8_frame->num_dct_parts = 1; 153 break; 154 case V4L2_CTRL_TYPE_VP9_FRAME: 155 p_vp9_frame = p; 156 p_vp9_frame->profile = 0; 157 p_vp9_frame->bit_depth = 8; 158 p_vp9_frame->flags |= V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING | 159 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING; 160 break; 161 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 162 p_av1_sequence = p; 163 p_av1_sequence->bit_depth = 8; 164 break; 165 case V4L2_CTRL_TYPE_FWHT_PARAMS: 166 p_fwht_params = p; 167 p_fwht_params->version = V4L2_FWHT_VERSION; 168 p_fwht_params->width = 1280; 169 p_fwht_params->height = 720; 170 p_fwht_params->flags = V4L2_FWHT_FL_PIXENC_YUV | 171 (2 << V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET); 172 break; 173 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 174 p_h264_scaling_matrix = p; 175 /* 176 * The default (flat) H.264 scaling matrix when none are 177 * specified in the bitstream, this is according to formulas 178 * (7-8) and (7-9) of the specification. 179 */ 180 memset(p_h264_scaling_matrix, 16, sizeof(*p_h264_scaling_matrix)); 181 break; 182 } 183 } 184 185 void v4l2_ctrl_type_op_init(const struct v4l2_ctrl *ctrl, u32 from_idx, 186 union v4l2_ctrl_ptr ptr) 187 { 188 unsigned int i; 189 u32 tot_elems = ctrl->elems; 190 u32 elems = tot_elems - from_idx; 191 192 if (from_idx >= tot_elems) 193 return; 194 195 switch (ctrl->type) { 196 case V4L2_CTRL_TYPE_STRING: 197 for (i = from_idx; i < tot_elems; i++) { 198 unsigned int offset = i * ctrl->elem_size; 199 200 memset(ptr.p_char + offset, ' ', ctrl->minimum); 201 ptr.p_char[offset + ctrl->minimum] = '\0'; 202 } 203 break; 204 case V4L2_CTRL_TYPE_INTEGER64: 205 if (ctrl->default_value) { 206 for (i = from_idx; i < tot_elems; i++) 207 ptr.p_s64[i] = ctrl->default_value; 208 } else { 209 memset(ptr.p_s64 + from_idx, 0, elems * sizeof(s64)); 210 } 211 break; 212 case V4L2_CTRL_TYPE_INTEGER: 213 case V4L2_CTRL_TYPE_INTEGER_MENU: 214 case V4L2_CTRL_TYPE_MENU: 215 case V4L2_CTRL_TYPE_BITMASK: 216 case V4L2_CTRL_TYPE_BOOLEAN: 217 if (ctrl->default_value) { 218 for (i = from_idx; i < tot_elems; i++) 219 ptr.p_s32[i] = ctrl->default_value; 220 } else { 221 memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32)); 222 } 223 break; 224 case V4L2_CTRL_TYPE_BUTTON: 225 case V4L2_CTRL_TYPE_CTRL_CLASS: 226 memset(ptr.p_s32 + from_idx, 0, elems * sizeof(s32)); 227 break; 228 case V4L2_CTRL_TYPE_U8: 229 memset(ptr.p_u8 + from_idx, ctrl->default_value, elems); 230 break; 231 case V4L2_CTRL_TYPE_U16: 232 if (ctrl->default_value) { 233 for (i = from_idx; i < tot_elems; i++) 234 ptr.p_u16[i] = ctrl->default_value; 235 } else { 236 memset(ptr.p_u16 + from_idx, 0, elems * sizeof(u16)); 237 } 238 break; 239 case V4L2_CTRL_TYPE_U32: 240 if (ctrl->default_value) { 241 for (i = from_idx; i < tot_elems; i++) 242 ptr.p_u32[i] = ctrl->default_value; 243 } else { 244 memset(ptr.p_u32 + from_idx, 0, elems * sizeof(u32)); 245 } 246 break; 247 default: 248 for (i = from_idx; i < tot_elems; i++) 249 std_init_compound(ctrl, i, ptr); 250 break; 251 } 252 } 253 EXPORT_SYMBOL(v4l2_ctrl_type_op_init); 254 255 void v4l2_ctrl_type_op_log(const struct v4l2_ctrl *ctrl) 256 { 257 union v4l2_ctrl_ptr ptr = ctrl->p_cur; 258 259 if (ctrl->is_array) { 260 unsigned i; 261 262 for (i = 0; i < ctrl->nr_of_dims; i++) 263 pr_cont("[%u]", ctrl->dims[i]); 264 pr_cont(" "); 265 } 266 267 switch (ctrl->type) { 268 case V4L2_CTRL_TYPE_INTEGER: 269 pr_cont("%d", *ptr.p_s32); 270 break; 271 case V4L2_CTRL_TYPE_BOOLEAN: 272 pr_cont("%s", *ptr.p_s32 ? "true" : "false"); 273 break; 274 case V4L2_CTRL_TYPE_MENU: 275 pr_cont("%s", ctrl->qmenu[*ptr.p_s32]); 276 break; 277 case V4L2_CTRL_TYPE_INTEGER_MENU: 278 pr_cont("%lld", ctrl->qmenu_int[*ptr.p_s32]); 279 break; 280 case V4L2_CTRL_TYPE_BITMASK: 281 pr_cont("0x%08x", *ptr.p_s32); 282 break; 283 case V4L2_CTRL_TYPE_INTEGER64: 284 pr_cont("%lld", *ptr.p_s64); 285 break; 286 case V4L2_CTRL_TYPE_STRING: 287 pr_cont("%s", ptr.p_char); 288 break; 289 case V4L2_CTRL_TYPE_U8: 290 pr_cont("%u", (unsigned)*ptr.p_u8); 291 break; 292 case V4L2_CTRL_TYPE_U16: 293 pr_cont("%u", (unsigned)*ptr.p_u16); 294 break; 295 case V4L2_CTRL_TYPE_U32: 296 pr_cont("%u", (unsigned)*ptr.p_u32); 297 break; 298 case V4L2_CTRL_TYPE_H264_SPS: 299 pr_cont("H264_SPS"); 300 break; 301 case V4L2_CTRL_TYPE_H264_PPS: 302 pr_cont("H264_PPS"); 303 break; 304 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 305 pr_cont("H264_SCALING_MATRIX"); 306 break; 307 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 308 pr_cont("H264_SLICE_PARAMS"); 309 break; 310 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 311 pr_cont("H264_DECODE_PARAMS"); 312 break; 313 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 314 pr_cont("H264_PRED_WEIGHTS"); 315 break; 316 case V4L2_CTRL_TYPE_FWHT_PARAMS: 317 pr_cont("FWHT_PARAMS"); 318 break; 319 case V4L2_CTRL_TYPE_VP8_FRAME: 320 pr_cont("VP8_FRAME"); 321 break; 322 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 323 pr_cont("HDR10_CLL_INFO"); 324 break; 325 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 326 pr_cont("HDR10_MASTERING_DISPLAY"); 327 break; 328 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 329 pr_cont("MPEG2_QUANTISATION"); 330 break; 331 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 332 pr_cont("MPEG2_SEQUENCE"); 333 break; 334 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 335 pr_cont("MPEG2_PICTURE"); 336 break; 337 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 338 pr_cont("VP9_COMPRESSED_HDR"); 339 break; 340 case V4L2_CTRL_TYPE_VP9_FRAME: 341 pr_cont("VP9_FRAME"); 342 break; 343 case V4L2_CTRL_TYPE_HEVC_SPS: 344 pr_cont("HEVC_SPS"); 345 break; 346 case V4L2_CTRL_TYPE_HEVC_PPS: 347 pr_cont("HEVC_PPS"); 348 break; 349 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 350 pr_cont("HEVC_SLICE_PARAMS"); 351 break; 352 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 353 pr_cont("HEVC_SCALING_MATRIX"); 354 break; 355 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 356 pr_cont("HEVC_DECODE_PARAMS"); 357 break; 358 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 359 pr_cont("AV1_SEQUENCE"); 360 break; 361 case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY: 362 pr_cont("AV1_TILE_GROUP_ENTRY"); 363 break; 364 case V4L2_CTRL_TYPE_AV1_FRAME: 365 pr_cont("AV1_FRAME"); 366 break; 367 case V4L2_CTRL_TYPE_AV1_FILM_GRAIN: 368 pr_cont("AV1_FILM_GRAIN"); 369 break; 370 371 default: 372 pr_cont("unknown type %d", ctrl->type); 373 break; 374 } 375 } 376 EXPORT_SYMBOL(v4l2_ctrl_type_op_log); 377 378 /* 379 * Round towards the closest legal value. Be careful when we are 380 * close to the maximum range of the control type to prevent 381 * wrap-arounds. 382 */ 383 #define ROUND_TO_RANGE(val, offset_type, ctrl) \ 384 ({ \ 385 offset_type offset; \ 386 if ((ctrl)->maximum >= 0 && \ 387 val >= (ctrl)->maximum - (s32)((ctrl)->step / 2)) \ 388 val = (ctrl)->maximum; \ 389 else \ 390 val += (s32)((ctrl)->step / 2); \ 391 val = clamp_t(typeof(val), val, \ 392 (ctrl)->minimum, (ctrl)->maximum); \ 393 offset = (val) - (ctrl)->minimum; \ 394 offset = (ctrl)->step * (offset / (u32)(ctrl)->step); \ 395 val = (ctrl)->minimum + offset; \ 396 0; \ 397 }) 398 399 /* Validate a new control */ 400 401 #define zero_padding(s) \ 402 memset(&(s).padding, 0, sizeof((s).padding)) 403 #define zero_reserved(s) \ 404 memset(&(s).reserved, 0, sizeof((s).reserved)) 405 406 static int 407 validate_vp9_lf_params(struct v4l2_vp9_loop_filter *lf) 408 { 409 unsigned int i; 410 411 if (lf->flags & ~(V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED | 412 V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE)) 413 return -EINVAL; 414 415 /* That all values are in the accepted range. */ 416 if (lf->level > GENMASK(5, 0)) 417 return -EINVAL; 418 419 if (lf->sharpness > GENMASK(2, 0)) 420 return -EINVAL; 421 422 for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) 423 if (lf->ref_deltas[i] < -63 || lf->ref_deltas[i] > 63) 424 return -EINVAL; 425 426 for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) 427 if (lf->mode_deltas[i] < -63 || lf->mode_deltas[i] > 63) 428 return -EINVAL; 429 430 zero_reserved(*lf); 431 return 0; 432 } 433 434 static int 435 validate_vp9_quant_params(struct v4l2_vp9_quantization *quant) 436 { 437 if (quant->delta_q_y_dc < -15 || quant->delta_q_y_dc > 15 || 438 quant->delta_q_uv_dc < -15 || quant->delta_q_uv_dc > 15 || 439 quant->delta_q_uv_ac < -15 || quant->delta_q_uv_ac > 15) 440 return -EINVAL; 441 442 zero_reserved(*quant); 443 return 0; 444 } 445 446 static int 447 validate_vp9_seg_params(struct v4l2_vp9_segmentation *seg) 448 { 449 unsigned int i, j; 450 451 if (seg->flags & ~(V4L2_VP9_SEGMENTATION_FLAG_ENABLED | 452 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP | 453 V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE | 454 V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA | 455 V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE)) 456 return -EINVAL; 457 458 for (i = 0; i < ARRAY_SIZE(seg->feature_enabled); i++) { 459 if (seg->feature_enabled[i] & 460 ~V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK) 461 return -EINVAL; 462 } 463 464 for (i = 0; i < ARRAY_SIZE(seg->feature_data); i++) { 465 static const int range[] = { 255, 63, 3, 0 }; 466 467 for (j = 0; j < ARRAY_SIZE(seg->feature_data[j]); j++) { 468 if (seg->feature_data[i][j] < -range[j] || 469 seg->feature_data[i][j] > range[j]) 470 return -EINVAL; 471 } 472 } 473 474 zero_reserved(*seg); 475 return 0; 476 } 477 478 static int 479 validate_vp9_compressed_hdr(struct v4l2_ctrl_vp9_compressed_hdr *hdr) 480 { 481 if (hdr->tx_mode > V4L2_VP9_TX_MODE_SELECT) 482 return -EINVAL; 483 484 return 0; 485 } 486 487 static int 488 validate_vp9_frame(struct v4l2_ctrl_vp9_frame *frame) 489 { 490 int ret; 491 492 /* Make sure we're not passed invalid flags. */ 493 if (frame->flags & ~(V4L2_VP9_FRAME_FLAG_KEY_FRAME | 494 V4L2_VP9_FRAME_FLAG_SHOW_FRAME | 495 V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT | 496 V4L2_VP9_FRAME_FLAG_INTRA_ONLY | 497 V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV | 498 V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX | 499 V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE | 500 V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING | 501 V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING | 502 V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING)) 503 return -EINVAL; 504 505 if (frame->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT && 506 frame->flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX) 507 return -EINVAL; 508 509 if (frame->profile > V4L2_VP9_PROFILE_MAX) 510 return -EINVAL; 511 512 if (frame->reset_frame_context > V4L2_VP9_RESET_FRAME_CTX_ALL) 513 return -EINVAL; 514 515 if (frame->frame_context_idx >= V4L2_VP9_NUM_FRAME_CTX) 516 return -EINVAL; 517 518 /* 519 * Profiles 0 and 1 only support 8-bit depth, profiles 2 and 3 only 10 520 * and 12 bit depths. 521 */ 522 if ((frame->profile < 2 && frame->bit_depth != 8) || 523 (frame->profile >= 2 && 524 (frame->bit_depth != 10 && frame->bit_depth != 12))) 525 return -EINVAL; 526 527 /* Profile 0 and 2 only accept YUV 4:2:0. */ 528 if ((frame->profile == 0 || frame->profile == 2) && 529 (!(frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) || 530 !(frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING))) 531 return -EINVAL; 532 533 /* Profile 1 and 3 only accept YUV 4:2:2, 4:4:0 and 4:4:4. */ 534 if ((frame->profile == 1 || frame->profile == 3) && 535 ((frame->flags & V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING) && 536 (frame->flags & V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING))) 537 return -EINVAL; 538 539 if (frame->interpolation_filter > V4L2_VP9_INTERP_FILTER_SWITCHABLE) 540 return -EINVAL; 541 542 /* 543 * According to the spec, tile_cols_log2 shall be less than or equal 544 * to 6. 545 */ 546 if (frame->tile_cols_log2 > 6) 547 return -EINVAL; 548 549 if (frame->reference_mode > V4L2_VP9_REFERENCE_MODE_SELECT) 550 return -EINVAL; 551 552 ret = validate_vp9_lf_params(&frame->lf); 553 if (ret) 554 return ret; 555 556 ret = validate_vp9_quant_params(&frame->quant); 557 if (ret) 558 return ret; 559 560 ret = validate_vp9_seg_params(&frame->seg); 561 if (ret) 562 return ret; 563 564 zero_reserved(*frame); 565 return 0; 566 } 567 568 static int validate_av1_quantization(struct v4l2_av1_quantization *q) 569 { 570 if (q->flags > GENMASK(2, 0)) 571 return -EINVAL; 572 573 if (q->delta_q_y_dc < -64 || q->delta_q_y_dc > 63 || 574 q->delta_q_u_dc < -64 || q->delta_q_u_dc > 63 || 575 q->delta_q_v_dc < -64 || q->delta_q_v_dc > 63 || 576 q->delta_q_u_ac < -64 || q->delta_q_u_ac > 63 || 577 q->delta_q_v_ac < -64 || q->delta_q_v_ac > 63 || 578 q->delta_q_res > GENMASK(1, 0)) 579 return -EINVAL; 580 581 if (q->qm_y > GENMASK(3, 0) || 582 q->qm_u > GENMASK(3, 0) || 583 q->qm_v > GENMASK(3, 0)) 584 return -EINVAL; 585 586 return 0; 587 } 588 589 static int validate_av1_segmentation(struct v4l2_av1_segmentation *s) 590 { 591 u32 i; 592 u32 j; 593 594 if (s->flags > GENMASK(4, 0)) 595 return -EINVAL; 596 597 for (i = 0; i < ARRAY_SIZE(s->feature_data); i++) { 598 static const int segmentation_feature_signed[] = { 1, 1, 1, 1, 1, 0, 0, 0 }; 599 static const int segmentation_feature_max[] = { 255, 63, 63, 63, 63, 7, 0, 0}; 600 601 for (j = 0; j < ARRAY_SIZE(s->feature_data[j]); j++) { 602 s32 limit = segmentation_feature_max[j]; 603 604 if (segmentation_feature_signed[j]) { 605 if (s->feature_data[i][j] < -limit || 606 s->feature_data[i][j] > limit) 607 return -EINVAL; 608 } else { 609 if (s->feature_data[i][j] < 0 || s->feature_data[i][j] > limit) 610 return -EINVAL; 611 } 612 } 613 } 614 615 return 0; 616 } 617 618 static int validate_av1_loop_filter(struct v4l2_av1_loop_filter *lf) 619 { 620 u32 i; 621 622 if (lf->flags > GENMASK(3, 0)) 623 return -EINVAL; 624 625 for (i = 0; i < ARRAY_SIZE(lf->level); i++) { 626 if (lf->level[i] > GENMASK(5, 0)) 627 return -EINVAL; 628 } 629 630 if (lf->sharpness > GENMASK(2, 0)) 631 return -EINVAL; 632 633 for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) { 634 if (lf->ref_deltas[i] < -64 || lf->ref_deltas[i] > 63) 635 return -EINVAL; 636 } 637 638 for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) { 639 if (lf->mode_deltas[i] < -64 || lf->mode_deltas[i] > 63) 640 return -EINVAL; 641 } 642 643 return 0; 644 } 645 646 static int validate_av1_cdef(struct v4l2_av1_cdef *cdef) 647 { 648 u32 i; 649 650 if (cdef->damping_minus_3 > GENMASK(1, 0) || 651 cdef->bits > GENMASK(1, 0)) 652 return -EINVAL; 653 654 for (i = 0; i < 1 << cdef->bits; i++) { 655 if (cdef->y_pri_strength[i] > GENMASK(3, 0) || 656 cdef->y_sec_strength[i] > 4 || 657 cdef->uv_pri_strength[i] > GENMASK(3, 0) || 658 cdef->uv_sec_strength[i] > 4) 659 return -EINVAL; 660 } 661 662 return 0; 663 } 664 665 static int validate_av1_loop_restauration(struct v4l2_av1_loop_restoration *lr) 666 { 667 if (lr->lr_unit_shift > 3 || lr->lr_uv_shift > 1) 668 return -EINVAL; 669 670 return 0; 671 } 672 673 static int validate_av1_film_grain(struct v4l2_ctrl_av1_film_grain *fg) 674 { 675 u32 i; 676 677 if (fg->flags > GENMASK(4, 0)) 678 return -EINVAL; 679 680 if (fg->film_grain_params_ref_idx > GENMASK(2, 0) || 681 fg->num_y_points > 14 || 682 fg->num_cb_points > 10 || 683 fg->num_cr_points > GENMASK(3, 0) || 684 fg->grain_scaling_minus_8 > GENMASK(1, 0) || 685 fg->ar_coeff_lag > GENMASK(1, 0) || 686 fg->ar_coeff_shift_minus_6 > GENMASK(1, 0) || 687 fg->grain_scale_shift > GENMASK(1, 0)) 688 return -EINVAL; 689 690 if (!(fg->flags & V4L2_AV1_FILM_GRAIN_FLAG_APPLY_GRAIN)) 691 return 0; 692 693 for (i = 1; i < fg->num_y_points; i++) 694 if (fg->point_y_value[i] <= fg->point_y_value[i - 1]) 695 return -EINVAL; 696 697 for (i = 1; i < fg->num_cb_points; i++) 698 if (fg->point_cb_value[i] <= fg->point_cb_value[i - 1]) 699 return -EINVAL; 700 701 for (i = 1; i < fg->num_cr_points; i++) 702 if (fg->point_cr_value[i] <= fg->point_cr_value[i - 1]) 703 return -EINVAL; 704 705 return 0; 706 } 707 708 static int validate_av1_frame(struct v4l2_ctrl_av1_frame *f) 709 { 710 int ret = 0; 711 712 ret = validate_av1_quantization(&f->quantization); 713 if (ret) 714 return ret; 715 ret = validate_av1_segmentation(&f->segmentation); 716 if (ret) 717 return ret; 718 ret = validate_av1_loop_filter(&f->loop_filter); 719 if (ret) 720 return ret; 721 ret = validate_av1_cdef(&f->cdef); 722 if (ret) 723 return ret; 724 ret = validate_av1_loop_restauration(&f->loop_restoration); 725 if (ret) 726 return ret; 727 728 if (f->flags & 729 ~(V4L2_AV1_FRAME_FLAG_SHOW_FRAME | 730 V4L2_AV1_FRAME_FLAG_SHOWABLE_FRAME | 731 V4L2_AV1_FRAME_FLAG_ERROR_RESILIENT_MODE | 732 V4L2_AV1_FRAME_FLAG_DISABLE_CDF_UPDATE | 733 V4L2_AV1_FRAME_FLAG_ALLOW_SCREEN_CONTENT_TOOLS | 734 V4L2_AV1_FRAME_FLAG_FORCE_INTEGER_MV | 735 V4L2_AV1_FRAME_FLAG_ALLOW_INTRABC | 736 V4L2_AV1_FRAME_FLAG_USE_SUPERRES | 737 V4L2_AV1_FRAME_FLAG_ALLOW_HIGH_PRECISION_MV | 738 V4L2_AV1_FRAME_FLAG_IS_MOTION_MODE_SWITCHABLE | 739 V4L2_AV1_FRAME_FLAG_USE_REF_FRAME_MVS | 740 V4L2_AV1_FRAME_FLAG_DISABLE_FRAME_END_UPDATE_CDF | 741 V4L2_AV1_FRAME_FLAG_ALLOW_WARPED_MOTION | 742 V4L2_AV1_FRAME_FLAG_REFERENCE_SELECT | 743 V4L2_AV1_FRAME_FLAG_REDUCED_TX_SET | 744 V4L2_AV1_FRAME_FLAG_SKIP_MODE_ALLOWED | 745 V4L2_AV1_FRAME_FLAG_SKIP_MODE_PRESENT | 746 V4L2_AV1_FRAME_FLAG_FRAME_SIZE_OVERRIDE | 747 V4L2_AV1_FRAME_FLAG_BUFFER_REMOVAL_TIME_PRESENT | 748 V4L2_AV1_FRAME_FLAG_FRAME_REFS_SHORT_SIGNALING)) 749 return -EINVAL; 750 751 if (f->superres_denom > GENMASK(2, 0) + 9) 752 return -EINVAL; 753 754 return 0; 755 } 756 757 static int validate_av1_sequence(struct v4l2_ctrl_av1_sequence *s) 758 { 759 if (s->flags & 760 ~(V4L2_AV1_SEQUENCE_FLAG_STILL_PICTURE | 761 V4L2_AV1_SEQUENCE_FLAG_USE_128X128_SUPERBLOCK | 762 V4L2_AV1_SEQUENCE_FLAG_ENABLE_FILTER_INTRA | 763 V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTRA_EDGE_FILTER | 764 V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTERINTRA_COMPOUND | 765 V4L2_AV1_SEQUENCE_FLAG_ENABLE_MASKED_COMPOUND | 766 V4L2_AV1_SEQUENCE_FLAG_ENABLE_WARPED_MOTION | 767 V4L2_AV1_SEQUENCE_FLAG_ENABLE_DUAL_FILTER | 768 V4L2_AV1_SEQUENCE_FLAG_ENABLE_ORDER_HINT | 769 V4L2_AV1_SEQUENCE_FLAG_ENABLE_JNT_COMP | 770 V4L2_AV1_SEQUENCE_FLAG_ENABLE_REF_FRAME_MVS | 771 V4L2_AV1_SEQUENCE_FLAG_ENABLE_SUPERRES | 772 V4L2_AV1_SEQUENCE_FLAG_ENABLE_CDEF | 773 V4L2_AV1_SEQUENCE_FLAG_ENABLE_RESTORATION | 774 V4L2_AV1_SEQUENCE_FLAG_MONO_CHROME | 775 V4L2_AV1_SEQUENCE_FLAG_COLOR_RANGE | 776 V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_X | 777 V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_Y | 778 V4L2_AV1_SEQUENCE_FLAG_FILM_GRAIN_PARAMS_PRESENT | 779 V4L2_AV1_SEQUENCE_FLAG_SEPARATE_UV_DELTA_Q)) 780 return -EINVAL; 781 782 if (s->seq_profile == 1 && s->flags & V4L2_AV1_SEQUENCE_FLAG_MONO_CHROME) 783 return -EINVAL; 784 785 /* reserved */ 786 if (s->seq_profile > 2) 787 return -EINVAL; 788 789 /* TODO: PROFILES */ 790 return 0; 791 } 792 793 /* 794 * Compound controls validation requires setting unused fields/flags to zero 795 * in order to properly detect unchanged controls with v4l2_ctrl_type_op_equal's 796 * memcmp. 797 */ 798 static int std_validate_compound(const struct v4l2_ctrl *ctrl, u32 idx, 799 union v4l2_ctrl_ptr ptr) 800 { 801 struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence; 802 struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture; 803 struct v4l2_ctrl_vp8_frame *p_vp8_frame; 804 struct v4l2_ctrl_fwht_params *p_fwht_params; 805 struct v4l2_ctrl_h264_sps *p_h264_sps; 806 struct v4l2_ctrl_h264_pps *p_h264_pps; 807 struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights; 808 struct v4l2_ctrl_h264_slice_params *p_h264_slice_params; 809 struct v4l2_ctrl_h264_decode_params *p_h264_dec_params; 810 struct v4l2_ctrl_hevc_sps *p_hevc_sps; 811 struct v4l2_ctrl_hevc_pps *p_hevc_pps; 812 struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering; 813 struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params; 814 struct v4l2_area *area; 815 void *p = ptr.p + idx * ctrl->elem_size; 816 unsigned int i; 817 818 switch ((u32)ctrl->type) { 819 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 820 p_mpeg2_sequence = p; 821 822 switch (p_mpeg2_sequence->chroma_format) { 823 case 1: /* 4:2:0 */ 824 case 2: /* 4:2:2 */ 825 case 3: /* 4:4:4 */ 826 break; 827 default: 828 return -EINVAL; 829 } 830 break; 831 832 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 833 p_mpeg2_picture = p; 834 835 switch (p_mpeg2_picture->intra_dc_precision) { 836 case 0: /* 8 bits */ 837 case 1: /* 9 bits */ 838 case 2: /* 10 bits */ 839 case 3: /* 11 bits */ 840 break; 841 default: 842 return -EINVAL; 843 } 844 845 switch (p_mpeg2_picture->picture_structure) { 846 case V4L2_MPEG2_PIC_TOP_FIELD: 847 case V4L2_MPEG2_PIC_BOTTOM_FIELD: 848 case V4L2_MPEG2_PIC_FRAME: 849 break; 850 default: 851 return -EINVAL; 852 } 853 854 switch (p_mpeg2_picture->picture_coding_type) { 855 case V4L2_MPEG2_PIC_CODING_TYPE_I: 856 case V4L2_MPEG2_PIC_CODING_TYPE_P: 857 case V4L2_MPEG2_PIC_CODING_TYPE_B: 858 break; 859 default: 860 return -EINVAL; 861 } 862 zero_reserved(*p_mpeg2_picture); 863 break; 864 865 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 866 break; 867 868 case V4L2_CTRL_TYPE_FWHT_PARAMS: 869 p_fwht_params = p; 870 if (p_fwht_params->version < V4L2_FWHT_VERSION) 871 return -EINVAL; 872 if (!p_fwht_params->width || !p_fwht_params->height) 873 return -EINVAL; 874 break; 875 876 case V4L2_CTRL_TYPE_H264_SPS: 877 p_h264_sps = p; 878 879 /* Some syntax elements are only conditionally valid */ 880 if (p_h264_sps->pic_order_cnt_type != 0) { 881 p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 = 0; 882 } else if (p_h264_sps->pic_order_cnt_type != 1) { 883 p_h264_sps->num_ref_frames_in_pic_order_cnt_cycle = 0; 884 p_h264_sps->offset_for_non_ref_pic = 0; 885 p_h264_sps->offset_for_top_to_bottom_field = 0; 886 memset(&p_h264_sps->offset_for_ref_frame, 0, 887 sizeof(p_h264_sps->offset_for_ref_frame)); 888 } 889 890 if (!V4L2_H264_SPS_HAS_CHROMA_FORMAT(p_h264_sps)) { 891 p_h264_sps->chroma_format_idc = 1; 892 p_h264_sps->bit_depth_luma_minus8 = 0; 893 p_h264_sps->bit_depth_chroma_minus8 = 0; 894 895 p_h264_sps->flags &= 896 ~V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS; 897 898 if (p_h264_sps->chroma_format_idc < 3) 899 p_h264_sps->flags &= 900 ~V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE; 901 } 902 903 if (p_h264_sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY) 904 p_h264_sps->flags &= 905 ~V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD; 906 907 /* 908 * Chroma 4:2:2 format require at least High 4:2:2 profile. 909 * 910 * The H264 specification and well-known parser implementations 911 * use profile-idc values directly, as that is clearer and 912 * less ambiguous. We do the same here. 913 */ 914 if (p_h264_sps->profile_idc < 122 && 915 p_h264_sps->chroma_format_idc > 1) 916 return -EINVAL; 917 /* Chroma 4:4:4 format require at least High 4:2:2 profile */ 918 if (p_h264_sps->profile_idc < 244 && 919 p_h264_sps->chroma_format_idc > 2) 920 return -EINVAL; 921 if (p_h264_sps->chroma_format_idc > 3) 922 return -EINVAL; 923 924 if (p_h264_sps->bit_depth_luma_minus8 > 6) 925 return -EINVAL; 926 if (p_h264_sps->bit_depth_chroma_minus8 > 6) 927 return -EINVAL; 928 if (p_h264_sps->log2_max_frame_num_minus4 > 12) 929 return -EINVAL; 930 if (p_h264_sps->pic_order_cnt_type > 2) 931 return -EINVAL; 932 if (p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 > 12) 933 return -EINVAL; 934 if (p_h264_sps->max_num_ref_frames > V4L2_H264_REF_LIST_LEN) 935 return -EINVAL; 936 break; 937 938 case V4L2_CTRL_TYPE_H264_PPS: 939 p_h264_pps = p; 940 941 if (p_h264_pps->num_slice_groups_minus1 > 7) 942 return -EINVAL; 943 if (p_h264_pps->num_ref_idx_l0_default_active_minus1 > 944 (V4L2_H264_REF_LIST_LEN - 1)) 945 return -EINVAL; 946 if (p_h264_pps->num_ref_idx_l1_default_active_minus1 > 947 (V4L2_H264_REF_LIST_LEN - 1)) 948 return -EINVAL; 949 if (p_h264_pps->weighted_bipred_idc > 2) 950 return -EINVAL; 951 /* 952 * pic_init_qp_minus26 shall be in the range of 953 * -(26 + QpBdOffset_y) to +25, inclusive, 954 * where QpBdOffset_y is 6 * bit_depth_luma_minus8 955 */ 956 if (p_h264_pps->pic_init_qp_minus26 < -62 || 957 p_h264_pps->pic_init_qp_minus26 > 25) 958 return -EINVAL; 959 if (p_h264_pps->pic_init_qs_minus26 < -26 || 960 p_h264_pps->pic_init_qs_minus26 > 25) 961 return -EINVAL; 962 if (p_h264_pps->chroma_qp_index_offset < -12 || 963 p_h264_pps->chroma_qp_index_offset > 12) 964 return -EINVAL; 965 if (p_h264_pps->second_chroma_qp_index_offset < -12 || 966 p_h264_pps->second_chroma_qp_index_offset > 12) 967 return -EINVAL; 968 break; 969 970 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 971 break; 972 973 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 974 p_h264_pred_weights = p; 975 976 if (p_h264_pred_weights->luma_log2_weight_denom > 7) 977 return -EINVAL; 978 if (p_h264_pred_weights->chroma_log2_weight_denom > 7) 979 return -EINVAL; 980 break; 981 982 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 983 p_h264_slice_params = p; 984 985 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B) 986 p_h264_slice_params->flags &= 987 ~V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED; 988 989 if (p_h264_slice_params->colour_plane_id > 2) 990 return -EINVAL; 991 if (p_h264_slice_params->cabac_init_idc > 2) 992 return -EINVAL; 993 if (p_h264_slice_params->disable_deblocking_filter_idc > 2) 994 return -EINVAL; 995 if (p_h264_slice_params->slice_alpha_c0_offset_div2 < -6 || 996 p_h264_slice_params->slice_alpha_c0_offset_div2 > 6) 997 return -EINVAL; 998 if (p_h264_slice_params->slice_beta_offset_div2 < -6 || 999 p_h264_slice_params->slice_beta_offset_div2 > 6) 1000 return -EINVAL; 1001 1002 if (p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_I || 1003 p_h264_slice_params->slice_type == V4L2_H264_SLICE_TYPE_SI) 1004 p_h264_slice_params->num_ref_idx_l0_active_minus1 = 0; 1005 if (p_h264_slice_params->slice_type != V4L2_H264_SLICE_TYPE_B) 1006 p_h264_slice_params->num_ref_idx_l1_active_minus1 = 0; 1007 1008 if (p_h264_slice_params->num_ref_idx_l0_active_minus1 > 1009 (V4L2_H264_REF_LIST_LEN - 1)) 1010 return -EINVAL; 1011 if (p_h264_slice_params->num_ref_idx_l1_active_minus1 > 1012 (V4L2_H264_REF_LIST_LEN - 1)) 1013 return -EINVAL; 1014 zero_reserved(*p_h264_slice_params); 1015 break; 1016 1017 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 1018 p_h264_dec_params = p; 1019 1020 if (p_h264_dec_params->nal_ref_idc > 3) 1021 return -EINVAL; 1022 for (i = 0; i < V4L2_H264_NUM_DPB_ENTRIES; i++) { 1023 struct v4l2_h264_dpb_entry *dpb_entry = 1024 &p_h264_dec_params->dpb[i]; 1025 1026 zero_reserved(*dpb_entry); 1027 } 1028 zero_reserved(*p_h264_dec_params); 1029 break; 1030 1031 case V4L2_CTRL_TYPE_VP8_FRAME: 1032 p_vp8_frame = p; 1033 1034 switch (p_vp8_frame->num_dct_parts) { 1035 case 1: 1036 case 2: 1037 case 4: 1038 case 8: 1039 break; 1040 default: 1041 return -EINVAL; 1042 } 1043 zero_padding(p_vp8_frame->segment); 1044 zero_padding(p_vp8_frame->lf); 1045 zero_padding(p_vp8_frame->quant); 1046 zero_padding(p_vp8_frame->entropy); 1047 zero_padding(p_vp8_frame->coder_state); 1048 break; 1049 1050 case V4L2_CTRL_TYPE_HEVC_SPS: 1051 p_hevc_sps = p; 1052 1053 if (!(p_hevc_sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED)) { 1054 p_hevc_sps->pcm_sample_bit_depth_luma_minus1 = 0; 1055 p_hevc_sps->pcm_sample_bit_depth_chroma_minus1 = 0; 1056 p_hevc_sps->log2_min_pcm_luma_coding_block_size_minus3 = 0; 1057 p_hevc_sps->log2_diff_max_min_pcm_luma_coding_block_size = 0; 1058 } 1059 1060 if (!(p_hevc_sps->flags & 1061 V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT)) 1062 p_hevc_sps->num_long_term_ref_pics_sps = 0; 1063 break; 1064 1065 case V4L2_CTRL_TYPE_HEVC_PPS: 1066 p_hevc_pps = p; 1067 1068 if (!(p_hevc_pps->flags & 1069 V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED)) 1070 p_hevc_pps->diff_cu_qp_delta_depth = 0; 1071 1072 if (!(p_hevc_pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED)) { 1073 p_hevc_pps->num_tile_columns_minus1 = 0; 1074 p_hevc_pps->num_tile_rows_minus1 = 0; 1075 memset(&p_hevc_pps->column_width_minus1, 0, 1076 sizeof(p_hevc_pps->column_width_minus1)); 1077 memset(&p_hevc_pps->row_height_minus1, 0, 1078 sizeof(p_hevc_pps->row_height_minus1)); 1079 1080 p_hevc_pps->flags &= 1081 ~V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED; 1082 } 1083 1084 if (p_hevc_pps->flags & 1085 V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER) { 1086 p_hevc_pps->pps_beta_offset_div2 = 0; 1087 p_hevc_pps->pps_tc_offset_div2 = 0; 1088 } 1089 break; 1090 1091 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 1092 p_hevc_decode_params = p; 1093 1094 if (p_hevc_decode_params->num_active_dpb_entries > 1095 V4L2_HEVC_DPB_ENTRIES_NUM_MAX) 1096 return -EINVAL; 1097 break; 1098 1099 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 1100 break; 1101 1102 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 1103 break; 1104 1105 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 1106 p_hdr10_mastering = p; 1107 1108 for (i = 0; i < 3; ++i) { 1109 if (p_hdr10_mastering->display_primaries_x[i] < 1110 V4L2_HDR10_MASTERING_PRIMARIES_X_LOW || 1111 p_hdr10_mastering->display_primaries_x[i] > 1112 V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH || 1113 p_hdr10_mastering->display_primaries_y[i] < 1114 V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW || 1115 p_hdr10_mastering->display_primaries_y[i] > 1116 V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH) 1117 return -EINVAL; 1118 } 1119 1120 if (p_hdr10_mastering->white_point_x < 1121 V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW || 1122 p_hdr10_mastering->white_point_x > 1123 V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH || 1124 p_hdr10_mastering->white_point_y < 1125 V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW || 1126 p_hdr10_mastering->white_point_y > 1127 V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH) 1128 return -EINVAL; 1129 1130 if (p_hdr10_mastering->max_display_mastering_luminance < 1131 V4L2_HDR10_MASTERING_MAX_LUMA_LOW || 1132 p_hdr10_mastering->max_display_mastering_luminance > 1133 V4L2_HDR10_MASTERING_MAX_LUMA_HIGH || 1134 p_hdr10_mastering->min_display_mastering_luminance < 1135 V4L2_HDR10_MASTERING_MIN_LUMA_LOW || 1136 p_hdr10_mastering->min_display_mastering_luminance > 1137 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH) 1138 return -EINVAL; 1139 1140 /* The following restriction comes from ITU-T Rec. H.265 spec */ 1141 if (p_hdr10_mastering->max_display_mastering_luminance == 1142 V4L2_HDR10_MASTERING_MAX_LUMA_LOW && 1143 p_hdr10_mastering->min_display_mastering_luminance == 1144 V4L2_HDR10_MASTERING_MIN_LUMA_HIGH) 1145 return -EINVAL; 1146 1147 break; 1148 1149 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 1150 break; 1151 1152 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 1153 return validate_vp9_compressed_hdr(p); 1154 1155 case V4L2_CTRL_TYPE_VP9_FRAME: 1156 return validate_vp9_frame(p); 1157 case V4L2_CTRL_TYPE_AV1_FRAME: 1158 return validate_av1_frame(p); 1159 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 1160 return validate_av1_sequence(p); 1161 case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY: 1162 break; 1163 case V4L2_CTRL_TYPE_AV1_FILM_GRAIN: 1164 return validate_av1_film_grain(p); 1165 1166 case V4L2_CTRL_TYPE_AREA: 1167 area = p; 1168 if (!area->width || !area->height) 1169 return -EINVAL; 1170 break; 1171 1172 default: 1173 return -EINVAL; 1174 } 1175 1176 return 0; 1177 } 1178 1179 static int std_validate_elem(const struct v4l2_ctrl *ctrl, u32 idx, 1180 union v4l2_ctrl_ptr ptr) 1181 { 1182 size_t len; 1183 u64 offset; 1184 s64 val; 1185 1186 switch ((u32)ctrl->type) { 1187 case V4L2_CTRL_TYPE_INTEGER: 1188 return ROUND_TO_RANGE(ptr.p_s32[idx], u32, ctrl); 1189 case V4L2_CTRL_TYPE_INTEGER64: 1190 /* 1191 * We can't use the ROUND_TO_RANGE define here due to 1192 * the u64 divide that needs special care. 1193 */ 1194 val = ptr.p_s64[idx]; 1195 if (ctrl->maximum >= 0 && val >= ctrl->maximum - (s64)(ctrl->step / 2)) 1196 val = ctrl->maximum; 1197 else 1198 val += (s64)(ctrl->step / 2); 1199 val = clamp_t(s64, val, ctrl->minimum, ctrl->maximum); 1200 offset = val - ctrl->minimum; 1201 do_div(offset, ctrl->step); 1202 ptr.p_s64[idx] = ctrl->minimum + offset * ctrl->step; 1203 return 0; 1204 case V4L2_CTRL_TYPE_U8: 1205 return ROUND_TO_RANGE(ptr.p_u8[idx], u8, ctrl); 1206 case V4L2_CTRL_TYPE_U16: 1207 return ROUND_TO_RANGE(ptr.p_u16[idx], u16, ctrl); 1208 case V4L2_CTRL_TYPE_U32: 1209 return ROUND_TO_RANGE(ptr.p_u32[idx], u32, ctrl); 1210 1211 case V4L2_CTRL_TYPE_BOOLEAN: 1212 ptr.p_s32[idx] = !!ptr.p_s32[idx]; 1213 return 0; 1214 1215 case V4L2_CTRL_TYPE_MENU: 1216 case V4L2_CTRL_TYPE_INTEGER_MENU: 1217 if (ptr.p_s32[idx] < ctrl->minimum || ptr.p_s32[idx] > ctrl->maximum) 1218 return -ERANGE; 1219 if (ptr.p_s32[idx] < BITS_PER_LONG_LONG && 1220 (ctrl->menu_skip_mask & BIT_ULL(ptr.p_s32[idx]))) 1221 return -EINVAL; 1222 if (ctrl->type == V4L2_CTRL_TYPE_MENU && 1223 ctrl->qmenu[ptr.p_s32[idx]][0] == '\0') 1224 return -EINVAL; 1225 return 0; 1226 1227 case V4L2_CTRL_TYPE_BITMASK: 1228 ptr.p_s32[idx] &= ctrl->maximum; 1229 return 0; 1230 1231 case V4L2_CTRL_TYPE_BUTTON: 1232 case V4L2_CTRL_TYPE_CTRL_CLASS: 1233 ptr.p_s32[idx] = 0; 1234 return 0; 1235 1236 case V4L2_CTRL_TYPE_STRING: 1237 idx *= ctrl->elem_size; 1238 len = strlen(ptr.p_char + idx); 1239 if (len < ctrl->minimum) 1240 return -ERANGE; 1241 if ((len - (u32)ctrl->minimum) % (u32)ctrl->step) 1242 return -ERANGE; 1243 return 0; 1244 1245 default: 1246 return std_validate_compound(ctrl, idx, ptr); 1247 } 1248 } 1249 1250 int v4l2_ctrl_type_op_validate(const struct v4l2_ctrl *ctrl, 1251 union v4l2_ctrl_ptr ptr) 1252 { 1253 unsigned int i; 1254 int ret = 0; 1255 1256 switch ((u32)ctrl->type) { 1257 case V4L2_CTRL_TYPE_U8: 1258 if (ctrl->maximum == 0xff && ctrl->minimum == 0 && ctrl->step == 1) 1259 return 0; 1260 break; 1261 case V4L2_CTRL_TYPE_U16: 1262 if (ctrl->maximum == 0xffff && ctrl->minimum == 0 && ctrl->step == 1) 1263 return 0; 1264 break; 1265 case V4L2_CTRL_TYPE_U32: 1266 if (ctrl->maximum == 0xffffffff && ctrl->minimum == 0 && ctrl->step == 1) 1267 return 0; 1268 break; 1269 1270 case V4L2_CTRL_TYPE_BUTTON: 1271 case V4L2_CTRL_TYPE_CTRL_CLASS: 1272 memset(ptr.p_s32, 0, ctrl->new_elems * sizeof(s32)); 1273 return 0; 1274 } 1275 1276 for (i = 0; !ret && i < ctrl->new_elems; i++) 1277 ret = std_validate_elem(ctrl, i, ptr); 1278 return ret; 1279 } 1280 EXPORT_SYMBOL(v4l2_ctrl_type_op_validate); 1281 1282 static const struct v4l2_ctrl_type_ops std_type_ops = { 1283 .equal = v4l2_ctrl_type_op_equal, 1284 .init = v4l2_ctrl_type_op_init, 1285 .log = v4l2_ctrl_type_op_log, 1286 .validate = v4l2_ctrl_type_op_validate, 1287 }; 1288 1289 void v4l2_ctrl_notify(struct v4l2_ctrl *ctrl, v4l2_ctrl_notify_fnc notify, void *priv) 1290 { 1291 if (!ctrl) 1292 return; 1293 if (!notify) { 1294 ctrl->call_notify = 0; 1295 return; 1296 } 1297 if (WARN_ON(ctrl->handler->notify && ctrl->handler->notify != notify)) 1298 return; 1299 ctrl->handler->notify = notify; 1300 ctrl->handler->notify_priv = priv; 1301 ctrl->call_notify = 1; 1302 } 1303 EXPORT_SYMBOL(v4l2_ctrl_notify); 1304 1305 /* Copy the one value to another. */ 1306 static void ptr_to_ptr(struct v4l2_ctrl *ctrl, 1307 union v4l2_ctrl_ptr from, union v4l2_ctrl_ptr to, 1308 unsigned int elems) 1309 { 1310 if (ctrl == NULL) 1311 return; 1312 memcpy(to.p, from.p_const, elems * ctrl->elem_size); 1313 } 1314 1315 /* Copy the new value to the current value. */ 1316 void new_to_cur(struct v4l2_fh *fh, struct v4l2_ctrl *ctrl, u32 ch_flags) 1317 { 1318 bool changed; 1319 1320 if (ctrl == NULL) 1321 return; 1322 1323 /* has_changed is set by cluster_changed */ 1324 changed = ctrl->has_changed; 1325 if (changed) { 1326 if (ctrl->is_dyn_array) 1327 ctrl->elems = ctrl->new_elems; 1328 ptr_to_ptr(ctrl, ctrl->p_new, ctrl->p_cur, ctrl->elems); 1329 } 1330 1331 if (ch_flags & V4L2_EVENT_CTRL_CH_FLAGS) { 1332 /* Note: CH_FLAGS is only set for auto clusters. */ 1333 ctrl->flags &= 1334 ~(V4L2_CTRL_FLAG_INACTIVE | V4L2_CTRL_FLAG_VOLATILE); 1335 if (!is_cur_manual(ctrl->cluster[0])) { 1336 ctrl->flags |= V4L2_CTRL_FLAG_INACTIVE; 1337 if (ctrl->cluster[0]->has_volatiles) 1338 ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE; 1339 } 1340 fh = NULL; 1341 } 1342 if (changed || ch_flags) { 1343 /* If a control was changed that was not one of the controls 1344 modified by the application, then send the event to all. */ 1345 if (!ctrl->is_new) 1346 fh = NULL; 1347 send_event(fh, ctrl, 1348 (changed ? V4L2_EVENT_CTRL_CH_VALUE : 0) | ch_flags); 1349 if (ctrl->call_notify && changed && ctrl->handler->notify) 1350 ctrl->handler->notify(ctrl, ctrl->handler->notify_priv); 1351 } 1352 } 1353 1354 /* Copy the current value to the new value */ 1355 void cur_to_new(struct v4l2_ctrl *ctrl) 1356 { 1357 if (ctrl == NULL) 1358 return; 1359 if (ctrl->is_dyn_array) 1360 ctrl->new_elems = ctrl->elems; 1361 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems); 1362 } 1363 1364 static bool req_alloc_array(struct v4l2_ctrl_ref *ref, u32 elems) 1365 { 1366 void *tmp; 1367 1368 if (elems == ref->p_req_array_alloc_elems) 1369 return true; 1370 if (ref->ctrl->is_dyn_array && 1371 elems < ref->p_req_array_alloc_elems) 1372 return true; 1373 1374 tmp = kvmalloc(elems * ref->ctrl->elem_size, GFP_KERNEL); 1375 1376 if (!tmp) { 1377 ref->p_req_array_enomem = true; 1378 return false; 1379 } 1380 ref->p_req_array_enomem = false; 1381 kvfree(ref->p_req.p); 1382 ref->p_req.p = tmp; 1383 ref->p_req_array_alloc_elems = elems; 1384 return true; 1385 } 1386 1387 /* Copy the new value to the request value */ 1388 void new_to_req(struct v4l2_ctrl_ref *ref) 1389 { 1390 struct v4l2_ctrl *ctrl; 1391 1392 if (!ref) 1393 return; 1394 1395 ctrl = ref->ctrl; 1396 if (ctrl->is_array && !req_alloc_array(ref, ctrl->new_elems)) 1397 return; 1398 1399 ref->p_req_elems = ctrl->new_elems; 1400 ptr_to_ptr(ctrl, ctrl->p_new, ref->p_req, ref->p_req_elems); 1401 ref->p_req_valid = true; 1402 } 1403 1404 /* Copy the current value to the request value */ 1405 void cur_to_req(struct v4l2_ctrl_ref *ref) 1406 { 1407 struct v4l2_ctrl *ctrl; 1408 1409 if (!ref) 1410 return; 1411 1412 ctrl = ref->ctrl; 1413 if (ctrl->is_array && !req_alloc_array(ref, ctrl->elems)) 1414 return; 1415 1416 ref->p_req_elems = ctrl->elems; 1417 ptr_to_ptr(ctrl, ctrl->p_cur, ref->p_req, ctrl->elems); 1418 ref->p_req_valid = true; 1419 } 1420 1421 /* Copy the request value to the new value */ 1422 int req_to_new(struct v4l2_ctrl_ref *ref) 1423 { 1424 struct v4l2_ctrl *ctrl; 1425 1426 if (!ref) 1427 return 0; 1428 1429 ctrl = ref->ctrl; 1430 1431 /* 1432 * This control was never set in the request, so just use the current 1433 * value. 1434 */ 1435 if (!ref->p_req_valid) { 1436 if (ctrl->is_dyn_array) 1437 ctrl->new_elems = ctrl->elems; 1438 ptr_to_ptr(ctrl, ctrl->p_cur, ctrl->p_new, ctrl->new_elems); 1439 return 0; 1440 } 1441 1442 /* Not an array, so just copy the request value */ 1443 if (!ctrl->is_array) { 1444 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems); 1445 return 0; 1446 } 1447 1448 /* Sanity check, should never happen */ 1449 if (WARN_ON(!ref->p_req_array_alloc_elems)) 1450 return -ENOMEM; 1451 1452 if (!ctrl->is_dyn_array && 1453 ref->p_req_elems != ctrl->p_array_alloc_elems) 1454 return -ENOMEM; 1455 1456 /* 1457 * Check if the number of elements in the request is more than the 1458 * elements in ctrl->p_array. If so, attempt to realloc ctrl->p_array. 1459 * Note that p_array is allocated with twice the number of elements 1460 * in the dynamic array since it has to store both the current and 1461 * new value of such a control. 1462 */ 1463 if (ref->p_req_elems > ctrl->p_array_alloc_elems) { 1464 unsigned int sz = ref->p_req_elems * ctrl->elem_size; 1465 void *old = ctrl->p_array; 1466 void *tmp = kvzalloc(2 * sz, GFP_KERNEL); 1467 1468 if (!tmp) 1469 return -ENOMEM; 1470 memcpy(tmp, ctrl->p_new.p, ctrl->elems * ctrl->elem_size); 1471 memcpy(tmp + sz, ctrl->p_cur.p, ctrl->elems * ctrl->elem_size); 1472 ctrl->p_new.p = tmp; 1473 ctrl->p_cur.p = tmp + sz; 1474 ctrl->p_array = tmp; 1475 ctrl->p_array_alloc_elems = ref->p_req_elems; 1476 kvfree(old); 1477 } 1478 1479 ctrl->new_elems = ref->p_req_elems; 1480 ptr_to_ptr(ctrl, ref->p_req, ctrl->p_new, ctrl->new_elems); 1481 return 0; 1482 } 1483 1484 /* Control range checking */ 1485 int check_range(enum v4l2_ctrl_type type, 1486 s64 min, s64 max, u64 step, s64 def) 1487 { 1488 switch (type) { 1489 case V4L2_CTRL_TYPE_BOOLEAN: 1490 if (step != 1 || max > 1 || min < 0) 1491 return -ERANGE; 1492 fallthrough; 1493 case V4L2_CTRL_TYPE_U8: 1494 case V4L2_CTRL_TYPE_U16: 1495 case V4L2_CTRL_TYPE_U32: 1496 case V4L2_CTRL_TYPE_INTEGER: 1497 case V4L2_CTRL_TYPE_INTEGER64: 1498 if (step == 0 || min > max || def < min || def > max) 1499 return -ERANGE; 1500 return 0; 1501 case V4L2_CTRL_TYPE_BITMASK: 1502 if (step || min || !max || (def & ~max)) 1503 return -ERANGE; 1504 return 0; 1505 case V4L2_CTRL_TYPE_MENU: 1506 case V4L2_CTRL_TYPE_INTEGER_MENU: 1507 if (min > max || def < min || def > max) 1508 return -ERANGE; 1509 /* Note: step == menu_skip_mask for menu controls. 1510 So here we check if the default value is masked out. */ 1511 if (step && ((1 << def) & step)) 1512 return -EINVAL; 1513 return 0; 1514 case V4L2_CTRL_TYPE_STRING: 1515 if (min > max || min < 0 || step < 1 || def) 1516 return -ERANGE; 1517 return 0; 1518 default: 1519 return 0; 1520 } 1521 } 1522 1523 /* Set the handler's error code if it wasn't set earlier already */ 1524 static inline int handler_set_err(struct v4l2_ctrl_handler *hdl, int err) 1525 { 1526 if (hdl->error == 0) 1527 hdl->error = err; 1528 return err; 1529 } 1530 1531 /* Initialize the handler */ 1532 int v4l2_ctrl_handler_init_class(struct v4l2_ctrl_handler *hdl, 1533 unsigned nr_of_controls_hint, 1534 struct lock_class_key *key, const char *name) 1535 { 1536 mutex_init(&hdl->_lock); 1537 hdl->lock = &hdl->_lock; 1538 lockdep_set_class_and_name(hdl->lock, key, name); 1539 INIT_LIST_HEAD(&hdl->ctrls); 1540 INIT_LIST_HEAD(&hdl->ctrl_refs); 1541 hdl->nr_of_buckets = 1 + nr_of_controls_hint / 8; 1542 hdl->buckets = kvcalloc(hdl->nr_of_buckets, sizeof(hdl->buckets[0]), 1543 GFP_KERNEL); 1544 hdl->error = hdl->buckets ? 0 : -ENOMEM; 1545 v4l2_ctrl_handler_init_request(hdl); 1546 return hdl->error; 1547 } 1548 EXPORT_SYMBOL(v4l2_ctrl_handler_init_class); 1549 1550 /* Free all controls and control refs */ 1551 void v4l2_ctrl_handler_free(struct v4l2_ctrl_handler *hdl) 1552 { 1553 struct v4l2_ctrl_ref *ref, *next_ref; 1554 struct v4l2_ctrl *ctrl, *next_ctrl; 1555 struct v4l2_subscribed_event *sev, *next_sev; 1556 1557 if (hdl == NULL || hdl->buckets == NULL) 1558 return; 1559 1560 v4l2_ctrl_handler_free_request(hdl); 1561 1562 mutex_lock(hdl->lock); 1563 /* Free all nodes */ 1564 list_for_each_entry_safe(ref, next_ref, &hdl->ctrl_refs, node) { 1565 list_del(&ref->node); 1566 if (ref->p_req_array_alloc_elems) 1567 kvfree(ref->p_req.p); 1568 kfree(ref); 1569 } 1570 /* Free all controls owned by the handler */ 1571 list_for_each_entry_safe(ctrl, next_ctrl, &hdl->ctrls, node) { 1572 list_del(&ctrl->node); 1573 list_for_each_entry_safe(sev, next_sev, &ctrl->ev_subs, node) 1574 list_del(&sev->node); 1575 kvfree(ctrl->p_array); 1576 kvfree(ctrl); 1577 } 1578 kvfree(hdl->buckets); 1579 hdl->buckets = NULL; 1580 hdl->cached = NULL; 1581 hdl->error = 0; 1582 mutex_unlock(hdl->lock); 1583 mutex_destroy(&hdl->_lock); 1584 } 1585 EXPORT_SYMBOL(v4l2_ctrl_handler_free); 1586 1587 /* For backwards compatibility: V4L2_CID_PRIVATE_BASE should no longer 1588 be used except in G_CTRL, S_CTRL, QUERYCTRL and QUERYMENU when dealing 1589 with applications that do not use the NEXT_CTRL flag. 1590 1591 We just find the n-th private user control. It's O(N), but that should not 1592 be an issue in this particular case. */ 1593 static struct v4l2_ctrl_ref *find_private_ref( 1594 struct v4l2_ctrl_handler *hdl, u32 id) 1595 { 1596 struct v4l2_ctrl_ref *ref; 1597 1598 id -= V4L2_CID_PRIVATE_BASE; 1599 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 1600 /* Search for private user controls that are compatible with 1601 VIDIOC_G/S_CTRL. */ 1602 if (V4L2_CTRL_ID2WHICH(ref->ctrl->id) == V4L2_CTRL_CLASS_USER && 1603 V4L2_CTRL_DRIVER_PRIV(ref->ctrl->id)) { 1604 if (!ref->ctrl->is_int) 1605 continue; 1606 if (id == 0) 1607 return ref; 1608 id--; 1609 } 1610 } 1611 return NULL; 1612 } 1613 1614 /* Find a control with the given ID. */ 1615 struct v4l2_ctrl_ref *find_ref(struct v4l2_ctrl_handler *hdl, u32 id) 1616 { 1617 struct v4l2_ctrl_ref *ref; 1618 int bucket; 1619 1620 id &= V4L2_CTRL_ID_MASK; 1621 1622 /* Old-style private controls need special handling */ 1623 if (id >= V4L2_CID_PRIVATE_BASE) 1624 return find_private_ref(hdl, id); 1625 bucket = id % hdl->nr_of_buckets; 1626 1627 /* Simple optimization: cache the last control found */ 1628 if (hdl->cached && hdl->cached->ctrl->id == id) 1629 return hdl->cached; 1630 1631 /* Not in cache, search the hash */ 1632 ref = hdl->buckets ? hdl->buckets[bucket] : NULL; 1633 while (ref && ref->ctrl->id != id) 1634 ref = ref->next; 1635 1636 if (ref) 1637 hdl->cached = ref; /* cache it! */ 1638 return ref; 1639 } 1640 1641 /* Find a control with the given ID. Take the handler's lock first. */ 1642 struct v4l2_ctrl_ref *find_ref_lock(struct v4l2_ctrl_handler *hdl, u32 id) 1643 { 1644 struct v4l2_ctrl_ref *ref = NULL; 1645 1646 if (hdl) { 1647 mutex_lock(hdl->lock); 1648 ref = find_ref(hdl, id); 1649 mutex_unlock(hdl->lock); 1650 } 1651 return ref; 1652 } 1653 1654 /* Find a control with the given ID. */ 1655 struct v4l2_ctrl *v4l2_ctrl_find(struct v4l2_ctrl_handler *hdl, u32 id) 1656 { 1657 struct v4l2_ctrl_ref *ref = find_ref_lock(hdl, id); 1658 1659 return ref ? ref->ctrl : NULL; 1660 } 1661 EXPORT_SYMBOL(v4l2_ctrl_find); 1662 1663 /* Allocate a new v4l2_ctrl_ref and hook it into the handler. */ 1664 int handler_new_ref(struct v4l2_ctrl_handler *hdl, 1665 struct v4l2_ctrl *ctrl, 1666 struct v4l2_ctrl_ref **ctrl_ref, 1667 bool from_other_dev, bool allocate_req) 1668 { 1669 struct v4l2_ctrl_ref *ref; 1670 struct v4l2_ctrl_ref *new_ref; 1671 u32 id = ctrl->id; 1672 u32 class_ctrl = V4L2_CTRL_ID2WHICH(id) | 1; 1673 int bucket = id % hdl->nr_of_buckets; /* which bucket to use */ 1674 unsigned int size_extra_req = 0; 1675 1676 if (ctrl_ref) 1677 *ctrl_ref = NULL; 1678 1679 /* 1680 * Automatically add the control class if it is not yet present and 1681 * the new control is not a compound control. 1682 */ 1683 if (ctrl->type < V4L2_CTRL_COMPOUND_TYPES && 1684 id != class_ctrl && find_ref_lock(hdl, class_ctrl) == NULL) 1685 if (!v4l2_ctrl_new_std(hdl, NULL, class_ctrl, 0, 0, 0, 0)) 1686 return hdl->error; 1687 1688 if (hdl->error) 1689 return hdl->error; 1690 1691 if (allocate_req && !ctrl->is_array) 1692 size_extra_req = ctrl->elems * ctrl->elem_size; 1693 new_ref = kzalloc(sizeof(*new_ref) + size_extra_req, GFP_KERNEL); 1694 if (!new_ref) 1695 return handler_set_err(hdl, -ENOMEM); 1696 new_ref->ctrl = ctrl; 1697 new_ref->from_other_dev = from_other_dev; 1698 if (size_extra_req) 1699 new_ref->p_req.p = &new_ref[1]; 1700 1701 INIT_LIST_HEAD(&new_ref->node); 1702 1703 mutex_lock(hdl->lock); 1704 1705 /* Add immediately at the end of the list if the list is empty, or if 1706 the last element in the list has a lower ID. 1707 This ensures that when elements are added in ascending order the 1708 insertion is an O(1) operation. */ 1709 if (list_empty(&hdl->ctrl_refs) || id > node2id(hdl->ctrl_refs.prev)) { 1710 list_add_tail(&new_ref->node, &hdl->ctrl_refs); 1711 goto insert_in_hash; 1712 } 1713 1714 /* Find insert position in sorted list */ 1715 list_for_each_entry(ref, &hdl->ctrl_refs, node) { 1716 if (ref->ctrl->id < id) 1717 continue; 1718 /* Don't add duplicates */ 1719 if (ref->ctrl->id == id) { 1720 kfree(new_ref); 1721 goto unlock; 1722 } 1723 list_add(&new_ref->node, ref->node.prev); 1724 break; 1725 } 1726 1727 insert_in_hash: 1728 /* Insert the control node in the hash */ 1729 new_ref->next = hdl->buckets[bucket]; 1730 hdl->buckets[bucket] = new_ref; 1731 if (ctrl_ref) 1732 *ctrl_ref = new_ref; 1733 if (ctrl->handler == hdl) { 1734 /* By default each control starts in a cluster of its own. 1735 * new_ref->ctrl is basically a cluster array with one 1736 * element, so that's perfect to use as the cluster pointer. 1737 * But only do this for the handler that owns the control. 1738 */ 1739 ctrl->cluster = &new_ref->ctrl; 1740 ctrl->ncontrols = 1; 1741 } 1742 1743 unlock: 1744 mutex_unlock(hdl->lock); 1745 return 0; 1746 } 1747 1748 /* Add a new control */ 1749 static struct v4l2_ctrl *v4l2_ctrl_new(struct v4l2_ctrl_handler *hdl, 1750 const struct v4l2_ctrl_ops *ops, 1751 const struct v4l2_ctrl_type_ops *type_ops, 1752 u32 id, const char *name, enum v4l2_ctrl_type type, 1753 s64 min, s64 max, u64 step, s64 def, 1754 const u32 dims[V4L2_CTRL_MAX_DIMS], u32 elem_size, 1755 u32 flags, const char * const *qmenu, 1756 const s64 *qmenu_int, const union v4l2_ctrl_ptr p_def, 1757 void *priv) 1758 { 1759 struct v4l2_ctrl *ctrl; 1760 unsigned sz_extra; 1761 unsigned nr_of_dims = 0; 1762 unsigned elems = 1; 1763 bool is_array; 1764 unsigned tot_ctrl_size; 1765 void *data; 1766 int err; 1767 1768 if (hdl->error) 1769 return NULL; 1770 1771 while (dims && dims[nr_of_dims]) { 1772 elems *= dims[nr_of_dims]; 1773 nr_of_dims++; 1774 if (nr_of_dims == V4L2_CTRL_MAX_DIMS) 1775 break; 1776 } 1777 is_array = nr_of_dims > 0; 1778 1779 /* Prefill elem_size for all types handled by std_type_ops */ 1780 switch ((u32)type) { 1781 case V4L2_CTRL_TYPE_INTEGER64: 1782 elem_size = sizeof(s64); 1783 break; 1784 case V4L2_CTRL_TYPE_STRING: 1785 elem_size = max + 1; 1786 break; 1787 case V4L2_CTRL_TYPE_U8: 1788 elem_size = sizeof(u8); 1789 break; 1790 case V4L2_CTRL_TYPE_U16: 1791 elem_size = sizeof(u16); 1792 break; 1793 case V4L2_CTRL_TYPE_U32: 1794 elem_size = sizeof(u32); 1795 break; 1796 case V4L2_CTRL_TYPE_MPEG2_SEQUENCE: 1797 elem_size = sizeof(struct v4l2_ctrl_mpeg2_sequence); 1798 break; 1799 case V4L2_CTRL_TYPE_MPEG2_PICTURE: 1800 elem_size = sizeof(struct v4l2_ctrl_mpeg2_picture); 1801 break; 1802 case V4L2_CTRL_TYPE_MPEG2_QUANTISATION: 1803 elem_size = sizeof(struct v4l2_ctrl_mpeg2_quantisation); 1804 break; 1805 case V4L2_CTRL_TYPE_FWHT_PARAMS: 1806 elem_size = sizeof(struct v4l2_ctrl_fwht_params); 1807 break; 1808 case V4L2_CTRL_TYPE_H264_SPS: 1809 elem_size = sizeof(struct v4l2_ctrl_h264_sps); 1810 break; 1811 case V4L2_CTRL_TYPE_H264_PPS: 1812 elem_size = sizeof(struct v4l2_ctrl_h264_pps); 1813 break; 1814 case V4L2_CTRL_TYPE_H264_SCALING_MATRIX: 1815 elem_size = sizeof(struct v4l2_ctrl_h264_scaling_matrix); 1816 break; 1817 case V4L2_CTRL_TYPE_H264_SLICE_PARAMS: 1818 elem_size = sizeof(struct v4l2_ctrl_h264_slice_params); 1819 break; 1820 case V4L2_CTRL_TYPE_H264_DECODE_PARAMS: 1821 elem_size = sizeof(struct v4l2_ctrl_h264_decode_params); 1822 break; 1823 case V4L2_CTRL_TYPE_H264_PRED_WEIGHTS: 1824 elem_size = sizeof(struct v4l2_ctrl_h264_pred_weights); 1825 break; 1826 case V4L2_CTRL_TYPE_VP8_FRAME: 1827 elem_size = sizeof(struct v4l2_ctrl_vp8_frame); 1828 break; 1829 case V4L2_CTRL_TYPE_HEVC_SPS: 1830 elem_size = sizeof(struct v4l2_ctrl_hevc_sps); 1831 break; 1832 case V4L2_CTRL_TYPE_HEVC_PPS: 1833 elem_size = sizeof(struct v4l2_ctrl_hevc_pps); 1834 break; 1835 case V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS: 1836 elem_size = sizeof(struct v4l2_ctrl_hevc_slice_params); 1837 break; 1838 case V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX: 1839 elem_size = sizeof(struct v4l2_ctrl_hevc_scaling_matrix); 1840 break; 1841 case V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS: 1842 elem_size = sizeof(struct v4l2_ctrl_hevc_decode_params); 1843 break; 1844 case V4L2_CTRL_TYPE_HDR10_CLL_INFO: 1845 elem_size = sizeof(struct v4l2_ctrl_hdr10_cll_info); 1846 break; 1847 case V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY: 1848 elem_size = sizeof(struct v4l2_ctrl_hdr10_mastering_display); 1849 break; 1850 case V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR: 1851 elem_size = sizeof(struct v4l2_ctrl_vp9_compressed_hdr); 1852 break; 1853 case V4L2_CTRL_TYPE_VP9_FRAME: 1854 elem_size = sizeof(struct v4l2_ctrl_vp9_frame); 1855 break; 1856 case V4L2_CTRL_TYPE_AV1_SEQUENCE: 1857 elem_size = sizeof(struct v4l2_ctrl_av1_sequence); 1858 break; 1859 case V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY: 1860 elem_size = sizeof(struct v4l2_ctrl_av1_tile_group_entry); 1861 break; 1862 case V4L2_CTRL_TYPE_AV1_FRAME: 1863 elem_size = sizeof(struct v4l2_ctrl_av1_frame); 1864 break; 1865 case V4L2_CTRL_TYPE_AV1_FILM_GRAIN: 1866 elem_size = sizeof(struct v4l2_ctrl_av1_film_grain); 1867 break; 1868 case V4L2_CTRL_TYPE_AREA: 1869 elem_size = sizeof(struct v4l2_area); 1870 break; 1871 default: 1872 if (type < V4L2_CTRL_COMPOUND_TYPES) 1873 elem_size = sizeof(s32); 1874 break; 1875 } 1876 1877 /* Sanity checks */ 1878 if (id == 0 || name == NULL || !elem_size || 1879 id >= V4L2_CID_PRIVATE_BASE || 1880 (type == V4L2_CTRL_TYPE_MENU && qmenu == NULL) || 1881 (type == V4L2_CTRL_TYPE_INTEGER_MENU && qmenu_int == NULL)) { 1882 handler_set_err(hdl, -ERANGE); 1883 return NULL; 1884 } 1885 err = check_range(type, min, max, step, def); 1886 if (err) { 1887 handler_set_err(hdl, err); 1888 return NULL; 1889 } 1890 if (is_array && 1891 (type == V4L2_CTRL_TYPE_BUTTON || 1892 type == V4L2_CTRL_TYPE_CTRL_CLASS)) { 1893 handler_set_err(hdl, -EINVAL); 1894 return NULL; 1895 } 1896 if (flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY) { 1897 /* 1898 * For now only support this for one-dimensional arrays only. 1899 * 1900 * This can be relaxed in the future, but this will 1901 * require more effort. 1902 */ 1903 if (nr_of_dims != 1) { 1904 handler_set_err(hdl, -EINVAL); 1905 return NULL; 1906 } 1907 /* Start with just 1 element */ 1908 elems = 1; 1909 } 1910 1911 tot_ctrl_size = elem_size * elems; 1912 sz_extra = 0; 1913 if (type == V4L2_CTRL_TYPE_BUTTON) 1914 flags |= V4L2_CTRL_FLAG_WRITE_ONLY | 1915 V4L2_CTRL_FLAG_EXECUTE_ON_WRITE; 1916 else if (type == V4L2_CTRL_TYPE_CTRL_CLASS) 1917 flags |= V4L2_CTRL_FLAG_READ_ONLY; 1918 else if (!is_array && 1919 (type == V4L2_CTRL_TYPE_INTEGER64 || 1920 type == V4L2_CTRL_TYPE_STRING || 1921 type >= V4L2_CTRL_COMPOUND_TYPES)) 1922 sz_extra += 2 * tot_ctrl_size; 1923 1924 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) 1925 sz_extra += elem_size; 1926 1927 ctrl = kvzalloc(sizeof(*ctrl) + sz_extra, GFP_KERNEL); 1928 if (ctrl == NULL) { 1929 handler_set_err(hdl, -ENOMEM); 1930 return NULL; 1931 } 1932 1933 INIT_LIST_HEAD(&ctrl->node); 1934 INIT_LIST_HEAD(&ctrl->ev_subs); 1935 ctrl->handler = hdl; 1936 ctrl->ops = ops; 1937 ctrl->type_ops = type_ops ? type_ops : &std_type_ops; 1938 ctrl->id = id; 1939 ctrl->name = name; 1940 ctrl->type = type; 1941 ctrl->flags = flags; 1942 ctrl->minimum = min; 1943 ctrl->maximum = max; 1944 ctrl->step = step; 1945 ctrl->default_value = def; 1946 ctrl->is_string = !is_array && type == V4L2_CTRL_TYPE_STRING; 1947 ctrl->is_ptr = is_array || type >= V4L2_CTRL_COMPOUND_TYPES || ctrl->is_string; 1948 ctrl->is_int = !ctrl->is_ptr && type != V4L2_CTRL_TYPE_INTEGER64; 1949 ctrl->is_array = is_array; 1950 ctrl->is_dyn_array = !!(flags & V4L2_CTRL_FLAG_DYNAMIC_ARRAY); 1951 ctrl->elems = elems; 1952 ctrl->new_elems = elems; 1953 ctrl->nr_of_dims = nr_of_dims; 1954 if (nr_of_dims) 1955 memcpy(ctrl->dims, dims, nr_of_dims * sizeof(dims[0])); 1956 ctrl->elem_size = elem_size; 1957 if (type == V4L2_CTRL_TYPE_MENU) 1958 ctrl->qmenu = qmenu; 1959 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU) 1960 ctrl->qmenu_int = qmenu_int; 1961 ctrl->priv = priv; 1962 ctrl->cur.val = ctrl->val = def; 1963 data = &ctrl[1]; 1964 1965 if (ctrl->is_array) { 1966 ctrl->p_array_alloc_elems = elems; 1967 ctrl->p_array = kvzalloc(2 * elems * elem_size, GFP_KERNEL); 1968 if (!ctrl->p_array) { 1969 kvfree(ctrl); 1970 return NULL; 1971 } 1972 data = ctrl->p_array; 1973 } 1974 1975 if (!ctrl->is_int) { 1976 ctrl->p_new.p = data; 1977 ctrl->p_cur.p = data + tot_ctrl_size; 1978 } else { 1979 ctrl->p_new.p = &ctrl->val; 1980 ctrl->p_cur.p = &ctrl->cur.val; 1981 } 1982 1983 if (type >= V4L2_CTRL_COMPOUND_TYPES && p_def.p_const) { 1984 if (ctrl->is_array) 1985 ctrl->p_def.p = &ctrl[1]; 1986 else 1987 ctrl->p_def.p = ctrl->p_cur.p + tot_ctrl_size; 1988 memcpy(ctrl->p_def.p, p_def.p_const, elem_size); 1989 } 1990 1991 ctrl->type_ops->init(ctrl, 0, ctrl->p_cur); 1992 cur_to_new(ctrl); 1993 1994 if (handler_new_ref(hdl, ctrl, NULL, false, false)) { 1995 kvfree(ctrl->p_array); 1996 kvfree(ctrl); 1997 return NULL; 1998 } 1999 mutex_lock(hdl->lock); 2000 list_add_tail(&ctrl->node, &hdl->ctrls); 2001 mutex_unlock(hdl->lock); 2002 return ctrl; 2003 } 2004 2005 struct v4l2_ctrl *v4l2_ctrl_new_custom(struct v4l2_ctrl_handler *hdl, 2006 const struct v4l2_ctrl_config *cfg, void *priv) 2007 { 2008 bool is_menu; 2009 struct v4l2_ctrl *ctrl; 2010 const char *name = cfg->name; 2011 const char * const *qmenu = cfg->qmenu; 2012 const s64 *qmenu_int = cfg->qmenu_int; 2013 enum v4l2_ctrl_type type = cfg->type; 2014 u32 flags = cfg->flags; 2015 s64 min = cfg->min; 2016 s64 max = cfg->max; 2017 u64 step = cfg->step; 2018 s64 def = cfg->def; 2019 2020 if (name == NULL) 2021 v4l2_ctrl_fill(cfg->id, &name, &type, &min, &max, &step, 2022 &def, &flags); 2023 2024 is_menu = (type == V4L2_CTRL_TYPE_MENU || 2025 type == V4L2_CTRL_TYPE_INTEGER_MENU); 2026 if (is_menu) 2027 WARN_ON(step); 2028 else 2029 WARN_ON(cfg->menu_skip_mask); 2030 if (type == V4L2_CTRL_TYPE_MENU && !qmenu) { 2031 qmenu = v4l2_ctrl_get_menu(cfg->id); 2032 } else if (type == V4L2_CTRL_TYPE_INTEGER_MENU && !qmenu_int) { 2033 handler_set_err(hdl, -EINVAL); 2034 return NULL; 2035 } 2036 2037 ctrl = v4l2_ctrl_new(hdl, cfg->ops, cfg->type_ops, cfg->id, name, 2038 type, min, max, 2039 is_menu ? cfg->menu_skip_mask : step, def, 2040 cfg->dims, cfg->elem_size, 2041 flags, qmenu, qmenu_int, cfg->p_def, priv); 2042 if (ctrl) 2043 ctrl->is_private = cfg->is_private; 2044 return ctrl; 2045 } 2046 EXPORT_SYMBOL(v4l2_ctrl_new_custom); 2047 2048 /* Helper function for standard non-menu controls */ 2049 struct v4l2_ctrl *v4l2_ctrl_new_std(struct v4l2_ctrl_handler *hdl, 2050 const struct v4l2_ctrl_ops *ops, 2051 u32 id, s64 min, s64 max, u64 step, s64 def) 2052 { 2053 const char *name; 2054 enum v4l2_ctrl_type type; 2055 u32 flags; 2056 2057 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2058 if (type == V4L2_CTRL_TYPE_MENU || 2059 type == V4L2_CTRL_TYPE_INTEGER_MENU || 2060 type >= V4L2_CTRL_COMPOUND_TYPES) { 2061 handler_set_err(hdl, -EINVAL); 2062 return NULL; 2063 } 2064 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2065 min, max, step, def, NULL, 0, 2066 flags, NULL, NULL, ptr_null, NULL); 2067 } 2068 EXPORT_SYMBOL(v4l2_ctrl_new_std); 2069 2070 /* Helper function for standard menu controls */ 2071 struct v4l2_ctrl *v4l2_ctrl_new_std_menu(struct v4l2_ctrl_handler *hdl, 2072 const struct v4l2_ctrl_ops *ops, 2073 u32 id, u8 _max, u64 mask, u8 _def) 2074 { 2075 const char * const *qmenu = NULL; 2076 const s64 *qmenu_int = NULL; 2077 unsigned int qmenu_int_len = 0; 2078 const char *name; 2079 enum v4l2_ctrl_type type; 2080 s64 min; 2081 s64 max = _max; 2082 s64 def = _def; 2083 u64 step; 2084 u32 flags; 2085 2086 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2087 2088 if (type == V4L2_CTRL_TYPE_MENU) 2089 qmenu = v4l2_ctrl_get_menu(id); 2090 else if (type == V4L2_CTRL_TYPE_INTEGER_MENU) 2091 qmenu_int = v4l2_ctrl_get_int_menu(id, &qmenu_int_len); 2092 2093 if ((!qmenu && !qmenu_int) || (qmenu_int && max >= qmenu_int_len)) { 2094 handler_set_err(hdl, -EINVAL); 2095 return NULL; 2096 } 2097 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2098 0, max, mask, def, NULL, 0, 2099 flags, qmenu, qmenu_int, ptr_null, NULL); 2100 } 2101 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu); 2102 2103 /* Helper function for standard menu controls with driver defined menu */ 2104 struct v4l2_ctrl *v4l2_ctrl_new_std_menu_items(struct v4l2_ctrl_handler *hdl, 2105 const struct v4l2_ctrl_ops *ops, u32 id, u8 _max, 2106 u64 mask, u8 _def, const char * const *qmenu) 2107 { 2108 enum v4l2_ctrl_type type; 2109 const char *name; 2110 u32 flags; 2111 u64 step; 2112 s64 min; 2113 s64 max = _max; 2114 s64 def = _def; 2115 2116 /* v4l2_ctrl_new_std_menu_items() should only be called for 2117 * standard controls without a standard menu. 2118 */ 2119 if (v4l2_ctrl_get_menu(id)) { 2120 handler_set_err(hdl, -EINVAL); 2121 return NULL; 2122 } 2123 2124 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2125 if (type != V4L2_CTRL_TYPE_MENU || qmenu == NULL) { 2126 handler_set_err(hdl, -EINVAL); 2127 return NULL; 2128 } 2129 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2130 0, max, mask, def, NULL, 0, 2131 flags, qmenu, NULL, ptr_null, NULL); 2132 2133 } 2134 EXPORT_SYMBOL(v4l2_ctrl_new_std_menu_items); 2135 2136 /* Helper function for standard compound controls */ 2137 struct v4l2_ctrl *v4l2_ctrl_new_std_compound(struct v4l2_ctrl_handler *hdl, 2138 const struct v4l2_ctrl_ops *ops, u32 id, 2139 const union v4l2_ctrl_ptr p_def) 2140 { 2141 const char *name; 2142 enum v4l2_ctrl_type type; 2143 u32 flags; 2144 s64 min, max, step, def; 2145 2146 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2147 if (type < V4L2_CTRL_COMPOUND_TYPES) { 2148 handler_set_err(hdl, -EINVAL); 2149 return NULL; 2150 } 2151 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2152 min, max, step, def, NULL, 0, 2153 flags, NULL, NULL, p_def, NULL); 2154 } 2155 EXPORT_SYMBOL(v4l2_ctrl_new_std_compound); 2156 2157 /* Helper function for standard integer menu controls */ 2158 struct v4l2_ctrl *v4l2_ctrl_new_int_menu(struct v4l2_ctrl_handler *hdl, 2159 const struct v4l2_ctrl_ops *ops, 2160 u32 id, u8 _max, u8 _def, const s64 *qmenu_int) 2161 { 2162 const char *name; 2163 enum v4l2_ctrl_type type; 2164 s64 min; 2165 u64 step; 2166 s64 max = _max; 2167 s64 def = _def; 2168 u32 flags; 2169 2170 v4l2_ctrl_fill(id, &name, &type, &min, &max, &step, &def, &flags); 2171 if (type != V4L2_CTRL_TYPE_INTEGER_MENU) { 2172 handler_set_err(hdl, -EINVAL); 2173 return NULL; 2174 } 2175 return v4l2_ctrl_new(hdl, ops, NULL, id, name, type, 2176 0, max, 0, def, NULL, 0, 2177 flags, NULL, qmenu_int, ptr_null, NULL); 2178 } 2179 EXPORT_SYMBOL(v4l2_ctrl_new_int_menu); 2180 2181 /* Add the controls from another handler to our own. */ 2182 int v4l2_ctrl_add_handler(struct v4l2_ctrl_handler *hdl, 2183 struct v4l2_ctrl_handler *add, 2184 bool (*filter)(const struct v4l2_ctrl *ctrl), 2185 bool from_other_dev) 2186 { 2187 struct v4l2_ctrl_ref *ref; 2188 int ret = 0; 2189 2190 /* Do nothing if either handler is NULL or if they are the same */ 2191 if (!hdl || !add || hdl == add) 2192 return 0; 2193 if (hdl->error) 2194 return hdl->error; 2195 mutex_lock(add->lock); 2196 list_for_each_entry(ref, &add->ctrl_refs, node) { 2197 struct v4l2_ctrl *ctrl = ref->ctrl; 2198 2199 /* Skip handler-private controls. */ 2200 if (ctrl->is_private) 2201 continue; 2202 /* And control classes */ 2203 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS) 2204 continue; 2205 /* Filter any unwanted controls */ 2206 if (filter && !filter(ctrl)) 2207 continue; 2208 ret = handler_new_ref(hdl, ctrl, NULL, from_other_dev, false); 2209 if (ret) 2210 break; 2211 } 2212 mutex_unlock(add->lock); 2213 return ret; 2214 } 2215 EXPORT_SYMBOL(v4l2_ctrl_add_handler); 2216 2217 bool v4l2_ctrl_radio_filter(const struct v4l2_ctrl *ctrl) 2218 { 2219 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_TX) 2220 return true; 2221 if (V4L2_CTRL_ID2WHICH(ctrl->id) == V4L2_CTRL_CLASS_FM_RX) 2222 return true; 2223 switch (ctrl->id) { 2224 case V4L2_CID_AUDIO_MUTE: 2225 case V4L2_CID_AUDIO_VOLUME: 2226 case V4L2_CID_AUDIO_BALANCE: 2227 case V4L2_CID_AUDIO_BASS: 2228 case V4L2_CID_AUDIO_TREBLE: 2229 case V4L2_CID_AUDIO_LOUDNESS: 2230 return true; 2231 default: 2232 break; 2233 } 2234 return false; 2235 } 2236 EXPORT_SYMBOL(v4l2_ctrl_radio_filter); 2237 2238 /* Cluster controls */ 2239 void v4l2_ctrl_cluster(unsigned ncontrols, struct v4l2_ctrl **controls) 2240 { 2241 bool has_volatiles = false; 2242 int i; 2243 2244 /* The first control is the master control and it must not be NULL */ 2245 if (WARN_ON(ncontrols == 0 || controls[0] == NULL)) 2246 return; 2247 2248 for (i = 0; i < ncontrols; i++) { 2249 if (controls[i]) { 2250 controls[i]->cluster = controls; 2251 controls[i]->ncontrols = ncontrols; 2252 if (controls[i]->flags & V4L2_CTRL_FLAG_VOLATILE) 2253 has_volatiles = true; 2254 } 2255 } 2256 controls[0]->has_volatiles = has_volatiles; 2257 } 2258 EXPORT_SYMBOL(v4l2_ctrl_cluster); 2259 2260 void v4l2_ctrl_auto_cluster(unsigned ncontrols, struct v4l2_ctrl **controls, 2261 u8 manual_val, bool set_volatile) 2262 { 2263 struct v4l2_ctrl *master = controls[0]; 2264 u32 flag = 0; 2265 int i; 2266 2267 v4l2_ctrl_cluster(ncontrols, controls); 2268 WARN_ON(ncontrols <= 1); 2269 WARN_ON(manual_val < master->minimum || manual_val > master->maximum); 2270 WARN_ON(set_volatile && !has_op(master, g_volatile_ctrl)); 2271 master->is_auto = true; 2272 master->has_volatiles = set_volatile; 2273 master->manual_mode_value = manual_val; 2274 master->flags |= V4L2_CTRL_FLAG_UPDATE; 2275 2276 if (!is_cur_manual(master)) 2277 flag = V4L2_CTRL_FLAG_INACTIVE | 2278 (set_volatile ? V4L2_CTRL_FLAG_VOLATILE : 0); 2279 2280 for (i = 1; i < ncontrols; i++) 2281 if (controls[i]) 2282 controls[i]->flags |= flag; 2283 } 2284 EXPORT_SYMBOL(v4l2_ctrl_auto_cluster); 2285 2286 /* 2287 * Obtain the current volatile values of an autocluster and mark them 2288 * as new. 2289 */ 2290 void update_from_auto_cluster(struct v4l2_ctrl *master) 2291 { 2292 int i; 2293 2294 for (i = 1; i < master->ncontrols; i++) 2295 cur_to_new(master->cluster[i]); 2296 if (!call_op(master, g_volatile_ctrl)) 2297 for (i = 1; i < master->ncontrols; i++) 2298 if (master->cluster[i]) 2299 master->cluster[i]->is_new = 1; 2300 } 2301 2302 /* 2303 * Return non-zero if one or more of the controls in the cluster has a new 2304 * value that differs from the current value. 2305 */ 2306 static int cluster_changed(struct v4l2_ctrl *master) 2307 { 2308 bool changed = false; 2309 int i; 2310 2311 for (i = 0; i < master->ncontrols; i++) { 2312 struct v4l2_ctrl *ctrl = master->cluster[i]; 2313 bool ctrl_changed = false; 2314 2315 if (!ctrl) 2316 continue; 2317 2318 if (ctrl->flags & V4L2_CTRL_FLAG_EXECUTE_ON_WRITE) { 2319 changed = true; 2320 ctrl_changed = true; 2321 } 2322 2323 /* 2324 * Set has_changed to false to avoid generating 2325 * the event V4L2_EVENT_CTRL_CH_VALUE 2326 */ 2327 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) { 2328 ctrl->has_changed = false; 2329 continue; 2330 } 2331 2332 if (ctrl->elems != ctrl->new_elems) 2333 ctrl_changed = true; 2334 if (!ctrl_changed) 2335 ctrl_changed = !ctrl->type_ops->equal(ctrl, 2336 ctrl->p_cur, ctrl->p_new); 2337 ctrl->has_changed = ctrl_changed; 2338 changed |= ctrl->has_changed; 2339 } 2340 return changed; 2341 } 2342 2343 /* 2344 * Core function that calls try/s_ctrl and ensures that the new value is 2345 * copied to the current value on a set. 2346 * Must be called with ctrl->handler->lock held. 2347 */ 2348 int try_or_set_cluster(struct v4l2_fh *fh, struct v4l2_ctrl *master, 2349 bool set, u32 ch_flags) 2350 { 2351 bool update_flag; 2352 int ret; 2353 int i; 2354 2355 /* 2356 * Go through the cluster and either validate the new value or 2357 * (if no new value was set), copy the current value to the new 2358 * value, ensuring a consistent view for the control ops when 2359 * called. 2360 */ 2361 for (i = 0; i < master->ncontrols; i++) { 2362 struct v4l2_ctrl *ctrl = master->cluster[i]; 2363 2364 if (!ctrl) 2365 continue; 2366 2367 if (!ctrl->is_new) { 2368 cur_to_new(ctrl); 2369 continue; 2370 } 2371 /* 2372 * Check again: it may have changed since the 2373 * previous check in try_or_set_ext_ctrls(). 2374 */ 2375 if (set && (ctrl->flags & V4L2_CTRL_FLAG_GRABBED)) 2376 return -EBUSY; 2377 } 2378 2379 ret = call_op(master, try_ctrl); 2380 2381 /* Don't set if there is no change */ 2382 if (ret || !set || !cluster_changed(master)) 2383 return ret; 2384 ret = call_op(master, s_ctrl); 2385 if (ret) 2386 return ret; 2387 2388 /* If OK, then make the new values permanent. */ 2389 update_flag = is_cur_manual(master) != is_new_manual(master); 2390 2391 for (i = 0; i < master->ncontrols; i++) { 2392 /* 2393 * If we switch from auto to manual mode, and this cluster 2394 * contains volatile controls, then all non-master controls 2395 * have to be marked as changed. The 'new' value contains 2396 * the volatile value (obtained by update_from_auto_cluster), 2397 * which now has to become the current value. 2398 */ 2399 if (i && update_flag && is_new_manual(master) && 2400 master->has_volatiles && master->cluster[i]) 2401 master->cluster[i]->has_changed = true; 2402 2403 new_to_cur(fh, master->cluster[i], ch_flags | 2404 ((update_flag && i > 0) ? V4L2_EVENT_CTRL_CH_FLAGS : 0)); 2405 } 2406 return 0; 2407 } 2408 2409 /* Activate/deactivate a control. */ 2410 void v4l2_ctrl_activate(struct v4l2_ctrl *ctrl, bool active) 2411 { 2412 /* invert since the actual flag is called 'inactive' */ 2413 bool inactive = !active; 2414 bool old; 2415 2416 if (ctrl == NULL) 2417 return; 2418 2419 if (inactive) 2420 /* set V4L2_CTRL_FLAG_INACTIVE */ 2421 old = test_and_set_bit(4, &ctrl->flags); 2422 else 2423 /* clear V4L2_CTRL_FLAG_INACTIVE */ 2424 old = test_and_clear_bit(4, &ctrl->flags); 2425 if (old != inactive) 2426 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS); 2427 } 2428 EXPORT_SYMBOL(v4l2_ctrl_activate); 2429 2430 void __v4l2_ctrl_grab(struct v4l2_ctrl *ctrl, bool grabbed) 2431 { 2432 bool old; 2433 2434 if (ctrl == NULL) 2435 return; 2436 2437 lockdep_assert_held(ctrl->handler->lock); 2438 2439 if (grabbed) 2440 /* set V4L2_CTRL_FLAG_GRABBED */ 2441 old = test_and_set_bit(1, &ctrl->flags); 2442 else 2443 /* clear V4L2_CTRL_FLAG_GRABBED */ 2444 old = test_and_clear_bit(1, &ctrl->flags); 2445 if (old != grabbed) 2446 send_event(NULL, ctrl, V4L2_EVENT_CTRL_CH_FLAGS); 2447 } 2448 EXPORT_SYMBOL(__v4l2_ctrl_grab); 2449 2450 /* Call s_ctrl for all controls owned by the handler */ 2451 int __v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl) 2452 { 2453 struct v4l2_ctrl *ctrl; 2454 int ret = 0; 2455 2456 if (hdl == NULL) 2457 return 0; 2458 2459 lockdep_assert_held(hdl->lock); 2460 2461 list_for_each_entry(ctrl, &hdl->ctrls, node) 2462 ctrl->done = false; 2463 2464 list_for_each_entry(ctrl, &hdl->ctrls, node) { 2465 struct v4l2_ctrl *master = ctrl->cluster[0]; 2466 int i; 2467 2468 /* Skip if this control was already handled by a cluster. */ 2469 /* Skip button controls and read-only controls. */ 2470 if (ctrl->done || ctrl->type == V4L2_CTRL_TYPE_BUTTON || 2471 (ctrl->flags & V4L2_CTRL_FLAG_READ_ONLY)) 2472 continue; 2473 2474 for (i = 0; i < master->ncontrols; i++) { 2475 if (master->cluster[i]) { 2476 cur_to_new(master->cluster[i]); 2477 master->cluster[i]->is_new = 1; 2478 master->cluster[i]->done = true; 2479 } 2480 } 2481 ret = call_op(master, s_ctrl); 2482 if (ret) 2483 break; 2484 } 2485 2486 return ret; 2487 } 2488 EXPORT_SYMBOL_GPL(__v4l2_ctrl_handler_setup); 2489 2490 int v4l2_ctrl_handler_setup(struct v4l2_ctrl_handler *hdl) 2491 { 2492 int ret; 2493 2494 if (hdl == NULL) 2495 return 0; 2496 2497 mutex_lock(hdl->lock); 2498 ret = __v4l2_ctrl_handler_setup(hdl); 2499 mutex_unlock(hdl->lock); 2500 2501 return ret; 2502 } 2503 EXPORT_SYMBOL(v4l2_ctrl_handler_setup); 2504 2505 /* Log the control name and value */ 2506 static void log_ctrl(const struct v4l2_ctrl *ctrl, 2507 const char *prefix, const char *colon) 2508 { 2509 if (ctrl->flags & (V4L2_CTRL_FLAG_DISABLED | V4L2_CTRL_FLAG_WRITE_ONLY)) 2510 return; 2511 if (ctrl->type == V4L2_CTRL_TYPE_CTRL_CLASS) 2512 return; 2513 2514 pr_info("%s%s%s: ", prefix, colon, ctrl->name); 2515 2516 ctrl->type_ops->log(ctrl); 2517 2518 if (ctrl->flags & (V4L2_CTRL_FLAG_INACTIVE | 2519 V4L2_CTRL_FLAG_GRABBED | 2520 V4L2_CTRL_FLAG_VOLATILE)) { 2521 if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE) 2522 pr_cont(" inactive"); 2523 if (ctrl->flags & V4L2_CTRL_FLAG_GRABBED) 2524 pr_cont(" grabbed"); 2525 if (ctrl->flags & V4L2_CTRL_FLAG_VOLATILE) 2526 pr_cont(" volatile"); 2527 } 2528 pr_cont("\n"); 2529 } 2530 2531 /* Log all controls owned by the handler */ 2532 void v4l2_ctrl_handler_log_status(struct v4l2_ctrl_handler *hdl, 2533 const char *prefix) 2534 { 2535 struct v4l2_ctrl *ctrl; 2536 const char *colon = ""; 2537 int len; 2538 2539 if (!hdl) 2540 return; 2541 if (!prefix) 2542 prefix = ""; 2543 len = strlen(prefix); 2544 if (len && prefix[len - 1] != ' ') 2545 colon = ": "; 2546 mutex_lock(hdl->lock); 2547 list_for_each_entry(ctrl, &hdl->ctrls, node) 2548 if (!(ctrl->flags & V4L2_CTRL_FLAG_DISABLED)) 2549 log_ctrl(ctrl, prefix, colon); 2550 mutex_unlock(hdl->lock); 2551 } 2552 EXPORT_SYMBOL(v4l2_ctrl_handler_log_status); 2553 2554 int v4l2_ctrl_new_fwnode_properties(struct v4l2_ctrl_handler *hdl, 2555 const struct v4l2_ctrl_ops *ctrl_ops, 2556 const struct v4l2_fwnode_device_properties *p) 2557 { 2558 if (p->orientation != V4L2_FWNODE_PROPERTY_UNSET) { 2559 u32 orientation_ctrl; 2560 2561 switch (p->orientation) { 2562 case V4L2_FWNODE_ORIENTATION_FRONT: 2563 orientation_ctrl = V4L2_CAMERA_ORIENTATION_FRONT; 2564 break; 2565 case V4L2_FWNODE_ORIENTATION_BACK: 2566 orientation_ctrl = V4L2_CAMERA_ORIENTATION_BACK; 2567 break; 2568 case V4L2_FWNODE_ORIENTATION_EXTERNAL: 2569 orientation_ctrl = V4L2_CAMERA_ORIENTATION_EXTERNAL; 2570 break; 2571 default: 2572 return -EINVAL; 2573 } 2574 if (!v4l2_ctrl_new_std_menu(hdl, ctrl_ops, 2575 V4L2_CID_CAMERA_ORIENTATION, 2576 V4L2_CAMERA_ORIENTATION_EXTERNAL, 0, 2577 orientation_ctrl)) 2578 return hdl->error; 2579 } 2580 2581 if (p->rotation != V4L2_FWNODE_PROPERTY_UNSET) { 2582 if (!v4l2_ctrl_new_std(hdl, ctrl_ops, 2583 V4L2_CID_CAMERA_SENSOR_ROTATION, 2584 p->rotation, p->rotation, 1, 2585 p->rotation)) 2586 return hdl->error; 2587 } 2588 2589 return hdl->error; 2590 } 2591 EXPORT_SYMBOL(v4l2_ctrl_new_fwnode_properties); 2592