pax_global_header00006660000000000000000000000064147757713420014533gustar00rootroot0000000000000052 comment=3e445c7855f8240d1f8473d127307dd967be2d25 yavta-0.0+git20250410.3e445c7/000077500000000000000000000000001477577134200151105ustar00rootroot00000000000000yavta-0.0+git20250410.3e445c7/.gitignore000066400000000000000000000000211477577134200170710ustar00rootroot00000000000000*.o build/ yavta yavta-0.0+git20250410.3e445c7/COPYING.GPL000066400000000000000000000431031477577134200165650ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. yavta-0.0+git20250410.3e445c7/Makefile000066400000000000000000000004731477577134200165540ustar00rootroot00000000000000CROSS_COMPILE ?= CC := $(CROSS_COMPILE)gcc CFLAGS ?= -O2 -W -Wall -Iinclude LDFLAGS ?= LIBS := -lrt $(warning WARNING: Makefile support is deprecated, please switch to meson) %.o : %.c $(CC) $(CFLAGS) -c -o $@ $< all: yavta yavta: yavta.o $(CC) $(LDFLAGS) -o $@ $^ $(LIBS) clean: -rm -f *.o -rm -f yavta yavta-0.0+git20250410.3e445c7/include/000077500000000000000000000000001477577134200165335ustar00rootroot00000000000000yavta-0.0+git20250410.3e445c7/include/linux/000077500000000000000000000000001477577134200176725ustar00rootroot00000000000000yavta-0.0+git20250410.3e445c7/include/linux/v4l2-common.h000066400000000000000000000040101477577134200221130ustar00rootroot00000000000000/* SPDX-License-Identifier: ((GPL-2.0+ WITH Linux-syscall-note) OR BSD-3-Clause) */ /* * include/linux/v4l2-common.h * * Common V4L2 and V4L2 subdev definitions. * * Users are advised to #include this file either through videodev2.h * (V4L2) or through v4l2-subdev.h (V4L2 subdev) rather than to refer * to this file directly. * * Copyright (C) 2012 Nokia Corporation * Contact: Sakari Ailus */ #ifndef __V4L2_COMMON__ #define __V4L2_COMMON__ #include /* * * Selection interface definitions * */ /* Current cropping area */ #define V4L2_SEL_TGT_CROP 0x0000 /* Default cropping area */ #define V4L2_SEL_TGT_CROP_DEFAULT 0x0001 /* Cropping bounds */ #define V4L2_SEL_TGT_CROP_BOUNDS 0x0002 /* Native frame size */ #define V4L2_SEL_TGT_NATIVE_SIZE 0x0003 /* Current composing area */ #define V4L2_SEL_TGT_COMPOSE 0x0100 /* Default composing area */ #define V4L2_SEL_TGT_COMPOSE_DEFAULT 0x0101 /* Composing bounds */ #define V4L2_SEL_TGT_COMPOSE_BOUNDS 0x0102 /* Current composing area plus all padding pixels */ #define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103 /* Selection flags */ #define V4L2_SEL_FLAG_GE (1 << 0) #define V4L2_SEL_FLAG_LE (1 << 1) #define V4L2_SEL_FLAG_KEEP_CONFIG (1 << 2) struct v4l2_edid { __u32 pad; __u32 start_block; __u32 blocks; __u32 reserved[5]; __u8 *edid; }; /* Backward compatibility target definitions --- to be removed. */ #define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP #define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE #define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP #define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE #define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS #define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS /* Backward compatibility flag definitions --- to be removed. */ #define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE #define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE #define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG #endif /* __V4L2_COMMON__ */ yavta-0.0+git20250410.3e445c7/include/linux/v4l2-controls.h000066400000000000000000004435321477577134200225060ustar00rootroot00000000000000/* SPDX-License-Identifier: ((GPL-2.0+ WITH Linux-syscall-note) OR BSD-3-Clause) */ /* * Video for Linux Two controls header file * * Copyright (C) 1999-2012 the contributors * * The contents of this header was split off from videodev2.h. All control * definitions should be added to this header, which is included by * videodev2.h. */ #ifndef __LINUX_V4L2_CONTROLS_H #define __LINUX_V4L2_CONTROLS_H #include #include /* Control classes */ #define V4L2_CTRL_CLASS_USER 0x00980000 /* Old-style 'user' controls */ #define V4L2_CTRL_CLASS_CODEC 0x00990000 /* Stateful codec controls */ #define V4L2_CTRL_CLASS_CAMERA 0x009a0000 /* Camera class controls */ #define V4L2_CTRL_CLASS_FM_TX 0x009b0000 /* FM Modulator controls */ #define V4L2_CTRL_CLASS_FLASH 0x009c0000 /* Camera flash controls */ #define V4L2_CTRL_CLASS_JPEG 0x009d0000 /* JPEG-compression controls */ #define V4L2_CTRL_CLASS_IMAGE_SOURCE 0x009e0000 /* Image source controls */ #define V4L2_CTRL_CLASS_IMAGE_PROC 0x009f0000 /* Image processing controls */ #define V4L2_CTRL_CLASS_DV 0x00a00000 /* Digital Video controls */ #define V4L2_CTRL_CLASS_FM_RX 0x00a10000 /* FM Receiver controls */ #define V4L2_CTRL_CLASS_RF_TUNER 0x00a20000 /* RF tuner controls */ #define V4L2_CTRL_CLASS_DETECT 0x00a30000 /* Detection controls */ #define V4L2_CTRL_CLASS_CODEC_STATELESS 0x00a40000 /* Stateless codecs controls */ #define V4L2_CTRL_CLASS_COLORIMETRY 0x00a50000 /* Colorimetry controls */ /* User-class control IDs */ #define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900) #define V4L2_CID_USER_BASE V4L2_CID_BASE #define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1) #define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0) #define V4L2_CID_CONTRAST (V4L2_CID_BASE+1) #define V4L2_CID_SATURATION (V4L2_CID_BASE+2) #define V4L2_CID_HUE (V4L2_CID_BASE+3) #define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE+5) #define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE+6) #define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE+7) #define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE+8) #define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE+9) #define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE+10) #define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE+11) /* Deprecated */ #define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE+12) #define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE+13) #define V4L2_CID_RED_BALANCE (V4L2_CID_BASE+14) #define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE+15) #define V4L2_CID_GAMMA (V4L2_CID_BASE+16) #define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) /* Deprecated */ #define V4L2_CID_EXPOSURE (V4L2_CID_BASE+17) #define V4L2_CID_AUTOGAIN (V4L2_CID_BASE+18) #define V4L2_CID_GAIN (V4L2_CID_BASE+19) #define V4L2_CID_HFLIP (V4L2_CID_BASE+20) #define V4L2_CID_VFLIP (V4L2_CID_BASE+21) #define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE+24) enum v4l2_power_line_frequency { V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0, V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1, V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2, V4L2_CID_POWER_LINE_FREQUENCY_AUTO = 3, }; #define V4L2_CID_HUE_AUTO (V4L2_CID_BASE+25) #define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE+26) #define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27) #define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE+28) #define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE+29) #define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE+30) #define V4L2_CID_COLORFX (V4L2_CID_BASE+31) enum v4l2_colorfx { V4L2_COLORFX_NONE = 0, V4L2_COLORFX_BW = 1, V4L2_COLORFX_SEPIA = 2, V4L2_COLORFX_NEGATIVE = 3, V4L2_COLORFX_EMBOSS = 4, V4L2_COLORFX_SKETCH = 5, V4L2_COLORFX_SKY_BLUE = 6, V4L2_COLORFX_GRASS_GREEN = 7, V4L2_COLORFX_SKIN_WHITEN = 8, V4L2_COLORFX_VIVID = 9, V4L2_COLORFX_AQUA = 10, V4L2_COLORFX_ART_FREEZE = 11, V4L2_COLORFX_SILHOUETTE = 12, V4L2_COLORFX_SOLARIZATION = 13, V4L2_COLORFX_ANTIQUE = 14, V4L2_COLORFX_SET_CBCR = 15, V4L2_COLORFX_SET_RGB = 16, }; #define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE+32) #define V4L2_CID_BAND_STOP_FILTER (V4L2_CID_BASE+33) #define V4L2_CID_ROTATE (V4L2_CID_BASE+34) #define V4L2_CID_BG_COLOR (V4L2_CID_BASE+35) #define V4L2_CID_CHROMA_GAIN (V4L2_CID_BASE+36) #define V4L2_CID_ILLUMINATORS_1 (V4L2_CID_BASE+37) #define V4L2_CID_ILLUMINATORS_2 (V4L2_CID_BASE+38) #define V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (V4L2_CID_BASE+39) #define V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (V4L2_CID_BASE+40) #define V4L2_CID_ALPHA_COMPONENT (V4L2_CID_BASE+41) #define V4L2_CID_COLORFX_CBCR (V4L2_CID_BASE+42) #define V4L2_CID_COLORFX_RGB (V4L2_CID_BASE+43) /* last CID + 1 */ #define V4L2_CID_LASTP1 (V4L2_CID_BASE+44) /* USER-class private control IDs */ /* * The base for the meye driver controls. This driver was removed, but * we keep this define in case any software still uses it. */ #define V4L2_CID_USER_MEYE_BASE (V4L2_CID_USER_BASE + 0x1000) /* The base for the bttv driver controls. * We reserve 32 controls for this driver. */ #define V4L2_CID_USER_BTTV_BASE (V4L2_CID_USER_BASE + 0x1010) /* The base for the s2255 driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_S2255_BASE (V4L2_CID_USER_BASE + 0x1030) /* * The base for the si476x driver controls. See include/media/drv-intf/si476x.h * for the list of controls. Total of 16 controls is reserved for this driver */ #define V4L2_CID_USER_SI476X_BASE (V4L2_CID_USER_BASE + 0x1040) /* The base for the TI VPE driver controls. Total of 16 controls is reserved for * this driver */ #define V4L2_CID_USER_TI_VPE_BASE (V4L2_CID_USER_BASE + 0x1050) /* The base for the saa7134 driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_SAA7134_BASE (V4L2_CID_USER_BASE + 0x1060) /* The base for the adv7180 driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_ADV7180_BASE (V4L2_CID_USER_BASE + 0x1070) /* The base for the tc358743 driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_TC358743_BASE (V4L2_CID_USER_BASE + 0x1080) /* The base for the max217x driver controls. * We reserve 32 controls for this driver */ #define V4L2_CID_USER_MAX217X_BASE (V4L2_CID_USER_BASE + 0x1090) /* The base for the imx driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_IMX_BASE (V4L2_CID_USER_BASE + 0x10b0) /* * The base for the atmel isc driver controls. * We reserve 32 controls for this driver. */ #define V4L2_CID_USER_ATMEL_ISC_BASE (V4L2_CID_USER_BASE + 0x10c0) /* * The base for the CODA driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_CODA_BASE (V4L2_CID_USER_BASE + 0x10e0) /* * The base for MIPI CCS driver controls. * We reserve 128 controls for this driver. */ #define V4L2_CID_USER_CCS_BASE (V4L2_CID_USER_BASE + 0x10f0) /* * The base for Allegro driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_ALLEGRO_BASE (V4L2_CID_USER_BASE + 0x1170) /* * The base for the isl7998x driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_ISL7998X_BASE (V4L2_CID_USER_BASE + 0x1180) /* * The base for DW100 driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_DW100_BASE (V4L2_CID_USER_BASE + 0x1190) /* * The base for Aspeed driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_ASPEED_BASE (V4L2_CID_USER_BASE + 0x11a0) /* * The base for Nuvoton NPCM driver controls. * We reserve 16 controls for this driver. */ #define V4L2_CID_USER_NPCM_BASE (V4L2_CID_USER_BASE + 0x11b0) /* * The base for THine THP7312 driver controls. * We reserve 32 controls for this driver. */ #define V4L2_CID_USER_THP7312_BASE (V4L2_CID_USER_BASE + 0x11c0) /* MPEG-class control IDs */ /* The MPEG controls are applicable to all codec controls * and the 'MPEG' part of the define is historical */ #define V4L2_CID_CODEC_BASE (V4L2_CTRL_CLASS_CODEC | 0x900) #define V4L2_CID_CODEC_CLASS (V4L2_CTRL_CLASS_CODEC | 1) /* MPEG streams, specific to multiplexed streams */ #define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_CODEC_BASE+0) enum v4l2_mpeg_stream_type { V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, /* MPEG-2 program stream */ V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, /* MPEG-2 transport stream */ V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, /* MPEG-1 system stream */ V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, /* MPEG-2 DVD-compatible stream */ V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, /* MPEG-1 VCD-compatible stream */ V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, /* MPEG-2 SVCD-compatible stream */ }; #define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_CODEC_BASE+1) #define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_CODEC_BASE+2) #define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_CODEC_BASE+3) #define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_CODEC_BASE+4) #define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_CODEC_BASE+5) #define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_CODEC_BASE+6) #define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_CODEC_BASE+7) enum v4l2_mpeg_stream_vbi_fmt { V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, /* No VBI in the MPEG stream */ V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, /* VBI in private packets, IVTV format */ }; /* MPEG audio controls specific to multiplexed streams */ #define V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_CODEC_BASE+100) enum v4l2_mpeg_audio_sampling_freq { V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0, V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1, V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2, }; #define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_CODEC_BASE+101) enum v4l2_mpeg_audio_encoding { V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0, V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1, V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2, V4L2_MPEG_AUDIO_ENCODING_AAC = 3, V4L2_MPEG_AUDIO_ENCODING_AC3 = 4, }; #define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_CODEC_BASE+102) enum v4l2_mpeg_audio_l1_bitrate { V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0, V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1, V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2, V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3, V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4, V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5, V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6, V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7, V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8, V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9, V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10, V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11, V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12, V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13, }; #define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_CODEC_BASE+103) enum v4l2_mpeg_audio_l2_bitrate { V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0, V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1, V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2, V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3, V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4, V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5, V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6, V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7, V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8, V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9, V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10, V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11, V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12, V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13, }; #define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_CODEC_BASE+104) enum v4l2_mpeg_audio_l3_bitrate { V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0, V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1, V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2, V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3, V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4, V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5, V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6, V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7, V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8, V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9, V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10, V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11, V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12, V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13, }; #define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_CODEC_BASE+105) enum v4l2_mpeg_audio_mode { V4L2_MPEG_AUDIO_MODE_STEREO = 0, V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1, V4L2_MPEG_AUDIO_MODE_DUAL = 2, V4L2_MPEG_AUDIO_MODE_MONO = 3, }; #define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_CODEC_BASE+106) enum v4l2_mpeg_audio_mode_extension { V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0, V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1, V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2, V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3, }; #define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_CODEC_BASE+107) enum v4l2_mpeg_audio_emphasis { V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0, V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1, V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2, }; #define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_CODEC_BASE+108) enum v4l2_mpeg_audio_crc { V4L2_MPEG_AUDIO_CRC_NONE = 0, V4L2_MPEG_AUDIO_CRC_CRC16 = 1, }; #define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_CODEC_BASE+109) #define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_CODEC_BASE+110) #define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_CODEC_BASE+111) enum v4l2_mpeg_audio_ac3_bitrate { V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0, V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1, V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2, V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3, V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4, V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5, V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6, V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7, V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8, V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9, V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10, V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11, V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12, V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13, V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14, V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15, V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16, V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17, V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18, }; #define V4L2_CID_MPEG_AUDIO_DEC_PLAYBACK (V4L2_CID_CODEC_BASE+112) enum v4l2_mpeg_audio_dec_playback { V4L2_MPEG_AUDIO_DEC_PLAYBACK_AUTO = 0, V4L2_MPEG_AUDIO_DEC_PLAYBACK_STEREO = 1, V4L2_MPEG_AUDIO_DEC_PLAYBACK_LEFT = 2, V4L2_MPEG_AUDIO_DEC_PLAYBACK_RIGHT = 3, V4L2_MPEG_AUDIO_DEC_PLAYBACK_MONO = 4, V4L2_MPEG_AUDIO_DEC_PLAYBACK_SWAPPED_STEREO = 5, }; #define V4L2_CID_MPEG_AUDIO_DEC_MULTILINGUAL_PLAYBACK (V4L2_CID_CODEC_BASE+113) /* MPEG video controls specific to multiplexed streams */ #define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_CODEC_BASE+200) enum v4l2_mpeg_video_encoding { V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0, V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1, V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2, }; #define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_CODEC_BASE+201) enum v4l2_mpeg_video_aspect { V4L2_MPEG_VIDEO_ASPECT_1x1 = 0, V4L2_MPEG_VIDEO_ASPECT_4x3 = 1, V4L2_MPEG_VIDEO_ASPECT_16x9 = 2, V4L2_MPEG_VIDEO_ASPECT_221x100 = 3, }; #define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_CODEC_BASE+202) #define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_CODEC_BASE+203) #define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_CODEC_BASE+204) #define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_CODEC_BASE+205) #define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_CODEC_BASE+206) enum v4l2_mpeg_video_bitrate_mode { V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0, V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1, V4L2_MPEG_VIDEO_BITRATE_MODE_CQ = 2, }; #define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_CODEC_BASE+207) #define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_CODEC_BASE+208) #define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_CODEC_BASE+209) #define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_CODEC_BASE+210) #define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_CODEC_BASE+211) #define V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE (V4L2_CID_CODEC_BASE+212) #define V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER (V4L2_CID_CODEC_BASE+213) #define V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB (V4L2_CID_CODEC_BASE+214) #define V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE (V4L2_CID_CODEC_BASE+215) #define V4L2_CID_MPEG_VIDEO_HEADER_MODE (V4L2_CID_CODEC_BASE+216) enum v4l2_mpeg_video_header_mode { V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE = 0, V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME = 1, }; #define V4L2_CID_MPEG_VIDEO_MAX_REF_PIC (V4L2_CID_CODEC_BASE+217) #define V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE (V4L2_CID_CODEC_BASE+218) #define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES (V4L2_CID_CODEC_BASE+219) #define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB (V4L2_CID_CODEC_BASE+220) #define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE (V4L2_CID_CODEC_BASE+221) enum v4l2_mpeg_video_multi_slice_mode { V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_SINGLE = 0, V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_MAX_MB = 1, V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_MAX_BYTES = 2, /* Kept for backwards compatibility reasons. Stupid typo... */ V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB = 1, V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES = 2, }; #define V4L2_CID_MPEG_VIDEO_VBV_SIZE (V4L2_CID_CODEC_BASE+222) #define V4L2_CID_MPEG_VIDEO_DEC_PTS (V4L2_CID_CODEC_BASE+223) #define V4L2_CID_MPEG_VIDEO_DEC_FRAME (V4L2_CID_CODEC_BASE+224) #define V4L2_CID_MPEG_VIDEO_VBV_DELAY (V4L2_CID_CODEC_BASE+225) #define V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER (V4L2_CID_CODEC_BASE+226) #define V4L2_CID_MPEG_VIDEO_MV_H_SEARCH_RANGE (V4L2_CID_CODEC_BASE+227) #define V4L2_CID_MPEG_VIDEO_MV_V_SEARCH_RANGE (V4L2_CID_CODEC_BASE+228) #define V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME (V4L2_CID_CODEC_BASE+229) #define V4L2_CID_MPEG_VIDEO_BASELAYER_PRIORITY_ID (V4L2_CID_CODEC_BASE+230) #define V4L2_CID_MPEG_VIDEO_AU_DELIMITER (V4L2_CID_CODEC_BASE+231) #define V4L2_CID_MPEG_VIDEO_LTR_COUNT (V4L2_CID_CODEC_BASE+232) #define V4L2_CID_MPEG_VIDEO_FRAME_LTR_INDEX (V4L2_CID_CODEC_BASE+233) #define V4L2_CID_MPEG_VIDEO_USE_LTR_FRAMES (V4L2_CID_CODEC_BASE+234) #define V4L2_CID_MPEG_VIDEO_DEC_CONCEAL_COLOR (V4L2_CID_CODEC_BASE+235) #define V4L2_CID_MPEG_VIDEO_INTRA_REFRESH_PERIOD (V4L2_CID_CODEC_BASE+236) #define V4L2_CID_MPEG_VIDEO_INTRA_REFRESH_PERIOD_TYPE (V4L2_CID_CODEC_BASE+237) enum v4l2_mpeg_video_intra_refresh_period_type { V4L2_CID_MPEG_VIDEO_INTRA_REFRESH_PERIOD_TYPE_RANDOM = 0, V4L2_CID_MPEG_VIDEO_INTRA_REFRESH_PERIOD_TYPE_CYCLIC = 1, }; /* CIDs for the MPEG-2 Part 2 (H.262) codec */ #define V4L2_CID_MPEG_VIDEO_MPEG2_LEVEL (V4L2_CID_CODEC_BASE+270) enum v4l2_mpeg_video_mpeg2_level { V4L2_MPEG_VIDEO_MPEG2_LEVEL_LOW = 0, V4L2_MPEG_VIDEO_MPEG2_LEVEL_MAIN = 1, V4L2_MPEG_VIDEO_MPEG2_LEVEL_HIGH_1440 = 2, V4L2_MPEG_VIDEO_MPEG2_LEVEL_HIGH = 3, }; #define V4L2_CID_MPEG_VIDEO_MPEG2_PROFILE (V4L2_CID_CODEC_BASE+271) enum v4l2_mpeg_video_mpeg2_profile { V4L2_MPEG_VIDEO_MPEG2_PROFILE_SIMPLE = 0, V4L2_MPEG_VIDEO_MPEG2_PROFILE_MAIN = 1, V4L2_MPEG_VIDEO_MPEG2_PROFILE_SNR_SCALABLE = 2, V4L2_MPEG_VIDEO_MPEG2_PROFILE_SPATIALLY_SCALABLE = 3, V4L2_MPEG_VIDEO_MPEG2_PROFILE_HIGH = 4, V4L2_MPEG_VIDEO_MPEG2_PROFILE_MULTIVIEW = 5, }; /* CIDs for the FWHT codec as used by the vicodec driver. */ #define V4L2_CID_FWHT_I_FRAME_QP (V4L2_CID_CODEC_BASE + 290) #define V4L2_CID_FWHT_P_FRAME_QP (V4L2_CID_CODEC_BASE + 291) #define V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP (V4L2_CID_CODEC_BASE+300) #define V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP (V4L2_CID_CODEC_BASE+301) #define V4L2_CID_MPEG_VIDEO_H263_B_FRAME_QP (V4L2_CID_CODEC_BASE+302) #define V4L2_CID_MPEG_VIDEO_H263_MIN_QP (V4L2_CID_CODEC_BASE+303) #define V4L2_CID_MPEG_VIDEO_H263_MAX_QP (V4L2_CID_CODEC_BASE+304) #define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP (V4L2_CID_CODEC_BASE+350) #define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP (V4L2_CID_CODEC_BASE+351) #define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP (V4L2_CID_CODEC_BASE+352) #define V4L2_CID_MPEG_VIDEO_H264_MIN_QP (V4L2_CID_CODEC_BASE+353) #define V4L2_CID_MPEG_VIDEO_H264_MAX_QP (V4L2_CID_CODEC_BASE+354) #define V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM (V4L2_CID_CODEC_BASE+355) #define V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE (V4L2_CID_CODEC_BASE+356) #define V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE (V4L2_CID_CODEC_BASE+357) enum v4l2_mpeg_video_h264_entropy_mode { V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC = 0, V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC = 1, }; #define V4L2_CID_MPEG_VIDEO_H264_I_PERIOD (V4L2_CID_CODEC_BASE+358) #define V4L2_CID_MPEG_VIDEO_H264_LEVEL (V4L2_CID_CODEC_BASE+359) enum v4l2_mpeg_video_h264_level { V4L2_MPEG_VIDEO_H264_LEVEL_1_0 = 0, V4L2_MPEG_VIDEO_H264_LEVEL_1B = 1, V4L2_MPEG_VIDEO_H264_LEVEL_1_1 = 2, V4L2_MPEG_VIDEO_H264_LEVEL_1_2 = 3, V4L2_MPEG_VIDEO_H264_LEVEL_1_3 = 4, V4L2_MPEG_VIDEO_H264_LEVEL_2_0 = 5, V4L2_MPEG_VIDEO_H264_LEVEL_2_1 = 6, V4L2_MPEG_VIDEO_H264_LEVEL_2_2 = 7, V4L2_MPEG_VIDEO_H264_LEVEL_3_0 = 8, V4L2_MPEG_VIDEO_H264_LEVEL_3_1 = 9, V4L2_MPEG_VIDEO_H264_LEVEL_3_2 = 10, V4L2_MPEG_VIDEO_H264_LEVEL_4_0 = 11, V4L2_MPEG_VIDEO_H264_LEVEL_4_1 = 12, V4L2_MPEG_VIDEO_H264_LEVEL_4_2 = 13, V4L2_MPEG_VIDEO_H264_LEVEL_5_0 = 14, V4L2_MPEG_VIDEO_H264_LEVEL_5_1 = 15, V4L2_MPEG_VIDEO_H264_LEVEL_5_2 = 16, V4L2_MPEG_VIDEO_H264_LEVEL_6_0 = 17, V4L2_MPEG_VIDEO_H264_LEVEL_6_1 = 18, V4L2_MPEG_VIDEO_H264_LEVEL_6_2 = 19, }; #define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA (V4L2_CID_CODEC_BASE+360) #define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA (V4L2_CID_CODEC_BASE+361) #define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE (V4L2_CID_CODEC_BASE+362) enum v4l2_mpeg_video_h264_loop_filter_mode { V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_ENABLED = 0, V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED = 1, V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2, }; #define V4L2_CID_MPEG_VIDEO_H264_PROFILE (V4L2_CID_CODEC_BASE+363) enum v4l2_mpeg_video_h264_profile { V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE = 0, V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE = 1, V4L2_MPEG_VIDEO_H264_PROFILE_MAIN = 2, V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED = 3, V4L2_MPEG_VIDEO_H264_PROFILE_HIGH = 4, V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10 = 5, V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422 = 6, V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE = 7, V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA = 8, V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA = 9, V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA = 10, V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA = 11, V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE = 12, V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH = 13, V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA = 14, V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH = 15, V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH = 16, V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_HIGH = 17, }; #define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT (V4L2_CID_CODEC_BASE+364) #define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH (V4L2_CID_CODEC_BASE+365) #define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE (V4L2_CID_CODEC_BASE+366) #define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC (V4L2_CID_CODEC_BASE+367) enum v4l2_mpeg_video_h264_vui_sar_idc { V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_UNSPECIFIED = 0, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1 = 1, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_12x11 = 2, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_10x11 = 3, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_16x11 = 4, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_40x33 = 5, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_24x11 = 6, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_20x11 = 7, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_32x11 = 8, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_80x33 = 9, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_18x11 = 10, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_15x11 = 11, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_64x33 = 12, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_160x99 = 13, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_4x3 = 14, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_3x2 = 15, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_2x1 = 16, V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_EXTENDED = 17, }; #define V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING (V4L2_CID_CODEC_BASE+368) #define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0 (V4L2_CID_CODEC_BASE+369) #define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE (V4L2_CID_CODEC_BASE+370) enum v4l2_mpeg_video_h264_sei_fp_arrangement_type { V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_CHECKERBOARD = 0, V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_COLUMN = 1, V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_ROW = 2, V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_SIDE_BY_SIDE = 3, V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM = 4, V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TEMPORAL = 5, }; #define V4L2_CID_MPEG_VIDEO_H264_FMO (V4L2_CID_CODEC_BASE+371) #define V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE (V4L2_CID_CODEC_BASE+372) enum v4l2_mpeg_video_h264_fmo_map_type { V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES = 0, V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES = 1, V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_FOREGROUND_WITH_LEFT_OVER = 2, V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_BOX_OUT = 3, V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN = 4, V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN = 5, V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_EXPLICIT = 6, }; #define V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP (V4L2_CID_CODEC_BASE+373) #define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION (V4L2_CID_CODEC_BASE+374) enum v4l2_mpeg_video_h264_fmo_change_dir { V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT = 0, V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_LEFT = 1, }; #define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE (V4L2_CID_CODEC_BASE+375) #define V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH (V4L2_CID_CODEC_BASE+376) #define V4L2_CID_MPEG_VIDEO_H264_ASO (V4L2_CID_CODEC_BASE+377) #define V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER (V4L2_CID_CODEC_BASE+378) #define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING (V4L2_CID_CODEC_BASE+379) #define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE (V4L2_CID_CODEC_BASE+380) enum v4l2_mpeg_video_h264_hierarchical_coding_type { V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B = 0, V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P = 1, }; #define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER (V4L2_CID_CODEC_BASE+381) #define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_CODEC_BASE+382) #define V4L2_CID_MPEG_VIDEO_H264_CONSTRAINED_INTRA_PREDICTION (V4L2_CID_CODEC_BASE+383) #define V4L2_CID_MPEG_VIDEO_H264_CHROMA_QP_INDEX_OFFSET (V4L2_CID_CODEC_BASE+384) #define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_MIN_QP (V4L2_CID_CODEC_BASE+385) #define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_MAX_QP (V4L2_CID_CODEC_BASE+386) #define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_MIN_QP (V4L2_CID_CODEC_BASE+387) #define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_MAX_QP (V4L2_CID_CODEC_BASE+388) #define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_MIN_QP (V4L2_CID_CODEC_BASE+389) #define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_MAX_QP (V4L2_CID_CODEC_BASE+390) #define V4L2_CID_MPEG_VIDEO_H264_HIER_CODING_L0_BR (V4L2_CID_CODEC_BASE+391) #define V4L2_CID_MPEG_VIDEO_H264_HIER_CODING_L1_BR (V4L2_CID_CODEC_BASE+392) #define V4L2_CID_MPEG_VIDEO_H264_HIER_CODING_L2_BR (V4L2_CID_CODEC_BASE+393) #define V4L2_CID_MPEG_VIDEO_H264_HIER_CODING_L3_BR (V4L2_CID_CODEC_BASE+394) #define V4L2_CID_MPEG_VIDEO_H264_HIER_CODING_L4_BR (V4L2_CID_CODEC_BASE+395) #define V4L2_CID_MPEG_VIDEO_H264_HIER_CODING_L5_BR (V4L2_CID_CODEC_BASE+396) #define V4L2_CID_MPEG_VIDEO_H264_HIER_CODING_L6_BR (V4L2_CID_CODEC_BASE+397) #define V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP (V4L2_CID_CODEC_BASE+400) #define V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP (V4L2_CID_CODEC_BASE+401) #define V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP (V4L2_CID_CODEC_BASE+402) #define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP (V4L2_CID_CODEC_BASE+403) #define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP (V4L2_CID_CODEC_BASE+404) #define V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL (V4L2_CID_CODEC_BASE+405) enum v4l2_mpeg_video_mpeg4_level { V4L2_MPEG_VIDEO_MPEG4_LEVEL_0 = 0, V4L2_MPEG_VIDEO_MPEG4_LEVEL_0B = 1, V4L2_MPEG_VIDEO_MPEG4_LEVEL_1 = 2, V4L2_MPEG_VIDEO_MPEG4_LEVEL_2 = 3, V4L2_MPEG_VIDEO_MPEG4_LEVEL_3 = 4, V4L2_MPEG_VIDEO_MPEG4_LEVEL_3B = 5, V4L2_MPEG_VIDEO_MPEG4_LEVEL_4 = 6, V4L2_MPEG_VIDEO_MPEG4_LEVEL_5 = 7, }; #define V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE (V4L2_CID_CODEC_BASE+406) enum v4l2_mpeg_video_mpeg4_profile { V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE = 0, V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_SIMPLE = 1, V4L2_MPEG_VIDEO_MPEG4_PROFILE_CORE = 2, V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE_SCALABLE = 3, V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY = 4, }; #define V4L2_CID_MPEG_VIDEO_MPEG4_QPEL (V4L2_CID_CODEC_BASE+407) /* Control IDs for VP8 streams * Although VP8 is not part of MPEG we add these controls to the MPEG class * as that class is already handling other video compression standards */ #define V4L2_CID_MPEG_VIDEO_VPX_NUM_PARTITIONS (V4L2_CID_CODEC_BASE+500) enum v4l2_vp8_num_partitions { V4L2_CID_MPEG_VIDEO_VPX_1_PARTITION = 0, V4L2_CID_MPEG_VIDEO_VPX_2_PARTITIONS = 1, V4L2_CID_MPEG_VIDEO_VPX_4_PARTITIONS = 2, V4L2_CID_MPEG_VIDEO_VPX_8_PARTITIONS = 3, }; #define V4L2_CID_MPEG_VIDEO_VPX_IMD_DISABLE_4X4 (V4L2_CID_CODEC_BASE+501) #define V4L2_CID_MPEG_VIDEO_VPX_NUM_REF_FRAMES (V4L2_CID_CODEC_BASE+502) enum v4l2_vp8_num_ref_frames { V4L2_CID_MPEG_VIDEO_VPX_1_REF_FRAME = 0, V4L2_CID_MPEG_VIDEO_VPX_2_REF_FRAME = 1, V4L2_CID_MPEG_VIDEO_VPX_3_REF_FRAME = 2, }; #define V4L2_CID_MPEG_VIDEO_VPX_FILTER_LEVEL (V4L2_CID_CODEC_BASE+503) #define V4L2_CID_MPEG_VIDEO_VPX_FILTER_SHARPNESS (V4L2_CID_CODEC_BASE+504) #define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_REF_PERIOD (V4L2_CID_CODEC_BASE+505) #define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_SEL (V4L2_CID_CODEC_BASE+506) enum v4l2_vp8_golden_frame_sel { V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_PREV = 0, V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_REF_PERIOD = 1, }; #define V4L2_CID_MPEG_VIDEO_VPX_MIN_QP (V4L2_CID_CODEC_BASE+507) #define V4L2_CID_MPEG_VIDEO_VPX_MAX_QP (V4L2_CID_CODEC_BASE+508) #define V4L2_CID_MPEG_VIDEO_VPX_I_FRAME_QP (V4L2_CID_CODEC_BASE+509) #define V4L2_CID_MPEG_VIDEO_VPX_P_FRAME_QP (V4L2_CID_CODEC_BASE+510) #define V4L2_CID_MPEG_VIDEO_VP8_PROFILE (V4L2_CID_CODEC_BASE+511) enum v4l2_mpeg_video_vp8_profile { V4L2_MPEG_VIDEO_VP8_PROFILE_0 = 0, V4L2_MPEG_VIDEO_VP8_PROFILE_1 = 1, V4L2_MPEG_VIDEO_VP8_PROFILE_2 = 2, V4L2_MPEG_VIDEO_VP8_PROFILE_3 = 3, }; /* Deprecated alias for compatibility reasons. */ #define V4L2_CID_MPEG_VIDEO_VPX_PROFILE V4L2_CID_MPEG_VIDEO_VP8_PROFILE #define V4L2_CID_MPEG_VIDEO_VP9_PROFILE (V4L2_CID_CODEC_BASE+512) enum v4l2_mpeg_video_vp9_profile { V4L2_MPEG_VIDEO_VP9_PROFILE_0 = 0, V4L2_MPEG_VIDEO_VP9_PROFILE_1 = 1, V4L2_MPEG_VIDEO_VP9_PROFILE_2 = 2, V4L2_MPEG_VIDEO_VP9_PROFILE_3 = 3, }; #define V4L2_CID_MPEG_VIDEO_VP9_LEVEL (V4L2_CID_CODEC_BASE+513) enum v4l2_mpeg_video_vp9_level { V4L2_MPEG_VIDEO_VP9_LEVEL_1_0 = 0, V4L2_MPEG_VIDEO_VP9_LEVEL_1_1 = 1, V4L2_MPEG_VIDEO_VP9_LEVEL_2_0 = 2, V4L2_MPEG_VIDEO_VP9_LEVEL_2_1 = 3, V4L2_MPEG_VIDEO_VP9_LEVEL_3_0 = 4, V4L2_MPEG_VIDEO_VP9_LEVEL_3_1 = 5, V4L2_MPEG_VIDEO_VP9_LEVEL_4_0 = 6, V4L2_MPEG_VIDEO_VP9_LEVEL_4_1 = 7, V4L2_MPEG_VIDEO_VP9_LEVEL_5_0 = 8, V4L2_MPEG_VIDEO_VP9_LEVEL_5_1 = 9, V4L2_MPEG_VIDEO_VP9_LEVEL_5_2 = 10, V4L2_MPEG_VIDEO_VP9_LEVEL_6_0 = 11, V4L2_MPEG_VIDEO_VP9_LEVEL_6_1 = 12, V4L2_MPEG_VIDEO_VP9_LEVEL_6_2 = 13, }; /* CIDs for HEVC encoding. */ #define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP (V4L2_CID_CODEC_BASE + 600) #define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP (V4L2_CID_CODEC_BASE + 601) #define V4L2_CID_MPEG_VIDEO_HEVC_I_FRAME_QP (V4L2_CID_CODEC_BASE + 602) #define V4L2_CID_MPEG_VIDEO_HEVC_P_FRAME_QP (V4L2_CID_CODEC_BASE + 603) #define V4L2_CID_MPEG_VIDEO_HEVC_B_FRAME_QP (V4L2_CID_CODEC_BASE + 604) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_QP (V4L2_CID_CODEC_BASE + 605) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_TYPE (V4L2_CID_CODEC_BASE + 606) enum v4l2_mpeg_video_hevc_hier_coding_type { V4L2_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_B = 0, V4L2_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_P = 1, }; #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_LAYER (V4L2_CID_CODEC_BASE + 607) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L0_QP (V4L2_CID_CODEC_BASE + 608) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L1_QP (V4L2_CID_CODEC_BASE + 609) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L2_QP (V4L2_CID_CODEC_BASE + 610) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L3_QP (V4L2_CID_CODEC_BASE + 611) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L4_QP (V4L2_CID_CODEC_BASE + 612) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L5_QP (V4L2_CID_CODEC_BASE + 613) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L6_QP (V4L2_CID_CODEC_BASE + 614) #define V4L2_CID_MPEG_VIDEO_HEVC_PROFILE (V4L2_CID_CODEC_BASE + 615) enum v4l2_mpeg_video_hevc_profile { V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN = 0, V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_STILL_PICTURE = 1, V4L2_MPEG_VIDEO_HEVC_PROFILE_MAIN_10 = 2, }; #define V4L2_CID_MPEG_VIDEO_HEVC_LEVEL (V4L2_CID_CODEC_BASE + 616) enum v4l2_mpeg_video_hevc_level { V4L2_MPEG_VIDEO_HEVC_LEVEL_1 = 0, V4L2_MPEG_VIDEO_HEVC_LEVEL_2 = 1, V4L2_MPEG_VIDEO_HEVC_LEVEL_2_1 = 2, V4L2_MPEG_VIDEO_HEVC_LEVEL_3 = 3, V4L2_MPEG_VIDEO_HEVC_LEVEL_3_1 = 4, V4L2_MPEG_VIDEO_HEVC_LEVEL_4 = 5, V4L2_MPEG_VIDEO_HEVC_LEVEL_4_1 = 6, V4L2_MPEG_VIDEO_HEVC_LEVEL_5 = 7, V4L2_MPEG_VIDEO_HEVC_LEVEL_5_1 = 8, V4L2_MPEG_VIDEO_HEVC_LEVEL_5_2 = 9, V4L2_MPEG_VIDEO_HEVC_LEVEL_6 = 10, V4L2_MPEG_VIDEO_HEVC_LEVEL_6_1 = 11, V4L2_MPEG_VIDEO_HEVC_LEVEL_6_2 = 12, }; #define V4L2_CID_MPEG_VIDEO_HEVC_FRAME_RATE_RESOLUTION (V4L2_CID_CODEC_BASE + 617) #define V4L2_CID_MPEG_VIDEO_HEVC_TIER (V4L2_CID_CODEC_BASE + 618) enum v4l2_mpeg_video_hevc_tier { V4L2_MPEG_VIDEO_HEVC_TIER_MAIN = 0, V4L2_MPEG_VIDEO_HEVC_TIER_HIGH = 1, }; #define V4L2_CID_MPEG_VIDEO_HEVC_MAX_PARTITION_DEPTH (V4L2_CID_CODEC_BASE + 619) #define V4L2_CID_MPEG_VIDEO_HEVC_LOOP_FILTER_MODE (V4L2_CID_CODEC_BASE + 620) enum v4l2_cid_mpeg_video_hevc_loop_filter_mode { V4L2_MPEG_VIDEO_HEVC_LOOP_FILTER_MODE_DISABLED = 0, V4L2_MPEG_VIDEO_HEVC_LOOP_FILTER_MODE_ENABLED = 1, V4L2_MPEG_VIDEO_HEVC_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2, }; #define V4L2_CID_MPEG_VIDEO_HEVC_LF_BETA_OFFSET_DIV2 (V4L2_CID_CODEC_BASE + 621) #define V4L2_CID_MPEG_VIDEO_HEVC_LF_TC_OFFSET_DIV2 (V4L2_CID_CODEC_BASE + 622) #define V4L2_CID_MPEG_VIDEO_HEVC_REFRESH_TYPE (V4L2_CID_CODEC_BASE + 623) enum v4l2_cid_mpeg_video_hevc_refresh_type { V4L2_MPEG_VIDEO_HEVC_REFRESH_NONE = 0, V4L2_MPEG_VIDEO_HEVC_REFRESH_CRA = 1, V4L2_MPEG_VIDEO_HEVC_REFRESH_IDR = 2, }; #define V4L2_CID_MPEG_VIDEO_HEVC_REFRESH_PERIOD (V4L2_CID_CODEC_BASE + 624) #define V4L2_CID_MPEG_VIDEO_HEVC_LOSSLESS_CU (V4L2_CID_CODEC_BASE + 625) #define V4L2_CID_MPEG_VIDEO_HEVC_CONST_INTRA_PRED (V4L2_CID_CODEC_BASE + 626) #define V4L2_CID_MPEG_VIDEO_HEVC_WAVEFRONT (V4L2_CID_CODEC_BASE + 627) #define V4L2_CID_MPEG_VIDEO_HEVC_GENERAL_PB (V4L2_CID_CODEC_BASE + 628) #define V4L2_CID_MPEG_VIDEO_HEVC_TEMPORAL_ID (V4L2_CID_CODEC_BASE + 629) #define V4L2_CID_MPEG_VIDEO_HEVC_STRONG_SMOOTHING (V4L2_CID_CODEC_BASE + 630) #define V4L2_CID_MPEG_VIDEO_HEVC_MAX_NUM_MERGE_MV_MINUS1 (V4L2_CID_CODEC_BASE + 631) #define V4L2_CID_MPEG_VIDEO_HEVC_INTRA_PU_SPLIT (V4L2_CID_CODEC_BASE + 632) #define V4L2_CID_MPEG_VIDEO_HEVC_TMV_PREDICTION (V4L2_CID_CODEC_BASE + 633) #define V4L2_CID_MPEG_VIDEO_HEVC_WITHOUT_STARTCODE (V4L2_CID_CODEC_BASE + 634) #define V4L2_CID_MPEG_VIDEO_HEVC_SIZE_OF_LENGTH_FIELD (V4L2_CID_CODEC_BASE + 635) enum v4l2_cid_mpeg_video_hevc_size_of_length_field { V4L2_MPEG_VIDEO_HEVC_SIZE_0 = 0, V4L2_MPEG_VIDEO_HEVC_SIZE_1 = 1, V4L2_MPEG_VIDEO_HEVC_SIZE_2 = 2, V4L2_MPEG_VIDEO_HEVC_SIZE_4 = 3, }; #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L0_BR (V4L2_CID_CODEC_BASE + 636) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L1_BR (V4L2_CID_CODEC_BASE + 637) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L2_BR (V4L2_CID_CODEC_BASE + 638) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L3_BR (V4L2_CID_CODEC_BASE + 639) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L4_BR (V4L2_CID_CODEC_BASE + 640) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L5_BR (V4L2_CID_CODEC_BASE + 641) #define V4L2_CID_MPEG_VIDEO_HEVC_HIER_CODING_L6_BR (V4L2_CID_CODEC_BASE + 642) #define V4L2_CID_MPEG_VIDEO_REF_NUMBER_FOR_PFRAMES (V4L2_CID_CODEC_BASE + 643) #define V4L2_CID_MPEG_VIDEO_PREPEND_SPSPPS_TO_IDR (V4L2_CID_CODEC_BASE + 644) #define V4L2_CID_MPEG_VIDEO_CONSTANT_QUALITY (V4L2_CID_CODEC_BASE + 645) #define V4L2_CID_MPEG_VIDEO_FRAME_SKIP_MODE (V4L2_CID_CODEC_BASE + 646) enum v4l2_mpeg_video_frame_skip_mode { V4L2_MPEG_VIDEO_FRAME_SKIP_MODE_DISABLED = 0, V4L2_MPEG_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1, V4L2_MPEG_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2, }; #define V4L2_CID_MPEG_VIDEO_HEVC_I_FRAME_MIN_QP (V4L2_CID_CODEC_BASE + 647) #define V4L2_CID_MPEG_VIDEO_HEVC_I_FRAME_MAX_QP (V4L2_CID_CODEC_BASE + 648) #define V4L2_CID_MPEG_VIDEO_HEVC_P_FRAME_MIN_QP (V4L2_CID_CODEC_BASE + 649) #define V4L2_CID_MPEG_VIDEO_HEVC_P_FRAME_MAX_QP (V4L2_CID_CODEC_BASE + 650) #define V4L2_CID_MPEG_VIDEO_HEVC_B_FRAME_MIN_QP (V4L2_CID_CODEC_BASE + 651) #define V4L2_CID_MPEG_VIDEO_HEVC_B_FRAME_MAX_QP (V4L2_CID_CODEC_BASE + 652) #define V4L2_CID_MPEG_VIDEO_DEC_DISPLAY_DELAY (V4L2_CID_CODEC_BASE + 653) #define V4L2_CID_MPEG_VIDEO_DEC_DISPLAY_DELAY_ENABLE (V4L2_CID_CODEC_BASE + 654) #define V4L2_CID_MPEG_VIDEO_AV1_PROFILE (V4L2_CID_CODEC_BASE + 655) /** * enum v4l2_mpeg_video_av1_profile - AV1 profiles * * @V4L2_MPEG_VIDEO_AV1_PROFILE_MAIN: compliant decoders must be able to decode * streams with seq_profile equal to 0. * @V4L2_MPEG_VIDEO_AV1_PROFILE_HIGH: compliant decoders must be able to decode * streams with seq_profile equal less than or equal to 1. * @V4L2_MPEG_VIDEO_AV1_PROFILE_PROFESSIONAL: compliant decoders must be able to * decode streams with seq_profile less than or equal to 2. * * Conveys the highest profile a decoder can work with. */ enum v4l2_mpeg_video_av1_profile { V4L2_MPEG_VIDEO_AV1_PROFILE_MAIN = 0, V4L2_MPEG_VIDEO_AV1_PROFILE_HIGH = 1, V4L2_MPEG_VIDEO_AV1_PROFILE_PROFESSIONAL = 2, }; #define V4L2_CID_MPEG_VIDEO_AV1_LEVEL (V4L2_CID_CODEC_BASE + 656) /** * enum v4l2_mpeg_video_av1_level - AV1 levels * * @V4L2_MPEG_VIDEO_AV1_LEVEL_2_0: Level 2.0. * @V4L2_MPEG_VIDEO_AV1_LEVEL_2_1: Level 2.1. * @V4L2_MPEG_VIDEO_AV1_LEVEL_2_2: Level 2.2. * @V4L2_MPEG_VIDEO_AV1_LEVEL_2_3: Level 2.3. * @V4L2_MPEG_VIDEO_AV1_LEVEL_3_0: Level 3.0. * @V4L2_MPEG_VIDEO_AV1_LEVEL_3_1: Level 3.1. * @V4L2_MPEG_VIDEO_AV1_LEVEL_3_2: Level 3.2. * @V4L2_MPEG_VIDEO_AV1_LEVEL_3_3: Level 3.3. * @V4L2_MPEG_VIDEO_AV1_LEVEL_4_0: Level 4.0. * @V4L2_MPEG_VIDEO_AV1_LEVEL_4_1: Level 4.1. * @V4L2_MPEG_VIDEO_AV1_LEVEL_4_2: Level 4.2. * @V4L2_MPEG_VIDEO_AV1_LEVEL_4_3: Level 4.3. * @V4L2_MPEG_VIDEO_AV1_LEVEL_5_0: Level 5.0. * @V4L2_MPEG_VIDEO_AV1_LEVEL_5_1: Level 5.1. * @V4L2_MPEG_VIDEO_AV1_LEVEL_5_2: Level 5.2. * @V4L2_MPEG_VIDEO_AV1_LEVEL_5_3: Level 5.3. * @V4L2_MPEG_VIDEO_AV1_LEVEL_6_0: Level 6.0. * @V4L2_MPEG_VIDEO_AV1_LEVEL_6_1: Level 6.1. * @V4L2_MPEG_VIDEO_AV1_LEVEL_6_2: Level 6.2. * @V4L2_MPEG_VIDEO_AV1_LEVEL_6_3: Level 6.3. * @V4L2_MPEG_VIDEO_AV1_LEVEL_7_0: Level 7.0. * @V4L2_MPEG_VIDEO_AV1_LEVEL_7_1: Level 7.1. * @V4L2_MPEG_VIDEO_AV1_LEVEL_7_2: Level 7.2. * @V4L2_MPEG_VIDEO_AV1_LEVEL_7_3: Level 7.3. * * Conveys the highest level a decoder can work with. */ enum v4l2_mpeg_video_av1_level { V4L2_MPEG_VIDEO_AV1_LEVEL_2_0 = 0, V4L2_MPEG_VIDEO_AV1_LEVEL_2_1 = 1, V4L2_MPEG_VIDEO_AV1_LEVEL_2_2 = 2, V4L2_MPEG_VIDEO_AV1_LEVEL_2_3 = 3, V4L2_MPEG_VIDEO_AV1_LEVEL_3_0 = 4, V4L2_MPEG_VIDEO_AV1_LEVEL_3_1 = 5, V4L2_MPEG_VIDEO_AV1_LEVEL_3_2 = 6, V4L2_MPEG_VIDEO_AV1_LEVEL_3_3 = 7, V4L2_MPEG_VIDEO_AV1_LEVEL_4_0 = 8, V4L2_MPEG_VIDEO_AV1_LEVEL_4_1 = 9, V4L2_MPEG_VIDEO_AV1_LEVEL_4_2 = 10, V4L2_MPEG_VIDEO_AV1_LEVEL_4_3 = 11, V4L2_MPEG_VIDEO_AV1_LEVEL_5_0 = 12, V4L2_MPEG_VIDEO_AV1_LEVEL_5_1 = 13, V4L2_MPEG_VIDEO_AV1_LEVEL_5_2 = 14, V4L2_MPEG_VIDEO_AV1_LEVEL_5_3 = 15, V4L2_MPEG_VIDEO_AV1_LEVEL_6_0 = 16, V4L2_MPEG_VIDEO_AV1_LEVEL_6_1 = 17, V4L2_MPEG_VIDEO_AV1_LEVEL_6_2 = 18, V4L2_MPEG_VIDEO_AV1_LEVEL_6_3 = 19, V4L2_MPEG_VIDEO_AV1_LEVEL_7_0 = 20, V4L2_MPEG_VIDEO_AV1_LEVEL_7_1 = 21, V4L2_MPEG_VIDEO_AV1_LEVEL_7_2 = 22, V4L2_MPEG_VIDEO_AV1_LEVEL_7_3 = 23 }; /* MPEG-class control IDs specific to the CX2341x driver as defined by V4L2 */ #define V4L2_CID_CODEC_CX2341X_BASE (V4L2_CTRL_CLASS_CODEC | 0x1000) #define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_CODEC_CX2341X_BASE+0) enum v4l2_mpeg_cx2341x_video_spatial_filter_mode { V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0, V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1, }; #define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_CODEC_CX2341X_BASE+1) #define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_CODEC_CX2341X_BASE+2) enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type { V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0, V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2, V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3, V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4, }; #define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_CODEC_CX2341X_BASE+3) enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type { V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0, V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, }; #define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_CODEC_CX2341X_BASE+4) enum v4l2_mpeg_cx2341x_video_temporal_filter_mode { V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0, V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1, }; #define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_CODEC_CX2341X_BASE+5) #define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_CODEC_CX2341X_BASE+6) enum v4l2_mpeg_cx2341x_video_median_filter_type { V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0, V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1, V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2, V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3, V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4, }; #define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_CODEC_CX2341X_BASE+7) #define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_CODEC_CX2341X_BASE+8) #define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_CODEC_CX2341X_BASE+9) #define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_CODEC_CX2341X_BASE+10) #define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_CODEC_CX2341X_BASE+11) /* MPEG-class control IDs specific to the Samsung MFC 5.1 driver as defined by V4L2 */ #define V4L2_CID_CODEC_MFC51_BASE (V4L2_CTRL_CLASS_CODEC | 0x1100) #define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY (V4L2_CID_CODEC_MFC51_BASE+0) #define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY_ENABLE (V4L2_CID_CODEC_MFC51_BASE+1) #define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE (V4L2_CID_CODEC_MFC51_BASE+2) enum v4l2_mpeg_mfc51_video_frame_skip_mode { V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED = 0, V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1, V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2, }; #define V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE (V4L2_CID_CODEC_MFC51_BASE+3) enum v4l2_mpeg_mfc51_video_force_frame_type { V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_DISABLED = 0, V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME = 1, V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_NOT_CODED = 2, }; #define V4L2_CID_MPEG_MFC51_VIDEO_PADDING (V4L2_CID_CODEC_MFC51_BASE+4) #define V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV (V4L2_CID_CODEC_MFC51_BASE+5) #define V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT (V4L2_CID_CODEC_MFC51_BASE+6) #define V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF (V4L2_CID_CODEC_MFC51_BASE+7) #define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY (V4L2_CID_CODEC_MFC51_BASE+50) #define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK (V4L2_CID_CODEC_MFC51_BASE+51) #define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH (V4L2_CID_CODEC_MFC51_BASE+52) #define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC (V4L2_CID_CODEC_MFC51_BASE+53) #define V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P (V4L2_CID_CODEC_MFC51_BASE+54) /* Camera class control IDs */ #define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900) #define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1) #define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE+1) enum v4l2_exposure_auto_type { V4L2_EXPOSURE_AUTO = 0, V4L2_EXPOSURE_MANUAL = 1, V4L2_EXPOSURE_SHUTTER_PRIORITY = 2, V4L2_EXPOSURE_APERTURE_PRIORITY = 3 }; #define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+2) #define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE+3) #define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+4) #define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+5) #define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE+6) #define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE+7) #define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+8) #define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+9) #define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+10) #define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+11) #define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE+12) #define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+13) #define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+14) #define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE+15) #define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE+16) #define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+17) #define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+18) #define V4L2_CID_AUTO_EXPOSURE_BIAS (V4L2_CID_CAMERA_CLASS_BASE+19) #define V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE (V4L2_CID_CAMERA_CLASS_BASE+20) enum v4l2_auto_n_preset_white_balance { V4L2_WHITE_BALANCE_MANUAL = 0, V4L2_WHITE_BALANCE_AUTO = 1, V4L2_WHITE_BALANCE_INCANDESCENT = 2, V4L2_WHITE_BALANCE_FLUORESCENT = 3, V4L2_WHITE_BALANCE_FLUORESCENT_H = 4, V4L2_WHITE_BALANCE_HORIZON = 5, V4L2_WHITE_BALANCE_DAYLIGHT = 6, V4L2_WHITE_BALANCE_FLASH = 7, V4L2_WHITE_BALANCE_CLOUDY = 8, V4L2_WHITE_BALANCE_SHADE = 9, }; #define V4L2_CID_WIDE_DYNAMIC_RANGE (V4L2_CID_CAMERA_CLASS_BASE+21) #define V4L2_CID_IMAGE_STABILIZATION (V4L2_CID_CAMERA_CLASS_BASE+22) #define V4L2_CID_ISO_SENSITIVITY (V4L2_CID_CAMERA_CLASS_BASE+23) #define V4L2_CID_ISO_SENSITIVITY_AUTO (V4L2_CID_CAMERA_CLASS_BASE+24) enum v4l2_iso_sensitivity_auto_type { V4L2_ISO_SENSITIVITY_MANUAL = 0, V4L2_ISO_SENSITIVITY_AUTO = 1, }; #define V4L2_CID_EXPOSURE_METERING (V4L2_CID_CAMERA_CLASS_BASE+25) enum v4l2_exposure_metering { V4L2_EXPOSURE_METERING_AVERAGE = 0, V4L2_EXPOSURE_METERING_CENTER_WEIGHTED = 1, V4L2_EXPOSURE_METERING_SPOT = 2, V4L2_EXPOSURE_METERING_MATRIX = 3, }; #define V4L2_CID_SCENE_MODE (V4L2_CID_CAMERA_CLASS_BASE+26) enum v4l2_scene_mode { V4L2_SCENE_MODE_NONE = 0, V4L2_SCENE_MODE_BACKLIGHT = 1, V4L2_SCENE_MODE_BEACH_SNOW = 2, V4L2_SCENE_MODE_CANDLE_LIGHT = 3, V4L2_SCENE_MODE_DAWN_DUSK = 4, V4L2_SCENE_MODE_FALL_COLORS = 5, V4L2_SCENE_MODE_FIREWORKS = 6, V4L2_SCENE_MODE_LANDSCAPE = 7, V4L2_SCENE_MODE_NIGHT = 8, V4L2_SCENE_MODE_PARTY_INDOOR = 9, V4L2_SCENE_MODE_PORTRAIT = 10, V4L2_SCENE_MODE_SPORTS = 11, V4L2_SCENE_MODE_SUNSET = 12, V4L2_SCENE_MODE_TEXT = 13, }; #define V4L2_CID_3A_LOCK (V4L2_CID_CAMERA_CLASS_BASE+27) #define V4L2_LOCK_EXPOSURE (1 << 0) #define V4L2_LOCK_WHITE_BALANCE (1 << 1) #define V4L2_LOCK_FOCUS (1 << 2) #define V4L2_CID_AUTO_FOCUS_START (V4L2_CID_CAMERA_CLASS_BASE+28) #define V4L2_CID_AUTO_FOCUS_STOP (V4L2_CID_CAMERA_CLASS_BASE+29) #define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30) #define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0) #define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0) #define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1) #define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2) #define V4L2_CID_AUTO_FOCUS_RANGE (V4L2_CID_CAMERA_CLASS_BASE+31) enum v4l2_auto_focus_range { V4L2_AUTO_FOCUS_RANGE_AUTO = 0, V4L2_AUTO_FOCUS_RANGE_NORMAL = 1, V4L2_AUTO_FOCUS_RANGE_MACRO = 2, V4L2_AUTO_FOCUS_RANGE_INFINITY = 3, }; #define V4L2_CID_PAN_SPEED (V4L2_CID_CAMERA_CLASS_BASE+32) #define V4L2_CID_TILT_SPEED (V4L2_CID_CAMERA_CLASS_BASE+33) #define V4L2_CID_CAMERA_ORIENTATION (V4L2_CID_CAMERA_CLASS_BASE+34) #define V4L2_CAMERA_ORIENTATION_FRONT 0 #define V4L2_CAMERA_ORIENTATION_BACK 1 #define V4L2_CAMERA_ORIENTATION_EXTERNAL 2 #define V4L2_CID_CAMERA_SENSOR_ROTATION (V4L2_CID_CAMERA_CLASS_BASE+35) #define V4L2_CID_HDR_SENSOR_MODE (V4L2_CID_CAMERA_CLASS_BASE+36) /* FM Modulator class control IDs */ #define V4L2_CID_FM_TX_CLASS_BASE (V4L2_CTRL_CLASS_FM_TX | 0x900) #define V4L2_CID_FM_TX_CLASS (V4L2_CTRL_CLASS_FM_TX | 1) #define V4L2_CID_RDS_TX_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 1) #define V4L2_CID_RDS_TX_PI (V4L2_CID_FM_TX_CLASS_BASE + 2) #define V4L2_CID_RDS_TX_PTY (V4L2_CID_FM_TX_CLASS_BASE + 3) #define V4L2_CID_RDS_TX_PS_NAME (V4L2_CID_FM_TX_CLASS_BASE + 5) #define V4L2_CID_RDS_TX_RADIO_TEXT (V4L2_CID_FM_TX_CLASS_BASE + 6) #define V4L2_CID_RDS_TX_MONO_STEREO (V4L2_CID_FM_TX_CLASS_BASE + 7) #define V4L2_CID_RDS_TX_ARTIFICIAL_HEAD (V4L2_CID_FM_TX_CLASS_BASE + 8) #define V4L2_CID_RDS_TX_COMPRESSED (V4L2_CID_FM_TX_CLASS_BASE + 9) #define V4L2_CID_RDS_TX_DYNAMIC_PTY (V4L2_CID_FM_TX_CLASS_BASE + 10) #define V4L2_CID_RDS_TX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_TX_CLASS_BASE + 11) #define V4L2_CID_RDS_TX_TRAFFIC_PROGRAM (V4L2_CID_FM_TX_CLASS_BASE + 12) #define V4L2_CID_RDS_TX_MUSIC_SPEECH (V4L2_CID_FM_TX_CLASS_BASE + 13) #define V4L2_CID_RDS_TX_ALT_FREQS_ENABLE (V4L2_CID_FM_TX_CLASS_BASE + 14) #define V4L2_CID_RDS_TX_ALT_FREQS (V4L2_CID_FM_TX_CLASS_BASE + 15) #define V4L2_CID_AUDIO_LIMITER_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 64) #define V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 65) #define V4L2_CID_AUDIO_LIMITER_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 66) #define V4L2_CID_AUDIO_COMPRESSION_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 80) #define V4L2_CID_AUDIO_COMPRESSION_GAIN (V4L2_CID_FM_TX_CLASS_BASE + 81) #define V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (V4L2_CID_FM_TX_CLASS_BASE + 82) #define V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (V4L2_CID_FM_TX_CLASS_BASE + 83) #define V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 84) #define V4L2_CID_PILOT_TONE_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 96) #define V4L2_CID_PILOT_TONE_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 97) #define V4L2_CID_PILOT_TONE_FREQUENCY (V4L2_CID_FM_TX_CLASS_BASE + 98) #define V4L2_CID_TUNE_PREEMPHASIS (V4L2_CID_FM_TX_CLASS_BASE + 112) enum v4l2_preemphasis { V4L2_PREEMPHASIS_DISABLED = 0, V4L2_PREEMPHASIS_50_uS = 1, V4L2_PREEMPHASIS_75_uS = 2, }; #define V4L2_CID_TUNE_POWER_LEVEL (V4L2_CID_FM_TX_CLASS_BASE + 113) #define V4L2_CID_TUNE_ANTENNA_CAPACITOR (V4L2_CID_FM_TX_CLASS_BASE + 114) /* Flash and privacy (indicator) light controls */ #define V4L2_CID_FLASH_CLASS_BASE (V4L2_CTRL_CLASS_FLASH | 0x900) #define V4L2_CID_FLASH_CLASS (V4L2_CTRL_CLASS_FLASH | 1) #define V4L2_CID_FLASH_LED_MODE (V4L2_CID_FLASH_CLASS_BASE + 1) enum v4l2_flash_led_mode { V4L2_FLASH_LED_MODE_NONE, V4L2_FLASH_LED_MODE_FLASH, V4L2_FLASH_LED_MODE_TORCH, }; #define V4L2_CID_FLASH_STROBE_SOURCE (V4L2_CID_FLASH_CLASS_BASE + 2) enum v4l2_flash_strobe_source { V4L2_FLASH_STROBE_SOURCE_SOFTWARE, V4L2_FLASH_STROBE_SOURCE_EXTERNAL, }; #define V4L2_CID_FLASH_STROBE (V4L2_CID_FLASH_CLASS_BASE + 3) #define V4L2_CID_FLASH_STROBE_STOP (V4L2_CID_FLASH_CLASS_BASE + 4) #define V4L2_CID_FLASH_STROBE_STATUS (V4L2_CID_FLASH_CLASS_BASE + 5) #define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_FLASH_CLASS_BASE + 6) #define V4L2_CID_FLASH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 7) #define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 8) #define V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 9) #define V4L2_CID_FLASH_FAULT (V4L2_CID_FLASH_CLASS_BASE + 10) #define V4L2_FLASH_FAULT_OVER_VOLTAGE (1 << 0) #define V4L2_FLASH_FAULT_TIMEOUT (1 << 1) #define V4L2_FLASH_FAULT_OVER_TEMPERATURE (1 << 2) #define V4L2_FLASH_FAULT_SHORT_CIRCUIT (1 << 3) #define V4L2_FLASH_FAULT_OVER_CURRENT (1 << 4) #define V4L2_FLASH_FAULT_INDICATOR (1 << 5) #define V4L2_FLASH_FAULT_UNDER_VOLTAGE (1 << 6) #define V4L2_FLASH_FAULT_INPUT_VOLTAGE (1 << 7) #define V4L2_FLASH_FAULT_LED_OVER_TEMPERATURE (1 << 8) #define V4L2_CID_FLASH_CHARGE (V4L2_CID_FLASH_CLASS_BASE + 11) #define V4L2_CID_FLASH_READY (V4L2_CID_FLASH_CLASS_BASE + 12) /* JPEG-class control IDs */ #define V4L2_CID_JPEG_CLASS_BASE (V4L2_CTRL_CLASS_JPEG | 0x900) #define V4L2_CID_JPEG_CLASS (V4L2_CTRL_CLASS_JPEG | 1) #define V4L2_CID_JPEG_CHROMA_SUBSAMPLING (V4L2_CID_JPEG_CLASS_BASE + 1) enum v4l2_jpeg_chroma_subsampling { V4L2_JPEG_CHROMA_SUBSAMPLING_444 = 0, V4L2_JPEG_CHROMA_SUBSAMPLING_422 = 1, V4L2_JPEG_CHROMA_SUBSAMPLING_420 = 2, V4L2_JPEG_CHROMA_SUBSAMPLING_411 = 3, V4L2_JPEG_CHROMA_SUBSAMPLING_410 = 4, V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY = 5, }; #define V4L2_CID_JPEG_RESTART_INTERVAL (V4L2_CID_JPEG_CLASS_BASE + 2) #define V4L2_CID_JPEG_COMPRESSION_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 3) #define V4L2_CID_JPEG_ACTIVE_MARKER (V4L2_CID_JPEG_CLASS_BASE + 4) #define V4L2_JPEG_ACTIVE_MARKER_APP0 (1 << 0) #define V4L2_JPEG_ACTIVE_MARKER_APP1 (1 << 1) #define V4L2_JPEG_ACTIVE_MARKER_COM (1 << 16) #define V4L2_JPEG_ACTIVE_MARKER_DQT (1 << 17) #define V4L2_JPEG_ACTIVE_MARKER_DHT (1 << 18) /* Image source controls */ #define V4L2_CID_IMAGE_SOURCE_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_SOURCE | 0x900) #define V4L2_CID_IMAGE_SOURCE_CLASS (V4L2_CTRL_CLASS_IMAGE_SOURCE | 1) #define V4L2_CID_VBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 1) #define V4L2_CID_HBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 2) #define V4L2_CID_ANALOGUE_GAIN (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 3) #define V4L2_CID_TEST_PATTERN_RED (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 4) #define V4L2_CID_TEST_PATTERN_GREENR (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 5) #define V4L2_CID_TEST_PATTERN_BLUE (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 6) #define V4L2_CID_TEST_PATTERN_GREENB (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 7) #define V4L2_CID_UNIT_CELL_SIZE (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 8) #define V4L2_CID_NOTIFY_GAINS (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 9) /* Image processing controls */ #define V4L2_CID_IMAGE_PROC_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_PROC | 0x900) #define V4L2_CID_IMAGE_PROC_CLASS (V4L2_CTRL_CLASS_IMAGE_PROC | 1) #define V4L2_CID_LINK_FREQ (V4L2_CID_IMAGE_PROC_CLASS_BASE + 1) #define V4L2_CID_PIXEL_RATE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 2) #define V4L2_CID_TEST_PATTERN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 3) #define V4L2_CID_DEINTERLACING_MODE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 4) #define V4L2_CID_DIGITAL_GAIN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 5) /* DV-class control IDs defined by V4L2 */ #define V4L2_CID_DV_CLASS_BASE (V4L2_CTRL_CLASS_DV | 0x900) #define V4L2_CID_DV_CLASS (V4L2_CTRL_CLASS_DV | 1) #define V4L2_CID_DV_TX_HOTPLUG (V4L2_CID_DV_CLASS_BASE + 1) #define V4L2_CID_DV_TX_RXSENSE (V4L2_CID_DV_CLASS_BASE + 2) #define V4L2_CID_DV_TX_EDID_PRESENT (V4L2_CID_DV_CLASS_BASE + 3) #define V4L2_CID_DV_TX_MODE (V4L2_CID_DV_CLASS_BASE + 4) enum v4l2_dv_tx_mode { V4L2_DV_TX_MODE_DVI_D = 0, V4L2_DV_TX_MODE_HDMI = 1, }; #define V4L2_CID_DV_TX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 5) enum v4l2_dv_rgb_range { V4L2_DV_RGB_RANGE_AUTO = 0, V4L2_DV_RGB_RANGE_LIMITED = 1, V4L2_DV_RGB_RANGE_FULL = 2, }; #define V4L2_CID_DV_TX_IT_CONTENT_TYPE (V4L2_CID_DV_CLASS_BASE + 6) enum v4l2_dv_it_content_type { V4L2_DV_IT_CONTENT_TYPE_GRAPHICS = 0, V4L2_DV_IT_CONTENT_TYPE_PHOTO = 1, V4L2_DV_IT_CONTENT_TYPE_CINEMA = 2, V4L2_DV_IT_CONTENT_TYPE_GAME = 3, V4L2_DV_IT_CONTENT_TYPE_NO_ITC = 4, }; #define V4L2_CID_DV_RX_POWER_PRESENT (V4L2_CID_DV_CLASS_BASE + 100) #define V4L2_CID_DV_RX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 101) #define V4L2_CID_DV_RX_IT_CONTENT_TYPE (V4L2_CID_DV_CLASS_BASE + 102) #define V4L2_CID_FM_RX_CLASS_BASE (V4L2_CTRL_CLASS_FM_RX | 0x900) #define V4L2_CID_FM_RX_CLASS (V4L2_CTRL_CLASS_FM_RX | 1) #define V4L2_CID_TUNE_DEEMPHASIS (V4L2_CID_FM_RX_CLASS_BASE + 1) enum v4l2_deemphasis { V4L2_DEEMPHASIS_DISABLED = V4L2_PREEMPHASIS_DISABLED, V4L2_DEEMPHASIS_50_uS = V4L2_PREEMPHASIS_50_uS, V4L2_DEEMPHASIS_75_uS = V4L2_PREEMPHASIS_75_uS, }; #define V4L2_CID_RDS_RECEPTION (V4L2_CID_FM_RX_CLASS_BASE + 2) #define V4L2_CID_RDS_RX_PTY (V4L2_CID_FM_RX_CLASS_BASE + 3) #define V4L2_CID_RDS_RX_PS_NAME (V4L2_CID_FM_RX_CLASS_BASE + 4) #define V4L2_CID_RDS_RX_RADIO_TEXT (V4L2_CID_FM_RX_CLASS_BASE + 5) #define V4L2_CID_RDS_RX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_RX_CLASS_BASE + 6) #define V4L2_CID_RDS_RX_TRAFFIC_PROGRAM (V4L2_CID_FM_RX_CLASS_BASE + 7) #define V4L2_CID_RDS_RX_MUSIC_SPEECH (V4L2_CID_FM_RX_CLASS_BASE + 8) #define V4L2_CID_RF_TUNER_CLASS_BASE (V4L2_CTRL_CLASS_RF_TUNER | 0x900) #define V4L2_CID_RF_TUNER_CLASS (V4L2_CTRL_CLASS_RF_TUNER | 1) #define V4L2_CID_RF_TUNER_BANDWIDTH_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 11) #define V4L2_CID_RF_TUNER_BANDWIDTH (V4L2_CID_RF_TUNER_CLASS_BASE + 12) #define V4L2_CID_RF_TUNER_RF_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 32) #define V4L2_CID_RF_TUNER_LNA_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 41) #define V4L2_CID_RF_TUNER_LNA_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 42) #define V4L2_CID_RF_TUNER_MIXER_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 51) #define V4L2_CID_RF_TUNER_MIXER_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 52) #define V4L2_CID_RF_TUNER_IF_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 61) #define V4L2_CID_RF_TUNER_IF_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 62) #define V4L2_CID_RF_TUNER_PLL_LOCK (V4L2_CID_RF_TUNER_CLASS_BASE + 91) /* Detection-class control IDs defined by V4L2 */ #define V4L2_CID_DETECT_CLASS_BASE (V4L2_CTRL_CLASS_DETECT | 0x900) #define V4L2_CID_DETECT_CLASS (V4L2_CTRL_CLASS_DETECT | 1) #define V4L2_CID_DETECT_MD_MODE (V4L2_CID_DETECT_CLASS_BASE + 1) enum v4l2_detect_md_mode { V4L2_DETECT_MD_MODE_DISABLED = 0, V4L2_DETECT_MD_MODE_GLOBAL = 1, V4L2_DETECT_MD_MODE_THRESHOLD_GRID = 2, V4L2_DETECT_MD_MODE_REGION_GRID = 3, }; #define V4L2_CID_DETECT_MD_GLOBAL_THRESHOLD (V4L2_CID_DETECT_CLASS_BASE + 2) #define V4L2_CID_DETECT_MD_THRESHOLD_GRID (V4L2_CID_DETECT_CLASS_BASE + 3) #define V4L2_CID_DETECT_MD_REGION_GRID (V4L2_CID_DETECT_CLASS_BASE + 4) /* Stateless CODECs controls */ #define V4L2_CID_CODEC_STATELESS_BASE (V4L2_CTRL_CLASS_CODEC_STATELESS | 0x900) #define V4L2_CID_CODEC_STATELESS_CLASS (V4L2_CTRL_CLASS_CODEC_STATELESS | 1) #define V4L2_CID_STATELESS_H264_DECODE_MODE (V4L2_CID_CODEC_STATELESS_BASE + 0) /** * enum v4l2_stateless_h264_decode_mode - Decoding mode * * @V4L2_STATELESS_H264_DECODE_MODE_SLICE_BASED: indicates that decoding * is performed one slice at a time. In this mode, * V4L2_CID_STATELESS_H264_SLICE_PARAMS must contain the parsed slice * parameters and the OUTPUT buffer must contain a single slice. * V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF feature is used * in order to support multislice frames. * @V4L2_STATELESS_H264_DECODE_MODE_FRAME_BASED: indicates that * decoding is performed per frame. The OUTPUT buffer must contain * all slices and also both fields. This mode is typically supported * by device drivers that are able to parse the slice(s) header(s) * in hardware. When this mode is selected, * V4L2_CID_STATELESS_H264_SLICE_PARAMS is not used. */ enum v4l2_stateless_h264_decode_mode { V4L2_STATELESS_H264_DECODE_MODE_SLICE_BASED, V4L2_STATELESS_H264_DECODE_MODE_FRAME_BASED, }; #define V4L2_CID_STATELESS_H264_START_CODE (V4L2_CID_CODEC_STATELESS_BASE + 1) /** * enum v4l2_stateless_h264_start_code - Start code * * @V4L2_STATELESS_H264_START_CODE_NONE: slices are passed * to the driver without any start code. * @V4L2_STATELESS_H264_START_CODE_ANNEX_B: slices are passed * to the driver with an Annex B start code prefix * (legal start codes can be 3-bytes 0x000001 or 4-bytes 0x00000001). * This mode is typically supported by device drivers that parse * the start code in hardware. */ enum v4l2_stateless_h264_start_code { V4L2_STATELESS_H264_START_CODE_NONE, V4L2_STATELESS_H264_START_CODE_ANNEX_B, }; #define V4L2_H264_SPS_CONSTRAINT_SET0_FLAG 0x01 #define V4L2_H264_SPS_CONSTRAINT_SET1_FLAG 0x02 #define V4L2_H264_SPS_CONSTRAINT_SET2_FLAG 0x04 #define V4L2_H264_SPS_CONSTRAINT_SET3_FLAG 0x08 #define V4L2_H264_SPS_CONSTRAINT_SET4_FLAG 0x10 #define V4L2_H264_SPS_CONSTRAINT_SET5_FLAG 0x20 #define V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE 0x01 #define V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS 0x02 #define V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO 0x04 #define V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED 0x08 #define V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY 0x10 #define V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD 0x20 #define V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE 0x40 #define V4L2_H264_SPS_HAS_CHROMA_FORMAT(sps) \ ((sps)->profile_idc == 100 || (sps)->profile_idc == 110 || \ (sps)->profile_idc == 122 || (sps)->profile_idc == 244 || \ (sps)->profile_idc == 44 || (sps)->profile_idc == 83 || \ (sps)->profile_idc == 86 || (sps)->profile_idc == 118 || \ (sps)->profile_idc == 128 || (sps)->profile_idc == 138 || \ (sps)->profile_idc == 139 || (sps)->profile_idc == 134 || \ (sps)->profile_idc == 135) #define V4L2_CID_STATELESS_H264_SPS (V4L2_CID_CODEC_STATELESS_BASE + 2) /** * struct v4l2_ctrl_h264_sps - H264 sequence parameter set * * All the members on this sequence parameter set structure match the * sequence parameter set syntax as specified by the H264 specification. * * @profile_idc: see H264 specification. * @constraint_set_flags: see H264 specification. * @level_idc: see H264 specification. * @seq_parameter_set_id: see H264 specification. * @chroma_format_idc: see H264 specification. * @bit_depth_luma_minus8: see H264 specification. * @bit_depth_chroma_minus8: see H264 specification. * @log2_max_frame_num_minus4: see H264 specification. * @pic_order_cnt_type: see H264 specification. * @log2_max_pic_order_cnt_lsb_minus4: see H264 specification. * @max_num_ref_frames: see H264 specification. * @num_ref_frames_in_pic_order_cnt_cycle: see H264 specification. * @offset_for_ref_frame: see H264 specification. * @offset_for_non_ref_pic: see H264 specification. * @offset_for_top_to_bottom_field: see H264 specification. * @pic_width_in_mbs_minus1: see H264 specification. * @pic_height_in_map_units_minus1: see H264 specification. * @flags: see V4L2_H264_SPS_FLAG_{}. */ struct v4l2_ctrl_h264_sps { __u8 profile_idc; __u8 constraint_set_flags; __u8 level_idc; __u8 seq_parameter_set_id; __u8 chroma_format_idc; __u8 bit_depth_luma_minus8; __u8 bit_depth_chroma_minus8; __u8 log2_max_frame_num_minus4; __u8 pic_order_cnt_type; __u8 log2_max_pic_order_cnt_lsb_minus4; __u8 max_num_ref_frames; __u8 num_ref_frames_in_pic_order_cnt_cycle; __s32 offset_for_ref_frame[255]; __s32 offset_for_non_ref_pic; __s32 offset_for_top_to_bottom_field; __u16 pic_width_in_mbs_minus1; __u16 pic_height_in_map_units_minus1; __u32 flags; }; #define V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE 0x0001 #define V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT 0x0002 #define V4L2_H264_PPS_FLAG_WEIGHTED_PRED 0x0004 #define V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT 0x0008 #define V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED 0x0010 #define V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT 0x0020 #define V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE 0x0040 #define V4L2_H264_PPS_FLAG_SCALING_MATRIX_PRESENT 0x0080 #define V4L2_CID_STATELESS_H264_PPS (V4L2_CID_CODEC_STATELESS_BASE + 3) /** * struct v4l2_ctrl_h264_pps - H264 picture parameter set * * Except where noted, all the members on this picture parameter set * structure match the picture parameter set syntax as specified * by the H264 specification. * * In particular, V4L2_H264_PPS_FLAG_SCALING_MATRIX_PRESENT flag * has a specific meaning. This flag should be set if a non-flat * scaling matrix applies to the picture. In this case, applications * are expected to use V4L2_CID_STATELESS_H264_SCALING_MATRIX, * to pass the values of the non-flat matrices. * * @pic_parameter_set_id: see H264 specification. * @seq_parameter_set_id: see H264 specification. * @num_slice_groups_minus1: see H264 specification. * @num_ref_idx_l0_default_active_minus1: see H264 specification. * @num_ref_idx_l1_default_active_minus1: see H264 specification. * @weighted_bipred_idc: see H264 specification. * @pic_init_qp_minus26: see H264 specification. * @pic_init_qs_minus26: see H264 specification. * @chroma_qp_index_offset: see H264 specification. * @second_chroma_qp_index_offset: see H264 specification. * @flags: see V4L2_H264_PPS_FLAG_{}. */ struct v4l2_ctrl_h264_pps { __u8 pic_parameter_set_id; __u8 seq_parameter_set_id; __u8 num_slice_groups_minus1; __u8 num_ref_idx_l0_default_active_minus1; __u8 num_ref_idx_l1_default_active_minus1; __u8 weighted_bipred_idc; __s8 pic_init_qp_minus26; __s8 pic_init_qs_minus26; __s8 chroma_qp_index_offset; __s8 second_chroma_qp_index_offset; __u16 flags; }; #define V4L2_CID_STATELESS_H264_SCALING_MATRIX (V4L2_CID_CODEC_STATELESS_BASE + 4) /** * struct v4l2_ctrl_h264_scaling_matrix - H264 scaling matrices * * @scaling_list_4x4: scaling matrix after applying the inverse * scanning process. Expected list order is Intra Y, Intra Cb, * Intra Cr, Inter Y, Inter Cb, Inter Cr. The values on each * scaling list are expected in raster scan order. * @scaling_list_8x8: scaling matrix after applying the inverse * scanning process. Expected list order is Intra Y, Inter Y, * Intra Cb, Inter Cb, Intra Cr, Inter Cr. The values on each * scaling list are expected in raster scan order. * * Note that the list order is different for the 4x4 and 8x8 * matrices as per the H264 specification, see table 7-2 "Assignment * of mnemonic names to scaling list indices and specification of * fall-back rule". */ struct v4l2_ctrl_h264_scaling_matrix { __u8 scaling_list_4x4[6][16]; __u8 scaling_list_8x8[6][64]; }; struct v4l2_h264_weight_factors { __s16 luma_weight[32]; __s16 luma_offset[32]; __s16 chroma_weight[32][2]; __s16 chroma_offset[32][2]; }; #define V4L2_H264_CTRL_PRED_WEIGHTS_REQUIRED(pps, slice) \ ((((pps)->flags & V4L2_H264_PPS_FLAG_WEIGHTED_PRED) && \ ((slice)->slice_type == V4L2_H264_SLICE_TYPE_P || \ (slice)->slice_type == V4L2_H264_SLICE_TYPE_SP)) || \ ((pps)->weighted_bipred_idc == 1 && \ (slice)->slice_type == V4L2_H264_SLICE_TYPE_B)) #define V4L2_CID_STATELESS_H264_PRED_WEIGHTS (V4L2_CID_CODEC_STATELESS_BASE + 5) /** * struct v4l2_ctrl_h264_pred_weights - Prediction weight table * * Prediction weight table, which matches the syntax specified * by the H264 specification. * * @luma_log2_weight_denom: see H264 specification. * @chroma_log2_weight_denom: see H264 specification. * @weight_factors: luma and chroma weight factors. */ struct v4l2_ctrl_h264_pred_weights { __u16 luma_log2_weight_denom; __u16 chroma_log2_weight_denom; struct v4l2_h264_weight_factors weight_factors[2]; }; #define V4L2_H264_SLICE_TYPE_P 0 #define V4L2_H264_SLICE_TYPE_B 1 #define V4L2_H264_SLICE_TYPE_I 2 #define V4L2_H264_SLICE_TYPE_SP 3 #define V4L2_H264_SLICE_TYPE_SI 4 #define V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED 0x01 #define V4L2_H264_SLICE_FLAG_SP_FOR_SWITCH 0x02 #define V4L2_H264_TOP_FIELD_REF 0x1 #define V4L2_H264_BOTTOM_FIELD_REF 0x2 #define V4L2_H264_FRAME_REF 0x3 /** * struct v4l2_h264_reference - H264 picture reference * * @fields: indicates how the picture is referenced. * Valid values are V4L2_H264_{}_REF. * @index: index into v4l2_ctrl_h264_decode_params.dpb[]. */ struct v4l2_h264_reference { __u8 fields; __u8 index; }; /* * Maximum DPB size, as specified by section 'A.3.1 Level limits * common to the Baseline, Main, and Extended profiles'. */ #define V4L2_H264_NUM_DPB_ENTRIES 16 #define V4L2_H264_REF_LIST_LEN (2 * V4L2_H264_NUM_DPB_ENTRIES) #define V4L2_CID_STATELESS_H264_SLICE_PARAMS (V4L2_CID_CODEC_STATELESS_BASE + 6) /** * struct v4l2_ctrl_h264_slice_params - H264 slice parameters * * This structure holds the H264 syntax elements that are specified * as non-invariant for the slices in a given frame. * * Slice invariant syntax elements are contained in struct * v4l2_ctrl_h264_decode_params. This is done to reduce the API surface * on frame-based decoders, where slice header parsing is done by the * hardware. * * Slice invariant syntax elements are specified in specification section * "7.4.3 Slice header semantics". * * Except where noted, the members on this struct match the slice header syntax. * * @header_bit_size: offset in bits to slice_data() from the beginning of this slice. * @first_mb_in_slice: see H264 specification. * @slice_type: see H264 specification. * @colour_plane_id: see H264 specification. * @redundant_pic_cnt: see H264 specification. * @cabac_init_idc: see H264 specification. * @slice_qp_delta: see H264 specification. * @slice_qs_delta: see H264 specification. * @disable_deblocking_filter_idc: see H264 specification. * @slice_alpha_c0_offset_div2: see H264 specification. * @slice_beta_offset_div2: see H264 specification. * @num_ref_idx_l0_active_minus1: see H264 specification. * @num_ref_idx_l1_active_minus1: see H264 specification. * @reserved: padding field. Should be zeroed by applications. * @ref_pic_list0: reference picture list 0 after applying the per-slice modifications. * @ref_pic_list1: reference picture list 1 after applying the per-slice modifications. * @flags: see V4L2_H264_SLICE_FLAG_{}. */ struct v4l2_ctrl_h264_slice_params { __u32 header_bit_size; __u32 first_mb_in_slice; __u8 slice_type; __u8 colour_plane_id; __u8 redundant_pic_cnt; __u8 cabac_init_idc; __s8 slice_qp_delta; __s8 slice_qs_delta; __u8 disable_deblocking_filter_idc; __s8 slice_alpha_c0_offset_div2; __s8 slice_beta_offset_div2; __u8 num_ref_idx_l0_active_minus1; __u8 num_ref_idx_l1_active_minus1; __u8 reserved; struct v4l2_h264_reference ref_pic_list0[V4L2_H264_REF_LIST_LEN]; struct v4l2_h264_reference ref_pic_list1[V4L2_H264_REF_LIST_LEN]; __u32 flags; }; #define V4L2_H264_DPB_ENTRY_FLAG_VALID 0x01 #define V4L2_H264_DPB_ENTRY_FLAG_ACTIVE 0x02 #define V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM 0x04 #define V4L2_H264_DPB_ENTRY_FLAG_FIELD 0x08 /** * struct v4l2_h264_dpb_entry - H264 decoded picture buffer entry * * @reference_ts: timestamp of the V4L2 capture buffer to use as reference. * The timestamp refers to the timestamp field in struct v4l2_buffer. * Use v4l2_timeval_to_ns() to convert the struct timeval to a __u64. * @pic_num: matches PicNum variable assigned during the reference * picture lists construction process. * @frame_num: frame identifier which matches frame_num syntax element. * @fields: indicates how the DPB entry is referenced. Valid values are * V4L2_H264_{}_REF. * @reserved: padding field. Should be zeroed by applications. * @top_field_order_cnt: matches TopFieldOrderCnt picture value. * @bottom_field_order_cnt: matches BottomFieldOrderCnt picture value. * Note that picture field is indicated by v4l2_buffer.field. * @flags: see V4L2_H264_DPB_ENTRY_FLAG_{}. */ struct v4l2_h264_dpb_entry { __u64 reference_ts; __u32 pic_num; __u16 frame_num; __u8 fields; __u8 reserved[5]; __s32 top_field_order_cnt; __s32 bottom_field_order_cnt; __u32 flags; }; #define V4L2_H264_DECODE_PARAM_FLAG_IDR_PIC 0x01 #define V4L2_H264_DECODE_PARAM_FLAG_FIELD_PIC 0x02 #define V4L2_H264_DECODE_PARAM_FLAG_BOTTOM_FIELD 0x04 #define V4L2_H264_DECODE_PARAM_FLAG_PFRAME 0x08 #define V4L2_H264_DECODE_PARAM_FLAG_BFRAME 0x10 #define V4L2_CID_STATELESS_H264_DECODE_PARAMS (V4L2_CID_CODEC_STATELESS_BASE + 7) /** * struct v4l2_ctrl_h264_decode_params - H264 decoding parameters * * @dpb: decoded picture buffer. * @nal_ref_idc: slice header syntax element. * @frame_num: slice header syntax element. * @top_field_order_cnt: matches TopFieldOrderCnt picture value. * @bottom_field_order_cnt: matches BottomFieldOrderCnt picture value. * Note that picture field is indicated by v4l2_buffer.field. * @idr_pic_id: slice header syntax element. * @pic_order_cnt_lsb: slice header syntax element. * @delta_pic_order_cnt_bottom: slice header syntax element. * @delta_pic_order_cnt0: slice header syntax element. * @delta_pic_order_cnt1: slice header syntax element. * @dec_ref_pic_marking_bit_size: size in bits of dec_ref_pic_marking() * syntax element. * @pic_order_cnt_bit_size: size in bits of pic order count syntax. * @slice_group_change_cycle: slice header syntax element. * @reserved: padding field. Should be zeroed by applications. * @flags: see V4L2_H264_DECODE_PARAM_FLAG_{}. */ struct v4l2_ctrl_h264_decode_params { struct v4l2_h264_dpb_entry dpb[V4L2_H264_NUM_DPB_ENTRIES]; __u16 nal_ref_idc; __u16 frame_num; __s32 top_field_order_cnt; __s32 bottom_field_order_cnt; __u16 idr_pic_id; __u16 pic_order_cnt_lsb; __s32 delta_pic_order_cnt_bottom; __s32 delta_pic_order_cnt0; __s32 delta_pic_order_cnt1; __u32 dec_ref_pic_marking_bit_size; __u32 pic_order_cnt_bit_size; __u32 slice_group_change_cycle; __u32 reserved; __u32 flags; }; /* Stateless FWHT control, used by the vicodec driver */ /* Current FWHT version */ #define V4L2_FWHT_VERSION 3 /* Set if this is an interlaced format */ #define V4L2_FWHT_FL_IS_INTERLACED _BITUL(0) /* Set if this is a bottom-first (NTSC) interlaced format */ #define V4L2_FWHT_FL_IS_BOTTOM_FIRST _BITUL(1) /* Set if each 'frame' contains just one field */ #define V4L2_FWHT_FL_IS_ALTERNATE _BITUL(2) /* * If V4L2_FWHT_FL_IS_ALTERNATE was set, then this is set if this * 'frame' is the bottom field, else it is the top field. */ #define V4L2_FWHT_FL_IS_BOTTOM_FIELD _BITUL(3) /* Set if the Y' plane is uncompressed */ #define V4L2_FWHT_FL_LUMA_IS_UNCOMPRESSED _BITUL(4) /* Set if the Cb plane is uncompressed */ #define V4L2_FWHT_FL_CB_IS_UNCOMPRESSED _BITUL(5) /* Set if the Cr plane is uncompressed */ #define V4L2_FWHT_FL_CR_IS_UNCOMPRESSED _BITUL(6) /* Set if the chroma plane is full height, if cleared it is half height */ #define V4L2_FWHT_FL_CHROMA_FULL_HEIGHT _BITUL(7) /* Set if the chroma plane is full width, if cleared it is half width */ #define V4L2_FWHT_FL_CHROMA_FULL_WIDTH _BITUL(8) /* Set if the alpha plane is uncompressed */ #define V4L2_FWHT_FL_ALPHA_IS_UNCOMPRESSED _BITUL(9) /* Set if this is an I Frame */ #define V4L2_FWHT_FL_I_FRAME _BITUL(10) /* A 4-values flag - the number of components - 1 */ #define V4L2_FWHT_FL_COMPONENTS_NUM_MSK GENMASK(18, 16) #define V4L2_FWHT_FL_COMPONENTS_NUM_OFFSET 16 /* A 4-values flag - the pixel encoding type */ #define V4L2_FWHT_FL_PIXENC_MSK GENMASK(20, 19) #define V4L2_FWHT_FL_PIXENC_OFFSET 19 #define V4L2_FWHT_FL_PIXENC_YUV (1 << V4L2_FWHT_FL_PIXENC_OFFSET) #define V4L2_FWHT_FL_PIXENC_RGB (2 << V4L2_FWHT_FL_PIXENC_OFFSET) #define V4L2_FWHT_FL_PIXENC_HSV (3 << V4L2_FWHT_FL_PIXENC_OFFSET) #define V4L2_CID_STATELESS_FWHT_PARAMS (V4L2_CID_CODEC_STATELESS_BASE + 100) /** * struct v4l2_ctrl_fwht_params - FWHT parameters * * @backward_ref_ts: timestamp of the V4L2 capture buffer to use as reference. * The timestamp refers to the timestamp field in struct v4l2_buffer. * Use v4l2_timeval_to_ns() to convert the struct timeval to a __u64. * @version: must be V4L2_FWHT_VERSION. * @width: width of frame. * @height: height of frame. * @flags: FWHT flags (see V4L2_FWHT_FL_*). * @colorspace: the colorspace (enum v4l2_colorspace). * @xfer_func: the transfer function (enum v4l2_xfer_func). * @ycbcr_enc: the Y'CbCr encoding (enum v4l2_ycbcr_encoding). * @quantization: the quantization (enum v4l2_quantization). */ struct v4l2_ctrl_fwht_params { __u64 backward_ref_ts; __u32 version; __u32 width; __u32 height; __u32 flags; __u32 colorspace; __u32 xfer_func; __u32 ycbcr_enc; __u32 quantization; }; /* Stateless VP8 control */ #define V4L2_VP8_SEGMENT_FLAG_ENABLED 0x01 #define V4L2_VP8_SEGMENT_FLAG_UPDATE_MAP 0x02 #define V4L2_VP8_SEGMENT_FLAG_UPDATE_FEATURE_DATA 0x04 #define V4L2_VP8_SEGMENT_FLAG_DELTA_VALUE_MODE 0x08 /** * struct v4l2_vp8_segment - VP8 segment-based adjustments parameters * * @quant_update: update values for the segment quantizer. * @lf_update: update values for the loop filter level. * @segment_probs: branch probabilities of the segment_id decoding tree. * @padding: padding field. Should be zeroed by applications. * @flags: see V4L2_VP8_SEGMENT_FLAG_{}. * * This structure contains segment-based adjustments related parameters. * See the 'update_segmentation()' part of the frame header syntax, * and section '9.3. Segment-Based Adjustments' of the VP8 specification * for more details. */ struct v4l2_vp8_segment { __s8 quant_update[4]; __s8 lf_update[4]; __u8 segment_probs[3]; __u8 padding; __u32 flags; }; #define V4L2_VP8_LF_ADJ_ENABLE 0x01 #define V4L2_VP8_LF_DELTA_UPDATE 0x02 #define V4L2_VP8_LF_FILTER_TYPE_SIMPLE 0x04 /** * struct v4l2_vp8_loop_filter - VP8 loop filter parameters * * @ref_frm_delta: Reference frame signed delta values. * @mb_mode_delta: MB prediction mode signed delta values. * @sharpness_level: matches sharpness_level syntax element. * @level: matches loop_filter_level syntax element. * @padding: padding field. Should be zeroed by applications. * @flags: see V4L2_VP8_LF_{}. * * This structure contains loop filter related parameters. * See the 'mb_lf_adjustments()' part of the frame header syntax, * and section '9.4. Loop Filter Type and Levels' of the VP8 specification * for more details. */ struct v4l2_vp8_loop_filter { __s8 ref_frm_delta[4]; __s8 mb_mode_delta[4]; __u8 sharpness_level; __u8 level; __u16 padding; __u32 flags; }; /** * struct v4l2_vp8_quantization - VP8 quantizattion indices * * @y_ac_qi: luma AC coefficient table index. * @y_dc_delta: luma DC delta vaue. * @y2_dc_delta: y2 block DC delta value. * @y2_ac_delta: y2 block AC delta value. * @uv_dc_delta: chroma DC delta value. * @uv_ac_delta: chroma AC delta value. * @padding: padding field. Should be zeroed by applications. * * This structure contains the quantization indices present * in 'quant_indices()' part of the frame header syntax. * See section '9.6. Dequantization Indices' of the VP8 specification * for more details. */ struct v4l2_vp8_quantization { __u8 y_ac_qi; __s8 y_dc_delta; __s8 y2_dc_delta; __s8 y2_ac_delta; __s8 uv_dc_delta; __s8 uv_ac_delta; __u16 padding; }; #define V4L2_VP8_COEFF_PROB_CNT 11 #define V4L2_VP8_MV_PROB_CNT 19 /** * struct v4l2_vp8_entropy - VP8 update probabilities * * @coeff_probs: coefficient probability update values. * @y_mode_probs: luma intra-prediction probabilities. * @uv_mode_probs: chroma intra-prediction probabilities. * @mv_probs: mv decoding probability. * @padding: padding field. Should be zeroed by applications. * * This structure contains the update probabilities present in * 'token_prob_update()' and 'mv_prob_update()' part of the frame header. * See section '17.2. Probability Updates' of the VP8 specification * for more details. */ struct v4l2_vp8_entropy { __u8 coeff_probs[4][8][3][V4L2_VP8_COEFF_PROB_CNT]; __u8 y_mode_probs[4]; __u8 uv_mode_probs[3]; __u8 mv_probs[2][V4L2_VP8_MV_PROB_CNT]; __u8 padding[3]; }; /** * struct v4l2_vp8_entropy_coder_state - VP8 boolean coder state * * @range: coder state value for "Range" * @value: coder state value for "Value" * @bit_count: number of bits left in range "Value". * @padding: padding field. Should be zeroed by applications. * * This structure contains the state for the boolean coder, as * explained in section '7. Boolean Entropy Decoder' of the VP8 specification. */ struct v4l2_vp8_entropy_coder_state { __u8 range; __u8 value; __u8 bit_count; __u8 padding; }; #define V4L2_VP8_FRAME_FLAG_KEY_FRAME 0x01 #define V4L2_VP8_FRAME_FLAG_EXPERIMENTAL 0x02 #define V4L2_VP8_FRAME_FLAG_SHOW_FRAME 0x04 #define V4L2_VP8_FRAME_FLAG_MB_NO_SKIP_COEFF 0x08 #define V4L2_VP8_FRAME_FLAG_SIGN_BIAS_GOLDEN 0x10 #define V4L2_VP8_FRAME_FLAG_SIGN_BIAS_ALT 0x20 #define V4L2_VP8_FRAME_IS_KEY_FRAME(hdr) \ (!!((hdr)->flags & V4L2_VP8_FRAME_FLAG_KEY_FRAME)) #define V4L2_CID_STATELESS_VP8_FRAME (V4L2_CID_CODEC_STATELESS_BASE + 200) /** * struct v4l2_ctrl_vp8_frame - VP8 frame parameters * * @segment: segmentation parameters. See &v4l2_vp8_segment for more details * @lf: loop filter parameters. See &v4l2_vp8_loop_filter for more details * @quant: quantization parameters. See &v4l2_vp8_quantization for more details * @entropy: update probabilities. See &v4l2_vp8_entropy for more details * @coder_state: boolean coder state. See &v4l2_vp8_entropy_coder_state for more details * @width: frame width. * @height: frame height. * @horizontal_scale: horizontal scaling factor. * @vertical_scale: vertical scaling factor. * @version: bitstream version. * @prob_skip_false: frame header syntax element. * @prob_intra: frame header syntax element. * @prob_last: frame header syntax element. * @prob_gf: frame header syntax element. * @num_dct_parts: number of DCT coefficients partitions. * @first_part_size: size of the first partition, i.e. the control partition. * @first_part_header_bits: size in bits of the first partition header portion. * @dct_part_sizes: DCT coefficients sizes. * @last_frame_ts: "last" reference buffer timestamp. * The timestamp refers to the timestamp field in struct v4l2_buffer. * Use v4l2_timeval_to_ns() to convert the struct timeval to a __u64. * @golden_frame_ts: "golden" reference buffer timestamp. * @alt_frame_ts: "alt" reference buffer timestamp. * @flags: see V4L2_VP8_FRAME_FLAG_{}. */ struct v4l2_ctrl_vp8_frame { struct v4l2_vp8_segment segment; struct v4l2_vp8_loop_filter lf; struct v4l2_vp8_quantization quant; struct v4l2_vp8_entropy entropy; struct v4l2_vp8_entropy_coder_state coder_state; __u16 width; __u16 height; __u8 horizontal_scale; __u8 vertical_scale; __u8 version; __u8 prob_skip_false; __u8 prob_intra; __u8 prob_last; __u8 prob_gf; __u8 num_dct_parts; __u32 first_part_size; __u32 first_part_header_bits; __u32 dct_part_sizes[8]; __u64 last_frame_ts; __u64 golden_frame_ts; __u64 alt_frame_ts; __u64 flags; }; /* Stateless MPEG-2 controls */ #define V4L2_MPEG2_SEQ_FLAG_PROGRESSIVE 0x01 #define V4L2_CID_STATELESS_MPEG2_SEQUENCE (V4L2_CID_CODEC_STATELESS_BASE+220) /** * struct v4l2_ctrl_mpeg2_sequence - MPEG-2 sequence header * * All the members on this structure match the sequence header and sequence * extension syntaxes as specified by the MPEG-2 specification. * * Fields horizontal_size, vertical_size and vbv_buffer_size are a * combination of respective _value and extension syntax elements, * as described in section 6.3.3 "Sequence header". * * @horizontal_size: combination of elements horizontal_size_value and * horizontal_size_extension. * @vertical_size: combination of elements vertical_size_value and * vertical_size_extension. * @vbv_buffer_size: combination of elements vbv_buffer_size_value and * vbv_buffer_size_extension. * @profile_and_level_indication: see MPEG-2 specification. * @chroma_format: see MPEG-2 specification. * @flags: see V4L2_MPEG2_SEQ_FLAG_{}. */ struct v4l2_ctrl_mpeg2_sequence { __u16 horizontal_size; __u16 vertical_size; __u32 vbv_buffer_size; __u16 profile_and_level_indication; __u8 chroma_format; __u8 flags; }; #define V4L2_MPEG2_PIC_CODING_TYPE_I 1 #define V4L2_MPEG2_PIC_CODING_TYPE_P 2 #define V4L2_MPEG2_PIC_CODING_TYPE_B 3 #define V4L2_MPEG2_PIC_CODING_TYPE_D 4 #define V4L2_MPEG2_PIC_TOP_FIELD 0x1 #define V4L2_MPEG2_PIC_BOTTOM_FIELD 0x2 #define V4L2_MPEG2_PIC_FRAME 0x3 #define V4L2_MPEG2_PIC_FLAG_TOP_FIELD_FIRST 0x0001 #define V4L2_MPEG2_PIC_FLAG_FRAME_PRED_DCT 0x0002 #define V4L2_MPEG2_PIC_FLAG_CONCEALMENT_MV 0x0004 #define V4L2_MPEG2_PIC_FLAG_Q_SCALE_TYPE 0x0008 #define V4L2_MPEG2_PIC_FLAG_INTRA_VLC 0x0010 #define V4L2_MPEG2_PIC_FLAG_ALT_SCAN 0x0020 #define V4L2_MPEG2_PIC_FLAG_REPEAT_FIRST 0x0040 #define V4L2_MPEG2_PIC_FLAG_PROGRESSIVE 0x0080 #define V4L2_CID_STATELESS_MPEG2_PICTURE (V4L2_CID_CODEC_STATELESS_BASE+221) /** * struct v4l2_ctrl_mpeg2_picture - MPEG-2 picture header * * All the members on this structure match the picture header and picture * coding extension syntaxes as specified by the MPEG-2 specification. * * @backward_ref_ts: timestamp of the V4L2 capture buffer to use as * reference for backward prediction. * @forward_ref_ts: timestamp of the V4L2 capture buffer to use as * reference for forward prediction. These timestamp refers to the * timestamp field in struct v4l2_buffer. Use v4l2_timeval_to_ns() * to convert the struct timeval to a __u64. * @flags: see V4L2_MPEG2_PIC_FLAG_{}. * @f_code: see MPEG-2 specification. * @picture_coding_type: see MPEG-2 specification. * @picture_structure: see V4L2_MPEG2_PIC_{}_FIELD. * @intra_dc_precision: see MPEG-2 specification. * @reserved: padding field. Should be zeroed by applications. */ struct v4l2_ctrl_mpeg2_picture { __u64 backward_ref_ts; __u64 forward_ref_ts; __u32 flags; __u8 f_code[2][2]; __u8 picture_coding_type; __u8 picture_structure; __u8 intra_dc_precision; __u8 reserved[5]; }; #define V4L2_CID_STATELESS_MPEG2_QUANTISATION (V4L2_CID_CODEC_STATELESS_BASE+222) /** * struct v4l2_ctrl_mpeg2_quantisation - MPEG-2 quantisation * * Quantisation matrices as specified by section 6.3.7 * "Quant matrix extension". * * @intra_quantiser_matrix: The quantisation matrix coefficients * for intra-coded frames, in zigzag scanning order. It is relevant * for both luma and chroma components, although it can be superseded * by the chroma-specific matrix for non-4:2:0 YUV formats. * @non_intra_quantiser_matrix: The quantisation matrix coefficients * for non-intra-coded frames, in zigzag scanning order. It is relevant * for both luma and chroma components, although it can be superseded * by the chroma-specific matrix for non-4:2:0 YUV formats. * @chroma_intra_quantiser_matrix: The quantisation matrix coefficients * for the chominance component of intra-coded frames, in zigzag scanning * order. Only relevant for 4:2:2 and 4:4:4 YUV formats. * @chroma_non_intra_quantiser_matrix: The quantisation matrix coefficients * for the chrominance component of non-intra-coded frames, in zigzag scanning * order. Only relevant for 4:2:2 and 4:4:4 YUV formats. */ struct v4l2_ctrl_mpeg2_quantisation { __u8 intra_quantiser_matrix[64]; __u8 non_intra_quantiser_matrix[64]; __u8 chroma_intra_quantiser_matrix[64]; __u8 chroma_non_intra_quantiser_matrix[64]; }; #define V4L2_CID_STATELESS_HEVC_SPS (V4L2_CID_CODEC_STATELESS_BASE + 400) #define V4L2_CID_STATELESS_HEVC_PPS (V4L2_CID_CODEC_STATELESS_BASE + 401) #define V4L2_CID_STATELESS_HEVC_SLICE_PARAMS (V4L2_CID_CODEC_STATELESS_BASE + 402) #define V4L2_CID_STATELESS_HEVC_SCALING_MATRIX (V4L2_CID_CODEC_STATELESS_BASE + 403) #define V4L2_CID_STATELESS_HEVC_DECODE_PARAMS (V4L2_CID_CODEC_STATELESS_BASE + 404) #define V4L2_CID_STATELESS_HEVC_DECODE_MODE (V4L2_CID_CODEC_STATELESS_BASE + 405) #define V4L2_CID_STATELESS_HEVC_START_CODE (V4L2_CID_CODEC_STATELESS_BASE + 406) #define V4L2_CID_STATELESS_HEVC_ENTRY_POINT_OFFSETS (V4L2_CID_CODEC_STATELESS_BASE + 407) enum v4l2_stateless_hevc_decode_mode { V4L2_STATELESS_HEVC_DECODE_MODE_SLICE_BASED, V4L2_STATELESS_HEVC_DECODE_MODE_FRAME_BASED, }; enum v4l2_stateless_hevc_start_code { V4L2_STATELESS_HEVC_START_CODE_NONE, V4L2_STATELESS_HEVC_START_CODE_ANNEX_B, }; #define V4L2_HEVC_SLICE_TYPE_B 0 #define V4L2_HEVC_SLICE_TYPE_P 1 #define V4L2_HEVC_SLICE_TYPE_I 2 #define V4L2_HEVC_SPS_FLAG_SEPARATE_COLOUR_PLANE (1ULL << 0) #define V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED (1ULL << 1) #define V4L2_HEVC_SPS_FLAG_AMP_ENABLED (1ULL << 2) #define V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET (1ULL << 3) #define V4L2_HEVC_SPS_FLAG_PCM_ENABLED (1ULL << 4) #define V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED (1ULL << 5) #define V4L2_HEVC_SPS_FLAG_LONG_TERM_REF_PICS_PRESENT (1ULL << 6) #define V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED (1ULL << 7) #define V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED (1ULL << 8) /** * struct v4l2_ctrl_hevc_sps - ITU-T Rec. H.265: Sequence parameter set * * @video_parameter_set_id: specifies the value of the * vps_video_parameter_set_id of the active VPS * @seq_parameter_set_id: provides an identifier for the SPS for * reference by other syntax elements * @pic_width_in_luma_samples: specifies the width of each decoded picture * in units of luma samples * @pic_height_in_luma_samples: specifies the height of each decoded picture * in units of luma samples * @bit_depth_luma_minus8: this value plus 8specifies the bit depth of the * samples of the luma array * @bit_depth_chroma_minus8: this value plus 8 specifies the bit depth of the * samples of the chroma arrays * @log2_max_pic_order_cnt_lsb_minus4: this value plus 4 specifies the value of * the variable MaxPicOrderCntLsb * @sps_max_dec_pic_buffering_minus1: this value plus 1 specifies the maximum * required size of the decoded picture * buffer for the codec video sequence * @sps_max_num_reorder_pics: indicates the maximum allowed number of pictures * @sps_max_latency_increase_plus1: not equal to 0 is used to compute the * value of SpsMaxLatencyPictures array * @log2_min_luma_coding_block_size_minus3: plus 3 specifies the minimum * luma coding block size * @log2_diff_max_min_luma_coding_block_size: specifies the difference between * the maximum and minimum luma * coding block size * @log2_min_luma_transform_block_size_minus2: plus 2 specifies the minimum luma * transform block size * @log2_diff_max_min_luma_transform_block_size: specifies the difference between * the maximum and minimum luma * transform block size * @max_transform_hierarchy_depth_inter: specifies the maximum hierarchy * depth for transform units of * coding units coded in inter * prediction mode * @max_transform_hierarchy_depth_intra: specifies the maximum hierarchy * depth for transform units of * coding units coded in intra * prediction mode * @pcm_sample_bit_depth_luma_minus1: this value plus 1 specifies the number of * bits used to represent each of PCM sample * values of the luma component * @pcm_sample_bit_depth_chroma_minus1: this value plus 1 specifies the number * of bits used to represent each of PCM * sample values of the chroma components * @log2_min_pcm_luma_coding_block_size_minus3: this value plus 3 specifies the * minimum size of coding blocks * @log2_diff_max_min_pcm_luma_coding_block_size: specifies the difference between * the maximum and minimum size of * coding blocks * @num_short_term_ref_pic_sets: specifies the number of st_ref_pic_set() * syntax structures included in the SPS * @num_long_term_ref_pics_sps: specifies the number of candidate long-term * reference pictures that are specified in the SPS * @chroma_format_idc: specifies the chroma sampling * @sps_max_sub_layers_minus1: this value plus 1 specifies the maximum number * of temporal sub-layers * @reserved: padding field. Should be zeroed by applications. * @flags: see V4L2_HEVC_SPS_FLAG_{} */ struct v4l2_ctrl_hevc_sps { __u8 video_parameter_set_id; __u8 seq_parameter_set_id; __u16 pic_width_in_luma_samples; __u16 pic_height_in_luma_samples; __u8 bit_depth_luma_minus8; __u8 bit_depth_chroma_minus8; __u8 log2_max_pic_order_cnt_lsb_minus4; __u8 sps_max_dec_pic_buffering_minus1; __u8 sps_max_num_reorder_pics; __u8 sps_max_latency_increase_plus1; __u8 log2_min_luma_coding_block_size_minus3; __u8 log2_diff_max_min_luma_coding_block_size; __u8 log2_min_luma_transform_block_size_minus2; __u8 log2_diff_max_min_luma_transform_block_size; __u8 max_transform_hierarchy_depth_inter; __u8 max_transform_hierarchy_depth_intra; __u8 pcm_sample_bit_depth_luma_minus1; __u8 pcm_sample_bit_depth_chroma_minus1; __u8 log2_min_pcm_luma_coding_block_size_minus3; __u8 log2_diff_max_min_pcm_luma_coding_block_size; __u8 num_short_term_ref_pic_sets; __u8 num_long_term_ref_pics_sps; __u8 chroma_format_idc; __u8 sps_max_sub_layers_minus1; __u8 reserved[6]; __u64 flags; }; #define V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT_ENABLED (1ULL << 0) #define V4L2_HEVC_PPS_FLAG_OUTPUT_FLAG_PRESENT (1ULL << 1) #define V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED (1ULL << 2) #define V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT (1ULL << 3) #define V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED (1ULL << 4) #define V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED (1ULL << 5) #define V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED (1ULL << 6) #define V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT (1ULL << 7) #define V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED (1ULL << 8) #define V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED (1ULL << 9) #define V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED (1ULL << 10) #define V4L2_HEVC_PPS_FLAG_TILES_ENABLED (1ULL << 11) #define V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED (1ULL << 12) #define V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED (1ULL << 13) #define V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED (1ULL << 14) #define V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED (1ULL << 15) #define V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER (1ULL << 16) #define V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT (1ULL << 17) #define V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT (1ULL << 18) #define V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT (1ULL << 19) #define V4L2_HEVC_PPS_FLAG_UNIFORM_SPACING (1ULL << 20) /** * struct v4l2_ctrl_hevc_pps - ITU-T Rec. H.265: Picture parameter set * * @pic_parameter_set_id: identifies the PPS for reference by other * syntax elements * @num_extra_slice_header_bits: specifies the number of extra slice header * bits that are present in the slice header RBSP * for coded pictures referring to the PPS. * @num_ref_idx_l0_default_active_minus1: this value plus 1 specifies the * inferred value of num_ref_idx_l0_active_minus1 * @num_ref_idx_l1_default_active_minus1: this value plus 1 specifies the * inferred value of num_ref_idx_l1_active_minus1 * @init_qp_minus26: this value plus 26 specifies the initial value of SliceQp Y for * each slice referring to the PPS * @diff_cu_qp_delta_depth: specifies the difference between the luma coding * tree block size and the minimum luma coding block * size of coding units that convey cu_qp_delta_abs * and cu_qp_delta_sign_flag * @pps_cb_qp_offset: specify the offsets to the luma quantization parameter Cb * @pps_cr_qp_offset: specify the offsets to the luma quantization parameter Cr * @num_tile_columns_minus1: this value plus 1 specifies the number of tile columns * partitioning the picture * @num_tile_rows_minus1: this value plus 1 specifies the number of tile rows partitioning * the picture * @column_width_minus1: this value plus 1 specifies the width of the each tile column in * units of coding tree blocks * @row_height_minus1: this value plus 1 specifies the height of the each tile row in * units of coding tree blocks * @pps_beta_offset_div2: specify the default deblocking parameter offsets for * beta divided by 2 * @pps_tc_offset_div2: specify the default deblocking parameter offsets for tC * divided by 2 * @log2_parallel_merge_level_minus2: this value plus 2 specifies the value of * the variable Log2ParMrgLevel * @reserved: padding field. Should be zeroed by applications. * @flags: see V4L2_HEVC_PPS_FLAG_{} */ struct v4l2_ctrl_hevc_pps { __u8 pic_parameter_set_id; __u8 num_extra_slice_header_bits; __u8 num_ref_idx_l0_default_active_minus1; __u8 num_ref_idx_l1_default_active_minus1; __s8 init_qp_minus26; __u8 diff_cu_qp_delta_depth; __s8 pps_cb_qp_offset; __s8 pps_cr_qp_offset; __u8 num_tile_columns_minus1; __u8 num_tile_rows_minus1; __u8 column_width_minus1[20]; __u8 row_height_minus1[22]; __s8 pps_beta_offset_div2; __s8 pps_tc_offset_div2; __u8 log2_parallel_merge_level_minus2; __u8 reserved; __u64 flags; }; #define V4L2_HEVC_DPB_ENTRY_LONG_TERM_REFERENCE 0x01 #define V4L2_HEVC_SEI_PIC_STRUCT_FRAME 0 #define V4L2_HEVC_SEI_PIC_STRUCT_TOP_FIELD 1 #define V4L2_HEVC_SEI_PIC_STRUCT_BOTTOM_FIELD 2 #define V4L2_HEVC_SEI_PIC_STRUCT_TOP_BOTTOM 3 #define V4L2_HEVC_SEI_PIC_STRUCT_BOTTOM_TOP 4 #define V4L2_HEVC_SEI_PIC_STRUCT_TOP_BOTTOM_TOP 5 #define V4L2_HEVC_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM 6 #define V4L2_HEVC_SEI_PIC_STRUCT_FRAME_DOUBLING 7 #define V4L2_HEVC_SEI_PIC_STRUCT_FRAME_TRIPLING 8 #define V4L2_HEVC_SEI_PIC_STRUCT_TOP_PAIRED_PREVIOUS_BOTTOM 9 #define V4L2_HEVC_SEI_PIC_STRUCT_BOTTOM_PAIRED_PREVIOUS_TOP 10 #define V4L2_HEVC_SEI_PIC_STRUCT_TOP_PAIRED_NEXT_BOTTOM 11 #define V4L2_HEVC_SEI_PIC_STRUCT_BOTTOM_PAIRED_NEXT_TOP 12 #define V4L2_HEVC_DPB_ENTRIES_NUM_MAX 16 /** * struct v4l2_hevc_dpb_entry - HEVC decoded picture buffer entry * * @timestamp: timestamp of the V4L2 capture buffer to use as reference. * @flags: long term flag for the reference frame * @field_pic: whether the reference is a field picture or a frame. * @reserved: padding field. Should be zeroed by applications. * @pic_order_cnt_val: the picture order count of the current picture. */ struct v4l2_hevc_dpb_entry { __u64 timestamp; __u8 flags; __u8 field_pic; __u16 reserved; __s32 pic_order_cnt_val; }; /** * struct v4l2_hevc_pred_weight_table - HEVC weighted prediction parameters * * @delta_luma_weight_l0: the difference of the weighting factor applied * to the luma prediction value for list 0 * @luma_offset_l0: the additive offset applied to the luma prediction value * for list 0 * @delta_chroma_weight_l0: the difference of the weighting factor applied * to the chroma prediction values for list 0 * @chroma_offset_l0: the difference of the additive offset applied to * the chroma prediction values for list 0 * @delta_luma_weight_l1: the difference of the weighting factor applied * to the luma prediction value for list 1 * @luma_offset_l1: the additive offset applied to the luma prediction value * for list 1 * @delta_chroma_weight_l1: the difference of the weighting factor applied * to the chroma prediction values for list 1 * @chroma_offset_l1: the difference of the additive offset applied to * the chroma prediction values for list 1 * @luma_log2_weight_denom: the base 2 logarithm of the denominator for * all luma weighting factors * @delta_chroma_log2_weight_denom: the difference of the base 2 logarithm * of the denominator for all chroma * weighting factors */ struct v4l2_hevc_pred_weight_table { __s8 delta_luma_weight_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __s8 luma_offset_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __s8 delta_chroma_weight_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2]; __s8 chroma_offset_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2]; __s8 delta_luma_weight_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __s8 luma_offset_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __s8 delta_chroma_weight_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2]; __s8 chroma_offset_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX][2]; __u8 luma_log2_weight_denom; __s8 delta_chroma_log2_weight_denom; }; #define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_LUMA (1ULL << 0) #define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_SAO_CHROMA (1ULL << 1) #define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_TEMPORAL_MVP_ENABLED (1ULL << 2) #define V4L2_HEVC_SLICE_PARAMS_FLAG_MVD_L1_ZERO (1ULL << 3) #define V4L2_HEVC_SLICE_PARAMS_FLAG_CABAC_INIT (1ULL << 4) #define V4L2_HEVC_SLICE_PARAMS_FLAG_COLLOCATED_FROM_L0 (1ULL << 5) #define V4L2_HEVC_SLICE_PARAMS_FLAG_USE_INTEGER_MV (1ULL << 6) #define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_DEBLOCKING_FILTER_DISABLED (1ULL << 7) #define V4L2_HEVC_SLICE_PARAMS_FLAG_SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED (1ULL << 8) #define V4L2_HEVC_SLICE_PARAMS_FLAG_DEPENDENT_SLICE_SEGMENT (1ULL << 9) /** * struct v4l2_ctrl_hevc_slice_params - HEVC slice parameters * * This control is a dynamically sized 1-dimensional array, * V4L2_CTRL_FLAG_DYNAMIC_ARRAY flag must be set when using it. * * @bit_size: size (in bits) of the current slice data * @data_byte_offset: offset (in bytes) to the video data in the current slice data * @num_entry_point_offsets: specifies the number of entry point offset syntax * elements in the slice header. * @nal_unit_type: specifies the coding type of the slice (B, P or I) * @nuh_temporal_id_plus1: minus 1 specifies a temporal identifier for the NAL unit * @slice_type: see V4L2_HEVC_SLICE_TYPE_{} * @colour_plane_id: specifies the colour plane associated with the current slice * @slice_pic_order_cnt: specifies the picture order count * @num_ref_idx_l0_active_minus1: this value plus 1 specifies the maximum * reference index for reference picture list 0 * that may be used to decode the slice * @num_ref_idx_l1_active_minus1: this value plus 1 specifies the maximum * reference index for reference picture list 1 * that may be used to decode the slice * @collocated_ref_idx: specifies the reference index of the collocated picture used * for temporal motion vector prediction * @five_minus_max_num_merge_cand: specifies the maximum number of merging * motion vector prediction candidates supported in * the slice subtracted from 5 * @slice_qp_delta: specifies the initial value of QpY to be used for the coding * blocks in the slice * @slice_cb_qp_offset: specifies a difference to be added to the value of pps_cb_qp_offset * @slice_cr_qp_offset: specifies a difference to be added to the value of pps_cr_qp_offset * @slice_act_y_qp_offset: screen content extension parameters * @slice_act_cb_qp_offset: screen content extension parameters * @slice_act_cr_qp_offset: screen content extension parameters * @slice_beta_offset_div2: specify the deblocking parameter offsets for beta divided by 2 * @slice_tc_offset_div2: specify the deblocking parameter offsets for tC divided by 2 * @pic_struct: indicates whether a picture should be displayed as a frame or as one or * more fields * @reserved0: padding field. Should be zeroed by applications. * @slice_segment_addr: specifies the address of the first coding tree block in * the slice segment * @ref_idx_l0: the list of L0 reference elements as indices in the DPB * @ref_idx_l1: the list of L1 reference elements as indices in the DPB * @short_term_ref_pic_set_size: specifies the size of short-term reference * pictures set included in the SPS * @long_term_ref_pic_set_size: specifies the size of long-term reference * pictures set include in the SPS * @pred_weight_table: the prediction weight coefficients for inter-picture * prediction * @reserved1: padding field. Should be zeroed by applications. * @flags: see V4L2_HEVC_SLICE_PARAMS_FLAG_{} */ struct v4l2_ctrl_hevc_slice_params { __u32 bit_size; __u32 data_byte_offset; __u32 num_entry_point_offsets; /* ISO/IEC 23008-2, ITU-T Rec. H.265: NAL unit header */ __u8 nal_unit_type; __u8 nuh_temporal_id_plus1; /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */ __u8 slice_type; __u8 colour_plane_id; __s32 slice_pic_order_cnt; __u8 num_ref_idx_l0_active_minus1; __u8 num_ref_idx_l1_active_minus1; __u8 collocated_ref_idx; __u8 five_minus_max_num_merge_cand; __s8 slice_qp_delta; __s8 slice_cb_qp_offset; __s8 slice_cr_qp_offset; __s8 slice_act_y_qp_offset; __s8 slice_act_cb_qp_offset; __s8 slice_act_cr_qp_offset; __s8 slice_beta_offset_div2; __s8 slice_tc_offset_div2; /* ISO/IEC 23008-2, ITU-T Rec. H.265: Picture timing SEI message */ __u8 pic_struct; __u8 reserved0[3]; /* ISO/IEC 23008-2, ITU-T Rec. H.265: General slice segment header */ __u32 slice_segment_addr; __u8 ref_idx_l0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __u8 ref_idx_l1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __u16 short_term_ref_pic_set_size; __u16 long_term_ref_pic_set_size; /* ISO/IEC 23008-2, ITU-T Rec. H.265: Weighted prediction parameter */ struct v4l2_hevc_pred_weight_table pred_weight_table; __u8 reserved1[2]; __u64 flags; }; #define V4L2_HEVC_DECODE_PARAM_FLAG_IRAP_PIC 0x1 #define V4L2_HEVC_DECODE_PARAM_FLAG_IDR_PIC 0x2 #define V4L2_HEVC_DECODE_PARAM_FLAG_NO_OUTPUT_OF_PRIOR 0x4 /** * struct v4l2_ctrl_hevc_decode_params - HEVC decode parameters * * @pic_order_cnt_val: picture order count * @short_term_ref_pic_set_size: specifies the size of short-term reference * pictures set included in the SPS of the first slice * @long_term_ref_pic_set_size: specifies the size of long-term reference * pictures set include in the SPS of the first slice * @num_active_dpb_entries: the number of entries in dpb * @num_poc_st_curr_before: the number of reference pictures in the short-term * set that come before the current frame * @num_poc_st_curr_after: the number of reference pictures in the short-term * set that come after the current frame * @num_poc_lt_curr: the number of reference pictures in the long-term set * @poc_st_curr_before: provides the index of the short term before references * in DPB array * @poc_st_curr_after: provides the index of the short term after references * in DPB array * @poc_lt_curr: provides the index of the long term references in DPB array * @num_delta_pocs_of_ref_rps_idx: same as the derived value NumDeltaPocs[RefRpsIdx], * can be used to parse the RPS data in slice headers * instead of skipping it with @short_term_ref_pic_set_size. * @reserved: padding field. Should be zeroed by applications. * @dpb: the decoded picture buffer, for meta-data about reference frames * @flags: see V4L2_HEVC_DECODE_PARAM_FLAG_{} */ struct v4l2_ctrl_hevc_decode_params { __s32 pic_order_cnt_val; __u16 short_term_ref_pic_set_size; __u16 long_term_ref_pic_set_size; __u8 num_active_dpb_entries; __u8 num_poc_st_curr_before; __u8 num_poc_st_curr_after; __u8 num_poc_lt_curr; __u8 poc_st_curr_before[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __u8 poc_st_curr_after[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __u8 poc_lt_curr[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __u8 num_delta_pocs_of_ref_rps_idx; __u8 reserved[3]; struct v4l2_hevc_dpb_entry dpb[V4L2_HEVC_DPB_ENTRIES_NUM_MAX]; __u64 flags; }; /** * struct v4l2_ctrl_hevc_scaling_matrix - HEVC scaling lists parameters * * @scaling_list_4x4: scaling list is used for the scaling process for * transform coefficients. The values on each scaling * list are expected in raster scan order * @scaling_list_8x8: scaling list is used for the scaling process for * transform coefficients. The values on each scaling * list are expected in raster scan order * @scaling_list_16x16: scaling list is used for the scaling process for * transform coefficients. The values on each scaling * list are expected in raster scan order * @scaling_list_32x32: scaling list is used for the scaling process for * transform coefficients. The values on each scaling * list are expected in raster scan order * @scaling_list_dc_coef_16x16: scaling list is used for the scaling process * for transform coefficients. The values on each * scaling list are expected in raster scan order. * @scaling_list_dc_coef_32x32: scaling list is used for the scaling process * for transform coefficients. The values on each * scaling list are expected in raster scan order. */ struct v4l2_ctrl_hevc_scaling_matrix { __u8 scaling_list_4x4[6][16]; __u8 scaling_list_8x8[6][64]; __u8 scaling_list_16x16[6][64]; __u8 scaling_list_32x32[2][64]; __u8 scaling_list_dc_coef_16x16[6]; __u8 scaling_list_dc_coef_32x32[2]; }; #define V4L2_CID_COLORIMETRY_CLASS_BASE (V4L2_CTRL_CLASS_COLORIMETRY | 0x900) #define V4L2_CID_COLORIMETRY_CLASS (V4L2_CTRL_CLASS_COLORIMETRY | 1) #define V4L2_CID_COLORIMETRY_HDR10_CLL_INFO (V4L2_CID_COLORIMETRY_CLASS_BASE + 0) struct v4l2_ctrl_hdr10_cll_info { __u16 max_content_light_level; __u16 max_pic_average_light_level; }; #define V4L2_CID_COLORIMETRY_HDR10_MASTERING_DISPLAY (V4L2_CID_COLORIMETRY_CLASS_BASE + 1) #define V4L2_HDR10_MASTERING_PRIMARIES_X_LOW 5 #define V4L2_HDR10_MASTERING_PRIMARIES_X_HIGH 37000 #define V4L2_HDR10_MASTERING_PRIMARIES_Y_LOW 5 #define V4L2_HDR10_MASTERING_PRIMARIES_Y_HIGH 42000 #define V4L2_HDR10_MASTERING_WHITE_POINT_X_LOW 5 #define V4L2_HDR10_MASTERING_WHITE_POINT_X_HIGH 37000 #define V4L2_HDR10_MASTERING_WHITE_POINT_Y_LOW 5 #define V4L2_HDR10_MASTERING_WHITE_POINT_Y_HIGH 42000 #define V4L2_HDR10_MASTERING_MAX_LUMA_LOW 50000 #define V4L2_HDR10_MASTERING_MAX_LUMA_HIGH 100000000 #define V4L2_HDR10_MASTERING_MIN_LUMA_LOW 1 #define V4L2_HDR10_MASTERING_MIN_LUMA_HIGH 50000 struct v4l2_ctrl_hdr10_mastering_display { __u16 display_primaries_x[3]; __u16 display_primaries_y[3]; __u16 white_point_x; __u16 white_point_y; __u32 max_display_mastering_luminance; __u32 min_display_mastering_luminance; }; /* Stateless VP9 controls */ #define V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED 0x1 #define V4L2_VP9_LOOP_FILTER_FLAG_DELTA_UPDATE 0x2 /** * struct v4l2_vp9_loop_filter - VP9 loop filter parameters * * @ref_deltas: contains the adjustment needed for the filter level based on the * chosen reference frame. If this syntax element is not present in the bitstream, * users should pass its last value. * @mode_deltas: contains the adjustment needed for the filter level based on the * chosen mode. If this syntax element is not present in the bitstream, users should * pass its last value. * @level: indicates the loop filter strength. * @sharpness: indicates the sharpness level. * @flags: combination of V4L2_VP9_LOOP_FILTER_FLAG_{} flags. * @reserved: padding field. Should be zeroed by applications. * * This structure contains all loop filter related parameters. See sections * '7.2.8 Loop filter semantics' of the VP9 specification for more details. */ struct v4l2_vp9_loop_filter { __s8 ref_deltas[4]; __s8 mode_deltas[2]; __u8 level; __u8 sharpness; __u8 flags; __u8 reserved[7]; }; /** * struct v4l2_vp9_quantization - VP9 quantization parameters * * @base_q_idx: indicates the base frame qindex. * @delta_q_y_dc: indicates the Y DC quantizer relative to base_q_idx. * @delta_q_uv_dc: indicates the UV DC quantizer relative to base_q_idx. * @delta_q_uv_ac: indicates the UV AC quantizer relative to base_q_idx. * @reserved: padding field. Should be zeroed by applications. * * Encodes the quantization parameters. See section '7.2.9 Quantization params * syntax' of the VP9 specification for more details. */ struct v4l2_vp9_quantization { __u8 base_q_idx; __s8 delta_q_y_dc; __s8 delta_q_uv_dc; __s8 delta_q_uv_ac; __u8 reserved[4]; }; #define V4L2_VP9_SEGMENTATION_FLAG_ENABLED 0x01 #define V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP 0x02 #define V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE 0x04 #define V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA 0x08 #define V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE 0x10 #define V4L2_VP9_SEG_LVL_ALT_Q 0 #define V4L2_VP9_SEG_LVL_ALT_L 1 #define V4L2_VP9_SEG_LVL_REF_FRAME 2 #define V4L2_VP9_SEG_LVL_SKIP 3 #define V4L2_VP9_SEG_LVL_MAX 4 #define V4L2_VP9_SEGMENT_FEATURE_ENABLED(id) (1 << (id)) #define V4L2_VP9_SEGMENT_FEATURE_ENABLED_MASK 0xf /** * struct v4l2_vp9_segmentation - VP9 segmentation parameters * * @feature_data: data attached to each feature. Data entry is only valid if * the feature is enabled. The array shall be indexed with segment number as * the first dimension (0..7) and one of V4L2_VP9_SEG_{} as the second dimension. * @feature_enabled: bitmask defining which features are enabled in each segment. * The value for each segment is a combination of V4L2_VP9_SEGMENT_FEATURE_ENABLED(id) * values where id is one of V4L2_VP9_SEG_LVL_{}. * @tree_probs: specifies the probability values to be used when decoding a * Segment-ID. See '5.15. Segmentation map' section of the VP9 specification * for more details. * @pred_probs: specifies the probability values to be used when decoding a * Predicted-Segment-ID. See '6.4.14. Get segment id syntax' section of :ref:`vp9` * for more details. * @flags: combination of V4L2_VP9_SEGMENTATION_FLAG_{} flags. * @reserved: padding field. Should be zeroed by applications. * * Encodes the quantization parameters. See section '7.2.10 Segmentation params syntax' of * the VP9 specification for more details. */ struct v4l2_vp9_segmentation { __s16 feature_data[8][4]; __u8 feature_enabled[8]; __u8 tree_probs[7]; __u8 pred_probs[3]; __u8 flags; __u8 reserved[5]; }; #define V4L2_VP9_FRAME_FLAG_KEY_FRAME 0x001 #define V4L2_VP9_FRAME_FLAG_SHOW_FRAME 0x002 #define V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT 0x004 #define V4L2_VP9_FRAME_FLAG_INTRA_ONLY 0x008 #define V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV 0x010 #define V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX 0x020 #define V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE 0x040 #define V4L2_VP9_FRAME_FLAG_X_SUBSAMPLING 0x080 #define V4L2_VP9_FRAME_FLAG_Y_SUBSAMPLING 0x100 #define V4L2_VP9_FRAME_FLAG_COLOR_RANGE_FULL_SWING 0x200 #define V4L2_VP9_SIGN_BIAS_LAST 0x1 #define V4L2_VP9_SIGN_BIAS_GOLDEN 0x2 #define V4L2_VP9_SIGN_BIAS_ALT 0x4 #define V4L2_VP9_RESET_FRAME_CTX_NONE 0 #define V4L2_VP9_RESET_FRAME_CTX_SPEC 1 #define V4L2_VP9_RESET_FRAME_CTX_ALL 2 #define V4L2_VP9_INTERP_FILTER_EIGHTTAP 0 #define V4L2_VP9_INTERP_FILTER_EIGHTTAP_SMOOTH 1 #define V4L2_VP9_INTERP_FILTER_EIGHTTAP_SHARP 2 #define V4L2_VP9_INTERP_FILTER_BILINEAR 3 #define V4L2_VP9_INTERP_FILTER_SWITCHABLE 4 #define V4L2_VP9_REFERENCE_MODE_SINGLE_REFERENCE 0 #define V4L2_VP9_REFERENCE_MODE_COMPOUND_REFERENCE 1 #define V4L2_VP9_REFERENCE_MODE_SELECT 2 #define V4L2_VP9_PROFILE_MAX 3 #define V4L2_CID_STATELESS_VP9_FRAME (V4L2_CID_CODEC_STATELESS_BASE + 300) /** * struct v4l2_ctrl_vp9_frame - VP9 frame decoding control * * @lf: loop filter parameters. See &v4l2_vp9_loop_filter for more details. * @quant: quantization parameters. See &v4l2_vp9_quantization for more details. * @seg: segmentation parameters. See &v4l2_vp9_segmentation for more details. * @flags: combination of V4L2_VP9_FRAME_FLAG_{} flags. * @compressed_header_size: compressed header size in bytes. * @uncompressed_header_size: uncompressed header size in bytes. * @frame_width_minus_1: add 1 to it and you'll get the frame width expressed in pixels. * @frame_height_minus_1: add 1 to it and you'll get the frame height expressed in pixels. * @render_width_minus_1: add 1 to it and you'll get the expected render width expressed in * pixels. This is not used during the decoding process but might be used by HW scalers * to prepare a frame that's ready for scanout. * @render_height_minus_1: add 1 to it and you'll get the expected render height expressed in * pixels. This is not used during the decoding process but might be used by HW scalers * to prepare a frame that's ready for scanout. * @last_frame_ts: "last" reference buffer timestamp. * The timestamp refers to the timestamp field in struct v4l2_buffer. * Use v4l2_timeval_to_ns() to convert the struct timeval to a __u64. * @golden_frame_ts: "golden" reference buffer timestamp. * The timestamp refers to the timestamp field in struct v4l2_buffer. * Use v4l2_timeval_to_ns() to convert the struct timeval to a __u64. * @alt_frame_ts: "alt" reference buffer timestamp. * The timestamp refers to the timestamp field in struct v4l2_buffer. * Use v4l2_timeval_to_ns() to convert the struct timeval to a __u64. * @ref_frame_sign_bias: a bitfield specifying whether the sign bias is set for a given * reference frame. Either of V4L2_VP9_SIGN_BIAS_{}. * @reset_frame_context: specifies whether the frame context should be reset to default values. * Either of V4L2_VP9_RESET_FRAME_CTX_{}. * @frame_context_idx: frame context that should be used/updated. * @profile: VP9 profile. Can be 0, 1, 2 or 3. * @bit_depth: bits per components. Can be 8, 10 or 12. Note that not all profiles support * 10 and/or 12 bits depths. * @interpolation_filter: specifies the filter selection used for performing inter prediction. * Set to one of V4L2_VP9_INTERP_FILTER_{}. * @tile_cols_log2: specifies the base 2 logarithm of the width of each tile (where the width * is measured in units of 8x8 blocks). Shall be less than or equal to 6. * @tile_rows_log2: specifies the base 2 logarithm of the height of each tile (where the height * is measured in units of 8x8 blocks). * @reference_mode: specifies the type of inter prediction to be used. * Set to one of V4L2_VP9_REFERENCE_MODE_{}. * @reserved: padding field. Should be zeroed by applications. */ struct v4l2_ctrl_vp9_frame { struct v4l2_vp9_loop_filter lf; struct v4l2_vp9_quantization quant; struct v4l2_vp9_segmentation seg; __u32 flags; __u16 compressed_header_size; __u16 uncompressed_header_size; __u16 frame_width_minus_1; __u16 frame_height_minus_1; __u16 render_width_minus_1; __u16 render_height_minus_1; __u64 last_frame_ts; __u64 golden_frame_ts; __u64 alt_frame_ts; __u8 ref_frame_sign_bias; __u8 reset_frame_context; __u8 frame_context_idx; __u8 profile; __u8 bit_depth; __u8 interpolation_filter; __u8 tile_cols_log2; __u8 tile_rows_log2; __u8 reference_mode; __u8 reserved[7]; }; #define V4L2_VP9_NUM_FRAME_CTX 4 /** * struct v4l2_vp9_mv_probs - VP9 Motion vector probability updates * @joint: motion vector joint probability updates. * @sign: motion vector sign probability updates. * @classes: motion vector class probability updates. * @class0_bit: motion vector class0 bit probability updates. * @bits: motion vector bits probability updates. * @class0_fr: motion vector class0 fractional bit probability updates. * @fr: motion vector fractional bit probability updates. * @class0_hp: motion vector class0 high precision fractional bit probability updates. * @hp: motion vector high precision fractional bit probability updates. * * This structure contains new values of motion vector probabilities. * A value of zero in an array element means there is no update of the relevant probability. * See `struct v4l2_vp9_prob_updates` for details. */ struct v4l2_vp9_mv_probs { __u8 joint[3]; __u8 sign[2]; __u8 classes[2][10]; __u8 class0_bit[2]; __u8 bits[2][10]; __u8 class0_fr[2][2][3]; __u8 fr[2][3]; __u8 class0_hp[2]; __u8 hp[2]; }; #define V4L2_CID_STATELESS_VP9_COMPRESSED_HDR (V4L2_CID_CODEC_STATELESS_BASE + 301) #define V4L2_VP9_TX_MODE_ONLY_4X4 0 #define V4L2_VP9_TX_MODE_ALLOW_8X8 1 #define V4L2_VP9_TX_MODE_ALLOW_16X16 2 #define V4L2_VP9_TX_MODE_ALLOW_32X32 3 #define V4L2_VP9_TX_MODE_SELECT 4 /** * struct v4l2_ctrl_vp9_compressed_hdr - VP9 probability updates control * @tx_mode: specifies the TX mode. Set to one of V4L2_VP9_TX_MODE_{}. * @tx8: TX 8x8 probability updates. * @tx16: TX 16x16 probability updates. * @tx32: TX 32x32 probability updates. * @coef: coefficient probability updates. * @skip: skip probability updates. * @inter_mode: inter mode probability updates. * @interp_filter: interpolation filter probability updates. * @is_inter: is inter-block probability updates. * @comp_mode: compound prediction mode probability updates. * @single_ref: single ref probability updates. * @comp_ref: compound ref probability updates. * @y_mode: Y prediction mode probability updates. * @uv_mode: UV prediction mode probability updates. * @partition: partition probability updates. * @mv: motion vector probability updates. * * This structure holds the probabilities update as parsed in the compressed * header (Spec 6.3). These values represent the value of probability update after * being translated with inv_map_table[] (see 6.3.5). A value of zero in an array element * means that there is no update of the relevant probability. * * This control is optional and needs to be used when dealing with the hardware which is * not capable of parsing the compressed header itself. Only drivers which need it will * implement it. */ struct v4l2_ctrl_vp9_compressed_hdr { __u8 tx_mode; __u8 tx8[2][1]; __u8 tx16[2][2]; __u8 tx32[2][3]; __u8 coef[4][2][2][6][6][3]; __u8 skip[3]; __u8 inter_mode[7][3]; __u8 interp_filter[4][2]; __u8 is_inter[4]; __u8 comp_mode[5]; __u8 single_ref[5][2]; __u8 comp_ref[5]; __u8 y_mode[4][9]; __u8 uv_mode[10][9]; __u8 partition[16][3]; struct v4l2_vp9_mv_probs mv; }; /* Stateless AV1 controls */ #define V4L2_AV1_TOTAL_REFS_PER_FRAME 8 #define V4L2_AV1_CDEF_MAX 8 #define V4L2_AV1_NUM_PLANES_MAX 3 /* 1 if monochrome, 3 otherwise */ #define V4L2_AV1_MAX_SEGMENTS 8 #define V4L2_AV1_MAX_OPERATING_POINTS (1 << 5) /* 5 bits to encode */ #define V4L2_AV1_REFS_PER_FRAME 7 #define V4L2_AV1_MAX_NUM_Y_POINTS (1 << 4) /* 4 bits to encode */ #define V4L2_AV1_MAX_NUM_CB_POINTS (1 << 4) /* 4 bits to encode */ #define V4L2_AV1_MAX_NUM_CR_POINTS (1 << 4) /* 4 bits to encode */ #define V4L2_AV1_AR_COEFFS_SIZE 25 /* (2 * 3 * (3 + 1)) + 1 */ #define V4L2_AV1_MAX_NUM_PLANES 3 #define V4L2_AV1_MAX_TILE_COLS 64 #define V4L2_AV1_MAX_TILE_ROWS 64 #define V4L2_AV1_MAX_TILE_COUNT 512 #define V4L2_AV1_SEQUENCE_FLAG_STILL_PICTURE 0x00000001 #define V4L2_AV1_SEQUENCE_FLAG_USE_128X128_SUPERBLOCK 0x00000002 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_FILTER_INTRA 0x00000004 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTRA_EDGE_FILTER 0x00000008 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_INTERINTRA_COMPOUND 0x00000010 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_MASKED_COMPOUND 0x00000020 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_WARPED_MOTION 0x00000040 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_DUAL_FILTER 0x00000080 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_ORDER_HINT 0x00000100 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_JNT_COMP 0x00000200 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_REF_FRAME_MVS 0x00000400 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_SUPERRES 0x00000800 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_CDEF 0x00001000 #define V4L2_AV1_SEQUENCE_FLAG_ENABLE_RESTORATION 0x00002000 #define V4L2_AV1_SEQUENCE_FLAG_MONO_CHROME 0x00004000 #define V4L2_AV1_SEQUENCE_FLAG_COLOR_RANGE 0x00008000 #define V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_X 0x00010000 #define V4L2_AV1_SEQUENCE_FLAG_SUBSAMPLING_Y 0x00020000 #define V4L2_AV1_SEQUENCE_FLAG_FILM_GRAIN_PARAMS_PRESENT 0x00040000 #define V4L2_AV1_SEQUENCE_FLAG_SEPARATE_UV_DELTA_Q 0x00080000 #define V4L2_CID_STATELESS_AV1_SEQUENCE (V4L2_CID_CODEC_STATELESS_BASE + 500) /** * struct v4l2_ctrl_av1_sequence - AV1 Sequence * * Represents an AV1 Sequence OBU. See section 5.5 "Sequence header OBU syntax" * for more details. * * @flags: See V4L2_AV1_SEQUENCE_FLAG_{}. * @seq_profile: specifies the features that can be used in the coded video * sequence. * @order_hint_bits: specifies the number of bits used for the order_hint field * at each frame. * @bit_depth: the bitdepth to use for the sequence as described in section * 5.5.2 "Color config syntax". * @reserved: padding field. Should be zeroed by applications. * @max_frame_width_minus_1: specifies the maximum frame width minus 1 for the * frames represented by this sequence header. * @max_frame_height_minus_1: specifies the maximum frame height minus 1 for the * frames represented by this sequence header. */ struct v4l2_ctrl_av1_sequence { __u32 flags; __u8 seq_profile; __u8 order_hint_bits; __u8 bit_depth; __u8 reserved; __u16 max_frame_width_minus_1; __u16 max_frame_height_minus_1; }; #define V4L2_CID_STATELESS_AV1_TILE_GROUP_ENTRY (V4L2_CID_CODEC_STATELESS_BASE + 501) /** * struct v4l2_ctrl_av1_tile_group_entry - AV1 Tile Group entry * * Represents a single AV1 tile inside an AV1 Tile Group. Note that MiRowStart, * MiRowEnd, MiColStart and MiColEnd can be retrieved from struct * v4l2_av1_tile_info in struct v4l2_ctrl_av1_frame using tile_row and * tile_col. See section 6.10.1 "General tile group OBU semantics" for more * details. * * @tile_offset: offset from the OBU data, i.e. where the coded tile data * actually starts. * @tile_size: specifies the size in bytes of the coded tile. Equivalent to * "TileSize" in the AV1 Specification. * @tile_row: specifies the row of the current tile. Equivalent to "TileRow" in * the AV1 Specification. * @tile_col: specifies the col of the current tile. Equivalent to "TileCol" in * the AV1 Specification. */ struct v4l2_ctrl_av1_tile_group_entry { __u32 tile_offset; __u32 tile_size; __u32 tile_row; __u32 tile_col; }; /** * enum v4l2_av1_warp_model - AV1 Warp Model as described in section 3 * "Symbols and abbreviated terms" of the AV1 Specification. * * @V4L2_AV1_WARP_MODEL_IDENTITY: Warp model is just an identity transform. * @V4L2_AV1_WARP_MODEL_TRANSLATION: Warp model is a pure translation. * @V4L2_AV1_WARP_MODEL_ROTZOOM: Warp model is a rotation + symmetric zoom + * translation. * @V4L2_AV1_WARP_MODEL_AFFINE: Warp model is a general affine transform. */ enum v4l2_av1_warp_model { V4L2_AV1_WARP_MODEL_IDENTITY = 0, V4L2_AV1_WARP_MODEL_TRANSLATION = 1, V4L2_AV1_WARP_MODEL_ROTZOOM = 2, V4L2_AV1_WARP_MODEL_AFFINE = 3, }; /** * enum v4l2_av1_reference_frame - AV1 reference frames * * @V4L2_AV1_REF_INTRA_FRAME: Intra Frame Reference * @V4L2_AV1_REF_LAST_FRAME: Last Reference Frame * @V4L2_AV1_REF_LAST2_FRAME: Last2 Reference Frame * @V4L2_AV1_REF_LAST3_FRAME: Last3 Reference Frame * @V4L2_AV1_REF_GOLDEN_FRAME: Golden Reference Frame * @V4L2_AV1_REF_BWDREF_FRAME: BWD Reference Frame * @V4L2_AV1_REF_ALTREF2_FRAME: Alternative2 Reference Frame * @V4L2_AV1_REF_ALTREF_FRAME: Alternative Reference Frame */ enum v4l2_av1_reference_frame { V4L2_AV1_REF_INTRA_FRAME = 0, V4L2_AV1_REF_LAST_FRAME = 1, V4L2_AV1_REF_LAST2_FRAME = 2, V4L2_AV1_REF_LAST3_FRAME = 3, V4L2_AV1_REF_GOLDEN_FRAME = 4, V4L2_AV1_REF_BWDREF_FRAME = 5, V4L2_AV1_REF_ALTREF2_FRAME = 6, V4L2_AV1_REF_ALTREF_FRAME = 7, }; #define V4L2_AV1_GLOBAL_MOTION_IS_INVALID(ref) (1 << (ref)) #define V4L2_AV1_GLOBAL_MOTION_FLAG_IS_GLOBAL 0x1 #define V4L2_AV1_GLOBAL_MOTION_FLAG_IS_ROT_ZOOM 0x2 #define V4L2_AV1_GLOBAL_MOTION_FLAG_IS_TRANSLATION 0x4 /** * struct v4l2_av1_global_motion - AV1 Global Motion parameters as described in * section 6.8.17 "Global motion params semantics" of the AV1 specification. * * @flags: A bitfield containing the flags per reference frame. See * V4L2_AV1_GLOBAL_MOTION_FLAG_{} * @type: The type of global motion transform used. * @params: this field has the same meaning as "gm_params" in the AV1 * specification. * @invalid: bitfield indicating whether the global motion params are invalid * for a given reference frame. See section 7.11.3.6 Setup shear process and * the variable "warpValid". Use V4L2_AV1_GLOBAL_MOTION_IS_INVALID(ref) to * create a suitable mask. * @reserved: padding field. Should be zeroed by applications. */ struct v4l2_av1_global_motion { __u8 flags[V4L2_AV1_TOTAL_REFS_PER_FRAME]; enum v4l2_av1_warp_model type[V4L2_AV1_TOTAL_REFS_PER_FRAME]; __s32 params[V4L2_AV1_TOTAL_REFS_PER_FRAME][6]; __u8 invalid; __u8 reserved[3]; }; /** * enum v4l2_av1_frame_restoration_type - AV1 Frame Restoration Type * @V4L2_AV1_FRAME_RESTORE_NONE: no filtering is applied. * @V4L2_AV1_FRAME_RESTORE_WIENER: Wiener filter process is invoked. * @V4L2_AV1_FRAME_RESTORE_SGRPROJ: self guided filter process is invoked. * @V4L2_AV1_FRAME_RESTORE_SWITCHABLE: restoration filter is swichtable. */ enum v4l2_av1_frame_restoration_type { V4L2_AV1_FRAME_RESTORE_NONE = 0, V4L2_AV1_FRAME_RESTORE_WIENER = 1, V4L2_AV1_FRAME_RESTORE_SGRPROJ = 2, V4L2_AV1_FRAME_RESTORE_SWITCHABLE = 3, }; #define V4L2_AV1_LOOP_RESTORATION_FLAG_USES_LR 0x1 #define V4L2_AV1_LOOP_RESTORATION_FLAG_USES_CHROMA_LR 0x2 /** * struct v4l2_av1_loop_restoration - AV1 Loop Restauration as described in * section 6.10.15 "Loop restoration params semantics" of the AV1 specification. * * @flags: See V4L2_AV1_LOOP_RESTORATION_FLAG_{}. * @lr_unit_shift: specifies if the luma restoration size should be halved. * @lr_uv_shift: specifies if the chroma size should be half the luma size. * @reserved: padding field. Should be zeroed by applications. * @frame_restoration_type: specifies the type of restoration used for each * plane. See enum v4l2_av1_frame_restoration_type. * @loop_restoration_size: specifies the size of loop restoration units in units * of samples in the current plane. */ struct v4l2_av1_loop_restoration { __u8 flags; __u8 lr_unit_shift; __u8 lr_uv_shift; __u8 reserved; enum v4l2_av1_frame_restoration_type frame_restoration_type[V4L2_AV1_NUM_PLANES_MAX]; __u32 loop_restoration_size[V4L2_AV1_MAX_NUM_PLANES]; }; /** * struct v4l2_av1_cdef - AV1 CDEF params semantics as described in section * 6.10.14 "CDEF params semantics" of the AV1 specification * * @damping_minus_3: controls the amount of damping in the deringing filter. * @bits: specifies the number of bits needed to specify which CDEF filter to * apply. * @y_pri_strength: specifies the strength of the primary filter. * @y_sec_strength: specifies the strength of the secondary filter. * @uv_pri_strength: specifies the strength of the primary filter. * @uv_sec_strength: specifies the strength of the secondary filter. */ struct v4l2_av1_cdef { __u8 damping_minus_3; __u8 bits; __u8 y_pri_strength[V4L2_AV1_CDEF_MAX]; __u8 y_sec_strength[V4L2_AV1_CDEF_MAX]; __u8 uv_pri_strength[V4L2_AV1_CDEF_MAX]; __u8 uv_sec_strength[V4L2_AV1_CDEF_MAX]; }; #define V4L2_AV1_SEGMENTATION_FLAG_ENABLED 0x1 #define V4L2_AV1_SEGMENTATION_FLAG_UPDATE_MAP 0x2 #define V4L2_AV1_SEGMENTATION_FLAG_TEMPORAL_UPDATE 0x4 #define V4L2_AV1_SEGMENTATION_FLAG_UPDATE_DATA 0x8 #define V4L2_AV1_SEGMENTATION_FLAG_SEG_ID_PRE_SKIP 0x10 /** * enum v4l2_av1_segment_feature - AV1 segment features as described in section * 3 "Symbols and abbreviated terms" of the AV1 specification. * * @V4L2_AV1_SEG_LVL_ALT_Q: Index for quantizer segment feature. * @V4L2_AV1_SEG_LVL_ALT_LF_Y_V: Index for vertical luma loop filter segment * feature. * @V4L2_AV1_SEG_LVL_REF_FRAME: Index for reference frame segment feature. * @V4L2_AV1_SEG_LVL_REF_SKIP: Index for skip segment feature. * @V4L2_AV1_SEG_LVL_REF_GLOBALMV: Index for global mv feature. * @V4L2_AV1_SEG_LVL_MAX: Number of segment features. */ enum v4l2_av1_segment_feature { V4L2_AV1_SEG_LVL_ALT_Q = 0, V4L2_AV1_SEG_LVL_ALT_LF_Y_V = 1, V4L2_AV1_SEG_LVL_REF_FRAME = 5, V4L2_AV1_SEG_LVL_REF_SKIP = 6, V4L2_AV1_SEG_LVL_REF_GLOBALMV = 7, V4L2_AV1_SEG_LVL_MAX = 8 }; #define V4L2_AV1_SEGMENT_FEATURE_ENABLED(id) (1 << (id)) /** * struct v4l2_av1_segmentation - AV1 Segmentation params as defined in section * 6.8.13 "Segmentation params semantics" of the AV1 specification. * * @flags: see V4L2_AV1_SEGMENTATION_FLAG_{}. * @last_active_seg_id: indicates the highest numbered segment id that has some * enabled feature. This is used when decoding the segment id to only decode * choices corresponding to used segments. * @feature_enabled: bitmask defining which features are enabled in each * segment. Use V4L2_AV1_SEGMENT_FEATURE_ENABLED to build a suitable mask. * @feature_data: data attached to each feature. Data entry is only valid if the * feature is enabled */ struct v4l2_av1_segmentation { __u8 flags; __u8 last_active_seg_id; __u8 feature_enabled[V4L2_AV1_MAX_SEGMENTS]; __s16 feature_data[V4L2_AV1_MAX_SEGMENTS][V4L2_AV1_SEG_LVL_MAX]; }; #define V4L2_AV1_LOOP_FILTER_FLAG_DELTA_ENABLED 0x1 #define V4L2_AV1_LOOP_FILTER_FLAG_DELTA_UPDATE 0x2 #define V4L2_AV1_LOOP_FILTER_FLAG_DELTA_LF_PRESENT 0x4 #define V4L2_AV1_LOOP_FILTER_FLAG_DELTA_LF_MULTI 0x8 /** * struct v4l2_av1_loop_filter - AV1 Loop filter params as defined in section * 6.8.10 "Loop filter semantics" and 6.8.16 "Loop filter delta parameters * semantics" of the AV1 specification. * * @flags: see V4L2_AV1_LOOP_FILTER_FLAG_{} * @level: an array containing loop filter strength values. Different loop * filter strength values from the array are used depending on the image plane * being filtered, and the edge direction (vertical or horizontal) being * filtered. * @sharpness: indicates the sharpness level. The loop_filter_level and * loop_filter_sharpness together determine when a block edge is filtered, and * by how much the filtering can change the sample values. The loop filter * process is described in section 7.14 of the AV1 specification. * @ref_deltas: contains the adjustment needed for the filter level based on the * chosen reference frame. If this syntax element is not present, it maintains * its previous value. * @mode_deltas: contains the adjustment needed for the filter level based on * the chosen mode. If this syntax element is not present, it maintains its * previous value. * @delta_lf_res: specifies the left shift which should be applied to decoded * loop filter delta values. */ struct v4l2_av1_loop_filter { __u8 flags; __u8 level[4]; __u8 sharpness; __s8 ref_deltas[V4L2_AV1_TOTAL_REFS_PER_FRAME]; __s8 mode_deltas[2]; __u8 delta_lf_res; }; #define V4L2_AV1_QUANTIZATION_FLAG_DIFF_UV_DELTA 0x1 #define V4L2_AV1_QUANTIZATION_FLAG_USING_QMATRIX 0x2 #define V4L2_AV1_QUANTIZATION_FLAG_DELTA_Q_PRESENT 0x4 /** * struct v4l2_av1_quantization - AV1 Quantization params as defined in section * 6.8.11 "Quantization params semantics" of the AV1 specification. * * @flags: see V4L2_AV1_QUANTIZATION_FLAG_{} * @base_q_idx: indicates the base frame qindex. This is used for Y AC * coefficients and as the base value for the other quantizers. * @delta_q_y_dc: indicates the Y DC quantizer relative to base_q_idx. * @delta_q_u_dc: indicates the U DC quantizer relative to base_q_idx. * @delta_q_u_ac: indicates the U AC quantizer relative to base_q_idx. * @delta_q_v_dc: indicates the V DC quantizer relative to base_q_idx. * @delta_q_v_ac: indicates the V AC quantizer relative to base_q_idx. * @qm_y: specifies the level in the quantizer matrix that should be used for * luma plane decoding. * @qm_u: specifies the level in the quantizer matrix that should be used for * chroma U plane decoding. * @qm_v: specifies the level in the quantizer matrix that should be used for * chroma V plane decoding. * @delta_q_res: specifies the left shift which should be applied to decoded * quantizer index delta values. */ struct v4l2_av1_quantization { __u8 flags; __u8 base_q_idx; __s8 delta_q_y_dc; __s8 delta_q_u_dc; __s8 delta_q_u_ac; __s8 delta_q_v_dc; __s8 delta_q_v_ac; __u8 qm_y; __u8 qm_u; __u8 qm_v; __u8 delta_q_res; }; #define V4L2_AV1_TILE_INFO_FLAG_UNIFORM_TILE_SPACING 0x1 /** * struct v4l2_av1_tile_info - AV1 Tile info as defined in section 6.8.14 "Tile * info semantics" of the AV1 specification. * * @flags: see V4L2_AV1_TILE_INFO_FLAG_{} * @context_update_tile_id: specifies which tile to use for the CDF update. * @tile_rows: specifies the number of tiles down the frame. * @tile_cols: specifies the number of tiles across the frame. * @mi_col_starts: an array specifying the start column (in units of 4x4 luma * samples) for each tile across the image. * @mi_row_starts: an array specifying the start row (in units of 4x4 luma * samples) for each tile down the image. * @width_in_sbs_minus_1: specifies the width of a tile minus 1 in units of * superblocks. * @height_in_sbs_minus_1: specifies the height of a tile minus 1 in units of * superblocks. * @tile_size_bytes: specifies the number of bytes needed to code each tile * size. * @reserved: padding field. Should be zeroed by applications. */ struct v4l2_av1_tile_info { __u8 flags; __u8 context_update_tile_id; __u8 tile_cols; __u8 tile_rows; __u32 mi_col_starts[V4L2_AV1_MAX_TILE_COLS + 1]; __u32 mi_row_starts[V4L2_AV1_MAX_TILE_ROWS + 1]; __u32 width_in_sbs_minus_1[V4L2_AV1_MAX_TILE_COLS]; __u32 height_in_sbs_minus_1[V4L2_AV1_MAX_TILE_ROWS]; __u8 tile_size_bytes; __u8 reserved[3]; }; /** * enum v4l2_av1_frame_type - AV1 Frame Type * * @V4L2_AV1_KEY_FRAME: Key frame * @V4L2_AV1_INTER_FRAME: Inter frame * @V4L2_AV1_INTRA_ONLY_FRAME: Intra-only frame * @V4L2_AV1_SWITCH_FRAME: Switch frame */ enum v4l2_av1_frame_type { V4L2_AV1_KEY_FRAME = 0, V4L2_AV1_INTER_FRAME = 1, V4L2_AV1_INTRA_ONLY_FRAME = 2, V4L2_AV1_SWITCH_FRAME = 3 }; /** * enum v4l2_av1_interpolation_filter - AV1 interpolation filter types * * @V4L2_AV1_INTERPOLATION_FILTER_EIGHTTAP: eight tap filter * @V4L2_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH: eight tap smooth filter * @V4L2_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP: eight tap sharp filter * @V4L2_AV1_INTERPOLATION_FILTER_BILINEAR: bilinear filter * @V4L2_AV1_INTERPOLATION_FILTER_SWITCHABLE: filter selection is signaled at * the block level * * See section 6.8.9 "Interpolation filter semantics" of the AV1 specification * for more details. */ enum v4l2_av1_interpolation_filter { V4L2_AV1_INTERPOLATION_FILTER_EIGHTTAP = 0, V4L2_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH = 1, V4L2_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP = 2, V4L2_AV1_INTERPOLATION_FILTER_BILINEAR = 3, V4L2_AV1_INTERPOLATION_FILTER_SWITCHABLE = 4, }; /** * enum v4l2_av1_tx_mode - AV1 Tx mode as described in section 6.8.21 "TX mode * semantics" of the AV1 specification. * @V4L2_AV1_TX_MODE_ONLY_4X4: the inverse transform will use only 4x4 * transforms * @V4L2_AV1_TX_MODE_LARGEST: the inverse transform will use the largest * transform size that fits inside the block * @V4L2_AV1_TX_MODE_SELECT: the choice of transform size is specified * explicitly for each block. */ enum v4l2_av1_tx_mode { V4L2_AV1_TX_MODE_ONLY_4X4 = 0, V4L2_AV1_TX_MODE_LARGEST = 1, V4L2_AV1_TX_MODE_SELECT = 2 }; #define V4L2_AV1_FRAME_FLAG_SHOW_FRAME 0x00000001 #define V4L2_AV1_FRAME_FLAG_SHOWABLE_FRAME 0x00000002 #define V4L2_AV1_FRAME_FLAG_ERROR_RESILIENT_MODE 0x00000004 #define V4L2_AV1_FRAME_FLAG_DISABLE_CDF_UPDATE 0x00000008 #define V4L2_AV1_FRAME_FLAG_ALLOW_SCREEN_CONTENT_TOOLS 0x00000010 #define V4L2_AV1_FRAME_FLAG_FORCE_INTEGER_MV 0x00000020 #define V4L2_AV1_FRAME_FLAG_ALLOW_INTRABC 0x00000040 #define V4L2_AV1_FRAME_FLAG_USE_SUPERRES 0x00000080 #define V4L2_AV1_FRAME_FLAG_ALLOW_HIGH_PRECISION_MV 0x00000100 #define V4L2_AV1_FRAME_FLAG_IS_MOTION_MODE_SWITCHABLE 0x00000200 #define V4L2_AV1_FRAME_FLAG_USE_REF_FRAME_MVS 0x00000400 #define V4L2_AV1_FRAME_FLAG_DISABLE_FRAME_END_UPDATE_CDF 0x00000800 #define V4L2_AV1_FRAME_FLAG_ALLOW_WARPED_MOTION 0x00001000 #define V4L2_AV1_FRAME_FLAG_REFERENCE_SELECT 0x00002000 #define V4L2_AV1_FRAME_FLAG_REDUCED_TX_SET 0x00004000 #define V4L2_AV1_FRAME_FLAG_SKIP_MODE_ALLOWED 0x00008000 #define V4L2_AV1_FRAME_FLAG_SKIP_MODE_PRESENT 0x00010000 #define V4L2_AV1_FRAME_FLAG_FRAME_SIZE_OVERRIDE 0x00020000 #define V4L2_AV1_FRAME_FLAG_BUFFER_REMOVAL_TIME_PRESENT 0x00040000 #define V4L2_AV1_FRAME_FLAG_FRAME_REFS_SHORT_SIGNALING 0x00080000 #define V4L2_CID_STATELESS_AV1_FRAME (V4L2_CID_CODEC_STATELESS_BASE + 502) /** * struct v4l2_ctrl_av1_frame - Represents an AV1 Frame Header OBU. * * @tile_info: tile info * @quantization: quantization params * @segmentation: segmentation params * @superres_denom: the denominator for the upscaling ratio. * @loop_filter: loop filter params * @cdef: cdef params * @skip_mode_frame: specifies the frames to use for compound prediction when * skip_mode is equal to 1. * @primary_ref_frame: specifies which reference frame contains the CDF values * and other state that should be loaded at the start of the frame. * @loop_restoration: loop restoration params * @global_motion: global motion params * @flags: see V4L2_AV1_FRAME_FLAG_{} * @frame_type: specifies the AV1 frame type * @order_hint: specifies OrderHintBits least significant bits of the expected * output order for this frame. * @upscaled_width: the upscaled width. * @interpolation_filter: specifies the filter selection used for performing * inter prediction. * @tx_mode: specifies how the transform size is determined. * @frame_width_minus_1: add 1 to get the frame's width. * @frame_height_minus_1: add 1 to get the frame's height * @render_width_minus_1: add 1 to get the render width of the frame in luma * samples. * @render_height_minus_1: add 1 to get the render height of the frame in luma * samples. * @current_frame_id: specifies the frame id number for the current frame. Frame * id numbers are additional information that do not affect the decoding * process, but provide decoders with a way of detecting missing reference * frames so that appropriate action can be taken. * @buffer_removal_time: specifies the frame removal time in units of DecCT clock * ticks counted from the removal time of the last random access point for * operating point opNum. * @reserved: padding field. Should be zeroed by applications. * @order_hints: specifies the expected output order hint for each reference * frame. This field corresponds to the OrderHints variable from the * specification (section 5.9.2 "Uncompressed header syntax"). As such, this is * only used for non-intra frames and ignored otherwise. order_hints[0] is * always ignored. * @reference_frame_ts: the V4L2 timestamp of the reference frame slots. * @ref_frame_idx: used to index into @reference_frame_ts when decoding * inter-frames. The meaning of this array is the same as in the specification. * The timestamp refers to the timestamp field in struct v4l2_buffer. Use * v4l2_timeval_to_ns() to convert the struct timeval to a __u64. * @refresh_frame_flags: contains a bitmask that specifies which reference frame * slots will be updated with the current frame after it is decoded. */ struct v4l2_ctrl_av1_frame { struct v4l2_av1_tile_info tile_info; struct v4l2_av1_quantization quantization; __u8 superres_denom; struct v4l2_av1_segmentation segmentation; struct v4l2_av1_loop_filter loop_filter; struct v4l2_av1_cdef cdef; __u8 skip_mode_frame[2]; __u8 primary_ref_frame; struct v4l2_av1_loop_restoration loop_restoration; struct v4l2_av1_global_motion global_motion; __u32 flags; enum v4l2_av1_frame_type frame_type; __u32 order_hint; __u32 upscaled_width; enum v4l2_av1_interpolation_filter interpolation_filter; enum v4l2_av1_tx_mode tx_mode; __u32 frame_width_minus_1; __u32 frame_height_minus_1; __u16 render_width_minus_1; __u16 render_height_minus_1; __u32 current_frame_id; __u32 buffer_removal_time[V4L2_AV1_MAX_OPERATING_POINTS]; __u8 reserved[4]; __u32 order_hints[V4L2_AV1_TOTAL_REFS_PER_FRAME]; __u64 reference_frame_ts[V4L2_AV1_TOTAL_REFS_PER_FRAME]; __s8 ref_frame_idx[V4L2_AV1_REFS_PER_FRAME]; __u8 refresh_frame_flags; }; #define V4L2_AV1_FILM_GRAIN_FLAG_APPLY_GRAIN 0x1 #define V4L2_AV1_FILM_GRAIN_FLAG_UPDATE_GRAIN 0x2 #define V4L2_AV1_FILM_GRAIN_FLAG_CHROMA_SCALING_FROM_LUMA 0x4 #define V4L2_AV1_FILM_GRAIN_FLAG_OVERLAP 0x8 #define V4L2_AV1_FILM_GRAIN_FLAG_CLIP_TO_RESTRICTED_RANGE 0x10 #define V4L2_CID_STATELESS_AV1_FILM_GRAIN (V4L2_CID_CODEC_STATELESS_BASE + 505) /** * struct v4l2_ctrl_av1_film_grain - AV1 Film Grain parameters. * * Film grain parameters as specified by section 6.8.20 of the AV1 Specification. * * @flags: see V4L2_AV1_FILM_GRAIN_{}. * @cr_mult: represents a multiplier for the cr component used in derivation of * the input index to the cr component scaling function. * @grain_seed: specifies the starting value for the pseudo-random numbers used * during film grain synthesis. * @film_grain_params_ref_idx: indicates which reference frame contains the * film grain parameters to be used for this frame. * @num_y_points: specifies the number of points for the piece-wise linear * scaling function of the luma component. * @point_y_value: represents the x (luma value) coordinate for the i-th point * of the piecewise linear scaling function for luma component. The values are * signaled on the scale of 0..255. In case of 10 bit video, these values * correspond to luma values divided by 4. In case of 12 bit video, these values * correspond to luma values divided by 16. * @point_y_scaling: represents the scaling (output) value for the i-th point * of the piecewise linear scaling function for luma component. * @num_cb_points: specifies the number of points for the piece-wise linear * scaling function of the cb component. * @point_cb_value: represents the x coordinate for the i-th point of the * piece-wise linear scaling function for cb component. The values are signaled * on the scale of 0..255. * @point_cb_scaling: represents the scaling (output) value for the i-th point * of the piecewise linear scaling function for cb component. * @num_cr_points: specifies represents the number of points for the piece-wise * linear scaling function of the cr component. * @point_cr_value: represents the x coordinate for the i-th point of the * piece-wise linear scaling function for cr component. The values are signaled * on the scale of 0..255. * @point_cr_scaling: represents the scaling (output) value for the i-th point * of the piecewise linear scaling function for cr component. * @grain_scaling_minus_8: represents the shift – 8 applied to the values of the * chroma component. The grain_scaling_minus_8 can take values of 0..3 and * determines the range and quantization step of the standard deviation of film * grain. * @ar_coeff_lag: specifies the number of auto-regressive coefficients for luma * and chroma. * @ar_coeffs_y_plus_128: specifies auto-regressive coefficients used for the Y * plane. * @ar_coeffs_cb_plus_128: specifies auto-regressive coefficients used for the U * plane. * @ar_coeffs_cr_plus_128: specifies auto-regressive coefficients used for the V * plane. * @ar_coeff_shift_minus_6: specifies the range of the auto-regressive * coefficients. Values of 0, 1, 2, and 3 correspond to the ranges for * auto-regressive coefficients of [-2, 2), [-1, 1), [-0.5, 0.5) and [-0.25, * 0.25) respectively. * @grain_scale_shift: specifies how much the Gaussian random numbers should be * scaled down during the grain synthesis process. * @cb_mult: represents a multiplier for the cb component used in derivation of * the input index to the cb component scaling function. * @cb_luma_mult: represents a multiplier for the average luma component used in * derivation of the input index to the cb component scaling function. * @cr_luma_mult: represents a multiplier for the average luma component used in * derivation of the input index to the cr component scaling function. * @cb_offset: represents an offset used in derivation of the input index to the * cb component scaling function. * @cr_offset: represents an offset used in derivation of the input index to the * cr component scaling function. * @reserved: padding field. Should be zeroed by applications. */ struct v4l2_ctrl_av1_film_grain { __u8 flags; __u8 cr_mult; __u16 grain_seed; __u8 film_grain_params_ref_idx; __u8 num_y_points; __u8 point_y_value[V4L2_AV1_MAX_NUM_Y_POINTS]; __u8 point_y_scaling[V4L2_AV1_MAX_NUM_Y_POINTS]; __u8 num_cb_points; __u8 point_cb_value[V4L2_AV1_MAX_NUM_CB_POINTS]; __u8 point_cb_scaling[V4L2_AV1_MAX_NUM_CB_POINTS]; __u8 num_cr_points; __u8 point_cr_value[V4L2_AV1_MAX_NUM_CR_POINTS]; __u8 point_cr_scaling[V4L2_AV1_MAX_NUM_CR_POINTS]; __u8 grain_scaling_minus_8; __u8 ar_coeff_lag; __u8 ar_coeffs_y_plus_128[V4L2_AV1_AR_COEFFS_SIZE]; __u8 ar_coeffs_cb_plus_128[V4L2_AV1_AR_COEFFS_SIZE]; __u8 ar_coeffs_cr_plus_128[V4L2_AV1_AR_COEFFS_SIZE]; __u8 ar_coeff_shift_minus_6; __u8 grain_scale_shift; __u8 cb_mult; __u8 cb_luma_mult; __u8 cr_luma_mult; __u16 cb_offset; __u16 cr_offset; __u8 reserved[4]; }; /* MPEG-compression definitions kept for backwards compatibility */ #define V4L2_CTRL_CLASS_MPEG V4L2_CTRL_CLASS_CODEC #define V4L2_CID_MPEG_CLASS V4L2_CID_CODEC_CLASS #define V4L2_CID_MPEG_BASE V4L2_CID_CODEC_BASE #define V4L2_CID_MPEG_CX2341X_BASE V4L2_CID_CODEC_CX2341X_BASE #define V4L2_CID_MPEG_MFC51_BASE V4L2_CID_CODEC_MFC51_BASE #endif yavta-0.0+git20250410.3e445c7/include/linux/videodev2.h000066400000000000000000003050021477577134200217320ustar00rootroot00000000000000/* SPDX-License-Identifier: ((GPL-2.0+ WITH Linux-syscall-note) OR BSD-3-Clause) */ /* * Video for Linux Two header file * * Copyright (C) 1999-2012 the contributors * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * Alternatively you can redistribute this file under the terms of the * BSD license as stated below: * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * 3. The names of its contributors may not be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * Header file for v4l or V4L2 drivers and applications * with public API. * All kernel-specific stuff were moved to media/v4l2-dev.h, so * no #if __KERNEL tests are allowed here * * See https://linuxtv.org for more info * * Author: Bill Dirks * Justin Schoeman * Hans Verkuil * et al. */ #ifndef __LINUX_VIDEODEV2_H #define __LINUX_VIDEODEV2_H #include #include #include #include #include /* * Common stuff for both V4L1 and V4L2 * Moved from videodev.h */ #define VIDEO_MAX_FRAME 32 #define VIDEO_MAX_PLANES 8 /* * M I S C E L L A N E O U S */ /* Four-character-code (FOURCC) */ #define v4l2_fourcc(a, b, c, d)\ ((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24)) #define v4l2_fourcc_be(a, b, c, d) (v4l2_fourcc(a, b, c, d) | (1U << 31)) /* * E N U M S */ enum v4l2_field { V4L2_FIELD_ANY = 0, /* driver can choose from none, top, bottom, interlaced depending on whatever it thinks is approximate ... */ V4L2_FIELD_NONE = 1, /* this device has no fields ... */ V4L2_FIELD_TOP = 2, /* top field only */ V4L2_FIELD_BOTTOM = 3, /* bottom field only */ V4L2_FIELD_INTERLACED = 4, /* both fields interlaced */ V4L2_FIELD_SEQ_TB = 5, /* both fields sequential into one buffer, top-bottom order */ V4L2_FIELD_SEQ_BT = 6, /* same as above + bottom-top order */ V4L2_FIELD_ALTERNATE = 7, /* both fields alternating into separate buffers */ V4L2_FIELD_INTERLACED_TB = 8, /* both fields interlaced, top field first and the top field is transmitted first */ V4L2_FIELD_INTERLACED_BT = 9, /* both fields interlaced, top field first and the bottom field is transmitted first */ }; #define V4L2_FIELD_HAS_TOP(field) \ ((field) == V4L2_FIELD_TOP ||\ (field) == V4L2_FIELD_INTERLACED ||\ (field) == V4L2_FIELD_INTERLACED_TB ||\ (field) == V4L2_FIELD_INTERLACED_BT ||\ (field) == V4L2_FIELD_SEQ_TB ||\ (field) == V4L2_FIELD_SEQ_BT) #define V4L2_FIELD_HAS_BOTTOM(field) \ ((field) == V4L2_FIELD_BOTTOM ||\ (field) == V4L2_FIELD_INTERLACED ||\ (field) == V4L2_FIELD_INTERLACED_TB ||\ (field) == V4L2_FIELD_INTERLACED_BT ||\ (field) == V4L2_FIELD_SEQ_TB ||\ (field) == V4L2_FIELD_SEQ_BT) #define V4L2_FIELD_HAS_BOTH(field) \ ((field) == V4L2_FIELD_INTERLACED ||\ (field) == V4L2_FIELD_INTERLACED_TB ||\ (field) == V4L2_FIELD_INTERLACED_BT ||\ (field) == V4L2_FIELD_SEQ_TB ||\ (field) == V4L2_FIELD_SEQ_BT) #define V4L2_FIELD_HAS_T_OR_B(field) \ ((field) == V4L2_FIELD_BOTTOM ||\ (field) == V4L2_FIELD_TOP ||\ (field) == V4L2_FIELD_ALTERNATE) #define V4L2_FIELD_IS_INTERLACED(field) \ ((field) == V4L2_FIELD_INTERLACED ||\ (field) == V4L2_FIELD_INTERLACED_TB ||\ (field) == V4L2_FIELD_INTERLACED_BT) #define V4L2_FIELD_IS_SEQUENTIAL(field) \ ((field) == V4L2_FIELD_SEQ_TB ||\ (field) == V4L2_FIELD_SEQ_BT) enum v4l2_buf_type { V4L2_BUF_TYPE_VIDEO_CAPTURE = 1, V4L2_BUF_TYPE_VIDEO_OUTPUT = 2, V4L2_BUF_TYPE_VIDEO_OVERLAY = 3, V4L2_BUF_TYPE_VBI_CAPTURE = 4, V4L2_BUF_TYPE_VBI_OUTPUT = 5, V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6, V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7, V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10, V4L2_BUF_TYPE_SDR_CAPTURE = 11, V4L2_BUF_TYPE_SDR_OUTPUT = 12, V4L2_BUF_TYPE_META_CAPTURE = 13, V4L2_BUF_TYPE_META_OUTPUT = 14, /* Deprecated, do not use */ V4L2_BUF_TYPE_PRIVATE = 0x80, }; #define V4L2_TYPE_IS_MULTIPLANAR(type) \ ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE \ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) #define V4L2_TYPE_IS_OUTPUT(type) \ ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT \ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE \ || (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY \ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY \ || (type) == V4L2_BUF_TYPE_VBI_OUTPUT \ || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT \ || (type) == V4L2_BUF_TYPE_SDR_OUTPUT \ || (type) == V4L2_BUF_TYPE_META_OUTPUT) #define V4L2_TYPE_IS_CAPTURE(type) (!V4L2_TYPE_IS_OUTPUT(type)) enum v4l2_tuner_type { V4L2_TUNER_RADIO = 1, V4L2_TUNER_ANALOG_TV = 2, V4L2_TUNER_DIGITAL_TV = 3, V4L2_TUNER_SDR = 4, V4L2_TUNER_RF = 5, }; /* Deprecated, do not use */ #define V4L2_TUNER_ADC V4L2_TUNER_SDR enum v4l2_memory { V4L2_MEMORY_MMAP = 1, V4L2_MEMORY_USERPTR = 2, V4L2_MEMORY_OVERLAY = 3, V4L2_MEMORY_DMABUF = 4, }; /* see also http://vektor.theorem.ca/graphics/ycbcr/ */ enum v4l2_colorspace { /* * Default colorspace, i.e. let the driver figure it out. * Can only be used with video capture. */ V4L2_COLORSPACE_DEFAULT = 0, /* SMPTE 170M: used for broadcast NTSC/PAL SDTV */ V4L2_COLORSPACE_SMPTE170M = 1, /* Obsolete pre-1998 SMPTE 240M HDTV standard, superseded by Rec 709 */ V4L2_COLORSPACE_SMPTE240M = 2, /* Rec.709: used for HDTV */ V4L2_COLORSPACE_REC709 = 3, /* * Deprecated, do not use. No driver will ever return this. This was * based on a misunderstanding of the bt878 datasheet. */ V4L2_COLORSPACE_BT878 = 4, /* * NTSC 1953 colorspace. This only makes sense when dealing with * really, really old NTSC recordings. Superseded by SMPTE 170M. */ V4L2_COLORSPACE_470_SYSTEM_M = 5, /* * EBU Tech 3213 PAL/SECAM colorspace. */ V4L2_COLORSPACE_470_SYSTEM_BG = 6, /* * Effectively shorthand for V4L2_COLORSPACE_SRGB, V4L2_YCBCR_ENC_601 * and V4L2_QUANTIZATION_FULL_RANGE. To be used for (Motion-)JPEG. */ V4L2_COLORSPACE_JPEG = 7, /* For RGB colorspaces such as produces by most webcams. */ V4L2_COLORSPACE_SRGB = 8, /* opRGB colorspace */ V4L2_COLORSPACE_OPRGB = 9, /* BT.2020 colorspace, used for UHDTV. */ V4L2_COLORSPACE_BT2020 = 10, /* Raw colorspace: for RAW unprocessed images */ V4L2_COLORSPACE_RAW = 11, /* DCI-P3 colorspace, used by cinema projectors */ V4L2_COLORSPACE_DCI_P3 = 12, }; /* * Determine how COLORSPACE_DEFAULT should map to a proper colorspace. * This depends on whether this is a SDTV image (use SMPTE 170M), an * HDTV image (use Rec. 709), or something else (use sRGB). */ #define V4L2_MAP_COLORSPACE_DEFAULT(is_sdtv, is_hdtv) \ ((is_sdtv) ? V4L2_COLORSPACE_SMPTE170M : \ ((is_hdtv) ? V4L2_COLORSPACE_REC709 : V4L2_COLORSPACE_SRGB)) enum v4l2_xfer_func { /* * Mapping of V4L2_XFER_FUNC_DEFAULT to actual transfer functions * for the various colorspaces: * * V4L2_COLORSPACE_SMPTE170M, V4L2_COLORSPACE_470_SYSTEM_M, * V4L2_COLORSPACE_470_SYSTEM_BG, V4L2_COLORSPACE_REC709 and * V4L2_COLORSPACE_BT2020: V4L2_XFER_FUNC_709 * * V4L2_COLORSPACE_SRGB, V4L2_COLORSPACE_JPEG: V4L2_XFER_FUNC_SRGB * * V4L2_COLORSPACE_OPRGB: V4L2_XFER_FUNC_OPRGB * * V4L2_COLORSPACE_SMPTE240M: V4L2_XFER_FUNC_SMPTE240M * * V4L2_COLORSPACE_RAW: V4L2_XFER_FUNC_NONE * * V4L2_COLORSPACE_DCI_P3: V4L2_XFER_FUNC_DCI_P3 */ V4L2_XFER_FUNC_DEFAULT = 0, V4L2_XFER_FUNC_709 = 1, V4L2_XFER_FUNC_SRGB = 2, V4L2_XFER_FUNC_OPRGB = 3, V4L2_XFER_FUNC_SMPTE240M = 4, V4L2_XFER_FUNC_NONE = 5, V4L2_XFER_FUNC_DCI_P3 = 6, V4L2_XFER_FUNC_SMPTE2084 = 7, }; /* * Determine how XFER_FUNC_DEFAULT should map to a proper transfer function. * This depends on the colorspace. */ #define V4L2_MAP_XFER_FUNC_DEFAULT(colsp) \ ((colsp) == V4L2_COLORSPACE_OPRGB ? V4L2_XFER_FUNC_OPRGB : \ ((colsp) == V4L2_COLORSPACE_SMPTE240M ? V4L2_XFER_FUNC_SMPTE240M : \ ((colsp) == V4L2_COLORSPACE_DCI_P3 ? V4L2_XFER_FUNC_DCI_P3 : \ ((colsp) == V4L2_COLORSPACE_RAW ? V4L2_XFER_FUNC_NONE : \ ((colsp) == V4L2_COLORSPACE_SRGB || (colsp) == V4L2_COLORSPACE_JPEG ? \ V4L2_XFER_FUNC_SRGB : V4L2_XFER_FUNC_709))))) enum v4l2_ycbcr_encoding { /* * Mapping of V4L2_YCBCR_ENC_DEFAULT to actual encodings for the * various colorspaces: * * V4L2_COLORSPACE_SMPTE170M, V4L2_COLORSPACE_470_SYSTEM_M, * V4L2_COLORSPACE_470_SYSTEM_BG, V4L2_COLORSPACE_SRGB, * V4L2_COLORSPACE_OPRGB and V4L2_COLORSPACE_JPEG: V4L2_YCBCR_ENC_601 * * V4L2_COLORSPACE_REC709 and V4L2_COLORSPACE_DCI_P3: V4L2_YCBCR_ENC_709 * * V4L2_COLORSPACE_BT2020: V4L2_YCBCR_ENC_BT2020 * * V4L2_COLORSPACE_SMPTE240M: V4L2_YCBCR_ENC_SMPTE240M */ V4L2_YCBCR_ENC_DEFAULT = 0, /* ITU-R 601 -- SDTV */ V4L2_YCBCR_ENC_601 = 1, /* Rec. 709 -- HDTV */ V4L2_YCBCR_ENC_709 = 2, /* ITU-R 601/EN 61966-2-4 Extended Gamut -- SDTV */ V4L2_YCBCR_ENC_XV601 = 3, /* Rec. 709/EN 61966-2-4 Extended Gamut -- HDTV */ V4L2_YCBCR_ENC_XV709 = 4, /* * sYCC (Y'CbCr encoding of sRGB), identical to ENC_601. It was added * originally due to a misunderstanding of the sYCC standard. It should * not be used, instead use V4L2_YCBCR_ENC_601. */ V4L2_YCBCR_ENC_SYCC = 5, /* BT.2020 Non-constant Luminance Y'CbCr */ V4L2_YCBCR_ENC_BT2020 = 6, /* BT.2020 Constant Luminance Y'CbcCrc */ V4L2_YCBCR_ENC_BT2020_CONST_LUM = 7, /* SMPTE 240M -- Obsolete HDTV */ V4L2_YCBCR_ENC_SMPTE240M = 8, }; /* * enum v4l2_hsv_encoding values should not collide with the ones from * enum v4l2_ycbcr_encoding. */ enum v4l2_hsv_encoding { /* Hue mapped to 0 - 179 */ V4L2_HSV_ENC_180 = 128, /* Hue mapped to 0-255 */ V4L2_HSV_ENC_256 = 129, }; /* * Determine how YCBCR_ENC_DEFAULT should map to a proper Y'CbCr encoding. * This depends on the colorspace. */ #define V4L2_MAP_YCBCR_ENC_DEFAULT(colsp) \ (((colsp) == V4L2_COLORSPACE_REC709 || \ (colsp) == V4L2_COLORSPACE_DCI_P3) ? V4L2_YCBCR_ENC_709 : \ ((colsp) == V4L2_COLORSPACE_BT2020 ? V4L2_YCBCR_ENC_BT2020 : \ ((colsp) == V4L2_COLORSPACE_SMPTE240M ? V4L2_YCBCR_ENC_SMPTE240M : \ V4L2_YCBCR_ENC_601))) enum v4l2_quantization { /* * The default for R'G'B' quantization is always full range. * For Y'CbCr the quantization is always limited range, except * for COLORSPACE_JPEG: this is full range. */ V4L2_QUANTIZATION_DEFAULT = 0, V4L2_QUANTIZATION_FULL_RANGE = 1, V4L2_QUANTIZATION_LIM_RANGE = 2, }; /* * Determine how QUANTIZATION_DEFAULT should map to a proper quantization. * This depends on whether the image is RGB or not, the colorspace. * The Y'CbCr encoding is not used anymore, but is still there for backwards * compatibility. */ #define V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb_or_hsv, colsp, ycbcr_enc) \ (((is_rgb_or_hsv) || (colsp) == V4L2_COLORSPACE_JPEG) ? \ V4L2_QUANTIZATION_FULL_RANGE : V4L2_QUANTIZATION_LIM_RANGE) /* * Deprecated names for opRGB colorspace (IEC 61966-2-5) * * WARNING: Please don't use these deprecated defines in your code, as * there is a chance we have to remove them in the future. */ #define V4L2_COLORSPACE_ADOBERGB V4L2_COLORSPACE_OPRGB #define V4L2_XFER_FUNC_ADOBERGB V4L2_XFER_FUNC_OPRGB enum v4l2_priority { V4L2_PRIORITY_UNSET = 0, /* not initialized */ V4L2_PRIORITY_BACKGROUND = 1, V4L2_PRIORITY_INTERACTIVE = 2, V4L2_PRIORITY_RECORD = 3, V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE, }; struct v4l2_rect { __s32 left; __s32 top; __u32 width; __u32 height; }; struct v4l2_fract { __u32 numerator; __u32 denominator; }; struct v4l2_area { __u32 width; __u32 height; }; /** * struct v4l2_capability - Describes V4L2 device caps returned by VIDIOC_QUERYCAP * * @driver: name of the driver module (e.g. "bttv") * @card: name of the card (e.g. "Hauppauge WinTV") * @bus_info: name of the bus (e.g. "PCI:" + pci_name(pci_dev) ) * @version: KERNEL_VERSION * @capabilities: capabilities of the physical device as a whole * @device_caps: capabilities accessed via this particular device (node) * @reserved: reserved fields for future extensions */ struct v4l2_capability { __u8 driver[16]; __u8 card[32]; __u8 bus_info[32]; __u32 version; __u32 capabilities; __u32 device_caps; __u32 reserved[3]; }; /* Values for 'capabilities' field */ #define V4L2_CAP_VIDEO_CAPTURE 0x00000001 /* Is a video capture device */ #define V4L2_CAP_VIDEO_OUTPUT 0x00000002 /* Is a video output device */ #define V4L2_CAP_VIDEO_OVERLAY 0x00000004 /* Can do video overlay */ #define V4L2_CAP_VBI_CAPTURE 0x00000010 /* Is a raw VBI capture device */ #define V4L2_CAP_VBI_OUTPUT 0x00000020 /* Is a raw VBI output device */ #define V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 /* Is a sliced VBI capture device */ #define V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 /* Is a sliced VBI output device */ #define V4L2_CAP_RDS_CAPTURE 0x00000100 /* RDS data capture */ #define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 /* Can do video output overlay */ #define V4L2_CAP_HW_FREQ_SEEK 0x00000400 /* Can do hardware frequency seek */ #define V4L2_CAP_RDS_OUTPUT 0x00000800 /* Is an RDS encoder */ /* Is a video capture device that supports multiplanar formats */ #define V4L2_CAP_VIDEO_CAPTURE_MPLANE 0x00001000 /* Is a video output device that supports multiplanar formats */ #define V4L2_CAP_VIDEO_OUTPUT_MPLANE 0x00002000 /* Is a video mem-to-mem device that supports multiplanar formats */ #define V4L2_CAP_VIDEO_M2M_MPLANE 0x00004000 /* Is a video mem-to-mem device */ #define V4L2_CAP_VIDEO_M2M 0x00008000 #define V4L2_CAP_TUNER 0x00010000 /* has a tuner */ #define V4L2_CAP_AUDIO 0x00020000 /* has audio support */ #define V4L2_CAP_RADIO 0x00040000 /* is a radio device */ #define V4L2_CAP_MODULATOR 0x00080000 /* has a modulator */ #define V4L2_CAP_SDR_CAPTURE 0x00100000 /* Is a SDR capture device */ #define V4L2_CAP_EXT_PIX_FORMAT 0x00200000 /* Supports the extended pixel format */ #define V4L2_CAP_SDR_OUTPUT 0x00400000 /* Is a SDR output device */ #define V4L2_CAP_META_CAPTURE 0x00800000 /* Is a metadata capture device */ #define V4L2_CAP_READWRITE 0x01000000 /* read/write systemcalls */ #define V4L2_CAP_STREAMING 0x04000000 /* streaming I/O ioctls */ #define V4L2_CAP_META_OUTPUT 0x08000000 /* Is a metadata output device */ #define V4L2_CAP_TOUCH 0x10000000 /* Is a touch device */ #define V4L2_CAP_IO_MC 0x20000000 /* Is input/output controlled by the media controller */ #define V4L2_CAP_DEVICE_CAPS 0x80000000 /* sets device capabilities field */ /* * V I D E O I M A G E F O R M A T */ struct v4l2_pix_format { __u32 width; __u32 height; __u32 pixelformat; __u32 field; /* enum v4l2_field */ __u32 bytesperline; /* for padding, zero if unused */ __u32 sizeimage; __u32 colorspace; /* enum v4l2_colorspace */ __u32 priv; /* private data, depends on pixelformat */ __u32 flags; /* format flags (V4L2_PIX_FMT_FLAG_*) */ union { /* enum v4l2_ycbcr_encoding */ __u32 ycbcr_enc; /* enum v4l2_hsv_encoding */ __u32 hsv_enc; }; __u32 quantization; /* enum v4l2_quantization */ __u32 xfer_func; /* enum v4l2_xfer_func */ }; /* Pixel format FOURCC depth Description */ /* RGB formats (1 or 2 bytes per pixel) */ #define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') /* 8 RGB-3-3-2 */ #define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') /* 16 xxxxrrrr ggggbbbb */ #define V4L2_PIX_FMT_ARGB444 v4l2_fourcc('A', 'R', '1', '2') /* 16 aaaarrrr ggggbbbb */ #define V4L2_PIX_FMT_XRGB444 v4l2_fourcc('X', 'R', '1', '2') /* 16 xxxxrrrr ggggbbbb */ #define V4L2_PIX_FMT_RGBA444 v4l2_fourcc('R', 'A', '1', '2') /* 16 rrrrgggg bbbbaaaa */ #define V4L2_PIX_FMT_RGBX444 v4l2_fourcc('R', 'X', '1', '2') /* 16 rrrrgggg bbbbxxxx */ #define V4L2_PIX_FMT_ABGR444 v4l2_fourcc('A', 'B', '1', '2') /* 16 aaaabbbb ggggrrrr */ #define V4L2_PIX_FMT_XBGR444 v4l2_fourcc('X', 'B', '1', '2') /* 16 xxxxbbbb ggggrrrr */ #define V4L2_PIX_FMT_BGRA444 v4l2_fourcc('G', 'A', '1', '2') /* 16 bbbbgggg rrrraaaa */ #define V4L2_PIX_FMT_BGRX444 v4l2_fourcc('B', 'X', '1', '2') /* 16 bbbbgggg rrrrxxxx */ #define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') /* 16 RGB-5-5-5 */ #define V4L2_PIX_FMT_ARGB555 v4l2_fourcc('A', 'R', '1', '5') /* 16 ARGB-1-5-5-5 */ #define V4L2_PIX_FMT_XRGB555 v4l2_fourcc('X', 'R', '1', '5') /* 16 XRGB-1-5-5-5 */ #define V4L2_PIX_FMT_RGBA555 v4l2_fourcc('R', 'A', '1', '5') /* 16 RGBA-5-5-5-1 */ #define V4L2_PIX_FMT_RGBX555 v4l2_fourcc('R', 'X', '1', '5') /* 16 RGBX-5-5-5-1 */ #define V4L2_PIX_FMT_ABGR555 v4l2_fourcc('A', 'B', '1', '5') /* 16 ABGR-1-5-5-5 */ #define V4L2_PIX_FMT_XBGR555 v4l2_fourcc('X', 'B', '1', '5') /* 16 XBGR-1-5-5-5 */ #define V4L2_PIX_FMT_BGRA555 v4l2_fourcc('B', 'A', '1', '5') /* 16 BGRA-5-5-5-1 */ #define V4L2_PIX_FMT_BGRX555 v4l2_fourcc('B', 'X', '1', '5') /* 16 BGRX-5-5-5-1 */ #define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') /* 16 RGB-5-6-5 */ #define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') /* 16 RGB-5-5-5 BE */ #define V4L2_PIX_FMT_ARGB555X v4l2_fourcc_be('A', 'R', '1', '5') /* 16 ARGB-5-5-5 BE */ #define V4L2_PIX_FMT_XRGB555X v4l2_fourcc_be('X', 'R', '1', '5') /* 16 XRGB-5-5-5 BE */ #define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') /* 16 RGB-5-6-5 BE */ /* RGB formats (3 or 4 bytes per pixel) */ #define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') /* 18 BGR-6-6-6 */ #define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') /* 24 BGR-8-8-8 */ #define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') /* 24 RGB-8-8-8 */ #define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') /* 32 BGR-8-8-8-8 */ #define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') /* 32 BGRA-8-8-8-8 */ #define V4L2_PIX_FMT_XBGR32 v4l2_fourcc('X', 'R', '2', '4') /* 32 BGRX-8-8-8-8 */ #define V4L2_PIX_FMT_BGRA32 v4l2_fourcc('R', 'A', '2', '4') /* 32 ABGR-8-8-8-8 */ #define V4L2_PIX_FMT_BGRX32 v4l2_fourcc('R', 'X', '2', '4') /* 32 XBGR-8-8-8-8 */ #define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') /* 32 RGB-8-8-8-8 */ #define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') /* 32 RGBA-8-8-8-8 */ #define V4L2_PIX_FMT_RGBX32 v4l2_fourcc('X', 'B', '2', '4') /* 32 RGBX-8-8-8-8 */ #define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') /* 32 ARGB-8-8-8-8 */ #define V4L2_PIX_FMT_XRGB32 v4l2_fourcc('B', 'X', '2', '4') /* 32 XRGB-8-8-8-8 */ #define V4L2_PIX_FMT_RGBX1010102 v4l2_fourcc('R', 'X', '3', '0') /* 32 RGBX-10-10-10-2 */ #define V4L2_PIX_FMT_RGBA1010102 v4l2_fourcc('R', 'A', '3', '0') /* 32 RGBA-10-10-10-2 */ #define V4L2_PIX_FMT_ARGB2101010 v4l2_fourcc('A', 'R', '3', '0') /* 32 ARGB-2-10-10-10 */ /* RGB formats (6 or 8 bytes per pixel) */ #define V4L2_PIX_FMT_BGR48_12 v4l2_fourcc('B', '3', '1', '2') /* 48 BGR 12-bit per component */ #define V4L2_PIX_FMT_ABGR64_12 v4l2_fourcc('B', '4', '1', '2') /* 64 BGRA 12-bit per component */ /* Grey formats */ #define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') /* 8 Greyscale */ #define V4L2_PIX_FMT_Y4 v4l2_fourcc('Y', '0', '4', ' ') /* 4 Greyscale */ #define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') /* 6 Greyscale */ #define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') /* 10 Greyscale */ #define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') /* 12 Greyscale */ #define V4L2_PIX_FMT_Y012 v4l2_fourcc('Y', '0', '1', '2') /* 12 Greyscale */ #define V4L2_PIX_FMT_Y14 v4l2_fourcc('Y', '1', '4', ' ') /* 14 Greyscale */ #define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* 16 Greyscale */ #define V4L2_PIX_FMT_Y16_BE v4l2_fourcc_be('Y', '1', '6', ' ') /* 16 Greyscale BE */ /* Grey bit-packed formats */ #define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') /* 10 Greyscale bit-packed */ #define V4L2_PIX_FMT_Y10P v4l2_fourcc('Y', '1', '0', 'P') /* 10 Greyscale, MIPI RAW10 packed */ #define V4L2_PIX_FMT_IPU3_Y10 v4l2_fourcc('i', 'p', '3', 'y') /* IPU3 packed 10-bit greyscale */ /* Palette formats */ #define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') /* 8 8-bit palette */ /* Chrominance formats */ #define V4L2_PIX_FMT_UV8 v4l2_fourcc('U', 'V', '8', ' ') /* 8 UV 4:4 */ /* Luminance+Chrominance formats */ #define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */ #define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */ #define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') /* 16 YVU 4:2:2 */ #define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */ #define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */ #define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */ #define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') /* 16 xxxxyyyy uuuuvvvv */ #define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */ #define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */ #define V4L2_PIX_FMT_YUV24 v4l2_fourcc('Y', 'U', 'V', '3') /* 24 YUV-8-8-8 */ #define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */ #define V4L2_PIX_FMT_AYUV32 v4l2_fourcc('A', 'Y', 'U', 'V') /* 32 AYUV-8-8-8-8 */ #define V4L2_PIX_FMT_XYUV32 v4l2_fourcc('X', 'Y', 'U', 'V') /* 32 XYUV-8-8-8-8 */ #define V4L2_PIX_FMT_VUYA32 v4l2_fourcc('V', 'U', 'Y', 'A') /* 32 VUYA-8-8-8-8 */ #define V4L2_PIX_FMT_VUYX32 v4l2_fourcc('V', 'U', 'Y', 'X') /* 32 VUYX-8-8-8-8 */ #define V4L2_PIX_FMT_YUVA32 v4l2_fourcc('Y', 'U', 'V', 'A') /* 32 YUVA-8-8-8-8 */ #define V4L2_PIX_FMT_YUVX32 v4l2_fourcc('Y', 'U', 'V', 'X') /* 32 YUVX-8-8-8-8 */ #define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') /* 12 YUV 4:2:0 2 lines y, 1 line uv interleaved */ #define V4L2_PIX_FMT_YUV48_12 v4l2_fourcc('Y', '3', '1', '2') /* 48 YUV 4:4:4 12-bit per component */ /* * YCbCr packed format. For each Y2xx format, xx bits of valid data occupy the MSBs * of the 16 bit components, and 16-xx bits of zero padding occupy the LSBs. */ #define V4L2_PIX_FMT_Y210 v4l2_fourcc('Y', '2', '1', '0') /* 32 YUYV 4:2:2 */ #define V4L2_PIX_FMT_Y212 v4l2_fourcc('Y', '2', '1', '2') /* 32 YUYV 4:2:2 */ #define V4L2_PIX_FMT_Y216 v4l2_fourcc('Y', '2', '1', '6') /* 32 YUYV 4:2:2 */ /* two planes -- one Y, one Cr + Cb interleaved */ #define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') /* 12 Y/CbCr 4:2:0 */ #define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') /* 12 Y/CrCb 4:2:0 */ #define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') /* 16 Y/CbCr 4:2:2 */ #define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') /* 16 Y/CrCb 4:2:2 */ #define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') /* 24 Y/CbCr 4:4:4 */ #define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') /* 24 Y/CrCb 4:4:4 */ #define V4L2_PIX_FMT_P010 v4l2_fourcc('P', '0', '1', '0') /* 24 Y/CbCr 4:2:0 10-bit per component */ #define V4L2_PIX_FMT_P012 v4l2_fourcc('P', '0', '1', '2') /* 24 Y/CbCr 4:2:0 12-bit per component */ /* two non contiguous planes - one Y, one Cr + Cb interleaved */ #define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 */ #define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') /* 21 Y/CrCb 4:2:0 */ #define V4L2_PIX_FMT_NV16M v4l2_fourcc('N', 'M', '1', '6') /* 16 Y/CbCr 4:2:2 */ #define V4L2_PIX_FMT_NV61M v4l2_fourcc('N', 'M', '6', '1') /* 16 Y/CrCb 4:2:2 */ #define V4L2_PIX_FMT_P012M v4l2_fourcc('P', 'M', '1', '2') /* 24 Y/CbCr 4:2:0 12-bit per component */ /* three planes - Y Cb, Cr */ #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */ #define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') /* 9 YVU 4:1:0 */ #define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') /* 12 YVU411 planar */ #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 */ #define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') /* 12 YVU 4:2:0 */ #define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') /* 16 YVU422 planar */ /* three non contiguous planes - Y, Cb, Cr */ #define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') /* 12 YUV420 planar */ #define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') /* 12 YVU420 planar */ #define V4L2_PIX_FMT_YUV422M v4l2_fourcc('Y', 'M', '1', '6') /* 16 YUV422 planar */ #define V4L2_PIX_FMT_YVU422M v4l2_fourcc('Y', 'M', '6', '1') /* 16 YVU422 planar */ #define V4L2_PIX_FMT_YUV444M v4l2_fourcc('Y', 'M', '2', '4') /* 24 YUV444 planar */ #define V4L2_PIX_FMT_YVU444M v4l2_fourcc('Y', 'M', '4', '2') /* 24 YVU444 planar */ /* Tiled YUV formats */ #define V4L2_PIX_FMT_NV12_4L4 v4l2_fourcc('V', 'T', '1', '2') /* 12 Y/CbCr 4:2:0 4x4 tiles */ #define V4L2_PIX_FMT_NV12_16L16 v4l2_fourcc('H', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 16x16 tiles */ #define V4L2_PIX_FMT_NV12_32L32 v4l2_fourcc('S', 'T', '1', '2') /* 12 Y/CbCr 4:2:0 32x32 tiles */ #define V4L2_PIX_FMT_NV15_4L4 v4l2_fourcc('V', 'T', '1', '5') /* 15 Y/CbCr 4:2:0 10-bit 4x4 tiles */ #define V4L2_PIX_FMT_P010_4L4 v4l2_fourcc('T', '0', '1', '0') /* 12 Y/CbCr 4:2:0 10-bit 4x4 macroblocks */ #define V4L2_PIX_FMT_NV12_8L128 v4l2_fourcc('A', 'T', '1', '2') /* Y/CbCr 4:2:0 8x128 tiles */ #define V4L2_PIX_FMT_NV12_10BE_8L128 v4l2_fourcc_be('A', 'X', '1', '2') /* Y/CbCr 4:2:0 10-bit 8x128 tiles */ /* Tiled YUV formats, non contiguous planes */ #define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 64x32 tiles */ #define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 16x16 tiles */ #define V4L2_PIX_FMT_NV12M_8L128 v4l2_fourcc('N', 'A', '1', '2') /* Y/CbCr 4:2:0 8x128 tiles */ #define V4L2_PIX_FMT_NV12M_10BE_8L128 v4l2_fourcc_be('N', 'T', '1', '2') /* Y/CbCr 4:2:0 10-bit 8x128 tiles */ /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */ #define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') /* 8 BGBG.. GRGR.. */ #define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') /* 8 GBGB.. RGRG.. */ #define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') /* 8 GRGR.. BGBG.. */ #define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') /* 8 RGRG.. GBGB.. */ #define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '0') /* 10 BGBG.. GRGR.. */ #define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '0') /* 10 GBGB.. RGRG.. */ #define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0') /* 10 GRGR.. BGBG.. */ #define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '0') /* 10 RGRG.. GBGB.. */ /* 10bit raw bayer packed, 5 bytes for every 4 pixels */ #define V4L2_PIX_FMT_SBGGR10P v4l2_fourcc('p', 'B', 'A', 'A') #define V4L2_PIX_FMT_SGBRG10P v4l2_fourcc('p', 'G', 'A', 'A') #define V4L2_PIX_FMT_SGRBG10P v4l2_fourcc('p', 'g', 'A', 'A') #define V4L2_PIX_FMT_SRGGB10P v4l2_fourcc('p', 'R', 'A', 'A') /* 10bit raw bayer a-law compressed to 8 bits */ #define V4L2_PIX_FMT_SBGGR10ALAW8 v4l2_fourcc('a', 'B', 'A', '8') #define V4L2_PIX_FMT_SGBRG10ALAW8 v4l2_fourcc('a', 'G', 'A', '8') #define V4L2_PIX_FMT_SGRBG10ALAW8 v4l2_fourcc('a', 'g', 'A', '8') #define V4L2_PIX_FMT_SRGGB10ALAW8 v4l2_fourcc('a', 'R', 'A', '8') /* 10bit raw bayer DPCM compressed to 8 bits */ #define V4L2_PIX_FMT_SBGGR10DPCM8 v4l2_fourcc('b', 'B', 'A', '8') #define V4L2_PIX_FMT_SGBRG10DPCM8 v4l2_fourcc('b', 'G', 'A', '8') #define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0') #define V4L2_PIX_FMT_SRGGB10DPCM8 v4l2_fourcc('b', 'R', 'A', '8') #define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') /* 12 BGBG.. GRGR.. */ #define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12 GBGB.. RGRG.. */ #define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12 GRGR.. BGBG.. */ #define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12 RGRG.. GBGB.. */ /* 12bit raw bayer packed, 6 bytes for every 4 pixels */ #define V4L2_PIX_FMT_SBGGR12P v4l2_fourcc('p', 'B', 'C', 'C') #define V4L2_PIX_FMT_SGBRG12P v4l2_fourcc('p', 'G', 'C', 'C') #define V4L2_PIX_FMT_SGRBG12P v4l2_fourcc('p', 'g', 'C', 'C') #define V4L2_PIX_FMT_SRGGB12P v4l2_fourcc('p', 'R', 'C', 'C') #define V4L2_PIX_FMT_SBGGR14 v4l2_fourcc('B', 'G', '1', '4') /* 14 BGBG.. GRGR.. */ #define V4L2_PIX_FMT_SGBRG14 v4l2_fourcc('G', 'B', '1', '4') /* 14 GBGB.. RGRG.. */ #define V4L2_PIX_FMT_SGRBG14 v4l2_fourcc('G', 'R', '1', '4') /* 14 GRGR.. BGBG.. */ #define V4L2_PIX_FMT_SRGGB14 v4l2_fourcc('R', 'G', '1', '4') /* 14 RGRG.. GBGB.. */ /* 14bit raw bayer packed, 7 bytes for every 4 pixels */ #define V4L2_PIX_FMT_SBGGR14P v4l2_fourcc('p', 'B', 'E', 'E') #define V4L2_PIX_FMT_SGBRG14P v4l2_fourcc('p', 'G', 'E', 'E') #define V4L2_PIX_FMT_SGRBG14P v4l2_fourcc('p', 'g', 'E', 'E') #define V4L2_PIX_FMT_SRGGB14P v4l2_fourcc('p', 'R', 'E', 'E') #define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16 BGBG.. GRGR.. */ #define V4L2_PIX_FMT_SGBRG16 v4l2_fourcc('G', 'B', '1', '6') /* 16 GBGB.. RGRG.. */ #define V4L2_PIX_FMT_SGRBG16 v4l2_fourcc('G', 'R', '1', '6') /* 16 GRGR.. BGBG.. */ #define V4L2_PIX_FMT_SRGGB16 v4l2_fourcc('R', 'G', '1', '6') /* 16 RGRG.. GBGB.. */ /* HSV formats */ #define V4L2_PIX_FMT_HSV24 v4l2_fourcc('H', 'S', 'V', '3') #define V4L2_PIX_FMT_HSV32 v4l2_fourcc('H', 'S', 'V', '4') /* compressed formats */ #define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') /* Motion-JPEG */ #define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') /* JFIF JPEG */ #define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') /* 1394 */ #define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') /* MPEG-1/2/4 Multiplexed */ #define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4') /* H264 with start codes */ #define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') /* H264 without start codes */ #define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') /* H264 MVC */ #define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') /* H263 */ #define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') /* MPEG-1 ES */ #define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') /* MPEG-2 ES */ #define V4L2_PIX_FMT_MPEG2_SLICE v4l2_fourcc('M', 'G', '2', 'S') /* MPEG-2 parsed slice data */ #define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') /* MPEG-4 part 2 ES */ #define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') /* Xvid */ #define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') /* SMPTE 421M Annex G compliant stream */ #define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') /* SMPTE 421M Annex L compliant stream */ #define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') /* VP8 */ #define V4L2_PIX_FMT_VP8_FRAME v4l2_fourcc('V', 'P', '8', 'F') /* VP8 parsed frame */ #define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0') /* VP9 */ #define V4L2_PIX_FMT_VP9_FRAME v4l2_fourcc('V', 'P', '9', 'F') /* VP9 parsed frame */ #define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C') /* HEVC aka H.265 */ #define V4L2_PIX_FMT_FWHT v4l2_fourcc('F', 'W', 'H', 'T') /* Fast Walsh Hadamard Transform (vicodec) */ #define V4L2_PIX_FMT_FWHT_STATELESS v4l2_fourcc('S', 'F', 'W', 'H') /* Stateless FWHT (vicodec) */ #define V4L2_PIX_FMT_H264_SLICE v4l2_fourcc('S', '2', '6', '4') /* H264 parsed slices */ #define V4L2_PIX_FMT_HEVC_SLICE v4l2_fourcc('S', '2', '6', '5') /* HEVC parsed slices */ #define V4L2_PIX_FMT_AV1_FRAME v4l2_fourcc('A', 'V', '1', 'F') /* AV1 parsed frame */ #define V4L2_PIX_FMT_SPK v4l2_fourcc('S', 'P', 'K', '0') /* Sorenson Spark */ #define V4L2_PIX_FMT_RV30 v4l2_fourcc('R', 'V', '3', '0') /* RealVideo 8 */ #define V4L2_PIX_FMT_RV40 v4l2_fourcc('R', 'V', '4', '0') /* RealVideo 9 & 10 */ /* Vendor-specific formats */ #define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') /* cpia1 YUV */ #define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') /* Winnov hw compress */ #define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') /* SN9C10x compression */ #define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') /* SN9C20x YUV 4:2:0 */ #define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') /* pwc older webcam */ #define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') /* pwc newer webcam */ #define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') /* ET61X251 compression */ #define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') /* YUYV per line */ #define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') /* YYUV per line */ #define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') /* YUVY per line */ #define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') /* compressed GBRG bayer */ #define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') /* compressed BGGR bayer */ #define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') /* compressed BGGR bayer */ #define V4L2_PIX_FMT_JL2005BCD v4l2_fourcc('J', 'L', '2', '0') /* compressed RGGB bayer */ #define V4L2_PIX_FMT_SN9C2028 v4l2_fourcc('S', 'O', 'N', 'X') /* compressed GBRG bayer */ #define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') /* compressed RGGB bayer */ #define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') /* Pixart 73xx JPEG */ #define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') /* ov511 JPEG */ #define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') /* ov518 JPEG */ #define V4L2_PIX_FMT_STV0680 v4l2_fourcc('S', '6', '8', '0') /* stv0680 bayer */ #define V4L2_PIX_FMT_TM6000 v4l2_fourcc('T', 'M', '6', '0') /* tm5600/tm60x0 */ #define V4L2_PIX_FMT_CIT_YYVYUY v4l2_fourcc('C', 'I', 'T', 'V') /* one line of Y then 1 line of VYUY */ #define V4L2_PIX_FMT_KONICA420 v4l2_fourcc('K', 'O', 'N', 'I') /* YUV420 planar in blocks of 256 pixels */ #define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') /* JPEG-Lite */ #define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') /* se401 janggu compressed rgb */ #define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') /* S5C73M3 interleaved UYVY/JPEG */ #define V4L2_PIX_FMT_Y8I v4l2_fourcc('Y', '8', 'I', ' ') /* Greyscale 8-bit L/R interleaved */ #define V4L2_PIX_FMT_Y12I v4l2_fourcc('Y', '1', '2', 'I') /* Greyscale 12-bit L/R interleaved */ #define V4L2_PIX_FMT_Z16 v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */ #define V4L2_PIX_FMT_MT21C v4l2_fourcc('M', 'T', '2', '1') /* Mediatek compressed block mode */ #define V4L2_PIX_FMT_MM21 v4l2_fourcc('M', 'M', '2', '1') /* Mediatek 8-bit block mode, two non-contiguous planes */ #define V4L2_PIX_FMT_MT2110T v4l2_fourcc('M', 'T', '2', 'T') /* Mediatek 10-bit block tile mode */ #define V4L2_PIX_FMT_MT2110R v4l2_fourcc('M', 'T', '2', 'R') /* Mediatek 10-bit block raster mode */ #define V4L2_PIX_FMT_INZI v4l2_fourcc('I', 'N', 'Z', 'I') /* Intel Planar Greyscale 10-bit and Depth 16-bit */ #define V4L2_PIX_FMT_CNF4 v4l2_fourcc('C', 'N', 'F', '4') /* Intel 4-bit packed depth confidence information */ #define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* BTTV 8-bit dithered RGB */ #define V4L2_PIX_FMT_QC08C v4l2_fourcc('Q', '0', '8', 'C') /* Qualcomm 8-bit compressed */ #define V4L2_PIX_FMT_QC10C v4l2_fourcc('Q', '1', '0', 'C') /* Qualcomm 10-bit compressed */ #define V4L2_PIX_FMT_AJPG v4l2_fourcc('A', 'J', 'P', 'G') /* Aspeed JPEG */ #define V4L2_PIX_FMT_HEXTILE v4l2_fourcc('H', 'X', 'T', 'L') /* Hextile compressed */ /* 10bit raw packed, 32 bytes for every 25 pixels, last LSB 6 bits unused */ #define V4L2_PIX_FMT_IPU3_SBGGR10 v4l2_fourcc('i', 'p', '3', 'b') /* IPU3 packed 10-bit BGGR bayer */ #define V4L2_PIX_FMT_IPU3_SGBRG10 v4l2_fourcc('i', 'p', '3', 'g') /* IPU3 packed 10-bit GBRG bayer */ #define V4L2_PIX_FMT_IPU3_SGRBG10 v4l2_fourcc('i', 'p', '3', 'G') /* IPU3 packed 10-bit GRBG bayer */ #define V4L2_PIX_FMT_IPU3_SRGGB10 v4l2_fourcc('i', 'p', '3', 'r') /* IPU3 packed 10-bit RGGB bayer */ /* SDR formats - used only for Software Defined Radio devices */ #define V4L2_SDR_FMT_CU8 v4l2_fourcc('C', 'U', '0', '8') /* IQ u8 */ #define V4L2_SDR_FMT_CU16LE v4l2_fourcc('C', 'U', '1', '6') /* IQ u16le */ #define V4L2_SDR_FMT_CS8 v4l2_fourcc('C', 'S', '0', '8') /* complex s8 */ #define V4L2_SDR_FMT_CS14LE v4l2_fourcc('C', 'S', '1', '4') /* complex s14le */ #define V4L2_SDR_FMT_RU12LE v4l2_fourcc('R', 'U', '1', '2') /* real u12le */ #define V4L2_SDR_FMT_PCU16BE v4l2_fourcc('P', 'C', '1', '6') /* planar complex u16be */ #define V4L2_SDR_FMT_PCU18BE v4l2_fourcc('P', 'C', '1', '8') /* planar complex u18be */ #define V4L2_SDR_FMT_PCU20BE v4l2_fourcc('P', 'C', '2', '0') /* planar complex u20be */ /* Touch formats - used for Touch devices */ #define V4L2_TCH_FMT_DELTA_TD16 v4l2_fourcc('T', 'D', '1', '6') /* 16-bit signed deltas */ #define V4L2_TCH_FMT_DELTA_TD08 v4l2_fourcc('T', 'D', '0', '8') /* 8-bit signed deltas */ #define V4L2_TCH_FMT_TU16 v4l2_fourcc('T', 'U', '1', '6') /* 16-bit unsigned touch data */ #define V4L2_TCH_FMT_TU08 v4l2_fourcc('T', 'U', '0', '8') /* 8-bit unsigned touch data */ /* Meta-data formats */ #define V4L2_META_FMT_VSP1_HGO v4l2_fourcc('V', 'S', 'P', 'H') /* R-Car VSP1 1-D Histogram */ #define V4L2_META_FMT_VSP1_HGT v4l2_fourcc('V', 'S', 'P', 'T') /* R-Car VSP1 2-D Histogram */ #define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header metadata */ #define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D4XX Payload Header metadata */ #define V4L2_META_FMT_VIVID v4l2_fourcc('V', 'I', 'V', 'D') /* Vivid Metadata */ /* Vendor specific - used for RK_ISP1 camera sub-system */ #define V4L2_META_FMT_RK_ISP1_PARAMS v4l2_fourcc('R', 'K', '1', 'P') /* Rockchip ISP1 3A Parameters */ #define V4L2_META_FMT_RK_ISP1_STAT_3A v4l2_fourcc('R', 'K', '1', 'S') /* Rockchip ISP1 3A Statistics */ /* priv field value to indicates that subsequent fields are valid. */ #define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe /* Flags */ #define V4L2_PIX_FMT_FLAG_PREMUL_ALPHA 0x00000001 #define V4L2_PIX_FMT_FLAG_SET_CSC 0x00000002 /* * F O R M A T E N U M E R A T I O N */ struct v4l2_fmtdesc { __u32 index; /* Format number */ __u32 type; /* enum v4l2_buf_type */ __u32 flags; __u8 description[32]; /* Description string */ __u32 pixelformat; /* Format fourcc */ __u32 mbus_code; /* Media bus code */ __u32 reserved[3]; }; #define V4L2_FMT_FLAG_COMPRESSED 0x0001 #define V4L2_FMT_FLAG_EMULATED 0x0002 #define V4L2_FMT_FLAG_CONTINUOUS_BYTESTREAM 0x0004 #define V4L2_FMT_FLAG_DYN_RESOLUTION 0x0008 #define V4L2_FMT_FLAG_ENC_CAP_FRAME_INTERVAL 0x0010 #define V4L2_FMT_FLAG_CSC_COLORSPACE 0x0020 #define V4L2_FMT_FLAG_CSC_XFER_FUNC 0x0040 #define V4L2_FMT_FLAG_CSC_YCBCR_ENC 0x0080 #define V4L2_FMT_FLAG_CSC_HSV_ENC V4L2_FMT_FLAG_CSC_YCBCR_ENC #define V4L2_FMT_FLAG_CSC_QUANTIZATION 0x0100 /* Frame Size and frame rate enumeration */ /* * F R A M E S I Z E E N U M E R A T I O N */ enum v4l2_frmsizetypes { V4L2_FRMSIZE_TYPE_DISCRETE = 1, V4L2_FRMSIZE_TYPE_CONTINUOUS = 2, V4L2_FRMSIZE_TYPE_STEPWISE = 3, }; struct v4l2_frmsize_discrete { __u32 width; /* Frame width [pixel] */ __u32 height; /* Frame height [pixel] */ }; struct v4l2_frmsize_stepwise { __u32 min_width; /* Minimum frame width [pixel] */ __u32 max_width; /* Maximum frame width [pixel] */ __u32 step_width; /* Frame width step size [pixel] */ __u32 min_height; /* Minimum frame height [pixel] */ __u32 max_height; /* Maximum frame height [pixel] */ __u32 step_height; /* Frame height step size [pixel] */ }; struct v4l2_frmsizeenum { __u32 index; /* Frame size number */ __u32 pixel_format; /* Pixel format */ __u32 type; /* Frame size type the device supports. */ union { /* Frame size */ struct v4l2_frmsize_discrete discrete; struct v4l2_frmsize_stepwise stepwise; }; __u32 reserved[2]; /* Reserved space for future use */ }; /* * F R A M E R A T E E N U M E R A T I O N */ enum v4l2_frmivaltypes { V4L2_FRMIVAL_TYPE_DISCRETE = 1, V4L2_FRMIVAL_TYPE_CONTINUOUS = 2, V4L2_FRMIVAL_TYPE_STEPWISE = 3, }; struct v4l2_frmival_stepwise { struct v4l2_fract min; /* Minimum frame interval [s] */ struct v4l2_fract max; /* Maximum frame interval [s] */ struct v4l2_fract step; /* Frame interval step size [s] */ }; struct v4l2_frmivalenum { __u32 index; /* Frame format index */ __u32 pixel_format; /* Pixel format */ __u32 width; /* Frame width */ __u32 height; /* Frame height */ __u32 type; /* Frame interval type the device supports. */ union { /* Frame interval */ struct v4l2_fract discrete; struct v4l2_frmival_stepwise stepwise; }; __u32 reserved[2]; /* Reserved space for future use */ }; /* * T I M E C O D E */ struct v4l2_timecode { __u32 type; __u32 flags; __u8 frames; __u8 seconds; __u8 minutes; __u8 hours; __u8 userbits[4]; }; /* Type */ #define V4L2_TC_TYPE_24FPS 1 #define V4L2_TC_TYPE_25FPS 2 #define V4L2_TC_TYPE_30FPS 3 #define V4L2_TC_TYPE_50FPS 4 #define V4L2_TC_TYPE_60FPS 5 /* Flags */ #define V4L2_TC_FLAG_DROPFRAME 0x0001 /* "drop-frame" mode */ #define V4L2_TC_FLAG_COLORFRAME 0x0002 #define V4L2_TC_USERBITS_field 0x000C #define V4L2_TC_USERBITS_USERDEFINED 0x0000 #define V4L2_TC_USERBITS_8BITCHARS 0x0008 /* The above is based on SMPTE timecodes */ struct v4l2_jpegcompression { int quality; int APPn; /* Number of APP segment to be written, * must be 0..15 */ int APP_len; /* Length of data in JPEG APPn segment */ char APP_data[60]; /* Data in the JPEG APPn segment. */ int COM_len; /* Length of data in JPEG COM segment */ char COM_data[60]; /* Data in JPEG COM segment */ __u32 jpeg_markers; /* Which markers should go into the JPEG * output. Unless you exactly know what * you do, leave them untouched. * Including less markers will make the * resulting code smaller, but there will * be fewer applications which can read it. * The presence of the APP and COM marker * is influenced by APP_len and COM_len * ONLY, not by this property! */ #define V4L2_JPEG_MARKER_DHT (1<<3) /* Define Huffman Tables */ #define V4L2_JPEG_MARKER_DQT (1<<4) /* Define Quantization Tables */ #define V4L2_JPEG_MARKER_DRI (1<<5) /* Define Restart Interval */ #define V4L2_JPEG_MARKER_COM (1<<6) /* Comment segment */ #define V4L2_JPEG_MARKER_APP (1<<7) /* App segment, driver will * always use APP0 */ }; /* * M E M O R Y - M A P P I N G B U F F E R S */ struct v4l2_requestbuffers { __u32 count; __u32 type; /* enum v4l2_buf_type */ __u32 memory; /* enum v4l2_memory */ __u32 capabilities; __u8 flags; __u8 reserved[3]; }; #define V4L2_MEMORY_FLAG_NON_COHERENT (1 << 0) /* capabilities for struct v4l2_requestbuffers and v4l2_create_buffers */ #define V4L2_BUF_CAP_SUPPORTS_MMAP (1 << 0) #define V4L2_BUF_CAP_SUPPORTS_USERPTR (1 << 1) #define V4L2_BUF_CAP_SUPPORTS_DMABUF (1 << 2) #define V4L2_BUF_CAP_SUPPORTS_REQUESTS (1 << 3) #define V4L2_BUF_CAP_SUPPORTS_ORPHANED_BUFS (1 << 4) #define V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF (1 << 5) #define V4L2_BUF_CAP_SUPPORTS_MMAP_CACHE_HINTS (1 << 6) #define V4L2_BUF_CAP_SUPPORTS_MAX_NUM_BUFFERS (1 << 7) /** * struct v4l2_plane - plane info for multi-planar buffers * @bytesused: number of bytes occupied by data in the plane (payload) * @length: size of this plane (NOT the payload) in bytes * @mem_offset: when memory in the associated struct v4l2_buffer is * V4L2_MEMORY_MMAP, equals the offset from the start of * the device memory for this plane (or is a "cookie" that * should be passed to mmap() called on the video node) * @userptr: when memory is V4L2_MEMORY_USERPTR, a userspace pointer * pointing to this plane * @fd: when memory is V4L2_MEMORY_DMABUF, a userspace file * descriptor associated with this plane * @m: union of @mem_offset, @userptr and @fd * @data_offset: offset in the plane to the start of data; usually 0, * unless there is a header in front of the data * @reserved: drivers and applications must zero this array * * Multi-planar buffers consist of one or more planes, e.g. an YCbCr buffer * with two planes can have one plane for Y, and another for interleaved CbCr * components. Each plane can reside in a separate memory buffer, or even in * a completely separate memory node (e.g. in embedded devices). */ struct v4l2_plane { __u32 bytesused; __u32 length; union { __u32 mem_offset; unsigned long userptr; __s32 fd; } m; __u32 data_offset; __u32 reserved[11]; }; /** * struct v4l2_buffer - video buffer info * @index: id number of the buffer * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for * multiplanar buffers); * @bytesused: number of bytes occupied by data in the buffer (payload); * unused (set to 0) for multiplanar buffers * @flags: buffer informational flags * @field: enum v4l2_field; field order of the image in the buffer * @timestamp: frame timestamp * @timecode: frame timecode * @sequence: sequence count of this frame * @memory: enum v4l2_memory; the method, in which the actual video data is * passed * @offset: for non-multiplanar buffers with memory == V4L2_MEMORY_MMAP; * offset from the start of the device memory for this plane, * (or a "cookie" that should be passed to mmap() as offset) * @userptr: for non-multiplanar buffers with memory == V4L2_MEMORY_USERPTR; * a userspace pointer pointing to this buffer * @fd: for non-multiplanar buffers with memory == V4L2_MEMORY_DMABUF; * a userspace file descriptor associated with this buffer * @planes: for multiplanar buffers; userspace pointer to the array of plane * info structs for this buffer * @m: union of @offset, @userptr, @planes and @fd * @length: size in bytes of the buffer (NOT its payload) for single-plane * buffers (when type != *_MPLANE); number of elements in the * planes array for multi-plane buffers * @reserved2: drivers and applications must zero this field * @request_fd: fd of the request that this buffer should use * @reserved: for backwards compatibility with applications that do not know * about @request_fd * * Contains data exchanged by application and driver using one of the Streaming * I/O methods. */ struct v4l2_buffer { __u32 index; __u32 type; __u32 bytesused; __u32 flags; __u32 field; struct timeval timestamp; struct v4l2_timecode timecode; __u32 sequence; /* memory location */ __u32 memory; union { __u32 offset; unsigned long userptr; struct v4l2_plane *planes; __s32 fd; } m; __u32 length; __u32 reserved2; union { __s32 request_fd; __u32 reserved; }; }; /** * v4l2_timeval_to_ns - Convert timeval to nanoseconds * @tv: pointer to the timeval variable to be converted * * Returns the scalar nanosecond representation of the timeval * parameter. */ static __inline__ __u64 v4l2_timeval_to_ns(const struct timeval *tv) { return (__u64)tv->tv_sec * 1000000000ULL + tv->tv_usec * 1000; } /* Flags for 'flags' field */ /* Buffer is mapped (flag) */ #define V4L2_BUF_FLAG_MAPPED 0x00000001 /* Buffer is queued for processing */ #define V4L2_BUF_FLAG_QUEUED 0x00000002 /* Buffer is ready */ #define V4L2_BUF_FLAG_DONE 0x00000004 /* Image is a keyframe (I-frame) */ #define V4L2_BUF_FLAG_KEYFRAME 0x00000008 /* Image is a P-frame */ #define V4L2_BUF_FLAG_PFRAME 0x00000010 /* Image is a B-frame */ #define V4L2_BUF_FLAG_BFRAME 0x00000020 /* Buffer is ready, but the data contained within is corrupted. */ #define V4L2_BUF_FLAG_ERROR 0x00000040 /* Buffer is added to an unqueued request */ #define V4L2_BUF_FLAG_IN_REQUEST 0x00000080 /* timecode field is valid */ #define V4L2_BUF_FLAG_TIMECODE 0x00000100 /* Don't return the capture buffer until OUTPUT timestamp changes */ #define V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF 0x00000200 /* Buffer is prepared for queuing */ #define V4L2_BUF_FLAG_PREPARED 0x00000400 /* Cache handling flags */ #define V4L2_BUF_FLAG_NO_CACHE_INVALIDATE 0x00000800 #define V4L2_BUF_FLAG_NO_CACHE_CLEAN 0x00001000 /* Timestamp type */ #define V4L2_BUF_FLAG_TIMESTAMP_MASK 0x0000e000 #define V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN 0x00000000 #define V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC 0x00002000 #define V4L2_BUF_FLAG_TIMESTAMP_COPY 0x00004000 /* Timestamp sources. */ #define V4L2_BUF_FLAG_TSTAMP_SRC_MASK 0x00070000 #define V4L2_BUF_FLAG_TSTAMP_SRC_EOF 0x00000000 #define V4L2_BUF_FLAG_TSTAMP_SRC_SOE 0x00010000 /* mem2mem encoder/decoder */ #define V4L2_BUF_FLAG_LAST 0x00100000 /* request_fd is valid */ #define V4L2_BUF_FLAG_REQUEST_FD 0x00800000 /** * struct v4l2_exportbuffer - export of video buffer as DMABUF file descriptor * * @index: id number of the buffer * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for * multiplanar buffers); * @plane: index of the plane to be exported, 0 for single plane queues * @flags: flags for newly created file, currently only O_CLOEXEC is * supported, refer to manual of open syscall for more details * @fd: file descriptor associated with DMABUF (set by driver) * @reserved: drivers and applications must zero this array * * Contains data used for exporting a video buffer as DMABUF file descriptor. * The buffer is identified by a 'cookie' returned by VIDIOC_QUERYBUF * (identical to the cookie used to mmap() the buffer to userspace). All * reserved fields must be set to zero. The field reserved0 is expected to * become a structure 'type' allowing an alternative layout of the structure * content. Therefore this field should not be used for any other extensions. */ struct v4l2_exportbuffer { __u32 type; /* enum v4l2_buf_type */ __u32 index; __u32 plane; __u32 flags; __s32 fd; __u32 reserved[11]; }; /* * O V E R L A Y P R E V I E W */ struct v4l2_framebuffer { __u32 capability; __u32 flags; /* FIXME: in theory we should pass something like PCI device + memory * region + offset instead of some physical address */ void *base; struct { __u32 width; __u32 height; __u32 pixelformat; __u32 field; /* enum v4l2_field */ __u32 bytesperline; /* for padding, zero if unused */ __u32 sizeimage; __u32 colorspace; /* enum v4l2_colorspace */ __u32 priv; /* reserved field, set to 0 */ } fmt; }; /* Flags for the 'capability' field. Read only */ #define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001 #define V4L2_FBUF_CAP_CHROMAKEY 0x0002 #define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004 #define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008 #define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010 #define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020 #define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040 #define V4L2_FBUF_CAP_SRC_CHROMAKEY 0x0080 /* Flags for the 'flags' field. */ #define V4L2_FBUF_FLAG_PRIMARY 0x0001 #define V4L2_FBUF_FLAG_OVERLAY 0x0002 #define V4L2_FBUF_FLAG_CHROMAKEY 0x0004 #define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008 #define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010 #define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020 #define V4L2_FBUF_FLAG_SRC_CHROMAKEY 0x0040 struct v4l2_clip { struct v4l2_rect c; struct v4l2_clip *next; }; struct v4l2_window { struct v4l2_rect w; __u32 field; /* enum v4l2_field */ __u32 chromakey; struct v4l2_clip *clips; __u32 clipcount; void *bitmap; __u8 global_alpha; }; /* * C A P T U R E P A R A M E T E R S */ struct v4l2_captureparm { __u32 capability; /* Supported modes */ __u32 capturemode; /* Current mode */ struct v4l2_fract timeperframe; /* Time per frame in seconds */ __u32 extendedmode; /* Driver-specific extensions */ __u32 readbuffers; /* # of buffers for read */ __u32 reserved[4]; }; /* Flags for 'capability' and 'capturemode' fields */ #define V4L2_MODE_HIGHQUALITY 0x0001 /* High quality imaging mode */ #define V4L2_CAP_TIMEPERFRAME 0x1000 /* timeperframe field is supported */ struct v4l2_outputparm { __u32 capability; /* Supported modes */ __u32 outputmode; /* Current mode */ struct v4l2_fract timeperframe; /* Time per frame in seconds */ __u32 extendedmode; /* Driver-specific extensions */ __u32 writebuffers; /* # of buffers for write */ __u32 reserved[4]; }; /* * I N P U T I M A G E C R O P P I N G */ struct v4l2_cropcap { __u32 type; /* enum v4l2_buf_type */ struct v4l2_rect bounds; struct v4l2_rect defrect; struct v4l2_fract pixelaspect; }; struct v4l2_crop { __u32 type; /* enum v4l2_buf_type */ struct v4l2_rect c; }; /** * struct v4l2_selection - selection info * @type: buffer type (do not use *_MPLANE types) * @target: Selection target, used to choose one of possible rectangles; * defined in v4l2-common.h; V4L2_SEL_TGT_* . * @flags: constraints flags, defined in v4l2-common.h; V4L2_SEL_FLAG_*. * @r: coordinates of selection window * @reserved: for future use, rounds structure size to 64 bytes, set to zero * * Hardware may use multiple helper windows to process a video stream. * The structure is used to exchange this selection areas between * an application and a driver. */ struct v4l2_selection { __u32 type; __u32 target; __u32 flags; struct v4l2_rect r; __u32 reserved[9]; }; /* * A N A L O G V I D E O S T A N D A R D */ typedef __u64 v4l2_std_id; /* * Attention: Keep the V4L2_STD_* bit definitions in sync with * include/dt-bindings/display/sdtv-standards.h SDTV_STD_* bit definitions. */ /* one bit for each */ #define V4L2_STD_PAL_B ((v4l2_std_id)0x00000001) #define V4L2_STD_PAL_B1 ((v4l2_std_id)0x00000002) #define V4L2_STD_PAL_G ((v4l2_std_id)0x00000004) #define V4L2_STD_PAL_H ((v4l2_std_id)0x00000008) #define V4L2_STD_PAL_I ((v4l2_std_id)0x00000010) #define V4L2_STD_PAL_D ((v4l2_std_id)0x00000020) #define V4L2_STD_PAL_D1 ((v4l2_std_id)0x00000040) #define V4L2_STD_PAL_K ((v4l2_std_id)0x00000080) #define V4L2_STD_PAL_M ((v4l2_std_id)0x00000100) #define V4L2_STD_PAL_N ((v4l2_std_id)0x00000200) #define V4L2_STD_PAL_Nc ((v4l2_std_id)0x00000400) #define V4L2_STD_PAL_60 ((v4l2_std_id)0x00000800) #define V4L2_STD_NTSC_M ((v4l2_std_id)0x00001000) /* BTSC */ #define V4L2_STD_NTSC_M_JP ((v4l2_std_id)0x00002000) /* EIA-J */ #define V4L2_STD_NTSC_443 ((v4l2_std_id)0x00004000) #define V4L2_STD_NTSC_M_KR ((v4l2_std_id)0x00008000) /* FM A2 */ #define V4L2_STD_SECAM_B ((v4l2_std_id)0x00010000) #define V4L2_STD_SECAM_D ((v4l2_std_id)0x00020000) #define V4L2_STD_SECAM_G ((v4l2_std_id)0x00040000) #define V4L2_STD_SECAM_H ((v4l2_std_id)0x00080000) #define V4L2_STD_SECAM_K ((v4l2_std_id)0x00100000) #define V4L2_STD_SECAM_K1 ((v4l2_std_id)0x00200000) #define V4L2_STD_SECAM_L ((v4l2_std_id)0x00400000) #define V4L2_STD_SECAM_LC ((v4l2_std_id)0x00800000) /* ATSC/HDTV */ #define V4L2_STD_ATSC_8_VSB ((v4l2_std_id)0x01000000) #define V4L2_STD_ATSC_16_VSB ((v4l2_std_id)0x02000000) /* FIXME: Although std_id is 64 bits, there is an issue on PPC32 architecture that makes switch(__u64) to break. So, there's a hack on v4l2-common.c rounding this value to 32 bits. As, currently, the max value is for V4L2_STD_ATSC_16_VSB (30 bits wide), it should work fine. However, if needed to add more than two standards, v4l2-common.c should be fixed. */ /* * Some macros to merge video standards in order to make live easier for the * drivers and V4L2 applications */ /* * "Common" NTSC/M - It should be noticed that V4L2_STD_NTSC_443 is * Missing here. */ #define V4L2_STD_NTSC (V4L2_STD_NTSC_M |\ V4L2_STD_NTSC_M_JP |\ V4L2_STD_NTSC_M_KR) /* Secam macros */ #define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\ V4L2_STD_SECAM_K |\ V4L2_STD_SECAM_K1) /* All Secam Standards */ #define V4L2_STD_SECAM (V4L2_STD_SECAM_B |\ V4L2_STD_SECAM_G |\ V4L2_STD_SECAM_H |\ V4L2_STD_SECAM_DK |\ V4L2_STD_SECAM_L |\ V4L2_STD_SECAM_LC) /* PAL macros */ #define V4L2_STD_PAL_BG (V4L2_STD_PAL_B |\ V4L2_STD_PAL_B1 |\ V4L2_STD_PAL_G) #define V4L2_STD_PAL_DK (V4L2_STD_PAL_D |\ V4L2_STD_PAL_D1 |\ V4L2_STD_PAL_K) /* * "Common" PAL - This macro is there to be compatible with the old * V4L1 concept of "PAL": /BGDKHI. * Several PAL standards are missing here: /M, /N and /Nc */ #define V4L2_STD_PAL (V4L2_STD_PAL_BG |\ V4L2_STD_PAL_DK |\ V4L2_STD_PAL_H |\ V4L2_STD_PAL_I) /* Chroma "agnostic" standards */ #define V4L2_STD_B (V4L2_STD_PAL_B |\ V4L2_STD_PAL_B1 |\ V4L2_STD_SECAM_B) #define V4L2_STD_G (V4L2_STD_PAL_G |\ V4L2_STD_SECAM_G) #define V4L2_STD_H (V4L2_STD_PAL_H |\ V4L2_STD_SECAM_H) #define V4L2_STD_L (V4L2_STD_SECAM_L |\ V4L2_STD_SECAM_LC) #define V4L2_STD_GH (V4L2_STD_G |\ V4L2_STD_H) #define V4L2_STD_DK (V4L2_STD_PAL_DK |\ V4L2_STD_SECAM_DK) #define V4L2_STD_BG (V4L2_STD_B |\ V4L2_STD_G) #define V4L2_STD_MN (V4L2_STD_PAL_M |\ V4L2_STD_PAL_N |\ V4L2_STD_PAL_Nc |\ V4L2_STD_NTSC) /* Standards where MTS/BTSC stereo could be found */ #define V4L2_STD_MTS (V4L2_STD_NTSC_M |\ V4L2_STD_PAL_M |\ V4L2_STD_PAL_N |\ V4L2_STD_PAL_Nc) /* Standards for Countries with 60Hz Line frequency */ #define V4L2_STD_525_60 (V4L2_STD_PAL_M |\ V4L2_STD_PAL_60 |\ V4L2_STD_NTSC |\ V4L2_STD_NTSC_443) /* Standards for Countries with 50Hz Line frequency */ #define V4L2_STD_625_50 (V4L2_STD_PAL |\ V4L2_STD_PAL_N |\ V4L2_STD_PAL_Nc |\ V4L2_STD_SECAM) #define V4L2_STD_ATSC (V4L2_STD_ATSC_8_VSB |\ V4L2_STD_ATSC_16_VSB) /* Macros with none and all analog standards */ #define V4L2_STD_UNKNOWN 0 #define V4L2_STD_ALL (V4L2_STD_525_60 |\ V4L2_STD_625_50) struct v4l2_standard { __u32 index; v4l2_std_id id; __u8 name[24]; struct v4l2_fract frameperiod; /* Frames, not fields */ __u32 framelines; __u32 reserved[4]; }; /* * D V B T T I M I N G S */ /** struct v4l2_bt_timings - BT.656/BT.1120 timing data * @width: total width of the active video in pixels * @height: total height of the active video in lines * @interlaced: Interlaced or progressive * @polarities: Positive or negative polarities * @pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000 * @hfrontporch:Horizontal front porch in pixels * @hsync: Horizontal Sync length in pixels * @hbackporch: Horizontal back porch in pixels * @vfrontporch:Vertical front porch in lines * @vsync: Vertical Sync length in lines * @vbackporch: Vertical back porch in lines * @il_vfrontporch:Vertical front porch for the even field * (aka field 2) of interlaced field formats * @il_vsync: Vertical Sync length for the even field * (aka field 2) of interlaced field formats * @il_vbackporch:Vertical back porch for the even field * (aka field 2) of interlaced field formats * @standards: Standards the timing belongs to * @flags: Flags * @picture_aspect: The picture aspect ratio (hor/vert). * @cea861_vic: VIC code as per the CEA-861 standard. * @hdmi_vic: VIC code as per the HDMI standard. * @reserved: Reserved fields, must be zeroed. * * A note regarding vertical interlaced timings: height refers to the total * height of the active video frame (= two fields). The blanking timings refer * to the blanking of each field. So the height of the total frame is * calculated as follows: * * tot_height = height + vfrontporch + vsync + vbackporch + * il_vfrontporch + il_vsync + il_vbackporch * * The active height of each field is height / 2. */ struct v4l2_bt_timings { __u32 width; __u32 height; __u32 interlaced; __u32 polarities; __u64 pixelclock; __u32 hfrontporch; __u32 hsync; __u32 hbackporch; __u32 vfrontporch; __u32 vsync; __u32 vbackporch; __u32 il_vfrontporch; __u32 il_vsync; __u32 il_vbackporch; __u32 standards; __u32 flags; struct v4l2_fract picture_aspect; __u8 cea861_vic; __u8 hdmi_vic; __u8 reserved[46]; } __attribute__ ((packed)); /* Interlaced or progressive format */ #define V4L2_DV_PROGRESSIVE 0 #define V4L2_DV_INTERLACED 1 /* Polarities. If bit is not set, it is assumed to be negative polarity */ #define V4L2_DV_VSYNC_POS_POL 0x00000001 #define V4L2_DV_HSYNC_POS_POL 0x00000002 /* Timings standards */ #define V4L2_DV_BT_STD_CEA861 (1 << 0) /* CEA-861 Digital TV Profile */ #define V4L2_DV_BT_STD_DMT (1 << 1) /* VESA Discrete Monitor Timings */ #define V4L2_DV_BT_STD_CVT (1 << 2) /* VESA Coordinated Video Timings */ #define V4L2_DV_BT_STD_GTF (1 << 3) /* VESA Generalized Timings Formula */ #define V4L2_DV_BT_STD_SDI (1 << 4) /* SDI Timings */ /* Flags */ /* * CVT/GTF specific: timing uses reduced blanking (CVT) or the 'Secondary * GTF' curve (GTF). In both cases the horizontal and/or vertical blanking * intervals are reduced, allowing a higher resolution over the same * bandwidth. This is a read-only flag. */ #define V4L2_DV_FL_REDUCED_BLANKING (1 << 0) /* * CEA-861 specific: set for CEA-861 formats with a framerate of a multiple * of six. These formats can be optionally played at 1 / 1.001 speed. * This is a read-only flag. */ #define V4L2_DV_FL_CAN_REDUCE_FPS (1 << 1) /* * CEA-861 specific: only valid for video transmitters, the flag is cleared * by receivers. * If the framerate of the format is a multiple of six, then the pixelclock * used to set up the transmitter is divided by 1.001 to make it compatible * with 60 Hz based standards such as NTSC and PAL-M that use a framerate of * 29.97 Hz. Otherwise this flag is cleared. If the transmitter can't generate * such frequencies, then the flag will also be cleared. */ #define V4L2_DV_FL_REDUCED_FPS (1 << 2) /* * Specific to interlaced formats: if set, then field 1 is really one half-line * longer and field 2 is really one half-line shorter, so each field has * exactly the same number of half-lines. Whether half-lines can be detected * or used depends on the hardware. */ #define V4L2_DV_FL_HALF_LINE (1 << 3) /* * If set, then this is a Consumer Electronics (CE) video format. Such formats * differ from other formats (commonly called IT formats) in that if RGB * encoding is used then by default the RGB values use limited range (i.e. * use the range 16-235) as opposed to 0-255. All formats defined in CEA-861 * except for the 640x480 format are CE formats. */ #define V4L2_DV_FL_IS_CE_VIDEO (1 << 4) /* Some formats like SMPTE-125M have an interlaced signal with a odd * total height. For these formats, if this flag is set, the first * field has the extra line. If not, it is the second field. */ #define V4L2_DV_FL_FIRST_FIELD_EXTRA_LINE (1 << 5) /* * If set, then the picture_aspect field is valid. Otherwise assume that the * pixels are square, so the picture aspect ratio is the same as the width to * height ratio. */ #define V4L2_DV_FL_HAS_PICTURE_ASPECT (1 << 6) /* * If set, then the cea861_vic field is valid and contains the Video * Identification Code as per the CEA-861 standard. */ #define V4L2_DV_FL_HAS_CEA861_VIC (1 << 7) /* * If set, then the hdmi_vic field is valid and contains the Video * Identification Code as per the HDMI standard (HDMI Vendor Specific * InfoFrame). */ #define V4L2_DV_FL_HAS_HDMI_VIC (1 << 8) /* * CEA-861 specific: only valid for video receivers. * If set, then HW can detect the difference between regular FPS and * 1000/1001 FPS. Note: This flag is only valid for HDMI VIC codes with * the V4L2_DV_FL_CAN_REDUCE_FPS flag set. */ #define V4L2_DV_FL_CAN_DETECT_REDUCED_FPS (1 << 9) /* A few useful defines to calculate the total blanking and frame sizes */ #define V4L2_DV_BT_BLANKING_WIDTH(bt) \ ((bt)->hfrontporch + (bt)->hsync + (bt)->hbackporch) #define V4L2_DV_BT_FRAME_WIDTH(bt) \ ((bt)->width + V4L2_DV_BT_BLANKING_WIDTH(bt)) #define V4L2_DV_BT_BLANKING_HEIGHT(bt) \ ((bt)->vfrontporch + (bt)->vsync + (bt)->vbackporch + \ ((bt)->interlaced ? \ ((bt)->il_vfrontporch + (bt)->il_vsync + (bt)->il_vbackporch) : 0)) #define V4L2_DV_BT_FRAME_HEIGHT(bt) \ ((bt)->height + V4L2_DV_BT_BLANKING_HEIGHT(bt)) /** struct v4l2_dv_timings - DV timings * @type: the type of the timings * @bt: BT656/1120 timings */ struct v4l2_dv_timings { __u32 type; union { struct v4l2_bt_timings bt; __u32 reserved[32]; }; } __attribute__ ((packed)); /* Values for the type field */ #define V4L2_DV_BT_656_1120 0 /* BT.656/1120 timing type */ /** struct v4l2_enum_dv_timings - DV timings enumeration * @index: enumeration index * @pad: the pad number for which to enumerate timings (used with * v4l-subdev nodes only) * @reserved: must be zeroed * @timings: the timings for the given index */ struct v4l2_enum_dv_timings { __u32 index; __u32 pad; __u32 reserved[2]; struct v4l2_dv_timings timings; }; /** struct v4l2_bt_timings_cap - BT.656/BT.1120 timing capabilities * @min_width: width in pixels * @max_width: width in pixels * @min_height: height in lines * @max_height: height in lines * @min_pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000 * @max_pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000 * @standards: Supported standards * @capabilities: Supported capabilities * @reserved: Must be zeroed */ struct v4l2_bt_timings_cap { __u32 min_width; __u32 max_width; __u32 min_height; __u32 max_height; __u64 min_pixelclock; __u64 max_pixelclock; __u32 standards; __u32 capabilities; __u32 reserved[16]; } __attribute__ ((packed)); /* Supports interlaced formats */ #define V4L2_DV_BT_CAP_INTERLACED (1 << 0) /* Supports progressive formats */ #define V4L2_DV_BT_CAP_PROGRESSIVE (1 << 1) /* Supports CVT/GTF reduced blanking */ #define V4L2_DV_BT_CAP_REDUCED_BLANKING (1 << 2) /* Supports custom formats */ #define V4L2_DV_BT_CAP_CUSTOM (1 << 3) /** struct v4l2_dv_timings_cap - DV timings capabilities * @type: the type of the timings (same as in struct v4l2_dv_timings) * @pad: the pad number for which to query capabilities (used with * v4l-subdev nodes only) * @bt: the BT656/1120 timings capabilities */ struct v4l2_dv_timings_cap { __u32 type; __u32 pad; __u32 reserved[2]; union { struct v4l2_bt_timings_cap bt; __u32 raw_data[32]; }; }; /* * V I D E O I N P U T S */ struct v4l2_input { __u32 index; /* Which input */ __u8 name[32]; /* Label */ __u32 type; /* Type of input */ __u32 audioset; /* Associated audios (bitfield) */ __u32 tuner; /* Tuner index */ v4l2_std_id std; __u32 status; __u32 capabilities; __u32 reserved[3]; }; /* Values for the 'type' field */ #define V4L2_INPUT_TYPE_TUNER 1 #define V4L2_INPUT_TYPE_CAMERA 2 #define V4L2_INPUT_TYPE_TOUCH 3 /* field 'status' - general */ #define V4L2_IN_ST_NO_POWER 0x00000001 /* Attached device is off */ #define V4L2_IN_ST_NO_SIGNAL 0x00000002 #define V4L2_IN_ST_NO_COLOR 0x00000004 /* field 'status' - sensor orientation */ /* If sensor is mounted upside down set both bits */ #define V4L2_IN_ST_HFLIP 0x00000010 /* Frames are flipped horizontally */ #define V4L2_IN_ST_VFLIP 0x00000020 /* Frames are flipped vertically */ /* field 'status' - analog */ #define V4L2_IN_ST_NO_H_LOCK 0x00000100 /* No horizontal sync lock */ #define V4L2_IN_ST_COLOR_KILL 0x00000200 /* Color killer is active */ #define V4L2_IN_ST_NO_V_LOCK 0x00000400 /* No vertical sync lock */ #define V4L2_IN_ST_NO_STD_LOCK 0x00000800 /* No standard format lock */ /* field 'status' - digital */ #define V4L2_IN_ST_NO_SYNC 0x00010000 /* No synchronization lock */ #define V4L2_IN_ST_NO_EQU 0x00020000 /* No equalizer lock */ #define V4L2_IN_ST_NO_CARRIER 0x00040000 /* Carrier recovery failed */ /* field 'status' - VCR and set-top box */ #define V4L2_IN_ST_MACROVISION 0x01000000 /* Macrovision detected */ #define V4L2_IN_ST_NO_ACCESS 0x02000000 /* Conditional access denied */ #define V4L2_IN_ST_VTR 0x04000000 /* VTR time constant */ /* capabilities flags */ #define V4L2_IN_CAP_DV_TIMINGS 0x00000002 /* Supports S_DV_TIMINGS */ #define V4L2_IN_CAP_CUSTOM_TIMINGS V4L2_IN_CAP_DV_TIMINGS /* For compatibility */ #define V4L2_IN_CAP_STD 0x00000004 /* Supports S_STD */ #define V4L2_IN_CAP_NATIVE_SIZE 0x00000008 /* Supports setting native size */ /* * V I D E O O U T P U T S */ struct v4l2_output { __u32 index; /* Which output */ __u8 name[32]; /* Label */ __u32 type; /* Type of output */ __u32 audioset; /* Associated audios (bitfield) */ __u32 modulator; /* Associated modulator */ v4l2_std_id std; __u32 capabilities; __u32 reserved[3]; }; /* Values for the 'type' field */ #define V4L2_OUTPUT_TYPE_MODULATOR 1 #define V4L2_OUTPUT_TYPE_ANALOG 2 #define V4L2_OUTPUT_TYPE_ANALOGVGAOVERLAY 3 /* capabilities flags */ #define V4L2_OUT_CAP_DV_TIMINGS 0x00000002 /* Supports S_DV_TIMINGS */ #define V4L2_OUT_CAP_CUSTOM_TIMINGS V4L2_OUT_CAP_DV_TIMINGS /* For compatibility */ #define V4L2_OUT_CAP_STD 0x00000004 /* Supports S_STD */ #define V4L2_OUT_CAP_NATIVE_SIZE 0x00000008 /* Supports setting native size */ /* * C O N T R O L S */ struct v4l2_control { __u32 id; __s32 value; }; struct v4l2_ext_control { __u32 id; __u32 size; __u32 reserved2[1]; union { __s32 value; __s64 value64; char *string; __u8 *p_u8; __u16 *p_u16; __u32 *p_u32; __s32 *p_s32; __s64 *p_s64; struct v4l2_area *p_area; struct v4l2_ctrl_h264_sps *p_h264_sps; struct v4l2_ctrl_h264_pps *p_h264_pps; struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix; struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights; struct v4l2_ctrl_h264_slice_params *p_h264_slice_params; struct v4l2_ctrl_h264_decode_params *p_h264_decode_params; struct v4l2_ctrl_fwht_params *p_fwht_params; struct v4l2_ctrl_vp8_frame *p_vp8_frame; struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence; struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture; struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quantisation; struct v4l2_ctrl_vp9_compressed_hdr *p_vp9_compressed_hdr_probs; struct v4l2_ctrl_vp9_frame *p_vp9_frame; struct v4l2_ctrl_hevc_sps *p_hevc_sps; struct v4l2_ctrl_hevc_pps *p_hevc_pps; struct v4l2_ctrl_hevc_slice_params *p_hevc_slice_params; struct v4l2_ctrl_hevc_scaling_matrix *p_hevc_scaling_matrix; struct v4l2_ctrl_hevc_decode_params *p_hevc_decode_params; struct v4l2_ctrl_av1_sequence *p_av1_sequence; struct v4l2_ctrl_av1_tile_group_entry *p_av1_tile_group_entry; struct v4l2_ctrl_av1_frame *p_av1_frame; struct v4l2_ctrl_av1_film_grain *p_av1_film_grain; struct v4l2_ctrl_hdr10_cll_info *p_hdr10_cll_info; struct v4l2_ctrl_hdr10_mastering_display *p_hdr10_mastering_display; void *ptr; }; } __attribute__ ((packed)); struct v4l2_ext_controls { union { __u32 ctrl_class; __u32 which; }; __u32 count; __u32 error_idx; __s32 request_fd; __u32 reserved[1]; struct v4l2_ext_control *controls; }; #define V4L2_CTRL_ID_MASK (0x0fffffff) #define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL) #define V4L2_CTRL_ID2WHICH(id) ((id) & 0x0fff0000UL) #define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000) #define V4L2_CTRL_MAX_DIMS (4) #define V4L2_CTRL_WHICH_CUR_VAL 0 #define V4L2_CTRL_WHICH_DEF_VAL 0x0f000000 #define V4L2_CTRL_WHICH_REQUEST_VAL 0x0f010000 enum v4l2_ctrl_type { V4L2_CTRL_TYPE_INTEGER = 1, V4L2_CTRL_TYPE_BOOLEAN = 2, V4L2_CTRL_TYPE_MENU = 3, V4L2_CTRL_TYPE_BUTTON = 4, V4L2_CTRL_TYPE_INTEGER64 = 5, V4L2_CTRL_TYPE_CTRL_CLASS = 6, V4L2_CTRL_TYPE_STRING = 7, V4L2_CTRL_TYPE_BITMASK = 8, V4L2_CTRL_TYPE_INTEGER_MENU = 9, /* Compound types are >= 0x0100 */ V4L2_CTRL_COMPOUND_TYPES = 0x0100, V4L2_CTRL_TYPE_U8 = 0x0100, V4L2_CTRL_TYPE_U16 = 0x0101, V4L2_CTRL_TYPE_U32 = 0x0102, V4L2_CTRL_TYPE_AREA = 0x0106, V4L2_CTRL_TYPE_HDR10_CLL_INFO = 0x0110, V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY = 0x0111, V4L2_CTRL_TYPE_H264_SPS = 0x0200, V4L2_CTRL_TYPE_H264_PPS = 0x0201, V4L2_CTRL_TYPE_H264_SCALING_MATRIX = 0x0202, V4L2_CTRL_TYPE_H264_SLICE_PARAMS = 0x0203, V4L2_CTRL_TYPE_H264_DECODE_PARAMS = 0x0204, V4L2_CTRL_TYPE_H264_PRED_WEIGHTS = 0x0205, V4L2_CTRL_TYPE_FWHT_PARAMS = 0x0220, V4L2_CTRL_TYPE_VP8_FRAME = 0x0240, V4L2_CTRL_TYPE_MPEG2_QUANTISATION = 0x0250, V4L2_CTRL_TYPE_MPEG2_SEQUENCE = 0x0251, V4L2_CTRL_TYPE_MPEG2_PICTURE = 0x0252, V4L2_CTRL_TYPE_VP9_COMPRESSED_HDR = 0x0260, V4L2_CTRL_TYPE_VP9_FRAME = 0x0261, V4L2_CTRL_TYPE_HEVC_SPS = 0x0270, V4L2_CTRL_TYPE_HEVC_PPS = 0x0271, V4L2_CTRL_TYPE_HEVC_SLICE_PARAMS = 0x0272, V4L2_CTRL_TYPE_HEVC_SCALING_MATRIX = 0x0273, V4L2_CTRL_TYPE_HEVC_DECODE_PARAMS = 0x0274, V4L2_CTRL_TYPE_AV1_SEQUENCE = 0x280, V4L2_CTRL_TYPE_AV1_TILE_GROUP_ENTRY = 0x281, V4L2_CTRL_TYPE_AV1_FRAME = 0x282, V4L2_CTRL_TYPE_AV1_FILM_GRAIN = 0x283, }; /* Used in the VIDIOC_QUERYCTRL ioctl for querying controls */ struct v4l2_queryctrl { __u32 id; __u32 type; /* enum v4l2_ctrl_type */ __u8 name[32]; /* Whatever */ __s32 minimum; /* Note signedness */ __s32 maximum; __s32 step; __s32 default_value; __u32 flags; __u32 reserved[2]; }; /* Used in the VIDIOC_QUERY_EXT_CTRL ioctl for querying extended controls */ struct v4l2_query_ext_ctrl { __u32 id; __u32 type; char name[32]; __s64 minimum; __s64 maximum; __u64 step; __s64 default_value; __u32 flags; __u32 elem_size; __u32 elems; __u32 nr_of_dims; __u32 dims[V4L2_CTRL_MAX_DIMS]; __u32 reserved[32]; }; /* Used in the VIDIOC_QUERYMENU ioctl for querying menu items */ struct v4l2_querymenu { __u32 id; __u32 index; union { __u8 name[32]; /* Whatever */ __s64 value; }; __u32 reserved; } __attribute__ ((packed)); /* Control flags */ #define V4L2_CTRL_FLAG_DISABLED 0x0001 #define V4L2_CTRL_FLAG_GRABBED 0x0002 #define V4L2_CTRL_FLAG_READ_ONLY 0x0004 #define V4L2_CTRL_FLAG_UPDATE 0x0008 #define V4L2_CTRL_FLAG_INACTIVE 0x0010 #define V4L2_CTRL_FLAG_SLIDER 0x0020 #define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040 #define V4L2_CTRL_FLAG_VOLATILE 0x0080 #define V4L2_CTRL_FLAG_HAS_PAYLOAD 0x0100 #define V4L2_CTRL_FLAG_EXECUTE_ON_WRITE 0x0200 #define V4L2_CTRL_FLAG_MODIFY_LAYOUT 0x0400 #define V4L2_CTRL_FLAG_DYNAMIC_ARRAY 0x0800 /* Query flags, to be ORed with the control ID */ #define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000 #define V4L2_CTRL_FLAG_NEXT_COMPOUND 0x40000000 /* User-class control IDs defined by V4L2 */ #define V4L2_CID_MAX_CTRLS 1024 /* IDs reserved for driver specific controls */ #define V4L2_CID_PRIVATE_BASE 0x08000000 /* * T U N I N G */ struct v4l2_tuner { __u32 index; __u8 name[32]; __u32 type; /* enum v4l2_tuner_type */ __u32 capability; __u32 rangelow; __u32 rangehigh; __u32 rxsubchans; __u32 audmode; __s32 signal; __s32 afc; __u32 reserved[4]; }; struct v4l2_modulator { __u32 index; __u8 name[32]; __u32 capability; __u32 rangelow; __u32 rangehigh; __u32 txsubchans; __u32 type; /* enum v4l2_tuner_type */ __u32 reserved[3]; }; /* Flags for the 'capability' field */ #define V4L2_TUNER_CAP_LOW 0x0001 #define V4L2_TUNER_CAP_NORM 0x0002 #define V4L2_TUNER_CAP_HWSEEK_BOUNDED 0x0004 #define V4L2_TUNER_CAP_HWSEEK_WRAP 0x0008 #define V4L2_TUNER_CAP_STEREO 0x0010 #define V4L2_TUNER_CAP_LANG2 0x0020 #define V4L2_TUNER_CAP_SAP 0x0020 #define V4L2_TUNER_CAP_LANG1 0x0040 #define V4L2_TUNER_CAP_RDS 0x0080 #define V4L2_TUNER_CAP_RDS_BLOCK_IO 0x0100 #define V4L2_TUNER_CAP_RDS_CONTROLS 0x0200 #define V4L2_TUNER_CAP_FREQ_BANDS 0x0400 #define V4L2_TUNER_CAP_HWSEEK_PROG_LIM 0x0800 #define V4L2_TUNER_CAP_1HZ 0x1000 /* Flags for the 'rxsubchans' field */ #define V4L2_TUNER_SUB_MONO 0x0001 #define V4L2_TUNER_SUB_STEREO 0x0002 #define V4L2_TUNER_SUB_LANG2 0x0004 #define V4L2_TUNER_SUB_SAP 0x0004 #define V4L2_TUNER_SUB_LANG1 0x0008 #define V4L2_TUNER_SUB_RDS 0x0010 /* Values for the 'audmode' field */ #define V4L2_TUNER_MODE_MONO 0x0000 #define V4L2_TUNER_MODE_STEREO 0x0001 #define V4L2_TUNER_MODE_LANG2 0x0002 #define V4L2_TUNER_MODE_SAP 0x0002 #define V4L2_TUNER_MODE_LANG1 0x0003 #define V4L2_TUNER_MODE_LANG1_LANG2 0x0004 struct v4l2_frequency { __u32 tuner; __u32 type; /* enum v4l2_tuner_type */ __u32 frequency; __u32 reserved[8]; }; #define V4L2_BAND_MODULATION_VSB (1 << 1) #define V4L2_BAND_MODULATION_FM (1 << 2) #define V4L2_BAND_MODULATION_AM (1 << 3) struct v4l2_frequency_band { __u32 tuner; __u32 type; /* enum v4l2_tuner_type */ __u32 index; __u32 capability; __u32 rangelow; __u32 rangehigh; __u32 modulation; __u32 reserved[9]; }; struct v4l2_hw_freq_seek { __u32 tuner; __u32 type; /* enum v4l2_tuner_type */ __u32 seek_upward; __u32 wrap_around; __u32 spacing; __u32 rangelow; __u32 rangehigh; __u32 reserved[5]; }; /* * R D S */ struct v4l2_rds_data { __u8 lsb; __u8 msb; __u8 block; } __attribute__ ((packed)); #define V4L2_RDS_BLOCK_MSK 0x7 #define V4L2_RDS_BLOCK_A 0 #define V4L2_RDS_BLOCK_B 1 #define V4L2_RDS_BLOCK_C 2 #define V4L2_RDS_BLOCK_D 3 #define V4L2_RDS_BLOCK_C_ALT 4 #define V4L2_RDS_BLOCK_INVALID 7 #define V4L2_RDS_BLOCK_CORRECTED 0x40 #define V4L2_RDS_BLOCK_ERROR 0x80 /* * A U D I O */ struct v4l2_audio { __u32 index; __u8 name[32]; __u32 capability; __u32 mode; __u32 reserved[2]; }; /* Flags for the 'capability' field */ #define V4L2_AUDCAP_STEREO 0x00001 #define V4L2_AUDCAP_AVL 0x00002 /* Flags for the 'mode' field */ #define V4L2_AUDMODE_AVL 0x00001 struct v4l2_audioout { __u32 index; __u8 name[32]; __u32 capability; __u32 mode; __u32 reserved[2]; }; /* * M P E G S E R V I C E S */ #if 1 #define V4L2_ENC_IDX_FRAME_I (0) #define V4L2_ENC_IDX_FRAME_P (1) #define V4L2_ENC_IDX_FRAME_B (2) #define V4L2_ENC_IDX_FRAME_MASK (0xf) struct v4l2_enc_idx_entry { __u64 offset; __u64 pts; __u32 length; __u32 flags; __u32 reserved[2]; }; #define V4L2_ENC_IDX_ENTRIES (64) struct v4l2_enc_idx { __u32 entries; __u32 entries_cap; __u32 reserved[4]; struct v4l2_enc_idx_entry entry[V4L2_ENC_IDX_ENTRIES]; }; #define V4L2_ENC_CMD_START (0) #define V4L2_ENC_CMD_STOP (1) #define V4L2_ENC_CMD_PAUSE (2) #define V4L2_ENC_CMD_RESUME (3) /* Flags for V4L2_ENC_CMD_STOP */ #define V4L2_ENC_CMD_STOP_AT_GOP_END (1 << 0) struct v4l2_encoder_cmd { __u32 cmd; __u32 flags; union { struct { __u32 data[8]; } raw; }; }; /* Decoder commands */ #define V4L2_DEC_CMD_START (0) #define V4L2_DEC_CMD_STOP (1) #define V4L2_DEC_CMD_PAUSE (2) #define V4L2_DEC_CMD_RESUME (3) #define V4L2_DEC_CMD_FLUSH (4) /* Flags for V4L2_DEC_CMD_START */ #define V4L2_DEC_CMD_START_MUTE_AUDIO (1 << 0) /* Flags for V4L2_DEC_CMD_PAUSE */ #define V4L2_DEC_CMD_PAUSE_TO_BLACK (1 << 0) /* Flags for V4L2_DEC_CMD_STOP */ #define V4L2_DEC_CMD_STOP_TO_BLACK (1 << 0) #define V4L2_DEC_CMD_STOP_IMMEDIATELY (1 << 1) /* Play format requirements (returned by the driver): */ /* The decoder has no special format requirements */ #define V4L2_DEC_START_FMT_NONE (0) /* The decoder requires full GOPs */ #define V4L2_DEC_START_FMT_GOP (1) /* The structure must be zeroed before use by the application This ensures it can be extended safely in the future. */ struct v4l2_decoder_cmd { __u32 cmd; __u32 flags; union { struct { __u64 pts; } stop; struct { /* 0 or 1000 specifies normal speed, 1 specifies forward single stepping, -1 specifies backward single stepping, >1: playback at speed/1000 of the normal speed, <-1: reverse playback at (-speed/1000) of the normal speed. */ __s32 speed; __u32 format; } start; struct { __u32 data[16]; } raw; }; }; #endif /* * D A T A S E R V I C E S ( V B I ) * * Data services API by Michael Schimek */ /* Raw VBI */ struct v4l2_vbi_format { __u32 sampling_rate; /* in 1 Hz */ __u32 offset; __u32 samples_per_line; __u32 sample_format; /* V4L2_PIX_FMT_* */ __s32 start[2]; __u32 count[2]; __u32 flags; /* V4L2_VBI_* */ __u32 reserved[2]; /* must be zero */ }; /* VBI flags */ #define V4L2_VBI_UNSYNC (1 << 0) #define V4L2_VBI_INTERLACED (1 << 1) /* ITU-R start lines for each field */ #define V4L2_VBI_ITU_525_F1_START (1) #define V4L2_VBI_ITU_525_F2_START (264) #define V4L2_VBI_ITU_625_F1_START (1) #define V4L2_VBI_ITU_625_F2_START (314) /* Sliced VBI * * This implements is a proposal V4L2 API to allow SLICED VBI * required for some hardware encoders. It should change without * notice in the definitive implementation. */ struct v4l2_sliced_vbi_format { __u16 service_set; /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field (equals frame lines 313-336 for 625 line video standards, 263-286 for 525 line standards) */ __u16 service_lines[2][24]; __u32 io_size; __u32 reserved[2]; /* must be zero */ }; /* Teletext World System Teletext (WST), defined on ITU-R BT.653-2 */ #define V4L2_SLICED_TELETEXT_B (0x0001) /* Video Program System, defined on ETS 300 231*/ #define V4L2_SLICED_VPS (0x0400) /* Closed Caption, defined on EIA-608 */ #define V4L2_SLICED_CAPTION_525 (0x1000) /* Wide Screen System, defined on ITU-R BT1119.1 */ #define V4L2_SLICED_WSS_625 (0x4000) #define V4L2_SLICED_VBI_525 (V4L2_SLICED_CAPTION_525) #define V4L2_SLICED_VBI_625 (V4L2_SLICED_TELETEXT_B | V4L2_SLICED_VPS | V4L2_SLICED_WSS_625) struct v4l2_sliced_vbi_cap { __u16 service_set; /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field (equals frame lines 313-336 for 625 line video standards, 263-286 for 525 line standards) */ __u16 service_lines[2][24]; __u32 type; /* enum v4l2_buf_type */ __u32 reserved[3]; /* must be 0 */ }; struct v4l2_sliced_vbi_data { __u32 id; __u32 field; /* 0: first field, 1: second field */ __u32 line; /* 1-23 */ __u32 reserved; /* must be 0 */ __u8 data[48]; }; /* * Sliced VBI data inserted into MPEG Streams */ /* * V4L2_MPEG_STREAM_VBI_FMT_IVTV: * * Structure of payload contained in an MPEG 2 Private Stream 1 PES Packet in an * MPEG-2 Program Pack that contains V4L2_MPEG_STREAM_VBI_FMT_IVTV Sliced VBI * data * * Note, the MPEG-2 Program Pack and Private Stream 1 PES packet header * definitions are not included here. See the MPEG-2 specifications for details * on these headers. */ /* Line type IDs */ #define V4L2_MPEG_VBI_IVTV_TELETEXT_B (1) #define V4L2_MPEG_VBI_IVTV_CAPTION_525 (4) #define V4L2_MPEG_VBI_IVTV_WSS_625 (5) #define V4L2_MPEG_VBI_IVTV_VPS (7) struct v4l2_mpeg_vbi_itv0_line { __u8 id; /* One of V4L2_MPEG_VBI_IVTV_* above */ __u8 data[42]; /* Sliced VBI data for the line */ } __attribute__ ((packed)); struct v4l2_mpeg_vbi_itv0 { __le32 linemask[2]; /* Bitmasks of VBI service lines present */ struct v4l2_mpeg_vbi_itv0_line line[35]; } __attribute__ ((packed)); struct v4l2_mpeg_vbi_ITV0 { struct v4l2_mpeg_vbi_itv0_line line[36]; } __attribute__ ((packed)); #define V4L2_MPEG_VBI_IVTV_MAGIC0 "itv0" #define V4L2_MPEG_VBI_IVTV_MAGIC1 "ITV0" struct v4l2_mpeg_vbi_fmt_ivtv { __u8 magic[4]; union { struct v4l2_mpeg_vbi_itv0 itv0; struct v4l2_mpeg_vbi_ITV0 ITV0; }; } __attribute__ ((packed)); /* * A G G R E G A T E S T R U C T U R E S */ /** * struct v4l2_plane_pix_format - additional, per-plane format definition * @sizeimage: maximum size in bytes required for data, for which * this plane will be used * @bytesperline: distance in bytes between the leftmost pixels in two * adjacent lines * @reserved: drivers and applications must zero this array */ struct v4l2_plane_pix_format { __u32 sizeimage; __u32 bytesperline; __u16 reserved[6]; } __attribute__ ((packed)); /** * struct v4l2_pix_format_mplane - multiplanar format definition * @width: image width in pixels * @height: image height in pixels * @pixelformat: little endian four character code (fourcc) * @field: enum v4l2_field; field order (for interlaced video) * @colorspace: enum v4l2_colorspace; supplemental to pixelformat * @plane_fmt: per-plane information * @num_planes: number of planes for this format * @flags: format flags (V4L2_PIX_FMT_FLAG_*) * @ycbcr_enc: enum v4l2_ycbcr_encoding, Y'CbCr encoding * @hsv_enc: enum v4l2_hsv_encoding, HSV encoding * @quantization: enum v4l2_quantization, colorspace quantization * @xfer_func: enum v4l2_xfer_func, colorspace transfer function * @reserved: drivers and applications must zero this array */ struct v4l2_pix_format_mplane { __u32 width; __u32 height; __u32 pixelformat; __u32 field; __u32 colorspace; struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES]; __u8 num_planes; __u8 flags; union { __u8 ycbcr_enc; __u8 hsv_enc; }; __u8 quantization; __u8 xfer_func; __u8 reserved[7]; } __attribute__ ((packed)); /** * struct v4l2_sdr_format - SDR format definition * @pixelformat: little endian four character code (fourcc) * @buffersize: maximum size in bytes required for data * @reserved: drivers and applications must zero this array */ struct v4l2_sdr_format { __u32 pixelformat; __u32 buffersize; __u8 reserved[24]; } __attribute__ ((packed)); /** * struct v4l2_meta_format - metadata format definition * @dataformat: little endian four character code (fourcc) * @buffersize: maximum size in bytes required for data */ struct v4l2_meta_format { __u32 dataformat; __u32 buffersize; } __attribute__ ((packed)); /** * struct v4l2_format - stream data format * @type: enum v4l2_buf_type; type of the data stream * @pix: definition of an image format * @pix_mp: definition of a multiplanar image format * @win: definition of an overlaid image * @vbi: raw VBI capture or output parameters * @sliced: sliced VBI capture or output parameters * @raw_data: placeholder for future extensions and custom formats * @fmt: union of @pix, @pix_mp, @win, @vbi, @sliced, @sdr, @meta * and @raw_data */ struct v4l2_format { __u32 type; union { struct v4l2_pix_format pix; /* V4L2_BUF_TYPE_VIDEO_CAPTURE */ struct v4l2_pix_format_mplane pix_mp; /* V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE */ struct v4l2_window win; /* V4L2_BUF_TYPE_VIDEO_OVERLAY */ struct v4l2_vbi_format vbi; /* V4L2_BUF_TYPE_VBI_CAPTURE */ struct v4l2_sliced_vbi_format sliced; /* V4L2_BUF_TYPE_SLICED_VBI_CAPTURE */ struct v4l2_sdr_format sdr; /* V4L2_BUF_TYPE_SDR_CAPTURE */ struct v4l2_meta_format meta; /* V4L2_BUF_TYPE_META_CAPTURE */ __u8 raw_data[200]; /* user-defined */ } fmt; }; /* Stream type-dependent parameters */ struct v4l2_streamparm { __u32 type; /* enum v4l2_buf_type */ union { struct v4l2_captureparm capture; struct v4l2_outputparm output; __u8 raw_data[200]; /* user-defined */ } parm; }; /* * E V E N T S */ #define V4L2_EVENT_ALL 0 #define V4L2_EVENT_VSYNC 1 #define V4L2_EVENT_EOS 2 #define V4L2_EVENT_CTRL 3 #define V4L2_EVENT_FRAME_SYNC 4 #define V4L2_EVENT_SOURCE_CHANGE 5 #define V4L2_EVENT_MOTION_DET 6 #define V4L2_EVENT_PRIVATE_START 0x08000000 /* Payload for V4L2_EVENT_VSYNC */ struct v4l2_event_vsync { /* Can be V4L2_FIELD_ANY, _NONE, _TOP or _BOTTOM */ __u8 field; } __attribute__ ((packed)); /* Payload for V4L2_EVENT_CTRL */ #define V4L2_EVENT_CTRL_CH_VALUE (1 << 0) #define V4L2_EVENT_CTRL_CH_FLAGS (1 << 1) #define V4L2_EVENT_CTRL_CH_RANGE (1 << 2) #define V4L2_EVENT_CTRL_CH_DIMENSIONS (1 << 3) struct v4l2_event_ctrl { __u32 changes; __u32 type; union { __s32 value; __s64 value64; }; __u32 flags; __s32 minimum; __s32 maximum; __s32 step; __s32 default_value; }; struct v4l2_event_frame_sync { __u32 frame_sequence; }; #define V4L2_EVENT_SRC_CH_RESOLUTION (1 << 0) struct v4l2_event_src_change { __u32 changes; }; #define V4L2_EVENT_MD_FL_HAVE_FRAME_SEQ (1 << 0) /** * struct v4l2_event_motion_det - motion detection event * @flags: if V4L2_EVENT_MD_FL_HAVE_FRAME_SEQ is set, then the * frame_sequence field is valid. * @frame_sequence: the frame sequence number associated with this event. * @region_mask: which regions detected motion. */ struct v4l2_event_motion_det { __u32 flags; __u32 frame_sequence; __u32 region_mask; }; struct v4l2_event { __u32 type; union { struct v4l2_event_vsync vsync; struct v4l2_event_ctrl ctrl; struct v4l2_event_frame_sync frame_sync; struct v4l2_event_src_change src_change; struct v4l2_event_motion_det motion_det; __u8 data[64]; } u; __u32 pending; __u32 sequence; struct timespec timestamp; __u32 id; __u32 reserved[8]; }; #define V4L2_EVENT_SUB_FL_SEND_INITIAL (1 << 0) #define V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK (1 << 1) struct v4l2_event_subscription { __u32 type; __u32 id; __u32 flags; __u32 reserved[5]; }; /* * A D V A N C E D D E B U G G I N G * * NOTE: EXPERIMENTAL API, NEVER RELY ON THIS IN APPLICATIONS! * FOR DEBUGGING, TESTING AND INTERNAL USE ONLY! */ /* VIDIOC_DBG_G_REGISTER and VIDIOC_DBG_S_REGISTER */ #define V4L2_CHIP_MATCH_BRIDGE 0 /* Match against chip ID on the bridge (0 for the bridge) */ #define V4L2_CHIP_MATCH_SUBDEV 4 /* Match against subdev index */ /* The following four defines are no longer in use */ #define V4L2_CHIP_MATCH_HOST V4L2_CHIP_MATCH_BRIDGE #define V4L2_CHIP_MATCH_I2C_DRIVER 1 /* Match against I2C driver name */ #define V4L2_CHIP_MATCH_I2C_ADDR 2 /* Match against I2C 7-bit address */ #define V4L2_CHIP_MATCH_AC97 3 /* Match against ancillary AC97 chip */ struct v4l2_dbg_match { __u32 type; /* Match type */ union { /* Match this chip, meaning determined by type */ __u32 addr; char name[32]; }; } __attribute__ ((packed)); struct v4l2_dbg_register { struct v4l2_dbg_match match; __u32 size; /* register size in bytes */ __u64 reg; __u64 val; } __attribute__ ((packed)); #define V4L2_CHIP_FL_READABLE (1 << 0) #define V4L2_CHIP_FL_WRITABLE (1 << 1) /* VIDIOC_DBG_G_CHIP_INFO */ struct v4l2_dbg_chip_info { struct v4l2_dbg_match match; char name[32]; __u32 flags; __u32 reserved[32]; } __attribute__ ((packed)); /** * struct v4l2_create_buffers - VIDIOC_CREATE_BUFS argument * @index: on return, index of the first created buffer * @count: entry: number of requested buffers, * return: number of created buffers * @memory: enum v4l2_memory; buffer memory type * @format: frame format, for which buffers are requested * @capabilities: capabilities of this buffer type. * @flags: additional buffer management attributes (ignored unless the * queue has V4L2_BUF_CAP_SUPPORTS_MMAP_CACHE_HINTS capability * and configured for MMAP streaming I/O). * @max_num_buffers: if V4L2_BUF_CAP_SUPPORTS_MAX_NUM_BUFFERS capability flag is set * this field indicate the maximum possible number of buffers * for this queue. * @reserved: future extensions */ struct v4l2_create_buffers { __u32 index; __u32 count; __u32 memory; struct v4l2_format format; __u32 capabilities; __u32 flags; __u32 max_num_buffers; __u32 reserved[5]; }; /* * I O C T L C O D E S F O R V I D E O D E V I C E S * */ #define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability) #define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc) #define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format) #define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format) #define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers) #define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer) #define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer) #define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer) #define VIDIOC_OVERLAY _IOW('V', 14, int) #define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer) #define VIDIOC_EXPBUF _IOWR('V', 16, struct v4l2_exportbuffer) #define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer) #define VIDIOC_STREAMON _IOW('V', 18, int) #define VIDIOC_STREAMOFF _IOW('V', 19, int) #define VIDIOC_G_PARM _IOWR('V', 21, struct v4l2_streamparm) #define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm) #define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id) #define VIDIOC_S_STD _IOW('V', 24, v4l2_std_id) #define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard) #define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input) #define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control) #define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control) #define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner) #define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner) #define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio) #define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio) #define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl) #define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu) #define VIDIOC_G_INPUT _IOR('V', 38, int) #define VIDIOC_S_INPUT _IOWR('V', 39, int) #define VIDIOC_G_EDID _IOWR('V', 40, struct v4l2_edid) #define VIDIOC_S_EDID _IOWR('V', 41, struct v4l2_edid) #define VIDIOC_G_OUTPUT _IOR('V', 46, int) #define VIDIOC_S_OUTPUT _IOWR('V', 47, int) #define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output) #define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout) #define VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout) #define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator) #define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator) #define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency) #define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency) #define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap) #define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop) #define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop) #define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression) #define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression) #define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id) #define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format) #define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio) #define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout) #define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) /* enum v4l2_priority */ #define VIDIOC_S_PRIORITY _IOW('V', 68, __u32) /* enum v4l2_priority */ #define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap) #define VIDIOC_LOG_STATUS _IO('V', 70) #define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls) #define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls) #define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls) #define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum) #define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum) #define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx) #define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd) #define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd) /* * Experimental, meant for debugging, testing and internal use. * Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined. * You must be root to use these ioctls. Never use these in applications! */ #define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register) #define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register) #define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek) #define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings) #define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings) #define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event) #define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription) #define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription) #define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers) #define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer) #define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection) #define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection) #define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd) #define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd) #define VIDIOC_ENUM_DV_TIMINGS _IOWR('V', 98, struct v4l2_enum_dv_timings) #define VIDIOC_QUERY_DV_TIMINGS _IOR('V', 99, struct v4l2_dv_timings) #define VIDIOC_DV_TIMINGS_CAP _IOWR('V', 100, struct v4l2_dv_timings_cap) #define VIDIOC_ENUM_FREQ_BANDS _IOWR('V', 101, struct v4l2_frequency_band) /* * Experimental, meant for debugging, testing and internal use. * Never use this in applications! */ #define VIDIOC_DBG_G_CHIP_INFO _IOWR('V', 102, struct v4l2_dbg_chip_info) #define VIDIOC_QUERY_EXT_CTRL _IOWR('V', 103, struct v4l2_query_ext_ctrl) /* Reminder: when adding new ioctls please add support for them to drivers/media/v4l2-core/v4l2-compat-ioctl32.c as well! */ #define BASE_VIDIOC_PRIVATE 192 /* 192-255 are private */ /* Deprecated definitions kept for backwards compatibility */ #define V4L2_PIX_FMT_HM12 V4L2_PIX_FMT_NV12_16L16 #define V4L2_PIX_FMT_SUNXI_TILED_NV12 V4L2_PIX_FMT_NV12_32L32 /* * This capability was never implemented, anyone using this cap should drop it * from their code. */ #define V4L2_CAP_ASYNCIO 0x02000000 #endif /* __LINUX_VIDEODEV2_H */ yavta-0.0+git20250410.3e445c7/meson.build000066400000000000000000000021761477577134200172600ustar00rootroot00000000000000# SPDX-License-Identifier: CC0-1.0 project('yavta', 'c', meson_version : '>= 0.40', version : '0.0.0', default_options : [ 'werror=true', 'warning_level=2', ], license : 'GPL 2.0+') # # Configure the build environment # cc = meson.get_compiler('c') cc_arguments = [ '-Wshadow', ] if cc.get_id() == 'clang' # Turn _FORTIFY_SOURCE by default on optimised builds (as it requires -O1 # or higher). This is needed on clang only as gcc enables it by default. if get_option('optimization') != '0' cc_arguments += [ '-D_FORTIFY_SOURCE=2', ] endif endif add_project_arguments(cc_arguments, language : 'c') # # yavta # yavta_dependencies = [] if not cc.has_function('clock_gettime') # On glibc older than 2.17, clock_gettime is provided by time.h and -lrt yavta_dependencies += [cc.find_library('rt')] endif yavta_sources = files([ 'yavta.c', ]) yavta = executable('yavta', yavta_sources, include_directories : include_directories('include'), dependencies : yavta_dependencies, install : true) yavta-0.0+git20250410.3e445c7/yavta.c000066400000000000000000002163241477577134200164100ustar00rootroot00000000000000// SPDX-License-Identifier: GPL-2.0-or-later /* * yavta -- Yet Another V4L2 Test Application * * Copyright (C) 2005-2025 Laurent Pinchart */ #define __STDC_FORMAT_MACROS #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #define ARRAY_SIZE(a) (sizeof(a)/sizeof((a)[0])) enum buffer_fill_mode { BUFFER_FILL_NONE = 0, BUFFER_FILL_FRAME = 1 << 0, BUFFER_FILL_PADDING = 1 << 1, }; struct buffer { unsigned int idx; unsigned int padding[VIDEO_MAX_PLANES]; unsigned int size[VIDEO_MAX_PLANES]; void *mem[VIDEO_MAX_PLANES]; }; struct device { int fd; int opened; enum v4l2_buf_type type; enum v4l2_memory memtype; unsigned int nbufs; struct buffer *buffers; unsigned int width; unsigned int height; uint32_t buffer_output_flags; uint32_t timestamp_type; unsigned char num_planes; struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES]; void *pattern[VIDEO_MAX_PLANES]; unsigned int patternsize[VIDEO_MAX_PLANES]; bool write_data_prefix; }; /* ----------------------------------------------------------------------------- * Pause Handling */ static bool pause_resume; static struct termios pause_term; static bool pause_no_term; static bool pause_term_configured; static char pause_filename[23]; static void pause_signal_handler(int signal __attribute__((__unused__))) { pause_resume = true; } static void pause_wait(void) { int ret; int fd; fd = open(pause_filename, O_CREAT, 0); if (fd != -1) close(fd); if (pause_no_term) { printf("Paused waiting for SIGUSR1\n"); while (!pause_resume) pause(); goto done; } printf("Paused waiting for key press or SIGUSR1\n"); pause_resume = false; while (!pause_resume) { fd_set rfds; char c; FD_ZERO(&rfds); FD_SET(0, &rfds); ret = select(1, &rfds, NULL, NULL, NULL); if (ret < 0 && errno != EINTR) break; if (ret == 1) { ret = read(0, &c, 1); break; } } done: unlink(pause_filename); } static void pause_cleanup(void) { if (pause_term_configured) tcsetattr(0, TCSANOW, &pause_term); unlink(pause_filename); } static int pause_init(void) { struct sigaction sig_usr1; struct termios term; int ret; sprintf(pause_filename, ".yavta.wait.%u", getpid()); memset(&sig_usr1, 0, sizeof(sig_usr1)); sig_usr1.sa_handler = pause_signal_handler; ret = sigaction(SIGUSR1, &sig_usr1, NULL); if (ret < 0) { printf("Unable to install SIGUSR1 handler: %s (%d)\n", strerror(errno), errno); return -errno; } ret = tcgetattr(0, &term); if (ret < 0) { if (errno == ENOTTY) { pause_no_term = true; return 0; } printf("Unable to retrieve terminal attributes: %s (%d)\n", strerror(errno), errno); return -errno; } pause_term = term; pause_term_configured = true; atexit(pause_cleanup); term.c_lflag &= ~ICANON; term.c_lflag &= ~ECHO; term.c_cc[VMIN] = 0; term.c_cc[VTIME] = 0; ret = tcsetattr(0, TCSANOW, &term); if (ret < 0) { printf("Unable to set terminal attributes: %s (%d)\n", strerror(errno), errno); return -errno; } return 0; } /* ----------------------------------------------------------------------------- * Key-value pairs handling */ struct key_value { const char *name; unsigned int value; }; static int __key_value_get(const struct key_value *values, unsigned int count, const char *name) { unsigned int i; for (i = 0; i < count; ++i) { if (!strcmp(values[i].name, name)) return values[i].value; } return -EINVAL; } static void __key_value_list(const struct key_value *values, unsigned int count, const char *type) { unsigned int chars; unsigned int i; bool first = true; chars = printf("%s: ", type); for (i = 0; i < count; ++i) { unsigned int len = strlen(values[i].name); if (chars + len >= 80) { printf(",\n\t"); chars = 8; first = true; } if (first) first = false; else chars += printf(", "); chars += printf("%s", values[i].name); } printf("\n"); } #define key_value_get(values, name) \ __key_value_get(values, ARRAY_SIZE(values), name) #define key_value_list(values, type) \ __key_value_list(values, ARRAY_SIZE(values), type) /* ----------------------------------------------------------------------------- * Format handling */ static bool video_is_mplane(struct device *dev) { return dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || dev->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; } static bool video_is_meta(struct device *dev) { return dev->type == V4L2_BUF_TYPE_META_CAPTURE || dev->type == V4L2_BUF_TYPE_META_OUTPUT; } static bool video_is_capture(struct device *dev) { return dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || dev->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || dev->type == V4L2_BUF_TYPE_META_CAPTURE; } static bool video_is_output(struct device *dev) { return dev->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE || dev->type == V4L2_BUF_TYPE_VIDEO_OUTPUT || dev->type == V4L2_BUF_TYPE_META_OUTPUT; } static const struct { enum v4l2_buf_type type; bool supported; const char *name; const char *string; } buf_types[] = { { V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, 1, "Video capture mplanes", "capture-mplane", }, { V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, 1, "Video output", "output-mplane", }, { V4L2_BUF_TYPE_VIDEO_CAPTURE, 1, "Video capture", "capture", }, { V4L2_BUF_TYPE_VIDEO_OUTPUT, 1, "Video output mplanes", "output", }, { V4L2_BUF_TYPE_VIDEO_OVERLAY, 0, "Video overlay", "overlay" }, { V4L2_BUF_TYPE_META_CAPTURE, 1, "Meta-data capture", "meta-capture", }, { V4L2_BUF_TYPE_META_OUTPUT, 1, "Meta-data output", "meta-output", }, }; static int v4l2_buf_type_from_string(const char *str) { unsigned int i; for (i = 0; i < ARRAY_SIZE(buf_types); i++) { if (!buf_types[i].supported) continue; if (strcmp(buf_types[i].string, str)) continue; return buf_types[i].type; } return -1; } static const char *v4l2_buf_type_name(enum v4l2_buf_type type) { unsigned int i; for (i = 0; i < ARRAY_SIZE(buf_types); ++i) { if (buf_types[i].type == type) return buf_types[i].name; } if (type & V4L2_BUF_TYPE_PRIVATE) return "Private"; else return "Unknown"; } static const struct v4l2_format_info { const char *name; unsigned int fourcc; unsigned char n_planes; } pixel_formats[] = { { "RGB332", V4L2_PIX_FMT_RGB332, 1 }, { "RGB444", V4L2_PIX_FMT_RGB444, 1 }, { "ARGB444", V4L2_PIX_FMT_ARGB444, 1 }, { "XRGB444", V4L2_PIX_FMT_XRGB444, 1 }, { "RGB555", V4L2_PIX_FMT_RGB555, 1 }, { "ARGB555", V4L2_PIX_FMT_ARGB555, 1 }, { "XRGB555", V4L2_PIX_FMT_XRGB555, 1 }, { "RGB565", V4L2_PIX_FMT_RGB565, 1 }, { "RGB555X", V4L2_PIX_FMT_RGB555X, 1 }, { "RGB565X", V4L2_PIX_FMT_RGB565X, 1 }, { "BGR666", V4L2_PIX_FMT_BGR666, 1 }, { "BGR24", V4L2_PIX_FMT_BGR24, 1 }, { "RGB24", V4L2_PIX_FMT_RGB24, 1 }, { "BGR32", V4L2_PIX_FMT_BGR32, 1 }, { "ABGR32", V4L2_PIX_FMT_ABGR32, 1 }, { "XBGR32", V4L2_PIX_FMT_XBGR32, 1 }, { "RGB32", V4L2_PIX_FMT_RGB32, 1 }, { "ARGB32", V4L2_PIX_FMT_ARGB32, 1 }, { "XRGB32", V4L2_PIX_FMT_XRGB32, 1 }, { "HSV24", V4L2_PIX_FMT_HSV24, 1 }, { "HSV32", V4L2_PIX_FMT_HSV32, 1 }, { "Y8", V4L2_PIX_FMT_GREY, 1 }, { "Y10", V4L2_PIX_FMT_Y10, 1 }, { "Y12", V4L2_PIX_FMT_Y12, 1 }, { "Y16", V4L2_PIX_FMT_Y16, 1 }, { "UYVY", V4L2_PIX_FMT_UYVY, 1 }, { "VYUY", V4L2_PIX_FMT_VYUY, 1 }, { "YUYV", V4L2_PIX_FMT_YUYV, 1 }, { "YVYU", V4L2_PIX_FMT_YVYU, 1 }, { "YUV32", V4L2_PIX_FMT_YUV32, 1 }, { "AYUV32", V4L2_PIX_FMT_AYUV32, 1 }, { "XYUV32", V4L2_PIX_FMT_XYUV32, 1 }, { "VUYA32", V4L2_PIX_FMT_VUYA32, 1 }, { "VUYX32", V4L2_PIX_FMT_VUYX32, 1 }, { "YUVA32", V4L2_PIX_FMT_YUVA32, 1 }, { "YUVX32", V4L2_PIX_FMT_YUVX32, 1 }, { "NV12", V4L2_PIX_FMT_NV12, 1 }, { "NV12M", V4L2_PIX_FMT_NV12M, 2 }, { "NV21", V4L2_PIX_FMT_NV21, 1 }, { "NV21M", V4L2_PIX_FMT_NV21M, 2 }, { "NV16", V4L2_PIX_FMT_NV16, 1 }, { "NV16M", V4L2_PIX_FMT_NV16M, 2 }, { "NV61", V4L2_PIX_FMT_NV61, 1 }, { "NV61M", V4L2_PIX_FMT_NV61M, 2 }, { "NV24", V4L2_PIX_FMT_NV24, 1 }, { "NV42", V4L2_PIX_FMT_NV42, 1 }, { "YUV420M", V4L2_PIX_FMT_YUV420M, 3 }, { "YUV422M", V4L2_PIX_FMT_YUV422M, 3 }, { "YUV444M", V4L2_PIX_FMT_YUV444M, 3 }, { "YVU420M", V4L2_PIX_FMT_YVU420M, 3 }, { "YVU422M", V4L2_PIX_FMT_YVU422M, 3 }, { "YVU444M", V4L2_PIX_FMT_YVU444M, 3 }, { "SBGGR8", V4L2_PIX_FMT_SBGGR8, 1 }, { "SGBRG8", V4L2_PIX_FMT_SGBRG8, 1 }, { "SGRBG8", V4L2_PIX_FMT_SGRBG8, 1 }, { "SRGGB8", V4L2_PIX_FMT_SRGGB8, 1 }, { "SBGGR10_DPCM8", V4L2_PIX_FMT_SBGGR10DPCM8, 1 }, { "SGBRG10_DPCM8", V4L2_PIX_FMT_SGBRG10DPCM8, 1 }, { "SGRBG10_DPCM8", V4L2_PIX_FMT_SGRBG10DPCM8, 1 }, { "SRGGB10_DPCM8", V4L2_PIX_FMT_SRGGB10DPCM8, 1 }, { "SBGGR10", V4L2_PIX_FMT_SBGGR10, 1 }, { "SGBRG10", V4L2_PIX_FMT_SGBRG10, 1 }, { "SGRBG10", V4L2_PIX_FMT_SGRBG10, 1 }, { "SRGGB10", V4L2_PIX_FMT_SRGGB10, 1 }, { "SBGGR10P", V4L2_PIX_FMT_SBGGR10P, 1 }, { "SGBRG10P", V4L2_PIX_FMT_SGBRG10P, 1 }, { "SGRBG10P", V4L2_PIX_FMT_SGRBG10P, 1 }, { "SRGGB10P", V4L2_PIX_FMT_SRGGB10P, 1 }, { "SBGGR12", V4L2_PIX_FMT_SBGGR12, 1 }, { "SGBRG12", V4L2_PIX_FMT_SGBRG12, 1 }, { "SGRBG12", V4L2_PIX_FMT_SGRBG12, 1 }, { "SRGGB12", V4L2_PIX_FMT_SRGGB12, 1 }, { "SBGGR16", V4L2_PIX_FMT_SBGGR16, 1 }, { "SGBRG16", V4L2_PIX_FMT_SGBRG16, 1 }, { "SGRBG16", V4L2_PIX_FMT_SGRBG16, 1 }, { "SRGGB16", V4L2_PIX_FMT_SRGGB16, 1 }, { "IPU3_SBGGR10", V4L2_PIX_FMT_IPU3_SBGGR10, 1 }, { "IPU3_SGBRG10", V4L2_PIX_FMT_IPU3_SGBRG10, 1 }, { "IPU3_SGRBG10", V4L2_PIX_FMT_IPU3_SGRBG10, 1 }, { "IPU3_SRGGB10", V4L2_PIX_FMT_IPU3_SRGGB10, 1 }, { "IPU3_Y10", V4L2_PIX_FMT_IPU3_Y10, 1 }, { "DV", V4L2_PIX_FMT_DV, 1 }, { "MJPEG", V4L2_PIX_FMT_MJPEG, 1 }, { "MPEG", V4L2_PIX_FMT_MPEG, 1 }, }; static void list_formats(void) { unsigned int i; for (i = 0; i < ARRAY_SIZE(pixel_formats); i++) printf("%s (\"%c%c%c%c\", %u planes)\n", pixel_formats[i].name, pixel_formats[i].fourcc & 0xff, (pixel_formats[i].fourcc >> 8) & 0xff, (pixel_formats[i].fourcc >> 16) & 0xff, (pixel_formats[i].fourcc >> 24) & 0xff, pixel_formats[i].n_planes); } static const struct v4l2_format_info *v4l2_format_by_fourcc(unsigned int fourcc) { unsigned int i; for (i = 0; i < ARRAY_SIZE(pixel_formats); ++i) { if (pixel_formats[i].fourcc == fourcc) return &pixel_formats[i]; } return NULL; } static const struct v4l2_format_info *v4l2_format_by_name(const char *name) { unsigned int i; for (i = 0; i < ARRAY_SIZE(pixel_formats); ++i) { if (strcasecmp(pixel_formats[i].name, name) == 0) return &pixel_formats[i]; } return NULL; } static const char *v4l2_format_name(unsigned int fourcc) { const struct v4l2_format_info *info; static char name[5]; unsigned int i; info = v4l2_format_by_fourcc(fourcc); if (info) return info->name; for (i = 0; i < 4; ++i) { name[i] = fourcc & 0xff; fourcc >>= 8; } name[4] = '\0'; return name; } static const struct { const char *name; enum v4l2_field field; } fields[] = { { "any", V4L2_FIELD_ANY }, { "none", V4L2_FIELD_NONE }, { "top", V4L2_FIELD_TOP }, { "bottom", V4L2_FIELD_BOTTOM }, { "interlaced", V4L2_FIELD_INTERLACED }, { "seq-tb", V4L2_FIELD_SEQ_TB }, { "seq-bt", V4L2_FIELD_SEQ_BT }, { "alternate", V4L2_FIELD_ALTERNATE }, { "interlaced-tb", V4L2_FIELD_INTERLACED_TB }, { "interlaced-bt", V4L2_FIELD_INTERLACED_BT }, }; static enum v4l2_field v4l2_field_from_string(const char *name) { unsigned int i; for (i = 0; i < ARRAY_SIZE(fields); ++i) { if (strcasecmp(fields[i].name, name) == 0) return fields[i].field; } return -1; } static const char *v4l2_field_name(enum v4l2_field field) { unsigned int i; for (i = 0; i < ARRAY_SIZE(fields); ++i) { if (fields[i].field == field) return fields[i].name; } return "unknown"; } static const struct key_value v4l2_colorspaces[] = { { "DEFAULT", V4L2_COLORSPACE_DEFAULT }, { "SMPTE170M", V4L2_COLORSPACE_SMPTE170M }, { "SMPTE240M", V4L2_COLORSPACE_SMPTE240M }, { "REC709", V4L2_COLORSPACE_REC709 }, { "BT878", V4L2_COLORSPACE_BT878 }, { "470_SYSTEM_M", V4L2_COLORSPACE_470_SYSTEM_M }, { "470_SYSTEM_BG", V4L2_COLORSPACE_470_SYSTEM_BG }, { "JPEG", V4L2_COLORSPACE_JPEG }, { "SRGB", V4L2_COLORSPACE_SRGB }, { "OPRGB", V4L2_COLORSPACE_OPRGB }, { "BT2020", V4L2_COLORSPACE_BT2020 }, { "RAW", V4L2_COLORSPACE_RAW }, { "DCI_P3", V4L2_COLORSPACE_DCI_P3 }, }; static const struct key_value v4l2_xfer_funcs[] = { { "DEFAULT", V4L2_COLORSPACE_DEFAULT }, { "709", V4L2_XFER_FUNC_709 }, { "SRGB", V4L2_XFER_FUNC_SRGB }, { "OPRGB", V4L2_XFER_FUNC_OPRGB }, { "SMPTE240M", V4L2_XFER_FUNC_SMPTE240M }, { "NONE", V4L2_XFER_FUNC_NONE }, { "DCI_P3", V4L2_XFER_FUNC_DCI_P3 }, { "SMPTE2084", V4L2_XFER_FUNC_SMPTE2084 }, }; static const struct key_value v4l2_encodings[] = { /* enum v4l2_ycbcr_encoding */ { "DEFAULT", V4L2_YCBCR_ENC_DEFAULT }, { "601", V4L2_YCBCR_ENC_601 }, { "709", V4L2_YCBCR_ENC_709 }, { "XV601", V4L2_YCBCR_ENC_XV601 }, { "XV709", V4L2_YCBCR_ENC_XV709 }, { "SYCC", V4L2_YCBCR_ENC_SYCC }, { "BT2020", V4L2_YCBCR_ENC_BT2020 }, { "BT2020_CONST_LUM", V4L2_YCBCR_ENC_BT2020_CONST_LUM }, { "SMPTE240M", V4L2_YCBCR_ENC_SMPTE240M }, /* enum v4l2_hsv_encoding */ { "HSV180", V4L2_HSV_ENC_180 }, { "HSV256", V4L2_HSV_ENC_256 }, }; static const struct key_value v4l2_quantizations[] = { { "DEFAULT", V4L2_QUANTIZATION_DEFAULT }, { "FULL_RANGE", V4L2_QUANTIZATION_FULL_RANGE }, { "LIM_RANGE", V4L2_QUANTIZATION_LIM_RANGE }, }; #define v4l2_colorspace_from_string(name) \ key_value_get(v4l2_colorspaces, name) #define v4l2_xfer_func_from_string(name) \ key_value_get(v4l2_xfer_funcs, name) #define v4l2_encoding_from_string(name) \ key_value_get(v4l2_encodings, name) #define v4l2_quantization_from_string(name) \ key_value_get(v4l2_quantizations, name) #define list_colorspaces() \ key_value_list(v4l2_colorspaces, "colorspace") #define list_xfer_funcs() \ key_value_list(v4l2_xfer_funcs, "xfer-func") #define list_encodings() \ key_value_list(v4l2_encodings, "encoding") #define list_quantizations() \ key_value_list(v4l2_quantizations, "quantization") /* ----------------------------------------------------------------------------- * */ static void video_set_buf_type(struct device *dev, enum v4l2_buf_type type) { dev->type = type; } static bool video_has_valid_buf_type(struct device *dev) { return (int)dev->type != -1; } static void video_init(struct device *dev) { dev->fd = -1; dev->memtype = V4L2_MEMORY_MMAP; dev->buffers = NULL; dev->type = (enum v4l2_buf_type)-1; } static bool video_has_fd(struct device *dev) { return dev->fd != -1; } static int video_set_fd(struct device *dev, int fd) { if (video_has_fd(dev)) { printf("Can't set fd (already open).\n"); return -1; } dev->fd = fd; return 0; } static int video_open(struct device *dev, const char *devname) { if (video_has_fd(dev)) { printf("Can't open device (already open).\n"); return -1; } dev->fd = open(devname, O_RDWR); if (dev->fd < 0) { printf("Error opening device %s: %s (%d).\n", devname, strerror(errno), errno); return dev->fd; } printf("Device %s opened.\n", devname); dev->opened = 1; return 0; } static int video_querycap(struct device *dev, unsigned int *capabilities) { struct v4l2_capability cap; unsigned int caps; bool has_video; bool has_meta; bool has_capture; bool has_output; bool has_mplane; int ret; memset(&cap, 0, sizeof cap); ret = ioctl(dev->fd, VIDIOC_QUERYCAP, &cap); if (ret < 0) return 0; caps = cap.capabilities & V4L2_CAP_DEVICE_CAPS ? cap.device_caps : cap.capabilities; has_video = caps & (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_VIDEO_OUTPUT); has_meta = caps & (V4L2_CAP_META_CAPTURE | V4L2_CAP_META_OUTPUT); has_capture = caps & (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_META_CAPTURE); has_output = caps & (V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_META_OUTPUT); has_mplane = caps & (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE); printf("Device `%s' on `%s' (driver '%s') supports%s%s%s%s %s mplanes.\n", cap.card, cap.bus_info, cap.driver, has_video ? " video," : "", has_meta ? " meta-data," : "", has_capture ? " capture," : "", has_output ? " output," : "", has_mplane ? "with" : "without"); *capabilities = caps; return 0; } static int cap_get_buf_type(unsigned int capabilities) { if (capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) { return V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; } else if (capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE) { return V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; } else if (capabilities & V4L2_CAP_VIDEO_CAPTURE) { return V4L2_BUF_TYPE_VIDEO_CAPTURE; } else if (capabilities & V4L2_CAP_VIDEO_OUTPUT) { return V4L2_BUF_TYPE_VIDEO_OUTPUT; } else if (capabilities & V4L2_CAP_META_CAPTURE) { return V4L2_BUF_TYPE_META_CAPTURE; } else if (capabilities & V4L2_CAP_META_OUTPUT) { return V4L2_BUF_TYPE_META_OUTPUT; } else { printf("Device supports neither capture nor output.\n"); return -EINVAL; } return 0; } static void video_close(struct device *dev) { unsigned int i; for (i = 0; i < dev->num_planes; i++) free(dev->pattern[i]); free(dev->buffers); if (dev->opened) close(dev->fd); } static void video_log_status(struct device *dev) { ioctl(dev->fd, VIDIOC_LOG_STATUS); } static int query_control(struct device *dev, unsigned int id, struct v4l2_query_ext_ctrl *query) { struct v4l2_queryctrl q; int ret; memset(query, 0, sizeof(*query)); query->id = id; ret = ioctl(dev->fd, VIDIOC_QUERY_EXT_CTRL, query); if (ret < 0) ret = -errno; if (!ret || ret == -EINVAL) return ret; if (ret != -ENOTTY) { printf("unable to query control 0x%8.8x: %s (%d).\n", id, strerror(-ret), -ret); return ret; } /* * If VIDIOC_QUERY_EXT_CTRL isn't available emulate it using * VIDIOC_QUERYCTRL. */ memset(&q, 0, sizeof(q)); q.id = id; ret = ioctl(dev->fd, VIDIOC_QUERYCTRL, &q); if (ret < 0) { ret = -errno; printf("unable to query control 0x%8.8x: %s (%d).\n", id, strerror(-ret), -ret); return ret; } memset(query, 0, sizeof(*query)); query->id = q.id; query->type = q.type; memcpy(query->name, q.name, sizeof(query->name)); query->minimum = q.minimum; query->maximum = q.maximum; query->step = q.step; query->default_value = q.default_value; query->flags = q.flags; if (q.type == V4L2_CTRL_TYPE_STRING && !(q.flags & V4L2_CTRL_FLAG_HAS_PAYLOAD)) { query->elem_size = q.maximum + 1; query->elems = 1; } return 0; } static int get_control(struct device *dev, const struct v4l2_query_ext_ctrl *query, struct v4l2_ext_control *ctrl, unsigned int which) { struct v4l2_ext_controls ctrls; struct v4l2_control old; int ret; memset(&ctrls, 0, sizeof(ctrls)); memset(ctrl, 0, sizeof(*ctrl)); ctrls.which = which; ctrls.count = 1; ctrls.controls = ctrl; ctrl->id = query->id; if (query->flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) { ctrl->size = query->elems * query->elem_size; ctrl->ptr = malloc(ctrl->size); if (ctrl->ptr == NULL) return -ENOMEM; } ret = ioctl(dev->fd, VIDIOC_G_EXT_CTRLS, &ctrls); if (ret != -1) return 0; if (query->flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) free(ctrl->ptr); if (query->flags & V4L2_CTRL_FLAG_HAS_PAYLOAD || query->type == V4L2_CTRL_TYPE_INTEGER64 || (errno != EINVAL && errno != ENOTTY)) return -errno; old.id = query->id; ret = ioctl(dev->fd, VIDIOC_G_CTRL, &old); if (ret < 0) return -errno; ctrl->value = old.value; return 0; } static int set_control(struct device *dev, const struct v4l2_query_ext_ctrl *query, struct v4l2_ext_control *ctrl) { struct v4l2_ext_controls ctrls; struct v4l2_control old; int ret; memset(&ctrls, 0, sizeof(ctrls)); ctrls.ctrl_class = V4L2_CTRL_ID2CLASS(ctrl->id); ctrls.count = 1; ctrls.controls = ctrl; ctrl->id = query->id; ret = ioctl(dev->fd, VIDIOC_S_EXT_CTRLS, &ctrls); if (ret != -1) return 0; if (query->flags & V4L2_CTRL_FLAG_HAS_PAYLOAD || query->type == V4L2_CTRL_TYPE_INTEGER64 || (errno != EINVAL && errno != ENOTTY)) return -1; old.id = ctrl->id; old.value = ctrl->value; ret = ioctl(dev->fd, VIDIOC_S_CTRL, &old); if (ret != -1) ctrl->value = old.value; return ret; } static int video_get_format(struct device *dev) { struct v4l2_format fmt; unsigned int i; int ret; memset(&fmt, 0, sizeof fmt); fmt.type = dev->type; ret = ioctl(dev->fd, VIDIOC_G_FMT, &fmt); if (ret < 0) { printf("Unable to get format: %s (%d).\n", strerror(errno), errno); return ret; } if (video_is_mplane(dev)) { dev->width = fmt.fmt.pix_mp.width; dev->height = fmt.fmt.pix_mp.height; dev->num_planes = fmt.fmt.pix_mp.num_planes; printf("Video format: %s (%08x) %ux%u field %s, %u planes: \n", v4l2_format_name(fmt.fmt.pix_mp.pixelformat), fmt.fmt.pix_mp.pixelformat, fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height, v4l2_field_name(fmt.fmt.pix_mp.field), fmt.fmt.pix_mp.num_planes); for (i = 0; i < fmt.fmt.pix_mp.num_planes; i++) { dev->plane_fmt[i].bytesperline = fmt.fmt.pix_mp.plane_fmt[i].bytesperline; dev->plane_fmt[i].sizeimage = fmt.fmt.pix_mp.plane_fmt[i].bytesperline ? fmt.fmt.pix_mp.plane_fmt[i].sizeimage : 0; printf(" * Stride %u, buffer size %u\n", fmt.fmt.pix_mp.plane_fmt[i].bytesperline, fmt.fmt.pix_mp.plane_fmt[i].sizeimage); } } else if (video_is_meta(dev)) { dev->width = 0; dev->height = 0; dev->num_planes = 1; printf("Meta-data format: %s (%08x) buffer size %u\n", v4l2_format_name(fmt.fmt.meta.dataformat), fmt.fmt.meta.dataformat, fmt.fmt.meta.buffersize); } else { dev->width = fmt.fmt.pix.width; dev->height = fmt.fmt.pix.height; dev->num_planes = 1; dev->plane_fmt[0].bytesperline = fmt.fmt.pix.bytesperline; dev->plane_fmt[0].sizeimage = fmt.fmt.pix.bytesperline ? fmt.fmt.pix.sizeimage : 0; printf("Video format: %s (%08x) %ux%u (stride %u) field %s buffer size %u\n", v4l2_format_name(fmt.fmt.pix.pixelformat), fmt.fmt.pix.pixelformat, fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.bytesperline, v4l2_field_name(fmt.fmt.pix_mp.field), fmt.fmt.pix.sizeimage); } return 0; } static int video_set_format(struct device *dev, unsigned int w, unsigned int h, unsigned int format, unsigned int stride, unsigned int buffer_size, enum v4l2_field field, enum v4l2_colorspace colorspace, enum v4l2_xfer_func xfer_func, enum v4l2_ycbcr_encoding encoding, enum v4l2_quantization quantization, unsigned int flags) { struct v4l2_format fmt; unsigned int i; int ret; memset(&fmt, 0, sizeof fmt); fmt.type = dev->type; if (video_is_mplane(dev)) { const struct v4l2_format_info *info = v4l2_format_by_fourcc(format); fmt.fmt.pix_mp.width = w; fmt.fmt.pix_mp.height = h; fmt.fmt.pix_mp.pixelformat = format; fmt.fmt.pix_mp.field = field; fmt.fmt.pix_mp.num_planes = info->n_planes; fmt.fmt.pix_mp.colorspace = colorspace; fmt.fmt.pix_mp.flags = flags; fmt.fmt.pix_mp.ycbcr_enc = encoding; fmt.fmt.pix_mp.quantization = quantization; fmt.fmt.pix_mp.xfer_func = xfer_func; for (i = 0; i < fmt.fmt.pix_mp.num_planes; i++) { fmt.fmt.pix_mp.plane_fmt[i].bytesperline = stride; fmt.fmt.pix_mp.plane_fmt[i].sizeimage = buffer_size; } } else if (video_is_meta(dev)) { fmt.fmt.meta.dataformat = format; fmt.fmt.meta.buffersize = buffer_size; } else { fmt.fmt.pix.width = w; fmt.fmt.pix.height = h; fmt.fmt.pix.pixelformat = format; fmt.fmt.pix.field = field; fmt.fmt.pix.bytesperline = stride; fmt.fmt.pix.sizeimage = buffer_size; fmt.fmt.pix.colorspace = colorspace; fmt.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC; fmt.fmt.pix.flags = flags; fmt.fmt.pix.ycbcr_enc = encoding; fmt.fmt.pix.quantization = quantization; fmt.fmt.pix.xfer_func = xfer_func; } ret = ioctl(dev->fd, VIDIOC_S_FMT, &fmt); if (ret < 0) { printf("Unable to set format: %s (%d).\n", strerror(errno), errno); return ret; } if (video_is_mplane(dev)) { printf("Video format set: %s (%08x) %ux%u field %s, %u planes: \n", v4l2_format_name(fmt.fmt.pix_mp.pixelformat), fmt.fmt.pix_mp.pixelformat, fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height, v4l2_field_name(fmt.fmt.pix_mp.field), fmt.fmt.pix_mp.num_planes); for (i = 0; i < fmt.fmt.pix_mp.num_planes; i++) { printf(" * Stride %u, buffer size %u\n", fmt.fmt.pix_mp.plane_fmt[i].bytesperline, fmt.fmt.pix_mp.plane_fmt[i].sizeimage); } } else if (video_is_meta(dev)) { printf("Meta-data format: %s (%08x) buffer size %u\n", v4l2_format_name(fmt.fmt.meta.dataformat), fmt.fmt.meta.dataformat, fmt.fmt.meta.buffersize); } else { printf("Video format set: %s (%08x) %ux%u (stride %u) field %s buffer size %u\n", v4l2_format_name(fmt.fmt.pix.pixelformat), fmt.fmt.pix.pixelformat, fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.bytesperline, v4l2_field_name(fmt.fmt.pix.field), fmt.fmt.pix.sizeimage); } return 0; } static int video_set_framerate(struct device *dev, struct v4l2_fract *time_per_frame) { struct v4l2_streamparm parm; int ret; memset(&parm, 0, sizeof parm); parm.type = dev->type; ret = ioctl(dev->fd, VIDIOC_G_PARM, &parm); if (ret < 0) { printf("Unable to get frame rate: %s (%d).\n", strerror(errno), errno); return ret; } printf("Current frame rate: %u/%u\n", parm.parm.capture.timeperframe.numerator, parm.parm.capture.timeperframe.denominator); printf("Setting frame rate to: %u/%u\n", time_per_frame->numerator, time_per_frame->denominator); parm.parm.capture.timeperframe.numerator = time_per_frame->numerator; parm.parm.capture.timeperframe.denominator = time_per_frame->denominator; ret = ioctl(dev->fd, VIDIOC_S_PARM, &parm); if (ret < 0) { printf("Unable to set frame rate: %s (%d).\n", strerror(errno), errno); return ret; } ret = ioctl(dev->fd, VIDIOC_G_PARM, &parm); if (ret < 0) { printf("Unable to get frame rate: %s (%d).\n", strerror(errno), errno); return ret; } printf("Frame rate set: %u/%u\n", parm.parm.capture.timeperframe.numerator, parm.parm.capture.timeperframe.denominator); return 0; } static int video_buffer_mmap(struct device *dev, struct buffer *buffer, struct v4l2_buffer *v4l2buf) { unsigned int length; unsigned int offset; unsigned int i; for (i = 0; i < dev->num_planes; i++) { if (video_is_mplane(dev)) { length = v4l2buf->m.planes[i].length; offset = v4l2buf->m.planes[i].m.mem_offset; } else { length = v4l2buf->length; offset = v4l2buf->m.offset; } buffer->mem[i] = mmap(0, length, PROT_READ | PROT_WRITE, MAP_SHARED, dev->fd, offset); if (buffer->mem[i] == MAP_FAILED) { printf("Unable to map buffer %u/%u: %s (%d)\n", buffer->idx, i, strerror(errno), errno); return -1; } buffer->size[i] = length; buffer->padding[i] = 0; printf("Buffer %u/%u mapped at address %p.\n", buffer->idx, i, buffer->mem[i]); } return 0; } static int video_buffer_munmap(struct device *dev, struct buffer *buffer) { unsigned int i; int ret; for (i = 0; i < dev->num_planes; i++) { ret = munmap(buffer->mem[i], buffer->size[i]); if (ret < 0) { printf("Unable to unmap buffer %u/%u: %s (%d)\n", buffer->idx, i, strerror(errno), errno); } buffer->mem[i] = NULL; } return 0; } static int video_buffer_alloc_userptr(struct device *dev, struct buffer *buffer, struct v4l2_buffer *v4l2buf, unsigned int offset, unsigned int padding) { int page_size = getpagesize(); unsigned int length; unsigned int i; int ret; for (i = 0; i < dev->num_planes; i++) { if (video_is_mplane(dev)) length = v4l2buf->m.planes[i].length; else length = v4l2buf->length; ret = posix_memalign(&buffer->mem[i], page_size, length + offset + padding); if (ret < 0) { printf("Unable to allocate buffer %u/%u (%d)\n", buffer->idx, i, ret); return -ENOMEM; } buffer->mem[i] += offset; buffer->size[i] = length; buffer->padding[i] = padding; printf("Buffer %u/%u allocated at address %p.\n", buffer->idx, i, buffer->mem[i]); } return 0; } static void video_buffer_free_userptr(struct device *dev, struct buffer *buffer) { unsigned int i; for (i = 0; i < dev->num_planes; i++) { free(buffer->mem[i]); buffer->mem[i] = NULL; } } static void video_buffer_fill_userptr(struct device *dev, struct buffer *buffer, struct v4l2_buffer *v4l2buf) { unsigned int i; if (!video_is_mplane(dev)) { v4l2buf->m.userptr = (unsigned long)buffer->mem[0]; return; } for (i = 0; i < dev->num_planes; i++) v4l2buf->m.planes[i].m.userptr = (unsigned long)buffer->mem[i]; } static void get_ts_flags(uint32_t flags, const char **ts_type, const char **ts_source) { switch (flags & V4L2_BUF_FLAG_TIMESTAMP_MASK) { case V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN: *ts_type = "unk"; break; case V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC: *ts_type = "mono"; break; case V4L2_BUF_FLAG_TIMESTAMP_COPY: *ts_type = "copy"; break; default: *ts_type = "inv"; } switch (flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK) { case V4L2_BUF_FLAG_TSTAMP_SRC_EOF: *ts_source = "EoF"; break; case V4L2_BUF_FLAG_TSTAMP_SRC_SOE: *ts_source = "SoE"; break; default: *ts_source = "inv"; } } static int video_alloc_buffers(struct device *dev, int nbufs, unsigned int offset, unsigned int padding) { struct v4l2_plane planes[VIDEO_MAX_PLANES]; struct v4l2_requestbuffers rb; struct v4l2_buffer buf; struct buffer *buffers; unsigned int i; int ret; memset(&rb, 0, sizeof rb); rb.count = nbufs; rb.type = dev->type; rb.memory = dev->memtype; ret = ioctl(dev->fd, VIDIOC_REQBUFS, &rb); if (ret < 0) { printf("Unable to request buffers: %s (%d).\n", strerror(errno), errno); return ret; } printf("%u buffers requested.\n", rb.count); buffers = malloc(rb.count * sizeof buffers[0]); if (buffers == NULL) return -ENOMEM; /* Map the buffers. */ for (i = 0; i < rb.count; ++i) { const char *ts_type, *ts_source; memset(&buf, 0, sizeof buf); memset(planes, 0, sizeof planes); buf.index = i; buf.type = dev->type; buf.memory = dev->memtype; buf.length = VIDEO_MAX_PLANES; buf.m.planes = planes; ret = ioctl(dev->fd, VIDIOC_QUERYBUF, &buf); if (ret < 0) { printf("Unable to query buffer %u: %s (%d).\n", i, strerror(errno), errno); return ret; } get_ts_flags(buf.flags, &ts_type, &ts_source); printf("length: %u offset: %u timestamp type/source: %s/%s\n", buf.length, buf.m.offset, ts_type, ts_source); buffers[i].idx = i; switch (dev->memtype) { case V4L2_MEMORY_MMAP: ret = video_buffer_mmap(dev, &buffers[i], &buf); break; case V4L2_MEMORY_USERPTR: ret = video_buffer_alloc_userptr(dev, &buffers[i], &buf, offset, padding); break; default: break; } if (ret < 0) return ret; } dev->timestamp_type = buf.flags & V4L2_BUF_FLAG_TIMESTAMP_MASK; dev->buffers = buffers; dev->nbufs = rb.count; return 0; } static int video_free_buffers(struct device *dev) { struct v4l2_requestbuffers rb; unsigned int i; int ret; if (dev->nbufs == 0) return 0; for (i = 0; i < dev->nbufs; ++i) { switch (dev->memtype) { case V4L2_MEMORY_MMAP: ret = video_buffer_munmap(dev, &dev->buffers[i]); if (ret < 0) return ret; break; case V4L2_MEMORY_USERPTR: video_buffer_free_userptr(dev, &dev->buffers[i]); break; default: break; } } memset(&rb, 0, sizeof rb); rb.count = 0; rb.type = dev->type; rb.memory = dev->memtype; ret = ioctl(dev->fd, VIDIOC_REQBUFS, &rb); if (ret < 0) { printf("Unable to release buffers: %s (%d).\n", strerror(errno), errno); return ret; } printf("%u buffers released.\n", dev->nbufs); free(dev->buffers); dev->nbufs = 0; dev->buffers = NULL; return 0; } static int video_queue_buffer(struct device *dev, int index, enum buffer_fill_mode fill) { struct v4l2_buffer buf; struct v4l2_plane planes[VIDEO_MAX_PLANES]; int ret; unsigned int i; memset(&buf, 0, sizeof buf); memset(&planes, 0, sizeof planes); buf.index = index; buf.type = dev->type; buf.memory = dev->memtype; if (video_is_output(dev)) { buf.flags = dev->buffer_output_flags; if (dev->timestamp_type == V4L2_BUF_FLAG_TIMESTAMP_COPY) { struct timespec ts; clock_gettime(CLOCK_MONOTONIC, &ts); buf.timestamp.tv_sec = ts.tv_sec; buf.timestamp.tv_usec = ts.tv_nsec / 1000; } } if (video_is_mplane(dev)) { buf.m.planes = planes; buf.length = dev->num_planes; } if (dev->memtype == V4L2_MEMORY_USERPTR) { if (video_is_mplane(dev)) { for (i = 0; i < dev->num_planes; i++) { buf.m.planes[i].m.userptr = (unsigned long) dev->buffers[index].mem[i]; buf.m.planes[i].length = dev->buffers[index].size[i]; } } else { buf.m.userptr = (unsigned long)dev->buffers[index].mem[0]; buf.length = dev->buffers[index].size[0]; } } for (i = 0; i < dev->num_planes; i++) { if (video_is_output(dev)) { if (video_is_mplane(dev)) buf.m.planes[i].bytesused = dev->patternsize[i]; else buf.bytesused = dev->patternsize[i]; memcpy(dev->buffers[buf.index].mem[i], dev->pattern[i], dev->patternsize[i]); } else { if (fill & BUFFER_FILL_FRAME) memset(dev->buffers[buf.index].mem[i], 0x55, dev->buffers[index].size[i]); if (fill & BUFFER_FILL_PADDING) memset(dev->buffers[buf.index].mem[i] + dev->buffers[index].size[i], 0x55, dev->buffers[index].padding[i]); } } ret = ioctl(dev->fd, VIDIOC_QBUF, &buf); if (ret < 0) printf("Unable to queue buffer: %s (%d).\n", strerror(errno), errno); return ret; } static int video_enable(struct device *dev, int enable) { int type = dev->type; int ret; ret = ioctl(dev->fd, enable ? VIDIOC_STREAMON : VIDIOC_STREAMOFF, &type); if (ret < 0) { printf("Unable to %s streaming: %s (%d).\n", enable ? "start" : "stop", strerror(errno), errno); return ret; } return 0; } static int video_for_each_control(struct device *dev, int(*callback)(struct device *dev, const struct v4l2_query_ext_ctrl *query)) { struct v4l2_query_ext_ctrl query; unsigned int nctrls = 0; unsigned int id; int ret; id = 0; while (1) { id |= V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND; ret = query_control(dev, id, &query); if (ret == -EINVAL) break; if (ret < 0) return ret; id = query.id; ret = callback(dev, &query); if (ret < 0) return ret; if (ret) nctrls++; } return nctrls; } static void video_query_menu(struct device *dev, const struct v4l2_query_ext_ctrl *query, unsigned int value) { struct v4l2_querymenu menu; int ret; for (menu.index = query->minimum; menu.index <= (unsigned)query->maximum; menu.index++) { menu.id = query->id; ret = ioctl(dev->fd, VIDIOC_QUERYMENU, &menu); if (ret < 0) continue; if (query->type == V4L2_CTRL_TYPE_MENU) printf(" %u: %.32s%s\n", menu.index, menu.name, menu.index == value ? " (*)" : ""); else printf(" %u: %" PRId64 "%s\n", menu.index, (int64_t)menu.value, menu.index == value ? " (*)" : ""); }; } static void video_print_control_array(const struct v4l2_query_ext_ctrl *query, struct v4l2_ext_control *ctrl) { unsigned int i; printf("{"); for (i = 0; i < query->elems; ++i) { switch (query->type) { case V4L2_CTRL_TYPE_U8: printf("%u", ctrl->p_u8[i]); break; case V4L2_CTRL_TYPE_U16: printf("%u", ctrl->p_u16[i]); break; case V4L2_CTRL_TYPE_U32: printf("%u", ctrl->p_u32[i]); break; } if (i != query->elems - 1) printf(", "); } printf("}"); } static void video_print_control_value(const struct v4l2_query_ext_ctrl *query, struct v4l2_ext_control *ctrl) { if (query->nr_of_dims == 0) { switch (query->type) { case V4L2_CTRL_TYPE_INTEGER: case V4L2_CTRL_TYPE_BOOLEAN: case V4L2_CTRL_TYPE_MENU: case V4L2_CTRL_TYPE_INTEGER_MENU: printf("%d", ctrl->value); break; case V4L2_CTRL_TYPE_BITMASK: printf("0x%08x", ctrl->value); break; case V4L2_CTRL_TYPE_INTEGER64: printf("%" PRId64, (int64_t)ctrl->value64); break; case V4L2_CTRL_TYPE_STRING: printf("%s", ctrl->string); break; } return; } switch (query->type) { case V4L2_CTRL_TYPE_U8: case V4L2_CTRL_TYPE_U16: case V4L2_CTRL_TYPE_U32: video_print_control_array(query, ctrl); break; default: printf("unsupported type %u", query->type); break; } } static int video_get_control(struct device *dev, const struct v4l2_query_ext_ctrl *query, bool full) { struct v4l2_ext_control ctrl; unsigned int i; int ret; if (query->flags & V4L2_CTRL_FLAG_DISABLED) return 0; if (query->type == V4L2_CTRL_TYPE_CTRL_CLASS) { printf("--- %s (class 0x%08x) ---\n", query->name, query->id); return 0; } if (full) { printf("control 0x%08x `%s' min %" PRId64 " max %" PRId64 " step %" PRIu64 " default %" PRId64 " ", query->id, query->name, (int64_t)query->minimum, (int64_t)query->maximum, (uint64_t)query->step, (int64_t)query->default_value); if (query->nr_of_dims) { for (i = 0; i < query->nr_of_dims; ++i) printf("[%u]", query->dims[i]); printf(" "); } } else { printf("control 0x%08x ", query->id); } if (query->type == V4L2_CTRL_TYPE_BUTTON) { /* Button controls have no current value. */ printf("\n"); return 1; } printf("current "); ret = get_control(dev, query, &ctrl, V4L2_CTRL_WHICH_CUR_VAL); if (ret < 0) { printf("n/a\n"); printf("unable to get control 0x%8.8x: %s (%d).\n", query->id, strerror(errno), errno); } else { video_print_control_value(query, &ctrl); printf("\n"); } if (query->flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) free(ctrl.ptr); if (!full) return 1; if (query->type == V4L2_CTRL_TYPE_MENU || query->type == V4L2_CTRL_TYPE_INTEGER_MENU) video_query_menu(dev, query, ctrl.value); return 1; } static int __video_get_control(struct device *dev, const struct v4l2_query_ext_ctrl *query) { return video_get_control(dev, query, true); } static int video_parse_control_array(const struct v4l2_query_ext_ctrl *query, struct v4l2_ext_control *ctrl, const char *val) { unsigned int i; char *endptr; __u32 value; for ( ; isspace(*val); ++val) { } if (*val == '<') { /* Read the control value from the given file. */ ssize_t size; int fd; val++; fd = open(val, O_RDONLY); if (fd < 0) { printf("unable to open control file `%s'\n", val); return -EINVAL; } size = read(fd, ctrl->ptr, ctrl->size); if (size != (ssize_t)ctrl->size) { printf("error reading control file `%s' (%s)\n", val, strerror(errno)); close(fd); return -EINVAL; } close(fd); return 0; } if (*val++ != '{') return -EINVAL; for (i = 0; i < query->elems; ++i) { for ( ; isspace(*val); ++val) { } switch (query->type) { case V4L2_CTRL_TYPE_U8: case V4L2_CTRL_TYPE_U16: case V4L2_CTRL_TYPE_U32: default: value = strtoul(val, &endptr, 0); break; } if (endptr == NULL) return -EINVAL; switch (query->type) { case V4L2_CTRL_TYPE_U8: ctrl->p_u8[i] = value; break; case V4L2_CTRL_TYPE_U16: ctrl->p_u16[i] = value; break; case V4L2_CTRL_TYPE_U32: ctrl->p_u32[i] = value; break; } val = endptr; for ( ; isspace(*val); ++val) { } if (*val != ',') break; val++; } if (i < query->elems - 1) return -EINVAL; for ( ; isspace(*val); ++val) { } if (*val++ != '}') return -EINVAL; for ( ; isspace(*val); ++val) { } if (*val++ != '\0') return -EINVAL; return 0; } static void video_set_control(struct device *dev, unsigned int id, const char *val) { struct v4l2_query_ext_ctrl query; struct v4l2_ext_control ctrl; char *endptr; int ret; ret = query_control(dev, id, &query); if (ret < 0) return; memset(&ctrl, 0, sizeof(ctrl)); if (query.nr_of_dims == 0) { switch (query.type) { case V4L2_CTRL_TYPE_INTEGER: case V4L2_CTRL_TYPE_BOOLEAN: case V4L2_CTRL_TYPE_MENU: case V4L2_CTRL_TYPE_INTEGER_MENU: case V4L2_CTRL_TYPE_BITMASK: ctrl.value = strtol(val, &endptr, 0); if (*endptr != 0) { printf("Invalid control value '%s'\n", val); return; } break; case V4L2_CTRL_TYPE_INTEGER64: ctrl.value64 = strtoll(val, &endptr, 0); if (*endptr != 0) { printf("Invalid control value '%s'\n", val); return; } break; case V4L2_CTRL_TYPE_STRING: ctrl.size = query.elem_size; ctrl.ptr = malloc(ctrl.size); if (ctrl.ptr == NULL) return; strncpy(ctrl.string, val, ctrl.size); break; default: printf("Unsupported control type\n"); return; } } else { switch (query.type) { case V4L2_CTRL_TYPE_U8: case V4L2_CTRL_TYPE_U16: case V4L2_CTRL_TYPE_U32: ctrl.size = query.elem_size * query.elems; ctrl.ptr = malloc(ctrl.size); if (ctrl.ptr == NULL) return; ret = video_parse_control_array(&query, &ctrl, val); if (ret < 0) { printf("Invalid compound control value '%s'\n", val); return; } break; default: printf("Unsupported control type %u\n", query.type); break; } } ret = set_control(dev, &query, &ctrl); if (ret < 0) { printf("unable to set control 0x%8.8x: %s (%d).\n", id, strerror(errno), errno); } else { printf("Control 0x%08x set to %s, is ", id, val); video_print_control_value(&query, &ctrl); printf("\n"); } if ((query.flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) && ctrl.ptr) free(ctrl.ptr); } static void video_list_controls(struct device *dev) { int ret; ret = video_for_each_control(dev, __video_get_control); if (ret < 0) return; if (ret) printf("%u control%s found.\n", ret, ret > 1 ? "s" : ""); else printf("No control found.\n"); } static int video_reset_control(struct device *dev, const struct v4l2_query_ext_ctrl *query) { struct v4l2_ext_control ctrl = { .value = query->default_value, }; int ret; if (query->flags & V4L2_CTRL_FLAG_DISABLED) return 0; if (query->type == V4L2_CTRL_TYPE_CTRL_CLASS) return 0; /* * For controls with payloads the default value must be retrieved with * a VIDIOC_G_EXT_CTRLS call. If the V4L2_CTRL_WHICH_DEF_VAL flag isn't * supported by the kernel (it got introduced in v4.5, while controls * with payloads were introduced in v3.17), there isn't much we can do, * so skip resetting the control. */ if (query->flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) { ret = get_control(dev, query, &ctrl, V4L2_CTRL_WHICH_DEF_VAL); if (ret < 0) return 0; } set_control(dev, query, &ctrl); if (query->flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) free(ctrl.ptr); return 1; } static void video_reset_controls(struct device *dev) { int ret; ret = video_for_each_control(dev, video_reset_control); if (ret < 0) return; if (ret) printf("%u control%s reset.\n", ret, ret > 1 ? "s" : ""); } static void video_enum_frame_intervals(struct device *dev, __u32 pixelformat, unsigned int width, unsigned int height) { struct v4l2_frmivalenum ival; unsigned int i; int ret; for (i = 0; ; ++i) { memset(&ival, 0, sizeof ival); ival.index = i; ival.pixel_format = pixelformat; ival.width = width; ival.height = height; ret = ioctl(dev->fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival); if (ret < 0) break; if (i != ival.index) printf("Warning: driver returned wrong ival index " "%u.\n", ival.index); if (pixelformat != ival.pixel_format) printf("Warning: driver returned wrong ival pixel " "format %08x.\n", ival.pixel_format); if (width != ival.width) printf("Warning: driver returned wrong ival width " "%u.\n", ival.width); if (height != ival.height) printf("Warning: driver returned wrong ival height " "%u.\n", ival.height); if (i != 0) printf(", "); switch (ival.type) { case V4L2_FRMIVAL_TYPE_DISCRETE: printf("%u/%u", ival.discrete.numerator, ival.discrete.denominator); break; case V4L2_FRMIVAL_TYPE_CONTINUOUS: printf("%u/%u - %u/%u", ival.stepwise.min.numerator, ival.stepwise.min.denominator, ival.stepwise.max.numerator, ival.stepwise.max.denominator); return; case V4L2_FRMIVAL_TYPE_STEPWISE: printf("%u/%u - %u/%u (by %u/%u)", ival.stepwise.min.numerator, ival.stepwise.min.denominator, ival.stepwise.max.numerator, ival.stepwise.max.denominator, ival.stepwise.step.numerator, ival.stepwise.step.denominator); return; default: break; } } } static void video_enum_frame_sizes(struct device *dev, __u32 pixelformat) { struct v4l2_frmsizeenum frame; unsigned int i; int ret; for (i = 0; ; ++i) { memset(&frame, 0, sizeof frame); frame.index = i; frame.pixel_format = pixelformat; ret = ioctl(dev->fd, VIDIOC_ENUM_FRAMESIZES, &frame); if (ret < 0) break; if (i != frame.index) printf("Warning: driver returned wrong frame index " "%u.\n", frame.index); if (pixelformat != frame.pixel_format) printf("Warning: driver returned wrong frame pixel " "format %08x.\n", frame.pixel_format); switch (frame.type) { case V4L2_FRMSIZE_TYPE_DISCRETE: printf("\tFrame size: %ux%u (", frame.discrete.width, frame.discrete.height); video_enum_frame_intervals(dev, frame.pixel_format, frame.discrete.width, frame.discrete.height); printf(")\n"); break; case V4L2_FRMSIZE_TYPE_CONTINUOUS: printf("\tFrame size: %ux%u - %ux%u (", frame.stepwise.min_width, frame.stepwise.min_height, frame.stepwise.max_width, frame.stepwise.max_height); video_enum_frame_intervals(dev, frame.pixel_format, frame.stepwise.max_width, frame.stepwise.max_height); printf(")\n"); break; case V4L2_FRMSIZE_TYPE_STEPWISE: printf("\tFrame size: %ux%u - %ux%u (by %ux%u) (\n", frame.stepwise.min_width, frame.stepwise.min_height, frame.stepwise.max_width, frame.stepwise.max_height, frame.stepwise.step_width, frame.stepwise.step_height); video_enum_frame_intervals(dev, frame.pixel_format, frame.stepwise.max_width, frame.stepwise.max_height); printf(")\n"); break; default: break; } } } static void video_enum_formats(struct device *dev, enum v4l2_buf_type type) { struct v4l2_fmtdesc fmt; unsigned int i; int ret; for (i = 0; ; ++i) { memset(&fmt, 0, sizeof fmt); fmt.index = i; fmt.type = type; ret = ioctl(dev->fd, VIDIOC_ENUM_FMT, &fmt); if (ret < 0) break; if (i != fmt.index) printf("Warning: driver returned wrong format index " "%u.\n", fmt.index); if (type != fmt.type) printf("Warning: driver returned wrong format type " "%u.\n", fmt.type); printf("\tFormat %u: %s (%08x)\n", i, v4l2_format_name(fmt.pixelformat), fmt.pixelformat); printf("\tType: %s (%u)\n", v4l2_buf_type_name(fmt.type), fmt.type); printf("\tName: %.32s\n", fmt.description); video_enum_frame_sizes(dev, fmt.pixelformat); printf("\n"); } } static void video_enum_inputs(struct device *dev) { struct v4l2_input input; unsigned int i; int ret; for (i = 0; ; ++i) { memset(&input, 0, sizeof input); input.index = i; ret = ioctl(dev->fd, VIDIOC_ENUMINPUT, &input); if (ret < 0) break; if (i != input.index) printf("Warning: driver returned wrong input index " "%u.\n", input.index); printf("\tInput %u: %s.\n", i, input.name); } printf("\n"); } static int video_get_input(struct device *dev) { __u32 input; int ret; ret = ioctl(dev->fd, VIDIOC_G_INPUT, &input); if (ret < 0) { printf("Unable to get current input: %s (%d).\n", strerror(errno), errno); return ret; } return input; } static int video_set_input(struct device *dev, unsigned int input) { __u32 _input = input; int ret; ret = ioctl(dev->fd, VIDIOC_S_INPUT, &_input); if (ret < 0) printf("Unable to select input %u: %s (%d).\n", input, strerror(errno), errno); return ret; } static int video_set_quality(struct device *dev, unsigned int quality) { struct v4l2_jpegcompression jpeg; int ret; if (quality == (unsigned int)-1) return 0; memset(&jpeg, 0, sizeof jpeg); jpeg.quality = quality; ret = ioctl(dev->fd, VIDIOC_S_JPEGCOMP, &jpeg); if (ret < 0) { printf("Unable to set quality to %u: %s (%d).\n", quality, strerror(errno), errno); return ret; } ret = ioctl(dev->fd, VIDIOC_G_JPEGCOMP, &jpeg); if (ret >= 0) printf("Quality set to %u\n", jpeg.quality); return 0; } static int video_load_test_pattern(struct device *dev, const char *filename) { unsigned int plane; unsigned int size; int fd = -1; int ret; if (filename != NULL) { fd = open(filename, O_RDONLY); if (fd == -1) { printf("Unable to open test pattern file '%s': %s (%d).\n", filename, strerror(errno), errno); return -errno; } } /* Load or generate the test pattern */ for (plane = 0; plane < dev->num_planes; plane++) { size = dev->buffers[0].size[plane]; dev->pattern[plane] = malloc(size); if (dev->pattern[plane] == NULL) { ret = -ENOMEM; goto done; } if (filename != NULL) { ret = read(fd, dev->pattern[plane], size); if (ret != (int)size && dev->plane_fmt[plane].bytesperline != 0) { printf("Test pattern file size %u doesn't match image size %u\n", ret, size); ret = -EINVAL; goto done; } size = ret; } else { uint8_t *data = dev->pattern[plane]; unsigned int i; if (dev->plane_fmt[plane].bytesperline == 0) { printf("Compressed format detected for plane %u and no test pattern filename given.\n" "The test pattern can't be generated automatically.\n", plane); ret = -EINVAL; goto done; } for (i = 0; i < dev->plane_fmt[plane].sizeimage; ++i) *data++ = i; } dev->patternsize[plane] = size; } ret = 0; done: if (fd != -1) close(fd); return ret; } static int video_prepare_capture(struct device *dev, int nbufs, unsigned int offset, const char *filename, enum buffer_fill_mode fill) { unsigned int padding; int ret; /* Allocate and map buffers. */ padding = (fill & BUFFER_FILL_PADDING) ? 4096 : 0; if ((ret = video_alloc_buffers(dev, nbufs, offset, padding)) < 0) return ret; if (video_is_output(dev)) { ret = video_load_test_pattern(dev, filename); if (ret < 0) return ret; } return 0; } static int video_queue_all_buffers(struct device *dev, enum buffer_fill_mode fill) { unsigned int i; int ret; /* Queue the buffers. */ for (i = 0; i < dev->nbufs; ++i) { ret = video_queue_buffer(dev, i, fill); if (ret < 0) return ret; } return 0; } static void video_verify_buffer(struct device *dev, struct v4l2_buffer *buf) { struct buffer *buffer = &dev->buffers[buf->index]; unsigned int plane; unsigned int i; for (plane = 0; plane < dev->num_planes; ++plane) { const uint8_t *data = buffer->mem[plane] + buffer->size[plane]; unsigned int errors = 0; unsigned int dirty = 0; unsigned int length; if (video_is_mplane(dev)) length = buf->m.planes[plane].bytesused; else length = buf->bytesused; if (dev->plane_fmt[plane].sizeimage && dev->plane_fmt[plane].sizeimage != length) printf("Warning: bytes used %u != image size %u for plane %u\n", length, dev->plane_fmt[plane].sizeimage, plane); if (buffer->padding[plane] == 0) continue; for (i = 0; i < buffer->padding[plane]; ++i) { if (data[i] != 0x55) { errors++; dirty = i + 1; } } if (errors) { printf("Warning: %u bytes overwritten among %u first padding bytes for plane %u\n", errors, dirty, plane); dirty = (dirty + 15) & ~15; dirty = dirty > 32 ? 32 : dirty; for (i = 0; i < dirty; ++i) { printf("%02x ", data[i]); if (i % 16 == 15) printf("\n"); } } } } static void video_save_image(struct device *dev, struct v4l2_buffer *buf, const char *pattern, unsigned int sequence) { unsigned int size; unsigned int i; char *filename; const char *p; bool append; int ret = 0; int fd; size = strlen(pattern); filename = malloc(size + 12); if (filename == NULL) return; p = strchr(pattern, '#'); if (p != NULL) { sprintf(filename, "%.*s%06u%s", (int)(p - pattern), pattern, sequence, p + 1); append = false; } else { strcpy(filename, pattern); append = true; } fd = open(filename, O_CREAT | O_WRONLY | (append ? O_APPEND : O_TRUNC), S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH); free(filename); if (fd == -1) return; for (i = 0; i < dev->num_planes; i++) { void *data = dev->buffers[buf->index].mem[i]; unsigned int length; if (video_is_mplane(dev)) { length = buf->m.planes[i].bytesused; if (!dev->write_data_prefix) { data += buf->m.planes[i].data_offset; length -= buf->m.planes[i].data_offset; } } else { length = buf->bytesused; } ret = write(fd, data, length); if (ret < 0) { printf("write error: %s (%d)\n", strerror(errno), errno); break; } else if (ret != (int)length) printf("write error: only %d bytes written instead of %u\n", ret, length); } close(fd); } unsigned int video_buffer_bytes_used(struct device *dev, struct v4l2_buffer *buf) { unsigned int bytesused = 0; unsigned int i; if (!video_is_mplane(dev)) return buf->bytesused; for (i = 0; i < dev->num_planes; i++) bytesused += buf->m.planes[i].bytesused; return bytesused; } static int video_do_capture(struct device *dev, unsigned int nframes, unsigned int skip, unsigned int delay, unsigned int pause, const char *pattern, int do_requeue_last, int do_queue_late, enum buffer_fill_mode fill) { struct v4l2_plane planes[VIDEO_MAX_PLANES]; struct v4l2_buffer buf; struct timespec start; struct timeval last; struct timespec ts; unsigned int size; unsigned int i; double bps; double fps; int ret; if (pause == 0) pause_wait(); /* Start streaming. */ ret = video_enable(dev, 1); if (ret < 0) goto done; if (do_queue_late) video_queue_all_buffers(dev, fill); size = 0; clock_gettime(CLOCK_MONOTONIC, &start); last.tv_sec = start.tv_sec; last.tv_usec = start.tv_nsec / 1000; for (i = 0; i < nframes; ++i) { const char *ts_type, *ts_source; /* Dequeue a buffer. */ memset(&buf, 0, sizeof buf); memset(planes, 0, sizeof planes); buf.type = dev->type; buf.memory = dev->memtype; buf.length = VIDEO_MAX_PLANES; buf.m.planes = planes; ret = ioctl(dev->fd, VIDIOC_DQBUF, &buf); if (ret < 0) { if (errno != EIO) { printf("Unable to dequeue buffer: %s (%d).\n", strerror(errno), errno); goto done; } buf.type = dev->type; buf.memory = dev->memtype; if (dev->memtype == V4L2_MEMORY_USERPTR) video_buffer_fill_userptr(dev, &dev->buffers[i], &buf); } if (video_is_capture(dev)) video_verify_buffer(dev, &buf); size += buf.bytesused; fps = (buf.timestamp.tv_sec - last.tv_sec) * 1000000 + buf.timestamp.tv_usec - last.tv_usec; fps = fps ? 1000000.0 / fps : 0.0; clock_gettime(CLOCK_MONOTONIC, &ts); get_ts_flags(buf.flags, &ts_type, &ts_source); printf("%u (%u) [%c] %s %u %u B %" PRId64 ".%06" PRId64 " %" PRId64 ".%06" PRId64 " %.3f fps ts %s/%s\n", i, buf.index, (buf.flags & V4L2_BUF_FLAG_ERROR) ? 'E' : '-', v4l2_field_name(buf.field), buf.sequence, video_buffer_bytes_used(dev, &buf), (int64_t)buf.timestamp.tv_sec, (int64_t)buf.timestamp.tv_usec, (int64_t)ts.tv_sec, (int64_t)(ts.tv_nsec / 1000), fps, ts_type, ts_source); last = buf.timestamp; /* Save the image. */ if (video_is_capture(dev) && pattern && !skip) video_save_image(dev, &buf, pattern, i); if (skip) --skip; /* Requeue the buffer. */ if (delay > 0) usleep(delay * 1000); fflush(stdout); if (pause == i + 1) pause_wait(); if (i >= nframes - dev->nbufs && !do_requeue_last) continue; ret = video_queue_buffer(dev, buf.index, fill); if (ret < 0) { printf("Unable to requeue buffer: %s (%d).\n", strerror(errno), errno); goto done; } } /* Stop streaming. */ ret = video_enable(dev, 0); if (ret < 0) return ret; if (nframes == 0) { printf("No frames captured.\n"); goto done; } if (ts.tv_sec == start.tv_sec && ts.tv_nsec == start.tv_nsec) { printf("Captured %u frames (%u bytes) 0 seconds\n", i, size); goto done; } ts.tv_sec -= start.tv_sec; ts.tv_nsec -= start.tv_nsec; if (ts.tv_nsec < 0) { ts.tv_sec--; ts.tv_nsec += 1000000000; } bps = size/(ts.tv_nsec/1000.0+1000000.0*ts.tv_sec)*1000000.0; fps = i/(ts.tv_nsec/1000.0+1000000.0*ts.tv_sec)*1000000.0; printf("Captured %u frames in %" PRId64 ".%06" PRId64 " seconds (%f fps, %f B/s).\n", i, (int64_t)ts.tv_sec, (int64_t)(ts.tv_nsec / 1000), fps, bps); done: video_free_buffers(dev); return ret; } #define V4L_BUFFERS_DEFAULT 8 #define V4L_BUFFERS_MAX 32 static void usage(const char *argv0) { printf("Usage: %s [options] device\n", argv0); printf("Supported options:\n"); printf("-B, --buffer-type Buffer type (\"capture\", \"output\",\n"); printf(" \"capture-mplane\" or \"output-mplane\")\n"); printf("-c, --capture[=nframes] Capture frames\n"); printf("-C, --check-overrun Verify dequeued frames for buffer overrun\n"); printf("-d, --delay Delay (in ms) before requeuing buffers\n"); printf("-f, --format format Set the video format\n"); printf(" use -f help to list the supported formats\n"); printf("-F, --file[=name] Read/write frames from/to disk\n"); printf("\tFor video capture devices, the first '#' character in the file name is\n"); printf("\texpanded to the frame sequence number. The default file name is\n"); printf("\t'frame-#.bin'.\n"); printf("-h, --help Show this help screen\n"); printf("-i, --input input Select the video input\n"); printf("-I, --fill-frames Fill frames with check pattern before queuing them\n"); printf("-l, --list-controls List available controls\n"); printf("-n, --nbufs n Set the number of video buffers\n"); printf("-p, --pause[=n] Pause after n frames (0 if n isn't specified)\n"); printf("-q, --quality n MJPEG quality (0-100)\n"); printf("-r, --get-control ctrl Get control 'ctrl'\n"); printf("-R, --realtime=[priority] Enable realtime RR scheduling\n"); printf("-s, --size WxH Set the frame size\n"); printf("-t, --time-per-frame num/denom Set the time per frame (eg. 1/25 = 25 fps)\n"); printf("-u, --userptr Use the user pointers streaming method\n"); printf("-w, --set-control 'ctrl value' Set control 'ctrl' to 'value'\n"); printf(" --buffer-prefix Write portions of buffer before data_offset\n"); printf(" --buffer-size Buffer size in bytes\n"); printf(" --enum-formats Enumerate formats\n"); printf(" --enum-inputs Enumerate inputs\n"); printf(" --colorspace colorspace Set the colorspace\n"); printf(" --encoding encoding Set the YCbCr encoding\n"); printf(" --fd Use a numeric file descriptor insted of a device\n"); printf(" --field field Set the format field order\n"); printf("\tValid values for field are none, top, bottom, interlaced, seq-tb, seq-bt,\n"); printf("\talternate, interlaced-tb and interlaced-bt.\n"); printf(" --log-status Log device status\n"); printf(" --no-query Don't query capabilities on open\n"); printf(" --offset User pointer buffer offset from page start\n"); printf(" --premultiplied Color components are premultiplied by alpha value\n"); printf(" --quantization quantization Set the quantization\n"); printf(" --queue-late Queue buffers after streamon, not before\n"); printf(" --requeue-last Requeue the last buffers before streamoff\n"); printf(" --reset-controls Reset all available controls to their default value\n"); printf(" --timestamp-source Set timestamp source on output buffers [eof, soe]\n"); printf(" --skip n Skip the first n frames\n"); printf(" --sleep-forever Sleep forever after configuring the device\n"); printf(" --stride value Line stride in bytes\n"); printf(" --xfer-func xfer-func Set the transfer function\n"); printf("\nValid fields values:\n"); list_colorspaces(); list_encodings(); list_quantizations(); list_xfer_funcs(); } #define OPT_ENUM_FORMATS 256 #define OPT_ENUM_INPUTS 257 #define OPT_SKIP_FRAMES 258 #define OPT_NO_QUERY 259 #define OPT_SLEEP_FOREVER 260 #define OPT_USERPTR_OFFSET 261 #define OPT_REQUEUE_LAST 262 #define OPT_STRIDE 263 #define OPT_FD 264 #define OPT_TSTAMP_SRC 265 #define OPT_FIELD 266 #define OPT_LOG_STATUS 267 #define OPT_BUFFER_SIZE 268 #define OPT_PREMULTIPLIED 269 #define OPT_QUEUE_LATE 270 #define OPT_DATA_PREFIX 271 #define OPT_RESET_CONTROLS 272 #define OPT_COLORSPACE 273 #define OPT_XFER_FUNC 274 #define OPT_ENCODING 275 #define OPT_QUANTIZATION 276 static const struct option opts[] = { {"buffer-size", 1, 0, OPT_BUFFER_SIZE}, {"buffer-type", 1, 0, 'B'}, {"capture", 2, 0, 'c'}, {"check-overrun", 0, 0, 'C'}, {"colorspace", 1, 0, OPT_COLORSPACE}, {"data-prefix", 0, 0, OPT_DATA_PREFIX}, {"delay", 1, 0, 'd'}, {"encoding", 1, 0, OPT_ENCODING}, {"enum-formats", 0, 0, OPT_ENUM_FORMATS}, {"enum-inputs", 0, 0, OPT_ENUM_INPUTS}, {"fd", 1, 0, OPT_FD}, {"field", 1, 0, OPT_FIELD}, {"file", 2, 0, 'F'}, {"fill-frames", 0, 0, 'I'}, {"format", 1, 0, 'f'}, {"help", 0, 0, 'h'}, {"input", 1, 0, 'i'}, {"list-controls", 0, 0, 'l'}, {"log-status", 0, 0, OPT_LOG_STATUS}, {"nbufs", 1, 0, 'n'}, {"no-query", 0, 0, OPT_NO_QUERY}, {"offset", 1, 0, OPT_USERPTR_OFFSET}, {"pause", 2, 0, 'p'}, {"premultiplied", 0, 0, OPT_PREMULTIPLIED}, {"quality", 1, 0, 'q'}, {"quantization", 1, 0, OPT_QUANTIZATION}, {"queue-late", 0, 0, OPT_QUEUE_LATE}, {"get-control", 1, 0, 'r'}, {"requeue-last", 0, 0, OPT_REQUEUE_LAST}, {"reset-controls", 0, 0, OPT_RESET_CONTROLS}, {"realtime", 2, 0, 'R'}, {"size", 1, 0, 's'}, {"set-control", 1, 0, 'w'}, {"skip", 1, 0, OPT_SKIP_FRAMES}, {"sleep-forever", 0, 0, OPT_SLEEP_FOREVER}, {"stride", 1, 0, OPT_STRIDE}, {"time-per-frame", 1, 0, 't'}, {"timestamp-source", 1, 0, OPT_TSTAMP_SRC}, {"userptr", 0, 0, 'u'}, {"xfer-func", 1, 0, OPT_XFER_FUNC}, {0, 0, 0, 0} }; int main(int argc, char *argv[]) { struct sched_param sched; struct device dev = { 0 }; int ret; /* Options parsings */ const struct v4l2_format_info *info; /* Use video capture by default if query isn't done. */ unsigned int capabilities = V4L2_CAP_VIDEO_CAPTURE; int do_file = 0, do_capture = 0; int do_set_time_per_frame = 0; int do_enum_formats = 0, do_set_format = 0; int do_enum_inputs = 0, do_set_input = 0; int do_list_controls = 0, do_get_control = 0, do_set_control = 0; int do_reset_controls = 0; int do_sleep_forever = 0, do_requeue_last = 0; int do_rt = 0, do_log_status = 0; int no_query = 0, do_queue_late = 0; int do_csc = 0; char *endptr; int c; /* Controls */ int ctrl_name = 0; const char *ctrl_value = NULL; /* Video buffers */ enum v4l2_memory memtype = V4L2_MEMORY_MMAP; unsigned int pixelformat = V4L2_PIX_FMT_YUYV; unsigned int fmt_flags = 0; unsigned int width = 640; unsigned int height = 480; unsigned int stride = 0; unsigned int buffer_size = 0; unsigned int nbufs = V4L_BUFFERS_DEFAULT; unsigned int input = 0; unsigned int skip = 0; unsigned int quality = (unsigned int)-1; unsigned int userptr_offset = 0; unsigned int pause_count = (unsigned int)-1; struct v4l2_fract time_per_frame = {1, 25}; enum v4l2_field field = V4L2_FIELD_ANY; enum v4l2_colorspace colorspace = V4L2_COLORSPACE_DEFAULT; enum v4l2_xfer_func xfer_func = V4L2_XFER_FUNC_DEFAULT; enum v4l2_ycbcr_encoding encoding = V4L2_YCBCR_ENC_DEFAULT; enum v4l2_quantization quantization = V4L2_QUANTIZATION_DEFAULT; /* Capture loop */ enum buffer_fill_mode fill_mode = BUFFER_FILL_NONE; unsigned int delay = 0, nframes = (unsigned int)-1; const char *filename = "frame-#.bin"; unsigned int rt_priority = 1; video_init(&dev); opterr = 0; while ((c = getopt_long(argc, argv, "B:c::Cd:f:F::hi:Iln:p::q:r:R::s:t:uw:", opts, NULL)) != -1) { switch (c) { case 'B': ret = v4l2_buf_type_from_string(optarg); if (ret == -1) { printf("Bad buffer type \"%s\"\n", optarg); return 1; } video_set_buf_type(&dev, ret); break; case 'c': do_capture = 1; if (optarg) nframes = atoi(optarg); break; case 'C': fill_mode |= BUFFER_FILL_PADDING; break; case 'd': delay = atoi(optarg); break; case 'f': if (!strcmp("help", optarg)) { list_formats(); return 0; } do_set_format = 1; info = v4l2_format_by_name(optarg); if (info == NULL) { printf("Unsupported video format '%s'\n", optarg); return 1; } pixelformat = info->fourcc; break; case 'F': do_file = 1; if (optarg) filename = optarg; break; case 'h': usage(argv[0]); return 0; case 'i': do_set_input = 1; input = atoi(optarg); break; case 'I': fill_mode |= BUFFER_FILL_FRAME; break; case 'l': do_list_controls = 1; break; case 'n': nbufs = atoi(optarg); if (nbufs > V4L_BUFFERS_MAX) nbufs = V4L_BUFFERS_MAX; break; case 'p': if (optarg) pause_count = atoi(optarg); else pause_count = 0; break; case 'q': quality = atoi(optarg); break; case 'r': ctrl_name = strtol(optarg, &endptr, 0); if (*endptr != 0) { printf("Invalid control name '%s'\n", optarg); return 1; } do_get_control = 1; break; case 'R': do_rt = 1; if (optarg) rt_priority = atoi(optarg); break; case 's': do_set_format = 1; width = strtol(optarg, &endptr, 10); if (*endptr != 'x' || endptr == optarg) { printf("Invalid size '%s'\n", optarg); return 1; } height = strtol(endptr + 1, &endptr, 10); if (*endptr != 0) { printf("Invalid size '%s'\n", optarg); return 1; } break; case 't': do_set_time_per_frame = 1; time_per_frame.numerator = strtol(optarg, &endptr, 10); if (*endptr != '/' || endptr == optarg) { printf("Invalid time per frame '%s'\n", optarg); return 1; } time_per_frame.denominator = strtol(endptr + 1, &endptr, 10); if (*endptr != 0) { printf("Invalid time per frame '%s'\n", optarg); return 1; } break; case 'u': memtype = V4L2_MEMORY_USERPTR; break; case 'w': ctrl_name = strtol(optarg, &endptr, 0); if (*endptr != ' ' || endptr == optarg) { printf("Invalid control name '%s'\n", optarg); return 1; } ctrl_value = endptr + 1; do_set_control = 1; break; case OPT_BUFFER_SIZE: buffer_size = atoi(optarg); break; case OPT_COLORSPACE: ret = v4l2_colorspace_from_string(optarg); if (ret < 0) { printf("Invalid colorspace value '%s'\n", optarg); return 1; } colorspace = ret; do_csc = 1; break; case OPT_DATA_PREFIX: dev.write_data_prefix = true; break; case OPT_ENCODING: ret = v4l2_encoding_from_string(optarg); if (ret < 0) { printf("Invalid encoding value '%s'\n", optarg); return 1; } encoding = ret; do_csc = 1; break; case OPT_ENUM_FORMATS: do_enum_formats = 1; break; case OPT_ENUM_INPUTS: do_enum_inputs = 1; break; case OPT_FD: ret = atoi(optarg); if (ret < 0) { printf("Bad file descriptor %d\n", ret); return 1; } printf("Using file descriptor %d\n", ret); video_set_fd(&dev, ret); break; case OPT_FIELD: field = v4l2_field_from_string(optarg); if (field == (enum v4l2_field)-1) { printf("Invalid field order '%s'\n", optarg); return 1; } break; case OPT_LOG_STATUS: do_log_status = 1; break; case OPT_NO_QUERY: no_query = 1; break; case OPT_PREMULTIPLIED: fmt_flags |= V4L2_PIX_FMT_FLAG_PREMUL_ALPHA; break; case OPT_QUANTIZATION: ret = v4l2_quantization_from_string(optarg); if (ret < 0) { printf("Invalid quantization value '%s'\n", optarg); return 1; } quantization = ret; do_csc = 1; break; case OPT_QUEUE_LATE: do_queue_late = 1; break; case OPT_RESET_CONTROLS: do_reset_controls = 1; break; case OPT_REQUEUE_LAST: do_requeue_last = 1; break; case OPT_SKIP_FRAMES: skip = atoi(optarg); break; case OPT_SLEEP_FOREVER: do_sleep_forever = 1; break; case OPT_STRIDE: stride = atoi(optarg); break; case OPT_TSTAMP_SRC: if (!strcmp(optarg, "eof")) { dev.buffer_output_flags |= V4L2_BUF_FLAG_TSTAMP_SRC_EOF; } else if (!strcmp(optarg, "soe")) { dev.buffer_output_flags |= V4L2_BUF_FLAG_TSTAMP_SRC_SOE; } else { printf("Invalid timestamp source %s\n", optarg); return 1; } break; case OPT_USERPTR_OFFSET: userptr_offset = atoi(optarg); break; case OPT_XFER_FUNC: ret = v4l2_xfer_func_from_string(optarg); if (ret < 0) { printf("Invalid xfer-func value '%s'\n", optarg); return 1; } xfer_func = ret; do_csc = 1; break; default: printf("Invalid option -%c\n", c); printf("Run %s -h for help.\n", argv[0]); return 1; } } if (pause_count != (unsigned int)-1) { ret = pause_init(); if (ret < 0) return 1; } if ((fill_mode & BUFFER_FILL_PADDING) && memtype != V4L2_MEMORY_USERPTR) { printf("Buffer overrun can only be checked in USERPTR mode.\n"); return 1; } if (!do_file) filename = NULL; if (!video_has_fd(&dev)) { if (optind >= argc) { usage(argv[0]); return 1; } ret = video_open(&dev, argv[optind]); if (ret < 0) return 1; } if (!no_query) { ret = video_querycap(&dev, &capabilities); if (ret < 0) return 1; } ret = cap_get_buf_type(capabilities); if (ret < 0) return 1; if (!video_has_valid_buf_type(&dev)) video_set_buf_type(&dev, ret); dev.memtype = memtype; if (do_log_status) video_log_status(&dev); if (do_get_control) { struct v4l2_query_ext_ctrl query; ret = query_control(&dev, ctrl_name, &query); if (ret == 0) video_get_control(&dev, &query, false); } if (do_set_control) video_set_control(&dev, ctrl_name, ctrl_value); if (do_list_controls) video_list_controls(&dev); if (do_reset_controls) video_reset_controls(&dev); if (do_enum_formats) { printf("- Available formats:\n"); video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_CAPTURE); video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_OUTPUT); video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE); video_enum_formats(&dev, V4L2_BUF_TYPE_VIDEO_OVERLAY); video_enum_formats(&dev, V4L2_BUF_TYPE_META_CAPTURE); video_enum_formats(&dev, V4L2_BUF_TYPE_META_OUTPUT); } if (do_enum_inputs) { printf("- Available inputs:\n"); video_enum_inputs(&dev); } if (do_set_input) { video_set_input(&dev, input); ret = video_get_input(&dev); printf("Input %d selected\n", ret); } /* Set the video format. */ if (do_set_format) { if (do_csc && video_is_capture(&dev)) fmt_flags |= V4L2_PIX_FMT_FLAG_SET_CSC; if (video_set_format(&dev, width, height, pixelformat, stride, buffer_size, field, colorspace, xfer_func, encoding, quantization, fmt_flags) < 0) { video_close(&dev); return 1; } } if (!no_query || do_capture) video_get_format(&dev); /* Set the frame rate. */ if (do_set_time_per_frame) { if (video_set_framerate(&dev, &time_per_frame) < 0) { video_close(&dev); return 1; } } while (do_sleep_forever) sleep(1000); if (!do_capture) { video_close(&dev); return 0; } /* Set the compression quality. */ if (video_set_quality(&dev, quality) < 0) { video_close(&dev); return 1; } if (video_prepare_capture(&dev, nbufs, userptr_offset, filename, fill_mode)) { video_close(&dev); return 1; } if (!do_queue_late && video_queue_all_buffers(&dev, fill_mode)) { video_close(&dev); return 1; } if (do_rt) { memset(&sched, 0, sizeof sched); sched.sched_priority = rt_priority; ret = sched_setscheduler(0, SCHED_RR, &sched); if (ret < 0) printf("Failed to select RR scheduler: %s (%d)\n", strerror(errno), errno); } if (video_do_capture(&dev, nframes, skip, delay, pause_count, filename, do_requeue_last, do_queue_late, fill_mode) < 0) { video_close(&dev); return 1; } video_close(&dev); return 0; }