Updating prebuilts and/or headers

77c130ed3990efc3a31750d9380ded85e2b660d1 - v4l2_nv_extensions.h
c2b683c77f90fbca8b8c2e0b59efee45db468369 - nvbufsurface.h
83e21353d1fe20cba4bd630c3b41c1615b8268ed - nvbuf_utils.h
80b3faf4a2e03de49089b320f0cf005d9a0a54ad - gst-v4l2/gstv4l2bufferpool.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
a6f39a3f80f770833a35db7bf41e2ae5de9b6ace - gst-v4l2/sei_parse.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
4311b3fbc6e5675353491a6fab52577ed36f499d - gst-v4l2/gstv4l2.c
b724de78f364b0855abfbbaf6fda9ae51ecbfd00 - gst-v4l2/gstv4l2videoenc.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
c773f1e03097c888a3fda59ace02ea622e101d13 - gst-v4l2/Makefile
0eabbf0521068ee26f5c620698aac456a2b1d265 - gst-v4l2/gstv4l2object.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
85ff961e6bdfb02907033709ee001bc250af8e03 - gst-v4l2/gstv4l2object.h
fbdc964b443c64094f5b3f6e2bcd29697bc27694 - gst-v4l2/gstv4l2videodec.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
c08d733da85d44332a0b7b6a9183308d307d160c - gst-v4l2/gstv4l2videodec.c
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
605f3b6fd4cc1f0e790f5ab50c9e2d87dfea9523 - gst-v4l2/gstv4l2videoenc.h
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
ed77613908dddf791481ea198dfd75f988684226 - gst-v4l2/gstv4l2allocator.c
4a047575250eb3ccb6db1947ed36e9562fe000af - gst-v4l2/gstv4l2h265enc.c
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h

Change-Id: I317d1220d8119b764a2a5dbe1d5796b1c8f726ff
This commit is contained in: svcmobrel-release
Date: 2022-08-15 08:53:41 -07:00
parent 7a8ebb805b
commit 6111720f79
40 changed files with 27966 additions and 0 deletions

commitFile.txt Normal file

@@ -0,0 +1,39 @@
Updating prebuilts and/or headers
77c130ed3990efc3a31750d9380ded85e2b660d1 - v4l2_nv_extensions.h
c2b683c77f90fbca8b8c2e0b59efee45db468369 - nvbufsurface.h
83e21353d1fe20cba4bd630c3b41c1615b8268ed - nvbuf_utils.h
80b3faf4a2e03de49089b320f0cf005d9a0a54ad - gst-v4l2/gstv4l2bufferpool.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
a6f39a3f80f770833a35db7bf41e2ae5de9b6ace - gst-v4l2/sei_parse.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
4311b3fbc6e5675353491a6fab52577ed36f499d - gst-v4l2/gstv4l2.c
b724de78f364b0855abfbbaf6fda9ae51ecbfd00 - gst-v4l2/gstv4l2videoenc.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
c773f1e03097c888a3fda59ace02ea622e101d13 - gst-v4l2/Makefile
0eabbf0521068ee26f5c620698aac456a2b1d265 - gst-v4l2/gstv4l2object.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
85ff961e6bdfb02907033709ee001bc250af8e03 - gst-v4l2/gstv4l2object.h
fbdc964b443c64094f5b3f6e2bcd29697bc27694 - gst-v4l2/gstv4l2videodec.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
c08d733da85d44332a0b7b6a9183308d307d160c - gst-v4l2/gstv4l2videodec.c
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
605f3b6fd4cc1f0e790f5ab50c9e2d87dfea9523 - gst-v4l2/gstv4l2videoenc.h
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
ed77613908dddf791481ea198dfd75f988684226 - gst-v4l2/gstv4l2allocator.c
4a047575250eb3ccb6db1947ed36e9562fe000af - gst-v4l2/gstv4l2h265enc.c
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h

gst-v4l2/LICENSE.gst-nvvideo4linux2 Normal file

@@ -0,0 +1,397 @@
The software listed below is licensed under the terms of the LGPLv2
(see below). To obtain source code, contact oss-requests@nvidia.com.
gst-nvvideo4linux2 (libgstnvvideo4linux2.so)
------------------------------------
GNU LIBRARY GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1991 Free Software Foundation, Inc.
51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the library GPL. It is
numbered 2 because it goes with version 2 of the ordinary GPL.]
Preamble
The licenses for most software are designed to take away your freedom to share
and change it. By contrast, the GNU General Public Licenses are intended to
guarantee your freedom to share and change free software--to make sure the
software is free for all its users.
This license, the Library General Public License, applies to some specially
designated Free Software Foundation software, and to any other libraries whose
authors decide to use it. You can use it for your libraries, too.
When we speak of free software, we are referring to freedom, not price. Our
General Public Licenses are designed to make sure that you have the freedom to
distribute copies of free software (and charge for this service if you wish),
that you receive source code or can get it if you want it, that you can change
the software or use pieces of it in new free programs; and that you know you can
do these things.
To protect your rights, we need to make restrictions that forbid anyone to deny
you these rights or to ask you to surrender the rights. These restrictions
translate to certain responsibilities for you if you distribute copies of the
library, or if you modify it.
For example, if you distribute copies of the library, whether gratis or for a
fee, you must give the recipients all the rights that we gave you. You must make
sure that they, too, receive or can get the source code. If you link a program
with the library, you must provide complete object files to the recipients so
that they can relink them with the library, after making changes to the library
and recompiling it. And you must show them these terms so they know their
rights.
Our method of protecting your rights has two steps: (1) copyright the library,
and (2) offer you this license which gives you legal permission to copy,
distribute and/or modify the library.
Also, for each distributor's protection, we want to make certain that everyone
understands that there is no warranty for this free library. If the library is
modified by someone else and passed on, we want its recipients to know that what
they have is not the original version, so that any problems introduced by others
will not reflect on the original authors' reputations.
Finally, any free program is threatened constantly by software patents. We wish
to avoid the danger that companies distributing free software will individually
obtain patent licenses, thus in effect transforming the program into proprietary
software. To prevent this, we have made it clear that any patent must be
licensed for everyone's free use or not licensed at all.
Most GNU software, including some libraries, is covered by the ordinary GNU
General Public License, which was designed for utility programs. This license,
the GNU Library General Public License, applies to certain designated libraries.
This license is quite different from the ordinary one; be sure to read it in
full, and don't assume that anything in it is the same as in the ordinary
license.
The reason we have a separate public license for some libraries is that they
blur the distinction we usually make between modifying or adding to a program
and simply using it. Linking a program with a library, without changing the
library, is in some sense simply using the library, and is analogous to running
a utility program or application program. However, in a textual and legal sense,
the linked executable is a combined work, a derivative of the original library,
and the ordinary General Public License treats it as such.
Because of this blurred distinction, using the ordinary General Public License
for libraries did not effectively promote software sharing, because most
developers did not use the libraries. We concluded that weaker conditions might
promote sharing better.
However, unrestricted linking of non-free programs would deprive the users of
those programs of all benefit from the free status of the libraries themselves.
This Library General Public License is intended to permit developers of non-free
programs to use free libraries, while preserving your freedom as a user of such
programs to change the free libraries that are incorporated in them. (We have
not seen how to achieve this as regards changes in header files, but we have
achieved it as regards changes in the actual functions of the Library.) The hope
is that this will lead to faster development of free libraries.
The precise terms and conditions for copying, distribution and modification
follow. Pay close attention to the difference between a "work based on the
library" and a "work that uses the library". The former contains code derived
from the library, while the latter only works together with the library.
Note that it is possible for a library to be covered by the ordinary General
Public License rather than by this special one.
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library which contains a
notice placed by the copyright holder or other authorized party saying it may be
distributed under the terms of this Library General Public License (also called
"this License"). Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data prepared so as
to be conveniently linked with application programs (which use some of those
functions and data) to form executables.
The "Library", below, refers to any such software library or work which has been
distributed under these terms. A "work based on the Library" means either the
Library or any derivative work under copyright law: that is to say, a work
containing the Library or a portion of it, either verbatim or with modifications
and/or translated straightforwardly into another language. (Hereinafter,
translation is included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for making
modifications to it. For a library, complete source code means all the source
code for all modules it contains, plus any associated interface definition
files, plus the scripts used to control compilation and installation of the
library.
Activities other than copying, distribution and modification are not covered by
this License; they are outside its scope. The act of running a program using the
Library is not restricted, and output from such a program is covered only if its
contents constitute a work based on the Library (independent of the use of the
Library in a tool for writing it). Whether that is true depends on what the
Library does and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's complete source
code as you receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice and
disclaimer of warranty; keep intact all the notices that refer to this License
and to the absence of any warranty; and distribute a copy of this License along
with the Library.
You may charge a fee for the physical act of transferring a copy, and you may at
your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Library or any portion of it, thus
forming a work based on the Library, and copy and distribute such modifications
or work under the terms of Section 1 above, provided that you also meet all of
these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices stating that
you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no charge to all
third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a table of
data to be supplied by an application program that uses the facility, other than
as an argument passed when the facility is invoked, then you must make a good
faith effort to ensure that, in the event an application does not supply such
function or table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has a purpose
that is entirely well-defined independent of the application. Therefore,
Subsection 2d requires that any application-supplied function or table used by
this function must be optional: if the application does not supply it, the
square root function must still compute square roots.)
These requirements apply to the modified work as a whole. If identifiable
sections of that work are not derived from the Library, and can be reasonably
considered independent and separate works in themselves, then this License, and
its terms, do not apply to those sections when you distribute them as separate
works. But when you distribute the same sections as part of a whole which is a
work based on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the entire whole,
and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest your
rights to work written entirely by you; rather, the intent is to exercise the
right to control the distribution of derivative or collective works based on the
Library.
In addition, mere aggregation of another work not based on the Library with the
Library (or with a work based on the Library) on a volume of a storage or
distribution medium does not bring the other work under the scope of this
License.
3. You may opt to apply the terms of the ordinary GNU General Public License
instead of this License to a given copy of the Library. To do this, you must
alter all the notices that refer to this License, so that they refer to the
ordinary GNU General Public License, version 2, instead of to this License. (If
a newer version than version 2 of the ordinary GNU General Public License has
appeared, then you can specify that version instead if you wish.) Do not make
any other change in these notices.
Once this change is made in a given copy, it is irreversible for that copy, so
the ordinary GNU General Public License applies to all subsequent copies and
derivative works made from that copy.
This option is useful when you wish to copy part of the code of the Library into
a program that is not a library.
4. You may copy and distribute the Library (or a portion or derivative of it,
under Section 2) in object code or executable form under the terms of Sections 1
and 2 above provided that you accompany it with the complete corresponding
machine-readable source code, which must be distributed under the terms of
Sections 1 and 2 above on a medium customarily used for software interchange.
If distribution of object code is made by offering access to copy from a
designated place, then offering equivalent access to copy the source code from
the same place satisfies the requirement to distribute the source code, even
though third parties are not compelled to copy the source along with the object
code.
5. A program that contains no derivative of any portion of the Library, but is
designed to work with the Library by being compiled or linked with it, is called
a "work that uses the Library". Such a work, in isolation, is not a derivative
work of the Library, and therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library creates an
executable that is a derivative of the Library (because it contains portions of
the Library), rather than a "work that uses the library". The executable is
therefore covered by this License. Section 6 states terms for distribution of
such executables.
When a "work that uses the Library" uses material from a header file that is
part of the Library, the object code for the work may be a derivative work of
the Library even though the source code is not. Whether this is true is
especially significant if the work can be linked without the Library, or if the
work is itself a library. The threshold for this to be true is not precisely
defined by law.
If such an object file uses only numerical parameters, data structure layouts
and accessors, and small macros and small inline functions (ten lines or less in
length), then the use of the object file is unrestricted, regardless of whether
it is legally a derivative work. (Executables containing this object code plus
portions of the Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may distribute the
object code for the work under the terms of Section 6. Any executables
containing that work also fall under Section 6, whether or not they are linked
directly with the Library itself.
6. As an exception to the Sections above, you may also compile or link a "work
that uses the Library" with the Library to produce a work containing portions of
the Library, and distribute that work under terms of your choice, provided that
the terms permit modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the Library is
used in it and that the Library and its use are covered by this License. You
must supply a copy of this License. If the work during execution displays
copyright notices, you must include the copyright notice for the Library among
them, as well as a reference directing the user to the copy of this License.
Also, you must do one of these things:
a) Accompany the work with the complete corresponding machine-readable
source code for the Library including whatever changes were used in the work
(which must be distributed under Sections 1 and 2 above); and, if the work is an
executable linked with the Library, with the complete machine-readable "work
that uses the Library", as object code and/or source code, so that the user can
modify the Library and then relink to produce a modified executable containing
the modified Library. (It is understood that the user who changes the contents
of definitions files in the Library will not necessarily be able to recompile
the application to use the modified definitions.)
b) Accompany the work with a written offer, valid for at least three years,
to give the same user the materials specified in Subsection 6a, above, for a
charge no more than the cost of performing this distribution.
c) If distribution of the work is made by offering access to copy from a
designated place, offer equivalent access to copy the above specified materials
from the same place.
d) Verify that the user has already received a copy of these materials or
that you have already sent this user a copy.
For an executable, the required form of the "work that uses the Library" must
include any data and utility programs needed for reproducing the executable from
it. However, as a special exception, the source code distributed need not
include anything that is normally distributed (in either source or binary form)
with the major components (compiler, kernel, and so on) of the operating system
on which the executable runs, unless that component itself accompanies the
executable.
It may happen that this requirement contradicts the license restrictions of
other proprietary libraries that do not normally accompany the operating system.
Such a contradiction means you cannot use both them and the Library together in
an executable that you distribute.
7. You may place library facilities that are a work based on the Library
side-by-side in a single library together with other library facilities not
covered by this License, and distribute such a combined library, provided that
the separate distribution of the work based on the Library and of the other
library facilities is otherwise permitted, and provided that you do these two
things:
a) Accompany the combined library with a copy of the same work based on the
Library, uncombined with any other library facilities. This must be distributed
under the terms of the Sections above.
b) Give prominent notice with the combined library of the fact that part of
it is a work based on the Library, and explaining where to find the accompanying
uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute the Library
except as expressly provided under this License. Any attempt otherwise to copy,
modify, sublicense, link with, or distribute the Library is void, and will
automatically terminate your rights under this License. However, parties who
have received copies, or rights, from you under this License will not have their
licenses terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not signed it.
However, nothing else grants you permission to modify or distribute the Library
or its derivative works. These actions are prohibited by law if you do not
accept this License. Therefore, by modifying or distributing the Library (or any
work based on the Library), you indicate your acceptance of this License to do
so, and all its terms and conditions for copying, distributing or modifying the
Library or works based on it.
10. Each time you redistribute the Library (or any work based on the Library),
the recipient automatically receives a license from the original licensor to
copy, distribute, link with or modify the Library subject to these terms and
conditions. You may not impose any further restrictions on the recipients'
exercise of the rights granted herein. You are not responsible for enforcing
compliance by third parties to this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues), conditions
are imposed on you (whether by court order, agreement or otherwise) that
contradict the conditions of this License, they do not excuse you from the
conditions of this License. If you cannot distribute so as to satisfy
simultaneously your obligations under this License and any other pertinent
obligations, then as a consequence you may not distribute the Library at all.
For example, if a patent license would not permit royalty-free redistribution of
the Library by all those who receive copies directly or indirectly through you,
then the only way you could satisfy both it and this License would be to refrain
entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply, and
the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any patents or
other property right claims or to contest validity of any such claims; this
section has the sole purpose of protecting the integrity of the free software
distribution system which is implemented by public license practices. Many
people have made generous contributions to the wide range of software
distributed through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing to
distribute software through any other system and a licensee cannot impose that
choice.
This section is intended to make thoroughly clear what is believed to be a
consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in certain
countries either by patents or by copyrighted interfaces, the original copyright
holder who places the Library under this License may add an explicit
geographical distribution limitation excluding those countries, so that
distribution is permitted only in or among countries not thus excluded. In such
case, this License incorporates the limitation as if written in the body of this
License.
13. The Free Software Foundation may publish revised and/or new versions of the
Library General Public License from time to time. Such new versions will be
similar in spirit to the present version, but may differ in detail to address
new problems or concerns.
Each version is given a distinguishing version number. If the Library specifies
a version number of this License which applies to it and "any later version",
you have the option of following the terms and conditions either of that version
or of any later version published by the Free Software Foundation. If the
Library does not specify a license version number, you may choose any version
ever published by the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free programs
whose distribution conditions are incompatible with these, write to the author
to ask for permission. For software which is copyrighted by the Free Software
Foundation, write to the Free Software Foundation; we sometimes make exceptions
for this. Our decision will be guided by the two goals of preserving the free
status of all derivatives of our free software and of promoting the sharing and
reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE
LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED
IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS
IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL
ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE
LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL,
SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY
TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF
THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

gst-v4l2/Makefile Normal file

@@ -0,0 +1,70 @@
###############################################################################
#
# Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA Corporation and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA Corporation is strictly prohibited.
#
###############################################################################
SO_NAME := libgstnvvideo4linux2.so

TARGET_DEVICE = $(shell gcc -dumpmachine | cut -f1 -d -)

NVDS_VERSION:=6.0

ifeq ($(TARGET_DEVICE),aarch64)
GST_INSTALL_DIR?=/usr/lib/aarch64-linux-gnu/gstreamer-1.0/
LIB_INSTALL_DIR?=/usr/lib/aarch64-linux-gnu/tegra/
INCLUDES += -I/usr/src/jetson_multimedia_api/include/
CFLAGS:=
else
GST_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/gst-plugins/
LIB_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/
CFLAGS:= -DUSE_V4L2_TARGET_NV_CODECSDK=1 -DUSE_V4L2_TARGET_NV_X86=1 -DUSE_V4L2_GST_HEADER_VER_1_8
endif

LIBS:= -lnvbufsurface -lnvbufsurftransform -lgstnvdsseimeta

SRCS := $(wildcard *.c)

INCLUDES += -I./ -I../

PKGS := gstreamer-1.0 \
	gstreamer-base-1.0 \
	gstreamer-video-1.0 \
	gstreamer-allocators-1.0 \
	glib-2.0 \
	libv4l2

OBJS := $(SRCS:.c=.o)

CFLAGS += -fPIC \
	-DEXPLICITLY_ADDED=1 \
	-DGETTEXT_PACKAGE=1 \
	-DHAVE_LIBV4L2=1 \
	-DUSE_V4L2_TARGET_NV=1

CFLAGS += `pkg-config --cflags $(PKGS)`

LDFLAGS = -Wl,--no-undefined -L$(LIB_INSTALL_DIR) -Wl,-rpath,$(LIB_INSTALL_DIR)

LIBS += `pkg-config --libs $(PKGS)`

all: $(SO_NAME)

%.o: %.c
	$(CC) -c $< $(CFLAGS) $(INCLUDES) -o $@

$(SO_NAME): $(OBJS)
	$(CC) -shared -o $(SO_NAME) $(OBJS) $(LIBS) $(LDFLAGS)

.PHONY: install
install: $(SO_NAME)
	cp -vp $(SO_NAME) $(GST_INSTALL_DIR)

.PHONY: clean
clean:
	rm -rf $(OBJS) $(SO_NAME)

gst-v4l2/README.txt Normal file

@@ -0,0 +1,37 @@
###############################################################################
#
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA Corporation and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA Corporation is strictly prohibited.
#
###############################################################################
Steps to compile the "gst-nvvideo4linux2" sources natively:

1) Install the gstreamer related packages on target using the command:

	sudo apt-get install libgstreamer1.0-dev \
		gstreamer1.0-plugins-base \
		gstreamer1.0-plugins-good \
		libgstreamer-plugins-base1.0-dev \
		libv4l-dev \
		libegl1-mesa-dev

2) Download and extract the package "gst-nvvideo4linux2_src.tbz2" as follows:

	tar -I lbzip2 -xvf gst-nvvideo4linux2_src.tbz2

3) Run the following commands to build and install "libgstnvvideo4linux2.so":

	make
	make install
	or
	DEST_DIR=<dir> make install

Note: For Jetson, "make install" will copy the library "libgstnvvideo4linux2.so"
into the "/usr/lib/aarch64-linux-gnu/gstreamer-1.0" directory. For x86 platforms,
"make install" will copy the library "libgstnvvideo4linux2.so" into
"/opt/nvidia/deepstream/deepstream-4.0/lib/gst-plugins".

gst-v4l2/ext/types-compat.h Normal file

@@ -0,0 +1,58 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas@ndufresne.ca>
* Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include <glib.h>
#ifndef __TYPES_COMPAT_H__
#define __TYPES_COMPAT_H__
/* From linux/types.h */
#ifndef __bitwise__
# ifdef __CHECKER__
# define __bitwise__ __attribute__((bitwise))
# else
# define __bitwise__
# endif
#endif
#ifndef __bitwise
# ifdef __CHECK_ENDIAN__
# define __bitwise __bitwise__
# else
# define __bitwise
# endif
#endif
#define __u64 guint64
#define __u32 guint32
#define __u16 guint16
#define __u8 guint8
#ifdef USE_V4L2_TARGET_NV
#define __s8 gint8
#endif
#define __s64 gint64
#define __s32 gint32
#define __le32 guint32 __bitwise
#define __user
#endif /* __TYPES_COMPAT_H__ */
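(Illustration, not part of the commit: a minimal hypothetical snippet showing what this compatibility header achieves. After including it, kernel-style fixed-width type names resolve to GLib types, so the bundled ext/ headers can be compiled in plain userspace code without pulling in <linux/types.h>.)

/* Hypothetical example only; the file name and struct below are made up. */
#include <glib.h>
#include "ext/types-compat.h"

struct kernel_style_record {
    __u32 index;      /* expands to guint32 */
    __u64 timestamp;  /* expands to guint64 */
    __s32 value;      /* expands to gint32  */
};

/* The aliases keep the expected fixed sizes. */
G_STATIC_ASSERT(sizeof(__u16) == 2);
G_STATIC_ASSERT(sizeof(__u32) == 4);
G_STATIC_ASSERT(sizeof(__u64) == 8);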

gst-v4l2/ext/v4l2-common.h Normal file

@@ -0,0 +1,107 @@
/*
* include/linux/v4l2-common.h
*
* Common V4L2 and V4L2 subdev definitions.
*
* Users are advised to #include this file either through videodev2.h
* (V4L2) or through v4l2-subdev.h (V4L2 subdev) rather than to refer
* to this file directly.
*
* Copyright (C) 2012 Nokia Corporation
* Contact: Sakari Ailus <sakari.ailus@iki.fi>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* Alternatively you can redistribute this file under the terms of the
* BSD license as stated below:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. The names of its contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef __V4L2_COMMON__
#define __V4L2_COMMON__
#include "ext/types-compat.h"
/*
*
* Selection interface definitions
*
*/
/* Current cropping area */
#define V4L2_SEL_TGT_CROP 0x0000
/* Default cropping area */
#define V4L2_SEL_TGT_CROP_DEFAULT 0x0001
/* Cropping bounds */
#define V4L2_SEL_TGT_CROP_BOUNDS 0x0002
/* Native frame size */
#define V4L2_SEL_TGT_NATIVE_SIZE 0x0003
/* Current composing area */
#define V4L2_SEL_TGT_COMPOSE 0x0100
/* Default composing area */
#define V4L2_SEL_TGT_COMPOSE_DEFAULT 0x0101
/* Composing bounds */
#define V4L2_SEL_TGT_COMPOSE_BOUNDS 0x0102
/* Current composing area plus all padding pixels */
#define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103
/* Backward compatibility target definitions --- to be removed. */
#define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP
#define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS
/* Selection flags */
#define V4L2_SEL_FLAG_GE (1 << 0)
#define V4L2_SEL_FLAG_LE (1 << 1)
#define V4L2_SEL_FLAG_KEEP_CONFIG (1 << 2)
/* Backward compatibility flag definitions --- to be removed. */
#define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE
#define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE
#define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG
struct v4l2_edid {
__u32 pad;
__u32 start_block;
__u32 blocks;
__u32 reserved[5];
__u8 *edid;
};
#endif /* __V4L2_COMMON__ */
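(Illustration, not part of the commit: a hypothetical sketch of how the selection targets above are normally consumed, via struct v4l2_selection and the VIDIOC_G_SELECTION ioctl from videodev2.h, assuming an already-open V4L2 device fd.)

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Hypothetical helper: read the current crop rectangle of a capture queue. */
static int get_capture_crop(int fd, struct v4l2_rect *out)
{
    struct v4l2_selection sel;

    memset(&sel, 0, sizeof(sel));
    sel.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    sel.target = V4L2_SEL_TGT_CROP;        /* "current cropping area" above */

    if (ioctl(fd, VIDIOC_G_SELECTION, &sel) < 0)
        return -1;                         /* errno carries the failure */

    *out = sel.r;                          /* active crop rectangle */
    return 0;
}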

gst-v4l2/ext/v4l2-controls.h Normal file

@@ -0,0 +1,987 @@
/*
* Video for Linux Two controls header file
*
* Copyright (C) 1999-2012 the contributors
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* Alternatively you can redistribute this file under the terms of the
* BSD license as stated below:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. The names of its contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The contents of this header was split off from videodev2.h. All control
* definitions should be added to this header, which is included by
* videodev2.h.
*/
#ifndef __LINUX_V4L2_CONTROLS_H
#define __LINUX_V4L2_CONTROLS_H
/* Control classes */
#define V4L2_CTRL_CLASS_USER 0x00980000 /* Old-style 'user' controls */
#define V4L2_CTRL_CLASS_MPEG 0x00990000 /* MPEG-compression controls */
#define V4L2_CTRL_CLASS_CAMERA 0x009a0000 /* Camera class controls */
#define V4L2_CTRL_CLASS_FM_TX 0x009b0000 /* FM Modulator controls */
#define V4L2_CTRL_CLASS_FLASH 0x009c0000 /* Camera flash controls */
#define V4L2_CTRL_CLASS_JPEG 0x009d0000 /* JPEG-compression controls */
#define V4L2_CTRL_CLASS_IMAGE_SOURCE 0x009e0000 /* Image source controls */
#define V4L2_CTRL_CLASS_IMAGE_PROC 0x009f0000 /* Image processing controls */
#define V4L2_CTRL_CLASS_DV 0x00a00000 /* Digital Video controls */
#define V4L2_CTRL_CLASS_FM_RX 0x00a10000 /* FM Receiver controls */
#define V4L2_CTRL_CLASS_RF_TUNER 0x00a20000 /* RF tuner controls */
#define V4L2_CTRL_CLASS_DETECT 0x00a30000 /* Detection controls */
/* User-class control IDs */
#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900)
#define V4L2_CID_USER_BASE V4L2_CID_BASE
#define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1)
#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0)
#define V4L2_CID_CONTRAST (V4L2_CID_BASE+1)
#define V4L2_CID_SATURATION (V4L2_CID_BASE+2)
#define V4L2_CID_HUE (V4L2_CID_BASE+3)
#define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE+5)
#define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE+6)
#define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE+7)
#define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE+8)
#define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE+9)
#define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE+10)
#define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE+11) /* Deprecated */
#define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE+12)
#define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE+13)
#define V4L2_CID_RED_BALANCE (V4L2_CID_BASE+14)
#define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE+15)
#define V4L2_CID_GAMMA (V4L2_CID_BASE+16)
#define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) /* Deprecated */
#define V4L2_CID_EXPOSURE (V4L2_CID_BASE+17)
#define V4L2_CID_AUTOGAIN (V4L2_CID_BASE+18)
#define V4L2_CID_GAIN (V4L2_CID_BASE+19)
#define V4L2_CID_HFLIP (V4L2_CID_BASE+20)
#define V4L2_CID_VFLIP (V4L2_CID_BASE+21)
#define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE+24)
enum v4l2_power_line_frequency {
V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0,
V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1,
V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2,
V4L2_CID_POWER_LINE_FREQUENCY_AUTO = 3,
};
#define V4L2_CID_HUE_AUTO (V4L2_CID_BASE+25)
#define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE+26)
#define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27)
#define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE+28)
#define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE+29)
#define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE+30)
#define V4L2_CID_COLORFX (V4L2_CID_BASE+31)
enum v4l2_colorfx {
V4L2_COLORFX_NONE = 0,
V4L2_COLORFX_BW = 1,
V4L2_COLORFX_SEPIA = 2,
V4L2_COLORFX_NEGATIVE = 3,
V4L2_COLORFX_EMBOSS = 4,
V4L2_COLORFX_SKETCH = 5,
V4L2_COLORFX_SKY_BLUE = 6,
V4L2_COLORFX_GRASS_GREEN = 7,
V4L2_COLORFX_SKIN_WHITEN = 8,
V4L2_COLORFX_VIVID = 9,
V4L2_COLORFX_AQUA = 10,
V4L2_COLORFX_ART_FREEZE = 11,
V4L2_COLORFX_SILHOUETTE = 12,
V4L2_COLORFX_SOLARIZATION = 13,
V4L2_COLORFX_ANTIQUE = 14,
V4L2_COLORFX_SET_CBCR = 15,
};
#define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE+32)
#define V4L2_CID_BAND_STOP_FILTER (V4L2_CID_BASE+33)
#define V4L2_CID_ROTATE (V4L2_CID_BASE+34)
#define V4L2_CID_BG_COLOR (V4L2_CID_BASE+35)
#define V4L2_CID_CHROMA_GAIN (V4L2_CID_BASE+36)
#define V4L2_CID_ILLUMINATORS_1 (V4L2_CID_BASE+37)
#define V4L2_CID_ILLUMINATORS_2 (V4L2_CID_BASE+38)
#define V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (V4L2_CID_BASE+39)
#define V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (V4L2_CID_BASE+40)
#define V4L2_CID_ALPHA_COMPONENT (V4L2_CID_BASE+41)
#define V4L2_CID_COLORFX_CBCR (V4L2_CID_BASE+42)
/* last CID + 1 */
#define V4L2_CID_LASTP1 (V4L2_CID_BASE+43)
/* USER-class private control IDs */
/* The base for the meye driver controls. See linux/meye.h for the list
* of controls. We reserve 16 controls for this driver. */
#define V4L2_CID_USER_MEYE_BASE (V4L2_CID_USER_BASE + 0x1000)
/* The base for the bttv driver controls.
* We reserve 32 controls for this driver. */
#define V4L2_CID_USER_BTTV_BASE (V4L2_CID_USER_BASE + 0x1010)
/* The base for the s2255 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_S2255_BASE (V4L2_CID_USER_BASE + 0x1030)
/*
* The base for the si476x driver controls. See include/media/drv-intf/si476x.h
* for the list of controls. Total of 16 controls is reserved for this driver
*/
#define V4L2_CID_USER_SI476X_BASE (V4L2_CID_USER_BASE + 0x1040)
/* The base for the TI VPE driver controls. Total of 16 controls is reserved for
* this driver */
#define V4L2_CID_USER_TI_VPE_BASE (V4L2_CID_USER_BASE + 0x1050)
/* The base for the saa7134 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_SAA7134_BASE (V4L2_CID_USER_BASE + 0x1060)
/* The base for the adv7180 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_ADV7180_BASE (V4L2_CID_USER_BASE + 0x1070)
/* The base for the tc358743 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_TC358743_BASE (V4L2_CID_USER_BASE + 0x1080)
/* The base for the max217x driver controls.
* We reserve 32 controls for this driver
*/
#define V4L2_CID_USER_MAX217X_BASE (V4L2_CID_USER_BASE + 0x1090)
/* The base for the imx driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_IMX_BASE (V4L2_CID_USER_BASE + 0x10b0)
/* MPEG-class control IDs */
/* The MPEG controls are applicable to all codec controls
* and the 'MPEG' part of the define is historical */
#define V4L2_CID_MPEG_BASE (V4L2_CTRL_CLASS_MPEG | 0x900)
#define V4L2_CID_MPEG_CLASS (V4L2_CTRL_CLASS_MPEG | 1)
/* MPEG streams, specific to multiplexed streams */
#define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_MPEG_BASE+0)
enum v4l2_mpeg_stream_type {
V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, /* MPEG-2 program stream */
V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, /* MPEG-2 transport stream */
V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, /* MPEG-1 system stream */
V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, /* MPEG-2 DVD-compatible stream */
V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, /* MPEG-1 VCD-compatible stream */
V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, /* MPEG-2 SVCD-compatible stream */
};
#define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_MPEG_BASE+1)
#define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_MPEG_BASE+2)
#define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_MPEG_BASE+3)
#define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_MPEG_BASE+4)
#define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_MPEG_BASE+5)
#define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_MPEG_BASE+6)
#define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_MPEG_BASE+7)
enum v4l2_mpeg_stream_vbi_fmt {
V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, /* No VBI in the MPEG stream */
V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, /* VBI in private packets, IVTV format */
};
/* MPEG audio controls specific to multiplexed streams */
#define V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_MPEG_BASE+100)
enum v4l2_mpeg_audio_sampling_freq {
V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0,
V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1,
V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2,
};
#define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_MPEG_BASE+101)
enum v4l2_mpeg_audio_encoding {
V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0,
V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1,
V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2,
V4L2_MPEG_AUDIO_ENCODING_AAC = 3,
V4L2_MPEG_AUDIO_ENCODING_AC3 = 4,
};
#define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_MPEG_BASE+102)
enum v4l2_mpeg_audio_l1_bitrate {
V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1,
V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2,
V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3,
V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4,
V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5,
V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6,
V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7,
V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8,
V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9,
V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10,
V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11,
V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12,
V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13,
};
#define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_MPEG_BASE+103)
enum v4l2_mpeg_audio_l2_bitrate {
V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1,
V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2,
V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3,
V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4,
V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5,
V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6,
V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7,
V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8,
V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9,
V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10,
V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11,
V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12,
V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13,
};
#define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_MPEG_BASE+104)
enum v4l2_mpeg_audio_l3_bitrate {
V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1,
V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2,
V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3,
V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4,
V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5,
V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6,
V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7,
V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8,
V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9,
V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10,
V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11,
V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12,
V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13,
};
#define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_MPEG_BASE+105)
enum v4l2_mpeg_audio_mode {
V4L2_MPEG_AUDIO_MODE_STEREO = 0,
V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1,
V4L2_MPEG_AUDIO_MODE_DUAL = 2,
V4L2_MPEG_AUDIO_MODE_MONO = 3,
};
#define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_MPEG_BASE+106)
enum v4l2_mpeg_audio_mode_extension {
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0,
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1,
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2,
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3,
};
#define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_MPEG_BASE+107)
enum v4l2_mpeg_audio_emphasis {
V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0,
V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1,
V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2,
};
#define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_MPEG_BASE+108)
enum v4l2_mpeg_audio_crc {
V4L2_MPEG_AUDIO_CRC_NONE = 0,
V4L2_MPEG_AUDIO_CRC_CRC16 = 1,
};
#define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_MPEG_BASE+109)
#define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_MPEG_BASE+110)
#define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_MPEG_BASE+111)
enum v4l2_mpeg_audio_ac3_bitrate {
V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1,
V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2,
V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3,
V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4,
V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5,
V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6,
V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7,
V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8,
V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9,
V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10,
V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11,
V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12,
V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13,
V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14,
V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15,
V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16,
V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17,
V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18,
};
#define V4L2_CID_MPEG_AUDIO_DEC_PLAYBACK (V4L2_CID_MPEG_BASE+112)
enum v4l2_mpeg_audio_dec_playback {
V4L2_MPEG_AUDIO_DEC_PLAYBACK_AUTO = 0,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_STEREO = 1,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_LEFT = 2,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_RIGHT = 3,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_MONO = 4,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_SWAPPED_STEREO = 5,
};
#define V4L2_CID_MPEG_AUDIO_DEC_MULTILINGUAL_PLAYBACK (V4L2_CID_MPEG_BASE+113)
/* MPEG video controls specific to multiplexed streams */
#define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_MPEG_BASE+200)
enum v4l2_mpeg_video_encoding {
V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0,
V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1,
V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2,
};
#define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_MPEG_BASE+201)
enum v4l2_mpeg_video_aspect {
V4L2_MPEG_VIDEO_ASPECT_1x1 = 0,
V4L2_MPEG_VIDEO_ASPECT_4x3 = 1,
V4L2_MPEG_VIDEO_ASPECT_16x9 = 2,
V4L2_MPEG_VIDEO_ASPECT_221x100 = 3,
};
#define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_MPEG_BASE+202)
#define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_MPEG_BASE+203)
#define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_MPEG_BASE+204)
#define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_MPEG_BASE+205)
#define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_MPEG_BASE+206)
enum v4l2_mpeg_video_bitrate_mode {
V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0,
V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1,
};
#define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_MPEG_BASE+207)
#define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_MPEG_BASE+208)
#define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_MPEG_BASE+209)
#define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_MPEG_BASE+210)
#define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_MPEG_BASE+211)
#define V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE (V4L2_CID_MPEG_BASE+212)
#define V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER (V4L2_CID_MPEG_BASE+213)
#define V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB (V4L2_CID_MPEG_BASE+214)
#define V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE (V4L2_CID_MPEG_BASE+215)
#define V4L2_CID_MPEG_VIDEO_HEADER_MODE (V4L2_CID_MPEG_BASE+216)
enum v4l2_mpeg_video_header_mode {
V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE = 0,
V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME = 1,
};
#define V4L2_CID_MPEG_VIDEO_MAX_REF_PIC (V4L2_CID_MPEG_BASE+217)
#define V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE (V4L2_CID_MPEG_BASE+218)
#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES (V4L2_CID_MPEG_BASE+219)
#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB (V4L2_CID_MPEG_BASE+220)
#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE (V4L2_CID_MPEG_BASE+221)
enum v4l2_mpeg_video_multi_slice_mode {
V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_SINGLE = 0,
V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB = 1,
V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES = 2,
};
#define V4L2_CID_MPEG_VIDEO_VBV_SIZE (V4L2_CID_MPEG_BASE+222)
#define V4L2_CID_MPEG_VIDEO_DEC_PTS (V4L2_CID_MPEG_BASE+223)
#define V4L2_CID_MPEG_VIDEO_DEC_FRAME (V4L2_CID_MPEG_BASE+224)
#define V4L2_CID_MPEG_VIDEO_VBV_DELAY (V4L2_CID_MPEG_BASE+225)
#define V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER (V4L2_CID_MPEG_BASE+226)
#define V4L2_CID_MPEG_VIDEO_MV_H_SEARCH_RANGE (V4L2_CID_MPEG_BASE+227)
#define V4L2_CID_MPEG_VIDEO_MV_V_SEARCH_RANGE (V4L2_CID_MPEG_BASE+228)
#define V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME (V4L2_CID_MPEG_BASE+229)
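(Illustration, not part of the commit: a hypothetical sketch of how an application would program one of the codec controls above, here the target bitrate, through the standard V4L2 extended-controls ioctl.)

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Hypothetical helper: set V4L2_CID_MPEG_VIDEO_BITRATE on an encoder fd. */
static int set_encoder_bitrate(int fd, int bitrate_bps)
{
    struct v4l2_ext_control ctrl;
    struct v4l2_ext_controls ctrls;

    memset(&ctrl, 0, sizeof(ctrl));
    memset(&ctrls, 0, sizeof(ctrls));

    ctrl.id = V4L2_CID_MPEG_VIDEO_BITRATE;    /* defined above */
    ctrl.value = bitrate_bps;                 /* bits per second */

    ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;  /* codec control class */
    ctrls.count = 1;
    ctrls.controls = &ctrl;

    return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
}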
#define V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP (V4L2_CID_MPEG_BASE+300)
#define V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP (V4L2_CID_MPEG_BASE+301)
#define V4L2_CID_MPEG_VIDEO_H263_B_FRAME_QP (V4L2_CID_MPEG_BASE+302)
#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP (V4L2_CID_MPEG_BASE+303)
#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP (V4L2_CID_MPEG_BASE+304)
#define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP (V4L2_CID_MPEG_BASE+350)
#define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP (V4L2_CID_MPEG_BASE+351)
#define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP (V4L2_CID_MPEG_BASE+352)
#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP (V4L2_CID_MPEG_BASE+353)
#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP (V4L2_CID_MPEG_BASE+354)
#define V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM (V4L2_CID_MPEG_BASE+355)
#define V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE (V4L2_CID_MPEG_BASE+356)
#define V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE (V4L2_CID_MPEG_BASE+357)
enum v4l2_mpeg_video_h264_entropy_mode {
V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC = 0,
V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC = 1,
};
#define V4L2_CID_MPEG_VIDEO_H264_I_PERIOD (V4L2_CID_MPEG_BASE+358)
#define V4L2_CID_MPEG_VIDEO_H264_LEVEL (V4L2_CID_MPEG_BASE+359)
enum v4l2_mpeg_video_h264_level {
V4L2_MPEG_VIDEO_H264_LEVEL_1_0 = 0,
V4L2_MPEG_VIDEO_H264_LEVEL_1B = 1,
V4L2_MPEG_VIDEO_H264_LEVEL_1_1 = 2,
V4L2_MPEG_VIDEO_H264_LEVEL_1_2 = 3,
V4L2_MPEG_VIDEO_H264_LEVEL_1_3 = 4,
V4L2_MPEG_VIDEO_H264_LEVEL_2_0 = 5,
V4L2_MPEG_VIDEO_H264_LEVEL_2_1 = 6,
V4L2_MPEG_VIDEO_H264_LEVEL_2_2 = 7,
V4L2_MPEG_VIDEO_H264_LEVEL_3_0 = 8,
V4L2_MPEG_VIDEO_H264_LEVEL_3_1 = 9,
V4L2_MPEG_VIDEO_H264_LEVEL_3_2 = 10,
V4L2_MPEG_VIDEO_H264_LEVEL_4_0 = 11,
V4L2_MPEG_VIDEO_H264_LEVEL_4_1 = 12,
V4L2_MPEG_VIDEO_H264_LEVEL_4_2 = 13,
V4L2_MPEG_VIDEO_H264_LEVEL_5_0 = 14,
V4L2_MPEG_VIDEO_H264_LEVEL_5_1 = 15,
};
#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA (V4L2_CID_MPEG_BASE+360)
#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA (V4L2_CID_MPEG_BASE+361)
#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE (V4L2_CID_MPEG_BASE+362)
enum v4l2_mpeg_video_h264_loop_filter_mode {
V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_ENABLED = 0,
V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED = 1,
V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2,
};
#define V4L2_CID_MPEG_VIDEO_H264_PROFILE (V4L2_CID_MPEG_BASE+363)
enum v4l2_mpeg_video_h264_profile {
V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE = 0,
V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE = 1,
V4L2_MPEG_VIDEO_H264_PROFILE_MAIN = 2,
V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED = 3,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH = 4,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10 = 5,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422 = 6,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE = 7,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA = 8,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA = 9,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA = 10,
V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA = 11,
V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE = 12,
V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH = 13,
V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA = 14,
V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH = 15,
V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH = 16,
};
#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT (V4L2_CID_MPEG_BASE+364)
#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH (V4L2_CID_MPEG_BASE+365)
#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE (V4L2_CID_MPEG_BASE+366)
#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC (V4L2_CID_MPEG_BASE+367)
enum v4l2_mpeg_video_h264_vui_sar_idc {
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_UNSPECIFIED = 0,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1 = 1,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_12x11 = 2,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_10x11 = 3,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_16x11 = 4,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_40x33 = 5,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_24x11 = 6,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_20x11 = 7,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_32x11 = 8,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_80x33 = 9,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_18x11 = 10,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_15x11 = 11,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_64x33 = 12,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_160x99 = 13,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_4x3 = 14,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_3x2 = 15,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_2x1 = 16,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_EXTENDED = 17,
};
#define V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING (V4L2_CID_MPEG_BASE+368)
#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0 (V4L2_CID_MPEG_BASE+369)
#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE (V4L2_CID_MPEG_BASE+370)
enum v4l2_mpeg_video_h264_sei_fp_arrangement_type {
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_CHECKERBOARD = 0,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_COLUMN = 1,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_ROW = 2,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_SIDE_BY_SIDE = 3,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM = 4,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TEMPORAL = 5,
};
#define V4L2_CID_MPEG_VIDEO_H264_FMO (V4L2_CID_MPEG_BASE+371)
#define V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE (V4L2_CID_MPEG_BASE+372)
enum v4l2_mpeg_video_h264_fmo_map_type {
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES = 0,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES = 1,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_FOREGROUND_WITH_LEFT_OVER = 2,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_BOX_OUT = 3,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN = 4,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN = 5,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_EXPLICIT = 6,
};
#define V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP (V4L2_CID_MPEG_BASE+373)
#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION (V4L2_CID_MPEG_BASE+374)
enum v4l2_mpeg_video_h264_fmo_change_dir {
V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT = 0,
V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_LEFT = 1,
};
#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE (V4L2_CID_MPEG_BASE+375)
#define V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH (V4L2_CID_MPEG_BASE+376)
#define V4L2_CID_MPEG_VIDEO_H264_ASO (V4L2_CID_MPEG_BASE+377)
#define V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER (V4L2_CID_MPEG_BASE+378)
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING (V4L2_CID_MPEG_BASE+379)
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_BASE+380)
enum v4l2_mpeg_video_h264_hierarchical_coding_type {
V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B = 0,
V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P = 1,
};
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_BASE+381)
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_BASE+382)
#define V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP (V4L2_CID_MPEG_BASE+400)
#define V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP (V4L2_CID_MPEG_BASE+401)
#define V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP (V4L2_CID_MPEG_BASE+402)
#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP (V4L2_CID_MPEG_BASE+403)
#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP (V4L2_CID_MPEG_BASE+404)
#define V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL (V4L2_CID_MPEG_BASE+405)
enum v4l2_mpeg_video_mpeg4_level {
V4L2_MPEG_VIDEO_MPEG4_LEVEL_0 = 0,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_0B = 1,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_1 = 2,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_2 = 3,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_3 = 4,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_3B = 5,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_4 = 6,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_5 = 7,
};
#define V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE (V4L2_CID_MPEG_BASE+406)
enum v4l2_mpeg_video_mpeg4_profile {
V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE = 0,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_SIMPLE = 1,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_CORE = 2,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE_SCALABLE = 3,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY = 4,
};
#define V4L2_CID_MPEG_VIDEO_MPEG4_QPEL (V4L2_CID_MPEG_BASE+407)
/* Control IDs for VP8 streams
* Although VP8 is not part of MPEG we add these controls to the MPEG class
* as that class is already handling other video compression standards
*/
#define V4L2_CID_MPEG_VIDEO_VPX_NUM_PARTITIONS (V4L2_CID_MPEG_BASE+500)
enum v4l2_vp8_num_partitions {
V4L2_CID_MPEG_VIDEO_VPX_1_PARTITION = 0,
V4L2_CID_MPEG_VIDEO_VPX_2_PARTITIONS = 1,
V4L2_CID_MPEG_VIDEO_VPX_4_PARTITIONS = 2,
V4L2_CID_MPEG_VIDEO_VPX_8_PARTITIONS = 3,
};
#define V4L2_CID_MPEG_VIDEO_VPX_IMD_DISABLE_4X4 (V4L2_CID_MPEG_BASE+501)
#define V4L2_CID_MPEG_VIDEO_VPX_NUM_REF_FRAMES (V4L2_CID_MPEG_BASE+502)
enum v4l2_vp8_num_ref_frames {
V4L2_CID_MPEG_VIDEO_VPX_1_REF_FRAME = 0,
V4L2_CID_MPEG_VIDEO_VPX_2_REF_FRAME = 1,
V4L2_CID_MPEG_VIDEO_VPX_3_REF_FRAME = 2,
};
#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_LEVEL (V4L2_CID_MPEG_BASE+503)
#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_SHARPNESS (V4L2_CID_MPEG_BASE+504)
#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_REF_PERIOD (V4L2_CID_MPEG_BASE+505)
#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_SEL (V4L2_CID_MPEG_BASE+506)
enum v4l2_vp8_golden_frame_sel {
V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_PREV = 0,
V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_REF_PERIOD = 1,
};
#define V4L2_CID_MPEG_VIDEO_VPX_MIN_QP (V4L2_CID_MPEG_BASE+507)
#define V4L2_CID_MPEG_VIDEO_VPX_MAX_QP (V4L2_CID_MPEG_BASE+508)
#define V4L2_CID_MPEG_VIDEO_VPX_I_FRAME_QP (V4L2_CID_MPEG_BASE+509)
#define V4L2_CID_MPEG_VIDEO_VPX_P_FRAME_QP (V4L2_CID_MPEG_BASE+510)
#define V4L2_CID_MPEG_VIDEO_VPX_PROFILE (V4L2_CID_MPEG_BASE+511)
/* MPEG-class control IDs specific to the CX2341x driver as defined by V4L2 */
#define V4L2_CID_MPEG_CX2341X_BASE (V4L2_CTRL_CLASS_MPEG | 0x1000)
#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+0)
enum v4l2_mpeg_cx2341x_video_spatial_filter_mode {
V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0,
V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+1)
#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+2)
enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type {
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+3)
enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type {
V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0,
V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+4)
enum v4l2_mpeg_cx2341x_video_temporal_filter_mode {
V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0,
V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+5)
#define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+6)
enum v4l2_mpeg_cx2341x_video_median_filter_type {
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+7)
#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+8)
#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+9)
#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+10)
#define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_MPEG_CX2341X_BASE+11)
/* MPEG-class control IDs specific to the Samsung MFC 5.1 driver as defined by V4L2 */
#define V4L2_CID_MPEG_MFC51_BASE (V4L2_CTRL_CLASS_MPEG | 0x1100)
#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY (V4L2_CID_MPEG_MFC51_BASE+0)
#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY_ENABLE (V4L2_CID_MPEG_MFC51_BASE+1)
#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE (V4L2_CID_MPEG_MFC51_BASE+2)
enum v4l2_mpeg_mfc51_video_frame_skip_mode {
V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED = 0,
V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1,
V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2,
};
#define V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE (V4L2_CID_MPEG_MFC51_BASE+3)
enum v4l2_mpeg_mfc51_video_force_frame_type {
V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_DISABLED = 0,
V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME = 1,
V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_NOT_CODED = 2,
};
#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING (V4L2_CID_MPEG_MFC51_BASE+4)
#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV (V4L2_CID_MPEG_MFC51_BASE+5)
#define V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT (V4L2_CID_MPEG_MFC51_BASE+6)
#define V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF (V4L2_CID_MPEG_MFC51_BASE+7)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC51_BASE+50)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC51_BASE+51)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC51_BASE+52)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC51_BASE+53)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P (V4L2_CID_MPEG_MFC51_BASE+54)
/* Camera class control IDs */
#define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900)
#define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1)
#define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE+1)
enum v4l2_exposure_auto_type {
V4L2_EXPOSURE_AUTO = 0,
V4L2_EXPOSURE_MANUAL = 1,
V4L2_EXPOSURE_SHUTTER_PRIORITY = 2,
V4L2_EXPOSURE_APERTURE_PRIORITY = 3
};
#define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+2)
#define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE+3)
#define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+4)
#define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+5)
#define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE+6)
#define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE+7)
#define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+8)
#define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+9)
#define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+10)
#define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+11)
#define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE+12)
#define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+13)
#define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+14)
#define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE+15)
#define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE+16)
#define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+17)
#define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+18)
#define V4L2_CID_AUTO_EXPOSURE_BIAS (V4L2_CID_CAMERA_CLASS_BASE+19)
#define V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE (V4L2_CID_CAMERA_CLASS_BASE+20)
enum v4l2_auto_n_preset_white_balance {
V4L2_WHITE_BALANCE_MANUAL = 0,
V4L2_WHITE_BALANCE_AUTO = 1,
V4L2_WHITE_BALANCE_INCANDESCENT = 2,
V4L2_WHITE_BALANCE_FLUORESCENT = 3,
V4L2_WHITE_BALANCE_FLUORESCENT_H = 4,
V4L2_WHITE_BALANCE_HORIZON = 5,
V4L2_WHITE_BALANCE_DAYLIGHT = 6,
V4L2_WHITE_BALANCE_FLASH = 7,
V4L2_WHITE_BALANCE_CLOUDY = 8,
V4L2_WHITE_BALANCE_SHADE = 9,
};
#define V4L2_CID_WIDE_DYNAMIC_RANGE (V4L2_CID_CAMERA_CLASS_BASE+21)
#define V4L2_CID_IMAGE_STABILIZATION (V4L2_CID_CAMERA_CLASS_BASE+22)
#define V4L2_CID_ISO_SENSITIVITY (V4L2_CID_CAMERA_CLASS_BASE+23)
#define V4L2_CID_ISO_SENSITIVITY_AUTO (V4L2_CID_CAMERA_CLASS_BASE+24)
enum v4l2_iso_sensitivity_auto_type {
V4L2_ISO_SENSITIVITY_MANUAL = 0,
V4L2_ISO_SENSITIVITY_AUTO = 1,
};
#define V4L2_CID_EXPOSURE_METERING (V4L2_CID_CAMERA_CLASS_BASE+25)
enum v4l2_exposure_metering {
V4L2_EXPOSURE_METERING_AVERAGE = 0,
V4L2_EXPOSURE_METERING_CENTER_WEIGHTED = 1,
V4L2_EXPOSURE_METERING_SPOT = 2,
V4L2_EXPOSURE_METERING_MATRIX = 3,
};
#define V4L2_CID_SCENE_MODE (V4L2_CID_CAMERA_CLASS_BASE+26)
enum v4l2_scene_mode {
V4L2_SCENE_MODE_NONE = 0,
V4L2_SCENE_MODE_BACKLIGHT = 1,
V4L2_SCENE_MODE_BEACH_SNOW = 2,
V4L2_SCENE_MODE_CANDLE_LIGHT = 3,
V4L2_SCENE_MODE_DAWN_DUSK = 4,
V4L2_SCENE_MODE_FALL_COLORS = 5,
V4L2_SCENE_MODE_FIREWORKS = 6,
V4L2_SCENE_MODE_LANDSCAPE = 7,
V4L2_SCENE_MODE_NIGHT = 8,
V4L2_SCENE_MODE_PARTY_INDOOR = 9,
V4L2_SCENE_MODE_PORTRAIT = 10,
V4L2_SCENE_MODE_SPORTS = 11,
V4L2_SCENE_MODE_SUNSET = 12,
V4L2_SCENE_MODE_TEXT = 13,
};
#define V4L2_CID_3A_LOCK (V4L2_CID_CAMERA_CLASS_BASE+27)
#define V4L2_LOCK_EXPOSURE (1 << 0)
#define V4L2_LOCK_WHITE_BALANCE (1 << 1)
#define V4L2_LOCK_FOCUS (1 << 2)
#define V4L2_CID_AUTO_FOCUS_START (V4L2_CID_CAMERA_CLASS_BASE+28)
#define V4L2_CID_AUTO_FOCUS_STOP (V4L2_CID_CAMERA_CLASS_BASE+29)
#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30)
#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0)
#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0)
#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1)
#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2)
#define V4L2_CID_AUTO_FOCUS_RANGE (V4L2_CID_CAMERA_CLASS_BASE+31)
enum v4l2_auto_focus_range {
V4L2_AUTO_FOCUS_RANGE_AUTO = 0,
V4L2_AUTO_FOCUS_RANGE_NORMAL = 1,
V4L2_AUTO_FOCUS_RANGE_MACRO = 2,
V4L2_AUTO_FOCUS_RANGE_INFINITY = 3,
};
#define V4L2_CID_PAN_SPEED (V4L2_CID_CAMERA_CLASS_BASE+32)
#define V4L2_CID_TILT_SPEED (V4L2_CID_CAMERA_CLASS_BASE+33)
/* FM Modulator class control IDs */
#define V4L2_CID_FM_TX_CLASS_BASE (V4L2_CTRL_CLASS_FM_TX | 0x900)
#define V4L2_CID_FM_TX_CLASS (V4L2_CTRL_CLASS_FM_TX | 1)
#define V4L2_CID_RDS_TX_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 1)
#define V4L2_CID_RDS_TX_PI (V4L2_CID_FM_TX_CLASS_BASE + 2)
#define V4L2_CID_RDS_TX_PTY (V4L2_CID_FM_TX_CLASS_BASE + 3)
#define V4L2_CID_RDS_TX_PS_NAME (V4L2_CID_FM_TX_CLASS_BASE + 5)
#define V4L2_CID_RDS_TX_RADIO_TEXT (V4L2_CID_FM_TX_CLASS_BASE + 6)
#define V4L2_CID_RDS_TX_MONO_STEREO (V4L2_CID_FM_TX_CLASS_BASE + 7)
#define V4L2_CID_RDS_TX_ARTIFICIAL_HEAD (V4L2_CID_FM_TX_CLASS_BASE + 8)
#define V4L2_CID_RDS_TX_COMPRESSED (V4L2_CID_FM_TX_CLASS_BASE + 9)
#define V4L2_CID_RDS_TX_DYNAMIC_PTY (V4L2_CID_FM_TX_CLASS_BASE + 10)
#define V4L2_CID_RDS_TX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_TX_CLASS_BASE + 11)
#define V4L2_CID_RDS_TX_TRAFFIC_PROGRAM (V4L2_CID_FM_TX_CLASS_BASE + 12)
#define V4L2_CID_RDS_TX_MUSIC_SPEECH (V4L2_CID_FM_TX_CLASS_BASE + 13)
#define V4L2_CID_RDS_TX_ALT_FREQS_ENABLE (V4L2_CID_FM_TX_CLASS_BASE + 14)
#define V4L2_CID_RDS_TX_ALT_FREQS (V4L2_CID_FM_TX_CLASS_BASE + 15)
#define V4L2_CID_AUDIO_LIMITER_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 64)
#define V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 65)
#define V4L2_CID_AUDIO_LIMITER_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 66)
#define V4L2_CID_AUDIO_COMPRESSION_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 80)
#define V4L2_CID_AUDIO_COMPRESSION_GAIN (V4L2_CID_FM_TX_CLASS_BASE + 81)
#define V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (V4L2_CID_FM_TX_CLASS_BASE + 82)
#define V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (V4L2_CID_FM_TX_CLASS_BASE + 83)
#define V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 84)
#define V4L2_CID_PILOT_TONE_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 96)
#define V4L2_CID_PILOT_TONE_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 97)
#define V4L2_CID_PILOT_TONE_FREQUENCY (V4L2_CID_FM_TX_CLASS_BASE + 98)
#define V4L2_CID_TUNE_PREEMPHASIS (V4L2_CID_FM_TX_CLASS_BASE + 112)
enum v4l2_preemphasis {
V4L2_PREEMPHASIS_DISABLED = 0,
V4L2_PREEMPHASIS_50_uS = 1,
V4L2_PREEMPHASIS_75_uS = 2,
};
#define V4L2_CID_TUNE_POWER_LEVEL (V4L2_CID_FM_TX_CLASS_BASE + 113)
#define V4L2_CID_TUNE_ANTENNA_CAPACITOR (V4L2_CID_FM_TX_CLASS_BASE + 114)
/* Flash and privacy (indicator) light controls */
#define V4L2_CID_FLASH_CLASS_BASE (V4L2_CTRL_CLASS_FLASH | 0x900)
#define V4L2_CID_FLASH_CLASS (V4L2_CTRL_CLASS_FLASH | 1)
#define V4L2_CID_FLASH_LED_MODE (V4L2_CID_FLASH_CLASS_BASE + 1)
enum v4l2_flash_led_mode {
V4L2_FLASH_LED_MODE_NONE,
V4L2_FLASH_LED_MODE_FLASH,
V4L2_FLASH_LED_MODE_TORCH,
};
#define V4L2_CID_FLASH_STROBE_SOURCE (V4L2_CID_FLASH_CLASS_BASE + 2)
enum v4l2_flash_strobe_source {
V4L2_FLASH_STROBE_SOURCE_SOFTWARE,
V4L2_FLASH_STROBE_SOURCE_EXTERNAL,
};
#define V4L2_CID_FLASH_STROBE (V4L2_CID_FLASH_CLASS_BASE + 3)
#define V4L2_CID_FLASH_STROBE_STOP (V4L2_CID_FLASH_CLASS_BASE + 4)
#define V4L2_CID_FLASH_STROBE_STATUS (V4L2_CID_FLASH_CLASS_BASE + 5)
#define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_FLASH_CLASS_BASE + 6)
#define V4L2_CID_FLASH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 7)
#define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 8)
#define V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 9)
#define V4L2_CID_FLASH_FAULT (V4L2_CID_FLASH_CLASS_BASE + 10)
#define V4L2_FLASH_FAULT_OVER_VOLTAGE (1 << 0)
#define V4L2_FLASH_FAULT_TIMEOUT (1 << 1)
#define V4L2_FLASH_FAULT_OVER_TEMPERATURE (1 << 2)
#define V4L2_FLASH_FAULT_SHORT_CIRCUIT (1 << 3)
#define V4L2_FLASH_FAULT_OVER_CURRENT (1 << 4)
#define V4L2_FLASH_FAULT_INDICATOR (1 << 5)
#define V4L2_FLASH_FAULT_UNDER_VOLTAGE (1 << 6)
#define V4L2_FLASH_FAULT_INPUT_VOLTAGE (1 << 7)
#define V4L2_FLASH_FAULT_LED_OVER_TEMPERATURE (1 << 8)
#define V4L2_CID_FLASH_CHARGE (V4L2_CID_FLASH_CLASS_BASE + 11)
#define V4L2_CID_FLASH_READY (V4L2_CID_FLASH_CLASS_BASE + 12)
/* JPEG-class control IDs */
#define V4L2_CID_JPEG_CLASS_BASE (V4L2_CTRL_CLASS_JPEG | 0x900)
#define V4L2_CID_JPEG_CLASS (V4L2_CTRL_CLASS_JPEG | 1)
#define V4L2_CID_JPEG_CHROMA_SUBSAMPLING (V4L2_CID_JPEG_CLASS_BASE + 1)
enum v4l2_jpeg_chroma_subsampling {
V4L2_JPEG_CHROMA_SUBSAMPLING_444 = 0,
V4L2_JPEG_CHROMA_SUBSAMPLING_422 = 1,
V4L2_JPEG_CHROMA_SUBSAMPLING_420 = 2,
V4L2_JPEG_CHROMA_SUBSAMPLING_411 = 3,
V4L2_JPEG_CHROMA_SUBSAMPLING_410 = 4,
V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY = 5,
};
#define V4L2_CID_JPEG_RESTART_INTERVAL (V4L2_CID_JPEG_CLASS_BASE + 2)
#define V4L2_CID_JPEG_COMPRESSION_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 3)
#define V4L2_CID_JPEG_ACTIVE_MARKER (V4L2_CID_JPEG_CLASS_BASE + 4)
#define V4L2_JPEG_ACTIVE_MARKER_APP0 (1 << 0)
#define V4L2_JPEG_ACTIVE_MARKER_APP1 (1 << 1)
#define V4L2_JPEG_ACTIVE_MARKER_COM (1 << 16)
#define V4L2_JPEG_ACTIVE_MARKER_DQT (1 << 17)
#define V4L2_JPEG_ACTIVE_MARKER_DHT (1 << 18)
/* Image source controls */
#define V4L2_CID_IMAGE_SOURCE_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_SOURCE | 0x900)
#define V4L2_CID_IMAGE_SOURCE_CLASS (V4L2_CTRL_CLASS_IMAGE_SOURCE | 1)
#define V4L2_CID_VBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 1)
#define V4L2_CID_HBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 2)
#define V4L2_CID_ANALOGUE_GAIN (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 3)
#define V4L2_CID_TEST_PATTERN_RED (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 4)
#define V4L2_CID_TEST_PATTERN_GREENR (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 5)
#define V4L2_CID_TEST_PATTERN_BLUE (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 6)
#define V4L2_CID_TEST_PATTERN_GREENB (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 7)
/* Image processing controls */
#define V4L2_CID_IMAGE_PROC_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_PROC | 0x900)
#define V4L2_CID_IMAGE_PROC_CLASS (V4L2_CTRL_CLASS_IMAGE_PROC | 1)
#define V4L2_CID_LINK_FREQ (V4L2_CID_IMAGE_PROC_CLASS_BASE + 1)
#define V4L2_CID_PIXEL_RATE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 2)
#define V4L2_CID_TEST_PATTERN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 3)
#define V4L2_CID_DEINTERLACING_MODE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 4)
#define V4L2_CID_DIGITAL_GAIN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 5)
/* DV-class control IDs defined by V4L2 */
#define V4L2_CID_DV_CLASS_BASE (V4L2_CTRL_CLASS_DV | 0x900)
#define V4L2_CID_DV_CLASS (V4L2_CTRL_CLASS_DV | 1)
#define V4L2_CID_DV_TX_HOTPLUG (V4L2_CID_DV_CLASS_BASE + 1)
#define V4L2_CID_DV_TX_RXSENSE (V4L2_CID_DV_CLASS_BASE + 2)
#define V4L2_CID_DV_TX_EDID_PRESENT (V4L2_CID_DV_CLASS_BASE + 3)
#define V4L2_CID_DV_TX_MODE (V4L2_CID_DV_CLASS_BASE + 4)
enum v4l2_dv_tx_mode {
V4L2_DV_TX_MODE_DVI_D = 0,
V4L2_DV_TX_MODE_HDMI = 1,
};
#define V4L2_CID_DV_TX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 5)
enum v4l2_dv_rgb_range {
V4L2_DV_RGB_RANGE_AUTO = 0,
V4L2_DV_RGB_RANGE_LIMITED = 1,
V4L2_DV_RGB_RANGE_FULL = 2,
};
#define V4L2_CID_DV_TX_IT_CONTENT_TYPE (V4L2_CID_DV_CLASS_BASE + 6)
enum v4l2_dv_it_content_type {
V4L2_DV_IT_CONTENT_TYPE_GRAPHICS = 0,
V4L2_DV_IT_CONTENT_TYPE_PHOTO = 1,
V4L2_DV_IT_CONTENT_TYPE_CINEMA = 2,
V4L2_DV_IT_CONTENT_TYPE_GAME = 3,
V4L2_DV_IT_CONTENT_TYPE_NO_ITC = 4,
};
#define V4L2_CID_DV_RX_POWER_PRESENT (V4L2_CID_DV_CLASS_BASE + 100)
#define V4L2_CID_DV_RX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 101)
#define V4L2_CID_DV_RX_IT_CONTENT_TYPE (V4L2_CID_DV_CLASS_BASE + 102)
#define V4L2_CID_FM_RX_CLASS_BASE (V4L2_CTRL_CLASS_FM_RX | 0x900)
#define V4L2_CID_FM_RX_CLASS (V4L2_CTRL_CLASS_FM_RX | 1)
#define V4L2_CID_TUNE_DEEMPHASIS (V4L2_CID_FM_RX_CLASS_BASE + 1)
enum v4l2_deemphasis {
V4L2_DEEMPHASIS_DISABLED = V4L2_PREEMPHASIS_DISABLED,
V4L2_DEEMPHASIS_50_uS = V4L2_PREEMPHASIS_50_uS,
V4L2_DEEMPHASIS_75_uS = V4L2_PREEMPHASIS_75_uS,
};
#define V4L2_CID_RDS_RECEPTION (V4L2_CID_FM_RX_CLASS_BASE + 2)
#define V4L2_CID_RDS_RX_PTY (V4L2_CID_FM_RX_CLASS_BASE + 3)
#define V4L2_CID_RDS_RX_PS_NAME (V4L2_CID_FM_RX_CLASS_BASE + 4)
#define V4L2_CID_RDS_RX_RADIO_TEXT (V4L2_CID_FM_RX_CLASS_BASE + 5)
#define V4L2_CID_RDS_RX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_RX_CLASS_BASE + 6)
#define V4L2_CID_RDS_RX_TRAFFIC_PROGRAM (V4L2_CID_FM_RX_CLASS_BASE + 7)
#define V4L2_CID_RDS_RX_MUSIC_SPEECH (V4L2_CID_FM_RX_CLASS_BASE + 8)
#define V4L2_CID_RF_TUNER_CLASS_BASE (V4L2_CTRL_CLASS_RF_TUNER | 0x900)
#define V4L2_CID_RF_TUNER_CLASS (V4L2_CTRL_CLASS_RF_TUNER | 1)
#define V4L2_CID_RF_TUNER_BANDWIDTH_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 11)
#define V4L2_CID_RF_TUNER_BANDWIDTH (V4L2_CID_RF_TUNER_CLASS_BASE + 12)
#define V4L2_CID_RF_TUNER_RF_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 32)
#define V4L2_CID_RF_TUNER_LNA_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 41)
#define V4L2_CID_RF_TUNER_LNA_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 42)
#define V4L2_CID_RF_TUNER_MIXER_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 51)
#define V4L2_CID_RF_TUNER_MIXER_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 52)
#define V4L2_CID_RF_TUNER_IF_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 61)
#define V4L2_CID_RF_TUNER_IF_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 62)
#define V4L2_CID_RF_TUNER_PLL_LOCK (V4L2_CID_RF_TUNER_CLASS_BASE + 91)
/* Detection-class control IDs defined by V4L2 */
#define V4L2_CID_DETECT_CLASS_BASE (V4L2_CTRL_CLASS_DETECT | 0x900)
#define V4L2_CID_DETECT_CLASS (V4L2_CTRL_CLASS_DETECT | 1)
#define V4L2_CID_DETECT_MD_MODE (V4L2_CID_DETECT_CLASS_BASE + 1)
enum v4l2_detect_md_mode {
V4L2_DETECT_MD_MODE_DISABLED = 0,
V4L2_DETECT_MD_MODE_GLOBAL = 1,
V4L2_DETECT_MD_MODE_THRESHOLD_GRID = 2,
V4L2_DETECT_MD_MODE_REGION_GRID = 3,
};
#define V4L2_CID_DETECT_MD_GLOBAL_THRESHOLD (V4L2_CID_DETECT_CLASS_BASE + 2)
#define V4L2_CID_DETECT_MD_THRESHOLD_GRID (V4L2_CID_DETECT_CLASS_BASE + 3)
#define V4L2_CID_DETECT_MD_REGION_GRID (V4L2_CID_DETECT_CLASS_BASE + 4)
#endif
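
Controls from this header are applied through extended-control ioctls such as VIDIOC_S_EXT_CTRLS, the same call used elsewhere in this plugin code. A minimal sketch of setting one of the IDs declared above follows; the device path and the chosen profile value are placeholders, not something taken from this commit.

/* Hedged example: set V4L2_CID_MPEG_VIDEO_H264_PROFILE on an (assumed) encoder node. */
#include <fcntl.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int
set_h264_profile (const char *dev_path)
{
  struct v4l2_ext_control ctrl;
  struct v4l2_ext_controls ctrls;
  int fd = open (dev_path, O_RDWR);

  if (fd < 0)
    return -1;

  memset (&ctrl, 0, sizeof (ctrl));
  memset (&ctrls, 0, sizeof (ctrls));
  ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
  ctrl.value = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH;
  ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  ctrls.count = 1;
  ctrls.controls = &ctrl;

  /* VIDIOC_S_EXT_CTRLS applies the whole control array in one ioctl. */
  if (ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls) < 0) {
    close (fd);
    return -1;
  }

  close (fd);
  return 0;
}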

2410
gst-v4l2/ext/videodev2.h Normal file
View File

File diff suppressed because it is too large.

69
gst-v4l2/gst/gettext.h Normal file
View File

@@ -0,0 +1,69 @@
/* Convenience header for conditional use of GNU <libintl.h>.
Copyright (C) 1995-1998, 2000-2002 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU Library General Public License as published
by the Free Software Foundation; either version 2, or (at your option)
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
USA. */
#ifndef _LIBGETTEXT_H
#define _LIBGETTEXT_H 1
/* NLS can be disabled through the configure --disable-nls option. */
#ifdef ENABLE_NLS
/* Get declarations of GNU message catalog functions. */
# include <libintl.h>
#else
/* Solaris /usr/include/locale.h includes /usr/include/libintl.h, which
chokes if dcgettext is defined as a macro. So include it now, to make
later inclusions of <locale.h> a NOP. We don't include <libintl.h>
as well because people using "gettext.h" will not include <libintl.h>,
and also including <libintl.h> would fail on SunOS 4, whereas <locale.h>
is OK. */
#if defined(__sun)
# include <locale.h>
#endif
/* Disabled NLS.
The casts to 'const char *' serve the purpose of producing warnings
for invalid uses of the value returned from these functions.
On pre-ANSI systems without 'const', the config.h file is supposed to
contain "#define const". */
# define gettext(Msgid) ((const char *) (Msgid))
# define dgettext(Domainname, Msgid) ((const char *) (Msgid))
# define dcgettext(Domainname, Msgid, Category) ((const char *) (Msgid))
# define ngettext(Msgid1, Msgid2, N) \
((N) == 1 ? (const char *) (Msgid1) : (const char *) (Msgid2))
# define dngettext(Domainname, Msgid1, Msgid2, N) \
((N) == 1 ? (const char *) (Msgid1) : (const char *) (Msgid2))
# define dcngettext(Domainname, Msgid1, Msgid2, N, Category) \
((N) == 1 ? (const char *) (Msgid1) : (const char *) (Msgid2))
# define textdomain(Domainname) ((const char *) (Domainname))
# define bindtextdomain(Domainname, Dirname) ((const char *) (Dirname))
# define bind_textdomain_codeset(Domainname, Codeset) ((const char *) (Codeset))
#endif
/* A pseudo function call that serves as a marker for the automated
extraction of messages, but does not call gettext(). The run-time
translation is done at a different place in the code.
The argument, String, should be a literal string. Concatenated strings
and other string expressions won't work.
The macro's expansion is not parenthesized, so that it is suitable as
initializer for static 'char[]' or 'const char[]' variables. */
#define gettext_noop(String) String
#endif /* _LIBGETTEXT_H */
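
The closing comment above describes gettext_noop() as a marker suitable for static initializers. A minimal sketch of that pattern, with placeholder strings, might look like this:

/* Hedged example: mark strings at definition time, translate at use time. */
#include <stdio.h>
#include "gettext.h"

static const char *messages[] = {
  gettext_noop ("Device opened"),   /* only marked for extraction */
  gettext_noop ("Device closed"),
};

void
print_message (int idx)
{
  /* gettext() is either the real libintl lookup (ENABLE_NLS) or the
   * pass-through cast defined above. */
  printf ("%s\n", gettext (messages[idx]));
}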

36
gst-v4l2/gst/glib-compat-private.h Normal file
View File

@@ -0,0 +1,36 @@
/*
* glib-compat.c
* Functions copied from glib 2.10
*
* Copyright 2005 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GLIB_COMPAT_PRIVATE_H__
#define __GLIB_COMPAT_PRIVATE_H__
#include <glib.h>
G_BEGIN_DECLS
/* copies */
/* adaptations */
G_END_DECLS
#endif

47
gst-v4l2/gst/gst-i18n-plugin.h Normal file
View File

@@ -0,0 +1,47 @@
/* GStreamer
* Copyright (C) 2004 Thomas Vander Stichele <thomas@apestaart.org>
*
* gst-i18n-plugins.h: internationalization macros for the GStreamer plugins
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_I18N_PLUGIN_H__
#define __GST_I18N_PLUGIN_H__
#ifndef GETTEXT_PACKAGE
#error You must define GETTEXT_PACKAGE before including this header.
#endif
#ifdef ENABLE_NLS
#include <locale.h>
#include "gettext.h" /* included with gettext distribution and copied */
/* we want to use shorthand _() for translating and N_() for marking */
#define _(String) dgettext (GETTEXT_PACKAGE, String)
#define N_(String) gettext_noop (String)
/* FIXME: if we need it, we can add Q_ as well, like in glib */
#else
#define _(String) String
#define N_(String) String
#define ngettext(Singular,Plural,Count) ((Count>1)?Plural:Singular)
#endif
#endif /* __GST_I18N_PLUGIN_H__ */
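
Plugin sources are expected to define GETTEXT_PACKAGE before pulling in this header and then use the _() and N_() shorthands. A small sketch, with a placeholder package name and message text:

/* Hedged example: GETTEXT_PACKAGE value and message are placeholders. */
#define GETTEXT_PACKAGE "my-plugin-package"
#include <glib.h>
#include "gst/gst-i18n-plugin.h"

static const gchar *not_found_msg = N_("Device not found");  /* marked only */

void
warn_not_found (void)
{
  /* _() translates at run time when NLS is enabled, else returns the string. */
  g_printerr ("%s\n", _(not_found_msg));
}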

412
gst-v4l2/gstv4l2.c Normal file
View File

@@ -0,0 +1,412 @@
/* GStreamer
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2.c: plugin for v4l2 elements
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifndef _GNU_SOURCE
# define _GNU_SOURCE /* O_CLOEXEC */
#endif
#include "gst/gst-i18n-plugin.h"
#include <gst/gst.h>
#include <fcntl.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <stdlib.h>
#include "linux/videodev2.h"
#include "v4l2-utils.h"
#include "gstv4l2object.h"
#ifndef USE_V4L2_TARGET_NV
#include "gstv4l2src.h"
#include "gstv4l2sink.h"
#include "gstv4l2radio.h"
#include "gstv4l2h263enc.h"
#include "gstv4l2mpeg4enc.h"
#include "gstv4l2deviceprovider.h"
#include "gstv4l2transform.h"
#endif
#include "gstv4l2videodec.h"
#include "gstv4l2h264enc.h"
#include "gstv4l2h265enc.h"
#include "gstv4l2vp8enc.h"
#include "gstv4l2vp9enc.h"
#include "gstv4l2av1enc.h"
/* used in gstv4l2object.c and v4l2_calls.c */
GST_DEBUG_CATEGORY (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
#ifndef USE_V4L2_TARGET_NV_X86
gboolean is_cuvid;
#else
gboolean is_cuvid = TRUE;
#endif
#ifdef GST_V4L2_ENABLE_PROBE
/* This is a minimalist probe, for speed, we only enumerate formats */
static GstCaps *
gst_v4l2_probe_template_caps (const gchar * device, gint video_fd,
enum v4l2_buf_type type)
{
gint n;
struct v4l2_fmtdesc format;
GstCaps *caps;
GST_DEBUG ("Getting %s format enumerations", device);
caps = gst_caps_new_empty ();
for (n = 0;; n++) {
GstStructure *template;
memset (&format, 0, sizeof (format));
format.index = n;
format.type = type;
if (ioctl (video_fd, VIDIOC_ENUM_FMT, &format) < 0)
break; /* end of enumeration */
GST_LOG ("index: %u", format.index);
GST_LOG ("type: %d", format.type);
GST_LOG ("flags: %08x", format.flags);
GST_LOG ("description: '%s'", format.description);
GST_LOG ("pixelformat: %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (format.pixelformat));
template = gst_v4l2_object_v4l2fourcc_to_structure (format.pixelformat);
if (template) {
GstStructure *alt_t = NULL;
switch (format.pixelformat) {
case V4L2_PIX_FMT_RGB32:
alt_t = gst_structure_copy (template);
gst_structure_set (alt_t, "format", G_TYPE_STRING, "ARGB", NULL);
break;
case V4L2_PIX_FMT_BGR32:
alt_t = gst_structure_copy (template);
gst_structure_set (alt_t, "format", G_TYPE_STRING, "BGRA", NULL);
default:
break;
}
gst_caps_append_structure (caps, template);
if (alt_t)
gst_caps_append_structure (caps, alt_t);
}
}
return gst_caps_simplify (caps);
}
static gboolean
gst_v4l2_probe_and_register (GstPlugin * plugin)
{
GstV4l2Iterator *it;
gint video_fd = -1;
struct v4l2_capability vcap;
guint32 device_caps;
it = gst_v4l2_iterator_new ();
while (gst_v4l2_iterator_next (it)) {
GstCaps *src_caps, *sink_caps;
gchar *basename;
if (video_fd >= 0)
close (video_fd);
video_fd = open (it->device_path, O_RDWR | O_CLOEXEC);
if (video_fd == -1) {
GST_DEBUG ("Failed to open %s: %s", it->device_path, g_strerror (errno));
continue;
}
memset (&vcap, 0, sizeof (vcap));
if (ioctl (video_fd, VIDIOC_QUERYCAP, &vcap) < 0) {
GST_DEBUG ("Failed to get device capabilities: %s", g_strerror (errno));
continue;
}
if (vcap.capabilities & V4L2_CAP_DEVICE_CAPS)
device_caps = vcap.device_caps;
else
device_caps = vcap.capabilities;
if (!((device_caps & (V4L2_CAP_VIDEO_M2M | V4L2_CAP_VIDEO_M2M_MPLANE)) ||
/* But legacy driver may expose both CAPTURE and OUTPUT */
((device_caps &
(V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) &&
(device_caps &
(V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))))
continue;
GST_DEBUG ("Probing '%s' located at '%s'",
it->device_name ? it->device_name : (const gchar *) vcap.driver,
it->device_path);
/* get sink supported format (no MPLANE for codec) */
sink_caps = gst_caps_merge (gst_v4l2_probe_template_caps (it->device_path,
video_fd, V4L2_BUF_TYPE_VIDEO_OUTPUT),
gst_v4l2_probe_template_caps (it->device_path, video_fd,
V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE));
/* get src supported format */
src_caps = gst_caps_merge (gst_v4l2_probe_template_caps (it->device_path,
video_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE),
gst_v4l2_probe_template_caps (it->device_path, video_fd,
V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE));
/* Skip devices without any supported formats */
if (gst_caps_is_empty (sink_caps) || gst_caps_is_empty (src_caps)) {
gst_caps_unref (sink_caps);
gst_caps_unref (src_caps);
continue;
}
basename = g_path_get_basename (it->device_path);
if (gst_v4l2_is_video_dec (sink_caps, src_caps)) {
gst_v4l2_video_dec_register (plugin, basename, it->device_path,
sink_caps, src_caps);
} else if (gst_v4l2_is_video_enc (sink_caps, src_caps, NULL)) {
if (gst_v4l2_is_h264_enc (sink_caps, src_caps))
gst_v4l2_h264_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_mpeg4_enc (sink_caps, src_caps))
gst_v4l2_mpeg4_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_h263_enc (sink_caps, src_caps))
gst_v4l2_h263_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_vp8_enc (sink_caps, src_caps))
gst_v4l2_vp8_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_vp9_enc (sink_caps, src_caps))
gst_v4l2_vp9_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_av1_enc (sink_caps, src_caps))
gst_v4l2_av1_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
} else if (gst_v4l2_is_transform (sink_caps, src_caps)) {
gst_v4l2_transform_register (plugin, basename, it->device_path,
sink_caps, src_caps);
}
/* else if ( ... etc. */
gst_caps_unref (sink_caps);
gst_caps_unref (src_caps);
g_free (basename);
}
if (video_fd >= 0)
close (video_fd);
gst_v4l2_iterator_free (it);
return TRUE;
}
#endif
#ifndef USE_V4L2_TARGET_NV
static gboolean
plugin_init (GstPlugin * plugin)
{
const gchar *paths[] = { "/dev", "/dev/v4l2", NULL };
const gchar *names[] = { "video", NULL };
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
/* Add a dependency, so the dynamic features get updated upon changes in
* /dev/video* */
gst_plugin_add_dependency (plugin,
NULL, paths, names, GST_PLUGIN_DEPENDENCY_FLAG_FILE_NAME_IS_PREFIX);
if (!gst_element_register (plugin, "v4l2src", GST_RANK_PRIMARY,
GST_TYPE_V4L2SRC) ||
!gst_element_register (plugin, "v4l2sink", GST_RANK_NONE,
GST_TYPE_V4L2SINK) ||
!gst_element_register (plugin, "v4l2radio", GST_RANK_NONE,
GST_TYPE_V4L2RADIO) ||
!gst_device_provider_register (plugin, "v4l2deviceprovider",
GST_RANK_PRIMARY, GST_TYPE_V4L2_DEVICE_PROVIDER)
/* etc. */
#ifdef GST_V4L2_ENABLE_PROBE
|| !gst_v4l2_probe_and_register (plugin)
#endif
)
return FALSE;
#ifdef ENABLE_NLS
bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
#endif /* ENABLE_NLS */
return TRUE;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
video4linux2,
"elements for Video 4 Linux",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
#else
static gboolean
plugin_init (GstPlugin * plugin)
{
gboolean ret = TRUE;
g_setenv ("GST_V4L2_USE_LIBV4L2", "1", FALSE);
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
#ifndef USE_V4L2_TARGET_NV_X86
int ret_val = -1;
ret_val = system("lsmod | grep 'nvgpu' > /dev/null");
if (ret_val == -1) {
return FALSE;
}
else if (ret_val == 0) {
is_cuvid = FALSE;
}
else {
is_cuvid = TRUE;
}
#endif
if (is_cuvid == TRUE)
gst_v4l2_video_dec_register (plugin,
V4L2_DEVICE_BASENAME_NVDEC,
V4L2_DEVICE_PATH_NVDEC_MCCOY,
NULL,
NULL);
else if (access (V4L2_DEVICE_PATH_NVDEC, F_OK) == 0)
gst_v4l2_video_dec_register (plugin,
V4L2_DEVICE_BASENAME_NVDEC,
V4L2_DEVICE_PATH_NVDEC,
NULL,
NULL);
else
gst_v4l2_video_dec_register (plugin,
V4L2_DEVICE_BASENAME_NVDEC,
V4L2_DEVICE_PATH_NVDEC_ALT,
NULL,
NULL);
if (access (V4L2_DEVICE_PATH_NVENC, F_OK) == 0) {
gst_v4l2_h264_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
gst_v4l2_h265_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
} else {
gst_v4l2_h264_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_h265_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
}
if (is_cuvid == FALSE) {
if (access (V4L2_DEVICE_PATH_NVENC, F_OK) == 0) {
gst_v4l2_vp8_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
gst_v4l2_vp9_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
} else {
gst_v4l2_vp8_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_vp9_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
}
}
return ret;
}
#ifndef PACKAGE
#define PACKAGE "nvvideo4linux2"
#endif
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
nvvideo4linux2,
"Nvidia elements for Video 4 Linux",
plugin_init,
"1.14.0",
"LGPL",
"nvvideo4linux2",
"http://nvidia.com/")
#endif
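
On the NV target build, plugin_init() above registers the NVIDIA decoder and encoder factories directly instead of probing /dev nodes. From application code the registered elements are created like any other GStreamer factory; a minimal sketch, assuming the factories end up exposed under names such as "nvv4l2decoder" and "nvv4l2h264enc" (the exact names come from the register helpers in gstv4l2videodec.c / gstv4l2videoenc.c, not from this file):

/* Hedged example: element factory names are assumptions. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *dec, *enc;

  gst_init (&argc, &argv);

  dec = gst_element_factory_make ("nvv4l2decoder", NULL);
  enc = gst_element_factory_make ("nvv4l2h264enc", NULL);

  if (dec == NULL || enc == NULL)
    g_printerr ("nvvideo4linux2 elements not found\n");

  /* ...link them into a pipeline as usual... */

  if (dec != NULL)
    gst_object_unref (dec);
  if (enc != NULL)
    gst_object_unref (enc);

  return 0;
}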

1619
gst-v4l2/gstv4l2allocator.c Normal file
View File

File diff suppressed because it is too large.

181
gst-v4l2/gstv4l2allocator.h Normal file
View File

@@ -0,0 +1,181 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
* Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_ALLOCATOR_H__
#define __GST_V4L2_ALLOCATOR_H__
#include "linux/videodev2.h"
#include <gst/gst.h>
#include <gst/gstatomicqueue.h>
G_BEGIN_DECLS
#define GST_TYPE_V4L2_ALLOCATOR (gst_v4l2_allocator_get_type())
#define GST_IS_V4L2_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_ALLOCATOR))
#define GST_IS_V4L2_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_V4L2_ALLOCATOR))
#define GST_V4L2_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_V4L2_ALLOCATOR, GstV4l2AllocatorClass))
#define GST_V4L2_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_ALLOCATOR, GstV4l2Allocator))
#define GST_V4L2_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_V4L2_ALLOCATOR, GstV4l2AllocatorClass))
#define GST_V4L2_ALLOCATOR_CAST(obj) ((GstV4l2Allocator *)(obj))
#define GST_V4L2_ALLOCATOR_CAN_REQUEST(obj,type) \
(GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUFS))
#define GST_V4L2_ALLOCATOR_CAN_ALLOCATE(obj,type) \
(GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUFS))
#define GST_V4L2_MEMORY_QUARK gst_v4l2_memory_quark ()
/* The structures are renamed as the name conflicts with the
* OSS v4l2 library structures. */
#ifdef USE_V4L2_TARGET_NV
#define GstV4l2Allocator GstNvV4l2Allocator
#define GstV4l2AllocatorClass GstNvV4l2AllocatorClass
#endif
#ifdef USE_V4L2_TARGET_NV
#define NV_VIDEO_MAX_FRAME 64
#endif
typedef struct _GstV4l2Allocator GstV4l2Allocator;
typedef struct _GstV4l2AllocatorClass GstV4l2AllocatorClass;
typedef struct _GstV4l2MemoryGroup GstV4l2MemoryGroup;
typedef struct _GstV4l2Memory GstV4l2Memory;
typedef enum _GstV4l2Capabilities GstV4l2Capabilities;
typedef enum _GstV4l2Return GstV4l2Return;
typedef struct _GstV4l2Object GstV4l2Object;
enum _GstV4l2AllocatorFlags
{
GST_V4L2_ALLOCATOR_FLAG_MMAP_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 0),
GST_V4L2_ALLOCATOR_FLAG_MMAP_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 1),
GST_V4L2_ALLOCATOR_FLAG_USERPTR_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 2),
GST_V4L2_ALLOCATOR_FLAG_USERPTR_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 3),
GST_V4L2_ALLOCATOR_FLAG_DMABUF_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 4),
GST_V4L2_ALLOCATOR_FLAG_DMABUF_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 5),
};
enum _GstV4l2Return
{
GST_V4L2_OK = 0,
GST_V4L2_ERROR = -1,
GST_V4L2_BUSY = -2
};
struct _GstV4l2Memory
{
GstMemory mem;
gint plane;
GstV4l2MemoryGroup *group;
gpointer data;
gint dmafd;
};
struct _GstV4l2MemoryGroup
{
gint n_mem;
GstMemory * mem[VIDEO_MAX_PLANES];
gint mems_allocated;
struct v4l2_buffer buffer;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
};
struct _GstV4l2Allocator
{
GstAllocator parent;
GstV4l2Object *obj;
guint32 count;
guint32 memory;
gboolean can_allocate;
gboolean active;
#ifdef USE_V4L2_TARGET_NV
GstV4l2MemoryGroup * groups[NV_VIDEO_MAX_FRAME];
#else
GstV4l2MemoryGroup * groups[VIDEO_MAX_FRAME];
#endif
GstAtomicQueue *free_queue;
GstAtomicQueue *pending_queue;
#ifdef USE_V4L2_TARGET_NV
gboolean enable_dynamic_allocation; /* whether dynamic buffer allocation should be enabled */
#endif
};
struct _GstV4l2AllocatorClass {
GstAllocatorClass parent_class;
};
GType gst_v4l2_allocator_get_type(void);
gboolean gst_is_v4l2_memory (GstMemory * mem);
GQuark gst_v4l2_memory_quark (void);
gboolean gst_v4l2_allocator_is_active (GstV4l2Allocator * allocator);
guint gst_v4l2_allocator_get_size (GstV4l2Allocator * allocator);
GstV4l2Allocator* gst_v4l2_allocator_new (GstObject *parent, GstV4l2Object * obj);
guint gst_v4l2_allocator_start (GstV4l2Allocator * allocator,
guint32 count, guint32 memory);
GstV4l2Return gst_v4l2_allocator_stop (GstV4l2Allocator * allocator);
GstV4l2MemoryGroup* gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator);
GstV4l2MemoryGroup* gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
GstAllocator * dmabuf_allocator);
GstV4l2MemoryGroup * gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator);
GstV4l2MemoryGroup * gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator);
gboolean gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup *group,
gint n_mem, GstMemory ** dma_mem);
gboolean gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup *group,
gsize img_size, int n_planes,
gpointer * data, gsize * size);
void gst_v4l2_allocator_flush (GstV4l2Allocator * allocator);
gboolean gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup * group);
GstFlowReturn gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup ** group);
void gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup * group);
#ifdef USE_V4L2_TARGET_NV
void
gst_v4l2_allocator_enable_dynamic_allocation (GstV4l2Allocator * allocator,
gboolean enable_dynamic_allocation);
#endif
G_END_DECLS
#endif /* __GST_V4L2_ALLOCATOR_H__ */
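
A rough lifecycle for the allocator API declared above, kept as a sketch: the caller is assumed to already own an open, configured GstV4l2Object, and error handling is reduced to the minimum.

/* Hedged example: not taken from the tree, only exercises the declarations above. */
#include "gstv4l2allocator.h"

static gboolean
try_mmap_allocation (GstObject * parent, GstV4l2Object * obj, guint count)
{
  GstV4l2Allocator *alloc;
  GstV4l2MemoryGroup *group;
  guint granted, i;

  alloc = gst_v4l2_allocator_new (parent, obj);
  if (alloc == NULL)
    return FALSE;

  /* Request 'count' MMAP buffers from the driver; it may grant fewer. */
  granted = gst_v4l2_allocator_start (alloc, count, V4L2_MEMORY_MMAP);

  /* Each group wraps one v4l2_buffer plus its per-plane GstMemory objects. */
  for (i = 0; i < granted; i++) {
    group = gst_v4l2_allocator_alloc_mmap (alloc);
    if (group == NULL)
      break;
  }

  /* Real users queue/dequeue the groups here; this sketch just tears down. */
  gst_v4l2_allocator_stop (alloc);
  gst_object_unref (alloc);

  return granted > 0;
}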

340
gst-v4l2/gstv4l2av1enc.c Normal file
View File

@@ -0,0 +1,340 @@
/*
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include "gstv4l2object.h"
#include "gstv4l2av1enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_av1_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_av1_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-av1");
/* prototypes */
gboolean gst_v4l2_av1_enc_tile_configuration (GstV4l2Object * v4l2object,
gboolean enable_tile, guint32 log2_tile_rows, guint32 log2_tile_cols);
static gboolean gst_v4l2_video_enc_parse_tile_configuration (GstV4l2Av1Enc * self,
const gchar * arr);
gboolean set_v4l2_av1_encoder_properties (GstVideoEncoder * encoder);
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
PROP_ENABLE_HEADER,
PROP_ENABLE_TILE_CONFIG,
PROP_DISABLE_CDF,
PROP_ENABLE_SSIMRDO,
PROP_NUM_REFERENCE_FRAMES,
};
#define DEFAULT_NUM_REFERENCE_FRAMES 0
#define MAX_NUM_REFERENCE_FRAMES 4
#define gst_v4l2_av1_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2Av1Enc, gst_v4l2_av1_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_av1_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstV4l2Av1Enc *self = GST_V4L2_AV1_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
self->EnableHeaders = g_value_get_boolean (value);
video_enc->v4l2capture->Enable_headers = g_value_get_boolean (value);
break;
case PROP_ENABLE_TILE_CONFIG:
gst_v4l2_video_enc_parse_tile_configuration (self,
g_value_get_string (value));
self->EnableTileConfig = TRUE;
break;
case PROP_DISABLE_CDF:
self->DisableCDFUpdate = g_value_get_boolean (value);
break;
case PROP_ENABLE_SSIMRDO:
self->EnableSsimRdo = g_value_get_boolean (value);
break;
case PROP_NUM_REFERENCE_FRAMES:
self->nRefFrames = g_value_get_uint (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_v4l2_av1_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstV4l2Av1Enc *self = GST_V4L2_AV1_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
g_value_set_boolean (value, self->EnableHeaders);
break;
case PROP_ENABLE_TILE_CONFIG:
break;
case PROP_DISABLE_CDF:
g_value_set_boolean (value, self->DisableCDFUpdate);
break;
case PROP_ENABLE_SSIMRDO:
g_value_set_boolean (value, self->EnableSsimRdo);
break;
case PROP_NUM_REFERENCE_FRAMES:
g_value_set_uint (value, self->nRefFrames);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "0"))
v4l2_profile = 0;
else if (g_str_equal (profile, "1"))
v4l2_profile = 1;
else if (g_str_equal (profile, "2"))
v4l2_profile = 2;
else if (g_str_equal (profile, "3"))
v4l2_profile = 3;
else
GST_WARNING ("Unsupported profile string '%s'", profile);
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case 0:
return "0";
case 1:
return "1";
case 2:
return "2";
case 3:
return "3";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static gboolean
gst_v4l2_video_enc_parse_tile_configuration (GstV4l2Av1Enc * self,
const gchar * arr)
{
gchar *str;
self->Log2TileRows = atoi (arr);
str = g_strstr_len (arr, -1, ",");
self->Log2TileCols = atoi (str + 1);
return TRUE;
}
gboolean
gst_v4l2_av1_enc_tile_configuration (GstV4l2Object * v4l2object,
gboolean enable_tile, guint32 log2_tile_rows, guint32 log2_tile_cols)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_enc_av1_tile_config param =
{enable_tile, log2_tile_rows, log2_tile_cols};
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_AV1_TILE_CONFIGURATION;
control.string = (gchar *) &param;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting tile configuration\n");
return FALSE;
}
return TRUE;
}
gboolean
set_v4l2_av1_encoder_properties (GstVideoEncoder * encoder)
{
GstV4l2Av1Enc *self = GST_V4L2_AV1_ENC (encoder);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (encoder);
if (!GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
g_print ("V4L2 device is not open\n");
return FALSE;
}
if (self->EnableTileConfig) {
if (!gst_v4l2_av1_enc_tile_configuration (video_enc->v4l2output,
self->EnableTileConfig, self->Log2TileRows, self->Log2TileCols)) {
g_print ("S_EXT_CTRLS for Tile Configuration failed\n");
return FALSE;
}
}
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_AV1_DISABLE_CDF_UPDATE, self->DisableCDFUpdate)) {
g_print ("S_EXT_CTRLS for DisableCDF Update failed\n");
return FALSE;
}
if (self->EnableSsimRdo) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_AV1_ENABLE_SSIMRDO, self->EnableSsimRdo)) {
g_print ("S_EXT_CTRLS for SSIM RDO failed\n");
return FALSE;
}
}
if (self->nRefFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES,
self->nRefFrames)) {
g_print ("S_EXT_CTRLS for NUM_REFERENCE_FRAMES failed\n");
return FALSE;
}
}
return TRUE;
}
static void
gst_v4l2_av1_enc_init (GstV4l2Av1Enc * self)
{
self->EnableTileConfig = FALSE;
self->DisableCDFUpdate = TRUE;
self->EnableSsimRdo = FALSE;
self->Log2TileRows= 0;
self->Log2TileCols= 0;
}
static void
gst_v4l2_av1_enc_class_init (GstV4l2Av1EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_av1_enc_debug, "v4l2av1enc", 0,
"V4L2 AV1 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 AV1 Encoder",
"Codec/Encoder/Video",
"Encode AV1 video streams via V4L2 API",
"Anuma Rathore <arathore@nvidia.com>");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_av1_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_av1_enc_get_property);
g_object_class_install_property (gobject_class, PROP_ENABLE_HEADER,
g_param_spec_boolean ("enable-headers", "Enable AV1 headers",
"Enable AV1 file and frame headers, if enabled, dump elementary stream",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_TILE_CONFIG,
g_param_spec_string ("tiles", "AV1 Log2 Tile Configuration",
"Use string with values of Tile Configuration"
"in Log2Rows:Log2Cols. Eg: \"1,0\"",
"0,0", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_DISABLE_CDF,
g_param_spec_boolean ("disable-cdf", "Disable CDF Update",
"Flag to control Disable CDF Update, enabled by default",
TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_SSIMRDO,
g_param_spec_boolean ("enable-srdo", "Enable SSIM RDO",
"Enable SSIM RDO",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder, default set by encoder",
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
baseclass->codec_name = "AV1";
baseclass->profile_cid = 0; /* Only single profile supported */
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
baseclass->set_encoder_properties = set_v4l2_av1_encoder_properties;
}
/* Probing functions */
gboolean
gst_v4l2_is_av1_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_av1_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_AV1_ENC,
"av1", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}
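
The properties installed in gst_v4l2_av1_enc_class_init() are regular GObject properties, so they can be configured with g_object_set() once the element is created. A sketch, assuming the factory is exposed as "nvv4l2av1enc" (the actual name is built by gst_v4l2_video_enc_register(), not shown here):

/* Hedged example: factory name is an assumption; property names match class_init above. */
#include <gst/gst.h>

static GstElement *
make_av1_encoder (void)
{
  GstElement *enc = gst_element_factory_make ("nvv4l2av1enc", NULL);

  if (enc == NULL)
    return NULL;

  g_object_set (enc,
      "tiles", "1,0",          /* log2 tile rows, log2 tile cols, as parsed by the tiles property */
      "disable-cdf", TRUE,     /* keep CDF update disabled */
      "enable-srdo", FALSE,
      "num-Ref-Frames", 2,     /* range 0..MAX_NUM_REFERENCE_FRAMES per the param spec */
      NULL);

  return enc;
}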

66
gst-v4l2/gstv4l2av1enc.h Normal file
View File

@@ -0,0 +1,66 @@
/*
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_AV1_ENC_H__
#define __GST_V4L2_AV1_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_AV1_ENC \
(gst_v4l2_av1_enc_get_type())
#define GST_V4L2_AV1_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_AV1_ENC,GstV4l2Av1Enc))
#define GST_V4L2_AV1_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_AV1_ENC,GstV4l2Av1EncClass))
#define GST_IS_V4L2_AV1_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_AV1_ENC))
#define GST_IS_V4L2_AV1_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_AV1_ENC))
typedef struct _GstV4l2Av1Enc GstV4l2Av1Enc;
typedef struct _GstV4l2Av1EncClass GstV4l2Av1EncClass;
struct _GstV4l2Av1Enc
{
GstV4l2VideoEnc parent;
gboolean EnableHeaders;
gboolean EnableTileConfig;
gboolean DisableCDFUpdate;
gboolean EnableSsimRdo;
guint32 Log2TileRows;
guint32 Log2TileCols;
guint32 nRefFrames;
};
struct _GstV4l2Av1EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_av1_enc_get_type (void);
gboolean gst_v4l2_is_av1_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_av1_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_AV1_ENC_H__ */

gst-v4l2/gstv4l2bufferpool.c (new file, 2533 lines)
File diff suppressed because it is too large.

gst-v4l2/gstv4l2bufferpool.h (new file, 140 lines)
@@ -0,0 +1,140 @@
/* GStreamer
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* 2009 Texas Instruments, Inc - http://www.ti.com/
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2bufferpool.h V4L2 buffer pool class
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_V4L2_BUFFER_POOL_H__
#define __GST_V4L2_BUFFER_POOL_H__
#include <gst/gst.h>
/* The structures are renamed as the names conflict with the
 * OSS v4l2 library structures. */
#ifdef USE_V4L2_TARGET_NV
#define GstV4l2BufferPool GstNvV4l2BufferPool
#define GstV4l2BufferPoolClass GstNvV4l2BufferPoolClass
#endif
typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
typedef struct _GstV4l2BufferPoolClass GstV4l2BufferPoolClass;
typedef struct _GstV4l2Meta GstV4l2Meta;
#include "gstv4l2object.h"
#include "gstv4l2allocator.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_BUFFER_POOL (gst_v4l2_buffer_pool_get_type())
#define GST_IS_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER_POOL))
#define GST_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER_POOL, GstV4l2BufferPool))
#define GST_V4L2_BUFFER_POOL_CAST(obj) ((GstV4l2BufferPool*)(obj))
/* This flow return is used to indicate that the last buffer of a
 * drain or a resolution change has been found. This should normally
 * only occur for mem-2-mem devices. */
#define GST_V4L2_FLOW_LAST_BUFFER GST_FLOW_CUSTOM_SUCCESS
/* This flow return is used to indicate that the returned buffer was marked
 * with the error flag and had no payload. This error should be recovered by
 * simply waiting for the next buffer. */
#define GST_V4L2_FLOW_CORRUPTED_BUFFER GST_FLOW_CUSTOM_SUCCESS_1
struct _GstV4l2BufferPool
{
GstBufferPool parent;
GstV4l2Object *obj; /* the v4l2 object */
gint video_fd; /* a dup(2) of the v4l2object's video_fd */
GstPoll *poll; /* a poll for video_fd */
GstPollFD pollfd;
gboolean can_poll_device;
gboolean empty;
GCond empty_cond;
GstV4l2Allocator *vallocator;
GstAllocator *allocator;
GstAllocationParams params;
GstBufferPool *other_pool;
guint size;
GstVideoInfo caps_info; /* Default video information */
gboolean add_videometa; /* set if video meta should be added */
gboolean enable_copy_threshold; /* If copy_threshold should be set */
guint min_latency; /* number of buffers we will hold */
guint max_latency; /* number of buffers we can hold */
guint num_queued; /* number of buffers queued in the driver */
guint num_allocated; /* number of buffers allocated */
guint copy_threshold; /* when our pool runs lower, start handing out copies */
gboolean streaming;
gboolean flushing;
#ifdef USE_V4L2_TARGET_NV
GstBuffer *buffers[NV_VIDEO_MAX_FRAME];
#else
GstBuffer *buffers[VIDEO_MAX_FRAME];
#endif
/* signal handlers */
gulong group_released_handler;
/* Control to warn only once about drivers with buggy field handling */
gboolean has_warned_on_buggy_field;
#ifdef USE_V4L2_TARGET_NV
gboolean enable_dynamic_allocation; /* If dynamic_allocation should be set */
#endif
};
struct _GstV4l2BufferPoolClass
{
GstBufferPoolClass parent_class;
};
GType gst_v4l2_buffer_pool_get_type (void);
GstBufferPool * gst_v4l2_buffer_pool_new (GstV4l2Object *obj, GstCaps *caps);
GstFlowReturn gst_v4l2_buffer_pool_process (GstV4l2BufferPool * bpool, GstBuffer ** buf);
void gst_v4l2_buffer_pool_set_other_pool (GstV4l2BufferPool * pool,
GstBufferPool * other_pool);
void gst_v4l2_buffer_pool_copy_at_threshold (GstV4l2BufferPool * pool,
gboolean copy);
gboolean gst_v4l2_buffer_pool_flush (GstBufferPool *pool);
#ifdef USE_V4L2_TARGET_NV
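/* NVIDIA-specific helpers: toggle dynamic buffer allocation on the pool and
 * fetch encoder motion-vector metadata for the buffer at the given index. */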
void
gst_v4l2_buffer_pool_enable_dynamic_allocation (GstV4l2BufferPool * pool,
gboolean enable_dynamic_allocation);
gint
get_motion_vectors (GstV4l2Object *obj, guint32 bufferIndex,
v4l2_ctrl_videoenc_outputbuf_metadata_MV *enc_mv_metadata);
#endif
G_END_DECLS
#endif /*__GST_V4L2_BUFFER_POOL_H__ */

gst-v4l2/gstv4l2h264enc.c (new file, 842 lines)
@@ -0,0 +1,842 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association
* Author: ayaka <ayaka@soulik.info>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2h264enc.h"
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_h264_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_h264_enc_debug
#ifdef USE_V4L2_TARGET_NV
static GType
gst_v4l2_videnc_profile_get_type (void);
#define GST_TYPE_V4L2_VID_ENC_PROFILE (gst_v4l2_videnc_profile_get_type ())
/* prototypes */
gboolean gst_v4l2_h264_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type);
gboolean set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder);
#endif
#ifdef USE_V4L2_TARGET_NV
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-h264, stream-format=(string) byte-stream, "
"alignment=(string) { au, nal }");
#else
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-h264, stream-format=(string) byte-stream, "
"alignment=(string) au");
#endif
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
#ifdef USE_V4L2_TARGET_NV
PROP_PROFILE,
PROP_INSERT_VUI,
PROP_EXTENDED_COLORFORMAT,
PROP_INSERT_SPS_PPS,
PROP_INSERT_AUD,
PROP_NUM_BFRAMES,
PROP_ENTROPY_CODING,
PROP_BIT_PACKETIZATION,
PROP_SLICE_INTRA_REFRESH,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
PROP_TWO_PASS_CBR,
PROP_ENABLE_MV_META,
PROP_SLICE_HEADER_SPACING,
PROP_NUM_REFERENCE_FRAMES,
PROP_PIC_ORDER_CNT_TYPE,
PROP_ENABLE_LOSSLESS_ENC
#endif
/* TODO add H264 controls
* PROP_I_FRAME_QP,
* PROP_P_FRAME_QP,
* PROP_B_FRAME_QP,
* PROP_MIN_QP,
* PROP_MAX_QP,
* PROP_8x8_TRANSFORM,
* PROP_CPB_SIZE,
* PROP_ENTROPY_MODE,
* PROP_I_PERIOD,
* PROP_LOOP_FILTER_ALPHA,
* PROP_LOOP_FILTER_BETA,
* PROP_LOOP_FILTER_MODE,
* PROP_VUI_EXT_SAR_HEIGHT,
* PROP_VUI_EXT_SAR_WIDTH,
* PROP_VUI_SAR_ENABLED,
* PROP_VUI_SAR_IDC,
* PROP_SEI_FRAME_PACKING,
* PROP_SEI_FP_CURRENT_FRAME_0,
* PROP_SEI_FP_ARRANGEMENT_TYP,
* ...
* */
};
#ifdef USE_V4L2_TARGET_NV
#define DEFAULT_PROFILE V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE
#define DEFAULT_NUM_B_FRAMES 0
#define MAX_NUM_B_FRAMES 2
#define DEFAULT_NUM_REFERENCE_FRAMES 1
#define MAX_NUM_REFERENCE_FRAMES 8
#define DEFAULT_BIT_PACKETIZATION FALSE
#define DEFAULT_SLICE_HEADER_SPACING 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 60
#define DEFAULT_PIC_ORDER_CNT_TYPE 0
#endif
#define gst_v4l2_h264_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2H264Enc, gst_v4l2_h264_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_h264_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2H264Enc *self = GST_V4L2_H264_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_PROFILE:
self->profile = g_value_get_enum (value);
if (GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H264_PROFILE, self->profile)) {
g_print ("S_EXT_CTRLS for H264_PROFILE failed\n");
}
}
break;
case PROP_NUM_BFRAMES:
self->nBFrames = g_value_get_uint (value);
if (self->nBFrames && self->nRefFrames == DEFAULT_NUM_REFERENCE_FRAMES)
{
self->nRefFrames = 2;
g_print ("Minimum 2 Ref-Frames are required for B-frames encoding\n");
}
break;
case PROP_INSERT_SPS_PPS:
self->insert_sps_pps = g_value_get_boolean (value);
break;
case PROP_INSERT_AUD:
self->insert_aud = g_value_get_boolean (value);
break;
case PROP_INSERT_VUI:
self->insert_vui = g_value_get_boolean (value);
break;
/* extended-colorformat property is available for cuvid path only */
case PROP_EXTENDED_COLORFORMAT:
self->extended_colorformat = g_value_get_boolean (value);
break;
case PROP_ENTROPY_CODING:
self->disable_cabac_entropy_coding = g_value_get_boolean (value);
break;
case PROP_BIT_PACKETIZATION:
self->bit_packetization = g_value_get_boolean (value);
break;
case PROP_SLICE_HEADER_SPACING:
self->slice_header_spacing = g_value_get_uint64 (value);
if (self->slice_header_spacing)
video_enc->slice_output = TRUE;
else
video_enc->slice_output = FALSE;
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
self->SliceIntraRefreshInterval = g_value_get_uint (value);
break;
case PROP_TWO_PASS_CBR:
self->EnableTwopassCBR = g_value_get_boolean (value);
break;
case PROP_ENABLE_MV_META:
self->EnableMVBufferMeta = g_value_get_boolean (value);
video_enc->v4l2capture->enableMVBufferMeta = g_value_get_boolean (value);
break;
case PROP_NUM_REFERENCE_FRAMES:
self->nRefFrames = g_value_get_uint (value);
break;
case PROP_PIC_ORDER_CNT_TYPE:
self->poc_type = g_value_get_uint (value);
break;
case PROP_ENABLE_LOSSLESS_ENC:
self->enableLossless = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static void
gst_v4l2_h264_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2H264Enc *self = GST_V4L2_H264_ENC (object);
switch (prop_id) {
case PROP_PROFILE:
g_value_set_enum (value, self->profile);
break;
case PROP_NUM_BFRAMES:
g_value_set_uint (value, self->nBFrames);
break;
case PROP_INSERT_SPS_PPS:
g_value_set_boolean (value, self->insert_sps_pps);
break;
case PROP_INSERT_AUD:
g_value_set_boolean (value, self->insert_aud);
break;
case PROP_INSERT_VUI:
g_value_set_boolean (value, self->insert_vui);
break;
/* extended-colorformat property is available for cuvid path only */
case PROP_EXTENDED_COLORFORMAT:
g_value_set_boolean (value, self->extended_colorformat);
break;
case PROP_ENTROPY_CODING:
g_value_set_boolean (value, self->disable_cabac_entropy_coding);
break;
case PROP_BIT_PACKETIZATION:
g_value_set_boolean (value, self->bit_packetization);
break;
case PROP_SLICE_HEADER_SPACING:
g_value_set_uint64 (value, self->slice_header_spacing);
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
g_value_set_uint (value, self->SliceIntraRefreshInterval);
break;
case PROP_TWO_PASS_CBR:
g_value_set_boolean (value, self->EnableTwopassCBR);
break;
case PROP_ENABLE_MV_META:
g_value_set_boolean (value, self->EnableMVBufferMeta);
break;
case PROP_NUM_REFERENCE_FRAMES:
g_value_set_uint (value, self->nRefFrames);
break;
case PROP_PIC_ORDER_CNT_TYPE:
g_value_set_uint (value, self->poc_type);
break;
case PROP_ENABLE_LOSSLESS_ENC:
g_value_set_boolean (value, self->enableLossless);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "baseline")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE;
} else if (g_str_equal (profile, "constrained-baseline")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE;
} else if (g_str_equal (profile, "main")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_MAIN;
} else if (g_str_equal (profile, "extended")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED;
} else if (g_str_equal (profile, "high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH;
} else if (g_str_equal (profile, "high-10")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10;
} else if (g_str_equal (profile, "high-4:2:2")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422;
} else if (g_str_equal (profile, "high-4:4:4")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE;
} else if (g_str_equal (profile, "high-10-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA;
} else if (g_str_equal (profile, "high-4:2:2-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA;
} else if (g_str_equal (profile, "high-4:4:4-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA;
} else if (g_str_equal (profile, "cavlc-4:4:4-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA;
} else if (g_str_equal (profile, "scalable-baseline")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE;
} else if (g_str_equal (profile, "scalable-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH;
} else if (g_str_equal (profile, "scalable-high-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA;
} else if (g_str_equal (profile, "stereo-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH;
} else if (g_str_equal (profile, "multiview-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH;
} else {
GST_WARNING ("Unsupported profile string '%s'", profile);
}
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE:
return "baseline";
case V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE:
return "constrained-baseline";
case V4L2_MPEG_VIDEO_H264_PROFILE_MAIN:
return "main";
case V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED:
return "extended";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH:
return "high";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10:
return "high-10";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422:
return "high-4:2:2";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE:
return "high-4:4:4";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA:
return "high-10-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA:
return "high-4:2:2-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA:
return "high-4:4:4-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA:
return "cavlc-4:4:4-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE:
return "scalable-baseline";
case V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH:
return "scalable-high";
case V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA:
return "scalable-high-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH:
return "stereo-high";
case V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH:
return "multiview-high";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static gint
v4l2_level_from_string (const gchar * level)
{
gint v4l2_level = -1;
if (g_str_equal (level, "1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_0;
else if (g_str_equal (level, "1b"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1B;
else if (g_str_equal (level, "1.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_1;
else if (g_str_equal (level, "1.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_2;
else if (g_str_equal (level, "1.3"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_3;
else if (g_str_equal (level, "2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_0;
else if (g_str_equal (level, "2.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_1;
else if (g_str_equal (level, "2.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_2;
else if (g_str_equal (level, "3"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_0;
else if (g_str_equal (level, "3.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_1;
else if (g_str_equal (level, "3.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_2;
else if (g_str_equal (level, "4"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
else if (g_str_equal (level, "4.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_1;
else if (g_str_equal (level, "4.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_2;
else if (g_str_equal (level, "5"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_5_0;
else if (g_str_equal (level, "5.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_5_1;
else
GST_WARNING ("Unsupported level '%s'", level);
return v4l2_level;
}
static const gchar *
v4l2_level_to_string (gint v4l2_level)
{
switch (v4l2_level) {
case V4L2_MPEG_VIDEO_H264_LEVEL_1_0:
return "1";
case V4L2_MPEG_VIDEO_H264_LEVEL_1B:
return "1b";
case V4L2_MPEG_VIDEO_H264_LEVEL_1_1:
return "1.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_1_2:
return "1.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_1_3:
return "1.3";
case V4L2_MPEG_VIDEO_H264_LEVEL_2_0:
return "2";
case V4L2_MPEG_VIDEO_H264_LEVEL_2_1:
return "2.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_2_2:
return "2.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_3_0:
return "3.0";
case V4L2_MPEG_VIDEO_H264_LEVEL_3_1:
return "3.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_3_2:
return "3.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_4_0:
return "4";
case V4L2_MPEG_VIDEO_H264_LEVEL_4_1:
return "4.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_4_2:
return "4.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_5_0:
return "5";
case V4L2_MPEG_VIDEO_H264_LEVEL_5_1:
return "5.1";
default:
GST_WARNING ("Unsupported V4L2 level %i", v4l2_level);
break;
}
return NULL;
}
static void
gst_v4l2_h264_enc_init (GstV4l2H264Enc * self)
{
#ifdef USE_V4L2_TARGET_NV
self->profile = DEFAULT_PROFILE;
self->insert_sps_pps = FALSE;
self->insert_aud = FALSE;
self->insert_vui = FALSE;
self->enableLossless = FALSE;
if (is_cuvid == TRUE)
self->extended_colorformat = FALSE;
self->nBFrames = 0;
self->nRefFrames = 1;
self->bit_packetization = DEFAULT_BIT_PACKETIZATION;
self->slice_header_spacing = DEFAULT_SLICE_HEADER_SPACING;
self->poc_type = DEFAULT_PIC_ORDER_CNT_TYPE;
#endif
}
static void
gst_v4l2_h264_enc_class_init (GstV4l2H264EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_h264_enc_debug, "v4l2h264enc", 0,
"V4L2 H.264 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 H.264 Encoder",
"Codec/Encoder/Video",
"Encode H.264 video streams via V4L2 API", "ayaka <ayaka@soulik.info>");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h264_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h264_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_PROFILE,
g_param_spec_enum ("profile", "profile",
"Set profile for v4l2 encode",
GST_TYPE_V4L2_VID_ENC_PROFILE, DEFAULT_PROFILE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI Info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_PIC_ORDER_CNT_TYPE,
g_param_spec_uint ("poc-type",
"Picture Order Count type",
"Set Picture Order Count type value",
0, 2, DEFAULT_PIC_ORDER_CNT_TYPE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
g_param_spec_boolean ("insert-vui",
"Insert H.264 VUI",
"Insert H.264 VUI(Video Usability Information) in SPS",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_SPS_PPS,
g_param_spec_boolean ("insert-sps-pps",
"Insert H.264 SPS, PPS",
"Insert H.264 SPS, PPS at every IDR frame",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_AUD,
g_param_spec_boolean ("insert-aud",
"Insert H.264 AUD",
"Insert H.264 Access Unit Delimiter(AUD)",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_NUM_BFRAMES,
g_param_spec_uint ("num-B-Frames",
"B Frames between two reference frames",
"Number of B Frames between two reference frames (not recommended)",
0, MAX_NUM_B_FRAMES, DEFAULT_NUM_B_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENTROPY_CODING,
g_param_spec_boolean ("disable-cabac",
"Set Entropy Coding",
"Set Entropy Coding Type CAVLC(TRUE) or CABAC(FALSE)",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_BIT_PACKETIZATION,
g_param_spec_boolean ("bit-packetization", "Bit Based Packetization",
"Whether or not Packet size is based upon Number Of bits",
DEFAULT_BIT_PACKETIZATION,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_SLICE_HEADER_SPACING,
g_param_spec_uint64 ("slice-header-spacing", "Slice Header Spacing",
"Slice Header Spacing number of macroblocks/bits in one packet",
0, G_MAXUINT64, DEFAULT_SLICE_HEADER_SPACING,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_MV_META,
g_param_spec_boolean ("EnableMVBufferMeta",
"Enable Motion Vector Meta data",
"Enable Motion Vector Meta data for encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_TWO_PASS_CBR,
g_param_spec_boolean ("EnableTwopassCBR",
"Enable Two pass CBR",
"Enable two pass CBR while encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder",
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_LOSSLESS_ENC,
g_param_spec_boolean ("enable-lossless",
"Enable Lossless encoding",
"Enable lossless encoding for YUV444",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
}
#endif
baseclass->codec_name = "H264";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
baseclass->level_cid = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
baseclass->level_to_string = v4l2_level_to_string;
baseclass->level_from_string = v4l2_level_from_string;
#ifdef USE_V4L2_TARGET_NV
baseclass->set_encoder_properties = set_v4l2_h264_encoder_properties;
#endif
}
/* Probing functions */
gboolean
gst_v4l2_is_h264_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_h264_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_H264_ENC,
"h264", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}
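/* Illustrative usage (a sketch, assuming the NVIDIA plugin set, where this
 * encoder is typically registered as nvv4l2h264enc; element and converter
 * names depend on the platform build):
 *   gst-launch-1.0 videotestsrc num-buffers=300 ! nvvidconv ! \
 *     'video/x-raw(memory:NVMM), format=NV12' ! nvv4l2h264enc insert-sps-pps=true ! \
 *     h264parse ! qtmux ! filesink location=out.mp4
 */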
#ifdef USE_V4L2_TARGET_NV
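/* Registers a GEnum type exposing the H.264 profiles supported by this
 * encoder (Baseline, Main, High, High444) for the "profile" property. */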
static GType
gst_v4l2_videnc_profile_get_type (void)
{
static volatile gsize profile = 0;
static const GEnumValue profile_type[] = {
{V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE,
"GST_V4L2_H264_VIDENC_BASELINE_PROFILE",
"Baseline"},
{V4L2_MPEG_VIDEO_H264_PROFILE_MAIN, "GST_V4L2_H264_VIDENC_MAIN_PROFILE",
"Main"},
{V4L2_MPEG_VIDEO_H264_PROFILE_HIGH, "GST_V4L2_H264_VIDENC_HIGH_PROFILE",
"High"},
{V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE, "GST_V4L2_H264_VIDENC_HIGH_444_PREDICTIVE",
"High444"},
{0, NULL, NULL}
};
if (g_once_init_enter (&profile)) {
GType tmp =
g_enum_register_static ("GstV4l2VideoEncProfileType", profile_type);
g_once_init_leave (&profile, tmp);
}
return (GType) profile;
}
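/* Enables slice-level encode via
 * V4L2_CID_MPEG_VIDEOENC_ENABLE_SLICE_LEVEL_ENCODE, programs the slice length
 * (macroblocks or bits) through V4L2_CID_MPEG_VIDEOENC_SLICE_LENGTH_PARAM and
 * updates the cached format's sizeimage to the requested spacing. */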
gboolean
gst_v4l2_h264_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_enc_slice_length_param param =
{ slice_length_type, slice_header_spacing };
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_ENABLE_SLICE_LEVEL_ENCODE;
control.value = TRUE;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting spacing and packetization\n");
return FALSE;
}
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_SLICE_LENGTH_PARAM;
control.string = (gchar *) &param;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting spacing and packetization\n");
return FALSE;
}
if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
v4l2object->format.fmt.pix_mp.plane_fmt[0].sizeimage = slice_header_spacing;
} else {
v4l2object->format.fmt.pix.sizeimage = slice_header_spacing;
}
return TRUE;
}
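/* Applies all cached H.264 properties (profile, B-frames, VUI/SPS-PPS/AUD
 * insertion, entropy mode, slice settings, MV metadata, two-pass CBR,
 * reference frames, POC type, lossless mode) to the driver through
 * VIDIOC_S_EXT_CTRLS; returns FALSE on the first control that fails. */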
gboolean
set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder)
{
GstV4l2H264Enc *self = GST_V4L2_H264_ENC (encoder);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (encoder);
if (!GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
g_print ("V4L2 device is not open\n");
return FALSE;
}
if (self->profile) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H264_PROFILE,
self->profile)) {
g_print ("S_EXT_CTRLS for H264_PROFILE failed\n");
return FALSE;
}
}
if (self->nBFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_BFRAMES,
self->nBFrames)) {
g_print ("S_EXT_CTRLS for NUM_BFRAMES failed\n");
return FALSE;
}
}
if (self->insert_vui) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, 1)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
}
}
if (is_cuvid == TRUE) {
if (self->extended_colorformat) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_EXTEDED_COLORFORMAT, 1)) {
g_print ("S_EXT_CTRLS for EXTENDED_COLORFORMAT failed\n");
return FALSE;
}
}
}
if (self->insert_aud) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_AUD, 1)) {
g_print ("S_EXT_CTRLS for INSERT_AUD failed\n");
return FALSE;
}
}
if (self->insert_sps_pps) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_SPS_PPS_AT_IDR, 1)) {
g_print ("S_EXT_CTRLS for SPS_PPS_AT_IDR failed\n");
return FALSE;
}
}
if (self->disable_cabac_entropy_coding) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE,
V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC)) {
g_print ("S_EXT_CTRLS for ENTROPY_MODE failed\n");
return FALSE;
}
}
if (self->slice_header_spacing) {
enum v4l2_enc_slice_length_type slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_MBLK;
if (self->bit_packetization) {
slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_BITS;
}
if (!gst_v4l2_h264_enc_slice_header_spacing (video_enc->v4l2capture,
self->slice_header_spacing,
slice_length_type)) {
g_print ("S_EXT_CTRLS for SLICE_LENGTH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableMVBufferMeta) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_METADATA_MV,
self->EnableMVBufferMeta)) {
g_print ("S_EXT_CTRLS for ENABLE_METADATA_MV failed\n");
return FALSE;
}
}
if (self->SliceIntraRefreshInterval) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM,
self->SliceIntraRefreshInterval)) {
g_print ("S_EXT_CTRLS for SLICE_INTRAREFRESH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableTwopassCBR) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_TWO_PASS_CBR, 1)) {
g_print ("S_EXT_CTRLS for TWO_PASS_CBR failed\n");
return FALSE;
}
}
if (self->nRefFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES,
self->nRefFrames)) {
g_print ("S_EXT_CTRLS for NUM_REFERENCE_FRAMES failed\n");
return FALSE;
}
}
if (self->poc_type) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_POC_TYPE, self->poc_type)) {
g_print ("S_EXT_CTRLS for POC_TYPE failed\n");
return FALSE;
}
}
if (self->enableLossless) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_LOSSLESS, self->enableLossless)) {
g_print ("S_EXT_CTRLS for ENABLE_LOSSLESS failed\n");
return FALSE;
}
}
return TRUE;
}
#endif

gst-v4l2/gstv4l2h264enc.h (new file, 79 lines)
@@ -0,0 +1,79 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association.
* Author: ayaka <ayaka@soulik.info>
* Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_H264_ENC_H__
#define __GST_V4L2_H264_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_H264_ENC \
(gst_v4l2_h264_enc_get_type())
#define GST_V4L2_H264_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_H264_ENC,GstV4l2H264Enc))
#define GST_V4L2_H264_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_H264_ENC,GstV4l2H264EncClass))
#define GST_IS_V4L2_H264_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_H264_ENC))
#define GST_IS_V4L2_H264_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_H264_ENC))
typedef struct _GstV4l2H264Enc GstV4l2H264Enc;
typedef struct _GstV4l2H264EncClass GstV4l2H264EncClass;
struct _GstV4l2H264Enc
{
GstV4l2VideoEnc parent;
#ifdef USE_V4L2_TARGET_NV
guint profile;
guint nBFrames;
guint nRefFrames;
gboolean insert_sps_pps;
gboolean insert_aud;
gboolean insert_vui;
gboolean extended_colorformat;
gboolean EnableTwopassCBR;
gboolean SliceIntraRefreshEnable;
guint SliceIntraRefreshInterval;
gboolean disable_cabac_entropy_coding;
gboolean bit_packetization;
guint32 slice_header_spacing;
gboolean EnableMVBufferMeta;
guint poc_type;
gboolean enableLossless;
#endif
};
struct _GstV4l2H264EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_h264_enc_get_type (void);
gboolean gst_v4l2_is_h264_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_h264_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_H264_ENC_H__ */

gst-v4l2/gstv4l2h265enc.c (new file, 585 lines)
@@ -0,0 +1,585 @@
/*
* Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2h265enc.h"
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_h265_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_h265_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-h265, stream-format=(string) byte-stream, "
"alignment=(string) au");
static GType
gst_v4l2_videnc_profile_get_type (void);
#define GST_TYPE_V4L2_VID_ENC_PROFILE (gst_v4l2_videnc_profile_get_type ())
/* prototypes */
gboolean set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder);
gboolean gst_v4l2_h265_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type);
void set_h265_video_enc_property (GstV4l2Object * v4l2object, guint label,
gint param);
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
PROP_INSERT_SPS_PPS,
PROP_PROFILE,
PROP_INSERT_VUI,
PROP_EXTENDED_COLORFORMAT,
PROP_INSERT_AUD,
PROP_BIT_PACKETIZATION,
PROP_SLICE_HEADER_SPACING,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
PROP_TWO_PASS_CBR,
PROP_ENABLE_MV_META,
PROP_NUM_BFRAMES,
PROP_NUM_REFERENCE_FRAMES,
PROP_ENABLE_LOSSLESS_ENC
};
#define DEFAULT_PROFILE V4L2_MPEG_VIDEO_H265_PROFILE_MAIN
#define DEFAULT_BIT_PACKETIZATION FALSE
#define DEFAULT_SLICE_HEADER_SPACING 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 60
#define DEFAULT_NUM_B_FRAMES 0
#define MAX_NUM_B_FRAMES 2
#define DEFAULT_NUM_REFERENCE_FRAMES 1
#define MAX_NUM_REFERENCE_FRAMES 8
#define gst_v4l2_h265_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2H265Enc, gst_v4l2_h265_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_h265_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstV4l2H265Enc *self = GST_V4L2_H265_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_INSERT_SPS_PPS:
self->insert_sps_pps = g_value_get_boolean (value);
break;
case PROP_PROFILE:
self->profile = g_value_get_enum (value);
if (GST_V4L2_IS_OPEN(video_enc->v4l2output)) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H265_PROFILE, self->profile)) {
g_print ("S_EXT_CTRLS for H265_PROFILE failed\n");
}
}
break;
case PROP_INSERT_AUD:
self->insert_aud = g_value_get_boolean (value);
break;
case PROP_INSERT_VUI:
self->insert_vui = g_value_get_boolean (value);
break;
/* extended-colorformat property is available for cuvid path only */
case PROP_EXTENDED_COLORFORMAT:
self->extended_colorformat = g_value_get_boolean (value);
break;
case PROP_BIT_PACKETIZATION:
self->bit_packetization = g_value_get_boolean (value);
break;
case PROP_SLICE_HEADER_SPACING:
self->slice_header_spacing = g_value_get_uint64 (value);
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
self->SliceIntraRefreshInterval = g_value_get_uint (value);
break;
case PROP_TWO_PASS_CBR:
self->EnableTwopassCBR = g_value_get_boolean (value);
break;
case PROP_ENABLE_MV_META:
self->EnableMVBufferMeta = g_value_get_boolean (value);
video_enc->v4l2capture->enableMVBufferMeta = g_value_get_boolean (value);
break;
case PROP_NUM_BFRAMES:
self->nBFrames = g_value_get_uint (value);
if (self->nBFrames && (self->nRefFrames == DEFAULT_NUM_REFERENCE_FRAMES)) {
// Minimum 2 Ref-Frames are required for B-frames encoding
self->nRefFrames = 2;
}
break;
case PROP_NUM_REFERENCE_FRAMES:
self->nRefFrames = g_value_get_uint (value);
break;
case PROP_ENABLE_LOSSLESS_ENC:
self->enableLossless = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_v4l2_h265_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstV4l2H265Enc *self = GST_V4L2_H265_ENC (object);
switch (prop_id) {
case PROP_INSERT_SPS_PPS:
g_value_set_boolean (value, self->insert_sps_pps);
break;
case PROP_PROFILE:
g_value_set_enum (value, self->profile);
break;
case PROP_INSERT_AUD:
g_value_set_boolean (value, self->insert_aud);
break;
case PROP_INSERT_VUI:
g_value_set_boolean (value, self->insert_vui);
break;
/* extended-colorformat property is available for cuvid path only */
case PROP_EXTENDED_COLORFORMAT:
g_value_set_boolean (value, self->extended_colorformat);
break;
case PROP_BIT_PACKETIZATION:
g_value_set_boolean (value, self->bit_packetization);
break;
case PROP_SLICE_HEADER_SPACING:
g_value_set_uint64 (value, self->slice_header_spacing);
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
g_value_set_uint (value, self->SliceIntraRefreshInterval);
break;
case PROP_TWO_PASS_CBR:
g_value_set_boolean (value, self->EnableTwopassCBR);
break;
case PROP_ENABLE_MV_META:
g_value_set_boolean (value, self->EnableMVBufferMeta);
break;
case PROP_NUM_BFRAMES:
g_value_set_uint (value, self->nBFrames);
break;
case PROP_NUM_REFERENCE_FRAMES:
g_value_set_uint (value, self->nRefFrames);
break;
case PROP_ENABLE_LOSSLESS_ENC:
g_value_set_boolean (value, self->enableLossless);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "main")) {
v4l2_profile = V4L2_MPEG_VIDEO_H265_PROFILE_MAIN;
} else if (g_str_equal (profile, "main10")) {
v4l2_profile = V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10;
} else if (g_str_equal (profile, "mainstillpicture")) {
v4l2_profile = V4L2_MPEG_VIDEO_H265_PROFILE_MAINSTILLPICTURE;
} else {
GST_WARNING ("Unsupported profile string '%s'", profile);
}
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN:
return "main";
case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10:
return "main10";
case V4L2_MPEG_VIDEO_H265_PROFILE_MAINSTILLPICTURE:
return "mainstillpicture";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static gint
v4l2_level_from_string (const gchar * level)
{
gint v4l2_level = -1;
//TODO: Since the videodev2 header does not list H265 levels,
//they need to be added to v4l2_nv_extensions.h and used here.
return v4l2_level;
}
static const gchar *
v4l2_level_to_string (gint v4l2_level)
{
return NULL;
}
static void
gst_v4l2_h265_enc_init (GstV4l2H265Enc * self)
{
self->insert_sps_pps = FALSE;
self->profile = DEFAULT_PROFILE;
self->insert_aud = FALSE;
self->insert_vui = FALSE;
self->extended_colorformat = FALSE;
self->bit_packetization = DEFAULT_BIT_PACKETIZATION;
self->slice_header_spacing = DEFAULT_SLICE_HEADER_SPACING;
self->nRefFrames = 1;
self->nBFrames = 0;
self->enableLossless = FALSE;
}
static void
gst_v4l2_h265_enc_class_init (GstV4l2H265EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_h265_enc_debug, "v4l2h265enc", 0,
"V4L2 H.265 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 H.265 Encoder",
"Codec/Encoder/Video",
"Encode H.265 video streams via V4L2 API",
"Viranjan Pagar <vpagar@nvidia.com>, Amit Pandya <apandya@nvidia.com>");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h265_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h265_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_PROFILE,
g_param_spec_enum ("profile", "profile",
"Set profile for v4l2 encode",
GST_TYPE_V4L2_VID_ENC_PROFILE, DEFAULT_PROFILE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_INSERT_SPS_PPS,
g_param_spec_boolean ("insert-sps-pps",
"Insert H.265 SPS, PPS",
"Insert H.265 SPS, PPS at every IDR frame",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
g_param_spec_boolean ("insert-vui",
"Insert H.265 VUI",
"Insert H.265 VUI(Video Usability Information) in SPS",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_AUD,
g_param_spec_boolean ("insert-aud",
"Insert H.265 AUD",
"Insert H.265 Access Unit Delimiter(AUD)",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_BIT_PACKETIZATION,
g_param_spec_boolean ("bit-packetization", "Bit Based Packetization",
"Whether or not Packet size is based upon Number Of bits",
DEFAULT_BIT_PACKETIZATION,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_SLICE_HEADER_SPACING,
g_param_spec_uint64 ("slice-header-spacing", "Slice Header Spacing",
"Slice Header Spacing number of macroblocks/bits in one packet",
0, G_MAXUINT64, DEFAULT_SLICE_HEADER_SPACING,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_MV_META,
g_param_spec_boolean ("EnableMVBufferMeta",
"Enable Motion Vector Meta data",
"Enable Motion Vector Meta data for encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_TWO_PASS_CBR,
g_param_spec_boolean ("EnableTwopassCBR",
"Enable Two pass CBR",
"Enable two pass CBR while encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_BFRAMES,
g_param_spec_uint ("num-B-Frames",
"B Frames between two reference frames",
"Number of B Frames between two reference frames (not recommended)(Supported only on Xavier)",
0, MAX_NUM_B_FRAMES, DEFAULT_NUM_B_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder",
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_LOSSLESS_ENC,
g_param_spec_boolean ("enable-lossless",
"Enable Lossless encoding",
"Enable lossless encoding for YUV444",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
}
#endif
baseclass->codec_name = "H265";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_H265_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
//baseclass->level_cid = V4L2_CID_MPEG_VIDEO_H265_LEVEL;
baseclass->level_to_string = v4l2_level_to_string;
baseclass->level_from_string = v4l2_level_from_string;
baseclass->set_encoder_properties = set_v4l2_h265_encoder_properties;
}
/* Probing functions */
gboolean
gst_v4l2_is_h265_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_h265_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_H265_ENC,
"h265", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}
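/* Registers a GEnum type exposing the H.265 profiles supported by this
 * encoder (Main, Main10) for the "profile" property. */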
static GType
gst_v4l2_videnc_profile_get_type (void)
{
static volatile gsize profile = 0;
static const GEnumValue profile_type[] = {
{V4L2_MPEG_VIDEO_H265_PROFILE_MAIN,
"GST_V4L2_H265_VIDENC_MAIN_PROFILE", "Main"},
{V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10,
"GST_V4L2_H265_VIDENC_MAIN10_PROFILE", "Main10"},
{0, NULL, NULL}
};
if (g_once_init_enter (&profile)) {
GType tmp =
g_enum_register_static ("GstV4L2VideoEncProfileType", profile_type);
g_once_init_leave (&profile, tmp);
}
return (GType) profile;
}
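/* Programs the slice length (macroblocks or bits) through
 * V4L2_CID_MPEG_VIDEOENC_SLICE_LENGTH_PARAM; unlike the H.264 variant it does
 * not toggle slice-level encode or adjust sizeimage. */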
gboolean
gst_v4l2_h265_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_enc_slice_length_param param =
{ slice_length_type, slice_header_spacing };
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_SLICE_LENGTH_PARAM;
control.string = (gchar *) &param;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting spacing and packetization\n");
return FALSE;
}
return TRUE;
}
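/* Applies all cached H.265 properties (SPS-PPS/VUI/AUD insertion, profile,
 * extended colorformat, slice settings, MV metadata, two-pass CBR, B-frames,
 * reference frames, lossless mode) to the driver through VIDIOC_S_EXT_CTRLS;
 * returns FALSE on the first control that fails. */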
gboolean
set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder)
{
GstV4l2H265Enc *self = GST_V4L2_H265_ENC (encoder);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (encoder);
if (!GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
g_print ("V4L2 device is not open\n");
return FALSE;
}
if (self->insert_sps_pps) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_SPS_PPS_AT_IDR, 1)) {
g_print ("S_EXT_CTRLS for INSERT_SPS_PPS_AT_IDR failed\n");
return FALSE;
}
}
if (self->profile) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H265_PROFILE, self->profile)) {
g_print ("S_EXT_CTRLS for H265_PROFILE failed\n");
return FALSE;
}
}
if (self->insert_vui) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, 1)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
}
}
if (self->extended_colorformat) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_EXTEDED_COLORFORMAT, 1)) {
g_print ("S_EXT_CTRLS for EXTENDED_COLORFORMAT failed\n");
return FALSE;
}
}
if (self->insert_aud) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_AUD, 1)) {
g_print ("S_EXT_CTRLS for INSERT_AUD failed\n");
return FALSE;
}
}
if (self->slice_header_spacing) {
enum v4l2_enc_slice_length_type slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_MBLK;
if (self->bit_packetization) {
slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_BITS;
}
if (!gst_v4l2_h265_enc_slice_header_spacing (video_enc->v4l2output,
self->slice_header_spacing, slice_length_type)) {
g_print ("S_EXT_CTRLS for SLICE_LENGTH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableMVBufferMeta) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_METADATA_MV,
self->EnableMVBufferMeta)) {
g_print ("S_EXT_CTRLS for ENABLE_METADATA_MV failed\n");
return FALSE;
}
}
if (self->SliceIntraRefreshInterval) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM,
self->SliceIntraRefreshInterval)) {
g_print ("S_EXT_CTRLS for SLICE_INTRAREFRESH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableTwopassCBR) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_TWO_PASS_CBR, 1)) {
g_print ("S_EXT_CTRLS for TWO_PASS_CBR failed\n");
return FALSE;
}
}
if (self->nBFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_BFRAMES,
self->nBFrames)) {
g_print ("S_EXT_CTRLS for NUM_BFRAMES failed\n");
return FALSE;
}
}
if (self->nRefFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES,
self->nRefFrames)) {
g_print ("S_EXT_CTRLS for NUM_REFERENCE_FRAMES failed\n");
return FALSE;
}
}
if (self->enableLossless) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_LOSSLESS, self->enableLossless)) {
g_print ("S_EXT_CTRLS for ENABLE_LOSSLESS failed\n");
return FALSE;
}
}
return TRUE;
}

gst-v4l2/gstv4l2h265enc.h (new file, 72 lines)
@@ -0,0 +1,72 @@
/*
* Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_H265_ENC_H__
#define __GST_V4L2_H265_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_H265_ENC \
(gst_v4l2_h265_enc_get_type())
#define GST_V4L2_H265_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_H265_ENC,GstV4l2H265Enc))
#define GST_V4L2_H265_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_H265_ENC,GstV4l2H265EncClass))
#define GST_IS_V4L2_H265_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_H265_ENC))
#define GST_IS_V4L2_H265_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_H265_ENC))
typedef struct _GstV4l2H265Enc GstV4l2H265Enc;
typedef struct _GstV4l2H265EncClass GstV4l2H265EncClass;
struct _GstV4l2H265Enc
{
GstV4l2VideoEnc parent;
gboolean insert_sps_pps;
guint profile;
guint nBFrames;
guint nRefFrames;
gboolean insert_aud;
gboolean insert_vui;
gboolean extended_colorformat;
guint SliceIntraRefreshInterval;
gboolean EnableTwopassCBR;
gboolean bit_packetization;
guint32 slice_header_spacing;
gboolean EnableMVBufferMeta;
gboolean enableLossless;
};
struct _GstV4l2H265EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_h265_enc_get_type (void);
gboolean gst_v4l2_is_h265_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_h265_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_H265_ENC_H__ */

gst-v4l2/gstv4l2object.c (new file, 4982 lines)
File diff suppressed because it is too large.

gst-v4l2/gstv4l2object.h (new file, 381 lines)
@@ -0,0 +1,381 @@
/* GStreamer
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2object.h: base class for V4L2 elements
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_V4L2_OBJECT_H__
#define __GST_V4L2_OBJECT_H__
#include "linux/videodev2.h"
#ifdef HAVE_LIBV4L2
# include <libv4l2.h>
#endif
#include "v4l2-utils.h"
#ifdef USE_V4L2_TARGET_NV
#include "nvbufsurface.h"
#include "v4l2_nv_extensions.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstpushsrc.h>
#include <gst/video/video.h>
typedef struct _GstV4l2Object GstV4l2Object;
typedef struct _GstV4l2ObjectClassHelper GstV4l2ObjectClassHelper;
#include <gstv4l2bufferpool.h>
/* size of v4l2 buffer pool in streaming case */
#define GST_V4L2_MIN_BUFFERS 2
#ifdef USE_V4L2_TARGET_NV
#define V4L2_DEVICE_BASENAME_NVDEC "nvdec"
#define V4L2_DEVICE_BASENAME_NVENC "msenc"
#define V4L2_DEVICE_PATH_NVDEC "/dev/nvhost-nvdec"
#define V4L2_DEVICE_PATH_NVDEC_ALT "/dev/dri/card0"
#define V4L2_DEVICE_PATH_NVDEC_MCCOY "/dev/nvidia0"
#define V4L2_DEVICE_PATH_NVENC "/dev/nvhost-msenc"
#define V4L2_DEVICE_PATH_NVENC_ALT "/dev/v4l2-nvenc"
#endif
/* max frame width/height */
#define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */
G_BEGIN_DECLS
#define GST_TYPE_V4L2_IO_MODE (gst_v4l2_io_mode_get_type ())
GType gst_v4l2_io_mode_get_type (void);
#ifdef USE_V4L2_TARGET_NV
#define GST_TYPE_V4L2_DEC_OUTPUT_IO_MODE (gst_v4l2_dec_output_io_mode_get_type ())
GType gst_v4l2_dec_output_io_mode_get_type (void);
#define GST_TYPE_V4L2_DEC_CAPTURE_IO_MODE (gst_v4l2_dec_capture_io_mode_get_type ())
GType gst_v4l2_dec_capture_io_mode_get_type (void);
#define GST_TYPE_V4L2_ENC_OUTPUT_IO_MODE (gst_v4l2_enc_output_io_mode_get_type ())
GType gst_v4l2_enc_output_io_mode_get_type (void);
#define GST_TYPE_V4L2_ENC_CAPTURE_IO_MODE (gst_v4l2_enc_capture_io_mode_get_type ())
GType gst_v4l2_enc_capture_io_mode_get_type (void);
#endif
#define GST_V4L2_OBJECT(obj) (GstV4l2Object *)(obj)
extern gboolean is_cuvid;
typedef enum {
GST_V4L2_IO_AUTO = 0,
GST_V4L2_IO_RW = 1,
GST_V4L2_IO_MMAP = 2,
GST_V4L2_IO_USERPTR = 3,
GST_V4L2_IO_DMABUF = 4,
GST_V4L2_IO_DMABUF_IMPORT = 5
} GstV4l2IOMode;
typedef gboolean (*GstV4l2GetInOutFunction) (GstV4l2Object * v4l2object, gint * input);
typedef gboolean (*GstV4l2SetInOutFunction) (GstV4l2Object * v4l2object, gint input);
typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
#define GST_V4L2_WIDTH(o) (GST_VIDEO_INFO_WIDTH (&(o)->info))
#define GST_V4L2_HEIGHT(o) (GST_VIDEO_INFO_HEIGHT (&(o)->info))
#define GST_V4L2_PIXELFORMAT(o) ((o)->fmtdesc->pixelformat)
#define GST_V4L2_FPS_N(o) (GST_VIDEO_INFO_FPS_N (&(o)->info))
#define GST_V4L2_FPS_D(o) (GST_VIDEO_INFO_FPS_D (&(o)->info))
/* simple check whether the device is open */
#define GST_V4L2_IS_OPEN(o) ((o)->video_fd > 0)
/* check whether the device is 'active' */
#define GST_V4L2_IS_ACTIVE(o) ((o)->active)
#define GST_V4L2_SET_ACTIVE(o) ((o)->active = TRUE)
#define GST_V4L2_SET_INACTIVE(o) ((o)->active = FALSE)
/* checks whether the current v4l2object has already been open()'ed or not */
#define GST_V4L2_CHECK_OPEN(v4l2object) \
if (!GST_V4L2_IS_OPEN(v4l2object)) \
{ \
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
(_("Device is not open.")), (NULL)); \
return FALSE; \
}
/* checks whether the current v4l2object is close()'ed or whether it is still open */
#define GST_V4L2_CHECK_NOT_OPEN(v4l2object) \
if (GST_V4L2_IS_OPEN(v4l2object)) \
{ \
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
(_("Device is open.")), (NULL)); \
return FALSE; \
}
/* checks whether we're out of capture mode or not */
#define GST_V4L2_CHECK_NOT_ACTIVE(v4l2object) \
if (GST_V4L2_IS_ACTIVE(v4l2object)) \
{ \
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
(NULL), ("Device is in streaming mode")); \
return FALSE; \
}
struct _GstV4l2Object {
GstElement * element;
GstObject * dbg_obj;
enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
/* the video device */
char *videodev;
#ifdef USE_V4L2_TARGET_NV
gboolean is_encode;
#endif
/* the video-device's file descriptor */
gint video_fd;
GstV4l2IOMode mode;
gboolean active;
gboolean streaming;
/* the current format */
struct v4l2_fmtdesc *fmtdesc;
struct v4l2_format format;
GstVideoInfo info;
GstVideoAlignment align;
/* Features */
gboolean need_video_meta;
gboolean has_alpha_component;
/* only used if the device supports MPLANE;
* the number of planes here is in the V4L2 sense, the GStreamer
* equivalent is gst_buffer_n_memory()
*/
gint n_v4l2_planes;
/* We cache the frame duration if known */
GstClockTime duration;
/* if the MPLANE device supports both contiguous and non-contiguous
* planes, this lets us select which one we want; prefered_non_contiguous
* requests non-contiguous mode.
*/
gboolean prefered_non_contiguous;
/* This will be set if supported in decide_allocation. It can be used to
* calculate the minimum latency. */
guint32 min_buffers;
/* wanted mode */
GstV4l2IOMode req_mode;
/* optional pool */
GstBufferPool *pool;
/* the video device's capabilities */
struct v4l2_capability vcap;
/* opened device specific capabilities */
guint32 device_caps;
/* lists... */
GSList *formats; /* list of available capture formats */
GstCaps *probed_caps;
GList *colors;
GList *norms;
GList *channels;
GData *controls;
/* properties */
v4l2_std_id tv_norm;
gchar *channel;
gulong frequency;
GstStructure *extra_controls;
gboolean keep_aspect;
GValue *par;
#ifdef USE_V4L2_TARGET_NV
gboolean enableMVBufferMeta;
gboolean Enable_frame_type_reporting;
gboolean Enable_error_check;
gboolean Enable_headers;
gint ProcessedFrames;
gboolean open_mjpeg_block;
gboolean capture_plane_stopped;
GCond cplane_stopped_cond;
GMutex cplane_stopped_lock;
guint sei_payload_size;
void* sei_payload;
#endif
/* funcs */
GstV4l2GetInOutFunction get_in_out_func;
GstV4l2SetInOutFunction set_in_out_func;
GstV4l2UpdateFpsFunction update_fps_func;
/* syscalls */
gint (*fd_open) (gint fd, gint v4l2_flags);
gint (*close) (gint fd);
gint (*dup) (gint fd);
gint (*ioctl) (gint fd, gulong request, ...);
gssize (*read) (gint fd, gpointer buffer, gsize n);
gpointer (*mmap) (gpointer start, gsize length, gint prot, gint flags,
gint fd, off_t offset);
gint (*munmap) (gpointer _start, gsize length);
/* Quirks */
/* Skips interlacing probes */
gboolean never_interlaced;
/* Allows skipping the initial G_FMT read of the format. Some devices
* simply fail if you don't call S_FMT first (e.g. M2M decoders). */
gboolean no_initial_format;
/* Avoid any try_fmt probe. This is used by v4l2src to speed up start-up time
* on slow USB firmware. When this is set, gst_v4l2_set_format() will modify
* the caps to reflect what was negotiated during fixation */
gboolean skip_try_fmt_probes;
};
struct _GstV4l2ObjectClassHelper {
/* probed devices */
GList *devices;
};
GType gst_v4l2_object_get_type (void);
#define V4L2_STD_OBJECT_PROPS \
PROP_DEVICE, \
PROP_DEVICE_NAME, \
PROP_DEVICE_FD, \
PROP_FLAGS, \
PROP_BRIGHTNESS, \
PROP_CONTRAST, \
PROP_SATURATION, \
PROP_HUE, \
PROP_TV_NORM, \
PROP_IO_MODE, \
PROP_OUTPUT_IO_MODE, \
PROP_CAPTURE_IO_MODE, \
PROP_EXTRA_CONTROLS, \
PROP_PIXEL_ASPECT_RATIO, \
PROP_FORCE_ASPECT_RATIO
/* create/destroy */
GstV4l2Object* gst_v4l2_object_new (GstElement * element,
GstObject * dbg_obj,
enum v4l2_buf_type type,
const char * default_device,
GstV4l2GetInOutFunction get_in_out_func,
GstV4l2SetInOutFunction set_in_out_func,
GstV4l2UpdateFpsFunction update_fps_func);
void gst_v4l2_object_destroy (GstV4l2Object * v4l2object);
/* properties */
void gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
const char * default_device);
void gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class);
#ifdef USE_V4L2_TARGET_NV
void gst_v4l2_object_install_m2m_dec_iomode_properties_helper (GObjectClass * gobject_class);
void gst_v4l2_object_install_m2m_enc_iomode_properties_helper (GObjectClass * gobject_class);
#endif
gboolean gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
guint prop_id,
const GValue * value,
GParamSpec * pspec);
gboolean gst_v4l2_object_get_property_helper (GstV4l2Object *v4l2object,
guint prop_id, GValue * value,
GParamSpec * pspec);
/* open/close */
gboolean gst_v4l2_object_open (GstV4l2Object * v4l2object);
gboolean gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other);
gboolean gst_v4l2_object_close (GstV4l2Object * v4l2object);
/* probing */
GstCaps* gst_v4l2_object_get_all_caps (void);
GstCaps* gst_v4l2_object_get_raw_caps (void);
GstCaps* gst_v4l2_object_get_codec_caps (void);
gint gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo,
gint plane, gint stride);
gboolean gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error);
gboolean gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error);
gboolean gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps);
gboolean gst_v4l2_object_unlock (GstV4l2Object * v4l2object);
gboolean gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object);
gboolean gst_v4l2_object_stop (GstV4l2Object * v4l2object);
GstCaps * gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter);
GstCaps * gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter);
gboolean gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info);
gboolean gst_v4l2_object_set_crop (GstV4l2Object * obj);
gboolean gst_v4l2_object_decide_allocation (GstV4l2Object * v4l2object, GstQuery * query);
gboolean gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query);
GstStructure * gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc);
/* TODO Move to proper namespace */
/* open/close the device */
gboolean gst_v4l2_open (GstV4l2Object * v4l2object);
gboolean gst_v4l2_dup (GstV4l2Object * v4l2object, GstV4l2Object * other);
gboolean gst_v4l2_close (GstV4l2Object * v4l2object);
/* norm/input/output */
gboolean gst_v4l2_get_norm (GstV4l2Object * v4l2object, v4l2_std_id * norm);
gboolean gst_v4l2_set_norm (GstV4l2Object * v4l2object, v4l2_std_id norm);
gboolean gst_v4l2_get_input (GstV4l2Object * v4l2object, gint * input);
gboolean gst_v4l2_set_input (GstV4l2Object * v4l2object, gint input);
gboolean gst_v4l2_get_output (GstV4l2Object * v4l2object, gint * output);
gboolean gst_v4l2_set_output (GstV4l2Object * v4l2object, gint output);
/* frequency control */
gboolean gst_v4l2_get_frequency (GstV4l2Object * v4l2object, gint tunernum, gulong * frequency);
gboolean gst_v4l2_set_frequency (GstV4l2Object * v4l2object, gint tunernum, gulong frequency);
gboolean gst_v4l2_signal_strength (GstV4l2Object * v4l2object, gint tunernum, gulong * signal);
/* attribute control */
gboolean gst_v4l2_get_attribute (GstV4l2Object * v4l2object, int attribute, int * value);
gboolean gst_v4l2_set_attribute (GstV4l2Object * v4l2object, int attribute, const int value);
gboolean gst_v4l2_set_controls (GstV4l2Object * v4l2object, GstStructure * controls);
#ifdef USE_V4L2_TARGET_NV
gboolean set_v4l2_video_mpeg_class (GstV4l2Object * v4l2object, guint label,
gint params);
#endif
G_END_DECLS
#endif /* __GST_V4L2_OBJECT_H__ */
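For orientation, a GstV4l2Object as declared above is created once per V4L2 queue, opened, given a format, and eventually stopped, closed and destroyed. The sketch below is illustrative only: the element, the callback stubs and the error handling are hypothetical placeholders, and V4L2_DEVICE_PATH_NVENC is simply the define from this header (USE_V4L2_TARGET_NV builds).

static gboolean hypothetical_get_in_out (GstV4l2Object * obj, gint * io) { *io = 0; return TRUE; }
static gboolean hypothetical_set_in_out (GstV4l2Object * obj, gint io) { return TRUE; }
static gboolean hypothetical_update_fps (GstV4l2Object * obj) { return TRUE; }

static gboolean
hypothetical_start (GstElement * self, GstCaps * caps)
{
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstV4l2Object *obj;

  /* OUTPUT type is the queue the application feeds (V4L2 M2M convention). */
  obj = gst_v4l2_object_new (self, GST_OBJECT (self),
      V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, V4L2_DEVICE_PATH_NVENC,
      hypothetical_get_in_out, hypothetical_set_in_out,
      hypothetical_update_fps);

  if (!gst_v4l2_object_open (obj)) {
    gst_v4l2_object_destroy (obj);
    return FALSE;
  }

  /* Applies S_FMT for the negotiated caps; failures are collected in 'error'. */
  if (!gst_v4l2_object_set_format (obj, caps, &error)) {
    gst_v4l2_error (self, &error);
    gst_v4l2_object_close (obj);
    gst_v4l2_object_destroy (obj);
    return FALSE;
  }

  /* A real element would keep 'obj' in its instance struct for later
   * gst_v4l2_object_stop() / _close() / _destroy() calls. */
  return TRUE;
}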

2482 gst-v4l2/gstv4l2videodec.c Normal file
File diff suppressed because it is too large

114 gst-v4l2/gstv4l2videodec.h Normal file
@@ -0,0 +1,114 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VIDEO_DEC_H__
#define __GST_V4L2_VIDEO_DEC_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideodecoder.h>
#include <gst/video/gstvideometa.h>
#include <gstv4l2object.h>
#include <gstv4l2bufferpool.h>
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VIDEO_DEC \
(gst_v4l2_video_dec_get_type())
#define GST_V4L2_VIDEO_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VIDEO_DEC,GstV4l2VideoDec))
#define GST_V4L2_VIDEO_DEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VIDEO_DEC,GstV4l2VideoDecClass))
#define GST_IS_V4L2_VIDEO_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VIDEO_DEC))
#define GST_IS_V4L2_VIDEO_DEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VIDEO_DEC))
/* The structures are renamed as the name conflicts with the
* OSS v4l2 library structures. */
#ifdef USE_V4L2_TARGET_NV
#define GstV4l2VideoDec GstNvV4l2VideoDec
#define GstV4l2VideoDecClass GstNvV4l2VideoDecClass
#define LOOP_COUNT_TO_WAIT_FOR_DQEVENT 6
#define WAIT_TIME_PER_LOOP_FOR_DQEVENT 100*1000
#endif
typedef struct _GstV4l2VideoDec GstV4l2VideoDec;
typedef struct _GstV4l2VideoDecClass GstV4l2VideoDecClass;
struct _GstV4l2VideoDec
{
GstVideoDecoder parent;
/* < private > */
GstV4l2Object *v4l2output;
GstV4l2Object *v4l2capture;
/* pads */
GstCaps *probed_srccaps;
GstCaps *probed_sinkcaps;
/* State */
GstVideoCodecState *input_state;
gboolean active;
GstFlowReturn output_flow;
guint64 frame_num;
#ifdef USE_V4L2_TARGET_NV
GHashTable* hash_pts_systemtime;
gdouble buffer_in_time;
guint64 decoded_picture_cnt;
guint32 skip_frames;
gboolean idr_received;
guint32 drop_frame_interval;
guint32 num_extra_surfaces;
gboolean is_drc;
gboolean disable_dpb;
gboolean enable_full_frame;
gboolean enable_frame_type_reporting;
gboolean enable_error_check;
gboolean enable_max_performance;
guint32 cudadec_mem_type;
guint32 cudadec_gpu_id;
guint32 cudadec_num_surfaces;
gboolean cudadec_low_latency;
gboolean extract_sei_type5_data;
gdouble rate;
guint32 cap_buf_dynamic_allocation;
#endif
};
struct _GstV4l2VideoDecClass
{
GstVideoDecoderClass parent_class;
gchar *default_device;
};
GType gst_v4l2_video_dec_get_type (void);
gboolean gst_v4l2_is_video_dec (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_video_dec_register (GstPlugin * plugin,
const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_VIDEO_DEC_H__ */
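The two entry points above are what the plugin's init code uses to expose the NV decoder element. A minimal registration sketch, assuming sink_caps/src_caps were already probed from the device and using the NVDEC basename/path defines from gstv4l2object.h:

static gboolean
hypothetical_register_nvdec (GstPlugin * plugin,
    GstCaps * sink_caps, GstCaps * src_caps)
{
  /* Only register if the probed caps actually describe a video decoder. */
  if (!gst_v4l2_is_video_dec (sink_caps, src_caps))
    return FALSE;

  gst_v4l2_video_dec_register (plugin, V4L2_DEVICE_BASENAME_NVDEC,
      V4L2_DEVICE_PATH_NVDEC, sink_caps, src_caps);
  return TRUE;
}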

2357 gst-v4l2/gstv4l2videoenc.c Normal file
File diff suppressed because it is too large

136 gst-v4l2/gstv4l2videoenc.h Normal file
@@ -0,0 +1,136 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association.
* Author: ayaka <ayaka@soulik.info>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VIDEO_ENC_H__
#define __GST_V4L2_VIDEO_ENC_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideoencoder.h>
#include <gst/video/gstvideometa.h>
#include <gstv4l2object.h>
#include <gstv4l2bufferpool.h>
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VIDEO_ENC \
(gst_v4l2_video_enc_get_type())
#define GST_V4L2_VIDEO_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VIDEO_ENC,GstV4l2VideoEnc))
#define GST_V4L2_VIDEO_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VIDEO_ENC,GstV4l2VideoEncClass))
#define GST_IS_V4L2_VIDEO_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VIDEO_ENC))
#define GST_IS_V4L2_VIDEO_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VIDEO_ENC))
#define GST_V4L2_VIDEO_ENC_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_V4L2_VIDEO_ENC, GstV4l2VideoEncClass))
typedef struct _GstV4l2VideoEnc GstV4l2VideoEnc;
typedef struct _GstV4l2VideoEncClass GstV4l2VideoEncClass;
struct _GstV4l2VideoEnc
{
GstVideoEncoder parent;
#ifdef USE_V4L2_TARGET_NV
guint32 ratecontrol;
guint32 bitrate;
guint32 peak_bitrate;
guint32 idrinterval;
guint32 iframeinterval;
guint32 quant_i_frames;
guint32 quant_p_frames;
guint32 quant_b_frames;
guint32 MinQpI;
guint32 MaxQpI;
guint32 MinQpP;
guint32 MaxQpP;
guint32 MinQpB;
guint32 MaxQpB;
gboolean set_qpRange;
guint32 hw_preset_level;
guint virtual_buffer_size;
gboolean measure_latency;
gboolean ratecontrol_enable;
gboolean force_idr;
gboolean force_intra;
gboolean maxperf_enable;
FILE *tracing_file_enc;
GQueue *got_frame_pt;
guint32 cudaenc_gpu_id;
gboolean slice_output;
GstVideoCodecFrame *best_prev;
GstClockTime buf_pts_prev;
#endif
/* < private > */
GstV4l2Object *v4l2output;
GstV4l2Object *v4l2capture;
/* pads */
GstCaps *probed_srccaps;
GstCaps *probed_sinkcaps;
/* State */
GstVideoCodecState *input_state;
gboolean active;
gboolean processing;
GstFlowReturn output_flow;
};
struct _GstV4l2VideoEncClass
{
GstVideoEncoderClass parent_class;
gchar *default_device;
const char *codec_name;
guint32 profile_cid;
const gchar *(*profile_to_string) (gint v4l2_profile);
gint (*profile_from_string) (const gchar * profile);
#ifdef USE_V4L2_TARGET_NV
gboolean (*set_encoder_properties) (GstVideoEncoder * encoder);
gboolean (*set_video_encoder_properties) (GstVideoEncoder * encoder);
#endif
guint32 level_cid;
const gchar *(*level_to_string) (gint v4l2_level);
gint (*level_from_string) (const gchar * level);
#ifdef USE_V4L2_TARGET_NV
void (*force_IDR) (GstV4l2VideoEnc *);
#endif
};
GType gst_v4l2_video_enc_get_type (void);
gboolean gst_v4l2_is_video_enc (GstCaps * sink_caps, GstCaps * src_caps,
GstCaps * codec_caps);
void gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
const char *codec, const gchar * basename, const gchar * device_path,
GstCaps * sink_caps, GstCaps * codec_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_VIDEO_ENC_H__ */

198 gst-v4l2/gstv4l2vp8enc.c Normal file
@@ -0,0 +1,198 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2vp8enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_vp8_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_vp8_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-vp8, profile=(string) { 0, 1, 2, 3 }");
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
#ifdef USE_V4L2_TARGET_NV
PROP_ENABLE_HEADER,
#endif
/* TODO */
};
#define gst_v4l2_vp8_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2Vp8Enc, gst_v4l2_vp8_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_vp8_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp8Enc *self = GST_V4L2_VP8_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
self->EnableHeaders = g_value_get_boolean (value);
video_enc->v4l2capture->Enable_headers = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static void
gst_v4l2_vp8_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp8Enc *self = GST_V4L2_VP8_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
g_value_set_boolean (value, self->EnableHeaders);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "0"))
v4l2_profile = 0;
else if (g_str_equal (profile, "1"))
v4l2_profile = 1;
else if (g_str_equal (profile, "2"))
v4l2_profile = 2;
else if (g_str_equal (profile, "3"))
v4l2_profile = 3;
else
GST_WARNING ("Unsupported profile string '%s'", profile);
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case 0:
return "0";
case 1:
return "1";
case 2:
return "2";
case 3:
return "3";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static void
gst_v4l2_vp8_enc_init (GstV4l2Vp8Enc * self)
{
}
static void
gst_v4l2_vp8_enc_class_init (GstV4l2Vp8EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_vp8_enc_debug, "v4l2vp8enc", 0,
"V4L2 VP8 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 VP8 Encoder",
"Codec/Encoder/Video",
"Encode VP8 video streams via V4L2 API",
"Nicolas Dufresne <nicolas.dufresne@collabora.com");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp8_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp8_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_ENABLE_HEADER,
g_param_spec_boolean ("enable-headers",
"Enable VP8 headers",
"Enable VP8 file and frame headers, if enabled, dump elementary stream",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
#endif
baseclass->codec_name = "VP8";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_VPX_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
}
/* Probing functions */
gboolean
gst_v4l2_is_vp8_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_vp8_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_VP8_ENC,
"vp8", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}
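From application code, the "enable-headers" property installed above is toggled like any other GObject property. A small sketch; the factory name "nvv4l2vp8enc" is an assumption here and is not defined in this file:

/* Hypothetical helper; assumes gst_init() has already been called. */
static GstElement *
hypothetical_make_vp8_encoder_with_headers (void)
{
  GstElement *enc = gst_element_factory_make ("nvv4l2vp8enc", NULL);

  if (enc != NULL)
    g_object_set (enc, "enable-headers", TRUE, NULL); /* PROP_ENABLE_HEADER above */
  return enc;
}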

63 gst-v4l2/gstv4l2vp8enc.h Normal file
@@ -0,0 +1,63 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VP8_ENC_H__
#define __GST_V4L2_VP8_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VP8_ENC \
(gst_v4l2_vp8_enc_get_type())
#define GST_V4L2_VP8_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VP8_ENC,GstV4l2Vp8Enc))
#define GST_V4L2_VP8_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VP8_ENC,GstV4l2Vp8EncClass))
#define GST_IS_V4L2_VP8_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VP8_ENC))
#define GST_IS_V4L2_VP8_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VP8_ENC))
typedef struct _GstV4l2Vp8Enc GstV4l2Vp8Enc;
typedef struct _GstV4l2Vp8EncClass GstV4l2Vp8EncClass;
struct _GstV4l2Vp8Enc
{
GstV4l2VideoEnc parent;
#ifdef USE_V4L2_TARGET_NV
gboolean EnableHeaders;
#endif
};
struct _GstV4l2Vp8EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_vp8_enc_get_type (void);
gboolean gst_v4l2_is_vp8_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_vp8_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_VP8_ENC_H__ */

197 gst-v4l2/gstv4l2vp9enc.c Normal file
@@ -0,0 +1,197 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2vp9enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_vp9_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_vp9_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-vp9, profile=(string) { 0, 1, 2, 3 }");
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
#ifdef USE_V4L2_TARGET_NV
PROP_ENABLE_HEADER,
#endif
/* TODO */
};
#define gst_v4l2_vp9_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2Vp9Enc, gst_v4l2_vp9_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_vp9_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp9Enc *self = GST_V4L2_VP9_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
self->EnableHeaders = g_value_get_boolean (value);
video_enc->v4l2capture->Enable_headers = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static void
gst_v4l2_vp9_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp9Enc *self = GST_V4L2_VP9_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
g_value_set_boolean (value, self->EnableHeaders);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "0"))
v4l2_profile = 0;
else if (g_str_equal (profile, "1"))
v4l2_profile = 1;
else if (g_str_equal (profile, "2"))
v4l2_profile = 2;
else if (g_str_equal (profile, "3"))
v4l2_profile = 3;
else
GST_WARNING ("Unsupported profile string '%s'", profile);
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case 0:
return "0";
case 1:
return "1";
case 2:
return "2";
case 3:
return "3";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static void
gst_v4l2_vp9_enc_init (GstV4l2Vp9Enc * self)
{
}
static void
gst_v4l2_vp9_enc_class_init (GstV4l2Vp9EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_vp9_enc_debug, "v4l2vp9enc", 0,
"V4L2 VP9 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 VP9 Encoder",
"Codec/Encoder/Video",
"Encode VP9 video streams via V4L2 API",
"Nicolas Dufresne <nicolas.dufresne@collabora.com");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp9_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp9_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_ENABLE_HEADER,
g_param_spec_boolean ("enable-headers",
"Enable VP9 headers",
"Enable VP9 file and frame headers, if enabled, dump elementary stream",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
#endif
baseclass->codec_name = "VP9";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_VPX_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
}
/* Probing functions */
gboolean
gst_v4l2_is_vp9_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_vp9_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_VP9_ENC,
"vp9", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}

63 gst-v4l2/gstv4l2vp9enc.h Normal file
@@ -0,0 +1,63 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VP9_ENC_H__
#define __GST_V4L2_VP9_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VP9_ENC \
(gst_v4l2_vp9_enc_get_type())
#define GST_V4L2_VP9_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VP9_ENC,GstV4l2Vp9Enc))
#define GST_V4L2_VP9_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VP9_ENC,GstV4l2Vp9EncClass))
#define GST_IS_V4L2_VP9_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VP9_ENC))
#define GST_IS_V4L2_VP9_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VP9_ENC))
typedef struct _GstV4l2Vp9Enc GstV4l2Vp9Enc;
typedef struct _GstV4l2Vp9EncClass GstV4l2Vp9EncClass;
struct _GstV4l2Vp9Enc
{
GstV4l2VideoEnc parent;
#ifdef USE_V4L2_TARGET_NV
gboolean EnableHeaders;
#endif
};
struct _GstV4l2Vp9EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_vp9_enc_get_type (void);
gboolean gst_v4l2_is_vp9_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_vp9_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_VP9_ENC_H__ */

139 gst-v4l2/sei_parse.c Normal file
@@ -0,0 +1,139 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include <stdint.h>
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <glib.h>
#define UUID_SIZE 16
#define USER_DATA_UNREGISTERED_TYPE 5
gboolean check_uuid(uint8_t *stream);
uint8_t* parse_sei_unit(uint8_t * bs_ptr, guint *size);
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size);
gboolean check_uuid(uint8_t *stream)
{
char uuid_string[UUID_SIZE] = {0};
uint32_t size = snprintf (uuid_string, UUID_SIZE, "%s", stream);
if (size == (UUID_SIZE-1))
{
if (!strncmp (uuid_string, "NVDS_CUSTOMMETA", (UUID_SIZE-1)))
return TRUE;
else
return FALSE;
}
else
return FALSE;
}
uint8_t* parse_sei_unit(uint8_t * bs_ptr, guint *size)
{
int payload_type = 0;
int payload_size = 0;
uint8_t* payload = NULL;
int i, emu_count;
/* printf("found a SEI NAL unit!\n"); */
payload_type += *bs_ptr++;
while (payload_size % 0xFF == 0)
{
payload_size += *bs_ptr++;
}
/* printf("payload_type = %i payload_size = %i\n", payload_type, payload_size); */
if (!check_uuid (bs_ptr))
{
/* printf ("Expected UUID not found\n"); */
return NULL;
}
else
{
bs_ptr += UUID_SIZE;
}
*size = payload_size;
if (payload_type == USER_DATA_UNREGISTERED_TYPE)
{
payload = (uint8_t*)malloc((payload_size - UUID_SIZE)*sizeof(uint8_t));
for (i = 0, emu_count = 0; i < (payload_size - UUID_SIZE);
i++, emu_count++)
{
payload[i] = *bs_ptr++;
// drop emulation prevention bytes
if ((emu_count >= 2)
&& (payload[i] == 0x03)
&& (payload[i-1] == 0x00)
&& (payload[i-2] == 0x00))
{
i--;
emu_count = 0;
}
}
return payload;
}
else
{
return NULL;
}
}
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size)
{
int checklen = 0;
unsigned int sei_payload_size = 0;
uint8_t *bs_ptr = bs;
uint8_t *payload = NULL;
while (size > 0)
{
if (checklen < 2 && *bs_ptr++ == 0x00)
checklen++;
else if (checklen == 2 && *bs_ptr++ == 0x00)
checklen++;
else if (checklen == 3 && *bs_ptr++ == 0x01)
checklen++;
else if (checklen == 4 && *bs_ptr++ == 0x06)
{
payload = parse_sei_unit(bs_ptr, &sei_payload_size);
checklen = 0;
if (payload != NULL)
{
*payload_size = (sei_payload_size - 16);
return payload;
}
else
return NULL;
}
else
checklen = 0;
size--;
}
return NULL;
}
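The helpers above scan an Annex-B buffer for a start code followed by an SEI NAL (type 0x06), verify the "NVDS_CUSTOMMETA" UUID, and return the unregistered user data with emulation-prevention bytes removed. A minimal caller sketch; 'bitstream' and 'bitstream_len' are assumed inputs holding one encoded access unit:

static void
hypothetical_dump_sei (uint8_t * bitstream, uint32_t bitstream_len)
{
  uint32_t payload_size = 0;
  uint8_t *payload = parse_sei_data (bitstream, bitstream_len, &payload_size);

  if (payload != NULL) {
    printf ("SEI type-5 payload of %u bytes\n", payload_size);
    free (payload); /* parse_sei_unit() allocates the payload with malloc() */
  }
}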

202 gst-v4l2/v4l2-utils.c Normal file
@@ -0,0 +1,202 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "v4l2-utils.h"
/**************************/
/* Common device iterator */
/**************************/
#ifdef HAVE_GUDEV
#include <gudev/gudev.h>
struct _GstV4l2GUdevIterator
{
GstV4l2Iterator parent;
GList *devices;
GUdevDevice *device;
GUdevClient *client;
};
GstV4l2Iterator *
gst_v4l2_iterator_new (void)
{
static const gchar *subsystems[] = { "video4linux", NULL };
struct _GstV4l2GUdevIterator *it;
it = g_slice_new0 (struct _GstV4l2GUdevIterator);
it->client = g_udev_client_new (subsystems);
it->devices = g_udev_client_query_by_subsystem (it->client, "video4linux");
return (GstV4l2Iterator *) it;
}
gboolean
gst_v4l2_iterator_next (GstV4l2Iterator * _it)
{
struct _GstV4l2GUdevIterator *it = (struct _GstV4l2GUdevIterator *) _it;
const gchar *device_name;
if (it->device)
g_object_unref (it->device);
it->device = NULL;
it->parent.device_path = NULL;
it->parent.device_name = NULL;
if (it->devices == NULL)
return FALSE;
it->device = it->devices->data;
it->devices = g_list_delete_link (it->devices, it->devices);
device_name = g_udev_device_get_property (it->device, "ID_V4L_PRODUCT");
if (!device_name)
device_name = g_udev_device_get_property (it->device, "ID_MODEL_ENC");
if (!device_name)
device_name = g_udev_device_get_property (it->device, "ID_MODEL");
it->parent.device_path = g_udev_device_get_device_file (it->device);
it->parent.device_name = device_name;
it->parent.sys_path = g_udev_device_get_sysfs_path (it->device);
return TRUE;
}
void
gst_v4l2_iterator_free (GstV4l2Iterator * _it)
{
struct _GstV4l2GUdevIterator *it = (struct _GstV4l2GUdevIterator *) _it;
g_list_free_full (it->devices, g_object_unref);
gst_object_unref (it->client);
g_slice_free (struct _GstV4l2GUdevIterator, it);
}
#else /* No GUDEV */
struct _GstV4l2FsIterator
{
GstV4l2Iterator parent;
gint base_idx;
gint video_idx;
gchar *device;
};
GstV4l2Iterator *
gst_v4l2_iterator_new (void)
{
struct _GstV4l2FsIterator *it;
it = g_slice_new0 (struct _GstV4l2FsIterator);
it->base_idx = 0;
it->video_idx = -1;
it->device = NULL;
return (GstV4l2Iterator *) it;
}
gboolean
gst_v4l2_iterator_next (GstV4l2Iterator * _it)
{
struct _GstV4l2FsIterator *it = (struct _GstV4l2FsIterator *) _it;
static const gchar *dev_base[] = { "/dev/video", "/dev/v4l2/video", NULL };
gchar *device = NULL;
g_free ((gchar *) it->parent.device_path);
it->parent.device_path = NULL;
while (device == NULL) {
it->video_idx++;
if (it->video_idx >= 64) {
it->video_idx = 0;
it->base_idx++;
}
if (dev_base[it->base_idx] == NULL) {
it->video_idx = 0;
break;
}
device = g_strdup_printf ("%s%d", dev_base[it->base_idx], it->video_idx);
if (g_file_test (device, G_FILE_TEST_EXISTS)) {
it->parent.device_path = device;
break;
}
g_free (device);
device = NULL;
}
return it->parent.device_path != NULL;
}
void
gst_v4l2_iterator_free (GstV4l2Iterator * _it)
{
struct _GstV4l2FsIterator *it = (struct _GstV4l2FsIterator *) _it;
g_free ((gchar *) it->parent.device_path);
g_slice_free (struct _GstV4l2FsIterator, it);
}
#endif
void
gst_v4l2_clear_error (GstV4l2Error * v4l2err)
{
if (v4l2err) {
g_clear_error (&v4l2err->error);
g_free (v4l2err->dbg_message);
v4l2err->dbg_message = NULL;
}
}
void
gst_v4l2_error (gpointer element, GstV4l2Error * v4l2err)
{
GError *error;
if (!v4l2err || !v4l2err->error)
return;
error = v4l2err->error;
if (error->message)
GST_WARNING_OBJECT (element, "error: %s", error->message);
if (v4l2err->dbg_message)
GST_WARNING_OBJECT (element, "error: %s", v4l2err->dbg_message);
gst_element_message_full (GST_ELEMENT (element), GST_MESSAGE_ERROR,
error->domain, error->code, error->message, v4l2err->dbg_message,
v4l2err->file, v4l2err->func, v4l2err->line);
error->message = NULL;
v4l2err->dbg_message = NULL;
gst_v4l2_clear_error (v4l2err);
}
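As a usage sketch, the iterator above (gudev-backed or filesystem-backed, depending on the build) is consumed the same way in both cases; this hypothetical helper just prints every discovered node:

static void
hypothetical_list_devices (void)
{
  GstV4l2Iterator *it = gst_v4l2_iterator_new ();

  while (gst_v4l2_iterator_next (it))
    g_print ("%s (%s)\n", it->device_path,
        it->device_name ? it->device_name : "unknown");
  gst_v4l2_iterator_free (it);
}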

78 gst-v4l2/v4l2-utils.h Normal file
@@ -0,0 +1,78 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __V4L2_UTILS_H__
#define __V4L2_UTILS_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_V4L2_ERROR_INIT { NULL, NULL }
#define GST_V4L2_ERROR(v4l2err,domain,code,msg,dbg) \
{\
if (v4l2err) { \
gchar *_msg = _gst_element_error_printf msg; \
v4l2err->error = g_error_new_literal (GST_##domain##_ERROR, \
GST_##domain##_ERROR_##code, _msg); \
g_free (_msg); \
v4l2err->dbg_message = _gst_element_error_printf dbg; \
v4l2err->file = __FILE__; \
v4l2err->func = GST_FUNCTION; \
v4l2err->line = __LINE__; \
} \
}
typedef struct _GstV4l2Iterator GstV4l2Iterator;
typedef struct _GstV4l2Error GstV4l2Error;
struct _GstV4l2Iterator
{
const gchar *device_path;
const gchar *device_name;
const gchar *sys_path;
};
struct _GstV4l2Error
{
GError *error;
gchar *dbg_message;
const gchar *file;
const gchar *func;
gint line;
};
GstV4l2Iterator * gst_v4l2_iterator_new (void);
gboolean gst_v4l2_iterator_next (GstV4l2Iterator *it);
void gst_v4l2_iterator_free (GstV4l2Iterator *it);
const gchar * gst_v4l2_iterator_get_device_path (GstV4l2Iterator *it);
const gchar * gst_v4l2_iterator_get_device_name (GstV4l2Iterator *it);
const gchar * gst_v4l2_iterator_get_sys_path (GstV4l2Iterator *it);
void gst_v4l2_clear_error (GstV4l2Error *error);
void gst_v4l2_error (gpointer element, GstV4l2Error *error);
G_END_DECLS
#endif /* __V4L2_UTILS_H__ */
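Note that GST_V4L2_ERROR() only records the failure into the GstV4l2Error struct; gst_v4l2_error() is what posts it on the element and clears it, while gst_v4l2_clear_error() discards it silently. A minimal sketch of the intended pattern, with a hypothetical QUERYCAP wrapper (assumes <sys/ioctl.h>, <errno.h> and linux/videodev2.h in addition to this header):

static gboolean
hypothetical_query_caps (GstElement * element, gint fd)
{
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  struct v4l2_capability cap;

  if (ioctl (fd, VIDIOC_QUERYCAP, &cap) < 0) {
    GST_V4L2_ERROR (&error, RESOURCE, READ,
        ("Could not query device capabilities"),
        ("VIDIOC_QUERYCAP failed: %s", g_strerror (errno)));
    gst_v4l2_error (element, &error); /* posts the message and clears 'error' */
    return FALSE;
  }
  return TRUE;
}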

1166 gst-v4l2/v4l2_calls.c Normal file
File diff suppressed because it is too large

897 nvbuf_utils.h Normal file
@@ -0,0 +1,897 @@
/*
* Copyright (c) 2016-2022, NVIDIA CORPORATION. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*/
/**
* @file
* <b>NVIDIA Multimedia Utilities: Buffering and Transform/Composition/Blending</b>
*
*/
/**
* @defgroup ee_nvbuffering_group NvBufUtils API (Deprecated)
* @ingroup ds_nvbuf_api
* NVIDIA buffering utility library for use by applications.
* The utility also transforms, composites, and blends.
* @{
*/
#ifndef _NVBUF_UTILS_H_
#define _NVBUF_UTILS_H_
#ifdef __cplusplus
extern "C"
{
#endif
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <errno.h>
#include <stdbool.h>
/**
* Defines the maximum number of planes for a video frame.
*/
#define MAX_NUM_PLANES 4
/**
* Defines the maximum number of input video frames that can be used for composition.
*/
#define MAX_COMPOSITE_FRAME 16
/**
* Defines the default values for chroma subsampling.
* The default value matches JPEG/MPEG use cases.
*/
#define NVBUF_CHROMA_SUBSAMPLING_HORIZ_DEFAULT 0
#define NVBUF_CHROMA_SUBSAMPLING_VERT_DEFAULT 1
/**
* Defines the maximum number of sync object parameters.
*/
#define NVBUF_MAX_SYNCOBJ_PARAMS 5
/**
* Use this value to represent an infinite wait interval.
* A value of zero should not be interpreted as infinite,
* it should be interpreted as "time out immediately" and
* simply check whether the event has already happened.
*/
#define NVBUFFER_SYNCPOINT_WAIT_INFINITE 0xFFFFFFFF
/**
* Defines Payload types for NvBuffer.
*/
typedef enum
{
/** buffer payload with hardware memory handle for set of planes. */
NvBufferPayload_SurfArray,
/** buffer payload with hardware memory handle for specific memory size. */
NvBufferPayload_MemHandle,
} NvBufferPayloadType;
/**
* Defines display scan formats for NvBuffer video planes.
*/
typedef enum
{
/** Progressive scan formats. */
NvBufferDisplayScanFormat_Progressive = 0,
/** Interlaced scan formats. */
NvBufferDisplayScanFormat_Interlaced,
} NvBufferDisplayScanFormat;
/**
* Defines Layout formats for NvBuffer video planes.
*/
typedef enum
{
/** Pitch Layout. */
NvBufferLayout_Pitch,
/** BlockLinear Layout. */
NvBufferLayout_BlockLinear,
} NvBufferLayout;
/**
* Defines memory access flags for NvBuffer.
*/
typedef enum
{
/** Memory read. */
NvBufferMem_Read,
/** Memory write. */
NvBufferMem_Write,
/** Memory read & write. */
NvBufferMem_Read_Write,
} NvBufferMemFlags;
/**
* Defines tags that identify the components requesting a memory allocation.
* The tags can be used later to identify the total memory allocated to
* particular types of components.
*/
typedef enum
{
/** tag None. */
NvBufferTag_NONE = 0x0,
/** tag for Camera. */
NvBufferTag_CAMERA = 0x200,
/** tag for Jpeg Encoder/Decoder. */
NvBufferTag_JPEG = 0x1500,
/** tag for VPR Buffers. */
NvBufferTag_PROTECTED = 0x1504,
/** tag for H264/H265 Video Encoder. */
NvBufferTag_VIDEO_ENC = 0x1200,
/** tag for H264/H265/VP9 Video Decoder. */
NvBufferTag_VIDEO_DEC = 0x1400,
/** tag for Video Transform/Composite. */
NvBufferTag_VIDEO_CONVERT = 0xf01,
} NvBufferTag;
/**
* Defines color formats for NvBuffer.
*/
typedef enum
{
/** BT.601 colorspace - YUV420 multi-planar. */
NvBufferColorFormat_YUV420,
/** BT.601 colorspace - YUV420 multi-planar. */
NvBufferColorFormat_YVU420,
/** BT.601 colorspace - YUV422 multi-planar. */
NvBufferColorFormat_YUV422,
/** BT.601 colorspace - YUV420 ER multi-planar. */
NvBufferColorFormat_YUV420_ER,
/** BT.601 colorspace - YVU420 ER multi-planar. */
NvBufferColorFormat_YVU420_ER,
/** BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
NvBufferColorFormat_NV12,
/** BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NvBufferColorFormat_NV12_ER,
/** BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
NvBufferColorFormat_NV21,
/** BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NvBufferColorFormat_NV21_ER,
/** BT.601 colorspace - YUV 4:2:2 planar. */
NvBufferColorFormat_UYVY,
/** BT.601 colorspace - YUV ER 4:2:2 planar. */
NvBufferColorFormat_UYVY_ER,
/** BT.601 colorspace - YUV 4:2:2 planar. */
NvBufferColorFormat_VYUY,
/** BT.601 colorspace - YUV ER 4:2:2 planar. */
NvBufferColorFormat_VYUY_ER,
/** BT.601 colorspace - YUV 4:2:2 planar. */
NvBufferColorFormat_YUYV,
/** BT.601 colorspace - YUV ER 4:2:2 planar. */
NvBufferColorFormat_YUYV_ER,
/** BT.601 colorspace - YUV 4:2:2 planar. */
NvBufferColorFormat_YVYU,
/** BT.601 colorspace - YUV ER 4:2:2 planar. */
NvBufferColorFormat_YVYU_ER,
/** LegacyRGBA colorspace - BGRA-8-8-8-8 planar. */
NvBufferColorFormat_ABGR32,
/** LegacyRGBA colorspace - XRGB-8-8-8-8 planar. */
NvBufferColorFormat_XRGB32,
/** LegacyRGBA colorspace - ARGB-8-8-8-8 planar. */
NvBufferColorFormat_ARGB32,
/** BT.601 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NvBufferColorFormat_NV12_10LE,
/** BT.709 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NvBufferColorFormat_NV12_10LE_709,
/** BT.709_ER colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NvBufferColorFormat_NV12_10LE_709_ER,
/** BT.2020 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NvBufferColorFormat_NV12_10LE_2020,
/** BT.601 colorspace - Y/CrCb 4:2:0 10-bit multi-planar. */
NvBufferColorFormat_NV21_10LE,
/** BT.601 colorspace - Y/CbCr 4:2:0 12-bit multi-planar. */
NvBufferColorFormat_NV12_12LE,
/** BT.2020 colorspace - Y/CbCr 4:2:0 12-bit multi-planar. */
NvBufferColorFormat_NV12_12LE_2020,
/** BT.601 colorspace - Y/CrCb 4:2:0 12-bit multi-planar. */
NvBufferColorFormat_NV21_12LE,
/** BT.709 colorspace - YUV420 multi-planar. */
NvBufferColorFormat_YUV420_709,
/** BT.709 colorspace - YUV420 ER multi-planar. */
NvBufferColorFormat_YUV420_709_ER,
/** BT.709 colorspace - Y/CbCr 4:2:0 multi-planar. */
NvBufferColorFormat_NV12_709,
/** BT.709 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NvBufferColorFormat_NV12_709_ER,
/** BT.2020 colorspace - YUV420 multi-planar. */
NvBufferColorFormat_YUV420_2020,
/** BT.2020 colorspace - Y/CbCr 4:2:0 multi-planar. */
NvBufferColorFormat_NV12_2020,
/** BT.601 colorspace - YUV444 multi-planar. */
NvBufferColorFormat_YUV444,
/** Optical flow */
NvBufferColorFormat_SignedR16G16,
/** Optical flow SAD calculation Buffer format */
NvBufferColorFormat_A32,
/** 8-bit grayscale. */
NvBufferColorFormat_GRAY8,
/** BT.601 colorspace - Y/CbCr 4:2:2 multi-planar. */
NvBufferColorFormat_NV16,
/** BT.601 colorspace - Y/CbCr 4:2:2 10-bit semi-planar. */
NvBufferColorFormat_NV16_10LE,
/** BT.601 colorspace - Y/CbCr 4:4:4 multi-planar. */
NvBufferColorFormat_NV24,
/** BT.601 colorspace - Y/CrCb 4:4:4 10-bit multi-planar. */
NvBufferColorFormat_NV24_10LE,
/** BT.601_ER colorspace - Y/CbCr 4:2:2 multi-planar. */
NvBufferColorFormat_NV16_ER,
/** BT.601_ER colorspace - Y/CbCr 4:4:4 multi-planar. */
NvBufferColorFormat_NV24_ER,
/** BT.709 colorspace - Y/CbCr 4:2:2 multi-planar. */
NvBufferColorFormat_NV16_709,
/** BT.709 colorspace - Y/CbCr 4:4:4 multi-planar. */
NvBufferColorFormat_NV24_709,
/** BT.709_ER colorspace - Y/CbCr 4:2:2 multi-planar. */
NvBufferColorFormat_NV16_709_ER,
/** BT.709_ER colorspace - Y/CbCr 4:4:4 multi-planar. */
NvBufferColorFormat_NV24_709_ER,
/** BT.709 colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NvBufferColorFormat_NV24_10LE_709,
/** BT.709 ER colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NvBufferColorFormat_NV24_10LE_709_ER,
/** BT.2020 colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NvBufferColorFormat_NV24_10LE_2020,
/** BT.2020 colorspace - Y/CbCr 12 bit 4:4:4 multi-planar. */
NvBufferColorFormat_NV24_12LE_2020,
/** Non-linear RGB BT.709 colorspace - RGBA-10-10-10-2 planar. */
NvBufferColorFormat_RGBA_10_10_10_2_709,
/** Non-linear RGB BT.2020 colorspace - RGBA-10-10-10-2 planar. */
NvBufferColorFormat_RGBA_10_10_10_2_2020,
/** Non-linear RGB BT.709 colorspace - BGRA-10-10-10-2 planar. */
NvBufferColorFormat_BGRA_10_10_10_2_709,
/** Non-linear RGB BT.2020 colorspace - BGRA-10-10-10-2 planar. */
NvBufferColorFormat_BGRA_10_10_10_2_2020,
/** Invalid color format. */
NvBufferColorFormat_Invalid,
} NvBufferColorFormat;
/**
* Defines video flip methods.
*/
typedef enum
{
/** Video flip none. */
NvBufferTransform_None,
/** Video flip rotate 90 degree counter-clockwise. */
NvBufferTransform_Rotate90,
/** Video flip rotate 180 degree. */
NvBufferTransform_Rotate180,
/** Video flip rotate 270 degree counter-clockwise. */
NvBufferTransform_Rotate270,
/** Video flip with respect to X-axis. */
NvBufferTransform_FlipX,
/** Video flip with respect to Y-axis. */
NvBufferTransform_FlipY,
/** Video flip transpose. */
NvBufferTransform_Transpose,
/** Video flip inverse transpose. */
NvBufferTransform_InvTranspose,
} NvBufferTransform_Flip;
/**
* Defines transform video filter types.
*/
typedef enum
{
/** transform filter nearest. */
NvBufferTransform_Filter_Nearest,
/** transform filter bilinear. */
NvBufferTransform_Filter_Bilinear,
/** transform filter 5 tap. */
NvBufferTransform_Filter_5_Tap,
/** transform filter 10 tap. */
NvBufferTransform_Filter_10_Tap,
/** transform filter smart. */
NvBufferTransform_Filter_Smart,
/** transform filter nicest. */
NvBufferTransform_Filter_Nicest,
} NvBufferTransform_Filter;
/**
* Defines flags to indicate for valid transform.
*/
typedef enum {
/** transform flag to crop source rectangle. */
NVBUFFER_TRANSFORM_CROP_SRC = 1,
/** transform flag to crop destination rectangle. */
NVBUFFER_TRANSFORM_CROP_DST = 1 << 1,
/** transform flag to set filter type. */
NVBUFFER_TRANSFORM_FILTER = 1 << 2,
/** transform flag to set flip method. */
NVBUFFER_TRANSFORM_FLIP = 1 << 3,
} NvBufferTransform_Flag;
/**
* Defines flags that specify valid composition/blending operations.
*/
typedef enum {
/** flag to set for composition. */
NVBUFFER_COMPOSITE = 1,
/** flag to set for blending. */
NVBUFFER_BLEND = 1 << 1,
/** composition flag to set filter type. */
NVBUFFER_COMPOSITE_FILTER = 1 << 2,
} NvBufferComposite_Flag;
/**
* Holds parameters for buffer sync point object.
* The sync object params structure is simply a [sync point ID, value] pair.
* Clients can use it to describe an event they might want to wait for.
*/
typedef struct _NvBufferSyncObjParams
{
uint32_t syncpointID;
uint32_t value;
}NvBufferSyncObjParams;
/**
* buffer sync point object.
*/
typedef struct _NvBufferSyncObjRec
{
NvBufferSyncObjParams insyncobj[NVBUF_MAX_SYNCOBJ_PARAMS];
uint32_t num_insyncobj;
NvBufferSyncObjParams outsyncobj;
uint32_t use_outsyncobj;
}NvBufferSyncObj;
/**
* Holds composition background r,g,b colors.
*/
typedef struct
{
/** background color value for r. */
float r;
/** background color value for g. */
float g;
/** background color value for b. */
float b;
}NvBufferCompositeBackground;
/**
* Holds coordinates for a rectangle.
*/
typedef struct
{
/** rectangle top. */
uint32_t top;
/** rectangle left. */
uint32_t left;
/** rectangle width. */
uint32_t width;
/** rectangle height. */
uint32_t height;
}NvBufferRect;
/**
* Holds an opaque NvBuffer session type required for parallel buffer
* transformations and compositions. Operations using a single session are
* scheduled sequentially, after the previous operation finishes. Operations for
* multiple sessions are scheduled in parallel.
*/
typedef struct _NvBufferSession * NvBufferSession;
/**
* Holds Chroma Subsampling parameters.
*/
typedef struct _NvBufferChromaSubSamplingParams
{
/** location settings */
uint8_t chromaLocHoriz;
uint8_t chromaLocVert;
}NvBufferChromaSubsamplingParams;
#define NVBUF_CHROMA_SUBSAMPLING_PARAMS_DEFAULT \
{ \
NVBUF_CHROMA_SUBSAMPLING_HORIZ_DEFAULT, \
NVBUF_CHROMA_SUBSAMPLING_VERT_DEFAULT \
}
/**
* Holds the input parameters for hardware buffer creation.
*/
typedef struct _NvBufferCreateParams
{
/** width of the buffer. */
int32_t width;
/** height of the buffer. */
int32_t height;
/** payload type of the buffer. */
NvBufferPayloadType payloadType;
/** size of the memory. (Applicable for NvBufferPayload_MemHandle) */
int32_t memsize;
/** layout of the buffer. */
NvBufferLayout layout;
/** colorformat of the buffer. */
NvBufferColorFormat colorFormat;
/** tag to associate with the buffer. */
NvBufferTag nvbuf_tag;
}NvBufferCreateParams;
/**
* Holds parameters for a hardware buffer.
*/
typedef struct _NvBufferParams
{
/** Holds the DMABUF FD of the hardware buffer. */
uint32_t dmabuf_fd;
/** pointer to hardware buffer memory. */
void *nv_buffer;
/** payload type of the buffer. */
NvBufferPayloadType payloadType;
/** size of the memory. (Applicable for NvBufferPayload_MemHandle) */
int32_t memsize;
/** size of hardware buffer. */
uint32_t nv_buffer_size;
/** video format type of hardware buffer. */
NvBufferColorFormat pixel_format;
/** number of planes of hardware buffer. */
uint32_t num_planes;
/** width of each plane of the hardware buffer. */
uint32_t width[MAX_NUM_PLANES];
/** height of each plane of the hardware buffer. */
uint32_t height[MAX_NUM_PLANES];
/** pitch of each plane of the hardware buffer. */
uint32_t pitch[MAX_NUM_PLANES];
/** memory offset of each video plane of the hardware buffer. */
uint32_t offset[MAX_NUM_PLANES];
/** size of each video plane of the hardware buffer. */
uint32_t psize[MAX_NUM_PLANES];
/** layout type of each plane of the hardware buffer. */
uint32_t layout[MAX_NUM_PLANES];
}NvBufferParams;
/**
* Holds extended parameters for a hardware buffer.
*/
typedef struct _NvBufferParamsEx
{
/** nvbuffer basic parameters. */
NvBufferParams params;
/** offset in bytes from the start of the buffer to the first valid byte.
(Applicable for NvBufferPayload_MemHandle) */
int32_t startofvaliddata;
/** size of the valid data from the first to the last valid byte.
(Applicable for NvBufferPayload_MemHandle) */
int32_t sizeofvaliddatainbytes;
/** display scan format - progressive/interlaced. */
NvBufferDisplayScanFormat scanformat[MAX_NUM_PLANES];
/** offset of the second field for interlaced buffer. */
uint32_t secondfieldoffset[MAX_NUM_PLANES];
/** block height of the planes for blockLinear layout hardware buffer. */
uint32_t blockheightlog2[MAX_NUM_PLANES];
/** physical address of allocated planes. */
uint32_t physicaladdress[MAX_NUM_PLANES];
/** flags associated with planes */
uint64_t flags[MAX_NUM_PLANES];
/** metadata associated with the hardware buffer. */
void *payloadmetaInfo;
/** chroma subsampling parameters */
NvBufferChromaSubsamplingParams chromaSubsampling;
/** get buffer vpr information. */
bool is_protected;
/** buffer sync point object parameters */
NvBufferSyncObj syncobj;
/** reserved field. */
void *reserved;
}NvBufferParamsEx;
/**
* Holds parameters related to compositing/blending.
*/
typedef struct _NvBufferCompositeParams
{
/** flag to indicate which of the composition/blending parameters are valid. */
uint32_t composite_flag;
/** number of the input buffers to be composited. */
uint32_t input_buf_count;
/** filters to use for composition. */
NvBufferTransform_Filter composite_filter[MAX_COMPOSITE_FRAME];
/** alpha values of input buffers for the blending. */
float dst_comp_rect_alpha[MAX_COMPOSITE_FRAME];
/** source rectangle coordinates of input buffers for composition. */
NvBufferRect src_comp_rect[MAX_COMPOSITE_FRAME];
/** destination rectangle coordinates of input buffers for composition. */
NvBufferRect dst_comp_rect[MAX_COMPOSITE_FRAME];
/** background color values for composition. */
NvBufferCompositeBackground composite_bgcolor;
/** NvBufferSession to be used for composition. If NULL, the default session
* is used. */
NvBufferSession session;
}NvBufferCompositeParams;
/**
* Holds parameters for buffer transform functions.
*/
typedef struct _NvBufferTransformParams
{
/** flag to indicate which of the transform parameters are valid. */
uint32_t transform_flag;
/** flip method. */
NvBufferTransform_Flip transform_flip;
/** transform filter. */
NvBufferTransform_Filter transform_filter;
/** source rectangle coordinates for the crop operation. */
NvBufferRect src_rect;
/** destination rectangle coordinates for the crop operation. */
NvBufferRect dst_rect;
/** NvBufferSession to be used for transform. If NULL, the default session
* is used. */
NvBufferSession session;
}NvBufferTransformParams;
/**
* This method can be used to wait on sync point ID.
*
* @param[in] syncobj_params sync point object parameters.
* @param[in] timeout sync point wait timeout value.
*
* @returns 0 for success, -1 for failure
*/
int NvBufferSyncObjWait (NvBufferSyncObjParams *syncobj_params, unsigned int timeout);
/**
* This method can be used to get hardware Buffer struct size.
*
* @returns hardware Buffer struct size.
*/
int NvBufferGetSize (void);
/**
* Creates an instance of EGLImage from a DMABUF FD.
*
* @param[in] display An EGLDisplay object used during the creation
* of the EGLImage. If NULL, nvbuf_utils() uses
* its own instance of EGLDisplay.
* @param[in] dmabuf_fd DMABUF FD of the buffer from which the EGLImage
* is to be created.
*
* @returns `EGLImageKHR` for success, `NULL` for failure
*/
EGLImageKHR NvEGLImageFromFd (EGLDisplay display, int dmabuf_fd);
/**
* Destroys an EGLImage object.
*
* @param[in] display An EGLDisplay object used to destroy the EGLImage.
* If NULL, nvbuf_utils() uses its own instance of
* EGLDisplay.
* @param[in] eglImage The EGLImageKHR object to be destroyed.
*
* @returns 0 for success, -1 for failure
*/
int NvDestroyEGLImage (EGLDisplay display, EGLImageKHR eglImage);
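/*
 * Usage sketch (illustrative only): wrapping a DMABUF FD in an EGLImage and
 * releasing it again with the two calls above. The helper name is
 * hypothetical, the EGL/CUDA interop step is elided, and error handling is
 * minimal; passing NULL lets nvbuf_utils use its own EGLDisplay instance.
 *
 *   static int wrap_in_egl_image (int dmabuf_fd)
 *   {
 *     EGLImageKHR image = NvEGLImageFromFd (NULL, dmabuf_fd);
 *     if (image == NULL)
 *       return -1;
 *
 *     // ... register the EGLImage with CUDA/GL and use it here ...
 *
 *     return NvDestroyEGLImage (NULL, image);
 *   }
 */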
/**
* Allocates a hardware buffer (deprecated).
*
* @deprecated Use NvBufferCreateEx() instead.
* @param[out] dmabuf_fd Returns the DMABUF FD of the hardware buffer.
* @param[in] width Buffer width, in bytes.
* @param[in] height Buffer height, in bytes.
* @param[in] layout Layout of the buffer.
* @param[in] colorFormat Color format of the buffer.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufferCreate (int *dmabuf_fd, int width, int height,
NvBufferLayout layout, NvBufferColorFormat colorFormat);
/**
* Allocates a hardware buffer.
*
* @param[out] dmabuf_fd Returns the DMABUF FD of the hardware buffer.
* @param[in] input_params Input parameters for hardware buffer creation.
*
* @returns 0 for success, -1 for failure
*/
int NvBufferCreateEx (int *dmabuf_fd, NvBufferCreateParams *input_params);
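/*
 * Usage sketch (illustrative only): allocating a pitch-linear NV12 buffer
 * with NvBufferCreateEx() and releasing it with NvBufferDestroy(). The
 * NvBufferCreateParams field names and the enum values used below are
 * assumed to match the definitions earlier in this header.
 *
 *   static int create_nv12_buffer (int width, int height, int *dmabuf_fd)
 *   {
 *     NvBufferCreateParams cp = {0};
 *     cp.width = width;
 *     cp.height = height;
 *     cp.layout = NvBufferLayout_Pitch;            // assumed enum value
 *     cp.colorFormat = NvBufferColorFormat_NV12;   // assumed enum value
 *     cp.payloadType = NvBufferPayload_SurfArray;  // assumed enum value
 *     cp.nvbuf_tag = NvBufferTag_NONE;             // assumed enum value
 *     return NvBufferCreateEx (dmabuf_fd, &cp);
 *   }
 *
 *   // ... use the buffer, then release it with NvBufferDestroy (fd).
 */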
/**
* Allocates a hardware buffer for the interlaced scan format.
*
* @param[out] dmabuf_fd Returns the DMABUF FD of the hardware buffer.
* @param[in] input_params Input parameters for hardware buffer creation.
*
* @returns 0 for success, -1 for failure
*/
int NvBufferCreateInterlace (int *dmabuf_fd, NvBufferCreateParams *input_params);
/**
* Allocates a hardware buffer with a given chroma subsampling location.
*
* @param[out] dmabuf_fd Returns the DMABUF FD of the hardware buffer.
* @param[in] input_params Input parameters for hardware buffer creation.
* @param[in] chromaSubsampling Chroma location parameters.
*
* @returns 0 for success, -1 for failure
*/
int NvBufferCreateWithChromaLoc (int *dmabuf_fd, NvBufferCreateParams *input_params, NvBufferChromaSubsamplingParams *chromaSubsampling);
/**
* Gets buffer parameters.
* @param[in] dmabuf_fd `DMABUF FD` of buffer.
* @param[out] params A pointer to the structure to fill with parameters.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferGetParams (int dmabuf_fd, NvBufferParams *params);
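/*
 * Usage sketch (illustrative only): querying per-plane information of an
 * allocated buffer. num_planes and pitch[] are assumed field names from the
 * NvBufferParams definition earlier in this header (layout[] is shown above);
 * <stdio.h> is assumed to be included.
 *
 *   NvBufferParams params = {0};
 *   if (NvBufferGetParams (dmabuf_fd, &params) == 0)
 *   {
 *     for (unsigned int i = 0; i < params.num_planes; i++)
 *       printf ("plane %u: pitch %u, layout %u\n",
 *               i, (unsigned) params.pitch[i], (unsigned) params.layout[i]);
 *   }
 */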
/**
* Gets buffer extended parameters.
* @param[in] dmabuf_fd `DMABUF FD` of buffer.
* @param[out] exparams A pointer to the structure to fill with extended parameters.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferGetParamsEx (int dmabuf_fd, NvBufferParamsEx *exparams);
/**
* Destroys a hardware buffer.
* @param[in] dmabuf_fd Specifies the `dmabuf_fd` of the `hw_buffer` to destroy.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferDestroy (int dmabuf_fd);
/**
* Extracts the `dmabuf_fd` from the hardware buffer.
* @param[in] nvbuf Specifies the `hw_buffer`.
* @param[out] dmabuf_fd Returns DMABUF FD of `hw_buffer`.
*
* @returns 0 for success, -1 for failure.
*/
int ExtractFdFromNvBuffer (void *nvbuf, int *dmabuf_fd);
/**
* Releases the `dmabuf_fd` buffer.
* @see ExtractFdFromNvBuffer()
* @param[in] dmabuf_fd Specifies the `dmabuf_fd` to release.
*
* @returns 0 for success, -1 for failure.
*/
int NvReleaseFd (int dmabuf_fd);
/**
* Syncs the hardware memory cache for the CPU.
*
* \sa NvBufferMemMap for the purpose of the function
*
* @param[in] dmabuf_fd DMABUF FD of buffer.
* @param[in] plane video frame plane.
* @param[in] pVirtAddr Virtual Address pointer of the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemSyncForCpu (int dmabuf_fd, unsigned int plane, void **pVirtAddr);
/**
* Syncs the hardware memory cache for the CPU; variant of NvBufferMemSyncForCpu() to be used from another process.
*
* \sa NvBufferMemMapEx for the purpose of the function
*
* @param[in] dmabuf_fd DMABUF FD of buffer.
* @param[in] exparams extended parameters for a hardware buffer.
* @param[in] plane video frame plane.
* @param[in] pVirtAddr Virtual Address pointer of the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemSyncForCpuEx (int dmabuf_fd, NvBufferParamsEx *exparams, unsigned int plane, void **pVirtAddr);
/**
* Syncs the hardware memory cache for the device.
*
* \sa NvBufferMemMap for the purpose of the function
*
* @param[in] dmabuf_fd DMABUF FD of buffer.
* @param[in] plane video frame plane.
* @param[in] pVirtAddr Virtual Address pointer of the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemSyncForDevice (int dmabuf_fd, unsigned int plane, void **pVirtAddr);
/**
* Syncs the hardware memory cache for the device; variant of NvBufferMemSyncForDevice() to be used from another process.
*
* \sa NvBufferMemMapEx for the purpose of the function
*
* @param[in] dmabuf_fd DMABUF FD of buffer.
* @param[in] exparams extended parameters for a hardware buffer.
* @param[in] plane video frame plane.
* @param[in] pVirtAddr Virtual Address pointer of the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemSyncForDeviceEx (int dmabuf_fd, NvBufferParamsEx *exparams, unsigned int plane, void **pVirtAddr);
/**
* Gets the memory-mapped virtual address of the plane.
*
* The client must call NvBufferMemSyncForCpu() with the virtual address returned
* by this function before accessing the mapped memory in CPU.
*
* After memory mapping is complete, mapped memory modification
* must be coordinated between the CPU and hardware device as
* follows:
* - CPU: If the CPU modifies any mapped memory, the client must call
* NvBufferMemSyncForDevice() before any hardware device accesses the memory.
* - Hardware device: If the mapped memory is modified by any hardware device,
* the client must call NvBufferMemSyncForCpu() before CPU accesses the memory.
*
* @param[in] dmabuf_fd DMABUF FD of buffer.
* @param[in] plane video frame plane. (Applies to @ref NvBufferPayload_SurfArray.)
* @param[in] memflag NvBuffer memory flag.
* @param[out] pVirtAddr Virtual Address pointer of the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemMap (int dmabuf_fd, unsigned int plane, NvBufferMemFlags memflag, void **pVirtAddr);
/**
* Gets the memory-mapped virtual address of the plane; variant of NvBufferMemMap() to be used from another process.
*
* The client must call NvBufferMemSyncForCpuEx() with the virtual address returned
* by this function before accessing the mapped memory from the CPU in the other process.
*
* After memory mapping is complete, mapped memory modification
* must be coordinated between the CPU and hardware device as
* follows:
* - CPU: If the CPU modifies any mapped memory, the client must call
* NvBufferMemSyncForDeviceEx() before any hardware device accesses the memory.
* - Hardware device: If the mapped memory is modified by any hardware device,
* the client must call NvBufferMemSyncForCpuEx() before CPU accesses the memory.
*
* @param[in] dmabuf_fd DMABUF FD of buffer.
* @param[in] exparams extended parameters for a hardware buffer.
* @param[in] plane video frame plane. (Applies to @ref NvBufferPayload_SurfArray.)
* @param[in] memflag NvBuffer memory flag.
* @param[out] pVirtAddr Virtual Address pointer of the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemMapEx (int dmabuf_fd, NvBufferParamsEx *exparams, unsigned int plane, NvBufferMemFlags memflag, void **pVirtAddr);
/**
* Unmaps the mapped virtual address of the plane.
*
* If the following conditions are both true, the client must call
* NvBufferMemSyncForDevice() before unmapping the memory:
* - Mapped memory was modified by the CPU.
* - Mapped memory will be accessed by a hardware device.
*
* @param[in] dmabuf_fd DMABUF FD of the buffer.
* @param[in] plane Video frame plane. Applies to
* @ref NvBufferPayload_SurfArray.
* @param[in] pVirtAddr Virtual address pointer to the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemUnMap (int dmabuf_fd, unsigned int plane, void **pVirtAddr);
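/*
 * Usage sketch (illustrative only): the map / sync / modify / sync / unmap
 * pattern for CPU access to one plane. The NvBufferMem_Read_Write flag name
 * is assumed from the memory-flag enum earlier in this header, the plane
 * size would normally come from NvBufferGetParams(), and <string.h> is
 * assumed to be included.
 *
 *   static int zero_plane (int dmabuf_fd, unsigned int plane, size_t plane_size)
 *   {
 *     void *vaddr = NULL;
 *     if (NvBufferMemMap (dmabuf_fd, plane, NvBufferMem_Read_Write, &vaddr) != 0)
 *       return -1;
 *
 *     // Make any pending hardware writes visible to the CPU.
 *     NvBufferMemSyncForCpu (dmabuf_fd, plane, &vaddr);
 *
 *     memset (vaddr, 0, plane_size);
 *
 *     // Flush the CPU writes before a hardware device touches the buffer.
 *     NvBufferMemSyncForDevice (dmabuf_fd, plane, &vaddr);
 *
 *     return NvBufferMemUnMap (dmabuf_fd, plane, &vaddr);
 *   }
 */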
/**
* Unmaps the mapped virtual address of the plane; variant of NvBufferMemUnMap() to be used from another process.
*
* If the following conditions are both true, the client must call
* NvBufferMemSyncForDeviceEx() before unmapping the memory in another process:
* - Mapped memory was modified by the CPU.
* - Mapped memory will be accessed by a hardware device.
*
* @param[in] dmabuf_fd DMABUF FD of the buffer.
* @param[in] exparams extended parameters for a hardware buffer.
* @param[in] plane Video frame plane. Applies to
* @ref NvBufferPayload_SurfArray.
* @param[in] pVirtAddr Virtual address pointer to the memory-mapped plane.
*
* @returns 0 for success, -1 for failure.
*/
int NvBufferMemUnMapEx (int dmabuf_fd, NvBufferParamsEx *exparams, unsigned int plane, void **pVirtAddr);
/**
* Copies the NvBuffer plane contents to a raw buffer plane.
* @param[in] dmabuf_fd DMABUF FD of NvBuffer.
* @param[in] plane video frame plane.
* @param[in] out_width aligned width of the raw data plane.
* @param[in] out_height aligned height of the raw data plane.
* @param[in] ptr pointer to the output raw plane data.
*
* @returns 0 for success, -1 for failure.
*/
int NvBuffer2Raw (int dmabuf_fd, unsigned int plane, unsigned int out_width, unsigned int out_height, unsigned char *ptr);
/**
* Copies raw buffer plane contents to an NvBuffer plane.
* @param[in] ptr pointer to the input raw plane data.
* @param[in] plane video frame plane.
* @param[in] in_width aligned width of the raw data plane.
* @param[in] in_height aligned height of the raw data plane.
* @param[in] dmabuf_fd DMABUF FD of NvBuffer.
*
* @returns 0 for success, -1 for failure.
*/
int Raw2NvBuffer (unsigned char *ptr, unsigned int plane, unsigned int in_width, unsigned int in_height, int dmabuf_fd);
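/*
 * Usage sketch (illustrative only): copying the Y plane of an NV12 buffer to
 * a CPU allocation and back. width/height stand for the aligned plane
 * dimensions (normally taken from NvBufferGetParams()), and <stdlib.h> is
 * assumed to be included.
 *
 *   unsigned char *raw = malloc ((size_t) width * height);  // 8-bit Y plane
 *   if (raw != NULL)
 *   {
 *     NvBuffer2Raw (dmabuf_fd, 0, width, height, raw);
 *     // ... process the raw pixels on the CPU ...
 *     Raw2NvBuffer (raw, 0, width, height, dmabuf_fd);
 *     free (raw);
 *   }
 */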
/**
* Creates a new NvBufferSession for parallel scheduling of
* buffer transformations and compositions.
*
* @returns A session pointer, NULL for failure.
*/
NvBufferSession NvBufferSessionCreate(void);
/**
* Destroys an existing \ref NvBufferSession.
* @param[in] session An existing NvBufferSession.
*/
void NvBufferSessionDestroy(NvBufferSession session);
/**
* Transforms one DMA buffer to another DMA buffer.
* This function can support transforms for copying, scaling, flipping, rotating, and cropping.
* @param[in] src_dmabuf_fd DMABUF FD of source buffer
* @param[in] dst_dmabuf_fd DMABUF FD of destination buffer
* @param[in] transform_params transform parameters
*
* @return 0 for success, -1 for failure.
*/
int NvBufferTransform (int src_dmabuf_fd, int dst_dmabuf_fd, NvBufferTransformParams *transform_params);
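/*
 * Usage sketch (illustrative only): scaling one buffer into another with
 * NvBufferTransform(); the scaling factor is implied by the source and
 * destination buffer sizes. The NVBUFFER_TRANSFORM_FILTER flag and the
 * NvBufferTransform_Filter_Smart enum value are assumed to match the
 * definitions earlier in this header.
 *
 *   static int scale_buffer (int src_fd, int dst_fd)
 *   {
 *     NvBufferTransformParams tp = {0};
 *     tp.transform_flag = NVBUFFER_TRANSFORM_FILTER;        // assumed flag name
 *     tp.transform_filter = NvBufferTransform_Filter_Smart; // assumed enum value
 *     tp.session = NULL;                                    // default session
 *     return NvBufferTransform (src_fd, dst_fd, &tp);
 *   }
 */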
/**
* Transforms one DMA buffer to another DMA buffer; variant of NvBufferTransform() to be used from another process.
* This function can support transforms for copying, scaling, flipping, rotating, and cropping.
* @param[in] src_dmabuf_fd DMABUF FD of source buffer
* @param[in] input_params extended input parameters for a hardware buffer.
* @param[in] dst_dmabuf_fd DMABUF FD of destination buffer
* @param[in] output_params extended output parameters for a hardware buffer.
* @param[in] transform_params transform parameters
*
* @return 0 for success, -1 for failure.
*/
int NvBufferTransformEx (int src_dmabuf_fd, NvBufferParamsEx *input_params, int dst_dmabuf_fd, NvBufferParamsEx *output_params, NvBufferTransformParams *transform_params);
/**
* Transforms one DMA buffer to another DMA buffer asynchronously (non-blocking).
* This function can support transforms for copying, scaling, flipping, rotating, and cropping.
* @param[in] src_dmabuf_fd DMABUF FD of source buffer
* @param[in] dst_dmabuf_fd DMABUF FD of destination buffer
* @param[in] transform_params transform parameters
* @param[in] syncobj nvbuffer sync point object
*
* @return 0 for success, -1 for failure.
*/
int NvBufferTransformAsync (int src_dmabuf_fd, int dst_dmabuf_fd, NvBufferTransformParams *transform_params, NvBufferSyncObj *syncobj);
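/*
 * Usage sketch (illustrative only): issuing a non-blocking transform and
 * waiting for its completion. The use_outsyncobj and outsyncobj field names
 * are assumptions about the NvBufferSyncObj definition earlier in this
 * header, and the timeout unit is assumed to be milliseconds.
 *
 *   NvBufferTransformParams tp = {0};   // filled in as for NvBufferTransform()
 *   NvBufferSyncObj syncobj = {0};
 *   syncobj.use_outsyncobj = 1;         // assumed field: request an output sync point
 *
 *   if (NvBufferTransformAsync (src_fd, dst_fd, &tp, &syncobj) == 0)
 *   {
 *     // ... overlap other CPU work with the transform here ...
 *     NvBufferSyncObjWait (&syncobj.outsyncobj, 1000);  // assumed field and unit
 *   }
 */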
/**
* \brief Composites multiple input DMA buffers to one output DMA buffer.
*
* This function can composite multiple input frames to one output.
*
* @param[in] src_dmabuf_fds An array of DMABUF FDs of source buffers.
* These buffers are composited together. Output
* is copied to the output buffer referenced by
* @a dst_dmabuf_fd.
* @param[in] dst_dmabuf_fd DMABUF FD of the compositing destination buffer.
* @param[in] composite_params Compositing parameters.
*/
int NvBufferComposite (int *src_dmabuf_fds, int dst_dmabuf_fd, NvBufferCompositeParams *composite_params);
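/*
 * Usage sketch (illustrative only): compositing two inputs side by side into
 * one destination buffer. The NVBUFFER_COMPOSITE flag name and the
 * NvBufferRect field names (top/left/width/height) are assumptions about the
 * definitions earlier in this header; src_fds, dst_fd and the dimensions are
 * placeholders.
 *
 *   NvBufferCompositeParams comp = {0};
 *   comp.composite_flag = NVBUFFER_COMPOSITE;       // assumed flag name
 *   comp.input_buf_count = 2;
 *   for (uint32_t i = 0; i < comp.input_buf_count; i++)
 *   {
 *     comp.src_comp_rect[i].left = 0;
 *     comp.src_comp_rect[i].top = 0;
 *     comp.src_comp_rect[i].width = in_width;
 *     comp.src_comp_rect[i].height = in_height;
 *     comp.dst_comp_rect[i].left = i * (out_width / 2);   // place side by side
 *     comp.dst_comp_rect[i].top = 0;
 *     comp.dst_comp_rect[i].width = out_width / 2;
 *     comp.dst_comp_rect[i].height = out_height;
 *     comp.composite_filter[i] = NvBufferTransform_Filter_Smart;  // assumed enum value
 *   }
 *   NvBufferComposite (src_fds, dst_fd, &comp);
 */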
#ifdef __cplusplus
}
#endif
/** @} */
#endif

740
nvbufsurface.h Normal file
View File

@@ -0,0 +1,740 @@
/*
* Copyright (c) 2019-2022, NVIDIA CORPORATION. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*/
/**
* @file nvbufsurface.h
* <b>NvBufSurface Interface </b>
*
* This file specifies the NvBufSurface management API.
*
* The NvBufSurface API provides methods to allocate / deallocate, map / unmap
* and copy batched buffers.
*/
/**
* @defgroup ds_nvbuf_api Buffer Management API module
*
* This section describes types and functions of NvBufSurface application
* programming interface.
*
*/
#ifndef NVBUFSURFACE_H_
#define NVBUFSURFACE_H_
#include <stdint.h>
#include <stdbool.h>
#ifdef __cplusplus
extern "C"
{
#endif
/** @defgroup ds_aaa NvBufSurface Types and Functions
* Defines types and functions of \ref NvBufSurface application
* programming interface.
* @ingroup ds_nvbuf_api
* @{ */
/** Defines the default padding length for reserved fields of structures. */
#define STRUCTURE_PADDING 4
/** Defines the maximum number of planes. */
#define NVBUF_MAX_PLANES 4
/**
* Defines the default values for chroma subsampling.
* The default value matches JPEG/MPEG use cases.
*/
#define NVBUFSURFACE_CHROMA_SUBSAMPLING_HORIZ_DEFAULT 0
#define NVBUFSURFACE_CHROMA_SUBSAMPLING_VERT_DEFAULT 1
#define NVBUFSURFACE_CHROMA_SUBSAMPLING_PARAMS_DEFAULT \
{ \
NVBUFSURFACE_CHROMA_SUBSAMPLING_HORIZ_DEFAULT, \
NVBUFSURFACE_CHROMA_SUBSAMPLING_VERT_DEFAULT \
}
/**
* Defines mapping types of NvBufSurface.
*/
typedef enum
{
NVBUF_MAP_READ, /**< Specifies \ref NvBufSurface mapping type "read." */
NVBUF_MAP_WRITE, /**< Specifies \ref NvBufSurface mapping type
"write." */
NVBUF_MAP_READ_WRITE, /**< Specifies \ref NvBufSurface mapping type
"read/write." */
} NvBufSurfaceMemMapFlags;
/**
* Defines tags that identify the components requesting a memory allocation.
* The tags can be used later to identify the total memory allocated to
* particular types of components.
* TODO: Check whether DeepStream requires more tags to be defined.
*/
typedef enum
{
/** tag None. */
NvBufSurfaceTag_NONE = 0x0,
/** tag for Camera. */
NvBufSurfaceTag_CAMERA = 0x200,
/** tag for Jpeg Encoder/Decoder. */
NvBufSurfaceTag_JPEG = 0x1500,
/** tag for VPR Buffers. */
NvBufSurfaceTag_PROTECTED = 0x1504,
/** tag for H264/H265 Video Encoder. */
NvBufSurfaceTag_VIDEO_ENC = 0x1200,
/** tag for H264/H265/VP9 Video Decoder. */
NvBufSurfaceTag_VIDEO_DEC = 0x1400,
/** tag for Video Transform/Composite/Blend. */
NvBufSurfaceTag_VIDEO_CONVERT = 0xf01,
} NvBufSurfaceTag;
/**
* Defines color formats for NvBufSurface.
*/
typedef enum
{
/** Specifies an invalid color format. */
NVBUF_COLOR_FORMAT_INVALID,
/** Specifies 8 bit GRAY scale - single plane */
NVBUF_COLOR_FORMAT_GRAY8,
/** Specifies BT.601 colorspace - YUV420 multi-planar. */
NVBUF_COLOR_FORMAT_YUV420,
/** Specifies BT.601 colorspace - YVU420 multi-planar. */
NVBUF_COLOR_FORMAT_YVU420,
/** Specifies BT.601 colorspace - YUV420 ER multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_ER,
/** Specifies BT.601 colorspace - YVU420 ER multi-planar. */
NVBUF_COLOR_FORMAT_YVU420_ER,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_ER,
/** Specifies BT.601 colorspace - Y/CrCb 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV21,
/** Specifies BT.601 colorspace - Y/CrCb ER 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV21_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_UYVY,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_UYVY_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_VYUY,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_VYUY_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YUYV,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YUYV_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YVYU,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YVYU_ER,
/** Specifies BT.601 colorspace - YUV444 multi-planar. */
NVBUF_COLOR_FORMAT_YUV444,
/** Specifies RGBA-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_RGBA,
/** Specifies BGRA-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_BGRA,
/** Specifies ARGB-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_ARGB,
/** Specifies ABGR-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_ABGR,
/** Specifies RGBx-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_RGBx,
/** Specifies BGRx-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_BGRx,
/** Specifies xRGB-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_xRGB,
/** Specifies xBGR-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_xBGR,
/** Specifies RGB-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_RGB,
/** Specifies BGR-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_BGR,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE,
/** Specifies BT.709 colorspace - YUV420 multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_709,
/** Specifies BT.709 colorspace - YUV420 ER multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_709_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_709_ER,
/** Specifies BT.2020 colorspace - YUV420 multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_2020,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_2020,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_2020,
/** Specifies color format for packed 2 signed shorts */
NVBUF_COLOR_FORMAT_SIGNED_R16G16,
/** Specifies RGB- unsigned 8 bit multiplanar plane. */
NVBUF_COLOR_FORMAT_R8_G8_B8,
/** Specifies BGR- unsigned 8 bit multiplanar plane. */
NVBUF_COLOR_FORMAT_B8_G8_R8,
/** Specifies RGB-32bit Floating point multiplanar plane. */
NVBUF_COLOR_FORMAT_R32F_G32F_B32F,
/** Specifies BGR-32bit Floating point multiplanar plane. */
NVBUF_COLOR_FORMAT_B32F_G32F_R32F,
/** Specifies BT.601 colorspace - YUV422 multi-planar. */
NVBUF_COLOR_FORMAT_YUV422,
/** Specifies BT.601 colorspace - Y/CrCb 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV21_10LE,
/** Specifies BT.601 colorspace - Y/CrCb 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV21_12LE,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE_2020,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:2 10-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_10LE,
/** Specifies BT.601 colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24,
/** Specifies BT.601 colorspace - Y/CrCb 4:4:4 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE,
/** Specifies BT.601_ER colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16_ER,
/** Specifies BT.601_ER colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16_709,
/** Specifies BT.709 colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_709,
/** Specifies BT.709_ER colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16_709_ER,
/** Specifies BT.709_ER colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_709_ER,
/** Specifies BT.709 colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE_709,
/** Specifies BT.709 ER colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE_2020,
/** Specifies BT.2020 colorspace - Y/CbCr 12 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_12LE_2020,
/** Specifies Non-linear RGB BT.709 colorspace - RGBA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_709,
/** Specifies Non-linear RGB BT.2020 colorspace - RGBA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_2020,
/** Specifies Non-linear RGB BT.709 colorspace - BGRA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_709,
/** Specifies Non-linear RGB BT.2020 colorspace - BGRA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_2020,
/** Specifies Optical flow SAD calculation Buffer format */
NVBUF_COLOR_FORMAT_A32,
/** Specifies BT.601 colorspace - 10 bit YUV 4:2:2 interleaved. */
NVBUF_COLOR_FORMAT_UYVP,
/** Specifies BT.601 colorspace - 10 bit YUV ER 4:2:2 interleaved. */
NVBUF_COLOR_FORMAT_UYVP_ER,
NVBUF_COLOR_FORMAT_LAST
} NvBufSurfaceColorFormat;
/**
* Specifies layout formats for \ref NvBufSurface video planes.
*/
typedef enum
{
/** Specifies pitch layout. */
NVBUF_LAYOUT_PITCH,
/** Specifies block linear layout. */
NVBUF_LAYOUT_BLOCK_LINEAR,
} NvBufSurfaceLayout;
/**
* Specifies memory types for \ref NvBufSurface.
*/
typedef enum
{
/** Specifies the default memory type, i.e. \ref NVBUF_MEM_CUDA_DEVICE
for dGPU, \ref NVBUF_MEM_SURFACE_ARRAY for Jetson. Use \ref NVBUF_MEM_DEFAULT
to allocate whichever type of memory is appropriate for the platform. */
NVBUF_MEM_DEFAULT,
/** Specifies CUDA Host memory type. */
NVBUF_MEM_CUDA_PINNED,
/** Specifies CUDA Device memory type. */
NVBUF_MEM_CUDA_DEVICE,
/** Specifies CUDA Unified memory type. */
NVBUF_MEM_CUDA_UNIFIED,
/** Specifies NVRM Surface Array type. Valid only for Jetson. */
NVBUF_MEM_SURFACE_ARRAY,
/** Specifies NVRM Handle type. Valid only for Jetson. */
NVBUF_MEM_HANDLE,
/** Specifies memory allocated by malloc(). */
NVBUF_MEM_SYSTEM,
} NvBufSurfaceMemType;
/**
* Defines display scan formats for NvBufSurface video planes.
*/
typedef enum
{
/** Progressive scan formats. */
NVBUF_DISPLAYSCANFORMAT_PROGRESSIVE,
/** Interlaced scan formats. */
NVBUF_DISPLAYSCANFORMAT_INTERLACED,
} NvBufSurfaceDisplayScanFormat;
/**
* Holds plane-wise (extended) parameters of a buffer.
*/
typedef struct NvBufSurfacePlaneParamsEx
{
/** display scan format - progressive/interlaced. */
NvBufSurfaceDisplayScanFormat scanformat[NVBUF_MAX_PLANES];
/** offset of the second field for interlaced buffer. */
uint32_t secondfieldoffset[NVBUF_MAX_PLANES];
/** block height of the planes for blockLinear layout buffer. */
uint32_t blockheightlog2[NVBUF_MAX_PLANES];
/** physical address of allocated planes. */
uint32_t physicaladdress[NVBUF_MAX_PLANES];
/** flags associated with planes */
uint64_t flags[NVBUF_MAX_PLANES];
void * _reserved[STRUCTURE_PADDING * NVBUF_MAX_PLANES];
} NvBufSurfacePlaneParamsEx;
/**
* Holds plane-wise parameters of a buffer.
*/
typedef struct NvBufSurfacePlaneParams
{
/** Holds the number of planes. */
uint32_t num_planes;
/** Holds the widths of planes. */
uint32_t width[NVBUF_MAX_PLANES];
/** Holds the heights of planes. */
uint32_t height[NVBUF_MAX_PLANES];
/** Holds the pitches of planes in bytes. */
uint32_t pitch[NVBUF_MAX_PLANES];
/** Holds the offsets of planes in bytes. */
uint32_t offset[NVBUF_MAX_PLANES];
/** Holds the sizes of planes in bytes. */
uint32_t psize[NVBUF_MAX_PLANES];
/** Holds the number of bytes occupied by a pixel in each plane. */
uint32_t bytesPerPix[NVBUF_MAX_PLANES];
void * _reserved[STRUCTURE_PADDING * NVBUF_MAX_PLANES];
} NvBufSurfacePlaneParams;
/**
* Holds Chroma Subsampling parameters for NvBufSurface allocation.
*/
typedef struct NvBufSurfaceChromaSubsamplingParams
{
/** location settings */
uint8_t chromaLocHoriz;
uint8_t chromaLocVert;
} NvBufSurfaceChromaSubsamplingParams;
/**
* Holds parameters required to allocate an \ref NvBufSurface.
*/
typedef struct NvBufSurfaceCreateParams {
/** Holds the GPU ID. Valid only for a multi-GPU system. */
uint32_t gpuId;
/** Holds the width of the buffer. */
uint32_t width;
/** Holds the height of the buffer. */
uint32_t height;
/** Holds the amount of memory to be allocated. Optional; if set, all other
parameters (width, height, etc.) are ignored. */
uint32_t size;
/** Holds a "contiguous memory" flag. If set, contiguous memory is allocated
for the batch. Valid only for CUDA memory types. */
bool isContiguous;
/** Holds the color format of the buffer. */
NvBufSurfaceColorFormat colorFormat;
/** Holds the surface layout. May be Block Linear (BL) or Pitch Linear (PL).
For a dGPU, only PL is valid. */
NvBufSurfaceLayout layout;
/** Holds the type of memory to be allocated. */
NvBufSurfaceMemType memType;
} NvBufSurfaceCreateParams;
/**
* Holds extended parameters required to allocate an \ref NvBufSurface.
* (Applicable to the NvBufSurfaceAllocate() API.)
*/
typedef struct NvBufSurfaceAllocateParams {
/** Holds the legacy NvBufSurface creation parameters. */
NvBufSurfaceCreateParams params;
/** Display scan format */
NvBufSurfaceDisplayScanFormat displayscanformat;
/** Chroma Subsampling parameters */
NvBufSurfaceChromaSubsamplingParams chromaSubsampling;
/** components tag to be used for memory allocation */
NvBufSurfaceTag memtag;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceAllocateParams;
/**
* Holds the pointers of a mapped buffer.
*/
typedef struct NvBufSurfaceMappedAddr {
/** Holds planewise pointers to a CPU mapped buffer. */
void * addr[NVBUF_MAX_PLANES];
/** Holds a pointer to a mapped EGLImage. */
void *eglImage;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceMappedAddr;
/**
* Holds the extended information of a single buffer in the batch.
*/
typedef struct NvBufSurfaceParamsEx {
/** offset in bytes from the start of the buffer to the first valid byte.
(Applicable for NVBUF_MEM_HANDLE) */
int32_t startofvaliddata;
/** size of the valid data from the first to the last valid byte.
(Applicable for NVBUF_MEM_HANDLE) */
int32_t sizeofvaliddatainbytes;
/** chroma subsampling parameters.
(Applicable for NVBUF_MEM_SURFACE_ARRAY) */
NvBufSurfaceChromaSubsamplingParams chromaSubsampling;
/** Indicates whether the buffer is allocated from protected (VPR) memory. */
bool is_protected;
/** plane wise extended info */
NvBufSurfacePlaneParamsEx planeParamsex;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceParamsEx;
/**
* Holds the information of a single buffer in the batch.
*/
typedef struct NvBufSurfaceParams {
/** Holds the width of the buffer. */
uint32_t width;
/** Holds the height of the buffer. */
uint32_t height;
/** Holds the pitch of the buffer. */
uint32_t pitch;
/** Holds the color format of the buffer. */
NvBufSurfaceColorFormat colorFormat;
/** Holds BL or PL. For dGPU, only PL is valid. */
NvBufSurfaceLayout layout;
/** Holds a DMABUF FD. Valid only for \ref NVBUF_MEM_SURFACE_ARRAY and
\ref NVBUF_MEM_HANDLE type memory. */
uint64_t bufferDesc;
/** Holds the amount of allocated memory. */
uint32_t dataSize;
/** Holds a pointer to allocated memory. Not valid for
\ref NVBUF_MEM_SURFACE_ARRAY or \ref NVBUF_MEM_HANDLE. */
void * dataPtr;
/** Holds planewise information (width, height, pitch, offset, etc.). */
NvBufSurfacePlaneParams planeParams;
/** Holds pointers to mapped buffers. Initialized to NULL
when the structure is created. */
NvBufSurfaceMappedAddr mappedAddr;
/** pointers of extended parameters of single buffer in the batch.*/
NvBufSurfaceParamsEx *paramex;
void * _reserved[STRUCTURE_PADDING - 1];
} NvBufSurfaceParams;
/**
* Holds information about batched buffers.
*/
typedef struct NvBufSurface {
/** Holds a GPU ID. Valid only for a multi-GPU system. */
uint32_t gpuId;
/** Holds the batch size. */
uint32_t batchSize;
/** Holds the number of valid and filled buffers. Initialized to zero when
an instance of the structure is created. */
uint32_t numFilled;
/** Holds an "is contiguous" flag. If set, memory allocated for the batch
is contiguous. */
bool isContiguous;
/** Holds type of memory for buffers in the batch. */
NvBufSurfaceMemType memType;
/** Holds a pointer to an array of batched buffers. */
NvBufSurfaceParams *surfaceList;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurface;
/**
* \brief Allocates a batch of buffers.
*
* Allocates memory for \a batchSize buffers and returns a pointer to an
* allocated \ref NvBufSurface. The \a params structure must have
* the allocation parameters of a single buffer. If \a params.size
* is set, a buffer of that size is allocated, and all other
* parameters (width, height, color format, etc.) are ignored.
*
* Call NvBufSurfaceDestroy() to free resources allocated by this function.
*
* @param[out] surf An indirect pointer to the allocated batched
* buffers.
* @param[in] batchSize Batch size of buffers.
* @param[in] params A pointer to an \ref NvBufSurfaceCreateParams
* structure.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceCreate (NvBufSurface **surf, uint32_t batchSize,
NvBufSurfaceCreateParams *params);
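/*
 * Usage sketch (illustrative only): allocating a batch of four NV12 buffers
 * and releasing them again. All identifiers used below are defined in this
 * header; error handling is minimal.
 *
 *   NvBufSurface *surf = NULL;
 *   NvBufSurfaceCreateParams cp = {0};
 *   cp.gpuId = 0;
 *   cp.width = 1920;
 *   cp.height = 1080;
 *   cp.colorFormat = NVBUF_COLOR_FORMAT_NV12;
 *   cp.layout = NVBUF_LAYOUT_PITCH;
 *   cp.memType = NVBUF_MEM_DEFAULT;
 *
 *   if (NvBufSurfaceCreate (&surf, 4, &cp) == 0)
 *   {
 *     // ... use surf->surfaceList[0] .. surf->surfaceList[3] ...
 *     NvBufSurfaceDestroy (surf);
 *   }
 */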
/**
* \brief Allocates a batch of buffers using extended allocation parameters.
*
* Allocates memory for \a batchSize buffers and returns a pointer to an
* allocated \ref NvBufSurface. The \a paramsext structure must hold the
* allocation parameters of a single buffer. If \a paramsext->params.size
* is set, a buffer of that size is allocated, and all other parameters
* (width, height, color format, etc.) are ignored.
*
* Call NvBufSurfaceDestroy() to free resources allocated by this function.
*
* @param[out] surf      An indirect pointer to the allocated batched buffers.
* @param[in]  batchSize Batch size of buffers.
* @param[in]  paramsext A pointer to an \ref NvBufSurfaceAllocateParams structure.
*
* @return 0 for success, -1 for failure.
*/
int NvBufSurfaceAllocate (NvBufSurface **surf, uint32_t batchSize,
NvBufSurfaceAllocateParams *paramsext);
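/*
 * Usage sketch (illustrative only): allocating one block-linear surface-array
 * buffer with extended parameters, using the default chroma subsampling and a
 * component tag. All identifiers used below are defined in this header.
 *
 *   NvBufSurface *surf = NULL;
 *   NvBufSurfaceAllocateParams ap = {0};
 *   ap.params.width = 1920;
 *   ap.params.height = 1080;
 *   ap.params.colorFormat = NVBUF_COLOR_FORMAT_NV12;
 *   ap.params.layout = NVBUF_LAYOUT_BLOCK_LINEAR;
 *   ap.params.memType = NVBUF_MEM_SURFACE_ARRAY;
 *   ap.displayscanformat = NVBUF_DISPLAYSCANFORMAT_PROGRESSIVE;
 *   ap.chromaSubsampling.chromaLocHoriz = NVBUFSURFACE_CHROMA_SUBSAMPLING_HORIZ_DEFAULT;
 *   ap.chromaSubsampling.chromaLocVert = NVBUFSURFACE_CHROMA_SUBSAMPLING_VERT_DEFAULT;
 *   ap.memtag = NvBufSurfaceTag_VIDEO_CONVERT;
 *
 *   if (NvBufSurfaceAllocate (&surf, 1, &ap) == 0)
 *     NvBufSurfaceDestroy (surf);
 */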
/**
* Frees the batched buffers previously allocated through NvBufSurfaceCreate() or NvBufSurfaceAllocate().
*
* @param[in] surf A pointer to an \ref NvBufSurface to be freed.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceDestroy (NvBufSurface *surf);
/**
* \brief Maps hardware batched buffers to the HOST or CPU address space.
*
* Valid for \ref NVBUF_MEM_CUDA_UNIFIED type memory for dGPU and
* \ref NVBUF_MEM_SURFACE_ARRAY and \ref NVBUF_MEM_HANDLE type memory for
* Jetson.
*
* This function fills an array of pointers at
* \a surf->surfaceList->mappedAddr->addr.
* \a surf is a pointer to an \ref NvBufSurface.
* \a surfaceList is a pointer to an \ref NvBufSurfaceParams.
* \a mappedAddr is a pointer to an \ref NvBufSurfaceMappedAddr.
* \a addr is declared as an array of pointers to void, and holds pointers
* to the buffers.
*
* The client must call NvBufSurfaceSyncForCpu() with the virtual address
* populated by this function before accessing mapped memory in the CPU.
*
* After memory mapping is complete, mapped memory modification
* must be coordinated between the CPU and the hardware device as
* follows:
* - CPU: If the CPU modifies mapped memory, the client must call
* NvBufSurfaceSyncForDevice() before any hardware device accesses the memory.
* - Hardware device: If a hardware device modifies mapped memory, the client
* must call NvBufSurfaceSyncForCpu() before the CPU accesses the memory.
*
* Use NvBufSurfaceUnMap() to unmap buffer(s) and release any resource.
*
* @param[in,out] surf A pointer to an NvBufSurface structure. The function
* stores pointers to the buffers in a descendant of this
* structure; see the notes above.
* @param[in] index Index of a buffer in the batch. -1 refers to all buffers
* in the batch.
* @param[in] plane Index of a plane in buffer. -1 refers to all planes
* in the buffer.
* @param[in] type A flag for mapping type.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceMap (NvBufSurface *surf, int index, int plane, NvBufSurfaceMemMapFlags type);
/**
* \brief Unmaps previously mapped buffer(s).
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index Index of a buffer in the batch. -1 indicates
* all buffers in the batch.
* @param[in] plane Index of a plane in the buffer. -1 indicates
* all planes in the buffer.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceUnMap (NvBufSurface *surf, int index, int plane);
/**
* \brief Copies the content of source batched buffer(s) to destination
* batched buffer(s).
*
* You can use this function to copy source buffer(s) of one memory type
* to destination buffer(s) of another memory type,
* e.g. CUDA host to CUDA device, malloc'ed memory to CUDA device, etc.
*
* The source and destination \ref NvBufSurface objects must have the same
* buffer size and batch size.
*
* @param[in] srcSurf A pointer to the source NvBufSurface structure.
* @param[in] dstSurf A pointer to the destination NvBufSurface structure.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceCopy (NvBufSurface *srcSurf, NvBufSurface *dstSurf);
/**
* \brief Copies the NvBufSurface plane memory content to a raw buffer plane for a specific
* batched buffer.
*
* This function can be used to copy plane memory content from a specific source
* batched buffer of a supported memory type to the destination raw buffer pointer.
*
* @param[in] Surf pointer to NvBufSurface structure.
* @param[in] index index of buffer in the batch.
* @param[in] plane index of plane in buffer.
* @param[in] out_width aligned width of the raw data plane.
* @param[in] out_height aligned height of the raw data plane.
* @param[in] ptr pointer to the output raw plane data.
*
* @return 0 for success, -1 for failure.
*/
int NvBufSurface2Raw (NvBufSurface *Surf, unsigned int index, unsigned int plane, unsigned int out_width, unsigned int out_height, unsigned char *ptr);
/**
* \brief Copies the raw buffer plane memory content to the NvBufSurface plane memory of a specific
* batched buffer.
*
* This function can be used to copy plane memory content from the source raw
* buffer pointer to a specific destination batched buffer of a supported memory type.
*
* @param[in] ptr pointer to the input raw plane data.
* @param[in] index index of buffer in the batch.
* @param[in] plane index of plane in buffer.
* @param[in] in_width aligned width of the raw data plane.
* @param[in] in_height aligned height of the raw data plane.
* @param[in] Surf pointer to NvBufSurface structure.
*
* @return 0 for success, -1 for failure.
*/
int Raw2NvBufSurface (unsigned char *ptr, unsigned int index, unsigned int plane, unsigned int in_width, unsigned int in_height, NvBufSurface *Surf);
/**
* \brief Syncs the hardware memory cache for the CPU.
*
* Valid only for memory types \ref NVBUF_MEM_SURFACE_ARRAY and
* \ref NVBUF_MEM_HANDLE.
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index Index of the buffer in the batch. -1 refers to
* all buffers in the batch.
* @param[in] plane Index of a plane in the buffer. -1 refers to all planes
* in the buffer.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceSyncForCpu (NvBufSurface *surf, int index, int plane);
/**
* \brief Syncs the hardware memory cache for the device.
*
* Valid only for memory types \ref NVBUF_MEM_SURFACE_ARRAY and
* \ref NVBUF_MEM_HANDLE.
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index Index of a buffer in the batch. -1 refers to all buffers
* in the batch.
* @param[in] plane Index of a plane in the buffer. -1 refers to all planes
* in the buffer.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceSyncForDevice (NvBufSurface *surf, int index, int plane);
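/*
 * Usage sketch (illustrative only): the map / sync / access / sync / unmap
 * pattern for CPU access to the first buffer in a batch. All identifiers
 * used below are defined in this header.
 *
 *   if (NvBufSurfaceMap (surf, 0, -1, NVBUF_MAP_READ_WRITE) == 0)
 *   {
 *     NvBufSurfaceSyncForCpu (surf, 0, -1);
 *
 *     uint8_t *plane0 = (uint8_t *) surf->surfaceList[0].mappedAddr.addr[0];
 *     // ... read or modify pixels through plane0 ...
 *
 *     NvBufSurfaceSyncForDevice (surf, 0, -1);
 *     NvBufSurfaceUnMap (surf, 0, -1);
 *   }
 */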
/**
* \brief Gets the \ref NvBufSurface from the DMABUF FD.
*
* @param[in] dmabuf_fd DMABUF FD of the buffer.
* @param[out] buffer A pointer to the NvBufSurface.
*
* @return 0 for success, or -1 otherwise.
*/
int NvBufSurfaceFromFd (int dmabuf_fd, void **buffer);
/**
* \brief Fills each byte of the buffer(s) in an \ref NvBufSurface with a
* provided value.
*
* You can also use this function to reset the buffer(s) in the batch.
*
* @param[in] surf A pointer to the NvBufSurface structure.
* @param[in] index Index of a buffer in the batch. -1 refers to all buffers
* in the batch.
* @param[in] plane Index of a plane in the buffer. -1 refers to all planes
* in the buffer.
* @param[in] value The value to be used as fill.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceMemSet (NvBufSurface *surf, int index, int plane, uint8_t value);
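/*
 * Usage sketch (illustrative only): resetting every plane of every buffer in
 * the batch to zero by passing -1 for both index and plane.
 *
 *   NvBufSurfaceMemSet (surf, -1, -1, 0);
 */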
/**
* \brief Creates an EGLImage from the memory of one or more
* \ref NvBufSurface buffers.
*
* Only memory type \ref NVBUF_MEM_SURFACE_ARRAY is supported.
*
* This function returns the created EGLImage by storing its address at
* \a surf->surfaceList->mappedAddr->eglImage. (\a surf is a pointer to
* an NvBufSurface. \a surfaceList is a pointer to an \ref NvBufSurfaceParams.
* \a mappedAddr is a pointer to an \ref NvBufSurfaceMappedAddr.
* \a eglImage is declared as a pointer to void, and holds an
* EGLImageKHR.)
*
* You can use this function in scenarios where a CUDA operation on Jetson
* hardware memory (identified by \ref NVBUF_MEM_SURFACE_ARRAY) is required.
* The EGLImageKHR struct provided by this function can then be registered
* with CUDA for further CUDA operations.
*
* @param[in,out] surf A pointer to an NvBufSurface structure. The function
* stores a pointer to the created EGLImage in
* a descendant of this structure; see the notes above.
* @param[in] index Index of a buffer in the batch. -1 specifies all buffers
* in the batch.
*
* @return 0 for success, or -1 otherwise.
*/
int NvBufSurfaceMapEglImage (NvBufSurface *surf, int index);
/**
* \brief Destroys the previously created EGLImage object(s).
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index The index of a buffer in the batch. -1 specifies all
* buffers in the batch.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceUnMapEglImage (NvBufSurface *surf, int index);
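/*
 * Usage sketch (illustrative only): creating an EGLImage for the first buffer
 * in a batch, e.g. for CUDA interop, and destroying it afterwards. The
 * registration call named in the comment (cuGraphicsEGLRegisterImage) is the
 * CUDA driver API entry point typically used for this purpose.
 *
 *   if (NvBufSurfaceMapEglImage (surf, 0) == 0)
 *   {
 *     void *egl_image = surf->surfaceList[0].mappedAddr.eglImage;
 *     // ... register egl_image with CUDA (cuGraphicsEGLRegisterImage) ...
 *     NvBufSurfaceUnMapEglImage (surf, 0);
 *   }
 */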
/** @} */
#ifdef __cplusplus
}
#endif
#endif /* NVBUFSURFACE_H_ */

1
push_info.txt Normal file
View File

@@ -0,0 +1 @@
jetson_35.1

2644
v4l2_nv_extensions.h Normal file
View File

File diff suppressed because it is too large