Updating prebuilts and/or headers

ed8273ff6102bb0b4fa7975a401b12b3e95a7187 - nvbufsurface.h
7af73b80b2f930ab91431d66cd84ec794da9e117 - v4l2_nv_extensions.h
d27a433ddeaefb9f42d0312c23472514b0cd6a45 - gst-nvcustomevent.h
21a860247c06670e4619b8eaae1d92db31bdd3e8 - gst-v4l2/gstv4l2.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
49a66f0ce02abc71f33e096a65645ddedf5c7f46 - gst-v4l2/gstv4l2bufferpool.c
9f726e4439379bb399f29c68736242f21dab3dd0 - gst-v4l2/gstv4l2allocator.c
65de802e5f162aa04518b7ade5841cc3ced01111 - gst-v4l2/Makefile
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
dc1a3f7292873f1f71dc27300f97f3ab918ed79f - gst-v4l2/gstv4l2h265enc.c
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
b52a5ee4c739818736b9a3683442df285ebe9eda - gst-v4l2/gstv4l2videodec.c
3f7cafe5beb4395caf2e1591bf0a835e5076031a - gst-v4l2/gstv4l2object.h
d5952b0286c34bf13fbf5e09fe552ced0da49368 - gst-v4l2/gstv4l2videodec.h
398c24d1eef98ec9003a06587bc3784050602cd2 - gst-v4l2/gstv4l2h26xparser.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
c2099692cdb374440c2a040cb6ad01bbc1549ce5 - gst-v4l2/gstv4l2h26xparser.h
99d65d620807b5ba1ca29a838e032940c9b019cc - gst-v4l2/sei_parse.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
489fde70531590e94d1d211a42f10f81ae68d2b9 - gst-v4l2/gstv4l2videoenc.h
4e79cf75c4fa29791e1f5141318dc8aec13a7835 - gst-v4l2/nalutils.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
5ecd059e5ef9be4014eface37e5e2f7598960f4e - gst-v4l2/nalutils.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
9681f7b98dfdfbc4d845f9ce7f11c3692b923195 - gst-v4l2/gstv4l2videoenc.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
884e5b97b9fa8d07b6153e6efe6999884922b813 - gst-v4l2/gstv4l2object.c
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h

Change-Id: I3770af2d1c63a6193ccfb47a0ec190f5d241a331
This commit is contained in:
svcmobrel-release
2024-09-06 00:00:25 -07:00
parent 7a8ebb805b
commit 5c1f0868fd
44 changed files with 30490 additions and 0 deletions

commitFile.txt Normal file (43 lines)

@@ -0,0 +1,43 @@
Updating prebuilts and/or headers
ed8273ff6102bb0b4fa7975a401b12b3e95a7187 - nvbufsurface.h
7af73b80b2f930ab91431d66cd84ec794da9e117 - v4l2_nv_extensions.h
d27a433ddeaefb9f42d0312c23472514b0cd6a45 - gst-nvcustomevent.h
21a860247c06670e4619b8eaae1d92db31bdd3e8 - gst-v4l2/gstv4l2.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
49a66f0ce02abc71f33e096a65645ddedf5c7f46 - gst-v4l2/gstv4l2bufferpool.c
9f726e4439379bb399f29c68736242f21dab3dd0 - gst-v4l2/gstv4l2allocator.c
65de802e5f162aa04518b7ade5841cc3ced01111 - gst-v4l2/Makefile
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
dc1a3f7292873f1f71dc27300f97f3ab918ed79f - gst-v4l2/gstv4l2h265enc.c
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
b52a5ee4c739818736b9a3683442df285ebe9eda - gst-v4l2/gstv4l2videodec.c
3f7cafe5beb4395caf2e1591bf0a835e5076031a - gst-v4l2/gstv4l2object.h
d5952b0286c34bf13fbf5e09fe552ced0da49368 - gst-v4l2/gstv4l2videodec.h
398c24d1eef98ec9003a06587bc3784050602cd2 - gst-v4l2/gstv4l2h26xparser.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
c2099692cdb374440c2a040cb6ad01bbc1549ce5 - gst-v4l2/gstv4l2h26xparser.h
99d65d620807b5ba1ca29a838e032940c9b019cc - gst-v4l2/sei_parse.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
489fde70531590e94d1d211a42f10f81ae68d2b9 - gst-v4l2/gstv4l2videoenc.h
4e79cf75c4fa29791e1f5141318dc8aec13a7835 - gst-v4l2/nalutils.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
5ecd059e5ef9be4014eface37e5e2f7598960f4e - gst-v4l2/nalutils.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
9681f7b98dfdfbc4d845f9ce7f11c3692b923195 - gst-v4l2/gstv4l2videoenc.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
884e5b97b9fa8d07b6153e6efe6999884922b813 - gst-v4l2/gstv4l2object.c
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h

gst-nvcustomevent.h Normal file (235 lines)

@@ -0,0 +1,235 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/**
* @file
* <b>NVIDIA GStreamer: Custom Events</b>
*
* @b Description: This file specifies the NVIDIA GStreamer custom
* event functions.
*
*/
/**
* @defgroup gstreamer_nvevent Events: Custom Events API
*
* Specifies GStreamer custom event functions.
*
* @ingroup gst_mess_evnt_qry
* @{
*/
#ifndef __GST_NVCUSTOMEVENT_H__
#define __GST_NVCUSTOMEVENT_H__
#include <gst/gst.h>
#ifdef __cplusplus
extern "C" {
#endif
#define FLAG(name) GST_EVENT_TYPE_##name
/** Defines supported types of custom events. */
typedef enum {
/** Specifies a custom event to indicate decoder drop frame interval update
of a particular stream. */
GST_NVEVENT_DEC_DROP_FRAME_INTERVAL_UPDATE
= GST_EVENT_MAKE_TYPE (500, FLAG(DOWNSTREAM) | FLAG(SERIALIZED)),
/** Specifies a custom event to indicate decoder skip frame update
of a particular stream. */
GST_NVEVENT_DEC_SKIP_FRAME_UPDATE
= GST_EVENT_MAKE_TYPE (501, FLAG(DOWNSTREAM) | FLAG(SERIALIZED)),
/** Specifies a custom event to enable decoder low-latency-mode
of a particular stream. */
GST_NVEVENT_DEC_ENABLE_LOW_LATENCY_MODE
= GST_EVENT_MAKE_TYPE (502, FLAG(DOWNSTREAM) | FLAG(SERIALIZED)),
/** Specifies a custom event to indicate encoder bitrate update
of a particular stream. */
GST_NVEVENT_ENC_BITRATE_UPDATE
= GST_EVENT_MAKE_TYPE (503, FLAG(DOWNSTREAM) | FLAG(SERIALIZED)),
/** Specifies a custom event to indicate encoder force IDR frame
of a particular stream. */
GST_NVEVENT_ENC_FORCE_IDR
= GST_EVENT_MAKE_TYPE (504, FLAG(DOWNSTREAM) | FLAG(SERIALIZED)),
/** Specifies a custom event to indicate encoder force Intra frame
of a particular stream. */
GST_NVEVENT_ENC_FORCE_INTRA
= GST_EVENT_MAKE_TYPE (505, FLAG(DOWNSTREAM) | FLAG(SERIALIZED)),
/** Specifies a custom event to indicate iframe interval update
of a particular stream. */
GST_NVEVENT_ENC_IFRAME_INTERVAL_UPDATE
= GST_EVENT_MAKE_TYPE (506, FLAG(DOWNSTREAM) | FLAG(SERIALIZED))
} GstNvCustomEventType;
#undef FLAG
/**
* Creates a new "nv-dec-drop-frame-interval-update" event.
*
* @param[out] stream_id Stream ID of the stream for which decoder-drop-frame-interval is to be sent
* @param[out] interval The decoder drop-frame interval obtained corresponding to stream ID for the event.
*/
GstEvent * gst_nvevent_dec_drop_frame_interval_update (gchar* stream_id, guint interval);
/**
* Parses a "nv-dec-drop-frame-interval-update" event received on the sinkpad.
*
* @param[in] event The event received on the sinkpad
* when the stream ID sends a dec-drop-frame-interval-update event.
* @param[out] stream_id A pointer to the parsed stream ID for which
* the event is sent.
* @param[out] interval A pointer to the parsed interval
* corresponding to stream ID for the event.
*/
void gst_nvevent_parse_dec_drop_frame_interval_update (GstEvent * event, gchar** stream_id, guint *interval);
/**
* Creates a new "nv-dec-skip-frame-update" event.
*
* @param[out] stream_id Stream ID of the stream for which decoder-skip-frame-update is to be sent
* @param[out] frame_type The decoder frame-type to be skipped obtained corresponding to stream ID for the event.
*/
GstEvent * gst_nvevent_dec_skip_frame_update (gchar* stream_id, guint frame_type);
/**
* Parses a "nv-dec-skip-frame-update" event received on the sinkpad.
*
* @param[in] event The event received on the sinkpad
* when the stream ID sends a skip-frame-update event.
* @param[out] stream_id A pointer to the parsed stream ID for which
* the event is sent.
* @param[out] frame_type A pointer to the parsed frame_type
* corresponding to stream ID for the event.
*/
void gst_nvevent_parse_dec_skip_frame_update (GstEvent * event, gchar** stream_id, guint *frame_type);
/**
* Creates a new "nv-dec-enable-low-latency-mode" event.
*
* @param[out] stream_id Stream ID of the stream for which decoder-low-latency-mode is to be sent
* @param[out] enable The decoder low latency mode to be enabled corresponding to stream ID for the event.
*/
GstEvent * gst_nvevent_dec_enable_low_latency_mode (gchar* stream_id, gint enable);
/**
* Parses a "nv-dec-enable-low-latency-mode" event received on the sinkpad.
*
* @param[in] event The event received on the sinkpad
* when the stream ID sends an enable-low-latency-mode event.
* @param[out] stream_id A pointer to the parsed stream ID for which
* the event is sent.
* @param[out] enable A pointer to the parsed enable flag
* corresponding to stream ID for the event.
*/
void gst_nvevent_parse_dec_enable_low_latency_mode (GstEvent * event, gchar** stream_id, gint *enable);
/**
* Creates a new "nv-enc-bitrate-update" event.
*
* @param[out] stream_id Stream ID of the stream for which encoder-bitrate-update is to be sent
* @param[out] bitrate The encoder bitrate to be set corresponding to stream ID for the event.
*/
GstEvent * gst_nvevent_enc_bitrate_update (gchar* stream_id, guint bitrate);
/**
* Parses a "nv-enc-bitrate-update" event received on the sinkpad.
*
* @param[in] event The event received on the sinkpad
* when the stream ID sends a bitrate-update event.
* @param[out] stream_id A pointer to the parsed stream ID for which
* the event is sent.
* @param[out] bitrate A pointer to the parsed bitrate value
* corresponding to stream ID for the event.
*/
void gst_nvevent_parse_enc_bitrate_update (GstEvent * event, gchar** stream_id, guint *bitrate);
/**
* Creates a new "nv-enc-force-idr" event.
*
* @param[out] stream_id Stream ID of the stream for which encoder-force-idr is to be sent
* @param[out] force The encoder force IDR frame corresponding to stream ID for the event.
*/
GstEvent * gst_nvevent_enc_force_idr (gchar* stream_id, gint force);
/**
* Parses a "nv-enc-force-idr" event received on the sinkpad.
*
* @param[in] event The event received on the sinkpad
* when the stream ID sends a force-idr event.
* @param[out] stream_id A pointer to the parsed stream ID for which
* the event is sent.
* @param[out] force A pointer to the parsed force value
* corresponding to stream ID for the event.
*/
void gst_nvevent_parse_enc_force_idr (GstEvent * event, gchar** stream_id, gint *force);
/**
* Creates a new "nv-enc-force-intra" event.
*
* @param[out] stream_id Stream ID of the stream for which encoder-force-intra is to be sent
* @param[out] force The encoder force Intra frame corresponding to stream ID for the event.
*/
GstEvent * gst_nvevent_enc_force_intra (gchar* stream_id, gint force);
/**
* Parses a "nv-enc-force-intra" event received on the sinkpad.
*
* @param[in] event The event received on the sinkpad
* when the stream ID sends a force-intra event.
* @param[out] stream_id A pointer to the parsed stream ID for which
* the event is sent.
* @param[out] force A pointer to the parsed force value
* corresponding to stream ID for the event.
*/
void gst_nvevent_parse_enc_force_intra (GstEvent * event, gchar** stream_id, gint *force);
/**
* Creates a new "nv-enc-iframeinterval-update" event.
*
* @param[out] stream_id Stream ID of the stream for which encoder-iframeinterval-update is to be sent
* @param[out] interval The encoder iframeinterval to be set corresponding to stream ID for the event.
*/
GstEvent * gst_nvevent_enc_iframeinterval_update (gchar* stream_id, guint interval);
/**
* Parses a "nv-enc-iframeinterval-update" event received on the sinkpad.
*
* @param[in] event The event received on the sinkpad
* when the stream ID sends an iframeinterval-update event.
* @param[out] stream_id A pointer to the parsed stream ID for which
* the event is sent.
* @param[out] interval A pointer to the parsed interval value
* corresponding to stream ID for the event.
*/
void gst_nvevent_parse_enc_iframeinterval_update (GstEvent * event, gchar** stream_id, guint *interval);
#ifdef __cplusplus
}
#endif
#endif
/** @} */
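
As a usage sketch (not part of this commit), the snippet below shows how an application could push one of the custom events declared above into a pipeline. The helper name and the target element are assumptions for illustration; the event constructor comes from this header and gst_element_send_event() from core GStreamer.

/* Hypothetical helper: ask an nvv4l2 encoder for a new target bitrate.
 * gst_element_send_event() takes ownership of the event. */
#include <gst/gst.h>
#include "gst-nvcustomevent.h"

static void
request_encoder_bitrate (GstElement *encoder, const gchar *stream_id, guint bitrate_bps)
{
  GstEvent *ev = gst_nvevent_enc_bitrate_update ((gchar *) stream_id, bitrate_bps);

  if (!gst_element_send_event (encoder, ev))
    g_warning ("nv-enc-bitrate-update event was not handled by %s",
        GST_ELEMENT_NAME (encoder));
}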

gst-v4l2/LICENSE.gst-nvvideo4linux2 Normal file (397 lines)

@@ -0,0 +1,397 @@
The software listed below is licensed under the terms of the LGPLv2
(see below). To obtain source code, contact oss-requests@nvidia.com.
gst-nvvideo4linux2 (libgstnvvideo4linux2.so)
------------------------------------
GNU LIBRARY GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1991 Free Software Foundation, Inc.
51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the library GPL. It is
numbered 2 because it goes with version 2 of the ordinary GPL.]
Preamble
The licenses for most software are designed to take away your freedom to share
and change it. By contrast, the GNU General Public Licenses are intended to
guarantee your freedom to share and change free software--to make sure the
software is free for all its users.
This license, the Library General Public License, applies to some specially
designated Free Software Foundation software, and to any other libraries whose
authors decide to use it. You can use it for your libraries, too.
When we speak of free software, we are referring to freedom, not price. Our
General Public Licenses are designed to make sure that you have the freedom to
distribute copies of free software (and charge for this service if you wish),
that you receive source code or can get it if you want it, that you can change
the software or use pieces of it in new free programs; and that you know you can
do these things.
To protect your rights, we need to make restrictions that forbid anyone to deny
you these rights or to ask you to surrender the rights. These restrictions
translate to certain responsibilities for you if you distribute copies of the
library, or if you modify it.
For example, if you distribute copies of the library, whether gratis or for a
fee, you must give the recipients all the rights that we gave you. You must make
sure that they, too, receive or can get the source code. If you link a program
with the library, you must provide complete object files to the recipients so
that they can relink them with the library, after making changes to the library
and recompiling it. And you must show them these terms so they know their
rights.
Our method of protecting your rights has two steps: (1) copyright the library,
and (2) offer you this license which gives you legal permission to copy,
distribute and/or modify the library.
Also, for each distributor's protection, we want to make certain that everyone
understands that there is no warranty for this free library. If the library is
modified by someone else and passed on, we want its recipients to know that what
they have is not the original version, so that any problems introduced by others
will not reflect on the original authors' reputations.
Finally, any free program is threatened constantly by software patents. We wish
to avoid the danger that companies distributing free software will individually
obtain patent licenses, thus in effect transforming the program into proprietary
software. To prevent this, we have made it clear that any patent must be
licensed for everyone's free use or not licensed at all.
Most GNU software, including some libraries, is covered by the ordinary GNU
General Public License, which was designed for utility programs. This license,
the GNU Library General Public License, applies to certain designated libraries.
This license is quite different from the ordinary one; be sure to read it in
full, and don't assume that anything in it is the same as in the ordinary
license.
The reason we have a separate public license for some libraries is that they
blur the distinction we usually make between modifying or adding to a program
and simply using it. Linking a program with a library, without changing the
library, is in some sense simply using the library, and is analogous to running
a utility program or application program. However, in a textual and legal sense,
the linked executable is a combined work, a derivative of the original library,
and the ordinary General Public License treats it as such.
Because of this blurred distinction, using the ordinary General Public License
for libraries did not effectively promote software sharing, because most
developers did not use the libraries. We concluded that weaker conditions might
promote sharing better.
However, unrestricted linking of non-free programs would deprive the users of
those programs of all benefit from the free status of the libraries themselves.
This Library General Public License is intended to permit developers of non-free
programs to use free libraries, while preserving your freedom as a user of such
programs to change the free libraries that are incorporated in them. (We have
not seen how to achieve this as regards changes in header files, but we have
achieved it as regards changes in the actual functions of the Library.) The hope
is that this will lead to faster development of free libraries.
The precise terms and conditions for copying, distribution and modification
follow. Pay close attention to the difference between a "work based on the
library" and a "work that uses the library". The former contains code derived
from the library, while the latter only works together with the library.
Note that it is possible for a library to be covered by the ordinary General
Public License rather than by this special one.
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library which contains a
notice placed by the copyright holder or other authorized party saying it may be
distributed under the terms of this Library General Public License (also called
"this License"). Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data prepared so as
to be conveniently linked with application programs (which use some of those
functions and data) to form executables.
The "Library", below, refers to any such software library or work which has been
distributed under these terms. A "work based on the Library" means either the
Library or any derivative work under copyright law: that is to say, a work
containing the Library or a portion of it, either verbatim or with modifications
and/or translated straightforwardly into another language. (Hereinafter,
translation is included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for making
modifications to it. For a library, complete source code means all the source
code for all modules it contains, plus any associated interface definition
files, plus the scripts used to control compilation and installation of the
library.
Activities other than copying, distribution and modification are not covered by
this License; they are outside its scope. The act of running a program using the
Library is not restricted, and output from such a program is covered only if its
contents constitute a work based on the Library (independent of the use of the
Library in a tool for writing it). Whether that is true depends on what the
Library does and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's complete source
code as you receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice and
disclaimer of warranty; keep intact all the notices that refer to this License
and to the absence of any warranty; and distribute a copy of this License along
with the Library.
You may charge a fee for the physical act of transferring a copy, and you may at
your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Library or any portion of it, thus
forming a work based on the Library, and copy and distribute such modifications
or work under the terms of Section 1 above, provided that you also meet all of
these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices stating that
you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no charge to all
third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a table of
data to be supplied by an application program that uses the facility, other than
as an argument passed when the facility is invoked, then you must make a good
faith effort to ensure that, in the event an application does not supply such
function or table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has a purpose
that is entirely well-defined independent of the application. Therefore,
Subsection 2d requires that any application-supplied function or table used by
this function must be optional: if the application does not supply it, the
square root function must still compute square roots.)
These requirements apply to the modified work as a whole. If identifiable
sections of that work are not derived from the Library, and can be reasonably
considered independent and separate works in themselves, then this License, and
its terms, do not apply to those sections when you distribute them as separate
works. But when you distribute the same sections as part of a whole which is a
work based on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the entire whole,
and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest your
rights to work written entirely by you; rather, the intent is to exercise the
right to control the distribution of derivative or collective works based on the
Library.
In addition, mere aggregation of another work not based on the Library with the
Library (or with a work based on the Library) on a volume of a storage or
distribution medium does not bring the other work under the scope of this
License.
3. You may opt to apply the terms of the ordinary GNU General Public License
instead of this License to a given copy of the Library. To do this, you must
alter all the notices that refer to this License, so that they refer to the
ordinary GNU General Public License, version 2, instead of to this License. (If
a newer version than version 2 of the ordinary GNU General Public License has
appeared, then you can specify that version instead if you wish.) Do not make
any other change in these notices.
Once this change is made in a given copy, it is irreversible for that copy, so
the ordinary GNU General Public License applies to all subsequent copies and
derivative works made from that copy.
This option is useful when you wish to copy part of the code of the Library into
a program that is not a library.
4. You may copy and distribute the Library (or a portion or derivative of it,
under Section 2) in object code or executable form under the terms of Sections 1
and 2 above provided that you accompany it with the complete corresponding
machine-readable source code, which must be distributed under the terms of
Sections 1 and 2 above on a medium customarily used for software interchange.
If distribution of object code is made by offering access to copy from a
designated place, then offering equivalent access to copy the source code from
the same place satisfies the requirement to distribute the source code, even
though third parties are not compelled to copy the source along with the object
code.
5. A program that contains no derivative of any portion of the Library, but is
designed to work with the Library by being compiled or linked with it, is called
a "work that uses the Library". Such a work, in isolation, is not a derivative
work of the Library, and therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library creates an
executable that is a derivative of the Library (because it contains portions of
the Library), rather than a "work that uses the library". The executable is
therefore covered by this License. Section 6 states terms for distribution of
such executables.
When a "work that uses the Library" uses material from a header file that is
part of the Library, the object code for the work may be a derivative work of
the Library even though the source code is not. Whether this is true is
especially significant if the work can be linked without the Library, or if the
work is itself a library. The threshold for this to be true is not precisely
defined by law.
If such an object file uses only numerical parameters, data structure layouts
and accessors, and small macros and small inline functions (ten lines or less in
length), then the use of the object file is unrestricted, regardless of whether
it is legally a derivative work. (Executables containing this object code plus
portions of the Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may distribute the
object code for the work under the terms of Section 6. Any executables
containing that work also fall under Section 6, whether or not they are linked
directly with the Library itself.
6. As an exception to the Sections above, you may also compile or link a "work
that uses the Library" with the Library to produce a work containing portions of
the Library, and distribute that work under terms of your choice, provided that
the terms permit modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the Library is
used in it and that the Library and its use are covered by this License. You
must supply a copy of this License. If the work during execution displays
copyright notices, you must include the copyright notice for the Library among
them, as well as a reference directing the user to the copy of this License.
Also, you must do one of these things:
a) Accompany the work with the complete corresponding machine-readable
source code for the Library including whatever changes were used in the work
(which must be distributed under Sections 1 and 2 above); and, if the work is an
executable linked with the Library, with the complete machine-readable "work
that uses the Library", as object code and/or source code, so that the user can
modify the Library and then relink to produce a modified executable containing
the modified Library. (It is understood that the user who changes the contents
of definitions files in the Library will not necessarily be able to recompile
the application to use the modified definitions.)
b) Accompany the work with a written offer, valid for at least three years,
to give the same user the materials specified in Subsection 6a, above, for a
charge no more than the cost of performing this distribution.
c) If distribution of the work is made by offering access to copy from a
designated place, offer equivalent access to copy the above specified materials
from the same place.
d) Verify that the user has already received a copy of these materials or
that you have already sent this user a copy.
For an executable, the required form of the "work that uses the Library" must
include any data and utility programs needed for reproducing the executable from
it. However, as a special exception, the source code distributed need not
include anything that is normally distributed (in either source or binary form)
with the major components (compiler, kernel, and so on) of the operating system
on which the executable runs, unless that component itself accompanies the
executable.
It may happen that this requirement contradicts the license restrictions of
other proprietary libraries that do not normally accompany the operating system.
Such a contradiction means you cannot use both them and the Library together in
an executable that you distribute.
7. You may place library facilities that are a work based on the Library
side-by-side in a single library together with other library facilities not
covered by this License, and distribute such a combined library, provided that
the separate distribution of the work based on the Library and of the other
library facilities is otherwise permitted, and provided that you do these two
things:
a) Accompany the combined library with a copy of the same work based on the
Library, uncombined with any other library facilities. This must be distributed
under the terms of the Sections above.
b) Give prominent notice with the combined library of the fact that part of
it is a work based on the Library, and explaining where to find the accompanying
uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute the Library
except as expressly provided under this License. Any attempt otherwise to copy,
modify, sublicense, link with, or distribute the Library is void, and will
automatically terminate your rights under this License. However, parties who
have received copies, or rights, from you under this License will not have their
licenses terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not signed it.
However, nothing else grants you permission to modify or distribute the Library
or its derivative works. These actions are prohibited by law if you do not
accept this License. Therefore, by modifying or distributing the Library (or any
work based on the Library), you indicate your acceptance of this License to do
so, and all its terms and conditions for copying, distributing or modifying the
Library or works based on it.
10. Each time you redistribute the Library (or any work based on the Library),
the recipient automatically receives a license from the original licensor to
copy, distribute, link with or modify the Library subject to these terms and
conditions. You may not impose any further restrictions on the recipients'
exercise of the rights granted herein. You are not responsible for enforcing
compliance by third parties to this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues), conditions
are imposed on you (whether by court order, agreement or otherwise) that
contradict the conditions of this License, they do not excuse you from the
conditions of this License. If you cannot distribute so as to satisfy
simultaneously your obligations under this License and any other pertinent
obligations, then as a consequence you may not distribute the Library at all.
For example, if a patent license would not permit royalty-free redistribution of
the Library by all those who receive copies directly or indirectly through you,
then the only way you could satisfy both it and this License would be to refrain
entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply, and
the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any patents or
other property right claims or to contest validity of any such claims; this
section has the sole purpose of protecting the integrity of the free software
distribution system which is implemented by public license practices. Many
people have made generous contributions to the wide range of software
distributed through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing to
distribute software through any other system and a licensee cannot impose that
choice.
This section is intended to make thoroughly clear what is believed to be a
consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in certain
countries either by patents or by copyrighted interfaces, the original copyright
holder who places the Library under this License may add an explicit
geographical distribution limitation excluding those countries, so that
distribution is permitted only in or among countries not thus excluded. In such
case, this License incorporates the limitation as if written in the body of this
License.
13. The Free Software Foundation may publish revised and/or new versions of the
Library General Public License from time to time. Such new versions will be
similar in spirit to the present version, but may differ in detail to address
new problems or concerns.
Each version is given a distinguishing version number. If the Library specifies
a version number of this License which applies to it and "any later version",
you have the option of following the terms and conditions either of that version
or of any later version published by the Free Software Foundation. If the
Library does not specify a license version number, you may choose any version
ever published by the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free programs
whose distribution conditions are incompatible with these, write to the author
to ask for permission. For software which is copyrighted by the Free Software
Foundation, write to the Free Software Foundation; we sometimes make exceptions
for this. Our decision will be guided by the two goals of preserving the free
status of all derivatives of our free software and of promoting the sharing and
reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE
LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED
IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS
IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL
ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE
LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL,
SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY
TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF
THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

gst-v4l2/Makefile Normal file (70 lines)

@@ -0,0 +1,70 @@
###############################################################################
#
# Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA Corporation and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA Corporation is strictly prohibited.
#
###############################################################################
SO_NAME := libgstnvvideo4linux2.so
TARGET_DEVICE = $(shell gcc -dumpmachine | cut -f1 -d -)
NVDS_VERSION:=6.0
ifeq ($(TARGET_DEVICE),aarch64)
GST_INSTALL_DIR?=/usr/lib/aarch64-linux-gnu/gstreamer-1.0/
LIB_INSTALL_DIR?=/usr/lib/aarch64-linux-gnu/tegra/
CFLAGS:=
else
GST_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/gst-plugins/
LIB_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/
CFLAGS:= -DUSE_V4L2_TARGET_NV_CODECSDK=1 -DUSE_V4L2_TARGET_NV_X86=1 -DUSE_V4L2_GST_HEADER_VER_1_8
endif
LIBS:= -lnvbufsurface -lnvbufsurftransform -lgstnvdsseimeta -lgstnvcustomhelper
SRCS := $(wildcard *.c)
INCLUDES += -I./ -I../
INCLUDES += -I/usr/src/jetson_multimedia_api/include/
PKGS := gstreamer-1.0 \
gstreamer-base-1.0 \
gstreamer-video-1.0 \
gstreamer-allocators-1.0 \
glib-2.0 \
libv4l2
OBJS := $(SRCS:.c=.o)
CFLAGS += -fPIC \
-DEXPLICITLY_ADDED=1 \
-DGETTEXT_PACKAGE=1 \
-DHAVE_LIBV4L2=1 \
-DUSE_V4L2_TARGET_NV=1
CFLAGS += `pkg-config --cflags $(PKGS)`
LDFLAGS = -Wl,--no-undefined -L$(LIB_INSTALL_DIR) -Wl,-rpath,$(LIB_INSTALL_DIR)
LIBS += `pkg-config --libs $(PKGS)`
all: $(SO_NAME)
%.o: %.c
$(CC) -c $< $(CFLAGS) $(INCLUDES) -o $@
$(SO_NAME): $(OBJS)
$(CC) -shared -o $(SO_NAME) $(OBJS) $(LIBS) $(LDFLAGS)
.PHONY: install
install: $(SO_NAME)
cp -vp $(SO_NAME) $(GST_INSTALL_DIR)
.PHONY: clean
clean:
rm -rf $(OBJS) $(SO_NAME)

gst-v4l2/README.txt Normal file (37 lines)

@@ -0,0 +1,37 @@
###############################################################################
#
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA Corporation and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA Corporation is strictly prohibited.
#
###############################################################################
Steps to compile the "gst-nvvideo4linux2" sources natively:
1) Install gstreamer related packages on target using the command:
sudo apt-get install libgstreamer1.0-dev \
gstreamer1.0-plugins-base \
gstreamer1.0-plugins-good \
libgstreamer-plugins-base1.0-dev \
libv4l-dev \
libegl1-mesa-dev
2) Download and extract the package "gst-nvvideo4linux2_src.tbz2" as follows:
tar -I lbzip2 -xvf gst-nvvideo4linux2_src.tbz2
3) Run the following commands to build and install "libgstnvvideo4linux2.so":
make
make install
or
DEST_DIR=<dir> make install
Note: For Jetson, "make install" will copy the library "libgstnvvideo4linux2.so"
into the "/usr/lib/aarch64-linux-gnu/gstreamer-1.0" directory. For x86 platforms,
"make install" will copy the library "libgstnvvideo4linux2.so" into the
"/opt/nvidia/deepstream/deepstream-4.0/lib/gst-plugins" directory.

gst-v4l2/ext/types-compat.h Normal file (58 lines)

@@ -0,0 +1,58 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas@ndufresne.ca>
* Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include <glib.h>
#ifndef __TYPES_COMPAT_H__
#define __TYPES_COMPAT_H__
/* From linux/types.h */
#ifndef __bitwise__
# ifdef __CHECKER__
# define __bitwise__ __attribute__((bitwise))
# else
# define __bitwise__
# endif
#endif
#ifndef __bitwise
# ifdef __CHECK_ENDIAN__
# define __bitwise __bitwise__
# else
# define __bitwise
# endif
#endif
#define __u64 guint64
#define __u32 guint32
#define __u16 guint16
#define __u8 guint8
#ifdef USE_V4L2_TARGET_NV
#define __s8 gint8
#endif
#define __s64 gint64
#define __s32 gint32
#define __le32 guint32 __bitwise
#define __user
#endif /* __TYPES_COMPAT_H__ */
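
As a quick compile-time sanity check (a sketch, not part of this commit; assumes the plugin's include paths), GLib's static assertions can confirm that the aliases above keep the expected kernel ABI widths:

/* Verify that the GLib-backed aliases match the fixed-width kernel types. */
#include <glib.h>
#include "ext/types-compat.h"

G_STATIC_ASSERT (sizeof (__u8)  == 1);
G_STATIC_ASSERT (sizeof (__u16) == 2);
G_STATIC_ASSERT (sizeof (__u32) == 4);
G_STATIC_ASSERT (sizeof (__u64) == 8);
G_STATIC_ASSERT (sizeof (__s32) == 4);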

gst-v4l2/ext/v4l2-common.h Normal file (107 lines)

@@ -0,0 +1,107 @@
/*
* include/linux/v4l2-common.h
*
* Common V4L2 and V4L2 subdev definitions.
*
* Users are advised to #include this file either through videodev2.h
* (V4L2) or through v4l2-subdev.h (V4L2 subdev) rather than to refer
* to this file directly.
*
* Copyright (C) 2012 Nokia Corporation
* Contact: Sakari Ailus <sakari.ailus@iki.fi>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* Alternatively you can redistribute this file under the terms of the
* BSD license as stated below:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. The names of its contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef __V4L2_COMMON__
#define __V4L2_COMMON__
#include "ext/types-compat.h"
/*
*
* Selection interface definitions
*
*/
/* Current cropping area */
#define V4L2_SEL_TGT_CROP 0x0000
/* Default cropping area */
#define V4L2_SEL_TGT_CROP_DEFAULT 0x0001
/* Cropping bounds */
#define V4L2_SEL_TGT_CROP_BOUNDS 0x0002
/* Native frame size */
#define V4L2_SEL_TGT_NATIVE_SIZE 0x0003
/* Current composing area */
#define V4L2_SEL_TGT_COMPOSE 0x0100
/* Default composing area */
#define V4L2_SEL_TGT_COMPOSE_DEFAULT 0x0101
/* Composing bounds */
#define V4L2_SEL_TGT_COMPOSE_BOUNDS 0x0102
/* Current composing area plus all padding pixels */
#define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103
/* Backward compatibility target definitions --- to be removed. */
#define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP
#define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS
/* Selection flags */
#define V4L2_SEL_FLAG_GE (1 << 0)
#define V4L2_SEL_FLAG_LE (1 << 1)
#define V4L2_SEL_FLAG_KEEP_CONFIG (1 << 2)
/* Backward compatibility flag definitions --- to be removed. */
#define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE
#define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE
#define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG
struct v4l2_edid {
__u32 pad;
__u32 start_block;
__u32 blocks;
__u32 reserved[5];
__u8 *edid;
};
#endif /* __V4L2_COMMON__ */
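
For context, a minimal usage sketch (not part of this commit; assumes struct v4l2_selection and VIDIOC_G_SELECTION from videodev2.h) showing how the selection targets defined above are typically queried:

/* Read the driver's current capture crop rectangle from an open V4L2 fd. */
#include <string.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>   /* or the bundled ext/videodev2.h */

static int
print_capture_crop (int fd)
{
  struct v4l2_selection sel;

  memset (&sel, 0, sizeof (sel));
  sel.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  sel.target = V4L2_SEL_TGT_CROP;   /* "current cropping area" above */

  if (ioctl (fd, VIDIOC_G_SELECTION, &sel) < 0)
    return -1;

  printf ("crop: %ux%u at (%d,%d)\n",
      sel.r.width, sel.r.height, sel.r.left, sel.r.top);
  return 0;
}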

gst-v4l2/ext/v4l2-controls.h Normal file (987 lines)

@@ -0,0 +1,987 @@
/*
* Video for Linux Two controls header file
*
* Copyright (C) 1999-2012 the contributors
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* Alternatively you can redistribute this file under the terms of the
* BSD license as stated below:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. The names of its contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The contents of this header was split off from videodev2.h. All control
* definitions should be added to this header, which is included by
* videodev2.h.
*/
#ifndef __LINUX_V4L2_CONTROLS_H
#define __LINUX_V4L2_CONTROLS_H
/* Control classes */
#define V4L2_CTRL_CLASS_USER 0x00980000 /* Old-style 'user' controls */
#define V4L2_CTRL_CLASS_MPEG 0x00990000 /* MPEG-compression controls */
#define V4L2_CTRL_CLASS_CAMERA 0x009a0000 /* Camera class controls */
#define V4L2_CTRL_CLASS_FM_TX 0x009b0000 /* FM Modulator controls */
#define V4L2_CTRL_CLASS_FLASH 0x009c0000 /* Camera flash controls */
#define V4L2_CTRL_CLASS_JPEG 0x009d0000 /* JPEG-compression controls */
#define V4L2_CTRL_CLASS_IMAGE_SOURCE 0x009e0000 /* Image source controls */
#define V4L2_CTRL_CLASS_IMAGE_PROC 0x009f0000 /* Image processing controls */
#define V4L2_CTRL_CLASS_DV 0x00a00000 /* Digital Video controls */
#define V4L2_CTRL_CLASS_FM_RX 0x00a10000 /* FM Receiver controls */
#define V4L2_CTRL_CLASS_RF_TUNER 0x00a20000 /* RF tuner controls */
#define V4L2_CTRL_CLASS_DETECT 0x00a30000 /* Detection controls */
/* User-class control IDs */
#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900)
#define V4L2_CID_USER_BASE V4L2_CID_BASE
#define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1)
#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0)
#define V4L2_CID_CONTRAST (V4L2_CID_BASE+1)
#define V4L2_CID_SATURATION (V4L2_CID_BASE+2)
#define V4L2_CID_HUE (V4L2_CID_BASE+3)
#define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE+5)
#define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE+6)
#define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE+7)
#define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE+8)
#define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE+9)
#define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE+10)
#define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE+11) /* Deprecated */
#define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE+12)
#define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE+13)
#define V4L2_CID_RED_BALANCE (V4L2_CID_BASE+14)
#define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE+15)
#define V4L2_CID_GAMMA (V4L2_CID_BASE+16)
#define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) /* Deprecated */
#define V4L2_CID_EXPOSURE (V4L2_CID_BASE+17)
#define V4L2_CID_AUTOGAIN (V4L2_CID_BASE+18)
#define V4L2_CID_GAIN (V4L2_CID_BASE+19)
#define V4L2_CID_HFLIP (V4L2_CID_BASE+20)
#define V4L2_CID_VFLIP (V4L2_CID_BASE+21)
#define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE+24)
enum v4l2_power_line_frequency {
V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0,
V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1,
V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2,
V4L2_CID_POWER_LINE_FREQUENCY_AUTO = 3,
};
#define V4L2_CID_HUE_AUTO (V4L2_CID_BASE+25)
#define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE+26)
#define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27)
#define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE+28)
#define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE+29)
#define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE+30)
#define V4L2_CID_COLORFX (V4L2_CID_BASE+31)
enum v4l2_colorfx {
V4L2_COLORFX_NONE = 0,
V4L2_COLORFX_BW = 1,
V4L2_COLORFX_SEPIA = 2,
V4L2_COLORFX_NEGATIVE = 3,
V4L2_COLORFX_EMBOSS = 4,
V4L2_COLORFX_SKETCH = 5,
V4L2_COLORFX_SKY_BLUE = 6,
V4L2_COLORFX_GRASS_GREEN = 7,
V4L2_COLORFX_SKIN_WHITEN = 8,
V4L2_COLORFX_VIVID = 9,
V4L2_COLORFX_AQUA = 10,
V4L2_COLORFX_ART_FREEZE = 11,
V4L2_COLORFX_SILHOUETTE = 12,
V4L2_COLORFX_SOLARIZATION = 13,
V4L2_COLORFX_ANTIQUE = 14,
V4L2_COLORFX_SET_CBCR = 15,
};
#define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE+32)
#define V4L2_CID_BAND_STOP_FILTER (V4L2_CID_BASE+33)
#define V4L2_CID_ROTATE (V4L2_CID_BASE+34)
#define V4L2_CID_BG_COLOR (V4L2_CID_BASE+35)
#define V4L2_CID_CHROMA_GAIN (V4L2_CID_BASE+36)
#define V4L2_CID_ILLUMINATORS_1 (V4L2_CID_BASE+37)
#define V4L2_CID_ILLUMINATORS_2 (V4L2_CID_BASE+38)
#define V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (V4L2_CID_BASE+39)
#define V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (V4L2_CID_BASE+40)
#define V4L2_CID_ALPHA_COMPONENT (V4L2_CID_BASE+41)
#define V4L2_CID_COLORFX_CBCR (V4L2_CID_BASE+42)
/* last CID + 1 */
#define V4L2_CID_LASTP1 (V4L2_CID_BASE+43)
/* USER-class private control IDs */
/* The base for the meye driver controls. See linux/meye.h for the list
* of controls. We reserve 16 controls for this driver. */
#define V4L2_CID_USER_MEYE_BASE (V4L2_CID_USER_BASE + 0x1000)
/* The base for the bttv driver controls.
* We reserve 32 controls for this driver. */
#define V4L2_CID_USER_BTTV_BASE (V4L2_CID_USER_BASE + 0x1010)
/* The base for the s2255 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_S2255_BASE (V4L2_CID_USER_BASE + 0x1030)
/*
* The base for the si476x driver controls. See include/media/drv-intf/si476x.h
* for the list of controls. Total of 16 controls is reserved for this driver
*/
#define V4L2_CID_USER_SI476X_BASE (V4L2_CID_USER_BASE + 0x1040)
/* The base for the TI VPE driver controls. Total of 16 controls is reserved for
* this driver */
#define V4L2_CID_USER_TI_VPE_BASE (V4L2_CID_USER_BASE + 0x1050)
/* The base for the saa7134 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_SAA7134_BASE (V4L2_CID_USER_BASE + 0x1060)
/* The base for the adv7180 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_ADV7180_BASE (V4L2_CID_USER_BASE + 0x1070)
/* The base for the tc358743 driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_TC358743_BASE (V4L2_CID_USER_BASE + 0x1080)
/* The base for the max217x driver controls.
* We reserve 32 controls for this driver
*/
#define V4L2_CID_USER_MAX217X_BASE (V4L2_CID_USER_BASE + 0x1090)
/* The base for the imx driver controls.
* We reserve 16 controls for this driver. */
#define V4L2_CID_USER_IMX_BASE (V4L2_CID_USER_BASE + 0x1090)
/* MPEG-class control IDs */
/* The MPEG controls are applicable to all codec controls
* and the 'MPEG' part of the define is historical */
#define V4L2_CID_MPEG_BASE (V4L2_CTRL_CLASS_MPEG | 0x900)
#define V4L2_CID_MPEG_CLASS (V4L2_CTRL_CLASS_MPEG | 1)
/* MPEG streams, specific to multiplexed streams */
#define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_MPEG_BASE+0)
enum v4l2_mpeg_stream_type {
V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, /* MPEG-2 program stream */
V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, /* MPEG-2 transport stream */
V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, /* MPEG-1 system stream */
V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, /* MPEG-2 DVD-compatible stream */
V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, /* MPEG-1 VCD-compatible stream */
V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, /* MPEG-2 SVCD-compatible stream */
};
#define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_MPEG_BASE+1)
#define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_MPEG_BASE+2)
#define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_MPEG_BASE+3)
#define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_MPEG_BASE+4)
#define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_MPEG_BASE+5)
#define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_MPEG_BASE+6)
#define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_MPEG_BASE+7)
enum v4l2_mpeg_stream_vbi_fmt {
V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, /* No VBI in the MPEG stream */
V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, /* VBI in private packets, IVTV format */
};
/* MPEG audio controls specific to multiplexed streams */
#define V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_MPEG_BASE+100)
enum v4l2_mpeg_audio_sampling_freq {
V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0,
V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1,
V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2,
};
#define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_MPEG_BASE+101)
enum v4l2_mpeg_audio_encoding {
V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0,
V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1,
V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2,
V4L2_MPEG_AUDIO_ENCODING_AAC = 3,
V4L2_MPEG_AUDIO_ENCODING_AC3 = 4,
};
#define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_MPEG_BASE+102)
enum v4l2_mpeg_audio_l1_bitrate {
V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1,
V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2,
V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3,
V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4,
V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5,
V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6,
V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7,
V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8,
V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9,
V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10,
V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11,
V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12,
V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13,
};
#define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_MPEG_BASE+103)
enum v4l2_mpeg_audio_l2_bitrate {
V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1,
V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2,
V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3,
V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4,
V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5,
V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6,
V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7,
V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8,
V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9,
V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10,
V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11,
V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12,
V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13,
};
#define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_MPEG_BASE+104)
enum v4l2_mpeg_audio_l3_bitrate {
V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1,
V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2,
V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3,
V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4,
V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5,
V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6,
V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7,
V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8,
V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9,
V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10,
V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11,
V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12,
V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13,
};
#define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_MPEG_BASE+105)
enum v4l2_mpeg_audio_mode {
V4L2_MPEG_AUDIO_MODE_STEREO = 0,
V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1,
V4L2_MPEG_AUDIO_MODE_DUAL = 2,
V4L2_MPEG_AUDIO_MODE_MONO = 3,
};
#define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_MPEG_BASE+106)
enum v4l2_mpeg_audio_mode_extension {
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0,
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1,
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2,
V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3,
};
#define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_MPEG_BASE+107)
enum v4l2_mpeg_audio_emphasis {
V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0,
V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1,
V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2,
};
#define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_MPEG_BASE+108)
enum v4l2_mpeg_audio_crc {
V4L2_MPEG_AUDIO_CRC_NONE = 0,
V4L2_MPEG_AUDIO_CRC_CRC16 = 1,
};
#define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_MPEG_BASE+109)
#define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_MPEG_BASE+110)
#define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_MPEG_BASE+111)
enum v4l2_mpeg_audio_ac3_bitrate {
V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0,
V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1,
V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2,
V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3,
V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4,
V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5,
V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6,
V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7,
V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8,
V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9,
V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10,
V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11,
V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12,
V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13,
V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14,
V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15,
V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16,
V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17,
V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18,
};
#define V4L2_CID_MPEG_AUDIO_DEC_PLAYBACK (V4L2_CID_MPEG_BASE+112)
enum v4l2_mpeg_audio_dec_playback {
V4L2_MPEG_AUDIO_DEC_PLAYBACK_AUTO = 0,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_STEREO = 1,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_LEFT = 2,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_RIGHT = 3,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_MONO = 4,
V4L2_MPEG_AUDIO_DEC_PLAYBACK_SWAPPED_STEREO = 5,
};
#define V4L2_CID_MPEG_AUDIO_DEC_MULTILINGUAL_PLAYBACK (V4L2_CID_MPEG_BASE+113)
/* MPEG video controls specific to multiplexed streams */
#define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_MPEG_BASE+200)
enum v4l2_mpeg_video_encoding {
V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0,
V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1,
V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2,
};
#define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_MPEG_BASE+201)
enum v4l2_mpeg_video_aspect {
V4L2_MPEG_VIDEO_ASPECT_1x1 = 0,
V4L2_MPEG_VIDEO_ASPECT_4x3 = 1,
V4L2_MPEG_VIDEO_ASPECT_16x9 = 2,
V4L2_MPEG_VIDEO_ASPECT_221x100 = 3,
};
#define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_MPEG_BASE+202)
#define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_MPEG_BASE+203)
#define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_MPEG_BASE+204)
#define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_MPEG_BASE+205)
#define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_MPEG_BASE+206)
enum v4l2_mpeg_video_bitrate_mode {
V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0,
V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1,
};
#define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_MPEG_BASE+207)
#define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_MPEG_BASE+208)
#define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_MPEG_BASE+209)
#define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_MPEG_BASE+210)
#define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_MPEG_BASE+211)
#define V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE (V4L2_CID_MPEG_BASE+212)
#define V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER (V4L2_CID_MPEG_BASE+213)
#define V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB (V4L2_CID_MPEG_BASE+214)
#define V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE (V4L2_CID_MPEG_BASE+215)
#define V4L2_CID_MPEG_VIDEO_HEADER_MODE (V4L2_CID_MPEG_BASE+216)
enum v4l2_mpeg_video_header_mode {
V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE = 0,
V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME = 1,
};
#define V4L2_CID_MPEG_VIDEO_MAX_REF_PIC (V4L2_CID_MPEG_BASE+217)
#define V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE (V4L2_CID_MPEG_BASE+218)
#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES (V4L2_CID_MPEG_BASE+219)
#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB (V4L2_CID_MPEG_BASE+220)
#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE (V4L2_CID_MPEG_BASE+221)
enum v4l2_mpeg_video_multi_slice_mode {
V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_SINGLE = 0,
V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB = 1,
V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES = 2,
};
#define V4L2_CID_MPEG_VIDEO_VBV_SIZE (V4L2_CID_MPEG_BASE+222)
#define V4L2_CID_MPEG_VIDEO_DEC_PTS (V4L2_CID_MPEG_BASE+223)
#define V4L2_CID_MPEG_VIDEO_DEC_FRAME (V4L2_CID_MPEG_BASE+224)
#define V4L2_CID_MPEG_VIDEO_VBV_DELAY (V4L2_CID_MPEG_BASE+225)
#define V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER (V4L2_CID_MPEG_BASE+226)
#define V4L2_CID_MPEG_VIDEO_MV_H_SEARCH_RANGE (V4L2_CID_MPEG_BASE+227)
#define V4L2_CID_MPEG_VIDEO_MV_V_SEARCH_RANGE (V4L2_CID_MPEG_BASE+228)
#define V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME (V4L2_CID_MPEG_BASE+229)
#define V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP (V4L2_CID_MPEG_BASE+300)
#define V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP (V4L2_CID_MPEG_BASE+301)
#define V4L2_CID_MPEG_VIDEO_H263_B_FRAME_QP (V4L2_CID_MPEG_BASE+302)
#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP (V4L2_CID_MPEG_BASE+303)
#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP (V4L2_CID_MPEG_BASE+304)
#define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP (V4L2_CID_MPEG_BASE+350)
#define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP (V4L2_CID_MPEG_BASE+351)
#define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP (V4L2_CID_MPEG_BASE+352)
#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP (V4L2_CID_MPEG_BASE+353)
#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP (V4L2_CID_MPEG_BASE+354)
#define V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM (V4L2_CID_MPEG_BASE+355)
#define V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE (V4L2_CID_MPEG_BASE+356)
#define V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE (V4L2_CID_MPEG_BASE+357)
enum v4l2_mpeg_video_h264_entropy_mode {
V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC = 0,
V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC = 1,
};
#define V4L2_CID_MPEG_VIDEO_H264_I_PERIOD (V4L2_CID_MPEG_BASE+358)
#define V4L2_CID_MPEG_VIDEO_H264_LEVEL (V4L2_CID_MPEG_BASE+359)
enum v4l2_mpeg_video_h264_level {
V4L2_MPEG_VIDEO_H264_LEVEL_1_0 = 0,
V4L2_MPEG_VIDEO_H264_LEVEL_1B = 1,
V4L2_MPEG_VIDEO_H264_LEVEL_1_1 = 2,
V4L2_MPEG_VIDEO_H264_LEVEL_1_2 = 3,
V4L2_MPEG_VIDEO_H264_LEVEL_1_3 = 4,
V4L2_MPEG_VIDEO_H264_LEVEL_2_0 = 5,
V4L2_MPEG_VIDEO_H264_LEVEL_2_1 = 6,
V4L2_MPEG_VIDEO_H264_LEVEL_2_2 = 7,
V4L2_MPEG_VIDEO_H264_LEVEL_3_0 = 8,
V4L2_MPEG_VIDEO_H264_LEVEL_3_1 = 9,
V4L2_MPEG_VIDEO_H264_LEVEL_3_2 = 10,
V4L2_MPEG_VIDEO_H264_LEVEL_4_0 = 11,
V4L2_MPEG_VIDEO_H264_LEVEL_4_1 = 12,
V4L2_MPEG_VIDEO_H264_LEVEL_4_2 = 13,
V4L2_MPEG_VIDEO_H264_LEVEL_5_0 = 14,
V4L2_MPEG_VIDEO_H264_LEVEL_5_1 = 15,
};
#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA (V4L2_CID_MPEG_BASE+360)
#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA (V4L2_CID_MPEG_BASE+361)
#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE (V4L2_CID_MPEG_BASE+362)
enum v4l2_mpeg_video_h264_loop_filter_mode {
V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_ENABLED = 0,
V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED = 1,
V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2,
};
#define V4L2_CID_MPEG_VIDEO_H264_PROFILE (V4L2_CID_MPEG_BASE+363)
enum v4l2_mpeg_video_h264_profile {
V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE = 0,
V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE = 1,
V4L2_MPEG_VIDEO_H264_PROFILE_MAIN = 2,
V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED = 3,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH = 4,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10 = 5,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422 = 6,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE = 7,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA = 8,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA = 9,
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA = 10,
V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA = 11,
V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE = 12,
V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH = 13,
V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA = 14,
V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH = 15,
V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH = 16,
};
#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT (V4L2_CID_MPEG_BASE+364)
#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH (V4L2_CID_MPEG_BASE+365)
#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE (V4L2_CID_MPEG_BASE+366)
#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC (V4L2_CID_MPEG_BASE+367)
enum v4l2_mpeg_video_h264_vui_sar_idc {
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_UNSPECIFIED = 0,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1 = 1,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_12x11 = 2,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_10x11 = 3,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_16x11 = 4,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_40x33 = 5,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_24x11 = 6,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_20x11 = 7,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_32x11 = 8,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_80x33 = 9,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_18x11 = 10,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_15x11 = 11,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_64x33 = 12,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_160x99 = 13,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_4x3 = 14,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_3x2 = 15,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_2x1 = 16,
V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_EXTENDED = 17,
};
#define V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING (V4L2_CID_MPEG_BASE+368)
#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0 (V4L2_CID_MPEG_BASE+369)
#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE (V4L2_CID_MPEG_BASE+370)
enum v4l2_mpeg_video_h264_sei_fp_arrangement_type {
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_CHECKERBOARD = 0,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_COLUMN = 1,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_ROW = 2,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_SIDE_BY_SIDE = 3,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM = 4,
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TEMPORAL = 5,
};
#define V4L2_CID_MPEG_VIDEO_H264_FMO (V4L2_CID_MPEG_BASE+371)
#define V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE (V4L2_CID_MPEG_BASE+372)
enum v4l2_mpeg_video_h264_fmo_map_type {
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES = 0,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES = 1,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_FOREGROUND_WITH_LEFT_OVER = 2,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_BOX_OUT = 3,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN = 4,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN = 5,
V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_EXPLICIT = 6,
};
#define V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP (V4L2_CID_MPEG_BASE+373)
#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION (V4L2_CID_MPEG_BASE+374)
enum v4l2_mpeg_video_h264_fmo_change_dir {
V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT = 0,
V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_LEFT = 1,
};
#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE (V4L2_CID_MPEG_BASE+375)
#define V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH (V4L2_CID_MPEG_BASE+376)
#define V4L2_CID_MPEG_VIDEO_H264_ASO (V4L2_CID_MPEG_BASE+377)
#define V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER (V4L2_CID_MPEG_BASE+378)
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING (V4L2_CID_MPEG_BASE+379)
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_BASE+380)
enum v4l2_mpeg_video_h264_hierarchical_coding_type {
V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B = 0,
V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P = 1,
};
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_BASE+381)
#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_BASE+382)
#define V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP (V4L2_CID_MPEG_BASE+400)
#define V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP (V4L2_CID_MPEG_BASE+401)
#define V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP (V4L2_CID_MPEG_BASE+402)
#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP (V4L2_CID_MPEG_BASE+403)
#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP (V4L2_CID_MPEG_BASE+404)
#define V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL (V4L2_CID_MPEG_BASE+405)
enum v4l2_mpeg_video_mpeg4_level {
V4L2_MPEG_VIDEO_MPEG4_LEVEL_0 = 0,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_0B = 1,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_1 = 2,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_2 = 3,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_3 = 4,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_3B = 5,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_4 = 6,
V4L2_MPEG_VIDEO_MPEG4_LEVEL_5 = 7,
};
#define V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE (V4L2_CID_MPEG_BASE+406)
enum v4l2_mpeg_video_mpeg4_profile {
V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE = 0,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_SIMPLE = 1,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_CORE = 2,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE_SCALABLE = 3,
V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY = 4,
};
#define V4L2_CID_MPEG_VIDEO_MPEG4_QPEL (V4L2_CID_MPEG_BASE+407)
/* Control IDs for VP8 streams
* Although VP8 is not part of MPEG we add these controls to the MPEG class
* as that class is already handling other video compression standards
*/
#define V4L2_CID_MPEG_VIDEO_VPX_NUM_PARTITIONS (V4L2_CID_MPEG_BASE+500)
enum v4l2_vp8_num_partitions {
V4L2_CID_MPEG_VIDEO_VPX_1_PARTITION = 0,
V4L2_CID_MPEG_VIDEO_VPX_2_PARTITIONS = 1,
V4L2_CID_MPEG_VIDEO_VPX_4_PARTITIONS = 2,
V4L2_CID_MPEG_VIDEO_VPX_8_PARTITIONS = 3,
};
#define V4L2_CID_MPEG_VIDEO_VPX_IMD_DISABLE_4X4 (V4L2_CID_MPEG_BASE+501)
#define V4L2_CID_MPEG_VIDEO_VPX_NUM_REF_FRAMES (V4L2_CID_MPEG_BASE+502)
enum v4l2_vp8_num_ref_frames {
V4L2_CID_MPEG_VIDEO_VPX_1_REF_FRAME = 0,
V4L2_CID_MPEG_VIDEO_VPX_2_REF_FRAME = 1,
V4L2_CID_MPEG_VIDEO_VPX_3_REF_FRAME = 2,
};
#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_LEVEL (V4L2_CID_MPEG_BASE+503)
#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_SHARPNESS (V4L2_CID_MPEG_BASE+504)
#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_REF_PERIOD (V4L2_CID_MPEG_BASE+505)
#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_SEL (V4L2_CID_MPEG_BASE+506)
enum v4l2_vp8_golden_frame_sel {
V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_PREV = 0,
V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_REF_PERIOD = 1,
};
#define V4L2_CID_MPEG_VIDEO_VPX_MIN_QP (V4L2_CID_MPEG_BASE+507)
#define V4L2_CID_MPEG_VIDEO_VPX_MAX_QP (V4L2_CID_MPEG_BASE+508)
#define V4L2_CID_MPEG_VIDEO_VPX_I_FRAME_QP (V4L2_CID_MPEG_BASE+509)
#define V4L2_CID_MPEG_VIDEO_VPX_P_FRAME_QP (V4L2_CID_MPEG_BASE+510)
#define V4L2_CID_MPEG_VIDEO_VPX_PROFILE (V4L2_CID_MPEG_BASE+511)
/* MPEG-class control IDs specific to the CX2341x driver as defined by V4L2 */
#define V4L2_CID_MPEG_CX2341X_BASE (V4L2_CTRL_CLASS_MPEG | 0x1000)
#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+0)
enum v4l2_mpeg_cx2341x_video_spatial_filter_mode {
V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0,
V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+1)
#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+2)
enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type {
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3,
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+3)
enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type {
V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0,
V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+4)
enum v4l2_mpeg_cx2341x_video_temporal_filter_mode {
V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0,
V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+5)
#define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+6)
enum v4l2_mpeg_cx2341x_video_median_filter_type {
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3,
V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4,
};
#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+7)
#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+8)
#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+9)
#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+10)
#define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_MPEG_CX2341X_BASE+11)
/* MPEG-class control IDs specific to the Samsung MFC 5.1 driver as defined by V4L2 */
#define V4L2_CID_MPEG_MFC51_BASE (V4L2_CTRL_CLASS_MPEG | 0x1100)
#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY (V4L2_CID_MPEG_MFC51_BASE+0)
#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY_ENABLE (V4L2_CID_MPEG_MFC51_BASE+1)
#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE (V4L2_CID_MPEG_MFC51_BASE+2)
enum v4l2_mpeg_mfc51_video_frame_skip_mode {
V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED = 0,
V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1,
V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2,
};
#define V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE (V4L2_CID_MPEG_MFC51_BASE+3)
enum v4l2_mpeg_mfc51_video_force_frame_type {
V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_DISABLED = 0,
V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME = 1,
V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_NOT_CODED = 2,
};
#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING (V4L2_CID_MPEG_MFC51_BASE+4)
#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV (V4L2_CID_MPEG_MFC51_BASE+5)
#define V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT (V4L2_CID_MPEG_MFC51_BASE+6)
#define V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF (V4L2_CID_MPEG_MFC51_BASE+7)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC51_BASE+50)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC51_BASE+51)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC51_BASE+52)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC51_BASE+53)
#define V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P (V4L2_CID_MPEG_MFC51_BASE+54)
/* Camera class control IDs */
#define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900)
#define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1)
#define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE+1)
enum v4l2_exposure_auto_type {
V4L2_EXPOSURE_AUTO = 0,
V4L2_EXPOSURE_MANUAL = 1,
V4L2_EXPOSURE_SHUTTER_PRIORITY = 2,
V4L2_EXPOSURE_APERTURE_PRIORITY = 3
};
#define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+2)
#define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE+3)
#define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+4)
#define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+5)
#define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE+6)
#define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE+7)
#define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+8)
#define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+9)
#define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+10)
#define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+11)
#define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE+12)
#define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+13)
#define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+14)
#define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE+15)
#define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE+16)
#define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+17)
#define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+18)
#define V4L2_CID_AUTO_EXPOSURE_BIAS (V4L2_CID_CAMERA_CLASS_BASE+19)
#define V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE (V4L2_CID_CAMERA_CLASS_BASE+20)
enum v4l2_auto_n_preset_white_balance {
V4L2_WHITE_BALANCE_MANUAL = 0,
V4L2_WHITE_BALANCE_AUTO = 1,
V4L2_WHITE_BALANCE_INCANDESCENT = 2,
V4L2_WHITE_BALANCE_FLUORESCENT = 3,
V4L2_WHITE_BALANCE_FLUORESCENT_H = 4,
V4L2_WHITE_BALANCE_HORIZON = 5,
V4L2_WHITE_BALANCE_DAYLIGHT = 6,
V4L2_WHITE_BALANCE_FLASH = 7,
V4L2_WHITE_BALANCE_CLOUDY = 8,
V4L2_WHITE_BALANCE_SHADE = 9,
};
#define V4L2_CID_WIDE_DYNAMIC_RANGE (V4L2_CID_CAMERA_CLASS_BASE+21)
#define V4L2_CID_IMAGE_STABILIZATION (V4L2_CID_CAMERA_CLASS_BASE+22)
#define V4L2_CID_ISO_SENSITIVITY (V4L2_CID_CAMERA_CLASS_BASE+23)
#define V4L2_CID_ISO_SENSITIVITY_AUTO (V4L2_CID_CAMERA_CLASS_BASE+24)
enum v4l2_iso_sensitivity_auto_type {
V4L2_ISO_SENSITIVITY_MANUAL = 0,
V4L2_ISO_SENSITIVITY_AUTO = 1,
};
#define V4L2_CID_EXPOSURE_METERING (V4L2_CID_CAMERA_CLASS_BASE+25)
enum v4l2_exposure_metering {
V4L2_EXPOSURE_METERING_AVERAGE = 0,
V4L2_EXPOSURE_METERING_CENTER_WEIGHTED = 1,
V4L2_EXPOSURE_METERING_SPOT = 2,
V4L2_EXPOSURE_METERING_MATRIX = 3,
};
#define V4L2_CID_SCENE_MODE (V4L2_CID_CAMERA_CLASS_BASE+26)
enum v4l2_scene_mode {
V4L2_SCENE_MODE_NONE = 0,
V4L2_SCENE_MODE_BACKLIGHT = 1,
V4L2_SCENE_MODE_BEACH_SNOW = 2,
V4L2_SCENE_MODE_CANDLE_LIGHT = 3,
V4L2_SCENE_MODE_DAWN_DUSK = 4,
V4L2_SCENE_MODE_FALL_COLORS = 5,
V4L2_SCENE_MODE_FIREWORKS = 6,
V4L2_SCENE_MODE_LANDSCAPE = 7,
V4L2_SCENE_MODE_NIGHT = 8,
V4L2_SCENE_MODE_PARTY_INDOOR = 9,
V4L2_SCENE_MODE_PORTRAIT = 10,
V4L2_SCENE_MODE_SPORTS = 11,
V4L2_SCENE_MODE_SUNSET = 12,
V4L2_SCENE_MODE_TEXT = 13,
};
#define V4L2_CID_3A_LOCK (V4L2_CID_CAMERA_CLASS_BASE+27)
#define V4L2_LOCK_EXPOSURE (1 << 0)
#define V4L2_LOCK_WHITE_BALANCE (1 << 1)
#define V4L2_LOCK_FOCUS (1 << 2)
#define V4L2_CID_AUTO_FOCUS_START (V4L2_CID_CAMERA_CLASS_BASE+28)
#define V4L2_CID_AUTO_FOCUS_STOP (V4L2_CID_CAMERA_CLASS_BASE+29)
#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30)
#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0)
#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0)
#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1)
#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2)
#define V4L2_CID_AUTO_FOCUS_RANGE (V4L2_CID_CAMERA_CLASS_BASE+31)
enum v4l2_auto_focus_range {
V4L2_AUTO_FOCUS_RANGE_AUTO = 0,
V4L2_AUTO_FOCUS_RANGE_NORMAL = 1,
V4L2_AUTO_FOCUS_RANGE_MACRO = 2,
V4L2_AUTO_FOCUS_RANGE_INFINITY = 3,
};
#define V4L2_CID_PAN_SPEED (V4L2_CID_CAMERA_CLASS_BASE+32)
#define V4L2_CID_TILT_SPEED (V4L2_CID_CAMERA_CLASS_BASE+33)
/* FM Modulator class control IDs */
#define V4L2_CID_FM_TX_CLASS_BASE (V4L2_CTRL_CLASS_FM_TX | 0x900)
#define V4L2_CID_FM_TX_CLASS (V4L2_CTRL_CLASS_FM_TX | 1)
#define V4L2_CID_RDS_TX_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 1)
#define V4L2_CID_RDS_TX_PI (V4L2_CID_FM_TX_CLASS_BASE + 2)
#define V4L2_CID_RDS_TX_PTY (V4L2_CID_FM_TX_CLASS_BASE + 3)
#define V4L2_CID_RDS_TX_PS_NAME (V4L2_CID_FM_TX_CLASS_BASE + 5)
#define V4L2_CID_RDS_TX_RADIO_TEXT (V4L2_CID_FM_TX_CLASS_BASE + 6)
#define V4L2_CID_RDS_TX_MONO_STEREO (V4L2_CID_FM_TX_CLASS_BASE + 7)
#define V4L2_CID_RDS_TX_ARTIFICIAL_HEAD (V4L2_CID_FM_TX_CLASS_BASE + 8)
#define V4L2_CID_RDS_TX_COMPRESSED (V4L2_CID_FM_TX_CLASS_BASE + 9)
#define V4L2_CID_RDS_TX_DYNAMIC_PTY (V4L2_CID_FM_TX_CLASS_BASE + 10)
#define V4L2_CID_RDS_TX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_TX_CLASS_BASE + 11)
#define V4L2_CID_RDS_TX_TRAFFIC_PROGRAM (V4L2_CID_FM_TX_CLASS_BASE + 12)
#define V4L2_CID_RDS_TX_MUSIC_SPEECH (V4L2_CID_FM_TX_CLASS_BASE + 13)
#define V4L2_CID_RDS_TX_ALT_FREQS_ENABLE (V4L2_CID_FM_TX_CLASS_BASE + 14)
#define V4L2_CID_RDS_TX_ALT_FREQS (V4L2_CID_FM_TX_CLASS_BASE + 15)
#define V4L2_CID_AUDIO_LIMITER_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 64)
#define V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 65)
#define V4L2_CID_AUDIO_LIMITER_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 66)
#define V4L2_CID_AUDIO_COMPRESSION_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 80)
#define V4L2_CID_AUDIO_COMPRESSION_GAIN (V4L2_CID_FM_TX_CLASS_BASE + 81)
#define V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (V4L2_CID_FM_TX_CLASS_BASE + 82)
#define V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (V4L2_CID_FM_TX_CLASS_BASE + 83)
#define V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 84)
#define V4L2_CID_PILOT_TONE_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 96)
#define V4L2_CID_PILOT_TONE_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 97)
#define V4L2_CID_PILOT_TONE_FREQUENCY (V4L2_CID_FM_TX_CLASS_BASE + 98)
#define V4L2_CID_TUNE_PREEMPHASIS (V4L2_CID_FM_TX_CLASS_BASE + 112)
enum v4l2_preemphasis {
V4L2_PREEMPHASIS_DISABLED = 0,
V4L2_PREEMPHASIS_50_uS = 1,
V4L2_PREEMPHASIS_75_uS = 2,
};
#define V4L2_CID_TUNE_POWER_LEVEL (V4L2_CID_FM_TX_CLASS_BASE + 113)
#define V4L2_CID_TUNE_ANTENNA_CAPACITOR (V4L2_CID_FM_TX_CLASS_BASE + 114)
/* Flash and privacy (indicator) light controls */
#define V4L2_CID_FLASH_CLASS_BASE (V4L2_CTRL_CLASS_FLASH | 0x900)
#define V4L2_CID_FLASH_CLASS (V4L2_CTRL_CLASS_FLASH | 1)
#define V4L2_CID_FLASH_LED_MODE (V4L2_CID_FLASH_CLASS_BASE + 1)
enum v4l2_flash_led_mode {
V4L2_FLASH_LED_MODE_NONE,
V4L2_FLASH_LED_MODE_FLASH,
V4L2_FLASH_LED_MODE_TORCH,
};
#define V4L2_CID_FLASH_STROBE_SOURCE (V4L2_CID_FLASH_CLASS_BASE + 2)
enum v4l2_flash_strobe_source {
V4L2_FLASH_STROBE_SOURCE_SOFTWARE,
V4L2_FLASH_STROBE_SOURCE_EXTERNAL,
};
#define V4L2_CID_FLASH_STROBE (V4L2_CID_FLASH_CLASS_BASE + 3)
#define V4L2_CID_FLASH_STROBE_STOP (V4L2_CID_FLASH_CLASS_BASE + 4)
#define V4L2_CID_FLASH_STROBE_STATUS (V4L2_CID_FLASH_CLASS_BASE + 5)
#define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_FLASH_CLASS_BASE + 6)
#define V4L2_CID_FLASH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 7)
#define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 8)
#define V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 9)
#define V4L2_CID_FLASH_FAULT (V4L2_CID_FLASH_CLASS_BASE + 10)
#define V4L2_FLASH_FAULT_OVER_VOLTAGE (1 << 0)
#define V4L2_FLASH_FAULT_TIMEOUT (1 << 1)
#define V4L2_FLASH_FAULT_OVER_TEMPERATURE (1 << 2)
#define V4L2_FLASH_FAULT_SHORT_CIRCUIT (1 << 3)
#define V4L2_FLASH_FAULT_OVER_CURRENT (1 << 4)
#define V4L2_FLASH_FAULT_INDICATOR (1 << 5)
#define V4L2_FLASH_FAULT_UNDER_VOLTAGE (1 << 6)
#define V4L2_FLASH_FAULT_INPUT_VOLTAGE (1 << 7)
#define V4L2_FLASH_FAULT_LED_OVER_TEMPERATURE (1 << 8)
#define V4L2_CID_FLASH_CHARGE (V4L2_CID_FLASH_CLASS_BASE + 11)
#define V4L2_CID_FLASH_READY (V4L2_CID_FLASH_CLASS_BASE + 12)
/* JPEG-class control IDs */
#define V4L2_CID_JPEG_CLASS_BASE (V4L2_CTRL_CLASS_JPEG | 0x900)
#define V4L2_CID_JPEG_CLASS (V4L2_CTRL_CLASS_JPEG | 1)
#define V4L2_CID_JPEG_CHROMA_SUBSAMPLING (V4L2_CID_JPEG_CLASS_BASE + 1)
enum v4l2_jpeg_chroma_subsampling {
V4L2_JPEG_CHROMA_SUBSAMPLING_444 = 0,
V4L2_JPEG_CHROMA_SUBSAMPLING_422 = 1,
V4L2_JPEG_CHROMA_SUBSAMPLING_420 = 2,
V4L2_JPEG_CHROMA_SUBSAMPLING_411 = 3,
V4L2_JPEG_CHROMA_SUBSAMPLING_410 = 4,
V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY = 5,
};
#define V4L2_CID_JPEG_RESTART_INTERVAL (V4L2_CID_JPEG_CLASS_BASE + 2)
#define V4L2_CID_JPEG_COMPRESSION_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 3)
#define V4L2_CID_JPEG_ACTIVE_MARKER (V4L2_CID_JPEG_CLASS_BASE + 4)
#define V4L2_JPEG_ACTIVE_MARKER_APP0 (1 << 0)
#define V4L2_JPEG_ACTIVE_MARKER_APP1 (1 << 1)
#define V4L2_JPEG_ACTIVE_MARKER_COM (1 << 16)
#define V4L2_JPEG_ACTIVE_MARKER_DQT (1 << 17)
#define V4L2_JPEG_ACTIVE_MARKER_DHT (1 << 18)
/* Image source controls */
#define V4L2_CID_IMAGE_SOURCE_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_SOURCE | 0x900)
#define V4L2_CID_IMAGE_SOURCE_CLASS (V4L2_CTRL_CLASS_IMAGE_SOURCE | 1)
#define V4L2_CID_VBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 1)
#define V4L2_CID_HBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 2)
#define V4L2_CID_ANALOGUE_GAIN (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 3)
#define V4L2_CID_TEST_PATTERN_RED (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 4)
#define V4L2_CID_TEST_PATTERN_GREENR (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 5)
#define V4L2_CID_TEST_PATTERN_BLUE (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 6)
#define V4L2_CID_TEST_PATTERN_GREENB (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 7)
/* Image processing controls */
#define V4L2_CID_IMAGE_PROC_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_PROC | 0x900)
#define V4L2_CID_IMAGE_PROC_CLASS (V4L2_CTRL_CLASS_IMAGE_PROC | 1)
#define V4L2_CID_LINK_FREQ (V4L2_CID_IMAGE_PROC_CLASS_BASE + 1)
#define V4L2_CID_PIXEL_RATE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 2)
#define V4L2_CID_TEST_PATTERN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 3)
#define V4L2_CID_DEINTERLACING_MODE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 4)
#define V4L2_CID_DIGITAL_GAIN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 5)
/* DV-class control IDs defined by V4L2 */
#define V4L2_CID_DV_CLASS_BASE (V4L2_CTRL_CLASS_DV | 0x900)
#define V4L2_CID_DV_CLASS (V4L2_CTRL_CLASS_DV | 1)
#define V4L2_CID_DV_TX_HOTPLUG (V4L2_CID_DV_CLASS_BASE + 1)
#define V4L2_CID_DV_TX_RXSENSE (V4L2_CID_DV_CLASS_BASE + 2)
#define V4L2_CID_DV_TX_EDID_PRESENT (V4L2_CID_DV_CLASS_BASE + 3)
#define V4L2_CID_DV_TX_MODE (V4L2_CID_DV_CLASS_BASE + 4)
enum v4l2_dv_tx_mode {
V4L2_DV_TX_MODE_DVI_D = 0,
V4L2_DV_TX_MODE_HDMI = 1,
};
#define V4L2_CID_DV_TX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 5)
enum v4l2_dv_rgb_range {
V4L2_DV_RGB_RANGE_AUTO = 0,
V4L2_DV_RGB_RANGE_LIMITED = 1,
V4L2_DV_RGB_RANGE_FULL = 2,
};
#define V4L2_CID_DV_TX_IT_CONTENT_TYPE (V4L2_CID_DV_CLASS_BASE + 6)
enum v4l2_dv_it_content_type {
V4L2_DV_IT_CONTENT_TYPE_GRAPHICS = 0,
V4L2_DV_IT_CONTENT_TYPE_PHOTO = 1,
V4L2_DV_IT_CONTENT_TYPE_CINEMA = 2,
V4L2_DV_IT_CONTENT_TYPE_GAME = 3,
V4L2_DV_IT_CONTENT_TYPE_NO_ITC = 4,
};
#define V4L2_CID_DV_RX_POWER_PRESENT (V4L2_CID_DV_CLASS_BASE + 100)
#define V4L2_CID_DV_RX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 101)
#define V4L2_CID_DV_RX_IT_CONTENT_TYPE (V4L2_CID_DV_CLASS_BASE + 102)
#define V4L2_CID_FM_RX_CLASS_BASE (V4L2_CTRL_CLASS_FM_RX | 0x900)
#define V4L2_CID_FM_RX_CLASS (V4L2_CTRL_CLASS_FM_RX | 1)
#define V4L2_CID_TUNE_DEEMPHASIS (V4L2_CID_FM_RX_CLASS_BASE + 1)
enum v4l2_deemphasis {
V4L2_DEEMPHASIS_DISABLED = V4L2_PREEMPHASIS_DISABLED,
V4L2_DEEMPHASIS_50_uS = V4L2_PREEMPHASIS_50_uS,
V4L2_DEEMPHASIS_75_uS = V4L2_PREEMPHASIS_75_uS,
};
#define V4L2_CID_RDS_RECEPTION (V4L2_CID_FM_RX_CLASS_BASE + 2)
#define V4L2_CID_RDS_RX_PTY (V4L2_CID_FM_RX_CLASS_BASE + 3)
#define V4L2_CID_RDS_RX_PS_NAME (V4L2_CID_FM_RX_CLASS_BASE + 4)
#define V4L2_CID_RDS_RX_RADIO_TEXT (V4L2_CID_FM_RX_CLASS_BASE + 5)
#define V4L2_CID_RDS_RX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_RX_CLASS_BASE + 6)
#define V4L2_CID_RDS_RX_TRAFFIC_PROGRAM (V4L2_CID_FM_RX_CLASS_BASE + 7)
#define V4L2_CID_RDS_RX_MUSIC_SPEECH (V4L2_CID_FM_RX_CLASS_BASE + 8)
#define V4L2_CID_RF_TUNER_CLASS_BASE (V4L2_CTRL_CLASS_RF_TUNER | 0x900)
#define V4L2_CID_RF_TUNER_CLASS (V4L2_CTRL_CLASS_RF_TUNER | 1)
#define V4L2_CID_RF_TUNER_BANDWIDTH_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 11)
#define V4L2_CID_RF_TUNER_BANDWIDTH (V4L2_CID_RF_TUNER_CLASS_BASE + 12)
#define V4L2_CID_RF_TUNER_RF_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 32)
#define V4L2_CID_RF_TUNER_LNA_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 41)
#define V4L2_CID_RF_TUNER_LNA_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 42)
#define V4L2_CID_RF_TUNER_MIXER_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 51)
#define V4L2_CID_RF_TUNER_MIXER_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 52)
#define V4L2_CID_RF_TUNER_IF_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 61)
#define V4L2_CID_RF_TUNER_IF_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 62)
#define V4L2_CID_RF_TUNER_PLL_LOCK (V4L2_CID_RF_TUNER_CLASS_BASE + 91)
/* Detection-class control IDs defined by V4L2 */
#define V4L2_CID_DETECT_CLASS_BASE (V4L2_CTRL_CLASS_DETECT | 0x900)
#define V4L2_CID_DETECT_CLASS (V4L2_CTRL_CLASS_DETECT | 1)
#define V4L2_CID_DETECT_MD_MODE (V4L2_CID_DETECT_CLASS_BASE + 1)
enum v4l2_detect_md_mode {
V4L2_DETECT_MD_MODE_DISABLED = 0,
V4L2_DETECT_MD_MODE_GLOBAL = 1,
V4L2_DETECT_MD_MODE_THRESHOLD_GRID = 2,
V4L2_DETECT_MD_MODE_REGION_GRID = 3,
};
#define V4L2_CID_DETECT_MD_GLOBAL_THRESHOLD (V4L2_CID_DETECT_CLASS_BASE + 2)
#define V4L2_CID_DETECT_MD_THRESHOLD_GRID (V4L2_CID_DETECT_CLASS_BASE + 3)
#define V4L2_CID_DETECT_MD_REGION_GRID (V4L2_CID_DETECT_CLASS_BASE + 4)
#endif
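
The control IDs above are consumed through the standard V4L2 control ioctls. A minimal sketch, assuming an already-open encoder fd whose driver actually exposes the control (illustrative only, not part of this commit):

#include <linux/videodev2.h>
#include <sys/ioctl.h>

/* Illustrative: set the target bitrate on an encoder fd using one of the
 * codec-class control IDs defined above. Assumes the driver supports it. */
static int set_target_bitrate (int fd, int bits_per_second)
{
    struct v4l2_control ctrl = {
        .id = V4L2_CID_MPEG_VIDEO_BITRATE,
        .value = bits_per_second,
    };
    return ioctl (fd, VIDIOC_S_CTRL, &ctrl);
}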

gst-v4l2/ext/videodev2.h (new file, 2410 lines; diff suppressed because it is too large)

gst-v4l2/gst/gettext.h (new file, 69 lines)

@@ -0,0 +1,69 @@
/* Convenience header for conditional use of GNU <libintl.h>.
Copyright (C) 1995-1998, 2000-2002 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU Library General Public License as published
by the Free Software Foundation; either version 2, or (at your option)
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
USA. */
#ifndef _LIBGETTEXT_H
#define _LIBGETTEXT_H 1
/* NLS can be disabled through the configure --disable-nls option. */
#ifdef ENABLE_NLS
/* Get declarations of GNU message catalog functions. */
# include <libintl.h>
#else
/* Solaris /usr/include/locale.h includes /usr/include/libintl.h, which
chokes if dcgettext is defined as a macro. So include it now, to make
later inclusions of <locale.h> a NOP. We don't include <libintl.h>
as well because people using "gettext.h" will not include <libintl.h>,
and also including <libintl.h> would fail on SunOS 4, whereas <locale.h>
is OK. */
#if defined(__sun)
# include <locale.h>
#endif
/* Disabled NLS.
The casts to 'const char *' serve the purpose of producing warnings
for invalid uses of the value returned from these functions.
On pre-ANSI systems without 'const', the config.h file is supposed to
contain "#define const". */
# define gettext(Msgid) ((const char *) (Msgid))
# define dgettext(Domainname, Msgid) ((const char *) (Msgid))
# define dcgettext(Domainname, Msgid, Category) ((const char *) (Msgid))
# define ngettext(Msgid1, Msgid2, N) \
((N) == 1 ? (const char *) (Msgid1) : (const char *) (Msgid2))
# define dngettext(Domainname, Msgid1, Msgid2, N) \
((N) == 1 ? (const char *) (Msgid1) : (const char *) (Msgid2))
# define dcngettext(Domainname, Msgid1, Msgid2, N, Category) \
((N) == 1 ? (const char *) (Msgid1) : (const char *) (Msgid2))
# define textdomain(Domainname) ((const char *) (Domainname))
# define bindtextdomain(Domainname, Dirname) ((const char *) (Dirname))
# define bind_textdomain_codeset(Domainname, Codeset) ((const char *) (Codeset))
#endif
/* A pseudo function call that serves as a marker for the automated
extraction of messages, but does not call gettext(). The run-time
translation is done at a different place in the code.
The argument, String, should be a literal string. Concatenated strings
and other string expressions won't work.
The macro's expansion is not parenthesized, so that it is suitable as
initializer for static 'char[]' or 'const char[]' variables. */
#define gettext_noop(String) String
#endif /* _LIBGETTEXT_H */
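
A short usage illustration of the gettext_noop() marker documented above; the strings and array here are hypothetical:

#include <stdio.h>
#include "gettext.h"

/* gettext_noop() only marks the literals for message extraction; the
 * runtime translation happens at the point of use via gettext(). */
static const char *status_msgs[] = {
    gettext_noop ("Device busy"),
    gettext_noop ("Device ready"),
};

void print_status (int idx)
{
    printf ("%s\n", gettext (status_msgs[idx]));
}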

gst-v4l2/gst/glib-compat-private.h (new file, 36 lines)

@@ -0,0 +1,36 @@
/*
* glib-compat.c
* Functions copied from glib 2.10
*
* Copyright 2005 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GLIB_COMPAT_PRIVATE_H__
#define __GLIB_COMPAT_PRIVATE_H__
#include <glib.h>
G_BEGIN_DECLS
/* copies */
/* adaptations */
G_END_DECLS
#endif

gst-v4l2/gst/gst-i18n-plugin.h (new file, 47 lines)

@@ -0,0 +1,47 @@
/* GStreamer
* Copyright (C) 2004 Thomas Vander Stichele <thomas@apestaart.org>
*
* gst-i18n-plugins.h: internationalization macros for the GStreamer plugins
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_I18N_PLUGIN_H__
#define __GST_I18N_PLUGIN_H__
#ifndef GETTEXT_PACKAGE
#error You must define GETTEXT_PACKAGE before including this header.
#endif
#ifdef ENABLE_NLS
#include <locale.h>
#include "gettext.h" /* included with gettext distribution and copied */
/* we want to use shorthand _() for translating and N_() for marking */
#define _(String) dgettext (GETTEXT_PACKAGE, String)
#define N_(String) gettext_noop (String)
/* FIXME: if we need it, we can add Q_ as well, like in glib */
#else
#define _(String) String
#define N_(String) String
#define ngettext(Singular,Plural,Count) ((Count>1)?Plural:Singular)
#endif
#endif /* __GST_I18N_PLUGIN_H__ */
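
As a hedged sketch of how a plugin source file is expected to use this header (the package name and message below are hypothetical):

#define GETTEXT_PACKAGE "nvvideo4linux2"   /* must be defined before the include */
#include <stdio.h>
#include "gst/gst-i18n-plugin.h"

/* N_() marks the string for extraction in a static initializer;
 * _() does the runtime lookup (or passes the string through when NLS is off). */
static const char *open_error = N_("Could not open the video device");

void report_open_error (void)
{
    printf ("%s\n", _(open_error));
}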

gst-v4l2/gstv4l2.c (new file, 491 lines)

@@ -0,0 +1,491 @@
/* GStreamer
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2.c: plugin for v4l2 elements
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifndef _GNU_SOURCE
# define _GNU_SOURCE /* O_CLOEXEC */
#endif
#include "gst/gst-i18n-plugin.h"
#include <gst/gst.h>
#include <fcntl.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <stdlib.h>
#include "linux/videodev2.h"
#include "v4l2-utils.h"
#include "gstv4l2object.h"
#ifndef USE_V4L2_TARGET_NV
#include "gstv4l2src.h"
#include "gstv4l2sink.h"
#include "gstv4l2radio.h"
#include "gstv4l2h263enc.h"
#include "gstv4l2mpeg4enc.h"
#include "gstv4l2deviceprovider.h"
#include "gstv4l2transform.h"
#endif
#include "gstv4l2videodec.h"
#include "gstv4l2h264enc.h"
#include "gstv4l2h265enc.h"
#include "gstv4l2vp8enc.h"
#include "gstv4l2vp9enc.h"
#include "gstv4l2av1enc.h"
/* used in gstv4l2object.c and v4l2_calls.c */
GST_DEBUG_CATEGORY (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
#ifndef USE_V4L2_TARGET_NV_X86
gboolean is_cuvid;
#else
gboolean is_cuvid = TRUE;
#endif
#ifdef GST_V4L2_ENABLE_PROBE
/* This is a minimalist probe, for speed, we only enumerate formats */
static GstCaps *
gst_v4l2_probe_template_caps (const gchar * device, gint video_fd,
enum v4l2_buf_type type)
{
gint n;
struct v4l2_fmtdesc format;
GstCaps *caps;
GST_DEBUG ("Getting %s format enumerations", device);
caps = gst_caps_new_empty ();
for (n = 0;; n++) {
GstStructure *template;
memset (&format, 0, sizeof (format));
format.index = n;
format.type = type;
if (ioctl (video_fd, VIDIOC_ENUM_FMT, &format) < 0)
break; /* end of enumeration */
GST_LOG ("index: %u", format.index);
GST_LOG ("type: %d", format.type);
GST_LOG ("flags: %08x", format.flags);
GST_LOG ("description: '%s'", format.description);
GST_LOG ("pixelformat: %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (format.pixelformat));
template = gst_v4l2_object_v4l2fourcc_to_structure (format.pixelformat);
if (template) {
GstStructure *alt_t = NULL;
switch (format.pixelformat) {
case V4L2_PIX_FMT_RGB32:
alt_t = gst_structure_copy (template);
gst_structure_set (alt_t, "format", G_TYPE_STRING, "ARGB", NULL);
break;
case V4L2_PIX_FMT_BGR32:
alt_t = gst_structure_copy (template);
gst_structure_set (alt_t, "format", G_TYPE_STRING, "BGRA", NULL);
default:
break;
}
gst_caps_append_structure (caps, template);
if (alt_t)
gst_caps_append_structure (caps, alt_t);
}
}
return gst_caps_simplify (caps);
}
static gboolean
gst_v4l2_probe_and_register (GstPlugin * plugin)
{
GstV4l2Iterator *it;
gint video_fd = -1;
struct v4l2_capability vcap;
guint32 device_caps;
it = gst_v4l2_iterator_new ();
while (gst_v4l2_iterator_next (it)) {
GstCaps *src_caps, *sink_caps;
gchar *basename;
if (video_fd >= 0)
close (video_fd);
video_fd = open (it->device_path, O_RDWR | O_CLOEXEC);
if (video_fd == -1) {
GST_DEBUG ("Failed to open %s: %s", it->device_path, g_strerror (errno));
continue;
}
memset (&vcap, 0, sizeof (vcap));
if (ioctl (video_fd, VIDIOC_QUERYCAP, &vcap) < 0) {
GST_DEBUG ("Failed to get device capabilities: %s", g_strerror (errno));
continue;
}
if (vcap.capabilities & V4L2_CAP_DEVICE_CAPS)
device_caps = vcap.device_caps;
else
device_caps = vcap.capabilities;
if (!((device_caps & (V4L2_CAP_VIDEO_M2M | V4L2_CAP_VIDEO_M2M_MPLANE)) ||
/* But legacy driver may expose both CAPTURE and OUTPUT */
((device_caps &
(V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) &&
(device_caps &
(V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))))
continue;
GST_DEBUG ("Probing '%s' located at '%s'",
it->device_name ? it->device_name : (const gchar *) vcap.driver,
it->device_path);
/* get sink supported format (no MPLANE for codec) */
sink_caps = gst_caps_merge (gst_v4l2_probe_template_caps (it->device_path,
video_fd, V4L2_BUF_TYPE_VIDEO_OUTPUT),
gst_v4l2_probe_template_caps (it->device_path, video_fd,
V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE));
/* get src supported format */
src_caps = gst_caps_merge (gst_v4l2_probe_template_caps (it->device_path,
video_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE),
gst_v4l2_probe_template_caps (it->device_path, video_fd,
V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE));
/* Skip devices without any supported formats */
if (gst_caps_is_empty (sink_caps) || gst_caps_is_empty (src_caps)) {
gst_caps_unref (sink_caps);
gst_caps_unref (src_caps);
continue;
}
basename = g_path_get_basename (it->device_path);
if (gst_v4l2_is_video_dec (sink_caps, src_caps)) {
gst_v4l2_video_dec_register (plugin, basename, it->device_path,
sink_caps, src_caps);
} else if (gst_v4l2_is_video_enc (sink_caps, src_caps, NULL)) {
if (gst_v4l2_is_h264_enc (sink_caps, src_caps))
gst_v4l2_h264_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_mpeg4_enc (sink_caps, src_caps))
gst_v4l2_mpeg4_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_h263_enc (sink_caps, src_caps))
gst_v4l2_h263_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_vp8_enc (sink_caps, src_caps))
gst_v4l2_vp8_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_vp9_enc (sink_caps, src_caps))
gst_v4l2_vp9_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
if (gst_v4l2_is_av1_enc (sink_caps, src_caps))
gst_v4l2_av1_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
} else if (gst_v4l2_is_transform (sink_caps, src_caps)) {
gst_v4l2_transform_register (plugin, basename, it->device_path,
sink_caps, src_caps);
}
/* else if ( ... etc. */
gst_caps_unref (sink_caps);
gst_caps_unref (src_caps);
g_free (basename);
}
if (video_fd >= 0)
close (video_fd);
gst_v4l2_iterator_free (it);
return TRUE;
}
#endif
#ifndef USE_V4L2_TARGET_NV
static gboolean
plugin_init (GstPlugin * plugin)
{
const gchar *paths[] = { "/dev", "/dev/v4l2", NULL };
const gchar *names[] = { "video", NULL };
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
/* Add some dependency, so the dynamic features get updated upon changes in
* /dev/video* */
gst_plugin_add_dependency (plugin,
NULL, paths, names, GST_PLUGIN_DEPENDENCY_FLAG_FILE_NAME_IS_PREFIX);
if (!gst_element_register (plugin, "v4l2src", GST_RANK_PRIMARY,
GST_TYPE_V4L2SRC) ||
!gst_element_register (plugin, "v4l2sink", GST_RANK_NONE,
GST_TYPE_V4L2SINK) ||
!gst_element_register (plugin, "v4l2radio", GST_RANK_NONE,
GST_TYPE_V4L2RADIO) ||
!gst_device_provider_register (plugin, "v4l2deviceprovider",
GST_RANK_PRIMARY, GST_TYPE_V4L2_DEVICE_PROVIDER)
/* etc. */
#ifdef GST_V4L2_ENABLE_PROBE
|| !gst_v4l2_probe_and_register (plugin)
#endif
)
return FALSE;
#ifdef ENABLE_NLS
bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
#endif /* ENABLE_NLS */
return TRUE;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
video4linux2,
"elements for Video 4 Linux",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
#else
static gboolean
gst_v4l2_has_vp8_encoder(void)
{
gboolean ret = FALSE;
int fd = -1;
long len = -1;
struct stat statbuf;
char info[128];
if (access (V4L2_DEVICE_PATH_TEGRA_INFO, F_OK) == 0) {
stat(V4L2_DEVICE_PATH_TEGRA_INFO, &statbuf);
if (statbuf.st_size > 0 && statbuf.st_size < 128)
{
fd = open(V4L2_DEVICE_PATH_TEGRA_INFO, O_RDONLY);
read(fd, info, statbuf.st_size);
len = statbuf.st_size - 8;
for (int i = 0; i < len; i ++)
{
if (strncmp(&info[i], "tegra", 5) == 0)
{
if (strncmp(&info[i], "tegra186", 8) == 0 ||
strncmp(&info[i], "tegra210", 8) == 0)
ret = TRUE;
break;
}
}
close(fd);
}
}
return ret;
}
static gboolean
gst_v4l2_is_v4l2_nvenc_present(void)
{
gboolean ret = TRUE;
int fd = -1;
long len = -1;
struct stat statbuf;
char info[128];
if (access (V4L2_DEVICE_PATH_TEGRA_INFO, F_OK) == 0) {
stat(V4L2_DEVICE_PATH_TEGRA_INFO, &statbuf);
if (statbuf.st_size > 0 && statbuf.st_size < 128)
{
fd = open(V4L2_DEVICE_PATH_TEGRA_INFO, O_RDONLY);
read(fd, info, statbuf.st_size);
len = statbuf.st_size - 10;
for (int i = 0; i < len; i ++)
{
if (strncmp(&info[i], "p3767", 5) == 0)
{
/*
Jetson Orin Nano 8GB (P3767-0003) Commercial module
Jetson Orin Nano 4GB (P3767-0004) Commercial module
Jetson Orin Nano 8GB with SD card slot (P3767-0005) For the Developer Kit only
*/
if (strncmp(&info[i + 6], "0003", 4) == 0 ||
strncmp(&info[i + 6], "0004", 4) == 0 ||
strncmp(&info[i + 6], "0005", 4) == 0)
ret = FALSE;
break;
}
}
close(fd);
}
}
return ret;
}
static gboolean
plugin_init (GstPlugin * plugin)
{
gboolean ret = TRUE;
g_setenv ("GST_V4L2_USE_LIBV4L2", "1", FALSE);
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
#ifndef USE_V4L2_TARGET_NV_X86
int igpu = -1, dgpu = -1;
igpu = system("lsmod | grep 'nvgpu' > /dev/null");
dgpu = system("modprobe -D -q nvidia | grep 'dkms' > /dev/null");
if (igpu == -1 || dgpu == -1)
return FALSE;
else if (dgpu == 0)
is_cuvid = TRUE;
else
is_cuvid = FALSE;
if (getenv("AARCH64_DGPU"))
is_cuvid = TRUE;
else if (getenv("AARCH64_IGPU"))
is_cuvid = FALSE;
#endif
if (is_cuvid == TRUE)
gst_v4l2_video_dec_register (plugin,
V4L2_DEVICE_BASENAME_NVDEC,
V4L2_DEVICE_PATH_NVDEC_MCCOY,
NULL,
NULL);
else if (access (V4L2_DEVICE_PATH_NVDEC, F_OK) == 0)
gst_v4l2_video_dec_register (plugin,
V4L2_DEVICE_BASENAME_NVDEC,
V4L2_DEVICE_PATH_NVDEC,
NULL,
NULL);
else
gst_v4l2_video_dec_register (plugin,
V4L2_DEVICE_BASENAME_NVDEC,
V4L2_DEVICE_PATH_NVDEC_ALT,
NULL,
NULL);
if (access (V4L2_DEVICE_PATH_NVENC, F_OK) == 0) {
gst_v4l2_h264_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
gst_v4l2_h265_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
} else {
if (!gst_v4l2_is_v4l2_nvenc_present()) {
// Orin Nano does not have HW encoders, so early return here.
return ret;
}
gst_v4l2_h264_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_h265_enc_register(plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
}
if (is_cuvid == FALSE) {
if (access (V4L2_DEVICE_PATH_NVENC, F_OK) == 0) {
if (gst_v4l2_has_vp8_encoder()) {
gst_v4l2_vp8_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
}
gst_v4l2_vp9_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
} else {
gst_v4l2_vp8_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_vp9_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
}
}
return ret;
}
#ifndef PACKAGE
#define PACKAGE "nvvideo4linux2"
#endif
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
nvvideo4linux2,
"Nvidia elements for Video 4 Linux",
plugin_init,
"1.14.0",
"LGPL",
"nvvideo4linux2",
"http://nvidia.com/")
#endif
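
To sanity-check that plugin_init() ran and registered the decoder, a small standalone program can look the element up by name; "nvv4l2decoder" is the name this plugin is commonly known to register, but treat it as an assumption here:

#include <gst/gst.h>
#include <stdio.h>

int main (int argc, char *argv[])
{
    gst_init (&argc, &argv);
    /* Element name assumed; adjust if the registered basename differs. */
    GstElementFactory *f = gst_element_factory_find ("nvv4l2decoder");
    printf ("nvv4l2decoder %s\n", f ? "found" : "not found");
    if (f)
        gst_object_unref (f);
    return 0;
}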

gst-v4l2/gstv4l2allocator.c (new file, 1620 lines; diff suppressed because it is too large)

gst-v4l2/gstv4l2allocator.h (new file, 181 lines)

@@ -0,0 +1,181 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
* Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_ALLOCATOR_H__
#define __GST_V4L2_ALLOCATOR_H__
#include "linux/videodev2.h"
#include <gst/gst.h>
#include <gst/gstatomicqueue.h>
G_BEGIN_DECLS
#define GST_TYPE_V4L2_ALLOCATOR (gst_v4l2_allocator_get_type())
#define GST_IS_V4L2_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_ALLOCATOR))
#define GST_IS_V4L2_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_V4L2_ALLOCATOR))
#define GST_V4L2_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_V4L2_ALLOCATOR, GstV4l2AllocatorClass))
#define GST_V4L2_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_ALLOCATOR, GstV4l2Allocator))
#define GST_V4L2_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_V4L2_ALLOCATOR, GstV4l2AllocatorClass))
#define GST_V4L2_ALLOCATOR_CAST(obj) ((GstV4l2Allocator *)(obj))
#define GST_V4L2_ALLOCATOR_CAN_REQUEST(obj,type) \
(GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUFS))
#define GST_V4L2_ALLOCATOR_CAN_ALLOCATE(obj,type) \
(GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUFS))
#define GST_V4L2_MEMORY_QUARK gst_v4l2_memory_quark ()
/* The structures are renamed as the name conflicts with the
* OSS v4l2 library structures. */
#ifdef USE_V4L2_TARGET_NV
#define GstV4l2Allocator GstNvV4l2Allocator
#define GstV4l2AllocatorClass GstNvV4l2AllocatorClass
#endif
#ifdef USE_V4L2_TARGET_NV
#define NV_VIDEO_MAX_FRAME 64
#endif
typedef struct _GstV4l2Allocator GstV4l2Allocator;
typedef struct _GstV4l2AllocatorClass GstV4l2AllocatorClass;
typedef struct _GstV4l2MemoryGroup GstV4l2MemoryGroup;
typedef struct _GstV4l2Memory GstV4l2Memory;
typedef enum _GstV4l2Capabilities GstV4l2Capabilities;
typedef enum _GstV4l2Return GstV4l2Return;
typedef struct _GstV4l2Object GstV4l2Object;
enum _GstV4l2AllocatorFlags
{
GST_V4L2_ALLOCATOR_FLAG_MMAP_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 0),
GST_V4L2_ALLOCATOR_FLAG_MMAP_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 1),
GST_V4L2_ALLOCATOR_FLAG_USERPTR_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 2),
GST_V4L2_ALLOCATOR_FLAG_USERPTR_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 3),
GST_V4L2_ALLOCATOR_FLAG_DMABUF_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 4),
GST_V4L2_ALLOCATOR_FLAG_DMABUF_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 5),
};
enum _GstV4l2Return
{
GST_V4L2_OK = 0,
GST_V4L2_ERROR = -1,
GST_V4L2_BUSY = -2
};
struct _GstV4l2Memory
{
GstMemory mem;
gint plane;
GstV4l2MemoryGroup *group;
gpointer data;
gint dmafd;
};
struct _GstV4l2MemoryGroup
{
gint n_mem;
GstMemory * mem[VIDEO_MAX_PLANES];
gint mems_allocated;
struct v4l2_buffer buffer;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
};
struct _GstV4l2Allocator
{
GstAllocator parent;
GstV4l2Object *obj;
guint32 count;
guint32 memory;
gboolean can_allocate;
gboolean active;
#ifdef USE_V4L2_TARGET_NV
GstV4l2MemoryGroup * groups[NV_VIDEO_MAX_FRAME];
#else
GstV4l2MemoryGroup * groups[VIDEO_MAX_FRAME];
#endif
GstAtomicQueue *free_queue;
GstAtomicQueue *pending_queue;
#ifdef USE_V4L2_TARGET_NV
gboolean enable_dynamic_allocation; /* If dynamic_allocation should be set */
#endif
};
struct _GstV4l2AllocatorClass {
GstAllocatorClass parent_class;
};
GType gst_v4l2_allocator_get_type(void);
gboolean gst_is_v4l2_memory (GstMemory * mem);
GQuark gst_v4l2_memory_quark (void);
gboolean gst_v4l2_allocator_is_active (GstV4l2Allocator * allocator);
guint gst_v4l2_allocator_get_size (GstV4l2Allocator * allocator);
GstV4l2Allocator* gst_v4l2_allocator_new (GstObject *parent, GstV4l2Object * obj);
guint gst_v4l2_allocator_start (GstV4l2Allocator * allocator,
guint32 count, guint32 memory);
GstV4l2Return gst_v4l2_allocator_stop (GstV4l2Allocator * allocator);
GstV4l2MemoryGroup* gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator);
GstV4l2MemoryGroup* gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
GstAllocator * dmabuf_allocator);
GstV4l2MemoryGroup * gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator);
GstV4l2MemoryGroup * gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator);
gboolean gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup *group,
gint n_mem, GstMemory ** dma_mem);
gboolean gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup *group,
gsize img_size, int n_planes,
gpointer * data, gsize * size);
void gst_v4l2_allocator_flush (GstV4l2Allocator * allocator);
gboolean gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup * group);
GstFlowReturn gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup ** group);
void gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
GstV4l2MemoryGroup * group);
#ifdef USE_V4L2_TARGET_NV
void
gst_v4l2_allocator_enable_dynamic_allocation (GstV4l2Allocator * allocator,
gboolean enable_dynamic_allocation);
#endif
G_END_DECLS
#endif /* __GST_V4L2_ALLOCATOR_H__ */
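The allocator API declared above is normally driven by the buffer pool, but the intended call order is easier to see in isolation. A minimal, hedged C sketch follows; the `parent` (GstObject *) and `obj` (GstV4l2Object *) names are assumptions standing in for an already-opened element, and error handling is trimmed:

/* Sketch only: `parent` and `obj` are assumed to be set up elsewhere. */
GstV4l2Allocator *alloc = gst_v4l2_allocator_new (parent, obj);
guint count = gst_v4l2_allocator_start (alloc, 4, V4L2_MEMORY_MMAP);
for (guint i = 0; i < count; i++) {
  GstV4l2MemoryGroup *group = gst_v4l2_allocator_alloc_mmap (alloc);
  if (group != NULL)
    gst_v4l2_allocator_qbuf (alloc, group);   /* hand the buffer to the driver */
}
GstV4l2MemoryGroup *done = NULL;
while (gst_v4l2_allocator_dqbuf (alloc, &done) == GST_FLOW_OK) {
  /* planes are available in done->mem[0 .. done->n_mem - 1]; requeue once consumed */
  gst_v4l2_allocator_qbuf (alloc, done);
}
gst_v4l2_allocator_stop (alloc);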

340
gst-v4l2/gstv4l2av1enc.c Normal file
View File

@@ -0,0 +1,340 @@
/*
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include "gstv4l2object.h"
#include "gstv4l2av1enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_av1_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_av1_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-av1");
/* prototypes */
gboolean gst_v4l2_av1_enc_tile_configuration (GstV4l2Object * v4l2object,
gboolean enable_tile, guint32 log2_tile_rows, guint32 log2_tile_cols);
static gboolean gst_v4l2_video_enc_parse_tile_configuration (GstV4l2Av1Enc * self,
const gchar * arr);
gboolean set_v4l2_av1_encoder_properties (GstVideoEncoder * encoder);
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
PROP_ENABLE_HEADER,
PROP_ENABLE_TILE_CONFIG,
PROP_DISABLE_CDF,
PROP_ENABLE_SSIMRDO,
PROP_NUM_REFERENCE_FRAMES,
};
#define DEFAULT_NUM_REFERENCE_FRAMES 0
#define MAX_NUM_REFERENCE_FRAMES 4
#define gst_v4l2_av1_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2Av1Enc, gst_v4l2_av1_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_av1_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstV4l2Av1Enc *self = GST_V4L2_AV1_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
self->EnableHeaders = g_value_get_boolean (value);
video_enc->v4l2capture->Enable_headers = g_value_get_boolean (value);
break;
    case PROP_ENABLE_TILE_CONFIG:
      /* Enable tile configuration only if the "rows,cols" string parsed */
      self->EnableTileConfig =
          gst_v4l2_video_enc_parse_tile_configuration (self,
          g_value_get_string (value));
      break;
case PROP_DISABLE_CDF:
self->DisableCDFUpdate = g_value_get_boolean (value);
break;
case PROP_ENABLE_SSIMRDO:
self->EnableSsimRdo = g_value_get_boolean (value);
break;
case PROP_NUM_REFERENCE_FRAMES:
self->nRefFrames = g_value_get_uint (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_v4l2_av1_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstV4l2Av1Enc *self = GST_V4L2_AV1_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
g_value_set_boolean (value, self->EnableHeaders);
break;
    case PROP_ENABLE_TILE_CONFIG:
      /* Report the currently configured tile layout as "rows,cols" */
      g_value_take_string (value,
          g_strdup_printf ("%u,%u", self->Log2TileRows, self->Log2TileCols));
      break;
case PROP_DISABLE_CDF:
g_value_set_boolean (value, self->DisableCDFUpdate);
break;
case PROP_ENABLE_SSIMRDO:
g_value_set_boolean (value, self->EnableSsimRdo);
break;
case PROP_NUM_REFERENCE_FRAMES:
g_value_set_uint (value, self->nRefFrames);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "0"))
v4l2_profile = 0;
else if (g_str_equal (profile, "1"))
v4l2_profile = 1;
else if (g_str_equal (profile, "2"))
v4l2_profile = 2;
else if (g_str_equal (profile, "3"))
v4l2_profile = 3;
else
GST_WARNING ("Unsupported profile string '%s'", profile);
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case 0:
return "0";
case 1:
return "1";
case 2:
return "2";
case 3:
return "3";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static gboolean
gst_v4l2_video_enc_parse_tile_configuration (GstV4l2Av1Enc * self,
const gchar * arr)
{
  gchar *str;
  if (arr == NULL)
    return FALSE;
  self->Log2TileRows = atoi (arr);
  str = g_strstr_len (arr, -1, ",");
  if (str == NULL) {
    GST_WARNING_OBJECT (self,
        "Expected tile configuration \"Log2Rows,Log2Cols\", got '%s'", arr);
    return FALSE;
  }
  self->Log2TileCols = atoi (str + 1);
  return TRUE;
}
gboolean
gst_v4l2_av1_enc_tile_configuration (GstV4l2Object * v4l2object,
gboolean enable_tile, guint32 log2_tile_rows, guint32 log2_tile_cols)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_enc_av1_tile_config param =
{enable_tile, log2_tile_rows, log2_tile_cols};
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_AV1_TILE_CONFIGURATION;
control.string = (gchar *) &param;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting tile configuration\n");
return FALSE;
}
return TRUE;
}
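/* Note on the pattern above: this NVIDIA extension control does not carry a
 * scalar value; the address of the v4l2_enc_av1_tile_config struct is passed
 * through the `string` member of v4l2_ext_control and reinterpreted on the
 * driver side. The H.264/H.265 slice-length controls later in this commit use
 * the same mechanism. */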
gboolean
set_v4l2_av1_encoder_properties (GstVideoEncoder * encoder)
{
GstV4l2Av1Enc *self = GST_V4L2_AV1_ENC (encoder);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (encoder);
if (!GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
g_print ("V4L2 device is not open\n");
return FALSE;
}
if (self->EnableTileConfig) {
if (!gst_v4l2_av1_enc_tile_configuration (video_enc->v4l2output,
self->EnableTileConfig, self->Log2TileRows, self->Log2TileCols)) {
g_print ("S_EXT_CTRLS for Tile Configuration failed\n");
return FALSE;
}
}
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_AV1_DISABLE_CDF_UPDATE, self->DisableCDFUpdate)) {
g_print ("S_EXT_CTRLS for DisableCDF Update failed\n");
return FALSE;
}
if (self->EnableSsimRdo) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_AV1_ENABLE_SSIMRDO, self->EnableSsimRdo)) {
g_print ("S_EXT_CTRLS for SSIM RDO failed\n");
return FALSE;
}
}
if (self->nRefFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES,
self->nRefFrames)) {
g_print ("S_EXT_CTRLS for NUM_REFERENCE_FRAMES failed\n");
return FALSE;
}
}
return TRUE;
}
static void
gst_v4l2_av1_enc_init (GstV4l2Av1Enc * self)
{
self->EnableTileConfig = FALSE;
self->DisableCDFUpdate = TRUE;
self->EnableSsimRdo = FALSE;
  self->Log2TileRows = 0;
  self->Log2TileCols = 0;
}
static void
gst_v4l2_av1_enc_class_init (GstV4l2Av1EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_av1_enc_debug, "v4l2av1enc", 0,
"V4L2 AV1 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 AV1 Encoder",
"Codec/Encoder/Video",
"Encode AV1 video streams via V4L2 API",
"Anuma Rathore <arathore@nvidia.com>");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_av1_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_av1_enc_get_property);
g_object_class_install_property (gobject_class, PROP_ENABLE_HEADER,
g_param_spec_boolean ("enable-headers", "Enable AV1 headers",
"Enable AV1 file and frame headers, if enabled, dump elementary stream",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_TILE_CONFIG,
g_param_spec_string ("tiles", "AV1 Log2 Tile Configuration",
"Use string with values of Tile Configuration"
"in Log2Rows:Log2Cols. Eg: \"1,0\"",
"0,0", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_DISABLE_CDF,
g_param_spec_boolean ("disable-cdf", "Disable CDF Update",
"Flag to control Disable CDF Update, enabled by default",
TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_SSIMRDO,
g_param_spec_boolean ("enable-srdo", "Enable SSIM RDO",
"Enable SSIM RDO",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder, default set by encoder",
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
baseclass->codec_name = "AV1";
baseclass->profile_cid = 0; /* Only single profile supported */
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
baseclass->set_encoder_properties = set_v4l2_av1_encoder_properties;
}
/* Probing functions */
gboolean
gst_v4l2_is_av1_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_av1_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_AV1_ENC,
"av1", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}
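Once a plugin registers this type, the AV1-specific properties installed in class_init above are ordinary GObject properties. A hedged application-side sketch; the factory name "nvv4l2av1enc" is an assumption here, since the actual element name is decided by the caller of gst_v4l2_av1_enc_register():

/* Sketch only: the factory name "nvv4l2av1enc" is assumed, not defined in
 * this file. */
GstElement *enc = gst_element_factory_make ("nvv4l2av1enc", NULL);
if (enc != NULL) {
  g_object_set (enc,
      "tiles", "1,1",          /* parsed as Log2TileRows,Log2TileCols */
      "disable-cdf", TRUE,
      "num-Ref-Frames", 2,
      NULL);
}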

66
gst-v4l2/gstv4l2av1enc.h Normal file
View File

@@ -0,0 +1,66 @@
/*
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_AV1_ENC_H__
#define __GST_V4L2_AV1_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_AV1_ENC \
(gst_v4l2_av1_enc_get_type())
#define GST_V4L2_AV1_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_AV1_ENC,GstV4l2Av1Enc))
#define GST_V4L2_AV1_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_AV1_ENC,GstV4l2Av1EncClass))
#define GST_IS_V4L2_AV1_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_AV1_ENC))
#define GST_IS_V4L2_AV1_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_AV1_ENC))
typedef struct _GstV4l2Av1Enc GstV4l2Av1Enc;
typedef struct _GstV4l2Av1EncClass GstV4l2Av1EncClass;
struct _GstV4l2Av1Enc
{
GstV4l2VideoEnc parent;
gboolean EnableHeaders;
gboolean EnableTileConfig;
gboolean DisableCDFUpdate;
gboolean EnableSsimRdo;
guint32 Log2TileRows;
guint32 Log2TileCols;
guint32 nRefFrames;
};
struct _GstV4l2Av1EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_av1_enc_get_type (void);
gboolean gst_v4l2_is_av1_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_av1_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_AV1_ENC_H__ */

2565
gst-v4l2/gstv4l2bufferpool.c Normal file
View File

File diff suppressed because it is too large

140
gst-v4l2/gstv4l2bufferpool.h Normal file
View File

@@ -0,0 +1,140 @@
/* GStreamer
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* 2009 Texas Instruments, Inc - http://www.ti.com/
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2bufferpool.h V4L2 buffer pool class
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_V4L2_BUFFER_POOL_H__
#define __GST_V4L2_BUFFER_POOL_H__
#include <gst/gst.h>
/* The structures are renamed as the name conflicts with the
* OSS v4l2 library structures. */
#ifdef USE_V4L2_TARGET_NV
#define GstV4l2BufferPool GstNvV4l2BufferPool
#define GstV4l2BufferPoolClass GstNvV4l2BufferPoolClass
#endif
typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
typedef struct _GstV4l2BufferPoolClass GstV4l2BufferPoolClass;
typedef struct _GstV4l2Meta GstV4l2Meta;
#include "gstv4l2object.h"
#include "gstv4l2allocator.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_BUFFER_POOL (gst_v4l2_buffer_pool_get_type())
#define GST_IS_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER_POOL))
#define GST_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER_POOL, GstV4l2BufferPool))
#define GST_V4L2_BUFFER_POOL_CAST(obj) ((GstV4l2BufferPool*)(obj))
/* This flow return is used to indicate that the last buffer of a
 * drain or a resolution change has been found. This should normally
 * only occur for mem-2-mem devices. */
#define GST_V4L2_FLOW_LAST_BUFFER GST_FLOW_CUSTOM_SUCCESS
/* This flow return is used to indicate that the returned buffer was marked
 * with the error flag and had no payload. This error should be recovered by
 * simply waiting for the next buffer. */
#define GST_V4L2_FLOW_CORRUPTED_BUFFER GST_FLOW_CUSTOM_SUCCESS_1
struct _GstV4l2BufferPool
{
GstBufferPool parent;
GstV4l2Object *obj; /* the v4l2 object */
gint video_fd; /* a dup(2) of the v4l2object's video_fd */
GstPoll *poll; /* a poll for video_fd */
GstPollFD pollfd;
gboolean can_poll_device;
gboolean empty;
GCond empty_cond;
GstV4l2Allocator *vallocator;
GstAllocator *allocator;
GstAllocationParams params;
GstBufferPool *other_pool;
guint size;
GstVideoInfo caps_info; /* Default video information */
gboolean add_videometa; /* set if video meta should be added */
gboolean enable_copy_threshold; /* If copy_threshold should be set */
guint min_latency; /* number of buffers we will hold */
guint max_latency; /* number of buffers we can hold */
guint num_queued; /* number of buffers queued in the driver */
guint num_allocated; /* number of buffers allocated */
guint copy_threshold; /* when our pool runs lower, start handing out copies */
gboolean streaming;
gboolean flushing;
#ifdef USE_V4L2_TARGET_NV
GstBuffer *buffers[NV_VIDEO_MAX_FRAME];
#else
GstBuffer *buffers[VIDEO_MAX_FRAME];
#endif
/* signal handlers */
gulong group_released_handler;
  /* Control to warn only once about drivers with buggy field handling */
gboolean has_warned_on_buggy_field;
#ifdef USE_V4L2_TARGET_NV
gboolean enable_dynamic_allocation; /* If dynamic_allocation should be set */
#endif
};
struct _GstV4l2BufferPoolClass
{
GstBufferPoolClass parent_class;
};
GType gst_v4l2_buffer_pool_get_type (void);
GstBufferPool * gst_v4l2_buffer_pool_new (GstV4l2Object *obj, GstCaps *caps);
GstFlowReturn gst_v4l2_buffer_pool_process (GstV4l2BufferPool * bpool, GstBuffer ** buf);
void gst_v4l2_buffer_pool_set_other_pool (GstV4l2BufferPool * pool,
GstBufferPool * other_pool);
void gst_v4l2_buffer_pool_copy_at_threshold (GstV4l2BufferPool * pool,
gboolean copy);
gboolean gst_v4l2_buffer_pool_flush (GstBufferPool *pool);
#ifdef USE_V4L2_TARGET_NV
void
gst_v4l2_buffer_pool_enable_dynamic_allocation (GstV4l2BufferPool * pool,
gboolean enable_dynamic_allocation);
gint
get_motion_vectors (GstV4l2Object *obj, guint32 bufferIndex,
v4l2_ctrl_videoenc_outputbuf_metadata_MV *enc_mv_metadata);
#endif
G_END_DECLS
#endif /*__GST_V4L2_BUFFER_POOL_H__ */
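The two custom flow returns defined in this header are what let callers tell an ordinary buffer apart from a drain/resolution-change marker and from a corrupted capture. A hedged sketch of the typical check; `bpool` (GstV4l2BufferPool *) and `buf` (GstBuffer *) are assumed to be created elsewhere, e.g. via gst_v4l2_buffer_pool_new():

/* Sketch only: `bpool` and `buf` are assumed to be set up by the element. */
GstFlowReturn ret = gst_v4l2_buffer_pool_process (bpool, &buf);
if (ret == GST_V4L2_FLOW_LAST_BUFFER) {
  /* last buffer of a drain or resolution change on a mem-2-mem device */
} else if (ret == GST_V4L2_FLOW_CORRUPTED_BUFFER) {
  /* buffer was flagged as erroneous and has no payload: drop it and wait
   * for the next one */
} else if (ret != GST_FLOW_OK) {
  /* real error: propagate the flow return */
}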

842
gst-v4l2/gstv4l2h264enc.c Normal file
View File

@@ -0,0 +1,842 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association
* Author: ayaka <ayaka@soulik.info>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2h264enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_h264_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_h264_enc_debug
#ifdef USE_V4L2_TARGET_NV
static GType
gst_v4l2_videnc_profile_get_type (void);
#define GST_TYPE_V4L2_VID_ENC_PROFILE (gst_v4l2_videnc_profile_get_type ())
/* prototypes */
gboolean gst_v4l2_h264_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type);
gboolean set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder);
#endif
#ifdef USE_V4L2_TARGET_NV
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-h264, stream-format=(string) byte-stream, "
"alignment=(string) { au, nal }");
#else
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-h264, stream-format=(string) byte-stream, "
"alignment=(string) au");
#endif
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
#ifdef USE_V4L2_TARGET_NV
PROP_PROFILE,
PROP_INSERT_VUI,
PROP_EXTENDED_COLORFORMAT,
PROP_INSERT_SPS_PPS,
PROP_INSERT_AUD,
PROP_NUM_BFRAMES,
PROP_ENTROPY_CODING,
PROP_BIT_PACKETIZATION,
PROP_SLICE_INTRA_REFRESH,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
PROP_TWO_PASS_CBR,
PROP_ENABLE_MV_META,
PROP_SLICE_HEADER_SPACING,
PROP_NUM_REFERENCE_FRAMES,
PROP_PIC_ORDER_CNT_TYPE,
PROP_ENABLE_LOSSLESS_ENC
#endif
/* TODO add H264 controls
* PROP_I_FRAME_QP,
* PROP_P_FRAME_QP,
* PROP_B_FRAME_QP,
* PROP_MIN_QP,
* PROP_MAX_QP,
* PROP_8x8_TRANSFORM,
* PROP_CPB_SIZE,
* PROP_ENTROPY_MODE,
* PROP_I_PERIOD,
* PROP_LOOP_FILTER_ALPHA,
* PROP_LOOP_FILTER_BETA,
* PROP_LOOP_FILTER_MODE,
* PROP_VUI_EXT_SAR_HEIGHT,
* PROP_VUI_EXT_SAR_WIDTH,
* PROP_VUI_SAR_ENABLED,
* PROP_VUI_SAR_IDC,
* PROP_SEI_FRAME_PACKING,
* PROP_SEI_FP_CURRENT_FRAME_0,
* PROP_SEI_FP_ARRANGEMENT_TYP,
* ...
* */
};
#ifdef USE_V4L2_TARGET_NV
#define DEFAULT_PROFILE V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE
#define DEFAULT_NUM_B_FRAMES 0
#define MAX_NUM_B_FRAMES 2
#define DEFAULT_NUM_REFERENCE_FRAMES 1
#define MAX_NUM_REFERENCE_FRAMES 8
#define DEFAULT_BIT_PACKETIZATION FALSE
#define DEFAULT_SLICE_HEADER_SPACING 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 60
#define DEFAULT_PIC_ORDER_CNT_TYPE 0
#endif
#define gst_v4l2_h264_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2H264Enc, gst_v4l2_h264_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_h264_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2H264Enc *self = GST_V4L2_H264_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_PROFILE:
self->profile = g_value_get_enum (value);
if (GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H264_PROFILE, self->profile)) {
g_print ("S_EXT_CTRLS for H264_PROFILE failed\n");
}
}
break;
case PROP_NUM_BFRAMES:
self->nBFrames = g_value_get_uint (value);
if (self->nBFrames && self->nRefFrames == DEFAULT_NUM_REFERENCE_FRAMES)
{
self->nRefFrames = 2;
g_print ("Minimum 2 Ref-Frames are required for B-frames encoding\n");
}
break;
case PROP_INSERT_SPS_PPS:
self->insert_sps_pps = g_value_get_boolean (value);
break;
case PROP_INSERT_AUD:
self->insert_aud = g_value_get_boolean (value);
break;
case PROP_INSERT_VUI:
self->insert_vui = g_value_get_boolean (value);
break;
/* extended-colorformat property is available for cuvid path only*/
case PROP_EXTENDED_COLORFORMAT:
self->extended_colorformat = g_value_get_boolean (value);
break;
case PROP_ENTROPY_CODING:
self->disable_cabac_entropy_coding = g_value_get_boolean (value);
break;
case PROP_BIT_PACKETIZATION:
self->bit_packetization = g_value_get_boolean (value);
break;
case PROP_SLICE_HEADER_SPACING:
self->slice_header_spacing = g_value_get_uint64 (value);
if (self->slice_header_spacing)
video_enc->slice_output = TRUE;
else
video_enc->slice_output = FALSE;
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
self->SliceIntraRefreshInterval = g_value_get_uint (value);
break;
case PROP_TWO_PASS_CBR:
self->EnableTwopassCBR = g_value_get_boolean (value);
break;
case PROP_ENABLE_MV_META:
self->EnableMVBufferMeta = g_value_get_boolean (value);
video_enc->v4l2capture->enableMVBufferMeta = g_value_get_boolean (value);
break;
case PROP_NUM_REFERENCE_FRAMES:
self->nRefFrames = g_value_get_uint (value);
break;
case PROP_PIC_ORDER_CNT_TYPE:
self->poc_type = g_value_get_uint (value);
break;
case PROP_ENABLE_LOSSLESS_ENC:
self->enableLossless = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static void
gst_v4l2_h264_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2H264Enc *self = GST_V4L2_H264_ENC (object);
switch (prop_id) {
case PROP_PROFILE:
g_value_set_enum (value, self->profile);
break;
case PROP_NUM_BFRAMES:
g_value_set_uint (value, self->nBFrames);
break;
case PROP_INSERT_SPS_PPS:
g_value_set_boolean (value, self->insert_sps_pps);
break;
case PROP_INSERT_AUD:
g_value_set_boolean (value, self->insert_aud);
break;
case PROP_INSERT_VUI:
g_value_set_boolean (value, self->insert_vui);
break;
/* extended-colorformat property is available for cuvid path only*/
case PROP_EXTENDED_COLORFORMAT:
g_value_set_boolean (value, self->extended_colorformat);
break;
case PROP_ENTROPY_CODING:
g_value_set_boolean (value, self->disable_cabac_entropy_coding);
break;
case PROP_BIT_PACKETIZATION:
g_value_set_boolean (value, self->bit_packetization);
break;
case PROP_SLICE_HEADER_SPACING:
g_value_set_uint64 (value, self->slice_header_spacing);
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
g_value_set_uint (value, self->SliceIntraRefreshInterval);
break;
case PROP_TWO_PASS_CBR:
g_value_set_boolean (value, self->EnableTwopassCBR);
break;
case PROP_ENABLE_MV_META:
g_value_set_boolean (value, self->EnableMVBufferMeta);
break;
case PROP_NUM_REFERENCE_FRAMES:
g_value_set_uint (value, self->nRefFrames);
break;
case PROP_PIC_ORDER_CNT_TYPE:
g_value_set_uint (value, self->poc_type);
break;
case PROP_ENABLE_LOSSLESS_ENC:
g_value_set_boolean (value, self->enableLossless);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "baseline")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE;
} else if (g_str_equal (profile, "constrained-baseline")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE;
} else if (g_str_equal (profile, "main")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_MAIN;
} else if (g_str_equal (profile, "extended")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED;
} else if (g_str_equal (profile, "high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH;
} else if (g_str_equal (profile, "high-10")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10;
} else if (g_str_equal (profile, "high-4:2:2")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422;
} else if (g_str_equal (profile, "high-4:4:4")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE;
} else if (g_str_equal (profile, "high-10-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA;
} else if (g_str_equal (profile, "high-4:2:2-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA;
} else if (g_str_equal (profile, "high-4:4:4-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA;
} else if (g_str_equal (profile, "cavlc-4:4:4-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA;
} else if (g_str_equal (profile, "scalable-baseline")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE;
} else if (g_str_equal (profile, "scalable-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH;
} else if (g_str_equal (profile, "scalable-high-intra")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA;
} else if (g_str_equal (profile, "stereo-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH;
} else if (g_str_equal (profile, "multiview-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH;
} else {
GST_WARNING ("Unsupported profile string '%s'", profile);
}
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE:
return "baseline";
case V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE:
return "constrained-baseline";
case V4L2_MPEG_VIDEO_H264_PROFILE_MAIN:
return "main";
case V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED:
return "extended";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH:
return "high";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10:
return "high-10";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422:
return "high-4:2:2";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE:
return "high-4:4:4";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA:
return "high-10-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA:
return "high-4:2:2-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA:
return "high-4:4:4-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA:
return "cavlc-4:4:4-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE:
return "scalable-baseline";
case V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH:
return "scalable-high";
case V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA:
return "scalable-high-intra";
case V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH:
return "stereo-high";
case V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH:
return "multiview-high";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static gint
v4l2_level_from_string (const gchar * level)
{
gint v4l2_level = -1;
if (g_str_equal (level, "1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_0;
else if (g_str_equal (level, "1b"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1B;
else if (g_str_equal (level, "1.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_1;
else if (g_str_equal (level, "1.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_2;
else if (g_str_equal (level, "1.3"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_3;
else if (g_str_equal (level, "2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_0;
else if (g_str_equal (level, "2.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_1;
else if (g_str_equal (level, "2.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_2;
else if (g_str_equal (level, "3"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_0;
else if (g_str_equal (level, "3.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_1;
else if (g_str_equal (level, "3.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_2;
else if (g_str_equal (level, "4"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
else if (g_str_equal (level, "4.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_1;
else if (g_str_equal (level, "4.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_2;
else if (g_str_equal (level, "5"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_5_0;
else if (g_str_equal (level, "5.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_5_1;
else
GST_WARNING ("Unsupported level '%s'", level);
return v4l2_level;
}
static const gchar *
v4l2_level_to_string (gint v4l2_level)
{
switch (v4l2_level) {
case V4L2_MPEG_VIDEO_H264_LEVEL_1_0:
return "1";
case V4L2_MPEG_VIDEO_H264_LEVEL_1B:
return "1b";
case V4L2_MPEG_VIDEO_H264_LEVEL_1_1:
return "1.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_1_2:
return "1.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_1_3:
return "1.3";
case V4L2_MPEG_VIDEO_H264_LEVEL_2_0:
return "2";
case V4L2_MPEG_VIDEO_H264_LEVEL_2_1:
return "2.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_2_2:
return "2.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_3_0:
return "3.0";
case V4L2_MPEG_VIDEO_H264_LEVEL_3_1:
return "3.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_3_2:
return "3.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_4_0:
return "4";
case V4L2_MPEG_VIDEO_H264_LEVEL_4_1:
return "4.1";
case V4L2_MPEG_VIDEO_H264_LEVEL_4_2:
return "4.2";
case V4L2_MPEG_VIDEO_H264_LEVEL_5_0:
return "5";
case V4L2_MPEG_VIDEO_H264_LEVEL_5_1:
return "5.1";
default:
GST_WARNING ("Unsupported V4L2 level %i", v4l2_level);
break;
}
return NULL;
}
static void
gst_v4l2_h264_enc_init (GstV4l2H264Enc * self)
{
#ifdef USE_V4L2_TARGET_NV
self->profile = DEFAULT_PROFILE;
self->insert_sps_pps = FALSE;
self->insert_aud = FALSE;
self->insert_vui = FALSE;
self->enableLossless = FALSE;
if (is_cuvid == TRUE)
self->extended_colorformat = FALSE;
self->nBFrames = 0;
self->nRefFrames = 1;
self->bit_packetization = DEFAULT_BIT_PACKETIZATION;
self->slice_header_spacing = DEFAULT_SLICE_HEADER_SPACING;
self->poc_type = DEFAULT_PIC_ORDER_CNT_TYPE;
#endif
}
static void
gst_v4l2_h264_enc_class_init (GstV4l2H264EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_h264_enc_debug, "v4l2h264enc", 0,
"V4L2 H.264 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 H.264 Encoder",
"Codec/Encoder/Video",
"Encode H.264 video streams via V4L2 API", "ayaka <ayaka@soulik.info>");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h264_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h264_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_PROFILE,
g_param_spec_enum ("profile", "profile",
"Set profile for v4l2 encode",
GST_TYPE_V4L2_VID_ENC_PROFILE, DEFAULT_PROFILE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI Info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_PIC_ORDER_CNT_TYPE,
g_param_spec_uint ("poc-type",
"Picture Order Count type",
"Set Picture Order Count type value",
0, 2, DEFAULT_PIC_ORDER_CNT_TYPE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
g_param_spec_boolean ("insert-vui",
"Insert H.264 VUI",
"Insert H.264 VUI(Video Usability Information) in SPS",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_SPS_PPS,
g_param_spec_boolean ("insert-sps-pps",
"Insert H.264 SPS, PPS",
"Insert H.264 SPS, PPS at every IDR frame",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_AUD,
g_param_spec_boolean ("insert-aud",
"Insert H.264 AUD",
"Insert H.264 Access Unit Delimiter(AUD)",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_NUM_BFRAMES,
g_param_spec_uint ("num-B-Frames",
"B Frames between two reference frames",
"Number of B Frames between two reference frames (not recommended)",
0, MAX_NUM_B_FRAMES, DEFAULT_NUM_B_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENTROPY_CODING,
g_param_spec_boolean ("disable-cabac",
"Set Entropy Coding",
"Set Entropy Coding Type CAVLC(TRUE) or CABAC(FALSE)",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_BIT_PACKETIZATION,
g_param_spec_boolean ("bit-packetization", "Bit Based Packetization",
"Whether or not Packet size is based upon Number Of bits",
DEFAULT_BIT_PACKETIZATION,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_SLICE_HEADER_SPACING,
g_param_spec_uint64 ("slice-header-spacing", "Slice Header Spacing",
"Slice Header Spacing number of macroblocks/bits in one packet",
0, G_MAXUINT64, DEFAULT_SLICE_HEADER_SPACING,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_MV_META,
g_param_spec_boolean ("EnableMVBufferMeta",
"Enable Motion Vector Meta data",
"Enable Motion Vector Meta data for encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_TWO_PASS_CBR,
g_param_spec_boolean ("EnableTwopassCBR",
"Enable Two pass CBR",
"Enable two pass CBR while encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder",
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_LOSSLESS_ENC,
g_param_spec_boolean ("enable-lossless",
"Enable Lossless encoding",
"Enable lossless encoding for YUV444",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
}
#endif
baseclass->codec_name = "H264";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
baseclass->level_cid = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
baseclass->level_to_string = v4l2_level_to_string;
baseclass->level_from_string = v4l2_level_from_string;
#ifdef USE_V4L2_TARGET_NV
baseclass->set_encoder_properties = set_v4l2_h264_encoder_properties;
#endif
}
/* Probing functions */
gboolean
gst_v4l2_is_h264_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_h264_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_H264_ENC,
"h264", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}
#ifdef USE_V4L2_TARGET_NV
static GType
gst_v4l2_videnc_profile_get_type (void)
{
static volatile gsize profile = 0;
static const GEnumValue profile_type[] = {
{V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE,
"GST_V4L2_H264_VIDENC_BASELINE_PROFILE",
"Baseline"},
{V4L2_MPEG_VIDEO_H264_PROFILE_MAIN, "GST_V4L2_H264_VIDENC_MAIN_PROFILE",
"Main"},
{V4L2_MPEG_VIDEO_H264_PROFILE_HIGH, "GST_V4L2_H264_VIDENC_HIGH_PROFILE",
"High"},
{V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE, "GST_V4L2_H264_VIDENC_HIGH_444_PREDICTIVE",
"High444"},
{0, NULL, NULL}
};
if (g_once_init_enter (&profile)) {
GType tmp =
g_enum_register_static ("GstV4l2VideoEncProfileType", profile_type);
g_once_init_leave (&profile, tmp);
}
return (GType) profile;
}
gboolean
gst_v4l2_h264_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_enc_slice_length_param param =
{ slice_length_type, slice_header_spacing };
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_ENABLE_SLICE_LEVEL_ENCODE;
control.value = TRUE;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting spacing and packetization\n");
return FALSE;
}
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_SLICE_LENGTH_PARAM;
control.string = (gchar *) &param;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting spacing and packetization\n");
return FALSE;
}
if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
v4l2object->format.fmt.pix_mp.plane_fmt[0].sizeimage = slice_header_spacing;
} else {
v4l2object->format.fmt.pix.sizeimage = slice_header_spacing;
}
return TRUE;
}
gboolean
set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder)
{
GstV4l2H264Enc *self = GST_V4L2_H264_ENC (encoder);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (encoder);
if (!GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
g_print ("V4L2 device is not open\n");
return FALSE;
}
if (self->profile) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H264_PROFILE,
self->profile)) {
g_print ("S_EXT_CTRLS for H264_PROFILE failed\n");
return FALSE;
}
}
if (self->nBFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_BFRAMES,
self->nBFrames)) {
g_print ("S_EXT_CTRLS for NUM_BFRAMES failed\n");
return FALSE;
}
}
if (self->insert_vui) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, 1)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
}
}
if (is_cuvid == TRUE) {
if (self->extended_colorformat) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_EXTEDED_COLORFORMAT, 1)) {
g_print ("S_EXT_CTRLS for EXTENDED_COLORFORMAT failed\n");
return FALSE;
}
}
}
if (self->insert_aud) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_AUD, 1)) {
g_print ("S_EXT_CTRLS for INSERT_AUD failed\n");
return FALSE;
}
}
if (self->insert_sps_pps) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_SPS_PPS_AT_IDR, 1)) {
g_print ("S_EXT_CTRLS for SPS_PPS_AT_IDR failed\n");
return FALSE;
}
}
if (self->disable_cabac_entropy_coding) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE,
V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC)) {
g_print ("S_EXT_CTRLS for ENTROPY_MODE failed\n");
return FALSE;
}
}
if (self->slice_header_spacing) {
enum v4l2_enc_slice_length_type slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_MBLK;
if (self->bit_packetization) {
slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_BITS;
}
if (!gst_v4l2_h264_enc_slice_header_spacing (video_enc->v4l2capture,
self->slice_header_spacing,
slice_length_type)) {
g_print ("S_EXT_CTRLS for SLICE_LENGTH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableMVBufferMeta) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_METADATA_MV,
self->EnableMVBufferMeta)) {
g_print ("S_EXT_CTRLS for ENABLE_METADATA_MV failed\n");
return FALSE;
}
}
if (self->SliceIntraRefreshInterval) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM,
self->SliceIntraRefreshInterval)) {
g_print ("S_EXT_CTRLS for SLICE_INTRAREFRESH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableTwopassCBR) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_TWO_PASS_CBR, 1)) {
g_print ("S_EXT_CTRLS for TWO_PASS_CBR failed\n");
return FALSE;
}
}
if (self->nRefFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES,
self->nRefFrames)) {
g_print ("S_EXT_CTRLS for NUM_REFERENCE_FRAMES failed\n");
return FALSE;
}
}
if (self->poc_type) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_POC_TYPE, self->poc_type)) {
g_print ("S_EXT_CTRLS for POC_TYPE failed\n");
return FALSE;
}
}
if (self->enableLossless) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_LOSSLESS, self->enableLossless)) {
g_print ("S_EXT_CTRLS for ENABLE_LOSSLESS failed\n");
return FALSE;
}
}
return TRUE;
}
#endif

79
gst-v4l2/gstv4l2h264enc.h Normal file
View File

@@ -0,0 +1,79 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association.
* Author: ayaka <ayaka@soulik.info>
* Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_H264_ENC_H__
#define __GST_V4L2_H264_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_H264_ENC \
(gst_v4l2_h264_enc_get_type())
#define GST_V4L2_H264_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_H264_ENC,GstV4l2H264Enc))
#define GST_V4L2_H264_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_H264_ENC,GstV4l2H264EncClass))
#define GST_IS_V4L2_H264_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_H264_ENC))
#define GST_IS_V4L2_H264_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_H264_ENC))
typedef struct _GstV4l2H264Enc GstV4l2H264Enc;
typedef struct _GstV4l2H264EncClass GstV4l2H264EncClass;
struct _GstV4l2H264Enc
{
GstV4l2VideoEnc parent;
#ifdef USE_V4L2_TARGET_NV
guint profile;
guint nBFrames;
guint nRefFrames;
gboolean insert_sps_pps;
gboolean insert_aud;
gboolean insert_vui;
gboolean extended_colorformat;
gboolean EnableTwopassCBR;
gboolean SliceIntraRefreshEnable;
guint SliceIntraRefreshInterval;
gboolean disable_cabac_entropy_coding;
gboolean bit_packetization;
guint32 slice_header_spacing;
gboolean EnableMVBufferMeta;
guint poc_type;
gboolean enableLossless;
#endif
};
struct _GstV4l2H264EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_h264_enc_get_type (void);
gboolean gst_v4l2_is_h264_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_h264_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_H264_ENC_H__ */
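gstv4l2h264enc.c interprets slice-header-spacing in macroblocks by default and in bits when bit-packetization is TRUE. A hedged application-side sketch of configuring that pair together; the factory name "nvv4l2h264enc" is an assumption rather than something fixed by this commit:

/* Sketch only: the factory name "nvv4l2h264enc" is assumed. Note that
 * slice-header-spacing is a guint64 property, so the value must be passed
 * as a 64-bit integer in the g_object_set() varargs. */
GstElement *enc = gst_element_factory_make ("nvv4l2h264enc", NULL);
if (enc != NULL) {
  g_object_set (enc,
      "insert-sps-pps", TRUE,
      "bit-packetization", TRUE,               /* spacing counted in bits */
      "slice-header-spacing", G_GUINT64_CONSTANT (8000),
      NULL);
}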

591
gst-v4l2/gstv4l2h265enc.c Normal file
View File

@@ -0,0 +1,591 @@
/*
* Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2h265enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_h265_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_h265_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-h265, stream-format=(string) byte-stream, "
"alignment=(string) au");
static GType
gst_v4l2_videnc_profile_get_type (void);
#define GST_TYPE_V4L2_VID_ENC_PROFILE (gst_v4l2_videnc_profile_get_type ())
/* prototypes */
gboolean set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder);
gboolean gst_v4l2_h265_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type);
void set_h265_video_enc_property (GstV4l2Object * v4l2object, guint label,
gint param);
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
PROP_INSERT_SPS_PPS,
PROP_PROFILE,
PROP_INSERT_VUI,
PROP_EXTENDED_COLORFORMAT,
PROP_INSERT_AUD,
PROP_BIT_PACKETIZATION,
PROP_SLICE_HEADER_SPACING,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
PROP_TWO_PASS_CBR,
PROP_ENABLE_MV_META,
PROP_NUM_BFRAMES,
PROP_NUM_REFERENCE_FRAMES,
PROP_ENABLE_LOSSLESS_ENC
};
#define DEFAULT_PROFILE V4L2_MPEG_VIDEO_H265_PROFILE_MAIN
#define DEFAULT_BIT_PACKETIZATION FALSE
#define DEFAULT_SLICE_HEADER_SPACING 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 60
#define DEFAULT_NUM_B_FRAMES 0
#define MAX_NUM_B_FRAMES 2
#define DEFAULT_NUM_REFERENCE_FRAMES 1
#define MAX_NUM_REFERENCE_FRAMES 8
#define gst_v4l2_h265_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2H265Enc, gst_v4l2_h265_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_h265_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstV4l2H265Enc *self = GST_V4L2_H265_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_INSERT_SPS_PPS:
self->insert_sps_pps = g_value_get_boolean (value);
break;
case PROP_PROFILE:
self->profile = g_value_get_enum (value);
if (GST_V4L2_IS_OPEN(video_enc->v4l2output)) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H265_PROFILE, self->profile)) {
g_print ("S_EXT_CTRLS for H265_PROFILE failed\n");
}
}
break;
case PROP_INSERT_AUD:
self->insert_aud = g_value_get_boolean (value);
break;
case PROP_INSERT_VUI:
self->insert_vui = g_value_get_boolean (value);
break;
/* extended-colorformat property is available for cuvid path only*/
case PROP_EXTENDED_COLORFORMAT:
self->extended_colorformat = g_value_get_boolean (value);
break;
case PROP_BIT_PACKETIZATION:
self->bit_packetization = g_value_get_boolean (value);
break;
case PROP_SLICE_HEADER_SPACING:
self->slice_header_spacing = g_value_get_uint64 (value);
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
self->SliceIntraRefreshInterval = g_value_get_uint (value);
break;
case PROP_TWO_PASS_CBR:
self->EnableTwopassCBR = g_value_get_boolean (value);
break;
case PROP_ENABLE_MV_META:
self->EnableMVBufferMeta = g_value_get_boolean (value);
video_enc->v4l2capture->enableMVBufferMeta = g_value_get_boolean (value);
break;
case PROP_NUM_BFRAMES:
self->nBFrames = g_value_get_uint (value);
if (self->nBFrames && (self->nRefFrames == DEFAULT_NUM_REFERENCE_FRAMES)) {
// Minimum 2 Ref-Frames are required for B-frames encoding
self->nRefFrames = 2;
}
break;
case PROP_NUM_REFERENCE_FRAMES:
self->nRefFrames = g_value_get_uint (value);
break;
case PROP_ENABLE_LOSSLESS_ENC:
self->enableLossless = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_v4l2_h265_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstV4l2H265Enc *self = GST_V4L2_H265_ENC (object);
switch (prop_id) {
case PROP_INSERT_SPS_PPS:
g_value_set_boolean (value, self->insert_sps_pps);
break;
case PROP_PROFILE:
g_value_set_enum (value, self->profile);
break;
case PROP_INSERT_AUD:
g_value_set_boolean (value, self->insert_aud);
break;
case PROP_INSERT_VUI:
g_value_set_boolean (value, self->insert_vui);
break;
/* extended-colorformat property is available for cuvid path only*/
case PROP_EXTENDED_COLORFORMAT:
g_value_set_boolean (value, self->extended_colorformat);
break;
case PROP_BIT_PACKETIZATION:
g_value_set_boolean (value, self->bit_packetization);
break;
case PROP_SLICE_HEADER_SPACING:
g_value_set_uint64 (value, self->slice_header_spacing);
break;
case PROP_SLICE_INTRA_REFRESH_INTERVAL:
g_value_set_uint (value, self->SliceIntraRefreshInterval);
break;
case PROP_TWO_PASS_CBR:
g_value_set_boolean (value, self->EnableTwopassCBR);
break;
case PROP_ENABLE_MV_META:
g_value_set_boolean (value, self->EnableMVBufferMeta);
break;
case PROP_NUM_BFRAMES:
g_value_set_uint (value, self->nBFrames);
break;
case PROP_NUM_REFERENCE_FRAMES:
g_value_set_uint (value, self->nRefFrames);
break;
case PROP_ENABLE_LOSSLESS_ENC:
g_value_set_boolean (value, self->enableLossless);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "main")) {
v4l2_profile = V4L2_MPEG_VIDEO_H265_PROFILE_MAIN;
} else if (g_str_equal (profile, "main10")) {
v4l2_profile = V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10;
} else if (g_str_equal (profile, "mainstillpicture")) {
v4l2_profile = V4L2_MPEG_VIDEO_H265_PROFILE_MAINSTILLPICTURE;
} else if (g_str_equal (profile, "frext")) {
v4l2_profile = V4L2_MPEG_VIDEO_H265_PROFILE_FREXT;
} else {
GST_WARNING ("Unsupported profile string '%s'", profile);
}
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN:
return "main";
case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10:
return "main10";
case V4L2_MPEG_VIDEO_H265_PROFILE_MAINSTILLPICTURE:
return "mainstillpicture";
case V4L2_MPEG_VIDEO_H265_PROFILE_FREXT:
return "frext";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static gint
v4l2_level_from_string (const gchar * level)
{
gint v4l2_level = -1;
  //TODO : Since the videodev2 file does not list H265 levels,
  //we need to add levels inside v4l2_nv_extensions.h
  //and use them here.
return v4l2_level;
}
static const gchar *
v4l2_level_to_string (gint v4l2_level)
{
return NULL;
}
static void
gst_v4l2_h265_enc_init (GstV4l2H265Enc * self)
{
self->insert_sps_pps = FALSE;
self->profile = DEFAULT_PROFILE;
self->insert_aud = FALSE;
self->insert_vui = FALSE;
self->extended_colorformat = FALSE;
self->bit_packetization = DEFAULT_BIT_PACKETIZATION;
self->slice_header_spacing = DEFAULT_SLICE_HEADER_SPACING;
self->nRefFrames = 1;
self->nBFrames = 0;
self->enableLossless = FALSE;
}
static void
gst_v4l2_h265_enc_class_init (GstV4l2H265EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_h265_enc_debug, "v4l2h265enc", 0,
"V4L2 H.265 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 H.265 Encoder",
"Codec/Encoder/Video",
"Encode H.265 video streams via V4L2 API",
"Viranjan Pagar <vpagar@nvidia.com>, Amit Pandya <apandya@nvidia.com>");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h265_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_h265_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_PROFILE,
g_param_spec_enum ("profile", "profile",
"Set profile for v4l2 encode",
GST_TYPE_V4L2_VID_ENC_PROFILE, DEFAULT_PROFILE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_INSERT_SPS_PPS,
g_param_spec_boolean ("insert-sps-pps",
"Insert H.265 SPS, PPS",
"Insert H.265 SPS, PPS at every IDR frame",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
g_param_spec_boolean ("insert-vui",
"Insert H.265 VUI",
"Insert H.265 VUI(Video Usability Information) in SPS",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_AUD,
g_param_spec_boolean ("insert-aud",
"Insert H.265 AUD",
"Insert H.265 Access Unit Delimiter(AUD)",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_BIT_PACKETIZATION,
g_param_spec_boolean ("bit-packetization", "Bit Based Packetization",
"Whether or not Packet size is based upon Number Of bits",
DEFAULT_BIT_PACKETIZATION,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_SLICE_HEADER_SPACING,
g_param_spec_uint64 ("slice-header-spacing", "Slice Header Spacing",
"Slice Header Spacing number of macroblocks/bits in one packet",
0, G_MAXUINT64, DEFAULT_SLICE_HEADER_SPACING,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_MV_META,
g_param_spec_boolean ("EnableMVBufferMeta",
"Enable Motion Vector Meta data",
"Enable Motion Vector Meta data for encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_TWO_PASS_CBR,
g_param_spec_boolean ("EnableTwopassCBR",
"Enable Two pass CBR",
"Enable two pass CBR while encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_BFRAMES,
g_param_spec_uint ("num-B-Frames",
"B Frames between two reference frames",
"Number of B Frames between two reference frames (not recommended)(Supported only on Xavier)",
0, MAX_NUM_B_FRAMES, DEFAULT_NUM_B_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder",
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_LOSSLESS_ENC,
g_param_spec_boolean ("enable-lossless",
"Enable Lossless encoding",
"Enable lossless encoding for YUV444",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
}
#endif
baseclass->codec_name = "H265";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_H265_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
//baseclass->level_cid = V4L2_CID_MPEG_VIDEO_H265_LEVEL;
baseclass->level_to_string = v4l2_level_to_string;
baseclass->level_from_string = v4l2_level_from_string;
baseclass->set_encoder_properties = set_v4l2_h265_encoder_properties;
}
/* Probing functions */
gboolean
gst_v4l2_is_h265_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_h265_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_H265_ENC,
"h265", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}
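/* One-time, thread-safe registration (via g_once_init_enter/leave) of the
 * GEnum type listing the selectable H.265 encoder profiles: Main, Main10 and
 * FREXT. */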
static GType
gst_v4l2_videnc_profile_get_type (void)
{
static volatile gsize profile = 0;
static const GEnumValue profile_type[] = {
{V4L2_MPEG_VIDEO_H265_PROFILE_MAIN,
"GST_V4L2_H265_VIDENC_MAIN_PROFILE", "Main"},
{V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10,
"GST_V4L2_H265_VIDENC_MAIN10_PROFILE", "Main10"},
{V4L2_MPEG_VIDEO_H265_PROFILE_FREXT,
"GST_V4L2_H265_VIDENC_FREXT_PROFILE", "FREXT"},
{0, NULL, NULL}
};
if (g_once_init_enter (&profile)) {
GType tmp =
g_enum_register_static ("GstV4L2VideoEncProfileType", profile_type);
g_once_init_leave (&profile, tmp);
}
return (GType) profile;
}
gboolean
gst_v4l2_h265_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_enc_slice_length_param param =
{ slice_length_type, slice_header_spacing };
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_SLICE_LENGTH_PARAM;
control.string = (gchar *) &param;
ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0) {
g_print ("Error while setting spacing and packetization\n");
return FALSE;
}
return TRUE;
}
gboolean
set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder)
{
GstV4l2H265Enc *self = GST_V4L2_H265_ENC (encoder);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (encoder);
if (!GST_V4L2_IS_OPEN (video_enc->v4l2output)) {
g_print ("V4L2 device is not open\n");
return FALSE;
}
if (self->insert_sps_pps) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_SPS_PPS_AT_IDR, 1)) {
g_print ("S_EXT_CTRLS for INSERT_SPS_PPS_AT_IDR failed\n");
return FALSE;
}
}
if (self->profile) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEO_H265_PROFILE, self->profile)) {
g_print ("S_EXT_CTRLS for H265_PROFILE failed\n");
return FALSE;
}
}
if (self->insert_vui) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, 1)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
}
}
if (self->extended_colorformat) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_EXTEDED_COLORFORMAT, 1)) {
g_print ("S_EXT_CTRLS for EXTENDED_COLORFORMAT failed\n");
return FALSE;
}
}
if (self->insert_aud) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_AUD, 1)) {
g_print ("S_EXT_CTRLS for INSERT_AUD failed\n");
return FALSE;
}
}
if (self->slice_header_spacing) {
enum v4l2_enc_slice_length_type slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_MBLK;
if (self->bit_packetization) {
slice_length_type = V4L2_ENC_SLICE_LENGTH_TYPE_BITS;
}
if (!gst_v4l2_h265_enc_slice_header_spacing (video_enc->v4l2output,
self->slice_header_spacing, slice_length_type)) {
g_print ("S_EXT_CTRLS for SLICE_LENGTH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableMVBufferMeta) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_METADATA_MV,
self->EnableMVBufferMeta)) {
g_print ("S_EXT_CTRLS for ENABLE_METADATA_MV failed\n");
return FALSE;
}
}
if (self->SliceIntraRefreshInterval) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM,
self->SliceIntraRefreshInterval)) {
g_print ("S_EXT_CTRLS for SLICE_INTRAREFRESH_PARAM failed\n");
return FALSE;
}
}
if (self->EnableTwopassCBR) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_TWO_PASS_CBR, 1)) {
g_print ("S_EXT_CTRLS for TWO_PASS_CBR failed\n");
return FALSE;
}
}
if (self->nBFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_BFRAMES,
self->nBFrames)) {
g_print ("S_EXT_CTRLS for NUM_BFRAMES failed\n");
return FALSE;
}
}
if (self->nRefFrames) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES,
self->nRefFrames)) {
g_print ("S_EXT_CTRLS for NUM_REFERENCE_FRAMES failed\n");
return FALSE;
}
}
if (self->enableLossless) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_ENABLE_LOSSLESS, self->enableLossless)) {
g_print ("S_EXT_CTRLS for ENABLE_LOSSLESS failed\n");
return FALSE;
}
}
return TRUE;
}
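/* Illustrative sketch (not part of the original source): how an application
 * might configure the properties installed above, assuming the Tegra path
 * (is_cuvid == FALSE) where they are registered, and assuming the element is
 * exposed under the factory name "nvv4l2h265enc" (the actual name depends on
 * how gst_v4l2_h265_enc_register() is invoked at plugin init):
 *
 *   #include <gst/gst.h>
 *
 *   static GstElement *
 *   make_h265_encoder (void)
 *   {
 *     GstElement *enc = gst_element_factory_make ("nvv4l2h265enc", NULL);
 *     if (enc == NULL)
 *       return NULL;
 *     g_object_set (enc,
 *         "insert-sps-pps", TRUE,           // SPS/PPS at every IDR
 *         "insert-vui", TRUE,               // VUI in SPS
 *         "EnableTwopassCBR", TRUE,         // two-pass CBR rate control
 *         "num-Ref-Frames", 2,              // reference frames for the encoder
 *         "SliceIntraRefreshInterval", 30,  // slice intra refresh period
 *         NULL);
 *     return enc;
 *   }
 */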

gst-v4l2/gstv4l2h265enc.h (new file)
@@ -0,0 +1,72 @@
/*
* Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_H265_ENC_H__
#define __GST_V4L2_H265_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_H265_ENC \
(gst_v4l2_h265_enc_get_type())
#define GST_V4L2_H265_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_H265_ENC,GstV4l2H265Enc))
#define GST_V4L2_H265_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_H265_ENC,GstV4l2H265EncClass))
#define GST_IS_V4L2_H265_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_H265_ENC))
#define GST_IS_V4L2_H265_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_H265_ENC))
typedef struct _GstV4l2H265Enc GstV4l2H265Enc;
typedef struct _GstV4l2H265EncClass GstV4l2H265EncClass;
struct _GstV4l2H265Enc
{
GstV4l2VideoEnc parent;
gboolean insert_sps_pps;
guint profile;
guint nBFrames;
guint nRefFrames;
gboolean insert_aud;
gboolean insert_vui;
gboolean extended_colorformat;
guint SliceIntraRefreshInterval;
gboolean EnableTwopassCBR;
gboolean bit_packetization;
guint32 slice_header_spacing;
gboolean EnableMVBufferMeta;
gboolean enableLossless;
};
struct _GstV4l2H265EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_h265_enc_get_type (void);
gboolean gst_v4l2_is_h265_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_h265_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_H265_ENC_H__ */

gst-v4l2/gstv4l2h26xparser.c (new file)
@@ -0,0 +1,924 @@
/*
* Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include "nalutils.h"
#include "gstv4l2h26xparser.h"
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbitreader.h>
#include <string.h>
#include <math.h>
GST_DEBUG_CATEGORY_STATIC (h26x_parser_debug);
#define GST_CAT_DEFAULT h26x_parser_debug
static gboolean initialized = FALSE;
#define INITIALIZE_DEBUG_CATEGORY \
if (!initialized) { \
GST_DEBUG_CATEGORY_INIT (h26x_parser_debug, "codecparsers_h26x", 0, \
"h26x parser library"); \
initialized = TRUE; \
}
/**** Default scaling_lists according to Table 7-2 *****/
static const guint8 default_4x4_intra[16] = {
6, 13, 13, 20, 20, 20, 28, 28, 28, 28, 32, 32,
32, 37, 37, 42
};
static const guint8 default_4x4_inter[16] = {
10, 14, 14, 20, 20, 20, 24, 24, 24, 24, 27, 27,
27, 30, 30, 34
};
static const guint8 default_8x8_intra[64] = {
6, 10, 10, 13, 11, 13, 16, 16, 16, 16, 18, 18,
18, 18, 18, 23, 23, 23, 23, 23, 23, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27,
27, 27, 27, 27, 27, 29, 29, 29, 29, 29, 29, 29, 31, 31, 31, 31, 31, 31, 33,
33, 33, 33, 33, 36, 36, 36, 36, 38, 38, 38, 40, 40, 42
};
static const guint8 default_8x8_inter[64] = {
9, 13, 13, 15, 13, 15, 17, 17, 17, 17, 19, 19,
19, 19, 19, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 24, 24, 24,
24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27, 27, 27, 27, 28,
28, 28, 28, 28, 30, 30, 30, 30, 32, 32, 32, 33, 33, 35
};
/***** Utils ****/
#define EXTENDED_SAR 255
static gboolean
h264_parse_nalu_header (H264NalUnit * nalu)
{
guint8 *data = nalu->data + nalu->offset;
if (nalu->size < 1)
return FALSE;
nalu->type = (data[0] & 0x1f);
nalu->ref_idc = (data[0] & 0x60) >> 5;
nalu->idr_pic_flag = (nalu->type == 5 ? 1 : 0);
nalu->header_bytes = 1;
nalu->extension_type = H264_NAL_EXTENSION_NONE;
GST_DEBUG ("Nal type %u, ref_idc %u", nalu->type, nalu->ref_idc);
return TRUE;
}
static gboolean
h264_sps_copy (H264SPS * dst_sps, const H264SPS * src_sps)
{
g_return_val_if_fail (dst_sps != NULL, FALSE);
g_return_val_if_fail (src_sps != NULL, FALSE);
h264_sps_clear (dst_sps);
*dst_sps = *src_sps;
return TRUE;
}
static gboolean
h264_parser_parse_scaling_list (NalReader * nr,
guint8 scaling_lists_4x4[6][16], guint8 scaling_lists_8x8[6][64],
const guint8 fallback_4x4_inter[16], const guint8 fallback_4x4_intra[16],
const guint8 fallback_8x8_inter[64], const guint8 fallback_8x8_intra[64],
guint8 n_lists)
{
guint i;
static const guint8 *default_lists[12] = {
default_4x4_intra, default_4x4_intra, default_4x4_intra,
default_4x4_inter, default_4x4_inter, default_4x4_inter,
default_8x8_intra, default_8x8_inter,
default_8x8_intra, default_8x8_inter,
default_8x8_intra, default_8x8_inter
};
GST_DEBUG ("parsing scaling lists");
for (i = 0; i < 12; i++) {
gboolean use_default = FALSE;
if (i < n_lists) {
guint8 scaling_list_present_flag;
READ_UINT8 (nr, scaling_list_present_flag, 1);
if (scaling_list_present_flag) {
guint8 *scaling_list;
guint size;
guint j;
guint8 last_scale, next_scale;
if (i < 6) {
scaling_list = scaling_lists_4x4[i];
size = 16;
} else {
scaling_list = scaling_lists_8x8[i - 6];
size = 64;
}
last_scale = 8;
next_scale = 8;
for (j = 0; j < size; j++) {
if (next_scale != 0) {
gint32 delta_scale;
READ_SE (nr, delta_scale);
next_scale = (last_scale + delta_scale) & 0xff;
}
if (j == 0 && next_scale == 0) {
/* Use default scaling lists (7.4.2.1.1.1) */
memcpy (scaling_list, default_lists[i], size);
break;
}
last_scale = scaling_list[j] =
(next_scale == 0) ? last_scale : next_scale;
}
} else
use_default = TRUE;
} else
use_default = TRUE;
if (use_default) {
switch (i) {
case 0:
memcpy (scaling_lists_4x4[0], fallback_4x4_intra, 16);
break;
case 1:
memcpy (scaling_lists_4x4[1], scaling_lists_4x4[0], 16);
break;
case 2:
memcpy (scaling_lists_4x4[2], scaling_lists_4x4[1], 16);
break;
case 3:
memcpy (scaling_lists_4x4[3], fallback_4x4_inter, 16);
break;
case 4:
memcpy (scaling_lists_4x4[4], scaling_lists_4x4[3], 16);
break;
case 5:
memcpy (scaling_lists_4x4[5], scaling_lists_4x4[4], 16);
break;
case 6:
memcpy (scaling_lists_8x8[0], fallback_8x8_intra, 64);
break;
case 7:
memcpy (scaling_lists_8x8[1], fallback_8x8_inter, 64);
break;
case 8:
memcpy (scaling_lists_8x8[2], scaling_lists_8x8[0], 64);
break;
case 9:
memcpy (scaling_lists_8x8[3], scaling_lists_8x8[1], 64);
break;
case 10:
memcpy (scaling_lists_8x8[4], scaling_lists_8x8[2], 64);
break;
case 11:
memcpy (scaling_lists_8x8[5], scaling_lists_8x8[3], 64);
break;
default:
break;
}
}
}
return TRUE;
error:
GST_WARNING ("error parsing scaling lists");
return FALSE;
}
H264NalParser *
h264_nal_parser_new (void)
{
H264NalParser *nalparser;
nalparser = g_slice_new0 (H264NalParser);
INITIALIZE_DEBUG_CATEGORY;
return nalparser;
}
void
h264_nal_parser_free (H264NalParser * nalparser)
{
guint i;
for (i = 0; i < H264_MAX_SPS_COUNT; i++)
h264_sps_clear (&nalparser->sps[i]);
g_slice_free (H264NalParser, nalparser);
nalparser = NULL;
}
H264ParserResult
h264_parser_identify_nalu_unchecked (H264NalParser * nalparser,
const guint8 * data, guint offset, gsize size, H264NalUnit * nalu)
{
gint off1;
memset (nalu, 0, sizeof (*nalu));
if (size < offset + 4) {
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSIZE_FORMAT
", offset %u", size, offset);
return H264_PARSER_ERROR;
}
off1 = scan_for_start_codes (data + offset, size - offset);
if (off1 < 0) {
GST_DEBUG ("No start code prefix in this buffer");
return H264_PARSER_NO_NAL;
}
if (offset + off1 == size - 1) {
GST_DEBUG ("Missing data to identify nal unit");
return H264_PARSER_ERROR;
}
nalu->sc_offset = offset + off1;
nalu->offset = offset + off1 + 3;
nalu->data = (guint8 *) data;
nalu->size = size - nalu->offset;
if (!h264_parse_nalu_header (nalu)) {
GST_WARNING ("error parsing \"NAL unit header\"");
nalu->size = 0;
return H264_PARSER_BROKEN_DATA;
}
nalu->valid = TRUE;
/* sc might have 2 or 3 0-bytes */
if (nalu->sc_offset > 0 && data[nalu->sc_offset - 1] == 00
&& (nalu->type == H264_NAL_SPS || nalu->type == H264_NAL_PPS
|| nalu->type == H264_NAL_AU_DELIMITER))
nalu->sc_offset--;
if (nalu->type == H264_NAL_SEQ_END ||
nalu->type == H264_NAL_STREAM_END) {
GST_DEBUG ("end-of-seq or end-of-stream nal found");
nalu->size = 1;
return H264_PARSER_OK;
}
return H264_PARSER_OK;
}
H264ParserResult
h264_parser_identify_nalu (H264NalParser * nalparser,
const guint8 * data, guint offset, gsize size, H264NalUnit * nalu)
{
H264ParserResult res;
gint off2;
res =
h264_parser_identify_nalu_unchecked (nalparser, data, offset, size,
nalu);
if (res != H264_PARSER_OK)
goto beach;
/* These two NALs are exactly 1 byte in size and are placed at the end of an
* AU; there is no need to wait for the following one */
if (nalu->type == H264_NAL_SEQ_END ||
nalu->type == H264_NAL_STREAM_END)
goto beach;
off2 = scan_for_start_codes (data + nalu->offset, size - nalu->offset);
if (off2 < 0) {
GST_DEBUG ("Nal start %d, No end found", nalu->offset);
return H264_PARSER_NO_NAL_END;
}
/* Mini performance improvement:
* We could have a way to store how many 0s were skipped to avoid
* parsing them again on the next NAL */
while (off2 > 0 && data[nalu->offset + off2 - 1] == 00)
off2--;
nalu->size = off2;
if (nalu->size < 2)
return H264_PARSER_BROKEN_DATA;
GST_DEBUG ("Complete nal found. Off: %d, Size: %d", nalu->offset, nalu->size);
beach:
return res;
}
H264ParserResult
h264_parser_parse_sps (H264NalParser * nalparser, H264NalUnit * nalu,
H264SPS * sps, gboolean parse_vui_params)
{
H264ParserResult res = h264_parse_sps (nalu, sps, parse_vui_params);
if (res == H264_PARSER_OK) {
GST_DEBUG ("adding sequence parameter set with id: %d to array", sps->id);
if (!h264_sps_copy (&nalparser->sps[sps->id], sps))
return H264_PARSER_ERROR;
nalparser->last_sps = &nalparser->sps[sps->id];
}
return res;
}
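/* Usage sketch (illustrative only, not part of the original source): typical
 * caller flow for this trimmed-down H.264 parser when scanning an Annex-B
 * buffer for an SPS; 'data' and 'size' are assumed to come from the caller.
 *
 *   H264NalParser *parser = h264_nal_parser_new ();
 *   H264NalUnit nalu;
 *   H264SPS sps;
 *   guint offset = 0;
 *
 *   while (h264_parser_identify_nalu (parser, data, offset, size, &nalu) ==
 *       H264_PARSER_OK) {
 *     if (nalu.type == H264_NAL_SPS &&
 *         h264_parser_parse_sps (parser, &nalu, &sps, FALSE) == H264_PARSER_OK)
 *       g_print ("SPS %d: %dx%d\n", sps.id, sps.width, sps.height);
 *     offset = nalu.offset + nalu.size;
 *   }
 *   h264_nal_parser_free (parser);
 */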
/* Parse seq_parameter_set_data() */
static gboolean
h264_parse_sps_data (NalReader * nr, H264SPS * sps,
gboolean parse_vui_params)
{
gint width, height;
guint subwc[] = { 1, 2, 2, 1 };
guint subhc[] = { 1, 2, 1, 1 };
memset (sps, 0, sizeof (*sps));
/* set default values for fields that might not be present in the bitstream
and have valid defaults */
sps->extension_type = H264_NAL_EXTENSION_NONE;
sps->chroma_format_idc = 1;
memset (sps->scaling_lists_4x4, 16, 96);
memset (sps->scaling_lists_8x8, 16, 384);
READ_UINT8 (nr, sps->profile_idc, 8);
READ_UINT8 (nr, sps->constraint_set0_flag, 1);
READ_UINT8 (nr, sps->constraint_set1_flag, 1);
READ_UINT8 (nr, sps->constraint_set2_flag, 1);
READ_UINT8 (nr, sps->constraint_set3_flag, 1);
READ_UINT8 (nr, sps->constraint_set4_flag, 1);
READ_UINT8 (nr, sps->constraint_set5_flag, 1);
/* skip reserved_zero_2bits */
if (!_skip (nr, 2))
goto error;
READ_UINT8 (nr, sps->level_idc, 8);
READ_UE_MAX (nr, sps->id, H264_MAX_SPS_COUNT - 1);
if (sps->profile_idc == 100 || sps->profile_idc == 110 ||
sps->profile_idc == 122 || sps->profile_idc == 244 ||
sps->profile_idc == 44 || sps->profile_idc == 83 ||
sps->profile_idc == 86 || sps->profile_idc == 118 ||
sps->profile_idc == 128) {
READ_UE_MAX (nr, sps->chroma_format_idc, 3);
if (sps->chroma_format_idc == 3)
READ_UINT8 (nr, sps->separate_colour_plane_flag, 1);
READ_UE_MAX (nr, sps->bit_depth_luma_minus8, 6);
READ_UE_MAX (nr, sps->bit_depth_chroma_minus8, 6);
READ_UINT8 (nr, sps->qpprime_y_zero_transform_bypass_flag, 1);
READ_UINT8 (nr, sps->scaling_matrix_present_flag, 1);
if (sps->scaling_matrix_present_flag) {
guint8 n_lists;
n_lists = (sps->chroma_format_idc != 3) ? 8 : 12;
if (!h264_parser_parse_scaling_list (nr,
sps->scaling_lists_4x4, sps->scaling_lists_8x8,
default_4x4_inter, default_4x4_intra,
default_8x8_inter, default_8x8_intra, n_lists))
goto error;
}
}
READ_UE_MAX (nr, sps->log2_max_frame_num_minus4, 12);
sps->max_frame_num = 1 << (sps->log2_max_frame_num_minus4 + 4);
READ_UE_MAX (nr, sps->pic_order_cnt_type, 2);
if (sps->pic_order_cnt_type == 0) {
READ_UE_MAX (nr, sps->log2_max_pic_order_cnt_lsb_minus4, 12);
} else if (sps->pic_order_cnt_type == 1) {
guint i;
READ_UINT8 (nr, sps->delta_pic_order_always_zero_flag, 1);
READ_SE (nr, sps->offset_for_non_ref_pic);
READ_SE (nr, sps->offset_for_top_to_bottom_field);
READ_UE_MAX (nr, sps->num_ref_frames_in_pic_order_cnt_cycle, 255);
for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
READ_SE (nr, sps->offset_for_ref_frame[i]);
}
READ_UE (nr, sps->num_ref_frames);
READ_UINT8 (nr, sps->gaps_in_frame_num_value_allowed_flag, 1);
READ_UE (nr, sps->pic_width_in_mbs_minus1);
READ_UE (nr, sps->pic_height_in_map_units_minus1);
READ_UINT8 (nr, sps->frame_mbs_only_flag, 1);
if (!sps->frame_mbs_only_flag)
READ_UINT8 (nr, sps->mb_adaptive_frame_field_flag, 1);
READ_UINT8 (nr, sps->direct_8x8_inference_flag, 1);
READ_UINT8 (nr, sps->frame_cropping_flag, 1);
if (sps->frame_cropping_flag) {
READ_UE (nr, sps->frame_crop_left_offset);
READ_UE (nr, sps->frame_crop_right_offset);
READ_UE (nr, sps->frame_crop_top_offset);
READ_UE (nr, sps->frame_crop_bottom_offset);
}
/* calculate ChromaArrayType */
if (!sps->separate_colour_plane_flag)
sps->chroma_array_type = sps->chroma_format_idc;
/* Calculate width and height */
width = (sps->pic_width_in_mbs_minus1 + 1);
width *= 16;
height = (sps->pic_height_in_map_units_minus1 + 1);
height *= 16 * (2 - sps->frame_mbs_only_flag);
GST_LOG ("initial width=%d, height=%d", width, height);
if (width < 0 || height < 0) {
GST_WARNING ("invalid width/height in SPS");
goto error;
}
sps->width = width;
sps->height = height;
if (sps->frame_cropping_flag) {
const guint crop_unit_x = subwc[sps->chroma_format_idc];
const guint crop_unit_y =
subhc[sps->chroma_format_idc] * (2 - sps->frame_mbs_only_flag);
width -= (sps->frame_crop_left_offset + sps->frame_crop_right_offset)
* crop_unit_x;
height -= (sps->frame_crop_top_offset + sps->frame_crop_bottom_offset)
* crop_unit_y;
sps->crop_rect_width = width;
sps->crop_rect_height = height;
sps->crop_rect_x = sps->frame_crop_left_offset * crop_unit_x;
sps->crop_rect_y = sps->frame_crop_top_offset * crop_unit_y;
GST_LOG ("crop_rectangle x=%u y=%u width=%u, height=%u", sps->crop_rect_x,
sps->crop_rect_y, width, height);
}
sps->fps_num_removed = 0;
sps->fps_den_removed = 1;
return TRUE;
error:
return FALSE;
}
H264ParserResult
h264_parse_sps (H264NalUnit * nalu, H264SPS * sps,
gboolean parse_vui_params)
{
NalReader nr;
INITIALIZE_DEBUG_CATEGORY;
GST_DEBUG ("parsing SPS");
init_nal (&nr, nalu->data + nalu->offset + nalu->header_bytes,
nalu->size - nalu->header_bytes);
if (!h264_parse_sps_data (&nr, sps, parse_vui_params))
goto error;
sps->valid = TRUE;
return H264_PARSER_OK;
error:
GST_WARNING ("error parsing \"Sequence parameter set\"");
sps->valid = FALSE;
return H264_PARSER_ERROR;
}
void
h264_sps_clear (H264SPS * sps)
{
g_return_if_fail (sps != NULL);
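/* Nothing to release here: this trimmed-down H264SPS keeps no dynamically
* allocated members (VUI/HRD data is not stored), so clearing is a no-op
* beyond the argument check. */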
}
/************************** H265 *****************************/
static gboolean
h265_parse_nalu_header (H265NalUnit * nalu)
{
guint8 *data = nalu->data + nalu->offset;
GstBitReader br;
if (nalu->size < 2)
return FALSE;
gst_bit_reader_init (&br, data, nalu->size - nalu->offset);
/* skip the forbidden_zero_bit */
gst_bit_reader_skip_unchecked (&br, 1);
nalu->type = gst_bit_reader_get_bits_uint8_unchecked (&br, 6);
nalu->layer_id = gst_bit_reader_get_bits_uint8_unchecked (&br, 6);
nalu->temporal_id_plus1 = gst_bit_reader_get_bits_uint8_unchecked (&br, 3);
nalu->header_bytes = 2;
return TRUE;
}
/****** Parsing functions *****/
static gboolean
h265_parse_profile_tier_level (H265ProfileTierLevel * ptl,
NalReader * nr, guint8 maxNumSubLayersMinus1)
{
guint i, j;
GST_DEBUG ("parsing \"ProfileTierLevel parameters\"");
READ_UINT8 (nr, ptl->profile_space, 2);
READ_UINT8 (nr, ptl->tier_flag, 1);
READ_UINT8 (nr, ptl->profile_idc, 5);
for (j = 0; j < 32; j++)
READ_UINT8 (nr, ptl->profile_compatibility_flag[j], 1);
READ_UINT8 (nr, ptl->progressive_source_flag, 1);
READ_UINT8 (nr, ptl->interlaced_source_flag, 1);
READ_UINT8 (nr, ptl->non_packed_constraint_flag, 1);
READ_UINT8 (nr, ptl->frame_only_constraint_flag, 1);
READ_UINT8 (nr, ptl->max_12bit_constraint_flag, 1);
READ_UINT8 (nr, ptl->max_10bit_constraint_flag, 1);
READ_UINT8 (nr, ptl->max_8bit_constraint_flag, 1);
READ_UINT8 (nr, ptl->max_422chroma_constraint_flag, 1);
READ_UINT8 (nr, ptl->max_420chroma_constraint_flag, 1);
READ_UINT8 (nr, ptl->max_monochrome_constraint_flag, 1);
READ_UINT8 (nr, ptl->intra_constraint_flag, 1);
READ_UINT8 (nr, ptl->one_picture_only_constraint_flag, 1);
READ_UINT8 (nr, ptl->lower_bit_rate_constraint_flag, 1);
READ_UINT8 (nr, ptl->max_14bit_constraint_flag, 1);
/* skip the reserved zero bits */
if (!_skip (nr, 34))
goto error;
READ_UINT8 (nr, ptl->level_idc, 8);
for (j = 0; j < maxNumSubLayersMinus1; j++) {
READ_UINT8 (nr, ptl->sub_layer_profile_present_flag[j], 1);
READ_UINT8 (nr, ptl->sub_layer_level_present_flag[j], 1);
}
if (maxNumSubLayersMinus1 > 0) {
for (i = maxNumSubLayersMinus1; i < 8; i++)
if (!_skip (nr, 2))
goto error;
}
for (i = 0; i < maxNumSubLayersMinus1; i++) {
if (ptl->sub_layer_profile_present_flag[i]) {
READ_UINT8 (nr, ptl->sub_layer_profile_space[i], 2);
READ_UINT8 (nr, ptl->sub_layer_tier_flag[i], 1);
READ_UINT8 (nr, ptl->sub_layer_profile_idc[i], 5);
for (j = 0; j < 32; j++)
READ_UINT8 (nr, ptl->sub_layer_profile_compatibility_flag[i][j], 1);
READ_UINT8 (nr, ptl->sub_layer_progressive_source_flag[i], 1);
READ_UINT8 (nr, ptl->sub_layer_interlaced_source_flag[i], 1);
READ_UINT8 (nr, ptl->sub_layer_non_packed_constraint_flag[i], 1);
READ_UINT8 (nr, ptl->sub_layer_frame_only_constraint_flag[i], 1);
if (!_skip (nr, 44))
goto error;
}
if (ptl->sub_layer_level_present_flag[i])
READ_UINT8 (nr, ptl->sub_layer_level_idc[i], 8);
}
return TRUE;
error:
GST_WARNING ("error parsing \"ProfileTierLevel Parameters\"");
return FALSE;
}
H265Parser *
h265_parser_new (void)
{
H265Parser *parser;
parser = g_slice_new0 (H265Parser);
INITIALIZE_DEBUG_CATEGORY;
return parser;
}
void
h265_parser_free (H265Parser * parser)
{
g_slice_free (H265Parser, parser);
parser = NULL;
}
H265ParserResult
h265_parser_identify_nalu_unchecked (H265Parser * parser,
const guint8 * data, guint offset, gsize size, H265NalUnit * nalu)
{
gint off1;
memset (nalu, 0, sizeof (*nalu));
if (size < offset + 4) {
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSIZE_FORMAT
", offset %u", size, offset);
return H265_PARSER_ERROR;
}
off1 = scan_for_start_codes (data + offset, size - offset);
if (off1 < 0) {
GST_DEBUG ("No start code prefix in this buffer");
return H265_PARSER_NO_NAL;
}
if (offset + off1 == size - 1) {
GST_DEBUG ("Missing data to identify nal unit");
return H265_PARSER_ERROR;
}
nalu->sc_offset = offset + off1;
/* sc might have 2 or 3 0-bytes */
if (nalu->sc_offset > 0 && data[nalu->sc_offset - 1] == 00)
nalu->sc_offset--;
nalu->offset = offset + off1 + 3;
nalu->data = (guint8 *) data;
nalu->size = size - nalu->offset;
if (!h265_parse_nalu_header (nalu)) {
GST_WARNING ("error parsing \"NAL unit header\"");
nalu->size = 0;
return H265_PARSER_BROKEN_DATA;
}
nalu->valid = TRUE;
if (nalu->type == H265_NAL_EOS || nalu->type == H265_NAL_EOB) {
GST_DEBUG ("end-of-seq or end-of-stream nal found");
nalu->size = 2;
return H265_PARSER_OK;
}
return H265_PARSER_OK;
}
H265ParserResult
h265_parser_identify_nalu (H265Parser * parser,
const guint8 * data, guint offset, gsize size, H265NalUnit * nalu)
{
H265ParserResult res;
gint off2;
res =
h265_parser_identify_nalu_unchecked (parser, data, offset, size,
nalu);
if (res != H265_PARSER_OK)
goto beach;
/* These two NALs are exactly 2 bytes in size and are placed at the end of an
* AU; there is no need to wait for the following one */
if (nalu->type == H265_NAL_EOS || nalu->type == H265_NAL_EOB)
goto beach;
off2 = scan_for_start_codes (data + nalu->offset, size - nalu->offset);
if (off2 < 0) {
GST_DEBUG ("Nal start %d, No end found", nalu->offset);
return H265_PARSER_NO_NAL_END;
}
/* Mini performance improvement:
* We could have a way to store how many 0s were skipped to avoid
* parsing them again on the next NAL */
while (off2 > 0 && data[nalu->offset + off2 - 1] == 00)
off2--;
nalu->size = off2;
if (nalu->size < 3)
return H265_PARSER_BROKEN_DATA;
GST_DEBUG ("Complete nal found. Off: %d, Size: %d", nalu->offset, nalu->size);
beach:
return res;
}
H265ParserResult
h265_parser_identify_nalu_hevc (H265Parser * parser,
const guint8 * data, guint offset, gsize size, guint8 nal_length_size,
H265NalUnit * nalu)
{
GstBitReader br;
memset (nalu, 0, sizeof (*nalu));
if (size < offset + nal_length_size) {
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSIZE_FORMAT
", offset %u", size, offset);
return H265_PARSER_ERROR;
}
size = size - offset;
gst_bit_reader_init (&br, data + offset, size);
nalu->size = gst_bit_reader_get_bits_uint32_unchecked (&br,
nal_length_size * 8);
nalu->sc_offset = offset;
nalu->offset = offset + nal_length_size;
if (size < nalu->size + nal_length_size) {
nalu->size = 0;
return H265_PARSER_NO_NAL_END;
}
nalu->data = (guint8 *) data;
if (!h265_parse_nalu_header (nalu)) {
GST_WARNING ("error parsing \"NAL unit header\"");
nalu->size = 0;
return H265_PARSER_BROKEN_DATA;
}
if (nalu->size < 2)
return H265_PARSER_BROKEN_DATA;
nalu->valid = TRUE;
return H265_PARSER_OK;
}
H265ParserResult
h265_parser_parse_sps (H265Parser * parser, H265NalUnit * nalu,
H265SPS * sps, gboolean parse_vui_params)
{
H265ParserResult res =
h265_parse_sps (parser, nalu, sps, parse_vui_params);
if (res == H265_PARSER_OK) {
GST_DEBUG ("adding sequence parameter set with id: %d to array", sps->id);
parser->sps[sps->id] = *sps;
parser->last_sps = &parser->sps[sps->id];
}
return res;
}
H265ParserResult
h265_parse_sps (H265Parser * parser, H265NalUnit * nalu,
H265SPS * sps, gboolean parse_vui_params)
{
NalReader nr;
guint8 vps_id;
guint i;
guint subwc[] = { 1, 2, 2, 1, 1 };
guint subhc[] = { 1, 2, 1, 1, 1 };
INITIALIZE_DEBUG_CATEGORY;
GST_DEBUG ("parsing SPS");
init_nal (&nr, nalu->data + nalu->offset + nalu->header_bytes,
nalu->size - nalu->header_bytes);
memset (sps, 0, sizeof (*sps));
READ_UINT8 (&nr, vps_id, 4);
READ_UINT8 (&nr, sps->max_sub_layers_minus1, 3);
READ_UINT8 (&nr, sps->temporal_id_nesting_flag, 1);
if (!h265_parse_profile_tier_level (&sps->profile_tier_level, &nr,
sps->max_sub_layers_minus1))
goto error;
READ_UE_MAX (&nr, sps->id, H265_MAX_SPS_COUNT - 1);
READ_UE_MAX (&nr, sps->chroma_format_idc, 3);
if (sps->chroma_format_idc == 3)
READ_UINT8 (&nr, sps->separate_colour_plane_flag, 1);
READ_UE_ALLOWED (&nr, sps->pic_width_in_luma_samples, 1, 16888);
READ_UE_ALLOWED (&nr, sps->pic_height_in_luma_samples, 1, 16888);
READ_UINT8 (&nr, sps->conformance_window_flag, 1);
if (sps->conformance_window_flag) {
READ_UE (&nr, sps->conf_win_left_offset);
READ_UE (&nr, sps->conf_win_right_offset);
READ_UE (&nr, sps->conf_win_top_offset);
READ_UE (&nr, sps->conf_win_bottom_offset);
}
READ_UE_MAX (&nr, sps->bit_depth_luma_minus8, 6);
READ_UE_MAX (&nr, sps->bit_depth_chroma_minus8, 6);
READ_UE_MAX (&nr, sps->log2_max_pic_order_cnt_lsb_minus4, 12);
READ_UINT8 (&nr, sps->sub_layer_ordering_info_present_flag, 1);
for (i =
(sps->sub_layer_ordering_info_present_flag ? 0 :
sps->max_sub_layers_minus1); i <= sps->max_sub_layers_minus1; i++) {
READ_UE_MAX (&nr, sps->max_dec_pic_buffering_minus1[i], 16);
READ_UE_MAX (&nr, sps->max_num_reorder_pics[i],
sps->max_dec_pic_buffering_minus1[i]);
READ_UE_MAX (&nr, sps->max_latency_increase_plus1[i], G_MAXUINT32 - 1);
}
/* setting default values if sps->sub_layer_ordering_info_present_flag is zero */
if (!sps->sub_layer_ordering_info_present_flag && sps->max_sub_layers_minus1) {
for (i = 0; i <= (guint)(sps->max_sub_layers_minus1 - 1); i++) {
sps->max_dec_pic_buffering_minus1[i] =
sps->max_dec_pic_buffering_minus1[sps->max_sub_layers_minus1];
sps->max_num_reorder_pics[i] =
sps->max_num_reorder_pics[sps->max_sub_layers_minus1];
sps->max_latency_increase_plus1[i] =
sps->max_latency_increase_plus1[sps->max_sub_layers_minus1];
}
}
/* The limits are calculated based on the profile_tier_level constraint
* in Annex-A: CtbLog2SizeY = 4 to 6 */
READ_UE_MAX (&nr, sps->log2_min_luma_coding_block_size_minus3, 3);
READ_UE_MAX (&nr, sps->log2_diff_max_min_luma_coding_block_size, 6);
READ_UE_MAX (&nr, sps->log2_min_transform_block_size_minus2, 3);
READ_UE_MAX (&nr, sps->log2_diff_max_min_transform_block_size, 3);
READ_UE_MAX (&nr, sps->max_transform_hierarchy_depth_inter, 4);
READ_UE_MAX (&nr, sps->max_transform_hierarchy_depth_intra, 4);
/* Calculate width and height */
sps->width = sps->pic_width_in_luma_samples;
sps->height = sps->pic_height_in_luma_samples;
if (sps->width < 0 || sps->height < 0) {
GST_WARNING ("invalid width/height in SPS");
goto error;
}
if (sps->conformance_window_flag) {
const guint crop_unit_x = subwc[sps->chroma_format_idc];
const guint crop_unit_y = subhc[sps->chroma_format_idc];
sps->crop_rect_width = sps->width -
(sps->conf_win_left_offset + sps->conf_win_right_offset) * crop_unit_x;
sps->crop_rect_height = sps->height -
(sps->conf_win_top_offset + sps->conf_win_bottom_offset) * crop_unit_y;
sps->crop_rect_x = sps->conf_win_left_offset * crop_unit_x;
sps->crop_rect_y = sps->conf_win_top_offset * crop_unit_y;
GST_LOG ("crop_rectangle x=%u y=%u width=%u, height=%u", sps->crop_rect_x,
sps->crop_rect_y, sps->crop_rect_width, sps->crop_rect_height);
}
sps->fps_num = 0;
sps->fps_den = 1;
sps->valid = TRUE;
return H265_PARSER_OK;
error:
GST_WARNING ("error parsing \"Sequence parameter set\"");
sps->valid = FALSE;
return H265_PARSER_ERROR;
}
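/* Usage sketch (illustrative only, not part of the original source): parsing
 * an SPS from a length-prefixed (hvc1/hev1 style) buffer with this
 * trimmed-down HEVC parser; the nal_length_size (4 here) normally comes from
 * the hvcC configuration record, and 'data'/'size' come from the caller.
 *
 *   H265Parser *parser = h265_parser_new ();
 *   H265NalUnit nalu;
 *   H265SPS sps;
 *
 *   if (h265_parser_identify_nalu_hevc (parser, data, 0, size, 4, &nalu) ==
 *       H265_PARSER_OK &&
 *       nalu.type == H265_NAL_SPS &&
 *       h265_parser_parse_sps (parser, &nalu, &sps, FALSE) == H265_PARSER_OK)
 *     g_print ("SPS %u: %dx%d\n", sps.id, sps.width, sps.height);
 *
 *   h265_parser_free (parser);
 */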

gst-v4l2/gstv4l2h26xparser.h (new file)
@@ -0,0 +1,462 @@
/*
* Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __H26X_PARSER_H__
#define __H26X_PARSER_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define H264_MAX_SPS_COUNT 32
typedef enum
{
H264_NAL_UNKNOWN = 0,
H264_NAL_SLICE = 1,
H264_NAL_SLICE_DPA = 2,
H264_NAL_SLICE_DPB = 3,
H264_NAL_SLICE_DPC = 4,
H264_NAL_SLICE_IDR = 5,
H264_NAL_SEI = 6,
H264_NAL_SPS = 7,
H264_NAL_PPS = 8,
H264_NAL_AU_DELIMITER = 9,
H264_NAL_SEQ_END = 10,
H264_NAL_STREAM_END = 11,
H264_NAL_FILLER_DATA = 12,
H264_NAL_SPS_EXT = 13,
H264_NAL_PREFIX_UNIT = 14,
H264_NAL_SUBSET_SPS = 15,
H264_NAL_DEPTH_SPS = 16,
H264_NAL_SLICE_AUX = 19,
H264_NAL_SLICE_EXT = 20,
H264_NAL_SLICE_DEPTH = 21
} H264NalUnitType;
typedef enum
{
H264_NAL_EXTENSION_NONE = 0,
H264_NAL_EXTENSION_SVC,
H264_NAL_EXTENSION_MVC,
} H264NalUnitExtensionType;
typedef enum
{
H264_PARSER_OK,
H264_PARSER_BROKEN_DATA,
H264_PARSER_BROKEN_LINK,
H264_PARSER_ERROR,
H264_PARSER_NO_NAL,
H264_PARSER_NO_NAL_END
} H264ParserResult;
typedef enum
{
H264_FRAME_PACKING_NONE = 6,
H264_FRAME_PACKING_CHECKERBOARD_INTERLEAVING = 0,
H264_FRAME_PACKING_COLUMN_INTERLEAVING = 1,
H264_FRAME_PACKING_ROW_INTERLEAVING = 2,
H264_FRAME_PACKING_SIDE_BY_SIDE = 3,
H264_FRMAE_PACKING_TOP_BOTTOM = 4,
H264_FRAME_PACKING_TEMPORAL_INTERLEAVING = 5
} H264FramePackingType;
typedef enum
{
H264_P_SLICE = 0,
H264_B_SLICE = 1,
H264_I_SLICE = 2,
H264_SP_SLICE = 3,
H264_SI_SLICE = 4,
H264_S_P_SLICE = 5,
H264_S_B_SLICE = 6,
H264_S_I_SLICE = 7,
H264_S_SP_SLICE = 8,
H264_S_SI_SLICE = 9
} H264SliceType;
typedef enum
{
H264_CT_TYPE_PROGRESSIVE = 0,
H264_CT_TYPE_INTERLACED = 1,
H264_CT_TYPE_UNKNOWN = 2,
} CtType;
typedef struct _H264NalParser H264NalParser;
typedef struct _H264NalUnit H264NalUnit;
typedef struct _H264SPS H264SPS;
struct _H264NalUnit
{
guint16 ref_idc;
guint16 type;
/* calculated values */
guint8 idr_pic_flag;
guint size;
guint offset;
guint sc_offset;
gboolean valid;
guint8 *data;
guint8 header_bytes;
guint8 extension_type;
};
struct _H264SPS
{
gint id;
guint8 profile_idc;
guint8 constraint_set0_flag;
guint8 constraint_set1_flag;
guint8 constraint_set2_flag;
guint8 constraint_set3_flag;
guint8 constraint_set4_flag;
guint8 constraint_set5_flag;
guint8 level_idc;
guint8 chroma_format_idc;
guint8 separate_colour_plane_flag;
guint8 bit_depth_luma_minus8;
guint8 bit_depth_chroma_minus8;
guint8 qpprime_y_zero_transform_bypass_flag;
guint8 scaling_matrix_present_flag;
guint8 scaling_lists_4x4[6][16];
guint8 scaling_lists_8x8[6][64];
guint8 log2_max_frame_num_minus4;
guint8 pic_order_cnt_type;
/* if pic_order_cnt_type == 0 */
guint8 log2_max_pic_order_cnt_lsb_minus4;
/* else if pic_order_cnt_type == 1 */
guint8 delta_pic_order_always_zero_flag;
gint32 offset_for_non_ref_pic;
gint32 offset_for_top_to_bottom_field;
guint8 num_ref_frames_in_pic_order_cnt_cycle;
gint32 offset_for_ref_frame[255];
guint32 num_ref_frames;
guint8 gaps_in_frame_num_value_allowed_flag;
guint32 pic_width_in_mbs_minus1;
guint32 pic_height_in_map_units_minus1;
guint8 frame_mbs_only_flag;
guint8 mb_adaptive_frame_field_flag;
guint8 direct_8x8_inference_flag;
guint8 frame_cropping_flag;
/* if frame_cropping_flag */
guint32 frame_crop_left_offset;
guint32 frame_crop_right_offset;
guint32 frame_crop_top_offset;
guint32 frame_crop_bottom_offset;
guint8 vui_parameters_present_flag;
/* calculated values */
guint8 chroma_array_type;
guint32 max_frame_num;
gint width, height;
gint crop_rect_width, crop_rect_height;
gint crop_rect_x, crop_rect_y;
gint fps_num_removed, fps_den_removed; /* FIXME: remove */
gboolean valid;
/* Subset SPS extensions */
guint8 extension_type;
};
struct _H264NalParser
{
/*< private >*/
H264SPS sps[H264_MAX_SPS_COUNT];
H264SPS *last_sps;
};
H264NalParser *h264_nal_parser_new (void);
H264ParserResult h264_parser_identify_nalu (H264NalParser *nalparser,
const guint8 *data, guint offset,
gsize size, H264NalUnit *nalu);
H264ParserResult h264_parser_identify_nalu_unchecked (H264NalParser *nalparser,
const guint8 *data, guint offset,
gsize size, H264NalUnit *nalu);
H264ParserResult h264_parser_parse_sps (H264NalParser *nalparser, H264NalUnit *nalu,
H264SPS *sps, gboolean parse_vui_params);
void h264_nal_parser_free (H264NalParser *nalparser);
H264ParserResult h264_parse_sps (H264NalUnit *nalu,
H264SPS *sps, gboolean parse_vui_params);
void h264_sps_clear (H264SPS *sps);
#define H265_MAX_SUB_LAYERS 8
#define H265_MAX_SPS_COUNT 16
typedef enum
{
H265_NAL_SLICE_TRAIL_N = 0,
H265_NAL_SLICE_TRAIL_R = 1,
H265_NAL_SLICE_TSA_N = 2,
H265_NAL_SLICE_TSA_R = 3,
H265_NAL_SLICE_STSA_N = 4,
H265_NAL_SLICE_STSA_R = 5,
H265_NAL_SLICE_RADL_N = 6,
H265_NAL_SLICE_RADL_R = 7,
H265_NAL_SLICE_RASL_N = 8,
H265_NAL_SLICE_RASL_R = 9,
H265_NAL_SLICE_BLA_W_LP = 16,
H265_NAL_SLICE_BLA_W_RADL = 17,
H265_NAL_SLICE_BLA_N_LP = 18,
H265_NAL_SLICE_IDR_W_RADL = 19,
H265_NAL_SLICE_IDR_N_LP = 20,
H265_NAL_SLICE_CRA_NUT = 21,
H265_NAL_VPS = 32,
H265_NAL_SPS = 33,
H265_NAL_PPS = 34,
H265_NAL_AUD = 35,
H265_NAL_EOS = 36,
H265_NAL_EOB = 37,
H265_NAL_FD = 38,
H265_NAL_PREFIX_SEI = 39,
H265_NAL_SUFFIX_SEI = 40
} H265NalUnitType;
typedef enum
{
H265_PARSER_OK,
H265_PARSER_BROKEN_DATA,
H265_PARSER_BROKEN_LINK,
H265_PARSER_ERROR,
H265_PARSER_NO_NAL,
H265_PARSER_NO_NAL_END
} H265ParserResult;
typedef struct _H265Parser H265Parser;
typedef struct _H265NalUnit H265NalUnit;
typedef struct _H265SPS H265SPS;
typedef struct _H265ProfileTierLevel H265ProfileTierLevel;
struct _H265NalUnit
{
guint8 type;
guint8 layer_id;
guint8 temporal_id_plus1;
/* calculated values */
guint size;
guint offset;
guint sc_offset;
gboolean valid;
guint8 *data;
guint8 header_bytes;
};
struct _H265ProfileTierLevel {
guint8 profile_space;
guint8 tier_flag;
guint8 profile_idc;
guint8 profile_compatibility_flag[32];
guint8 progressive_source_flag;
guint8 interlaced_source_flag;
guint8 non_packed_constraint_flag;
guint8 frame_only_constraint_flag;
guint8 max_12bit_constraint_flag;
guint8 max_10bit_constraint_flag;
guint8 max_8bit_constraint_flag;
guint8 max_422chroma_constraint_flag;
guint8 max_420chroma_constraint_flag;
guint8 max_monochrome_constraint_flag;
guint8 intra_constraint_flag;
guint8 one_picture_only_constraint_flag;
guint8 lower_bit_rate_constraint_flag;
guint8 max_14bit_constraint_flag;
guint8 level_idc;
guint8 sub_layer_profile_present_flag[6];
guint8 sub_layer_level_present_flag[6];
guint8 sub_layer_profile_space[6];
guint8 sub_layer_tier_flag[6];
guint8 sub_layer_profile_idc[6];
guint8 sub_layer_profile_compatibility_flag[6][32];
guint8 sub_layer_progressive_source_flag[6];
guint8 sub_layer_interlaced_source_flag[6];
guint8 sub_layer_non_packed_constraint_flag[6];
guint8 sub_layer_frame_only_constraint_flag[6];
guint8 sub_layer_level_idc[6];
};
struct _H265SPS
{
guint8 id;
guint8 max_sub_layers_minus1;
guint8 temporal_id_nesting_flag;
H265ProfileTierLevel profile_tier_level;
guint8 chroma_format_idc;
guint8 separate_colour_plane_flag;
guint16 pic_width_in_luma_samples;
guint16 pic_height_in_luma_samples;
guint8 conformance_window_flag;
/* if conformance_window_flag */
guint32 conf_win_left_offset;
guint32 conf_win_right_offset;
guint32 conf_win_top_offset;
guint32 conf_win_bottom_offset;
guint8 bit_depth_luma_minus8;
guint8 bit_depth_chroma_minus8;
guint8 log2_max_pic_order_cnt_lsb_minus4;
guint8 sub_layer_ordering_info_present_flag;
guint8 max_dec_pic_buffering_minus1[H265_MAX_SUB_LAYERS];
guint8 max_num_reorder_pics[H265_MAX_SUB_LAYERS];
guint8 max_latency_increase_plus1[H265_MAX_SUB_LAYERS];
guint8 log2_min_luma_coding_block_size_minus3;
guint8 log2_diff_max_min_luma_coding_block_size;
guint8 log2_min_transform_block_size_minus2;
guint8 log2_diff_max_min_transform_block_size;
guint8 max_transform_hierarchy_depth_inter;
guint8 max_transform_hierarchy_depth_intra;
guint8 scaling_list_enabled_flag;
/* if scaling_list_enabled_flag */
guint8 scaling_list_data_present_flag;
guint8 amp_enabled_flag;
guint8 sample_adaptive_offset_enabled_flag;
guint8 pcm_enabled_flag;
/* if pcm_enabled_flag */
guint8 pcm_sample_bit_depth_luma_minus1;
guint8 pcm_sample_bit_depth_chroma_minus1;
guint8 log2_min_pcm_luma_coding_block_size_minus3;
guint8 log2_diff_max_min_pcm_luma_coding_block_size;
guint8 pcm_loop_filter_disabled_flag;
guint8 num_short_term_ref_pic_sets;
guint8 long_term_ref_pics_present_flag;
/* if long_term_ref_pics_present_flag */
guint8 num_long_term_ref_pics_sps;
guint16 lt_ref_pic_poc_lsb_sps[32];
guint8 used_by_curr_pic_lt_sps_flag[32];
guint8 temporal_mvp_enabled_flag;
guint8 strong_intra_smoothing_enabled_flag;
guint8 vui_parameters_present_flag;
/* if vui_parameters_present_flag */
guint8 sps_extension_flag;
/* calculated values */
guint8 chroma_array_type;
gint width, height;
gint crop_rect_width, crop_rect_height;
gint crop_rect_x, crop_rect_y;
gint fps_num, fps_den;
gboolean valid;
};
struct _H265Parser
{
/*< private >*/
H265SPS sps[H265_MAX_SPS_COUNT];
H265SPS *last_sps;
};
H265Parser * h265_parser_new (void);
H265ParserResult h265_parser_identify_nalu (H265Parser * parser,
const guint8 * data,
guint offset,
gsize size,
H265NalUnit * nalu);
H265ParserResult h265_parser_identify_nalu_unchecked (H265Parser * parser,
const guint8 * data,
guint offset,
gsize size,
H265NalUnit * nalu);
H265ParserResult h265_parser_identify_nalu_hevc (H265Parser * parser,
const guint8 * data,
guint offset,
gsize size,
guint8 nal_length_size,
H265NalUnit * nalu);
H265ParserResult h265_parser_parse_sps (H265Parser * parser,
H265NalUnit * nalu,
H265SPS * sps,
gboolean parse_vui_params);
void h265_parser_free (H265Parser * parser);
H265ParserResult h265_parse_sps (H265Parser * parser,
H265NalUnit * nalu,
H265SPS * sps,
gboolean parse_vui_params);
G_END_DECLS
#endif

gst-v4l2/gstv4l2object.c (new file, 5062 lines; diff suppressed because it is too large)

gst-v4l2/gstv4l2object.h (new file)
@@ -0,0 +1,387 @@
/* GStreamer
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2object.h: base class for V4L2 elements
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_V4L2_OBJECT_H__
#define __GST_V4L2_OBJECT_H__
#include "linux/videodev2.h"
#ifdef HAVE_LIBV4L2
# include <libv4l2.h>
#endif
#include "v4l2-utils.h"
#ifdef USE_V4L2_TARGET_NV
#include "nvbufsurface.h"
#include "v4l2_nv_extensions.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstpushsrc.h>
#include <gst/video/video.h>
typedef struct _GstV4l2Object GstV4l2Object;
typedef struct _GstV4l2ObjectClassHelper GstV4l2ObjectClassHelper;
#include <gstv4l2bufferpool.h>
/* size of v4l2 buffer pool in streaming case */
#define GST_V4L2_MIN_BUFFERS 2
#ifdef USE_V4L2_TARGET_NV
#define V4L2_DEVICE_BASENAME_NVDEC "nvdec"
#define V4L2_DEVICE_BASENAME_NVENC "msenc"
#define V4L2_DEVICE_PATH_NVDEC "/dev/nvhost-nvdec"
#define V4L2_DEVICE_PATH_NVDEC_ALT "/dev/v4l2-nvdec"
#define V4L2_DEVICE_PATH_NVDEC_MCCOY "/dev/nvidia0"
#define V4L2_DEVICE_PATH_NVENC "/dev/nvhost-msenc"
#define V4L2_DEVICE_PATH_NVENC_ALT "/dev/v4l2-nvenc"
#define V4L2_DEVICE_PATH_TEGRA_INFO "/sys/firmware/devicetree/base/compatible"
#endif
/* max frame width/height */
#define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */
G_BEGIN_DECLS
#define GST_TYPE_V4L2_IO_MODE (gst_v4l2_io_mode_get_type ())
GType gst_v4l2_io_mode_get_type (void);
#ifdef USE_V4L2_TARGET_NV
#define GST_TYPE_V4L2_DEC_OUTPUT_IO_MODE (gst_v4l2_dec_output_io_mode_get_type ())
GType gst_v4l2_dec_output_io_mode_get_type (void);
#define GST_TYPE_V4L2_DEC_CAPTURE_IO_MODE (gst_v4l2_dec_capture_io_mode_get_type ())
GType gst_v4l2_dec_capture_io_mode_get_type (void);
#define GST_TYPE_V4L2_ENC_OUTPUT_IO_MODE (gst_v4l2_enc_output_io_mode_get_type ())
GType gst_v4l2_enc_output_io_mode_get_type (void);
#define GST_TYPE_V4L2_ENC_CAPTURE_IO_MODE (gst_v4l2_enc_capture_io_mode_get_type ())
GType gst_v4l2_enc_capture_io_mode_get_type (void);
#endif
#define GST_V4L2_OBJECT(obj) (GstV4l2Object *)(obj)
extern gboolean is_cuvid;
typedef enum {
GST_V4L2_IO_AUTO = 0,
GST_V4L2_IO_RW = 1,
GST_V4L2_IO_MMAP = 2,
GST_V4L2_IO_USERPTR = 3,
GST_V4L2_IO_DMABUF = 4,
GST_V4L2_IO_DMABUF_IMPORT = 5
} GstV4l2IOMode;
typedef gboolean (*GstV4l2GetInOutFunction) (GstV4l2Object * v4l2object, gint * input);
typedef gboolean (*GstV4l2SetInOutFunction) (GstV4l2Object * v4l2object, gint input);
typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
#define GST_V4L2_WIDTH(o) (GST_VIDEO_INFO_WIDTH (&(o)->info))
#define GST_V4L2_HEIGHT(o) (GST_VIDEO_INFO_HEIGHT (&(o)->info))
#define GST_V4L2_PIXELFORMAT(o) ((o)->fmtdesc->pixelformat)
#define GST_V4L2_FPS_N(o) (GST_VIDEO_INFO_FPS_N (&(o)->info))
#define GST_V4L2_FPS_D(o) (GST_VIDEO_INFO_FPS_D (&(o)->info))
/* simple check whether the device is open */
#define GST_V4L2_IS_OPEN(o) ((o)->video_fd > 0)
/* check whether the device is 'active' */
#define GST_V4L2_IS_ACTIVE(o) ((o)->active)
#define GST_V4L2_SET_ACTIVE(o) ((o)->active = TRUE)
#define GST_V4L2_SET_INACTIVE(o) ((o)->active = FALSE)
/* checks whether the current v4l2object has already been open()'ed or not */
#define GST_V4L2_CHECK_OPEN(v4l2object) \
if (!GST_V4L2_IS_OPEN(v4l2object)) \
{ \
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
(_("Device is not open.")), (NULL)); \
return FALSE; \
}
/* checks whether the current v4l2object is close()'ed or whether it is still open */
#define GST_V4L2_CHECK_NOT_OPEN(v4l2object) \
if (GST_V4L2_IS_OPEN(v4l2object)) \
{ \
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
(_("Device is open.")), (NULL)); \
return FALSE; \
}
/* checks whether we're out of capture mode or not */
#define GST_V4L2_CHECK_NOT_ACTIVE(v4l2object) \
if (GST_V4L2_IS_ACTIVE(v4l2object)) \
{ \
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
(NULL), ("Device is in streaming mode")); \
return FALSE; \
}
#ifdef USE_V4L2_TARGET_NV
/* Structure to hold the video info in order to modify the contents in case of
* the GST_VIDEO_FORMAT_I420_12LE format */
GstVideoFormatInfo video_info;
#endif
struct _GstV4l2Object {
GstElement * element;
GstObject * dbg_obj;
enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
/* the video device */
char *videodev;
#ifdef USE_V4L2_TARGET_NV
gboolean is_encode;
#endif
/* the video-device's file descriptor */
gint video_fd;
GstV4l2IOMode mode;
gboolean active;
gboolean streaming;
/* the current format */
struct v4l2_fmtdesc *fmtdesc;
struct v4l2_format format;
GstVideoInfo info;
GstVideoAlignment align;
/* Features */
gboolean need_video_meta;
gboolean has_alpha_component;
/* only used if the device supports MPLANE
* nb planes is meaning of v4l2 planes
* the gstreamer equivalent is gst_buffer_n_memory
*/
gint n_v4l2_planes;
/* We cache the frame duration if known */
GstClockTime duration;
/* if the MPLANE device supports both contiguous and non-contiguous planes,
* this allows selecting which one we want; when set we prefer the
* non-contiguous mode.
*/
gboolean prefered_non_contiguous;
/* This will be set if supported in decide_allocation. It can be used to
* calculate the minimum latency. */
guint32 min_buffers;
/* wanted mode */
GstV4l2IOMode req_mode;
/* optional pool */
GstBufferPool *pool;
/* the video device's capabilities */
struct v4l2_capability vcap;
/* opened device specific capabilities */
guint32 device_caps;
/* lists... */
GSList *formats; /* list of available capture formats */
GstCaps *probed_caps;
GList *colors;
GList *norms;
GList *channels;
GData *controls;
/* properties */
v4l2_std_id tv_norm;
gchar *channel;
gulong frequency;
GstStructure *extra_controls;
gboolean keep_aspect;
GValue *par;
#ifdef USE_V4L2_TARGET_NV
gboolean enableMVBufferMeta;
gboolean Enable_frame_type_reporting;
gboolean Enable_error_check;
gboolean Enable_headers;
gint ProcessedFrames;
gboolean open_mjpeg_block;
gboolean capture_plane_stopped;
GCond cplane_stopped_cond;
GMutex cplane_stopped_lock;
guint sei_payload_size;
void* sei_payload;
#endif
/* funcs */
GstV4l2GetInOutFunction get_in_out_func;
GstV4l2SetInOutFunction set_in_out_func;
GstV4l2UpdateFpsFunction update_fps_func;
/* syscalls */
gint (*fd_open) (gint fd, gint v4l2_flags);
gint (*close) (gint fd);
gint (*dup) (gint fd);
gint (*ioctl) (gint fd, gulong request, ...);
gssize (*read) (gint fd, gpointer buffer, gsize n);
gpointer (*mmap) (gpointer start, gsize length, gint prot, gint flags,
gint fd, off_t offset);
gint (*munmap) (gpointer _start, gsize length);
/* Quirks */
/* Skips interlacing probes */
gboolean never_interlaced;
/* Allows skipping the initial format read through G_FMT. Some devices
* simply fail if you don't call S_FMT first. (ex: M2M decoders) */
gboolean no_initial_format;
/* Avoid any try_fmt probe. This is used by v4l2src to speed up start-up time
* on slow USB firmwares. When this is set, gst_v4l2_set_format() will modify
* the caps to reflect what was negotiated during fixation */
gboolean skip_try_fmt_probes;
};
struct _GstV4l2ObjectClassHelper {
/* probed devices */
GList *devices;
};
GType gst_v4l2_object_get_type (void);
#define V4L2_STD_OBJECT_PROPS \
PROP_DEVICE, \
PROP_DEVICE_NAME, \
PROP_DEVICE_FD, \
PROP_FLAGS, \
PROP_BRIGHTNESS, \
PROP_CONTRAST, \
PROP_SATURATION, \
PROP_HUE, \
PROP_TV_NORM, \
PROP_IO_MODE, \
PROP_OUTPUT_IO_MODE, \
PROP_CAPTURE_IO_MODE, \
PROP_EXTRA_CONTROLS, \
PROP_PIXEL_ASPECT_RATIO, \
PROP_FORCE_ASPECT_RATIO
/* create/destroy */
GstV4l2Object* gst_v4l2_object_new (GstElement * element,
GstObject * dbg_obj,
enum v4l2_buf_type type,
const char * default_device,
GstV4l2GetInOutFunction get_in_out_func,
GstV4l2SetInOutFunction set_in_out_func,
GstV4l2UpdateFpsFunction update_fps_func);
void gst_v4l2_object_destroy (GstV4l2Object * v4l2object);
/* properties */
void gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
const char * default_device);
void gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class);
#ifdef USE_V4L2_TARGET_NV
void gst_v4l2_object_install_m2m_dec_iomode_properties_helper (GObjectClass * gobject_class);
void gst_v4l2_object_install_m2m_enc_iomode_properties_helper (GObjectClass * gobject_class);
#endif
gboolean gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
guint prop_id,
const GValue * value,
GParamSpec * pspec);
gboolean gst_v4l2_object_get_property_helper (GstV4l2Object *v4l2object,
guint prop_id, GValue * value,
GParamSpec * pspec);
/* open/close */
gboolean gst_v4l2_object_open (GstV4l2Object * v4l2object);
gboolean gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other);
gboolean gst_v4l2_object_close (GstV4l2Object * v4l2object);
/* probing */
GstCaps* gst_v4l2_object_get_all_caps (void);
GstCaps* gst_v4l2_object_get_raw_caps (void);
GstCaps* gst_v4l2_object_get_codec_caps (void);
gint gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo,
gint plane, gint stride);
gboolean gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error);
gboolean gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error);
gboolean gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps);
gboolean gst_v4l2_object_unlock (GstV4l2Object * v4l2object);
gboolean gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object);
gboolean gst_v4l2_object_stop (GstV4l2Object * v4l2object);
GstCaps * gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter);
GstCaps * gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter);
gboolean gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info);
gboolean gst_v4l2_object_set_crop (GstV4l2Object * obj);
gboolean gst_v4l2_object_decide_allocation (GstV4l2Object * v4l2object, GstQuery * query);
gboolean gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query);
GstStructure * gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc);
/* TODO Move to proper namespace */
/* open/close the device */
gboolean gst_v4l2_open (GstV4l2Object * v4l2object);
gboolean gst_v4l2_dup (GstV4l2Object * v4l2object, GstV4l2Object * other);
gboolean gst_v4l2_close (GstV4l2Object * v4l2object);
/* norm/input/output */
gboolean gst_v4l2_get_norm (GstV4l2Object * v4l2object, v4l2_std_id * norm);
gboolean gst_v4l2_set_norm (GstV4l2Object * v4l2object, v4l2_std_id norm);
gboolean gst_v4l2_get_input (GstV4l2Object * v4l2object, gint * input);
gboolean gst_v4l2_set_input (GstV4l2Object * v4l2object, gint input);
gboolean gst_v4l2_get_output (GstV4l2Object * v4l2object, gint * output);
gboolean gst_v4l2_set_output (GstV4l2Object * v4l2object, gint output);
/* frequency control */
gboolean gst_v4l2_get_frequency (GstV4l2Object * v4l2object, gint tunernum, gulong * frequency);
gboolean gst_v4l2_set_frequency (GstV4l2Object * v4l2object, gint tunernum, gulong frequency);
gboolean gst_v4l2_signal_strength (GstV4l2Object * v4l2object, gint tunernum, gulong * signal);
/* attribute control */
gboolean gst_v4l2_get_attribute (GstV4l2Object * v4l2object, int attribute, int * value);
gboolean gst_v4l2_set_attribute (GstV4l2Object * v4l2object, int attribute, const int value);
gboolean gst_v4l2_set_controls (GstV4l2Object * v4l2object, GstStructure * controls);
#ifdef USE_V4L2_TARGET_NV
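/* Helper used by the NV encoder/decoder code to set a single MPEG-class
 * extended control on the open device, e.g. (as in
 * set_v4l2_h265_encoder_properties):
 *
 *   if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
 *           V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, 1))
 *     return FALSE;
 */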
gboolean set_v4l2_video_mpeg_class (GstV4l2Object * v4l2object, guint label,
gint params);
#endif
G_END_DECLS
#endif /* __GST_V4L2_OBJECT_H__ */

gst-v4l2/gstv4l2videodec.c (new file, 2977 lines; diff suppressed because it is too large)

gst-v4l2/gstv4l2videodec.h (new file)
@@ -0,0 +1,132 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VIDEO_DEC_H__
#define __GST_V4L2_VIDEO_DEC_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideodecoder.h>
#include <gst/video/gstvideometa.h>
#include <gstv4l2object.h>
#include <gstv4l2bufferpool.h>
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VIDEO_DEC \
(gst_v4l2_video_dec_get_type())
#define GST_V4L2_VIDEO_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VIDEO_DEC,GstV4l2VideoDec))
#define GST_V4L2_VIDEO_DEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VIDEO_DEC,GstV4l2VideoDecClass))
#define GST_IS_V4L2_VIDEO_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VIDEO_DEC))
#define GST_IS_V4L2_VIDEO_DEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VIDEO_DEC))
/* The structures are renamed as the name conflicts with the
* OSS v4l2 library structures. */
#ifdef USE_V4L2_TARGET_NV
#define GstV4l2VideoDec GstNvV4l2VideoDec
#define GstV4l2VideoDecClass GstNvV4l2VideoDecClass
#define LOOP_COUNT_TO_WAIT_FOR_DQEVENT 6
#define WAIT_TIME_PER_LOOP_FOR_DQEVENT 100*1000
#define VP8_START_BYTE_0 0x9D
#define VP8_START_BYTE_1 0x01
#define VP9_START_BYTE_0 0x49
#define VP9_START_BYTE_1 0x83
#define VP9_START_BYTE_2 0x42
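/* The VP8 values above are the first two bytes of the VP8 keyframe start code
* (0x9d 0x01 0x2a); the VP9 values are the VP9 frame sync code (0x49 0x83
* 0x42). They appear to be used together with the valid_vpx flag below to
* sanity-check incoming VPx bitstreams. */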
#endif
typedef struct _GstV4l2VideoDec GstV4l2VideoDec;
typedef struct _GstV4l2VideoDecClass GstV4l2VideoDecClass;
struct _GstV4l2VideoDec
{
GstVideoDecoder parent;
/* < private > */
GstV4l2Object *v4l2output;
GstV4l2Object *v4l2capture;
/* pads */
GstCaps *probed_srccaps;
GstCaps *probed_sinkcaps;
/* State */
GstVideoCodecState *input_state;
gboolean active;
GstFlowReturn output_flow;
guint64 frame_num;
#ifdef USE_V4L2_TARGET_NV
GHashTable* hash_pts_systemtime;
gdouble buffer_in_time;
guint64 decoded_picture_cnt;
guint32 skip_frames;
gboolean idr_received;
guint32 drop_frame_interval;
guint32 num_extra_surfaces;
gboolean is_drc;
gboolean disable_dpb;
gboolean enable_full_frame;
gboolean enable_frame_type_reporting;
gboolean enable_error_check;
gboolean enable_max_performance;
gboolean set_format;
guint32 cudadec_mem_type;
guint32 cudadec_gpu_id;
guint32 cudadec_num_surfaces;
gboolean cudadec_low_latency;
gboolean extract_sei_type5_data;
gchar *sei_uuid_string;
gdouble rate;
guint32 cap_buf_dynamic_allocation;
guint32 current_width;
guint32 current_height;
guint32 old_width;
guint32 old_height;
gboolean valid_vpx;
#endif
};
struct _GstV4l2VideoDecClass
{
GstVideoDecoderClass parent_class;
gchar *default_device;
};
GType gst_v4l2_video_dec_get_type (void);
gboolean gst_v4l2_is_video_dec (GstCaps * sink_caps, GstCaps * src_caps);
#ifdef USE_V4L2_TARGET_NV
gboolean set_v4l2_controls (GstV4l2VideoDec *self);
#endif
void gst_v4l2_video_dec_register (GstPlugin * plugin,
const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_VIDEO_DEC_H__ */

2746
gst-v4l2/gstv4l2videoenc.c Normal file

File diff suppressed because it is too large

153
gst-v4l2/gstv4l2videoenc.h Normal file

@@ -0,0 +1,153 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association.
* Author: ayaka <ayaka@soulik.info>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VIDEO_ENC_H__
#define __GST_V4L2_VIDEO_ENC_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideoencoder.h>
#include <gst/video/gstvideometa.h>
#include <gstv4l2object.h>
#include <gstv4l2bufferpool.h>
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VIDEO_ENC \
(gst_v4l2_video_enc_get_type())
#define GST_V4L2_VIDEO_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VIDEO_ENC,GstV4l2VideoEnc))
#define GST_V4L2_VIDEO_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VIDEO_ENC,GstV4l2VideoEncClass))
#define GST_IS_V4L2_VIDEO_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VIDEO_ENC))
#define GST_IS_V4L2_VIDEO_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VIDEO_ENC))
#define GST_V4L2_VIDEO_ENC_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_V4L2_VIDEO_ENC, GstV4l2VideoEncClass))
typedef struct _GstV4l2VideoEnc GstV4l2VideoEnc;
typedef struct _GstV4l2VideoEncClass GstV4l2VideoEncClass;
struct _GstV4l2VideoEnc
{
GstVideoEncoder parent;
#ifdef USE_V4L2_TARGET_NV
guint32 ratecontrol;
guint32 bitrate;
guint32 peak_bitrate;
guint32 idrinterval;
guint32 iframeinterval;
guint32 quant_i_frames;
guint32 quant_p_frames;
guint32 quant_b_frames;
guint32 MinQpI;
guint32 MaxQpI;
guint32 MinQpP;
guint32 MaxQpP;
guint32 MinQpB;
guint32 MaxQpB;
guint32 constQpI;
guint32 constQpP;
guint32 constQpB;
guint32 IInitQP;
guint32 PInitQP;
guint32 BInitQP;
gboolean set_qpRange;
guint32 hw_preset_level;
guint virtual_buffer_size;
gboolean measure_latency;
gboolean ratecontrol_enable;
gboolean force_idr;
gboolean force_intra;
gboolean maxperf_enable;
FILE *tracing_file_enc;
GQueue *got_frame_pt;
guint32 cudaenc_gpu_id;
guint32 cudaenc_preset_id;
guint32 cudaenc_tuning_info_id;
gboolean slice_output;
GstVideoCodecFrame *best_prev;
GstClockTime buf_pts_prev;
gdouble buffer_in_time;
GHashTable* hash_pts_systemtime;
gboolean copy_meta;
#endif
/* < private > */
GstV4l2Object *v4l2output;
GstV4l2Object *v4l2capture;
/* pads */
GstCaps *probed_srccaps;
GstCaps *probed_sinkcaps;
/* State */
GstVideoCodecState *input_state;
gboolean active;
gboolean processing;
GstFlowReturn output_flow;
};
struct _GstV4l2VideoEncClass
{
GstVideoEncoderClass parent_class;
gchar *default_device;
const char *codec_name;
guint32 profile_cid;
const gchar *(*profile_to_string) (gint v4l2_profile);
gint (*profile_from_string) (const gchar * profile);
#ifdef USE_V4L2_TARGET_NV
gboolean (*set_encoder_properties) (GstVideoEncoder * encoder);
gboolean (*set_video_encoder_properties) (GstVideoEncoder * encoder);
#endif
guint32 level_cid;
const gchar *(*level_to_string) (gint v4l2_level);
gint (*level_from_string) (const gchar * level);
#ifdef USE_V4L2_TARGET_NV
void (*force_IDR) (GstV4l2VideoEnc *);
#endif
};
GType gst_v4l2_video_enc_get_type (void);
gboolean gst_v4l2_is_video_enc (GstCaps * sink_caps, GstCaps * src_caps,
GstCaps * codec_caps);
void gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
const char *codec, const gchar * basename, const gchar * device_path,
GstCaps * sink_caps, GstCaps * codec_caps, GstCaps * src_caps);
#ifdef USE_V4L2_TARGET_NV
void set_encoder_src_caps (GstVideoEncoder *encoder, GstCaps *input_caps);
gboolean is_drc (GstVideoEncoder *encoder, GstCaps *input_caps);
gboolean reconfigure_fps (GstVideoEncoder *encoder, GstCaps *input_caps, guint label);
#endif
G_END_DECLS
#endif /* __GST_V4L2_VIDEO_ENC_H__ */

198
gst-v4l2/gstv4l2vp8enc.c Normal file

@@ -0,0 +1,198 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2vp8enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_vp8_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_vp8_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-vp8, profile=(string) { 0, 1, 2, 3 }");
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
#ifdef USE_V4L2_TARGET_NV
PROP_ENABLE_HEADER,
#endif
/* TODO */
};
#define gst_v4l2_vp8_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2Vp8Enc, gst_v4l2_vp8_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_vp8_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp8Enc *self = GST_V4L2_VP8_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
self->EnableHeaders = g_value_get_boolean (value);
video_enc->v4l2capture->Enable_headers = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static void
gst_v4l2_vp8_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp8Enc *self = GST_V4L2_VP8_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
g_value_set_boolean (value, self->EnableHeaders);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "0"))
v4l2_profile = 0;
else if (g_str_equal (profile, "1"))
v4l2_profile = 1;
else if (g_str_equal (profile, "2"))
v4l2_profile = 2;
else if (g_str_equal (profile, "3"))
v4l2_profile = 3;
else
GST_WARNING ("Unsupported profile string '%s'", profile);
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case 0:
return "0";
case 1:
return "1";
case 2:
return "2";
case 3:
return "3";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static void
gst_v4l2_vp8_enc_init (GstV4l2Vp8Enc * self)
{
}
static void
gst_v4l2_vp8_enc_class_init (GstV4l2Vp8EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_vp8_enc_debug, "v4l2vp8enc", 0,
"V4L2 VP8 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 VP8 Encoder",
"Codec/Encoder/Video",
"Encode VP8 video streams via V4L2 API",
"Nicolas Dufresne <nicolas.dufresne@collabora.com");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp8_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp8_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_ENABLE_HEADER,
g_param_spec_boolean ("enable-headers",
"Enable VP8 headers",
"Enable VP8 file and frame headers, if enabled, dump elementary stream",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
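/* Like any GObject property, this can also be set programmatically, e.g.
 * (illustrative only, 'encoder' being an instance of this element):
 *   g_object_set (encoder, "enable-headers", TRUE, NULL);
 */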
#endif
baseclass->codec_name = "VP8";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_VPX_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
}
/* Probing functions */
gboolean
gst_v4l2_is_vp8_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_vp8_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_VP8_ENC,
"vp8", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}

63
gst-v4l2/gstv4l2vp8enc.h Normal file

@@ -0,0 +1,63 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VP8_ENC_H__
#define __GST_V4L2_VP8_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VP8_ENC \
(gst_v4l2_vp8_enc_get_type())
#define GST_V4L2_VP8_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VP8_ENC,GstV4l2Vp8Enc))
#define GST_V4L2_VP8_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VP8_ENC,GstV4l2Vp8EncClass))
#define GST_IS_V4L2_VP8_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VP8_ENC))
#define GST_IS_V4L2_VP8_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VP8_ENC))
typedef struct _GstV4l2Vp8Enc GstV4l2Vp8Enc;
typedef struct _GstV4l2Vp8EncClass GstV4l2Vp8EncClass;
struct _GstV4l2Vp8Enc
{
GstV4l2VideoEnc parent;
#ifdef USE_V4L2_TARGET_NV
gboolean EnableHeaders;
#endif
};
struct _GstV4l2Vp8EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_vp8_enc_get_type (void);
gboolean gst_v4l2_is_vp8_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_vp8_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_VP8_ENC_H__ */

197
gst-v4l2/gstv4l2vp9enc.c Normal file

@@ -0,0 +1,197 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2object.h"
#include "gstv4l2vp9enc.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_vp9_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_vp9_enc_debug
static GstStaticCaps src_template_caps =
GST_STATIC_CAPS ("video/x-vp9, profile=(string) { 0, 1, 2, 3 }");
enum
{
PROP_0,
V4L2_STD_OBJECT_PROPS,
#ifdef USE_V4L2_TARGET_NV
PROP_ENABLE_HEADER,
#endif
/* TODO */
};
#define gst_v4l2_vp9_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2Vp9Enc, gst_v4l2_vp9_enc, GST_TYPE_V4L2_VIDEO_ENC);
static void
gst_v4l2_vp9_enc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp9Enc *self = GST_V4L2_VP9_ENC (object);
GstV4l2VideoEnc *video_enc = GST_V4L2_VIDEO_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
self->EnableHeaders = g_value_get_boolean (value);
video_enc->v4l2capture->Enable_headers = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static void
gst_v4l2_vp9_enc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
/* TODO */
#ifdef USE_V4L2_TARGET_NV
GstV4l2Vp9Enc *self = GST_V4L2_VP9_ENC (object);
switch (prop_id) {
case PROP_ENABLE_HEADER:
g_value_set_boolean (value, self->EnableHeaders);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
#endif
}
static gint
v4l2_profile_from_string (const gchar * profile)
{
gint v4l2_profile = -1;
if (g_str_equal (profile, "0"))
v4l2_profile = 0;
else if (g_str_equal (profile, "1"))
v4l2_profile = 1;
else if (g_str_equal (profile, "2"))
v4l2_profile = 2;
else if (g_str_equal (profile, "3"))
v4l2_profile = 3;
else
GST_WARNING ("Unsupported profile string '%s'", profile);
return v4l2_profile;
}
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
switch (v4l2_profile) {
case 0:
return "0";
case 1:
return "1";
case 2:
return "2";
case 3:
return "3";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
}
return NULL;
}
static void
gst_v4l2_vp9_enc_init (GstV4l2Vp9Enc * self)
{
}
static void
gst_v4l2_vp9_enc_class_init (GstV4l2Vp9EncClass * klass)
{
GstElementClass *element_class;
GObjectClass *gobject_class;
GstV4l2VideoEncClass *baseclass;
parent_class = g_type_class_peek_parent (klass);
element_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
baseclass = (GstV4l2VideoEncClass *) (klass);
GST_DEBUG_CATEGORY_INIT (gst_v4l2_vp9_enc_debug, "v4l2vp9enc", 0,
"V4L2 VP9 Encoder");
gst_element_class_set_static_metadata (element_class,
"V4L2 VP9 Encoder",
"Codec/Encoder/Video",
"Encode VP9 video streams via V4L2 API",
"Nicolas Dufresne <nicolas.dufresne@collabora.com");
gobject_class->set_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp9_enc_set_property);
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_v4l2_vp9_enc_get_property);
#ifdef USE_V4L2_TARGET_NV
g_object_class_install_property (gobject_class, PROP_ENABLE_HEADER,
g_param_spec_boolean ("enable-headers",
"Enable VP9 headers",
"Enable VP9 file and frame headers, if enabled, dump elementary stream",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
#endif
baseclass->codec_name = "VP9";
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_VPX_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
}
/* Probing functions */
gboolean
gst_v4l2_is_vp9_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
return gst_v4l2_is_video_enc (sink_caps, src_caps,
gst_static_caps_get (&src_template_caps));
}
void
gst_v4l2_vp9_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_VP9_ENC,
"vp9", basename, device_path, sink_caps,
gst_static_caps_get (&src_template_caps), src_caps);
}

63
gst-v4l2/gstv4l2vp9enc.h Normal file

@@ -0,0 +1,63 @@
/*
* Copyright (C) 2017 Collabora Inc.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_VP9_ENC_H__
#define __GST_V4L2_VP9_ENC_H__
#include <gst/gst.h>
#include "gstv4l2videoenc.h"
G_BEGIN_DECLS
#define GST_TYPE_V4L2_VP9_ENC \
(gst_v4l2_vp9_enc_get_type())
#define GST_V4L2_VP9_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VP9_ENC,GstV4l2Vp9Enc))
#define GST_V4L2_VP9_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VP9_ENC,GstV4l2Vp9EncClass))
#define GST_IS_V4L2_VP9_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VP9_ENC))
#define GST_IS_V4L2_VP9_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VP9_ENC))
typedef struct _GstV4l2Vp9Enc GstV4l2Vp9Enc;
typedef struct _GstV4l2Vp9EncClass GstV4l2Vp9EncClass;
struct _GstV4l2Vp9Enc
{
GstV4l2VideoEnc parent;
#ifdef USE_V4L2_TARGET_NV
gboolean EnableHeaders;
#endif
};
struct _GstV4l2Vp9EncClass
{
GstV4l2VideoEncClass parent_class;
};
GType gst_v4l2_vp9_enc_get_type (void);
gboolean gst_v4l2_is_vp9_enc (GstCaps * sink_caps, GstCaps * src_caps);
void gst_v4l2_vp9_enc_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);
G_END_DECLS
#endif /* __GST_V4L2_VP9_ENC_H__ */

296
gst-v4l2/nalutils.c Normal file

@@ -0,0 +1,296 @@
/*
* Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include "nalutils.h"
/* Compute Ceil(Log2(v)) */
/* Derived from branchless code for integer log2(v) from:
<http://graphics.stanford.edu/~seander/bithacks.html#IntegerLog> */
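/* Illustrative values: ceil_log2(5) == 3, ceil_log2(8) == 3, ceil_log2(16) == 4.
 * Note that this variant decrements v first, so ceil_log2(1) == 1. */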
guint
ceil_log2 (guint32 v)
{
guint r, shift;
v--;
r = (v > 0xFFFF) << 4;
v >>= r;
shift = (v > 0xFF) << 3;
v >>= shift;
r |= shift;
shift = (v > 0xF) << 2;
v >>= shift;
r |= shift;
shift = (v > 0x3) << 1;
v >>= shift;
r |= shift;
r |= (v >> 1);
return r + 1;
}
/****** Nal parser ******/
void
init_nal (NalReader * nr, const guint8 * data, guint size)
{
nr->data = data;
nr->size = size;
nr->n_epb = 0;
nr->byte = 0;
nr->bits_in_cache = 0;
/* fill with something other than 0 to detect emulation prevention bytes */
nr->first_byte = 0xff;
nr->cache = 0xff;
}
gboolean
_read (NalReader * nr, guint nbits)
{
if (G_UNLIKELY (nr->byte * 8 + (nbits - nr->bits_in_cache) > nr->size * 8)) {
GST_DEBUG ("Can not read %u bits, bits in cache %u, Byte * 8 %u, size in "
"bits %u", nbits, nr->bits_in_cache, nr->byte * 8, nr->size * 8);
return FALSE;
}
while (nr->bits_in_cache < nbits) {
guint8 byte;
gboolean check_three_byte;
check_three_byte = TRUE;
next_byte:
if (G_UNLIKELY (nr->byte >= nr->size))
return FALSE;
byte = nr->data[nr->byte++];
/* check if the byte is an emulation_prevention_three_byte */
if (check_three_byte && byte == 0x03 && nr->first_byte == 0x00 &&
((nr->cache & 0xff) == 0)) {
/* next byte goes unconditionally to the cache, even if it's 0x03 */
check_three_byte = FALSE;
nr->n_epb++;
goto next_byte;
}
nr->cache = (nr->cache << 8) | nr->first_byte;
nr->first_byte = byte;
nr->bits_in_cache += 8;
}
return TRUE;
}
/* Skips the specified number of bits. This is only suitable for a
cacheable number of bits */
gboolean
_skip (NalReader * nr, guint nbits)
{
g_assert (nbits <= 8 * sizeof (nr->cache));
if (G_UNLIKELY (!_read (nr, nbits)))
return FALSE;
nr->bits_in_cache -= nbits;
return TRUE;
}
/* Generic version to skip any number of bits */
gboolean
_skip_long (NalReader * nr, guint nbits)
{
/* Leave out enough bits in the cache once we are finished */
const guint skip_size = 4 * sizeof (nr->cache);
guint remaining = nbits;
nbits %= skip_size;
while (remaining > 0) {
if (!_skip (nr, nbits))
return FALSE;
remaining -= nbits;
nbits = skip_size;
}
return TRUE;
}
guint
_get_pos (const NalReader * nr)
{
return nr->byte * 8 - nr->bits_in_cache;
}
guint
_get_remaining (const NalReader * nr)
{
return (nr->size - nr->byte) * 8 + nr->bits_in_cache;
}
guint
_get_epb_count (const NalReader * nr)
{
return nr->n_epb;
}
#define _READ_BITS(bits) \
gboolean \
_get_bits_uint##bits (NalReader *nr, guint##bits *val, guint nbits) \
{ \
guint shift; \
\
if (!_read (nr, nbits)) \
return FALSE; \
\
/* bring the required bits down and truncate */ \
shift = nr->bits_in_cache - nbits; \
*val = nr->first_byte >> shift; \
\
*val |= nr->cache << (8 - shift); \
/* mask out required bits */ \
if (nbits < bits) \
*val &= ((guint##bits)1 << nbits) - 1; \
\
nr->bits_in_cache = shift; \
\
return TRUE; \
} \
_READ_BITS (8);
_READ_BITS (16);
_READ_BITS (32);
#define _PEEK_BITS(bits) \
gboolean \
_peek_bits_uint##bits (const NalReader *nr, guint##bits *val, guint nbits) \
{ \
NalReader tmp; \
\
tmp = *nr; \
return _get_bits_uint##bits (&tmp, val, nbits); \
}
_PEEK_BITS (8);
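/* ue(v): unsigned Exp-Golomb code as used in H.264/H.265. The decoder counts
 * the leading zero bits (i), consumes the terminating 1, reads i more bits as
 * 'value' and returns (1 << i) - 1 + value. Worked example: the bit string
 * 0 0 1 0 1 has i = 2 and value = 0b01 = 1, giving codeNum = 3 + 1 = 4. */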
gboolean
_get_ue (NalReader * nr, guint32 * val)
{
guint i = 0;
guint8 bit;
guint32 value;
if (G_UNLIKELY (!_get_bits_uint8 (nr, &bit, 1)))
return FALSE;
while (bit == 0) {
i++;
if (G_UNLIKELY (!_get_bits_uint8 (nr, &bit, 1)))
return FALSE;
}
if (G_UNLIKELY (i > 31))
return FALSE;
if (G_UNLIKELY (!_get_bits_uint32 (nr, &value, i)))
return FALSE;
*val = (1 << i) - 1 + value;
return TRUE;
}
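/* se(v): signed Exp-Golomb code. The unsigned codeNum k maps to
 * 0, 1, -1, 2, -2, ... i.e. odd k -> (k / 2) + 1 and even k -> -(k / 2),
 * which is exactly what the branches below implement. */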
gboolean
_get_se (NalReader * nr, gint32 * val)
{
guint32 value;
if (G_UNLIKELY (!_get_ue (nr, &value)))
return FALSE;
if (value % 2)
*val = (value / 2) + 1;
else
*val = -(value / 2);
return TRUE;
}
gboolean
_is_byte_aligned (NalReader * nr)
{
if (nr->bits_in_cache != 0)
return FALSE;
return TRUE;
}
gboolean
_has_more_data (NalReader * nr)
{
NalReader nr_tmp;
guint remaining, nbits;
guint8 rbsp_stop_one_bit, zero_bits;
remaining = _get_remaining (nr);
if (remaining == 0)
return FALSE;
nr_tmp = *nr;
nr = &nr_tmp;
/* The spec defines that more_rbsp_data() searches for the last bit
equal to 1, and that it is the rbsp_stop_one_bit. Subsequent bits
until byte boundary is reached shall be zero.
This means that more_rbsp_data() is FALSE if the next bit is 1
and the remaining bits until byte boundary are zero. One way to
be sure that this bit was the very last one is to check that every other
bit after we reach the byte boundary is also set to zero.
Otherwise, if the next bit is 0 or if there are non-zero bits
afterwards, then we have more_rbsp_data() */
if (!_get_bits_uint8 (nr, &rbsp_stop_one_bit, 1))
return FALSE;
if (!rbsp_stop_one_bit)
return TRUE;
nbits = --remaining % 8;
while (remaining > 0) {
if (!_get_bits_uint8 (nr, &zero_bits, nbits))
return FALSE;
if (zero_bits != 0)
return TRUE;
remaining -= nbits;
nbits = 8;
}
return FALSE;
}
/*********** end of nal parser ***************/
gint
scan_for_start_codes (const guint8 * data, guint size)
{
GstByteReader br;
gst_byte_reader_init (&br, data, size);
/* NALU not empty, so we can at least expect 1 (even 2) bytes following sc */
return gst_byte_reader_masked_scan_uint32 (&br, 0xffffff00, 0x00000100,
0, size);
}

170
gst-v4l2/nalutils.h Normal file
View File

@@ -0,0 +1,170 @@
/*
* Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbitreader.h>
#include <string.h>
guint ceil_log2 (guint32 v);
typedef struct
{
const guint8 *data;
guint size;
guint n_epb; /* Number of emulation prevention bytes */
guint byte; /* Byte position */
guint bits_in_cache; /* bitpos in the cache of next bit */
guint8 first_byte;
guint64 cache; /* cached bytes */
} NalReader;
G_GNUC_INTERNAL
void init_nal (NalReader * nr, const guint8 * data, guint size);
G_GNUC_INTERNAL
gboolean _read (NalReader * nr, guint nbits);
G_GNUC_INTERNAL
gboolean _skip (NalReader * nr, guint nbits);
G_GNUC_INTERNAL
gboolean _skip_long (NalReader * nr, guint nbits);
G_GNUC_INTERNAL
guint _get_pos (const NalReader * nr);
G_GNUC_INTERNAL
guint _get_remaining (const NalReader * nr);
G_GNUC_INTERNAL
guint _get_epb_count (const NalReader * nr);
G_GNUC_INTERNAL
gboolean _is_byte_aligned (NalReader * nr);
G_GNUC_INTERNAL
gboolean _has_more_data (NalReader * nr);
#define _READ_BITS_H(bits) \
G_GNUC_INTERNAL \
gboolean _get_bits_uint##bits (NalReader *nr, guint##bits *val, guint nbits)
_READ_BITS_H (8);
_READ_BITS_H (16);
_READ_BITS_H (32);
#define _PEEK_BITS_H(bits) \
G_GNUC_INTERNAL \
gboolean _peek_bits_uint##bits (const NalReader *nr, guint##bits *val, guint nbits)
_PEEK_BITS_H (8);
G_GNUC_INTERNAL
gboolean _get_ue (NalReader * nr, guint32 * val);
G_GNUC_INTERNAL
gboolean _get_se (NalReader * nr, gint32 * val);
#define CHECK_ALLOWED_MAX(val, max) { \
if (val > max) { \
GST_WARNING ("value greater than max. value: %d, max %d", \
val, max); \
goto error; \
} \
}
#define CHECK_ALLOWED(val, min, max) { \
if (val < min || val > max) { \
GST_WARNING ("value not in allowed range. value: %d, range %d-%d", \
val, min, max); \
goto error; \
} \
}
#define READ_UINT8(nr, val, nbits) { \
if (!_get_bits_uint8 (nr, &val, nbits)) { \
GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
goto error; \
} \
}
#define READ_UINT16(nr, val, nbits) { \
if (!_get_bits_uint16 (nr, &val, nbits)) { \
GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
goto error; \
} \
}
#define READ_UINT32(nr, val, nbits) { \
if (!_get_bits_uint32 (nr, &val, nbits)) { \
GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
goto error; \
} \
}
#define READ_UINT64(nr, val, nbits) { \
if (!_get_bits_uint64 (nr, &val, nbits)) { \
GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
goto error; \
} \
}
#define READ_UE(nr, val) { \
if (!_get_ue (nr, &val)) { \
GST_WARNING ("failed to read UE"); \
goto error; \
} \
}
#define READ_UE_ALLOWED(nr, val, min, max) { \
guint32 tmp; \
READ_UE (nr, tmp); \
CHECK_ALLOWED (tmp, min, max); \
val = tmp; \
}
#define READ_UE_MAX(nr, val, max) { \
guint32 tmp; \
READ_UE (nr, tmp); \
CHECK_ALLOWED_MAX (tmp, max); \
val = tmp; \
}
#define READ_SE(nr, val) { \
if (!_get_se (nr, &val)) { \
GST_WARNING ("failed to read SE"); \
goto error; \
} \
}
#define READ_SE_ALLOWED(nr, val, min, max) { \
gint32 tmp; \
READ_SE (nr, tmp); \
CHECK_ALLOWED (tmp, min, max); \
val = tmp; \
}
G_GNUC_INTERNAL
gint scan_for_start_codes (const guint8 * data, guint size);

141
gst-v4l2/sei_parse.c Normal file

@@ -0,0 +1,141 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include <stdint.h>
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <glib.h>
#define UUID_SIZE 16
#define USER_DATA_UNREGISTERED_TYPE 5
gboolean check_uuid(uint8_t *stream, char *sei_uuid_string);
uint8_t* parse_sei_unit(uint8_t * bs_ptr, guint *size, char *sei_uuid_string);
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size, char *sei_uuid_string);
gboolean check_uuid(uint8_t *stream, char *sei_uuid_string)
{
char uuid_string[UUID_SIZE] = {0};
uint32_t size = snprintf (uuid_string, UUID_SIZE, "%s", stream);
if (size == (UUID_SIZE-1))
{
if (!strncmp (uuid_string, sei_uuid_string, (UUID_SIZE-1)))
return TRUE;
else
return FALSE;
}
else
return FALSE;
}
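/* Parses a single SEI message starting at bs_ptr: reads payloadType, the
 * 0xFF-extended payloadSize and the 16-byte UUID. For user_data_unregistered
 * (type 5) payloads whose UUID matches sei_uuid_string, returns a newly
 * malloc'd copy of the payload (UUID stripped) with 00 00 03 emulation
 * prevention bytes removed; returns NULL otherwise. */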
uint8_t* parse_sei_unit(uint8_t * bs_ptr, guint *size, char *sei_uuid_string)
{
int payload_type = 0;
int payload_size = 0;
uint8_t* payload = NULL;
int i;
/* printf("found a SEI NAL unit!\n"); */
payload_type = *bs_ptr++;
while (payload_size % 0xFF == 0)
{
payload_size += *bs_ptr++;
}
//printf("payload_type = %i payload_size = %i\n", payload_type, payload_size);
if (!check_uuid (bs_ptr, sei_uuid_string))
{
//printf ("Expected UUID not found\n");
bs_ptr += (payload_size - UUID_SIZE);
return NULL;
}
else
{
bs_ptr += UUID_SIZE;
}
*size = payload_size;
if (payload_type == USER_DATA_UNREGISTERED_TYPE)
{
payload = (uint8_t*)malloc((payload_size - UUID_SIZE)*sizeof(uint8_t));
for (i = 0; i < (payload_size - UUID_SIZE); i++)
{
payload[i] = *bs_ptr;
// drop emulation prevention bytes
if ((*(bs_ptr) == 0x03)
&& (*(bs_ptr - 1) == 0x00)
&& (*(bs_ptr - 2) == 0x00))
{
i--;
}
bs_ptr++;
}
return payload;
}
else
{
return NULL;
}
}
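/* Scans an Annex-B buffer for a 00 00 00 01 start code followed by an H.264
 * SEI NAL unit (type 0x06) and hands each one to parse_sei_unit(). On the
 * first matching UUID the extracted payload is returned and *payload_size is
 * set to its length (SEI payload size minus the 16-byte UUID). */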
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size, char *sei_uuid_string)
{
if (sei_uuid_string == NULL)
return NULL;
int checklen = 0;
unsigned int sei_payload_size = 0;
uint8_t *bs_ptr = bs;
uint8_t *bs_ptr_end = bs + size;
uint8_t *payload = NULL;
while (bs_ptr_end > bs_ptr)
{
if (checklen < 2 && *bs_ptr++ == 0x00)
checklen++;
else if (checklen == 2 && *bs_ptr++ == 0x00)
checklen++;
else if (checklen == 3 && *bs_ptr++ == 0x01)
checklen++;
else if (checklen == 4 && *bs_ptr++ == 0x06)
{
payload = parse_sei_unit(bs_ptr, &sei_payload_size, sei_uuid_string);
checklen = 0;
if (payload != NULL)
{
*payload_size = (sei_payload_size - 16);
return payload;
}
else
{
continue;
}
}
else
checklen = 0;
}
return NULL;
}

202
gst-v4l2/v4l2-utils.c Normal file

@@ -0,0 +1,202 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "v4l2-utils.h"
/**************************/
/* Common device iterator */
/**************************/
#ifdef HAVE_GUDEV
#include <gudev/gudev.h>
struct _GstV4l2GUdevIterator
{
GstV4l2Iterator parent;
GList *devices;
GUdevDevice *device;
GUdevClient *client;
};
GstV4l2Iterator *
gst_v4l2_iterator_new (void)
{
static const gchar *subsystems[] = { "video4linux", NULL };
struct _GstV4l2GUdevIterator *it;
it = g_slice_new0 (struct _GstV4l2GUdevIterator);
it->client = g_udev_client_new (subsystems);
it->devices = g_udev_client_query_by_subsystem (it->client, "video4linux");
return (GstV4l2Iterator *) it;
}
gboolean
gst_v4l2_iterator_next (GstV4l2Iterator * _it)
{
struct _GstV4l2GUdevIterator *it = (struct _GstV4l2GUdevIterator *) _it;
const gchar *device_name;
if (it->device)
g_object_unref (it->device);
it->device = NULL;
it->parent.device_path = NULL;
it->parent.device_name = NULL;
if (it->devices == NULL)
return FALSE;
it->device = it->devices->data;
it->devices = g_list_delete_link (it->devices, it->devices);
device_name = g_udev_device_get_property (it->device, "ID_V4L_PRODUCT");
if (!device_name)
device_name = g_udev_device_get_property (it->device, "ID_MODEL_ENC");
if (!device_name)
device_name = g_udev_device_get_property (it->device, "ID_MODEL");
it->parent.device_path = g_udev_device_get_device_file (it->device);
it->parent.device_name = device_name;
it->parent.sys_path = g_udev_device_get_sysfs_path (it->device);
return TRUE;
}
void
gst_v4l2_iterator_free (GstV4l2Iterator * _it)
{
struct _GstV4l2GUdevIterator *it = (struct _GstV4l2GUdevIterator *) _it;
g_list_free_full (it->devices, g_object_unref);
gst_object_unref (it->client);
g_slice_free (struct _GstV4l2GUdevIterator, it);
}
#else /* No GUDEV */
struct _GstV4l2FsIterator
{
GstV4l2Iterator parent;
gint base_idx;
gint video_idx;
gchar *device;
};
GstV4l2Iterator *
gst_v4l2_iterator_new (void)
{
struct _GstV4l2FsIterator *it;
it = g_slice_new0 (struct _GstV4l2FsIterator);
it->base_idx = 0;
it->video_idx = -1;
it->device = NULL;
return (GstV4l2Iterator *) it;
}
gboolean
gst_v4l2_iterator_next (GstV4l2Iterator * _it)
{
struct _GstV4l2FsIterator *it = (struct _GstV4l2FsIterator *) _it;
static const gchar *dev_base[] = { "/dev/video", "/dev/v4l2/video", NULL };
gchar *device = NULL;
g_free ((gchar *) it->parent.device_path);
it->parent.device_path = NULL;
while (device == NULL) {
it->video_idx++;
if (it->video_idx >= 64) {
it->video_idx = 0;
it->base_idx++;
}
if (dev_base[it->base_idx] == NULL) {
it->video_idx = 0;
break;
}
device = g_strdup_printf ("%s%d", dev_base[it->base_idx], it->video_idx);
if (g_file_test (device, G_FILE_TEST_EXISTS)) {
it->parent.device_path = device;
break;
}
g_free (device);
device = NULL;
}
return it->parent.device_path != NULL;
}
void
gst_v4l2_iterator_free (GstV4l2Iterator * _it)
{
struct _GstV4l2FsIterator *it = (struct _GstV4l2FsIterator *) _it;
g_free ((gchar *) it->parent.device_path);
g_slice_free (struct _GstV4l2FsIterator, it);
}
#endif
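/* Typical iteration pattern (illustrative sketch only):
 *
 *   GstV4l2Iterator *it = gst_v4l2_iterator_new ();
 *   while (gst_v4l2_iterator_next (it)) {
 *     // it->device_path names the next /dev/video* node; device_name and
 *     // sys_path may be NULL when the non-GUdev fallback is used.
 *     g_print ("%s\n", it->device_path);
 *   }
 *   gst_v4l2_iterator_free (it);
 */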
void
gst_v4l2_clear_error (GstV4l2Error * v4l2err)
{
if (v4l2err) {
g_clear_error (&v4l2err->error);
g_free (v4l2err->dbg_message);
v4l2err->dbg_message = NULL;
}
}
void
gst_v4l2_error (gpointer element, GstV4l2Error * v4l2err)
{
GError *error;
if (!v4l2err || !v4l2err->error)
return;
error = v4l2err->error;
if (error->message)
GST_WARNING_OBJECT (element, "error: %s", error->message);
if (v4l2err->dbg_message)
GST_WARNING_OBJECT (element, "error: %s", v4l2err->dbg_message);
gst_element_message_full (GST_ELEMENT (element), GST_MESSAGE_ERROR,
error->domain, error->code, error->message, v4l2err->dbg_message,
v4l2err->file, v4l2err->func, v4l2err->line);
error->message = NULL;
v4l2err->dbg_message = NULL;
gst_v4l2_clear_error (v4l2err);
}

78
gst-v4l2/v4l2-utils.h Normal file

@@ -0,0 +1,78 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __V4L2_UTILS_H__
#define __V4L2_UTILS_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_V4L2_ERROR_INIT { NULL, NULL }
#define GST_V4L2_ERROR(v4l2err,domain,code,msg,dbg) \
{\
if (v4l2err) { \
gchar *_msg = _gst_element_error_printf msg; \
v4l2err->error = g_error_new_literal (GST_##domain##_ERROR, \
GST_##domain##_ERROR_##code, _msg); \
g_free (_msg); \
v4l2err->dbg_message = _gst_element_error_printf dbg; \
v4l2err->file = __FILE__; \
v4l2err->func = GST_FUNCTION; \
v4l2err->line = __LINE__; \
} \
}
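/* Typical use (illustrative sketch only): a caller keeps a GstV4l2Error on the
 * stack, fills it at the failure site and later posts or clears it:
 *
 *   GstV4l2Error error = GST_V4L2_ERROR_INIT;
 *   ...
 *   GST_V4L2_ERROR (&error, RESOURCE, SETTINGS,
 *       ("Failed to configure device"), ("ioctl returned %d", errno));
 *   ...
 *   gst_v4l2_error (element, &error);   // posts the message and clears it
 */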
typedef struct _GstV4l2Iterator GstV4l2Iterator;
typedef struct _GstV4l2Error GstV4l2Error;
struct _GstV4l2Iterator
{
const gchar *device_path;
const gchar *device_name;
const gchar *sys_path;
};
struct _GstV4l2Error
{
GError *error;
gchar *dbg_message;
const gchar *file;
const gchar *func;
gint line;
};
GstV4l2Iterator * gst_v4l2_iterator_new (void);
gboolean gst_v4l2_iterator_next (GstV4l2Iterator *it);
void gst_v4l2_iterator_free (GstV4l2Iterator *it);
const gchar * gst_v4l2_iterator_get_device_path (GstV4l2Iterator *it);
const gchar * gst_v4l2_iterator_get_device_name (GstV4l2Iterator *it);
const gchar * gst_v4l2_iterator_get_sys_path (GstV4l2Iterator *it);
void gst_v4l2_clear_error (GstV4l2Error *error);
void gst_v4l2_error (gpointer element, GstV4l2Error *error);
G_END_DECLS
#endif /* __V4L2_UTILS_H__ */

1166
gst-v4l2/v4l2_calls.c Normal file

File diff suppressed because it is too large

858
nvbufsurface.h Normal file

@@ -0,0 +1,858 @@
/*
* Copyright (c) 2019-2023, NVIDIA CORPORATION. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*/
/**
* @file nvbufsurface.h
* <b>NvBufSurface Interface </b>
*
* This file specifies the NvBufSurface management API.
*
* The NvBufSurface API provides methods to allocate / deallocate, map / unmap
* and copy batched buffers.
*/
/**
* @defgroup ds_nvbuf_api Buffer Management API module
*
* This section describes types and functions of NvBufSurface application
* programming interface.
*
*/
#ifndef NVBUFSURFACE_H_
#define NVBUFSURFACE_H_
#include <stdint.h>
#include <stdbool.h>
#ifdef __cplusplus
extern "C"
{
#endif
/** @defgroup ds_aaa NvBufSurface Types and Functions
* Defines types and functions of \ref NvBufSurface application
* programming interface.
* @ingroup ds_nvbuf_api
* @{ */
/** Defines the default padding length for reserved fields of structures. */
#define STRUCTURE_PADDING 4
/** Defines the maximum number of planes. */
#define NVBUF_MAX_PLANES 4
/**
* Defines the default values for chroma subsampling.
* The default value matches JPEG/MPEG use cases.
*/
#define NVBUFSURFACE_CHROMA_SUBSAMPLING_HORIZ_DEFAULT 0
#define NVBUFSURFACE_CHROMA_SUBSAMPLING_VERT_DEFAULT 1
#define NVBUFSURFACE_CHROMA_SUBSAMPLING_PARAMS_DEFAULT \
{ \
NVBUFSURFACE_CHROMA_SUBSAMPLING_HORIZ_DEFAULT, \
NVBUFSURFACE_CHROMA_SUBSAMPLING_VERT_DEFAULT \
}
/**
* Defines mapping types of NvBufSurface.
*/
typedef enum
{
NVBUF_MAP_READ, /**< Specifies \ref NvBufSurface mapping type "read." */
NVBUF_MAP_WRITE, /**< Specifies \ref NvBufSurface mapping type
"write." */
NVBUF_MAP_READ_WRITE, /**< Specifies \ref NvBufSurface mapping type
"read/write." */
} NvBufSurfaceMemMapFlags;
/**
* Defines tags that identify the components requesting a memory allocation.
* The tags can be used later to identify the total memory allocated to
* particular types of components.
* TODO: Check if DeepStream requires more tags to be defined.
*/
typedef enum
{
/** tag None. */
NvBufSurfaceTag_NONE = 0x0,
/** tag for Camera. */
NvBufSurfaceTag_CAMERA = 0x200,
/** tag for Jpeg Encoder/Decoder. */
NvBufSurfaceTag_JPEG = 0x1500,
/** tag for VPR Buffers. */
NvBufSurfaceTag_PROTECTED = 0x1504,
/** tag for H264/H265 Video Encoder. */
NvBufSurfaceTag_VIDEO_ENC = 0x1200,
/** tag for H264/H265/VP9 Video Decoder. */
NvBufSurfaceTag_VIDEO_DEC = 0x1400,
/** tag for Video Transform/Composite/Blend. */
NvBufSurfaceTag_VIDEO_CONVERT = 0xf01,
} NvBufSurfaceTag;
/**
* Defines color formats for NvBufSurface.
*/
typedef enum
{
/** Specifies an invalid color format. */
NVBUF_COLOR_FORMAT_INVALID,
/** Specifies 8 bit GRAY scale - single plane */
NVBUF_COLOR_FORMAT_GRAY8,
/** Specifies BT.601 colorspace - YUV420 multi-planar. */
NVBUF_COLOR_FORMAT_YUV420,
/** Specifies BT.601 colorspace - YUV420 multi-planar. */
NVBUF_COLOR_FORMAT_YVU420,
/** Specifies BT.601 colorspace - YUV420 ER multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_ER,
/** Specifies BT.601 colorspace - YVU420 ER multi-planar. */
NVBUF_COLOR_FORMAT_YVU420_ER,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_ER,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV21,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV21_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_UYVY,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_UYVY_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_VYUY,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_VYUY_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YUYV,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YUYV_ER,
/** Specifies BT.601 colorspace - YUV 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YVYU,
/** Specifies BT.601 colorspace - YUV ER 4:2:2 planar. */
NVBUF_COLOR_FORMAT_YVYU_ER,
/** Specifies BT.601 colorspace - YUV444 multi-planar. */
NVBUF_COLOR_FORMAT_YUV444,
/** Specifies RGBA-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_RGBA,
/** Specifies BGRA-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_BGRA,
/** Specifies ARGB-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_ARGB,
/** Specifies ABGR-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_ABGR,
/** Specifies RGBx-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_RGBx,
/** Specifies BGRx-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_BGRx,
/** Specifies xRGB-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_xRGB,
/** Specifies xBGR-8-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_xBGR,
/** Specifies RGB-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_RGB,
/** Specifies BGR-8-8-8 single plane. */
NVBUF_COLOR_FORMAT_BGR,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE,
/** Specifies BT.709 colorspace - YUV420 multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_709,
/** Specifies BT.709 colorspace - YUV420 ER multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_709_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_709_ER,
/** Specifies BT.2020 colorspace - YUV420 multi-planar. */
NVBUF_COLOR_FORMAT_YUV420_2020,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:0 multi-planar. */
NVBUF_COLOR_FORMAT_NV12_2020,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_10LE_2020,
/** Specifies color format for packed 2 signed shorts */
NVBUF_COLOR_FORMAT_SIGNED_R16G16,
/** Specifies RGB- unsigned 8 bit multiplanar plane. */
NVBUF_COLOR_FORMAT_R8_G8_B8,
/** Specifies BGR- unsigned 8 bit multiplanar plane. */
NVBUF_COLOR_FORMAT_B8_G8_R8,
/** Specifies RGB-32bit Floating point multiplanar plane. */
NVBUF_COLOR_FORMAT_R32F_G32F_B32F,
/** Specifies BGR-32bit Floating point multiplanar plane. */
NVBUF_COLOR_FORMAT_B32F_G32F_R32F,
/** Specifies BT.601 colorspace - YUV422 multi-planar. */
NVBUF_COLOR_FORMAT_YUV422,
/** Specifies BT.601 colorspace - Y/CrCb 4:2:0 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV21_10LE,
/** Specifies BT.601 colorspace - Y/CrCb 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV21_12LE,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE_2020,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:2 10-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_10LE,
/** Specifies BT.601 colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24,
/** Specifies BT.601 colorspace - Y/CrCb 4:4:4 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE,
/** Specifies BT.601_ER colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16_ER,
/** Specifies BT.601_ER colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16_709,
/** Specifies BT.709 colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_709,
/** Specifies BT.709_ER colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16_709_ER,
/** Specifies BT.709_ER colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_709_ER,
/** Specifies BT.709 colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE_709,
/** Specifies BT.709 ER colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 10 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_10LE_2020,
/** Specifies BT.2020 colorspace - Y/CbCr 12 bit 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_NV24_12LE_2020,
/** Specifies Non-linear RGB BT.709 colorspace - RGBA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_709,
/** Specifies Non-linear RGB BT.2020 colorspace - RGBA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_2020,
/** Specifies Non-linear RGB BT.709 colorspace - BGRA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_709,
/** Specifies Non-linear RGB BT.2020 colorspace - BGRA-10-10-10-2 planar. */
NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_2020,
/** Specifies Optical flow SAD calculation Buffer format */
NVBUF_COLOR_FORMAT_A32,
/** Specifies BT.601 colorspace - 10 bit YUV 4:2:2 interleaved. */
NVBUF_COLOR_FORMAT_UYVP,
/** Specifies BT.601 colorspace - 10 bit YUV ER 4:2:2 interleaved. */
NVBUF_COLOR_FORMAT_UYVP_ER,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:4:4 multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_2020,
/** Specifies BT.601 colorspace - Y/CbCr 4:4:4 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_10LE,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:4:4 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_10LE_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:4:4 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_10LE_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:4:4 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_10LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:4:4 10-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_10LE_2020,
/** Specifies BT.601 colorspace - Y/CbCr 4:4:4 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_12LE,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:4:4 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_12LE_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:4:4 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_12LE_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:4:4 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_12LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:4:4 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_YUV444_12LE_2020,
/** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE_709_ER,
NVBUF_COLOR_FORMAT_LAST
} NvBufSurfaceColorFormat;
/**
* Specifies layout formats for \ref NvBufSurface video planes.
*/
typedef enum
{
/** Specifies pitch layout. */
NVBUF_LAYOUT_PITCH,
/** Specifies block linear layout. */
NVBUF_LAYOUT_BLOCK_LINEAR,
} NvBufSurfaceLayout;
/**
* Specifies memory types for \ref NvBufSurface.
*/
typedef enum
{
/** Specifies the default memory type, i.e. \ref NVBUF_MEM_CUDA_DEVICE
for dGPU, \ref NVBUF_MEM_SURFACE_ARRAY for Jetson. Use \ref NVBUF_MEM_DEFAULT
to allocate whichever type of memory is appropriate for the platform. */
NVBUF_MEM_DEFAULT,
/** Specifies CUDA Host memory type. */
NVBUF_MEM_CUDA_PINNED,
/** Specifies CUDA Device memory type. */
NVBUF_MEM_CUDA_DEVICE,
/** Specifies CUDA Unified memory type. */
NVBUF_MEM_CUDA_UNIFIED,
/** Specifies NVRM Surface Array type. Valid only for Jetson. */
NVBUF_MEM_SURFACE_ARRAY,
/** Specifies NVRM Handle type. Valid only for Jetson. */
NVBUF_MEM_HANDLE,
/** Specifies memory allocated by malloc(). */
NVBUF_MEM_SYSTEM,
} NvBufSurfaceMemType;
/**
* Defines display scan formats for NvBufSurface video planes.
*/
typedef enum
{
/** Progressive scan formats. */
NVBUF_DISPLAYSCANFORMAT_PROGRESSIVE,
/** Interlaced scan formats. */
NVBUF_DISPLAYSCANFORMAT_INTERLACED,
} NvBufSurfaceDisplayScanFormat;
/**
* Holds plane-wise parameters (extended) of a buffer.
*/
typedef struct NvBufSurfacePlaneParamsEx
{
/** display scan format - progressive/interlaced. */
NvBufSurfaceDisplayScanFormat scanformat[NVBUF_MAX_PLANES];
/** offset of the second field for interlaced buffer. */
uint32_t secondfieldoffset[NVBUF_MAX_PLANES];
/** block height of the planes for blockLinear layout buffer. */
uint32_t blockheightlog2[NVBUF_MAX_PLANES];
/** physical address of allocated planes. */
uint32_t physicaladdress[NVBUF_MAX_PLANES];
/** flags associated with planes */
uint64_t flags[NVBUF_MAX_PLANES];
void * _reserved[STRUCTURE_PADDING * NVBUF_MAX_PLANES];
} NvBufSurfacePlaneParamsEx;
/**
* Holds plane wise parameters of a buffer.
*/
typedef struct NvBufSurfacePlaneParams
{
/** Holds the number of planes. */
uint32_t num_planes;
/** Holds the widths of planes. */
uint32_t width[NVBUF_MAX_PLANES];
/** Holds the heights of planes. */
uint32_t height[NVBUF_MAX_PLANES];
/** Holds the pitches of planes in bytes. */
uint32_t pitch[NVBUF_MAX_PLANES];
/** Holds the offsets of planes in bytes. */
uint32_t offset[NVBUF_MAX_PLANES];
/** Holds the sizes of planes in bytes. */
uint32_t psize[NVBUF_MAX_PLANES];
/** Holds the number of bytes occupied by a pixel in each plane. */
uint32_t bytesPerPix[NVBUF_MAX_PLANES];
void * _reserved[STRUCTURE_PADDING * NVBUF_MAX_PLANES];
} NvBufSurfacePlaneParams;
/**
* Holds Chroma Subsampling parameters for NvBufSurface allocation.
*/
typedef struct NvBufSurfaceChromaSubsamplingParams
{
/** location settings */
uint8_t chromaLocHoriz;
uint8_t chromaLocVert;
} NvBufSurfaceChromaSubsamplingParams;
/**
* Holds parameters required to allocate an \ref NvBufSurface.
*/
typedef struct NvBufSurfaceCreateParams {
/** Holds the GPU ID. Valid only for a multi-GPU system. */
uint32_t gpuId;
/** Holds the width of the buffer. */
uint32_t width;
/** Holds the height of the buffer. */
uint32_t height;
/** Holds the amount of memory to be allocated. Optional; if set, all other
parameters (width, height, etc.) are ignored. */
uint32_t size;
/** Holds a "contiguous memory" flag. If set, contiguous memory is allocated
for the batch. Valid only for CUDA memory types. */
bool isContiguous;
/** Holds the color format of the buffer. */
NvBufSurfaceColorFormat colorFormat;
/** Holds the surface layout. May be Block Linear (BL) or Pitch Linear (PL).
For a dGPU, only PL is valid. */
NvBufSurfaceLayout layout;
/** Holds the type of memory to be allocated. */
NvBufSurfaceMemType memType;
} NvBufSurfaceCreateParams;
/**
* Holds extended parameters required to allocate an NvBufSurface.
* (Applicable for NvBufSurfaceAllocate API)
*/
typedef struct NvBufSurfaceAllocateParams {
/** Hold legacy NvBufSurface creation parameters */
NvBufSurfaceCreateParams params;
/** Display scan format */
NvBufSurfaceDisplayScanFormat displayscanformat;
/** Chroma Subsampling parameters */
NvBufSurfaceChromaSubsamplingParams chromaSubsampling;
/** components tag to be used for memory allocation */
NvBufSurfaceTag memtag;
/** Disables pitch padding. Applicable only for CUDA and system memory allocations.
When set, the pitch equals width times bytes per pixel for the plane (rounded to a
multiple of 2 for odd widths). Note that CUDA kernels may fail for some non-standard
video resolutions because of the resulting unaligned pitch.
*/
bool disablePitchPadding;
/** Used void* from custom param for 64 bit machine, using other uint32_t param */
uint32_t _reservedParam;
void * _reserved[STRUCTURE_PADDING-1];
} NvBufSurfaceAllocateParams;
/**
* Holds the pointers of a mapped buffer.
*/
typedef struct NvBufSurfaceMappedAddr {
/** Holds planewise pointers to a CPU mapped buffer. */
void * addr[NVBUF_MAX_PLANES];
/** Holds a pointer to a mapped EGLImage. */
void *eglImage;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceMappedAddr;
/**
* Holds the extended information of a single buffer in the batch.
*/
typedef struct NvBufSurfaceParamsEx {
/** Holds the offset in bytes from the start of the buffer to the first valid byte.
(Applicable for \ref NVBUF_MEM_HANDLE.) */
int32_t startofvaliddata;
/** Holds the size of the valid data from the first to the last valid byte.
(Applicable for \ref NVBUF_MEM_HANDLE.) */
int32_t sizeofvaliddatainbytes;
/** Holds the chroma subsampling parameters.
(Applicable for \ref NVBUF_MEM_SURFACE_ARRAY.) */
NvBufSurfaceChromaSubsamplingParams chromaSubsampling;
/** Holds the buffer VPR (protected content) status; true if the buffer is protected. */
bool is_protected;
/** Holds plane-wise extended parameters. */
NvBufSurfacePlaneParamsEx planeParamsex;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceParamsEx;
/**
 * Holds information about a single buffer in the batch.
*/
typedef struct NvBufSurfaceParams {
/** Holds the width of the buffer. */
uint32_t width;
/** Holds the height of the buffer. */
uint32_t height;
/** Holds the pitch of the buffer. */
uint32_t pitch;
/** Holds the color format of the buffer. */
NvBufSurfaceColorFormat colorFormat;
/** Holds BL or PL. For dGPU, only PL is valid. */
NvBufSurfaceLayout layout;
/** Holds a DMABUF FD. Valid only for \ref NVBUF_MEM_SURFACE_ARRAY and
\ref NVBUF_MEM_HANDLE type memory. */
uint64_t bufferDesc;
/** Holds the amount of allocated memory. */
uint32_t dataSize;
/** Holds a pointer to allocated memory. Not valid for
\ref NVBUF_MEM_SURFACE_ARRAY or \ref NVBUF_MEM_HANDLE. */
void * dataPtr;
/** Holds planewise information (width, height, pitch, offset, etc.). */
NvBufSurfacePlaneParams planeParams;
/** Holds pointers to mapped buffers. Initialized to NULL
when the structure is created. */
NvBufSurfaceMappedAddr mappedAddr;
/** Holds a pointer to the extended parameters of a single buffer in the batch. */
NvBufSurfaceParamsEx *paramex;
void * _reserved[STRUCTURE_PADDING - 1];
} NvBufSurfaceParams;
/**
* Holds information about batched buffers.
*/
typedef struct NvBufSurface {
/** Holds a GPU ID. Valid only for a multi-GPU system. */
uint32_t gpuId;
/** Holds the batch size. */
uint32_t batchSize;
/** Holds the number of valid and filled buffers. Initialized to zero when
an instance of the structure is created. */
uint32_t numFilled;
/** Holds an "is contiguous" flag. If set, memory allocated for the batch
is contiguous. */
bool isContiguous;
/** Holds type of memory for buffers in the batch. */
NvBufSurfaceMemType memType;
/** Holds a pointer to an array of batched buffers. */
NvBufSurfaceParams *surfaceList;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurface;
/**
* Holds plane parameters to map the buffer received from another process.
*/
typedef struct NvBufSurfaceMapPlaneParams
{
/** Holds the width of the plane. */
uint32_t width;
/** Holds the height of the plane. */
uint32_t height;
/** Holds the pitch of the plane in bytes. */
uint32_t pitch;
/** Holds the offset of the plane in bytes. */
uint32_t offset;
/** Holds the size of the plane in bytes. */
uint32_t psize;
/** Holds the offset of the second field for an interlaced buffer. */
uint32_t secondfieldoffset;
/** Holds the log2 block height of the plane for a block-linear layout buffer. */
uint32_t blockheightlog2;
/** Holds the flags associated with the plane. */
uint64_t flags;
/** Reserved */
uint8_t reserved[64];
} NvBufSurfaceMapPlaneParams;
/**
* Holds buffer parameters to map the buffer received from another process.
*/
typedef struct NvBufSurfaceMapParams {
/** Holds the number of planes. */
uint32_t num_planes;
/** Holds a GPU ID */
uint32_t gpuId;
/** Holds a DMABUF FD */
uint64_t fd;
/** Holds the total size of allocated memory */
uint32_t totalSize;
/** Holds type of memory */
NvBufSurfaceMemType memType;
/** Holds BL or PL layout */
NvBufSurfaceLayout layout;
/** Holds display scan format */
NvBufSurfaceDisplayScanFormat scanformat;
/** Holds the color format */
NvBufSurfaceColorFormat colorFormat;
/** Holds chroma subsampling parameters */
NvBufSurfaceChromaSubsamplingParams chromaSubsampling;
/** Holds plane parameters */
NvBufSurfaceMapPlaneParams planes[NVBUF_MAX_PLANES];
/** Reserved */
uint8_t reserved[64];
} NvBufSurfaceMapParams;
/**
* \brief Allocates a batch of buffers.
*
* Allocates memory for \a batchSize buffers and returns a pointer to an
* allocated \ref NvBufSurface. The \a params structure must have
* the allocation parameters of a single buffer. If \a params.size
* is set, a buffer of that size is allocated, and all other
* parameters (width, height, color format, etc.) are ignored.
*
* Call NvBufSurfaceDestroy() to free resources allocated by this function.
*
* @param[out] surf An indirect pointer to the allocated batched
* buffers.
* @param[in] batchSize Batch size of buffers.
* @param[in] params A pointer to an \ref NvBufSurfaceCreateParams
* structure.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceCreate (NvBufSurface **surf, uint32_t batchSize,
NvBufSurfaceCreateParams *params);
/**
 * \brief Allocates a batch of buffers using extended allocation parameters.
 *
 * Allocates memory for \a batchSize buffers and returns a pointer to an
 * allocated \ref NvBufSurface. The \a paramsext structure must have the
 * allocation parameters of a single buffer. If the size field in
 * \a paramsext->params is set, a buffer of that size is allocated, and all
 * other parameters (width, height, color format, etc.) are ignored.
 *
 * Call NvBufSurfaceDestroy() to free resources allocated by this function.
 *
 * @param[out] surf An indirect pointer to the allocated batched buffers.
 * @param[in] batchSize Batch size of buffers.
 * @param[in] paramsext A pointer to an \ref NvBufSurfaceAllocateParams
 * structure.
 *
 * @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceAllocate (NvBufSurface **surf, uint32_t batchSize,
NvBufSurfaceAllocateParams *paramsext);
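/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleAllocate helper is a hypothetical name): fills the extended
 * allocation parameters and allocates a single-buffer batch with
 * NvBufSurfaceAllocate(). The color format, layout, memory type, scan format and
 * memory tag constants are assumed to be enum values defined earlier in this
 * header; the resolution is arbitrary. The caller would release the surface with
 * NvBufSurfaceDestroy(), declared below.
 */
static inline int NvBufSurfaceExampleAllocate (NvBufSurface **surf)
{
  NvBufSurfaceAllocateParams paramsext = {0};

  paramsext.params.gpuId = 0;
  paramsext.params.width = 1920;
  paramsext.params.height = 1080;
  paramsext.params.colorFormat = NVBUF_COLOR_FORMAT_NV12;            /* assumed enum value */
  paramsext.params.layout = NVBUF_LAYOUT_PITCH;                      /* assumed enum value */
  paramsext.params.memType = NVBUF_MEM_DEFAULT;                      /* assumed enum value */
  paramsext.displayscanformat = NVBUF_DISPLAYSCANFORMAT_PROGRESSIVE; /* assumed enum value */
  paramsext.memtag = NvBufSurfaceTag_NONE;                           /* assumed enum value */

  /* Allocate one buffer; on success *surf points to the batched surface. */
  return NvBufSurfaceAllocate (surf, 1, &paramsext);
}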
/**
 * \brief Frees the batched buffers previously allocated through NvBufSurfaceCreate().
*
* @param[in] surf A pointer to an \ref NvBufSurface to be freed.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceDestroy (NvBufSurface *surf);
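/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleCreateDestroy helper is a hypothetical name): the basic
 * NvBufSurfaceCreate()/NvBufSurfaceDestroy() life cycle for a small batch. The
 * color format, layout and memory type constants are assumed to be enum values
 * defined earlier in this header; the resolution and batch size are arbitrary.
 */
static inline int NvBufSurfaceExampleCreateDestroy (void)
{
  NvBufSurface *surf = NULL;
  NvBufSurfaceCreateParams params = {0};

  params.gpuId = 0;
  params.width = 1280;
  params.height = 720;
  params.colorFormat = NVBUF_COLOR_FORMAT_NV12; /* assumed enum value */
  params.layout = NVBUF_LAYOUT_PITCH;           /* assumed enum value */
  params.memType = NVBUF_MEM_DEFAULT;           /* assumed enum value */

  /* Allocate a batch of 4 buffers described by the single-buffer parameters. */
  if (NvBufSurfaceCreate (&surf, 4, &params) != 0)
    return -1;

  /* ... produce into / consume from surf->surfaceList[0..3] here ... */

  /* Free every buffer in the batch and the NvBufSurface itself. */
  return NvBufSurfaceDestroy (surf);
}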
/**
* \brief Maps hardware batched buffers to the HOST or CPU address space.
*
* Valid for \ref NVBUF_MEM_CUDA_UNIFIED type memory for dGPU and
* \ref NVBUF_MEM_SURFACE_ARRAY and \ref NVBUF_MEM_HANDLE type memory for
* Jetson.
*
* This function fills an array of pointers at
* \a surf->surfaceList->mappedAddr->addr.
* \a surf is a pointer to an \ref NvBufSurface.
* \a surfaceList is a pointer to an \ref NvBufSurfaceParams.
* \a mappedAddr is a pointer to an \ref NvBufSurfaceMappedAddr.
* \a addr is declared as an array of pointers to void, and holds pointers
* to the buffers.
*
* The client must call NvBufSurfaceSyncForCpu() with the virtual address
* populated by this function before accessing mapped memory in the CPU.
*
* After memory mapping is complete, mapped memory modification
* must be coordinated between the CPU and the hardware device as
* follows:
* - CPU: If the CPU modifies mapped memory, the client must call
* NvBufSurfaceSyncForDevice() before any hardware device accesses the memory.
* - Hardware device: If a hardware device modifies mapped memory, the client
* must call NvBufSurfaceSyncForCpu() before the CPU accesses the memory.
*
* Use NvBufSurfaceUnMap() to unmap buffer(s) and release any resource.
*
* @param[in,out] surf A pointer to an NvBufSurface structure. The function
* stores pointers to the buffers in a descendant of this
* structure; see the notes above.
* @param[in] index Index of a buffer in the batch. -1 refers to all buffers
* in the batch.
* @param[in] plane Index of a plane in buffer. -1 refers to all planes
* in the buffer.
* @param[in] type A flag for mapping type.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceMap (NvBufSurface *surf, int index, int plane, NvBufSurfaceMemMapFlags type);
/**
* \brief Unmaps previously mapped buffer(s).
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index Index of a buffer in the batch. -1 indicates
* all buffers in the batch.
* @param[in] plane Index of a plane in the buffer. -1 indicates
* all planes in the buffer.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceUnMap (NvBufSurface *surf, int index, int plane);
/**
* \brief Copies the content of source batched buffer(s) to destination
* batched buffer(s).
*
* You can use this function to copy source buffer(s) of one memory type
* to destination buffer(s) of another memory type,
* e.g. CUDA host to CUDA device, malloc'ed memory to CUDA device, etc.
*
* The source and destination \ref NvBufSurface objects must have same
* buffer and batch size.
*
* @param[in] srcSurf A pointer to the source NvBufSurface structure.
* @param[in] dstSurf A pointer to the destination NvBufSurface structure.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceCopy (NvBufSurface *srcSurf, NvBufSurface *dstSurf);
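/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleCopyToSystem helper is a hypothetical name): copies the
 * contents of an existing batch into a newly created system-memory batch with
 * the same geometry, using NvBufSurfaceCopy(). NVBUF_MEM_SYSTEM is assumed to be
 * one of the NvBufSurfaceMemType values defined earlier in this header.
 */
static inline int NvBufSurfaceExampleCopyToSystem (NvBufSurface *src, NvBufSurface **dst)
{
  NvBufSurfaceCreateParams params = {0};

  /* Mirror the geometry of the first buffer in the source batch. */
  params.gpuId = src->gpuId;
  params.width = src->surfaceList[0].width;
  params.height = src->surfaceList[0].height;
  params.colorFormat = src->surfaceList[0].colorFormat;
  params.layout = src->surfaceList[0].layout;
  params.memType = NVBUF_MEM_SYSTEM; /* assumed enum value */

  if (NvBufSurfaceCreate (dst, src->batchSize, &params) != 0)
    return -1;

  /* Source and destination must have the same buffer and batch size. */
  return NvBufSurfaceCopy (src, *dst);
}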
/**
* \brief Copies the NvBufSurface plane memory content to a raw buffer plane for a specific
* batched buffer.
*
 * This function can be used to copy plane memory content from a specific source
 * batch buffer of a supported memory type to a destination raw buffer pointer.
 *
 * @param[in] Surf Pointer to the NvBufSurface structure.
 * @param[in] index Index of the buffer in the batch.
 * @param[in] plane Index of the plane in the buffer.
 * @param[in] outwidth Aligned width of the raw data plane.
 * @param[in] outheight Aligned height of the raw data plane.
 * @param[out] ptr Pointer to the output raw plane data.
*
* @return 0 for success, -1 for failure.
*/
int NvBufSurface2Raw (NvBufSurface *Surf, unsigned int index, unsigned int plane, unsigned int outwidth, unsigned int outheight, unsigned char *ptr);
/**
* \brief Copies the raw buffer plane memory content to the NvBufSurface plane memory of a specific
* batched buffer.
*
 * This function can be used to copy plane memory content from a source raw buffer
 * pointer to a specific destination batch buffer of a supported memory type.
 *
 * @param[in] ptr Pointer to the input raw plane data.
 * @param[in] index Index of the buffer in the batch.
 * @param[in] plane Index of the plane in the buffer.
 * @param[in] inwidth Aligned width of the raw data plane.
 * @param[in] inheight Aligned height of the raw data plane.
 * @param[in] Surf Pointer to the destination NvBufSurface structure.
*
* @return 0 for success, -1 for failure.
*/
int Raw2NvBufSurface (unsigned char *ptr, unsigned int index, unsigned int plane, unsigned int inwidth, unsigned int inheight, NvBufSurface *Surf);
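/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleRawRoundTrip helper is a hypothetical name): moves one plane
 * of raw data into a batched buffer with Raw2NvBufSurface() and reads it back with
 * NvBufSurface2Raw(). The caller supplies raw plane buffers whose aligned
 * width/height match the plane being copied.
 */
static inline int NvBufSurfaceExampleRawRoundTrip (NvBufSurface *surf,
    unsigned char *in_plane, unsigned char *out_plane,
    unsigned int aligned_width, unsigned int aligned_height)
{
  /* Write the raw data into plane 0 of buffer 0 in the batch. */
  if (Raw2NvBufSurface (in_plane, 0, 0, aligned_width, aligned_height, surf) != 0)
    return -1;

  /* Read plane 0 of buffer 0 back into the caller's output buffer. */
  return NvBufSurface2Raw (surf, 0, 0, aligned_width, aligned_height, out_plane);
}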
/**
 * \brief Syncs the hardware memory cache for the CPU.
*
* Valid only for memory types \ref NVBUF_MEM_SURFACE_ARRAY and
* \ref NVBUF_MEM_HANDLE.
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index Index of the buffer in the batch. -1 refers to
* all buffers in the batch.
* @param[in] plane Index of a plane in the buffer. -1 refers to all planes
* in the buffer.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceSyncForCpu (NvBufSurface *surf, int index, int plane);
/**
* \brief Syncs the hardware memory cache for the device.
*
* Valid only for memory types \ref NVBUF_MEM_SURFACE_ARRAY and
* \ref NVBUF_MEM_HANDLE.
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index Index of a buffer in the batch. -1 refers to all buffers
* in the batch.
* @param[in] plane Index of a plane in the buffer. -1 refers to all planes
* in the buffer.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceSyncForDevice (NvBufSurface *surf, int index, int plane);
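/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleCpuTouch helper is a hypothetical name): the CPU
 * read-modify-write discipline described above for hardware buffers. Maps plane 0
 * of buffer `index`, syncs the cache for the CPU before reading, writes through
 * the mapped pointer, syncs for the device afterwards, then unmaps.
 * NVBUF_MAP_READ_WRITE is assumed to be one of the NvBufSurfaceMemMapFlags values
 * defined earlier in this header.
 */
static inline int NvBufSurfaceExampleCpuTouch (NvBufSurface *surf, int index)
{
  unsigned char *plane0;

  if (NvBufSurfaceMap (surf, index, 0, NVBUF_MAP_READ_WRITE) != 0) /* assumed enum value */
    return -1;

  /* Make the hardware-written content visible to the CPU before reading it. */
  if (NvBufSurfaceSyncForCpu (surf, index, 0) != 0)
    goto fail;

  plane0 = (unsigned char *) surf->surfaceList[index].mappedAddr.addr[0];
  plane0[0] ^= 0xFF; /* toy CPU modification of the first byte of the plane */

  /* Make the CPU modification visible to hardware before any device access. */
  if (NvBufSurfaceSyncForDevice (surf, index, 0) != 0)
    goto fail;

  return NvBufSurfaceUnMap (surf, index, 0);

fail:
  NvBufSurfaceUnMap (surf, index, 0);
  return -1;
}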
/**
* \brief Gets the \ref NvBufSurface from the DMABUF FD.
*
* @param[in] dmabuf_fd DMABUF FD of the buffer.
* @param[out] buffer A pointer to the NvBufSurface.
*
* @return 0 for success, or -1 otherwise.
*/
int NvBufSurfaceFromFd (int dmabuf_fd, void **buffer);
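/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleFromFd helper is a hypothetical name): resolves a DMABUF FD
 * (for example one carried by a buffer from an NVMM pool) back to its
 * NvBufSurface with NvBufSurfaceFromFd().
 */
static inline int NvBufSurfaceExampleFromFd (int dmabuf_fd, NvBufSurface **surf)
{
  void *buffer = NULL;

  if (NvBufSurfaceFromFd (dmabuf_fd, &buffer) != 0)
    return -1;

  /* Per the documentation above, the returned pointer refers to the NvBufSurface. */
  *surf = (NvBufSurface *) buffer;
  return 0;
}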
/**
* \brief Fills each byte of the buffer(s) in an \ref NvBufSurface with a
* provided value.
*
* You can also use this function to reset the buffer(s) in the batch.
*
* @param[in] surf A pointer to the NvBufSurface structure.
* @param[in] index Index of a buffer in the batch. -1 refers to all buffers
* in the batch.
* @param[in] plane Index of a plane in the buffer. -1 refers to all planes
* in the buffer.
* @param[in] value The value to be used as fill.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceMemSet (NvBufSurface *surf, int index, int plane, uint8_t value);
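/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleReset helper is a hypothetical name): zeroes every plane of
 * every buffer in the batch with NvBufSurfaceMemSet(), e.g. to reset buffers
 * before reuse.
 */
static inline int NvBufSurfaceExampleReset (NvBufSurface *surf)
{
  /* index = -1 and plane = -1 select all buffers and all planes. */
  return NvBufSurfaceMemSet (surf, -1, -1, 0);
}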
/**
* \brief Creates an EGLImage from the memory of one or more
* \ref NvBufSurface buffers.
*
* Only memory type \ref NVBUF_MEM_SURFACE_ARRAY is supported.
*
* This function returns the created EGLImage by storing its address at
* \a surf->surfaceList->mappedAddr->eglImage. (\a surf is a pointer to
* an NvBufSurface. \a surfaceList is a pointer to an \ref NvBufSurfaceParams.
* \a mappedAddr is a pointer to an \ref NvBufSurfaceMappedAddr.
* \a eglImage is declared as a pointer to void, and holds an
* EGLImageKHR.)
*
* You can use this function in scenarios where a CUDA operation on Jetson
* hardware memory (identified by \ref NVBUF_MEM_SURFACE_ARRAY) is required.
* The EGLImageKHR struct provided by this function can then be registered
* with CUDA for further CUDA operations.
*
* @param[in,out] surf A pointer to an NvBufSurface structure. The function
* stores a pointer to the created EGLImage in
* a descendant of this structure; see the notes above.
* @param[in] index Index of a buffer in the batch. -1 specifies all buffers
* in the batch.
*
* @return 0 for success, or -1 otherwise.
*/
int NvBufSurfaceMapEglImage (NvBufSurface *surf, int index);
/**
* \brief Destroys the previously created EGLImage object(s).
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index The index of a buffer in the batch. -1 specifies all
* buffers in the batch.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceUnMapEglImage (NvBufSurface *surf, int index);
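/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleEglImage helper is a hypothetical name): creates an EGLImage
 * for buffer 0 with NvBufSurfaceMapEglImage(), at which point
 * surf->surfaceList[0].mappedAddr.eglImage holds the EGLImage that could be
 * registered with CUDA (registration itself is not shown), then destroys it.
 */
static inline int NvBufSurfaceExampleEglImage (NvBufSurface *surf)
{
  void *egl_image;

  if (NvBufSurfaceMapEglImage (surf, 0) != 0)
    return -1;

  egl_image = surf->surfaceList[0].mappedAddr.eglImage;
  (void) egl_image; /* hand this to a CUDA EGL interop registration call */

  return NvBufSurfaceUnMapEglImage (surf, 0);
}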
/**
 * \brief Imports parameters received from another process and creates a hardware buffer.
 *
 * The calling process must call NvBufferDestroy() to remove the reference count on the
 * hardware buffer handle of the imported DMA buffer.
*
* @param[out] out_nvbuf_surf Pointer to hardware buffer.
* @param[in] in_params Parameters to create hardware buffer.
*
* @return 0 for success, -1 for failure.
*/
int NvBufSurfaceImport (NvBufSurface **out_nvbuf_surf, const NvBufSurfaceMapParams *in_params);
/**
 * \brief Gets the buffer information required to map the buffer in another process.
*
* @param[in] surf Pointer to NvBufSurface structure.
* @param[in] index Index of a buffer in the batch.
* @param[out] params Pointer to NvBufSurfaceMapParams information of the buffer.
*
* @return 0 for success, -1 for failure.
*/
int NvBufSurfaceGetMapParams (const NvBufSurface *surf, int index, NvBufSurfaceMapParams *params);
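/*
 * Usage sketch (illustrative addition, not part of the original header; the
 * NvBufSurfaceExampleExport/Import helpers are hypothetical names): sharing a
 * buffer across processes. The exporting process fills an NvBufSurfaceMapParams
 * with NvBufSurfaceGetMapParams() and sends it (including the DMABUF FD) over IPC;
 * the importing process rebuilds a hardware buffer from it with
 * NvBufSurfaceImport(). The IPC transport itself is not shown.
 */
static inline int NvBufSurfaceExampleExport (const NvBufSurface *surf,
    NvBufSurfaceMapParams *out_params)
{
  /* Describe buffer 0 of the batch so another process can map it. */
  return NvBufSurfaceGetMapParams (surf, 0, out_params);
}

static inline int NvBufSurfaceExampleImport (const NvBufSurfaceMapParams *in_params,
    NvBufSurface **imported)
{
  /* Create a hardware buffer that references the shared memory. */
  return NvBufSurfaceImport (imported, in_params);
}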
/** @} */
#ifdef __cplusplus
}
#endif
#endif /* NVBUFSURFACE_H_ */

1
push_info.txt Normal file

@@ -0,0 +1 @@
jetson_35.6

2731
v4l2_nv_extensions.h Normal file

File diff suppressed because it is too large