Compare commits

3 Commits

Author | SHA1 | Message | Date
svcmobrel-release
b119b2b41b Updating prebuilts and/or headers
ed8273ff6102bb0b4fa7975a401b12b3e95a7187 - nvbufsurface.h
7af73b80b2f930ab91431d66cd84ec794da9e117 - v4l2_nv_extensions.h
d27a433ddeaefb9f42d0312c23472514b0cd6a45 - gst-nvcustomevent.h
21a860247c06670e4619b8eaae1d92db31bdd3e8 - gst-v4l2/gstv4l2.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
49a66f0ce02abc71f33e096a65645ddedf5c7f46 - gst-v4l2/gstv4l2bufferpool.c
9f726e4439379bb399f29c68736242f21dab3dd0 - gst-v4l2/gstv4l2allocator.c
65de802e5f162aa04518b7ade5841cc3ced01111 - gst-v4l2/Makefile
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
dc1a3f7292873f1f71dc27300f97f3ab918ed79f - gst-v4l2/gstv4l2h265enc.c
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
b52a5ee4c739818736b9a3683442df285ebe9eda - gst-v4l2/gstv4l2videodec.c
3f7cafe5beb4395caf2e1591bf0a835e5076031a - gst-v4l2/gstv4l2object.h
d5952b0286c34bf13fbf5e09fe552ced0da49368 - gst-v4l2/gstv4l2videodec.h
398c24d1eef98ec9003a06587bc3784050602cd2 - gst-v4l2/gstv4l2h26xparser.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
c2099692cdb374440c2a040cb6ad01bbc1549ce5 - gst-v4l2/gstv4l2h26xparser.h
99d65d620807b5ba1ca29a838e032940c9b019cc - gst-v4l2/sei_parse.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
489fde70531590e94d1d211a42f10f81ae68d2b9 - gst-v4l2/gstv4l2videoenc.h
4e79cf75c4fa29791e1f5141318dc8aec13a7835 - gst-v4l2/nalutils.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
5ecd059e5ef9be4014eface37e5e2f7598960f4e - gst-v4l2/nalutils.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
9681f7b98dfdfbc4d845f9ce7f11c3692b923195 - gst-v4l2/gstv4l2videoenc.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
884e5b97b9fa8d07b6153e6efe6999884922b813 - gst-v4l2/gstv4l2object.c
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h

Change-Id: I20e40b27ca0d0ba800354bd069a7010532345ab1
2025-07-01 06:51:14 -07:00
svcmobrel-release
d92d2bb91c Updating prebuilts and/or headers
ed8273ff6102bb0b4fa7975a401b12b3e95a7187 - nvbufsurface.h
7af73b80b2f930ab91431d66cd84ec794da9e117 - v4l2_nv_extensions.h
d27a433ddeaefb9f42d0312c23472514b0cd6a45 - gst-nvcustomevent.h
21a860247c06670e4619b8eaae1d92db31bdd3e8 - gst-v4l2/gstv4l2.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
49a66f0ce02abc71f33e096a65645ddedf5c7f46 - gst-v4l2/gstv4l2bufferpool.c
9f726e4439379bb399f29c68736242f21dab3dd0 - gst-v4l2/gstv4l2allocator.c
65de802e5f162aa04518b7ade5841cc3ced01111 - gst-v4l2/Makefile
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
dc1a3f7292873f1f71dc27300f97f3ab918ed79f - gst-v4l2/gstv4l2h265enc.c
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
b52a5ee4c739818736b9a3683442df285ebe9eda - gst-v4l2/gstv4l2videodec.c
3f7cafe5beb4395caf2e1591bf0a835e5076031a - gst-v4l2/gstv4l2object.h
d5952b0286c34bf13fbf5e09fe552ced0da49368 - gst-v4l2/gstv4l2videodec.h
398c24d1eef98ec9003a06587bc3784050602cd2 - gst-v4l2/gstv4l2h26xparser.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
c2099692cdb374440c2a040cb6ad01bbc1549ce5 - gst-v4l2/gstv4l2h26xparser.h
99d65d620807b5ba1ca29a838e032940c9b019cc - gst-v4l2/sei_parse.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
489fde70531590e94d1d211a42f10f81ae68d2b9 - gst-v4l2/gstv4l2videoenc.h
4e79cf75c4fa29791e1f5141318dc8aec13a7835 - gst-v4l2/nalutils.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
5ecd059e5ef9be4014eface37e5e2f7598960f4e - gst-v4l2/nalutils.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
9681f7b98dfdfbc4d845f9ce7f11c3692b923195 - gst-v4l2/gstv4l2videoenc.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
884e5b97b9fa8d07b6153e6efe6999884922b813 - gst-v4l2/gstv4l2object.c
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h

Change-Id: Ibca7485998b0e1abface85871462d2082da4ae5f
2025-03-17 16:54:04 -07:00
svcmobrel-release
5c1f0868fd Updating prebuilts and/or headers
ed8273ff6102bb0b4fa7975a401b12b3e95a7187 - nvbufsurface.h
7af73b80b2f930ab91431d66cd84ec794da9e117 - v4l2_nv_extensions.h
d27a433ddeaefb9f42d0312c23472514b0cd6a45 - gst-nvcustomevent.h
21a860247c06670e4619b8eaae1d92db31bdd3e8 - gst-v4l2/gstv4l2.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
49a66f0ce02abc71f33e096a65645ddedf5c7f46 - gst-v4l2/gstv4l2bufferpool.c
9f726e4439379bb399f29c68736242f21dab3dd0 - gst-v4l2/gstv4l2allocator.c
65de802e5f162aa04518b7ade5841cc3ced01111 - gst-v4l2/Makefile
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
dc1a3f7292873f1f71dc27300f97f3ab918ed79f - gst-v4l2/gstv4l2h265enc.c
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
b52a5ee4c739818736b9a3683442df285ebe9eda - gst-v4l2/gstv4l2videodec.c
3f7cafe5beb4395caf2e1591bf0a835e5076031a - gst-v4l2/gstv4l2object.h
d5952b0286c34bf13fbf5e09fe552ced0da49368 - gst-v4l2/gstv4l2videodec.h
398c24d1eef98ec9003a06587bc3784050602cd2 - gst-v4l2/gstv4l2h26xparser.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
c2099692cdb374440c2a040cb6ad01bbc1549ce5 - gst-v4l2/gstv4l2h26xparser.h
99d65d620807b5ba1ca29a838e032940c9b019cc - gst-v4l2/sei_parse.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
489fde70531590e94d1d211a42f10f81ae68d2b9 - gst-v4l2/gstv4l2videoenc.h
4e79cf75c4fa29791e1f5141318dc8aec13a7835 - gst-v4l2/nalutils.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
5ecd059e5ef9be4014eface37e5e2f7598960f4e - gst-v4l2/nalutils.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
9681f7b98dfdfbc4d845f9ce7f11c3692b923195 - gst-v4l2/gstv4l2videoenc.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
884e5b97b9fa8d07b6153e6efe6999884922b813 - gst-v4l2/gstv4l2object.c
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h

Change-Id: I3770af2d1c63a6193ccfb47a0ec190f5d241a331
2024-09-06 00:00:25 -07:00
30 changed files with 1147 additions and 4385 deletions

View File

@@ -1,45 +1,43 @@
Updating prebuilts and/or headers
44b0e909f18f7e2f457ba501fc47d80ecedd150b - nvbufsurface.h
2c5c20979e5fca5ed70b425187c3d09b39c03171 - v4l2_nv_extensions.h
ed8273ff6102bb0b4fa7975a401b12b3e95a7187 - nvbufsurface.h
7af73b80b2f930ab91431d66cd84ec794da9e117 - v4l2_nv_extensions.h
d27a433ddeaefb9f42d0312c23472514b0cd6a45 - gst-nvcustomevent.h
e9519308cbf7b36481da7665e3b74d36569cc3d1 - gst-v4l2/gstv4l2.c
ba87c2bc0bea986ef461e1bc2ab3ded89700a986 - gst-v4l2/gstv4l2h264enc.c
93eaaa0797c1f1dc21c20fbad1885dc109ccffd3 - gst-v4l2/gstv4l2bufferpool.c
9ff38f38c224577c4aaadc4ac4d808429f37ca69 - gst-v4l2/gstv4l2allocator.c
3d06f0b9ae8e465e8aecd7ef101e652ff62268c4 - gst-v4l2/Makefile
21a860247c06670e4619b8eaae1d92db31bdd3e8 - gst-v4l2/gstv4l2.c
e8e973c103725b65232d32817e0305d12d6ff309 - gst-v4l2/gstv4l2h264enc.c
49a66f0ce02abc71f33e096a65645ddedf5c7f46 - gst-v4l2/gstv4l2bufferpool.c
9f726e4439379bb399f29c68736242f21dab3dd0 - gst-v4l2/gstv4l2allocator.c
65de802e5f162aa04518b7ade5841cc3ced01111 - gst-v4l2/Makefile
02d142337f4b96fcb0c9f2405a3cbe90c5917cca - gst-v4l2/gstv4l2vp9enc.c
34adbcb7d5cf5a360d28432429b735710bfe49c5 - gst-v4l2/wsl_utils.h
afc982d855f80b1e21ce1831930a9f327c41832b - gst-v4l2/gstv4l2h265enc.c
55a2c81ab3ffd72e07fc680369683d9635a3665c - gst-v4l2/gstv4l2h265enc.h
dc1a3f7292873f1f71dc27300f97f3ab918ed79f - gst-v4l2/gstv4l2h265enc.c
d29e3a719400c3cb27314366d48ec792a3c12363 - gst-v4l2/gstv4l2h265enc.h
c81eacb7d88c4fb839506dd70055e30d7a9feeec - gst-v4l2/v4l2-utils.h
b1cd923335aa60985ff9866fba91a2068e8671c7 - gst-v4l2/LICENSE.gst-nvvideo4linux2
aa816d369be13e7cb2f6f5283c74bb00f7f1c76e - gst-v4l2/v4l2_calls.c
73b03969d7ae0a8adb374c93999c43af88ea93b2 - gst-v4l2/v4l2_calls.c
d89a680415f6ff5acec2571cde0fce9054d8e81f - gst-v4l2/gstv4l2vp9enc.h
da6c40e84b3b99e443b76c72cbb433541bdc9bcf - gst-v4l2/gstv4l2videodec.c
0d69b17838c57184dace9bfa1d30bbe8f2f83848 - gst-v4l2/gstv4l2object.h
c3ac3836a2d29d813c3c274cde82d2a59dd45a5a - gst-v4l2/gstv4l2videodec.h
4b70823ac5f9a70cce0c909e284c73aed4bccbd6 - gst-v4l2/gstv4l2h26xparser.c
b52a5ee4c739818736b9a3683442df285ebe9eda - gst-v4l2/gstv4l2videodec.c
3f7cafe5beb4395caf2e1591bf0a835e5076031a - gst-v4l2/gstv4l2object.h
d5952b0286c34bf13fbf5e09fe552ced0da49368 - gst-v4l2/gstv4l2videodec.h
398c24d1eef98ec9003a06587bc3784050602cd2 - gst-v4l2/gstv4l2h26xparser.c
39fcb2f599e6906ab0fd7ab9a46fef3ea58a8cab - gst-v4l2/gstv4l2vp8enc.h
08d68910b07d04e1429763ad1e6dbbeb41c5277d - gst-v4l2/gstv4l2av1enc.h
cbc84dccd2506afa4c8f03849c95bb28c83ef4a3 - gst-v4l2/gstv4l2av1enc.h
a002edef13a3bbbdc41e42a7fca40e574ad1bb3e - gst-v4l2/v4l2-utils.c
870a72e5038dba9f4df37f900d53a059beee9bbc - gst-v4l2/gstv4l2h26xparser.h
fac36b61500cf8d1b5f2513d6d2319ef73aa870e - gst-v4l2/sei_parse.c
c2099692cdb374440c2a040cb6ad01bbc1549ce5 - gst-v4l2/gstv4l2h26xparser.h
99d65d620807b5ba1ca29a838e032940c9b019cc - gst-v4l2/sei_parse.c
b827fd6cb1e3b8ecebd6a07f8556e846e26cba17 - gst-v4l2/gstv4l2allocator.h
e18e54d84e643676bfc88fd559d834f26f5b4d4d - gst-v4l2/wsl_utils.c
d0af17fd51ec44b79ef54c1279b631a46cf31f49 - gst-v4l2/gstv4l2videoenc.h
489fde70531590e94d1d211a42f10f81ae68d2b9 - gst-v4l2/gstv4l2videoenc.h
4e79cf75c4fa29791e1f5141318dc8aec13a7835 - gst-v4l2/nalutils.h
add535643bbb5c58b7eb98b45496204e4d63ebb1 - gst-v4l2/gstv4l2bufferpool.h
71be284b547ee68fb0e2cd14b0aeb14734a915a1 - gst-v4l2/gstv4l2bufferpool.h
5ecd059e5ef9be4014eface37e5e2f7598960f4e - gst-v4l2/nalutils.c
719c8569e894b0146a6e027550187df5aaf5adc1 - gst-v4l2/gstv4l2av1enc.c
5948d70c07e87f9b1dc403789dcbed6acfa47ad9 - gst-v4l2/gstv4l2av1enc.c
bb104683f5e4f7402e3f765a891e149edc794e02 - gst-v4l2/gstv4l2h264enc.h
eb5134c907dd4b25097491e4273591db6ac386fc - gst-v4l2/gstv4l2videoenc.c
9681f7b98dfdfbc4d845f9ce7f11c3692b923195 - gst-v4l2/gstv4l2videoenc.c
807bc9859585a540b0f85e98f147756aab24e1bd - gst-v4l2/gstv4l2vp8enc.c
9c3d135576125a6620cc8fa0b249ac73c070110b - gst-v4l2/gstv4l2object.c
884e5b97b9fa8d07b6153e6efe6999884922b813 - gst-v4l2/gstv4l2object.c
20c4f7c0cb89c83256650bc3353ed82154cf3a9d - gst-v4l2/gst/gst-i18n-plugin.h
e864ee6647f3572b144403d799f68152e9900da1 - gst-v4l2/gst/gettext.h
499a9feb17ceabf1f1443923dffa1e0180bf5972 - gst-v4l2/gst/glib-compat-private.h
72a34a694337f8f6da3bb94c9faced6730cbd2fc - gst-v4l2/ext/types-compat.h
583075e89482f1faa08be7f7b278336bf7756def - gst-v4l2/ext/v4l2-controls.h
fe847595bb202501a56702a7c602f0514d23c328 - gst-v4l2/ext/v4l2-common.h
2253e5f55e37aace35af706d5662ef017f17e877 - gst-v4l2/ext/videodev2.h
1636366b5a062e4bc1791b7bc3012ccf5635b363 - gst-v4l2/ext/v4l2-controls.h
a745675b051a2b8434a430c80fde3f245864ca89 - gst-v4l2/ext/v4l2-common.h
522ab8fc8531a2c758b9278d29642f5b763fd3e7 - gst-v4l2/ext/videodev2.h

View File

@@ -1,6 +1,6 @@
###############################################################################
#
# Copyright (c) 2018-2025, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA Corporation and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
@@ -14,12 +14,11 @@ SO_NAME := libgstnvvideo4linux2.so
TARGET_DEVICE = $(shell gcc -dumpmachine | cut -f1 -d -)
NVDS_VERSION:=8.0
NVDS_VERSION:=6.0
ifeq ($(TARGET_DEVICE),aarch64)
GST_INSTALL_DIR?=/usr/lib/aarch64-linux-gnu/gstreamer-1.0/
LIB_INSTALL_DIR?=/usr/lib/aarch64-linux-gnu/tegra/
INCLUDES += -I/usr/src/jetson_multimedia_api/include/
CFLAGS:=
else
GST_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/gst-plugins/
@@ -30,7 +29,8 @@ endif
LIBS:= -lnvbufsurface -lnvbufsurftransform -lgstnvdsseimeta -lgstnvcustomhelper
SRCS := $(wildcard *.c)
INCLUDES += -I./ -I../ -I/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/sources/includes/
INCLUDES += -I./ -I../
INCLUDES += -I/usr/src/jetson_multimedia_api/include/
PKGS := gstreamer-1.0 \
gstreamer-base-1.0 \

View File

@@ -1,6 +1,6 @@
###############################################################################
#
# Copyright (c) 2018-2025, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA Corporation and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
@@ -34,4 +34,4 @@ Steps to compile the "gst-nvvideo4linux2" sources natively:
Note: For Jetson, "make install" will copy library "libgstnvvideo4linux2.so"
into "/usr/lib/aarch64-linux-gnu/gstreamer-1.0" directory. For x86 platforms,
make install will copy the library "libgstnvvideo4linux2.so" into
/opt/nvidia/deepstream/deepstream/lib/gst-plugins
/opt/nvidia/deepstream/deepstream-4.0/lib/gst-plugins

View File

@@ -1,4 +1,3 @@
/* SPDX-License-Identifier: ((GPL-2.0+ WITH Linux-syscall-note) OR BSD-3-Clause) */
/*
* include/linux/v4l2-common.h
*
@@ -54,7 +53,7 @@
#ifndef __V4L2_COMMON__
#define __V4L2_COMMON__
#include <linux/types.h>
#include "ext/types-compat.h"
/*
*
@@ -79,11 +78,24 @@
/* Current composing area plus all padding pixels */
#define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103
/* Backward compatibility target definitions --- to be removed. */
#define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP
#define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS
/* Selection flags */
#define V4L2_SEL_FLAG_GE (1 << 0)
#define V4L2_SEL_FLAG_LE (1 << 1)
#define V4L2_SEL_FLAG_KEEP_CONFIG (1 << 2)
/* Backward compatibility flag definitions --- to be removed. */
#define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE
#define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE
#define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG
struct v4l2_edid {
__u32 pad;
__u32 start_block;
@@ -92,17 +104,4 @@ struct v4l2_edid {
__u8 *edid;
};
/* Backward compatibility target definitions --- to be removed. */
#define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP
#define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE
#define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS
#define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS
/* Backward compatibility flag definitions --- to be removed. */
#define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE
#define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE
#define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG
#endif /* __V4L2_COMMON__ */

View File

File diff suppressed because it is too large.

View File

@@ -1,8 +1,8 @@
/* SPDX-License-Identifier: ((GPL-2.0+ WITH Linux-syscall-note) OR BSD-3-Clause) */
/*
* Video for Linux Two header file
*
* Copyright (C) 1999-2012 the contributors
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -54,21 +54,32 @@
* Hans Verkuil <hverkuil@xs4all.nl>
* et al.
*/
#ifndef __LINUX_VIDEODEV2_H
#define __LINUX_VIDEODEV2_H
#ifndef _UAPI__LINUX_VIDEODEV2_H
#define _UAPI__LINUX_VIDEODEV2_H
#include <sys/time.h>
#include <sys/ioctl.h>
#include <linux/ioctl.h>
#include <linux/types.h>
#include <linux/v4l2-common.h>
#include <linux/v4l2-controls.h>
#include "ext/types-compat.h"
#include "ext/v4l2-common.h"
#include "ext/v4l2-controls.h"
/*
* Common stuff for both V4L1 and V4L2
* Moved from videodev.h
*/
#ifdef USE_V4L2_TARGET_NV
/*
* As video decoder base class has to queue all the decoded frames
* between IDR interval for reverse playback, buffers are increased
* to 64 to support IDR interval till 60. As per the experiments,
* (IDR interval + 4) buffers are required at decoder capture plane
* for reverse playback
*/
#define VIDEO_MAX_FRAME 64
#else
#define VIDEO_MAX_FRAME 32
#endif
#define VIDEO_MAX_PLANES 8
/*
@@ -78,7 +89,7 @@
/* Four-character-code (FOURCC) */
#define v4l2_fourcc(a, b, c, d)\
((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24))
#define v4l2_fourcc_be(a, b, c, d) (v4l2_fourcc(a, b, c, d) | (1U << 31))
#define v4l2_fourcc_be(a, b, c, d) (v4l2_fourcc(a, b, c, d) | (1 << 31))
/*
* E N U M S
@@ -105,14 +116,14 @@ enum v4l2_field {
transmitted first */
};
#define V4L2_FIELD_HAS_TOP(field) \
((field) == V4L2_FIELD_TOP ||\
((field) == V4L2_FIELD_TOP ||\
(field) == V4L2_FIELD_INTERLACED ||\
(field) == V4L2_FIELD_INTERLACED_TB ||\
(field) == V4L2_FIELD_INTERLACED_BT ||\
(field) == V4L2_FIELD_SEQ_TB ||\
(field) == V4L2_FIELD_SEQ_BT)
#define V4L2_FIELD_HAS_BOTTOM(field) \
((field) == V4L2_FIELD_BOTTOM ||\
((field) == V4L2_FIELD_BOTTOM ||\
(field) == V4L2_FIELD_INTERLACED ||\
(field) == V4L2_FIELD_INTERLACED_TB ||\
(field) == V4L2_FIELD_INTERLACED_BT ||\
@@ -128,13 +139,6 @@ enum v4l2_field {
((field) == V4L2_FIELD_BOTTOM ||\
(field) == V4L2_FIELD_TOP ||\
(field) == V4L2_FIELD_ALTERNATE)
#define V4L2_FIELD_IS_INTERLACED(field) \
((field) == V4L2_FIELD_INTERLACED ||\
(field) == V4L2_FIELD_INTERLACED_TB ||\
(field) == V4L2_FIELD_INTERLACED_BT)
#define V4L2_FIELD_IS_SEQUENTIAL(field) \
((field) == V4L2_FIELD_SEQ_TB ||\
(field) == V4L2_FIELD_SEQ_BT)
enum v4l2_buf_type {
V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
@@ -150,7 +154,6 @@ enum v4l2_buf_type {
V4L2_BUF_TYPE_SDR_CAPTURE = 11,
V4L2_BUF_TYPE_SDR_OUTPUT = 12,
V4L2_BUF_TYPE_META_CAPTURE = 13,
V4L2_BUF_TYPE_META_OUTPUT = 14,
/* Deprecated, do not use */
V4L2_BUF_TYPE_PRIVATE = 0x80,
};
@@ -166,10 +169,7 @@ enum v4l2_buf_type {
|| (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY \
|| (type) == V4L2_BUF_TYPE_VBI_OUTPUT \
|| (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT \
|| (type) == V4L2_BUF_TYPE_SDR_OUTPUT \
|| (type) == V4L2_BUF_TYPE_META_OUTPUT)
#define V4L2_TYPE_IS_CAPTURE(type) (!V4L2_TYPE_IS_OUTPUT(type))
|| (type) == V4L2_BUF_TYPE_SDR_OUTPUT)
enum v4l2_tuner_type {
V4L2_TUNER_RADIO = 1,
@@ -219,7 +219,9 @@ enum v4l2_colorspace {
V4L2_COLORSPACE_470_SYSTEM_M = 5,
/*
* EBU Tech 3213 PAL/SECAM colorspace.
* EBU Tech 3213 PAL/SECAM colorspace. This only makes sense when
* dealing with really old PAL/SECAM recordings. Superseded by
* SMPTE 170M.
*/
V4L2_COLORSPACE_470_SYSTEM_BG = 6,
@@ -232,8 +234,8 @@ enum v4l2_colorspace {
/* For RGB colorspaces such as produces by most webcams. */
V4L2_COLORSPACE_SRGB = 8,
/* opRGB colorspace */
V4L2_COLORSPACE_OPRGB = 9,
/* AdobeRGB colorspace */
V4L2_COLORSPACE_ADOBERGB = 9,
/* BT.2020 colorspace, used for UHDTV. */
V4L2_COLORSPACE_BT2020 = 10,
@@ -265,7 +267,7 @@ enum v4l2_xfer_func {
*
* V4L2_COLORSPACE_SRGB, V4L2_COLORSPACE_JPEG: V4L2_XFER_FUNC_SRGB
*
* V4L2_COLORSPACE_OPRGB: V4L2_XFER_FUNC_OPRGB
* V4L2_COLORSPACE_ADOBERGB: V4L2_XFER_FUNC_ADOBERGB
*
* V4L2_COLORSPACE_SMPTE240M: V4L2_XFER_FUNC_SMPTE240M
*
@@ -276,7 +278,7 @@ enum v4l2_xfer_func {
V4L2_XFER_FUNC_DEFAULT = 0,
V4L2_XFER_FUNC_709 = 1,
V4L2_XFER_FUNC_SRGB = 2,
V4L2_XFER_FUNC_OPRGB = 3,
V4L2_XFER_FUNC_ADOBERGB = 3,
V4L2_XFER_FUNC_SMPTE240M = 4,
V4L2_XFER_FUNC_NONE = 5,
V4L2_XFER_FUNC_DCI_P3 = 6,
@@ -288,7 +290,7 @@ enum v4l2_xfer_func {
* This depends on the colorspace.
*/
#define V4L2_MAP_XFER_FUNC_DEFAULT(colsp) \
((colsp) == V4L2_COLORSPACE_OPRGB ? V4L2_XFER_FUNC_OPRGB : \
((colsp) == V4L2_COLORSPACE_ADOBERGB ? V4L2_XFER_FUNC_ADOBERGB : \
((colsp) == V4L2_COLORSPACE_SMPTE240M ? V4L2_XFER_FUNC_SMPTE240M : \
((colsp) == V4L2_COLORSPACE_DCI_P3 ? V4L2_XFER_FUNC_DCI_P3 : \
((colsp) == V4L2_COLORSPACE_RAW ? V4L2_XFER_FUNC_NONE : \
@@ -302,7 +304,7 @@ enum v4l2_ycbcr_encoding {
*
* V4L2_COLORSPACE_SMPTE170M, V4L2_COLORSPACE_470_SYSTEM_M,
* V4L2_COLORSPACE_470_SYSTEM_BG, V4L2_COLORSPACE_SRGB,
* V4L2_COLORSPACE_OPRGB and V4L2_COLORSPACE_JPEG: V4L2_YCBCR_ENC_601
* V4L2_COLORSPACE_ADOBERGB and V4L2_COLORSPACE_JPEG: V4L2_YCBCR_ENC_601
*
* V4L2_COLORSPACE_REC709 and V4L2_COLORSPACE_DCI_P3: V4L2_YCBCR_ENC_709
*
@@ -324,12 +326,14 @@ enum v4l2_ycbcr_encoding {
/* Rec. 709/EN 61966-2-4 Extended Gamut -- HDTV */
V4L2_YCBCR_ENC_XV709 = 4,
#ifndef __KERNEL__
/*
* sYCC (Y'CbCr encoding of sRGB), identical to ENC_601. It was added
* originally due to a misunderstanding of the sYCC standard. It should
* not be used, instead use V4L2_YCBCR_ENC_601.
*/
V4L2_YCBCR_ENC_SYCC = 5,
#endif
/* BT.2020 Non-constant Luminance Y'CbCr */
V4L2_YCBCR_ENC_BT2020 = 6,
@@ -367,9 +371,9 @@ enum v4l2_hsv_encoding {
enum v4l2_quantization {
/*
* The default for R'G'B' quantization is always full range.
* For Y'CbCr the quantization is always limited range, except
* for COLORSPACE_JPEG: this is full range.
* The default for R'G'B' quantization is always full range, except
* for the BT2020 colorspace. For Y'CbCr the quantization is always
* limited range, except for COLORSPACE_JPEG: this is full range.
*/
V4L2_QUANTIZATION_DEFAULT = 0,
V4L2_QUANTIZATION_FULL_RANGE = 1,
@@ -378,22 +382,14 @@ enum v4l2_quantization {
/*
* Determine how QUANTIZATION_DEFAULT should map to a proper quantization.
* This depends on whether the image is RGB or not, the colorspace.
* The Y'CbCr encoding is not used anymore, but is still there for backwards
* compatibility.
* This depends on whether the image is RGB or not, the colorspace and the
* Y'CbCr encoding.
*/
#define V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb_or_hsv, colsp, ycbcr_enc) \
(((is_rgb_or_hsv) || (colsp) == V4L2_COLORSPACE_JPEG) ? \
V4L2_QUANTIZATION_FULL_RANGE : V4L2_QUANTIZATION_LIM_RANGE)
/*
* Deprecated names for opRGB colorspace (IEC 61966-2-5)
*
* WARNING: Please don't use these deprecated defines in your code, as
* there is a chance we have to remove them in the future.
*/
#define V4L2_COLORSPACE_ADOBERGB V4L2_COLORSPACE_OPRGB
#define V4L2_XFER_FUNC_ADOBERGB V4L2_XFER_FUNC_OPRGB
(((is_rgb_or_hsv) && (colsp) == V4L2_COLORSPACE_BT2020) ? \
V4L2_QUANTIZATION_LIM_RANGE : \
(((is_rgb_or_hsv) || (colsp) == V4L2_COLORSPACE_JPEG) ? \
V4L2_QUANTIZATION_FULL_RANGE : V4L2_QUANTIZATION_LIM_RANGE))
enum v4l2_priority {
V4L2_PRIORITY_UNSET = 0, /* not initialized */
@@ -415,11 +411,6 @@ struct v4l2_fract {
__u32 denominator;
};
struct v4l2_area {
__u32 width;
__u32 height;
};
/**
* struct v4l2_capability - Describes V4L2 device caps returned by VIDIOC_QUERYCAP
*
@@ -476,24 +467,21 @@ struct v4l2_capability {
#define V4L2_CAP_READWRITE 0x01000000 /* read/write systemcalls */
#define V4L2_CAP_ASYNCIO 0x02000000 /* async I/O */
#define V4L2_CAP_STREAMING 0x04000000 /* streaming I/O ioctls */
#define V4L2_CAP_META_OUTPUT 0x08000000 /* Is a metadata output device */
#define V4L2_CAP_TOUCH 0x10000000 /* Is a touch device */
#define V4L2_CAP_IO_MC 0x20000000 /* Is input/output controlled by the media controller */
#define V4L2_CAP_DEVICE_CAPS 0x80000000 /* sets device capabilities field */
/*
* V I D E O I M A G E F O R M A T
*/
struct v4l2_pix_format {
__u32 width;
__u32 width;
__u32 height;
__u32 pixelformat;
__u32 field; /* enum v4l2_field */
__u32 bytesperline; /* for padding, zero if unused */
__u32 sizeimage;
__u32 bytesperline; /* for padding, zero if unused */
__u32 sizeimage;
__u32 colorspace; /* enum v4l2_colorspace */
__u32 priv; /* private data, depends on pixelformat */
__u32 flags; /* format flags (V4L2_PIX_FMT_FLAG_*) */
@@ -509,44 +497,26 @@ struct v4l2_pix_format {
/* Pixel format FOURCC depth Description */
/* RGB formats (1 or 2 bytes per pixel) */
/* RGB formats */
#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') /* 8 RGB-3-3-2 */
#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') /* 16 xxxxrrrr ggggbbbb */
#define V4L2_PIX_FMT_ARGB444 v4l2_fourcc('A', 'R', '1', '2') /* 16 aaaarrrr ggggbbbb */
#define V4L2_PIX_FMT_XRGB444 v4l2_fourcc('X', 'R', '1', '2') /* 16 xxxxrrrr ggggbbbb */
#define V4L2_PIX_FMT_RGBA444 v4l2_fourcc('R', 'A', '1', '2') /* 16 rrrrgggg bbbbaaaa */
#define V4L2_PIX_FMT_RGBX444 v4l2_fourcc('R', 'X', '1', '2') /* 16 rrrrgggg bbbbxxxx */
#define V4L2_PIX_FMT_ABGR444 v4l2_fourcc('A', 'B', '1', '2') /* 16 aaaabbbb ggggrrrr */
#define V4L2_PIX_FMT_XBGR444 v4l2_fourcc('X', 'B', '1', '2') /* 16 xxxxbbbb ggggrrrr */
#define V4L2_PIX_FMT_BGRA444 v4l2_fourcc('G', 'A', '1', '2') /* 16 bbbbgggg rrrraaaa */
#define V4L2_PIX_FMT_BGRX444 v4l2_fourcc('B', 'X', '1', '2') /* 16 bbbbgggg rrrrxxxx */
#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') /* 16 RGB-5-5-5 */
#define V4L2_PIX_FMT_ARGB555 v4l2_fourcc('A', 'R', '1', '5') /* 16 ARGB-1-5-5-5 */
#define V4L2_PIX_FMT_XRGB555 v4l2_fourcc('X', 'R', '1', '5') /* 16 XRGB-1-5-5-5 */
#define V4L2_PIX_FMT_RGBA555 v4l2_fourcc('R', 'A', '1', '5') /* 16 RGBA-5-5-5-1 */
#define V4L2_PIX_FMT_RGBX555 v4l2_fourcc('R', 'X', '1', '5') /* 16 RGBX-5-5-5-1 */
#define V4L2_PIX_FMT_ABGR555 v4l2_fourcc('A', 'B', '1', '5') /* 16 ABGR-1-5-5-5 */
#define V4L2_PIX_FMT_XBGR555 v4l2_fourcc('X', 'B', '1', '5') /* 16 XBGR-1-5-5-5 */
#define V4L2_PIX_FMT_BGRA555 v4l2_fourcc('B', 'A', '1', '5') /* 16 BGRA-5-5-5-1 */
#define V4L2_PIX_FMT_BGRX555 v4l2_fourcc('B', 'X', '1', '5') /* 16 BGRX-5-5-5-1 */
#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') /* 16 RGB-5-6-5 */
#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') /* 16 RGB-5-5-5 BE */
#define V4L2_PIX_FMT_ARGB555X v4l2_fourcc_be('A', 'R', '1', '5') /* 16 ARGB-5-5-5 BE */
#define V4L2_PIX_FMT_XRGB555X v4l2_fourcc_be('X', 'R', '1', '5') /* 16 XRGB-5-5-5 BE */
#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') /* 16 RGB-5-6-5 BE */
/* RGB formats (3 or 4 bytes per pixel) */
#define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') /* 18 BGR-6-6-6 */
#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') /* 24 BGR-8-8-8 */
#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') /* 24 RGB-8-8-8 */
#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') /* 32 BGR-8-8-8-8 */
#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') /* 32 BGRA-8-8-8-8 */
#define V4L2_PIX_FMT_XBGR32 v4l2_fourcc('X', 'R', '2', '4') /* 32 BGRX-8-8-8-8 */
#define V4L2_PIX_FMT_BGRA32 v4l2_fourcc('R', 'A', '2', '4') /* 32 ABGR-8-8-8-8 */
#define V4L2_PIX_FMT_BGRX32 v4l2_fourcc('R', 'X', '2', '4') /* 32 XBGR-8-8-8-8 */
#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') /* 32 RGB-8-8-8-8 */
#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') /* 32 RGBA-8-8-8-8 */
#define V4L2_PIX_FMT_RGBX32 v4l2_fourcc('X', 'B', '2', '4') /* 32 RGBX-8-8-8-8 */
#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') /* 32 ARGB-8-8-8-8 */
#define V4L2_PIX_FMT_XRGB32 v4l2_fourcc('B', 'X', '2', '4') /* 32 XRGB-8-8-8-8 */
@@ -556,13 +526,11 @@ struct v4l2_pix_format {
#define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') /* 6 Greyscale */
#define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') /* 10 Greyscale */
#define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') /* 12 Greyscale */
#define V4L2_PIX_FMT_Y14 v4l2_fourcc('Y', '1', '4', ' ') /* 14 Greyscale */
#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* 16 Greyscale */
#define V4L2_PIX_FMT_Y16_BE v4l2_fourcc_be('Y', '1', '6', ' ') /* 16 Greyscale BE */
/* Grey bit-packed formats */
#define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') /* 10 Greyscale bit-packed */
#define V4L2_PIX_FMT_Y10P v4l2_fourcc('Y', '1', '0', 'P') /* 10 Greyscale, MIPI RAW10 packed */
/* Palette formats */
#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') /* 8 8-bit palette */
@@ -580,12 +548,9 @@ struct v4l2_pix_format {
#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') /* 16 xxxxyyyy uuuuvvvv */
#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */
#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */
#define V4L2_PIX_FMT_YUV24 v4l2_fourcc('Y', 'U', 'V', '3') /* 24 YUV-8-8-8 */
#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */
#define V4L2_PIX_FMT_AYUV32 v4l2_fourcc('A', 'Y', 'U', 'V') /* 32 AYUV-8-8-8-8 */
#define V4L2_PIX_FMT_XYUV32 v4l2_fourcc('X', 'Y', 'U', 'V') /* 32 XYUV-8-8-8-8 */
#define V4L2_PIX_FMT_VUYA32 v4l2_fourcc('V', 'U', 'Y', 'A') /* 32 VUYA-8-8-8-8 */
#define V4L2_PIX_FMT_VUYX32 v4l2_fourcc('V', 'U', 'Y', 'X') /* 32 VUYX-8-8-8-8 */
#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* 8 8-bit color */
#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') /* 8 YUV 4:2:0 16x16 macroblocks */
#define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') /* 12 YUV 4:2:0 2 lines y, 1 line uv interleaved */
/* two planes -- one Y, one Cr + Cb interleaved */
@@ -595,7 +560,6 @@ struct v4l2_pix_format {
#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') /* 16 Y/CrCb 4:2:2 */
#define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') /* 24 Y/CbCr 4:4:4 */
#define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') /* 24 Y/CrCb 4:4:4 */
#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') /* 8 YUV 4:2:0 16x16 macroblocks */
/* two non contiguous planes - one Y, one Cr + Cb interleaved */
#define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 */
@@ -649,20 +613,6 @@ struct v4l2_pix_format {
#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12 GBGB.. RGRG.. */
#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12 GRGR.. BGBG.. */
#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12 RGRG.. GBGB.. */
/* 12bit raw bayer packed, 6 bytes for every 4 pixels */
#define V4L2_PIX_FMT_SBGGR12P v4l2_fourcc('p', 'B', 'C', 'C')
#define V4L2_PIX_FMT_SGBRG12P v4l2_fourcc('p', 'G', 'C', 'C')
#define V4L2_PIX_FMT_SGRBG12P v4l2_fourcc('p', 'g', 'C', 'C')
#define V4L2_PIX_FMT_SRGGB12P v4l2_fourcc('p', 'R', 'C', 'C')
#define V4L2_PIX_FMT_SBGGR14 v4l2_fourcc('B', 'G', '1', '4') /* 14 BGBG.. GRGR.. */
#define V4L2_PIX_FMT_SGBRG14 v4l2_fourcc('G', 'B', '1', '4') /* 14 GBGB.. RGRG.. */
#define V4L2_PIX_FMT_SGRBG14 v4l2_fourcc('G', 'R', '1', '4') /* 14 GRGR.. BGBG.. */
#define V4L2_PIX_FMT_SRGGB14 v4l2_fourcc('R', 'G', '1', '4') /* 14 RGRG.. GBGB.. */
/* 14bit raw bayer packed, 7 bytes for every 4 pixels */
#define V4L2_PIX_FMT_SBGGR14P v4l2_fourcc('p', 'B', 'E', 'E')
#define V4L2_PIX_FMT_SGBRG14P v4l2_fourcc('p', 'G', 'E', 'E')
#define V4L2_PIX_FMT_SGRBG14P v4l2_fourcc('p', 'g', 'E', 'E')
#define V4L2_PIX_FMT_SRGGB14P v4l2_fourcc('p', 'R', 'E', 'E')
#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16 BGBG.. GRGR.. */
#define V4L2_PIX_FMT_SGBRG16 v4l2_fourcc('G', 'B', '1', '6') /* 16 GBGB.. RGRG.. */
#define V4L2_PIX_FMT_SGRBG16 v4l2_fourcc('G', 'R', '1', '6') /* 16 GRGR.. BGBG.. */
@@ -683,18 +633,12 @@ struct v4l2_pix_format {
#define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') /* H263 */
#define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') /* MPEG-1 ES */
#define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') /* MPEG-2 ES */
#define V4L2_PIX_FMT_MPEG2_SLICE v4l2_fourcc('M', 'G', '2', 'S') /* MPEG-2 parsed slice data */
#define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') /* MPEG-4 part 2 ES */
#define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') /* Xvid */
#define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') /* SMPTE 421M Annex G compliant stream */
#define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') /* SMPTE 421M Annex L compliant stream */
#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') /* VP8 */
#define V4L2_PIX_FMT_VP8_FRAME v4l2_fourcc('V', 'P', '8', 'F') /* VP8 parsed frame */
#define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0') /* VP9 */
#define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C') /* HEVC aka H.265 */
#define V4L2_PIX_FMT_FWHT v4l2_fourcc('F', 'W', 'H', 'T') /* Fast Walsh Hadamard Transform (vicodec) */
#define V4L2_PIX_FMT_FWHT_STATELESS v4l2_fourcc('S', 'F', 'W', 'H') /* Stateless FWHT (vicodec) */
#define V4L2_PIX_FMT_H264_SLICE v4l2_fourcc('S', '2', '6', '4') /* H264 parsed slices */
/* Vendor-specific formats */
#define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') /* cpia1 YUV */
@@ -728,15 +672,6 @@ struct v4l2_pix_format {
#define V4L2_PIX_FMT_Z16 v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */
#define V4L2_PIX_FMT_MT21C v4l2_fourcc('M', 'T', '2', '1') /* Mediatek compressed block mode */
#define V4L2_PIX_FMT_INZI v4l2_fourcc('I', 'N', 'Z', 'I') /* Intel Planar Greyscale 10-bit and Depth 16-bit */
#define V4L2_PIX_FMT_SUNXI_TILED_NV12 v4l2_fourcc('S', 'T', '1', '2') /* Sunxi Tiled NV12 Format */
#define V4L2_PIX_FMT_CNF4 v4l2_fourcc('C', 'N', 'F', '4') /* Intel 4-bit packed depth confidence information */
#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* BTTV 8-bit dithered RGB */
/* 10bit raw bayer packed, 32 bytes for every 25 pixels, last LSB 6 bits unused */
#define V4L2_PIX_FMT_IPU3_SBGGR10 v4l2_fourcc('i', 'p', '3', 'b') /* IPU3 packed 10-bit BGGR bayer */
#define V4L2_PIX_FMT_IPU3_SGBRG10 v4l2_fourcc('i', 'p', '3', 'g') /* IPU3 packed 10-bit GBRG bayer */
#define V4L2_PIX_FMT_IPU3_SGRBG10 v4l2_fourcc('i', 'p', '3', 'G') /* IPU3 packed 10-bit GRBG bayer */
#define V4L2_PIX_FMT_IPU3_SRGGB10 v4l2_fourcc('i', 'p', '3', 'r') /* IPU3 packed 10-bit RGGB bayer */
/* SDR formats - used only for Software Defined Radio devices */
#define V4L2_SDR_FMT_CU8 v4l2_fourcc('C', 'U', '0', '8') /* IQ u8 */
@@ -757,20 +692,12 @@ struct v4l2_pix_format {
/* Meta-data formats */
#define V4L2_META_FMT_VSP1_HGO v4l2_fourcc('V', 'S', 'P', 'H') /* R-Car VSP1 1-D Histogram */
#define V4L2_META_FMT_VSP1_HGT v4l2_fourcc('V', 'S', 'P', 'T') /* R-Car VSP1 2-D Histogram */
#define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header metadata */
#define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D4XX Payload Header metadata */
#define V4L2_META_FMT_VIVID v4l2_fourcc('V', 'I', 'V', 'D') /* Vivid Metadata */
/* Vendor specific - used for RK_ISP1 camera sub-system */
#define V4L2_META_FMT_RK_ISP1_PARAMS v4l2_fourcc('R', 'K', '1', 'P') /* Rockchip ISP1 3A Parameters */
#define V4L2_META_FMT_RK_ISP1_STAT_3A v4l2_fourcc('R', 'K', '1', 'S') /* Rockchip ISP1 3A Statistics */
/* priv field value to indicates that subsequent fields are valid. */
#define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe
/* Flags */
#define V4L2_PIX_FMT_FLAG_PREMUL_ALPHA 0x00000001
#define V4L2_PIX_FMT_FLAG_SET_CSC 0x00000002
/*
* F O R M A T E N U M E R A T I O N
@@ -781,20 +708,11 @@ struct v4l2_fmtdesc {
__u32 flags;
__u8 description[32]; /* Description string */
__u32 pixelformat; /* Format fourcc */
__u32 mbus_code; /* Media bus code */
__u32 reserved[3];
__u32 reserved[4];
};
#define V4L2_FMT_FLAG_COMPRESSED 0x0001
#define V4L2_FMT_FLAG_EMULATED 0x0002
#define V4L2_FMT_FLAG_CONTINUOUS_BYTESTREAM 0x0004
#define V4L2_FMT_FLAG_DYN_RESOLUTION 0x0008
#define V4L2_FMT_FLAG_ENC_CAP_FRAME_INTERVAL 0x0010
#define V4L2_FMT_FLAG_CSC_COLORSPACE 0x0020
#define V4L2_FMT_FLAG_CSC_XFER_FUNC 0x0040
#define V4L2_FMT_FLAG_CSC_YCBCR_ENC 0x0080
#define V4L2_FMT_FLAG_CSC_HSV_ENC V4L2_FMT_FLAG_CSC_YCBCR_ENC
#define V4L2_FMT_FLAG_CSC_QUANTIZATION 0x0100
#define V4L2_FMT_FLAG_COMPRESSED 0x0001
#define V4L2_FMT_FLAG_EMULATED 0x0002
/* Frame Size and frame rate enumeration */
/*
@@ -923,25 +841,13 @@ struct v4l2_jpegcompression {
/*
* M E M O R Y - M A P P I N G B U F F E R S
*/
struct v4l2_requestbuffers {
__u32 count;
__u32 type; /* enum v4l2_buf_type */
__u32 memory; /* enum v4l2_memory */
__u32 capabilities;
__u32 reserved[1];
__u32 reserved[2];
};
/* capabilities for struct v4l2_requestbuffers and v4l2_create_buffers */
#define V4L2_BUF_CAP_SUPPORTS_MMAP (1 << 0)
#define V4L2_BUF_CAP_SUPPORTS_USERPTR (1 << 1)
#define V4L2_BUF_CAP_SUPPORTS_DMABUF (1 << 2)
#define V4L2_BUF_CAP_SUPPORTS_REQUESTS (1 << 3)
#define V4L2_BUF_CAP_SUPPORTS_ORPHANED_BUFS (1 << 4)
#define V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF (1 << 5)
#define V4L2_BUF_CAP_SUPPORTS_MMAP_CACHE_HINTS (1 << 6)
/**
* struct v4l2_plane - plane info for multi-planar buffers
* @bytesused: number of bytes occupied by data in the plane (payload)
@@ -954,10 +860,8 @@ struct v4l2_requestbuffers {
* pointing to this plane
* @fd: when memory is V4L2_MEMORY_DMABUF, a userspace file
* descriptor associated with this plane
* @m: union of @mem_offset, @userptr and @fd
* @data_offset: offset in the plane to the start of data; usually 0,
* unless there is a header in front of the data
* @reserved: drivers and applications must zero this array
*
* Multi-planar buffers consist of one or more planes, e.g. an YCbCr buffer
* with two planes can have one plane for Y, and another for interleaved CbCr
@@ -999,14 +903,9 @@ struct v4l2_plane {
* a userspace file descriptor associated with this buffer
* @planes: for multiplanar buffers; userspace pointer to the array of plane
* info structs for this buffer
* @m: union of @offset, @userptr, @planes and @fd
* @length: size in bytes of the buffer (NOT its payload) for single-plane
* buffers (when type != *_MPLANE); number of elements in the
* planes array for multi-plane buffers
* @reserved2: drivers and applications must zero this field
* @request_fd: fd of the request that this buffer should use
* @reserved: for backwards compatibility with applications that do not know
* about @request_fd
*
* Contains data exchanged by application and driver using one of the Streaming
* I/O methods.
@@ -1031,24 +930,9 @@ struct v4l2_buffer {
} m;
__u32 length;
__u32 reserved2;
union {
__s32 request_fd;
__u32 reserved;
};
__u32 reserved;
};
/**
* v4l2_timeval_to_ns - Convert timeval to nanoseconds
* @tv: pointer to the timeval variable to be converted
*
* Returns the scalar nanosecond representation of the timeval
* parameter.
*/
static __inline__ __u64 v4l2_timeval_to_ns(const struct timeval *tv)
{
return (__u64)tv->tv_sec * 1000000000ULL + tv->tv_usec * 1000;
}
/* Flags for 'flags' field */
/* Buffer is mapped (flag) */
#define V4L2_BUF_FLAG_MAPPED 0x00000001
@@ -1064,12 +948,8 @@ static __inline__ __u64 v4l2_timeval_to_ns(const struct timeval *tv)
#define V4L2_BUF_FLAG_BFRAME 0x00000020
/* Buffer is ready, but the data contained within is corrupted. */
#define V4L2_BUF_FLAG_ERROR 0x00000040
/* Buffer is added to an unqueued request */
#define V4L2_BUF_FLAG_IN_REQUEST 0x00000080
/* timecode field is valid */
#define V4L2_BUF_FLAG_TIMECODE 0x00000100
/* Don't return the capture buffer until OUTPUT timestamp changes */
#define V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF 0x00000200
/* Buffer is prepared for queuing */
#define V4L2_BUF_FLAG_PREPARED 0x00000400
/* Cache handling flags */
@@ -1086,8 +966,6 @@ static __inline__ __u64 v4l2_timeval_to_ns(const struct timeval *tv)
#define V4L2_BUF_FLAG_TSTAMP_SRC_SOE 0x00010000
/* mem2mem encoder/decoder */
#define V4L2_BUF_FLAG_LAST 0x00100000
/* request_fd is valid */
#define V4L2_BUF_FLAG_REQUEST_FD 0x00800000
/**
* struct v4l2_exportbuffer - export of video buffer as DMABUF file descriptor
@@ -1099,7 +977,6 @@ static __inline__ __u64 v4l2_timeval_to_ns(const struct timeval *tv)
* @flags: flags for newly created file, currently only O_CLOEXEC is
* supported, refer to manual of open syscall for more details
* @fd: file descriptor associated with DMABUF (set by driver)
* @reserved: drivers and applications must zero this array
*
* Contains data used for exporting a video buffer as DMABUF file descriptor.
* The buffer is identified by a 'cookie' returned by VIDIOC_QUERYBUF
@@ -1157,16 +1034,16 @@ struct v4l2_framebuffer {
struct v4l2_clip {
struct v4l2_rect c;
struct v4l2_clip *next;
struct v4l2_clip __user *next;
};
struct v4l2_window {
struct v4l2_rect w;
__u32 field; /* enum v4l2_field */
__u32 chromakey;
struct v4l2_clip *clips;
struct v4l2_clip __user *clips;
__u32 clipcount;
void *bitmap;
void __user *bitmap;
__u8 global_alpha;
};
@@ -1238,10 +1115,6 @@ struct v4l2_selection {
typedef __u64 v4l2_std_id;
/*
* Attention: Keep the V4L2_STD_* bit definitions in sync with
* include/dt-bindings/display/sdtv-standards.h SDTV_STD_* bit definitions.
*/
/* one bit for each */
#define V4L2_STD_PAL_B ((v4l2_std_id)0x00000001)
#define V4L2_STD_PAL_B1 ((v4l2_std_id)0x00000002)
@@ -1297,7 +1170,7 @@ typedef __u64 v4l2_std_id;
V4L2_STD_NTSC_M_JP |\
V4L2_STD_NTSC_M_KR)
/* Secam macros */
#define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\
#define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\
V4L2_STD_SECAM_K |\
V4L2_STD_SECAM_K1)
/* All Secam Standards */
@@ -1378,7 +1251,7 @@ struct v4l2_standard {
};
/*
* D V B T T I M I N G S
* D V B T T I M I N G S
*/
/** struct v4l2_bt_timings - BT.656/BT.1120 timing data
@@ -1516,13 +1389,6 @@ struct v4l2_bt_timings {
* InfoFrame).
*/
#define V4L2_DV_FL_HAS_HDMI_VIC (1 << 8)
/*
* CEA-861 specific: only valid for video receivers.
* If set, then HW can detect the difference between regular FPS and
* 1000/1001 FPS. Note: This flag is only valid for HDMI VIC codes with
* the V4L2_DV_FL_CAN_REDUCE_FPS flag set.
*/
#define V4L2_DV_FL_CAN_DETECT_REDUCED_FPS (1 << 9)
/* A few useful defines to calculate the total blanking and frame sizes */
#define V4L2_DV_BT_BLANKING_WIDTH(bt) \
@@ -1531,8 +1397,7 @@ struct v4l2_bt_timings {
((bt)->width + V4L2_DV_BT_BLANKING_WIDTH(bt))
#define V4L2_DV_BT_BLANKING_HEIGHT(bt) \
((bt)->vfrontporch + (bt)->vsync + (bt)->vbackporch + \
((bt)->interlaced ? \
((bt)->il_vfrontporch + (bt)->il_vsync + (bt)->il_vbackporch) : 0))
(bt)->il_vfrontporch + (bt)->il_vsync + (bt)->il_vbackporch)
#define V4L2_DV_BT_FRAME_HEIGHT(bt) \
((bt)->height + V4L2_DV_BT_BLANKING_HEIGHT(bt))
@@ -1706,46 +1571,36 @@ struct v4l2_ext_control {
union {
__s32 value;
__s64 value64;
char *string;
__u8 *p_u8;
__u16 *p_u16;
__u32 *p_u32;
struct v4l2_area *p_area;
struct v4l2_ctrl_h264_sps *p_h264_sps;
struct v4l2_ctrl_h264_pps *p_h264_pps;
struct v4l2_ctrl_h264_scaling_matrix *p_h264_scaling_matrix;
struct v4l2_ctrl_h264_pred_weights *p_h264_pred_weights;
struct v4l2_ctrl_h264_slice_params *p_h264_slice_params;
struct v4l2_ctrl_h264_decode_params *p_h264_decode_params;
struct v4l2_ctrl_fwht_params *p_fwht_params;
struct v4l2_ctrl_vp8_frame *p_vp8_frame;
struct v4l2_ctrl_mpeg2_sequence *p_mpeg2_sequence;
struct v4l2_ctrl_mpeg2_picture *p_mpeg2_picture;
struct v4l2_ctrl_mpeg2_quantisation *p_mpeg2_quantisation;
void *ptr;
char __user *string;
__u8 __user *p_u8;
__u16 __user *p_u16;
__u32 __user *p_u32;
void __user *ptr;
};
} __attribute__ ((packed));
struct v4l2_ext_controls {
union {
#ifndef __KERNEL__
__u32 ctrl_class;
#endif
__u32 which;
};
__u32 count;
__u32 error_idx;
__s32 request_fd;
__u32 reserved[1];
__u32 reserved[2];
struct v4l2_ext_control *controls;
};
#define V4L2_CTRL_ID_MASK (0x0fffffff)
#define V4L2_CTRL_ID_MASK (0x0fffffff)
#ifndef __KERNEL__
#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL)
#endif
#define V4L2_CTRL_ID2WHICH(id) ((id) & 0x0fff0000UL)
#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000)
#define V4L2_CTRL_MAX_DIMS (4)
#define V4L2_CTRL_WHICH_CUR_VAL 0
#define V4L2_CTRL_WHICH_DEF_VAL 0x0f000000
#define V4L2_CTRL_WHICH_REQUEST_VAL 0x0f010000
enum v4l2_ctrl_type {
V4L2_CTRL_TYPE_INTEGER = 1,
@@ -1763,25 +1618,6 @@ enum v4l2_ctrl_type {
V4L2_CTRL_TYPE_U8 = 0x0100,
V4L2_CTRL_TYPE_U16 = 0x0101,
V4L2_CTRL_TYPE_U32 = 0x0102,
V4L2_CTRL_TYPE_AREA = 0x0106,
V4L2_CTRL_TYPE_HDR10_CLL_INFO = 0x0110,
V4L2_CTRL_TYPE_HDR10_MASTERING_DISPLAY = 0x0111,
V4L2_CTRL_TYPE_H264_SPS = 0x0200,
V4L2_CTRL_TYPE_H264_PPS = 0x0201,
V4L2_CTRL_TYPE_H264_SCALING_MATRIX = 0x0202,
V4L2_CTRL_TYPE_H264_SLICE_PARAMS = 0x0203,
V4L2_CTRL_TYPE_H264_DECODE_PARAMS = 0x0204,
V4L2_CTRL_TYPE_H264_PRED_WEIGHTS = 0x0205,
V4L2_CTRL_TYPE_FWHT_PARAMS = 0x0220,
V4L2_CTRL_TYPE_VP8_FRAME = 0x0240,
V4L2_CTRL_TYPE_MPEG2_QUANTISATION = 0x0250,
V4L2_CTRL_TYPE_MPEG2_SEQUENCE = 0x0251,
V4L2_CTRL_TYPE_MPEG2_PICTURE = 0x0252,
};
/* Used in the VIDIOC_QUERYCTRL ioctl for querying controls */
@@ -1828,11 +1664,11 @@ struct v4l2_querymenu {
/* Control flags */
#define V4L2_CTRL_FLAG_DISABLED 0x0001
#define V4L2_CTRL_FLAG_GRABBED 0x0002
#define V4L2_CTRL_FLAG_READ_ONLY 0x0004
#define V4L2_CTRL_FLAG_UPDATE 0x0008
#define V4L2_CTRL_FLAG_INACTIVE 0x0010
#define V4L2_CTRL_FLAG_SLIDER 0x0020
#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
#define V4L2_CTRL_FLAG_READ_ONLY 0x0004
#define V4L2_CTRL_FLAG_UPDATE 0x0008
#define V4L2_CTRL_FLAG_INACTIVE 0x0010
#define V4L2_CTRL_FLAG_SLIDER 0x0020
#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
#define V4L2_CTRL_FLAG_VOLATILE 0x0080
#define V4L2_CTRL_FLAG_HAS_PAYLOAD 0x0100
#define V4L2_CTRL_FLAG_EXECUTE_ON_WRITE 0x0200
@@ -1946,21 +1782,21 @@ struct v4l2_hw_freq_seek {
*/
struct v4l2_rds_data {
__u8 lsb;
__u8 msb;
__u8 block;
__u8 lsb;
__u8 msb;
__u8 block;
} __attribute__ ((packed));
#define V4L2_RDS_BLOCK_MSK 0x7
#define V4L2_RDS_BLOCK_A 0
#define V4L2_RDS_BLOCK_B 1
#define V4L2_RDS_BLOCK_C 2
#define V4L2_RDS_BLOCK_D 3
#define V4L2_RDS_BLOCK_C_ALT 4
#define V4L2_RDS_BLOCK_INVALID 7
#define V4L2_RDS_BLOCK_MSK 0x7
#define V4L2_RDS_BLOCK_A 0
#define V4L2_RDS_BLOCK_B 1
#define V4L2_RDS_BLOCK_C 2
#define V4L2_RDS_BLOCK_D 3
#define V4L2_RDS_BLOCK_C_ALT 4
#define V4L2_RDS_BLOCK_INVALID 7
#define V4L2_RDS_BLOCK_CORRECTED 0x40
#define V4L2_RDS_BLOCK_ERROR 0x80
#define V4L2_RDS_BLOCK_ERROR 0x80
/*
* A U D I O
@@ -2037,7 +1873,6 @@ struct v4l2_encoder_cmd {
#define V4L2_DEC_CMD_STOP (1)
#define V4L2_DEC_CMD_PAUSE (2)
#define V4L2_DEC_CMD_RESUME (3)
#define V4L2_DEC_CMD_FLUSH (4)
/* Flags for V4L2_DEC_CMD_START */
#define V4L2_DEC_CMD_START_MUTE_AUDIO (1 << 0)
@@ -2219,7 +2054,6 @@ struct v4l2_mpeg_vbi_fmt_ivtv {
* this plane will be used
* @bytesperline: distance in bytes between the leftmost pixels in two
* adjacent lines
* @reserved: drivers and applications must zero this array
*/
struct v4l2_plane_pix_format {
__u32 sizeimage;
@@ -2238,10 +2072,8 @@ struct v4l2_plane_pix_format {
* @num_planes: number of planes for this format
* @flags: format flags (V4L2_PIX_FMT_FLAG_*)
* @ycbcr_enc: enum v4l2_ycbcr_encoding, Y'CbCr encoding
* @hsv_enc: enum v4l2_hsv_encoding, HSV encoding
* @quantization: enum v4l2_quantization, colorspace quantization
* @xfer_func: enum v4l2_xfer_func, colorspace transfer function
* @reserved: drivers and applications must zero this array
*/
struct v4l2_pix_format_mplane {
__u32 width;
@@ -2266,7 +2098,6 @@ struct v4l2_pix_format_mplane {
* struct v4l2_sdr_format - SDR format definition
* @pixelformat: little endian four character code (fourcc)
* @buffersize: maximum size in bytes required for data
* @reserved: drivers and applications must zero this array
*/
struct v4l2_sdr_format {
__u32 pixelformat;
@@ -2293,8 +2124,6 @@ struct v4l2_meta_format {
* @vbi: raw VBI capture or output parameters
* @sliced: sliced VBI capture or output parameters
* @raw_data: placeholder for future extensions and custom formats
* @fmt: union of @pix, @pix_mp, @win, @vbi, @sliced, @sdr, @meta
* and @raw_data
*/
struct v4l2_format {
__u32 type;
@@ -2462,7 +2291,6 @@ struct v4l2_dbg_chip_info {
* return: number of created buffers
* @memory: enum v4l2_memory; buffer memory type
* @format: frame format, for which buffers are requested
* @capabilities: capabilities of this buffer type.
* @reserved: future extensions
*/
struct v4l2_create_buffers {
@@ -2470,8 +2298,7 @@ struct v4l2_create_buffers {
__u32 count;
__u32 memory;
struct v4l2_format format;
__u32 capabilities;
__u32 reserved[7];
__u32 reserved[8];
};
/*
@@ -2479,6 +2306,7 @@ struct v4l2_create_buffers {
*
*/
#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability)
#define VIDIOC_RESERVED _IO('V', 1)
#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc)
#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format)
#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format)
@@ -2524,8 +2352,8 @@ struct v4l2_create_buffers {
#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop)
#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression)
#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression)
#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio)
#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout)
#define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) /* enum v4l2_priority */
@@ -2546,8 +2374,8 @@ struct v4l2_create_buffers {
* Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
* You must be root to use these ioctls. Never use these in applications!
*/
#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek)
#define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings)
@@ -2579,4 +2407,4 @@ struct v4l2_create_buffers {
#define BASE_VIDIOC_PRIVATE 192 /* 192-255 are private */
#endif /* __LINUX_VIDEODEV2_H */
#endif /* _UAPI__LINUX_VIDEODEV2_H */

View File

@@ -2,8 +2,7 @@
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2.c: plugin for v4l2 elements
*
@@ -296,42 +295,31 @@ static gboolean
gst_v4l2_has_vp8_encoder(void)
{
gboolean ret = FALSE;
int fd = -1, rval = 0;
int fd = -1;
long len = -1;
struct stat statbuf;
char info[128];
fd = open(V4L2_DEVICE_PATH_TEGRA_INFO, O_RDONLY);
if (fd < 0)
return ret;
rval = stat(V4L2_DEVICE_PATH_TEGRA_INFO, &statbuf);
if (rval < 0)
{
close(fd);
return ret;
}
if (statbuf.st_size > 8 && statbuf.st_size < 128)
{
rval = read(fd, info, statbuf.st_size);
if (rval <= 0)
if (access (V4L2_DEVICE_PATH_TEGRA_INFO, F_OK) == 0) {
stat(V4L2_DEVICE_PATH_TEGRA_INFO, &statbuf);
if (statbuf.st_size > 0 && statbuf.st_size < 128)
{
close(fd);
return ret;
}
len = statbuf.st_size - 8;
for (int i = 0; i < len; i ++)
{
if (strncmp(&info[i], "tegra", 5) == 0)
fd = open(V4L2_DEVICE_PATH_TEGRA_INFO, O_RDONLY);
read(fd, info, statbuf.st_size);
len = statbuf.st_size - 8;
for (int i = 0; i < len; i ++)
{
if (strncmp(&info[i], "tegra186", 8) == 0 ||
strncmp(&info[i], "tegra210", 8) == 0)
ret = TRUE;
break;
if (strncmp(&info[i], "tegra", 5) == 0)
{
if (strncmp(&info[i], "tegra186", 8) == 0 ||
strncmp(&info[i], "tegra210", 8) == 0)
ret = TRUE;
break;
}
}
close(fd);
}
}
close(fd);
return ret;
}
@@ -340,32 +328,31 @@ gst_v4l2_is_v4l2_nvenc_present(void)
{
gboolean ret = TRUE;
int fd = -1;
long len = -1;
struct stat statbuf;
char info[128];
if (access(V4L2_DEVICE_INFO_SOM_EEPROM, F_OK) == 0)
{
stat(V4L2_DEVICE_INFO_SOM_EEPROM, &statbuf);
if (access (V4L2_DEVICE_PATH_TEGRA_INFO, F_OK) == 0) {
stat(V4L2_DEVICE_PATH_TEGRA_INFO, &statbuf);
if (statbuf.st_size > 0 && statbuf.st_size < 128)
{
fd = open(V4L2_DEVICE_INFO_SOM_EEPROM, O_RDONLY);
fd = open(V4L2_DEVICE_PATH_TEGRA_INFO, O_RDONLY);
read(fd, info, statbuf.st_size);
for (int i = 0; i <= (statbuf.st_size - 9); i++)
len = statbuf.st_size - 10;
for (int i = 0; i < len; i ++)
{
if (strncmp(&info[i], "3767", 4) == 0)
if (strncmp(&info[i], "p3767", 5) == 0)
{
/*
* Jetson Orin Nano 8GB (P3767-0003) Commercial module
* Jetson Orin Nano 4GB (P3767-0004) Commercial module
* Jetson Orin Nano 8GB with SD card slot (P3767-0005) For the Developer Kit only
Jetson Orin Nano 8GB (P3767-0003) Commercial module
Jetson Orin Nano 4GB (P3767-0004) Commercial module
Jetson Orin Nano 8GB with SD card slot (P3767-0005) For the Developer Kit only
*/
if (strncmp(&info[i + 5], "0003", 4) == 0 ||
strncmp(&info[i + 5], "0004", 4) == 0 ||
strncmp(&info[i + 5], "0005", 4) == 0)
{
if (strncmp(&info[i + 6], "0003", 4) == 0 ||
strncmp(&info[i + 6], "0004", 4) == 0 ||
strncmp(&info[i + 6], "0005", 4) == 0)
ret = FALSE;
break;
}
break;
}
}
close(fd);
@@ -384,13 +371,15 @@ plugin_init (GstPlugin * plugin)
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
#ifndef USE_V4L2_TARGET_NV_X86
int result = -1;
result = (gboolean)system("lsmod | grep 'nvgpu' > /dev/null");
if (result == 0)
is_cuvid = FALSE;
else
int igpu = -1, dgpu = -1;
igpu = system("lsmod | grep 'nvgpu' > /dev/null");
dgpu = system("modprobe -D -q nvidia | grep 'dkms' > /dev/null");
if (igpu == -1 || dgpu == -1)
return FALSE;
else if (dgpu == 0)
is_cuvid = TRUE;
else
is_cuvid = FALSE;
if (getenv("AARCH64_DGPU"))
is_cuvid = TRUE;
@@ -428,11 +417,6 @@ plugin_init (GstPlugin * plugin)
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
} else {
if (!gst_v4l2_is_v4l2_nvenc_present()) {
// Orin Nano does not have HW encoders, so early return here.
@@ -448,11 +432,6 @@ plugin_init (GstPlugin * plugin)
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
}
if (is_cuvid == FALSE) {
@@ -469,19 +448,27 @@ plugin_init (GstPlugin * plugin)
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC,
NULL,
NULL);
} else {
if (gst_v4l2_has_vp8_encoder()) {
gst_v4l2_vp8_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
}
gst_v4l2_vp8_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_vp9_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
gst_v4l2_av1_enc_register (plugin,
V4L2_DEVICE_BASENAME_NVENC,
V4L2_DEVICE_PATH_NVENC_ALT,
NULL,
NULL);
}
}

View File

@@ -1,9 +1,7 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* SPDX-FileCopyrightText: Copyright (c) 2018-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -1188,7 +1186,7 @@ gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
for (i = 0; i < group->n_mem; i++) {
gint dmafd;
gsize size, offset = 0, maxsize = 0;
gsize size, offset, maxsize;
if (!gst_is_dmabuf_memory (dma_mem[i]))
goto not_dmabuf;
@@ -1369,19 +1367,13 @@ gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
if (obj->is_encode) {
if ((is_cuvid == true) && (obj->sei_payload != NULL)) {
gint ret;
struct v4l2_ext_control ctls[2];
struct v4l2_ext_control ctl;
struct v4l2_ext_controls ctrls;
ctls[0].id = V4L2_CID_MPEG_VIDEOENC_DS_SEI_DATA;
ctls[0].ptr = obj->sei_payload;
ctls[0].size = obj->sei_payload_size;
ctls[1].id = V4L2_CID_MPEG_VIDEOENC_DS_SEI_UUID;
ctls[1].string = obj->sei_uuid;
ctls[1].size = 16;
ctrls.count = 2;
ctrls.controls = ctls;
ctl.id = V4L2_CID_MPEG_VIDEOENC_DS_SEI_DATA;
ctl.ptr = obj->sei_payload;
ctl.size = obj->sei_payload_size;
ctrls.count = 1;
ctrls.controls = &ctl ;
ret = obj->ioctl (obj->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
{
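
For readers unfamiliar with the extended-controls API used in this hunk: the SEI payload is handed to the encoder as one compound control (id + size + pointer) in a single VIDIOC_S_EXT_CTRLS call. A minimal sketch, assuming the payload pointer travels through the string member of the control union as the NVIDIA extension controls do; the helper name is illustrative:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Sketch: set one compound (pointer + size) control in a single
 * VIDIOC_S_EXT_CTRLS call. Returns 0 on success, -1 on failure. */
static int
set_payload_control (int fd, unsigned int cid, void *payload, unsigned int size)
{
  struct v4l2_ext_control ctl;
  struct v4l2_ext_controls ctrls;

  memset (&ctl, 0, sizeof (ctl));
  memset (&ctrls, 0, sizeof (ctrls));

  ctl.id = cid;
  ctl.size = size;
  ctl.string = (char *) payload;   /* pointer member of the control union */

  ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  ctrls.count = 1;
  ctrls.controls = &ctl;

  return ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls) == 0 ? 0 : -1;
}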

View File

@@ -1,6 +1,5 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2021-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -57,7 +56,6 @@ enum
PROP_ENABLE_TILE_CONFIG,
PROP_DISABLE_CDF,
PROP_ENABLE_SSIMRDO,
PROP_INSERT_SEQ_HDR,
PROP_NUM_REFERENCE_FRAMES,
};
@@ -90,9 +88,6 @@ gst_v4l2_av1_enc_set_property (GObject * object,
case PROP_ENABLE_SSIMRDO:
self->EnableSsimRdo = g_value_get_boolean (value);
break;
case PROP_INSERT_SEQ_HDR:
self->insert_sps_pps = g_value_get_boolean (value);
break;
case PROP_NUM_REFERENCE_FRAMES:
self->nRefFrames = g_value_get_uint (value);
break;
@@ -120,9 +115,6 @@ gst_v4l2_av1_enc_get_property (GObject * object,
case PROP_ENABLE_SSIMRDO:
g_value_set_boolean (value, self->EnableSsimRdo);
break;
case PROP_INSERT_SEQ_HDR:
g_value_set_boolean (value, self->insert_sps_pps);
break;
case PROP_NUM_REFERENCE_FRAMES:
g_value_set_uint (value, self->nRefFrames);
break;
@@ -223,34 +215,23 @@ set_v4l2_av1_encoder_properties (GstVideoEncoder * encoder)
return FALSE;
}
if (is_cuvid == FALSE) {
if (self->EnableTileConfig) {
if (!gst_v4l2_av1_enc_tile_configuration (video_enc->v4l2output,
self->EnableTileConfig, self->Log2TileRows, self->Log2TileCols)) {
g_print ("S_EXT_CTRLS for Tile Configuration failed\n");
return FALSE;
}
}
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_AV1_DISABLE_CDF_UPDATE, self->DisableCDFUpdate)) {
g_print ("S_EXT_CTRLS for DisableCDF Update failed\n");
if (self->EnableTileConfig) {
if (!gst_v4l2_av1_enc_tile_configuration (video_enc->v4l2output,
self->EnableTileConfig, self->Log2TileRows, self->Log2TileCols)) {
g_print ("S_EXT_CTRLS for Tile Configuration failed\n");
return FALSE;
}
if (self->EnableSsimRdo) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_AV1_ENABLE_SSIMRDO, self->EnableSsimRdo)) {
g_print ("S_EXT_CTRLS for SSIM RDO failed\n");
return FALSE;
}
}
}
if (self->insert_sps_pps) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_AV1_DISABLE_CDF_UPDATE, self->DisableCDFUpdate)) {
g_print ("S_EXT_CTRLS for DisableCDF Update failed\n");
return FALSE;
}
if (self->EnableSsimRdo) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_SPS_PPS_AT_IDR, self->insert_sps_pps)) {
V4L2_CID_MPEG_VIDEOENC_AV1_ENABLE_SSIMRDO, self->EnableSsimRdo)) {
g_print ("S_EXT_CTRLS for SSIM RDO failed\n");
return FALSE;
}
@@ -274,7 +255,6 @@ gst_v4l2_av1_enc_init (GstV4l2Av1Enc * self)
self->EnableTileConfig = FALSE;
self->DisableCDFUpdate = TRUE;
self->EnableSsimRdo = FALSE;
self->insert_sps_pps = FALSE;
self->Log2TileRows= 0;
self->Log2TileCols= 0;
}
@@ -311,8 +291,6 @@ gst_v4l2_av1_enc_class_init (GstV4l2Av1EncClass * klass)
"Enable AV1 file and frame headers, if enabled, dump elementary stream",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
#if !defined(USE_V4L2_TARGET_NV_X86) && !defined(AARCH64_IS_SBSA)
g_object_class_install_property (gobject_class, PROP_ENABLE_TILE_CONFIG,
g_param_spec_string ("tiles", "AV1 Log2 Tile Configuration",
"Use string with values of Tile Configuration"
@@ -329,12 +307,6 @@ gst_v4l2_av1_enc_class_init (GstV4l2Av1EncClass * klass)
"Enable SSIM RDO",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
#endif
g_object_class_install_property (gobject_class, PROP_INSERT_SEQ_HDR,
g_param_spec_boolean ("insert-seq-hdr", "Insert sequence header",
"Insert sequence header at every IDR frame",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",

View File

@@ -1,6 +1,5 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2021-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -46,7 +45,6 @@ struct _GstV4l2Av1Enc
gboolean EnableTileConfig;
gboolean DisableCDFUpdate;
gboolean EnableSsimRdo;
gboolean insert_sps_pps;
guint32 Log2TileRows;
guint32 Log2TileCols;
guint32 nRefFrames;

View File

@@ -3,9 +3,7 @@
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* 2009 Texas Instruments, Inc - http://www.ti.com/
*
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2bufferpool.c V4L2 buffer pool class
*
@@ -82,8 +80,6 @@ report_metadata (GstV4l2Object * obj, guint32 buffer_index,
static void
v4l2_video_dec_get_enable_frame_type_reporting (GstV4l2Object * obj,
guint32 buffer_index, v4l2_ctrl_videodec_outputbuf_metadata * dec_metadata);
static void
v4l2_video_dec_clear_poll_interrupt (GstV4l2Object * obj);
#endif
static gboolean
@@ -144,7 +140,11 @@ static NvBufSurfTransform_Error CopySurfTransform(NvBufSurface* src, NvBufSurfac
NvBufSurfTransformParams transformParams;
NvBufSurfTransformRect srcRect;
NvBufSurfTransformRect destRect;
status = NvBufSurfTransformSetDefaultSession();
if (status != NvBufSurfTransformError_Success)
{
return status;
}
srcRect.top = srcRect.left = 0;
destRect.top = destRect.left = 0;
srcRect.width = src->surfaceList[0].width;
@@ -415,10 +415,16 @@ gst_v4l2_buffer_pool_import_userptr (GstV4l2BufferPool * pool,
for (i = 0; i < GST_VIDEO_FORMAT_INFO_N_PLANES (finfo); i++) {
if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
gint tinfo = GST_VIDEO_FRAME_PLANE_STRIDE (&data->frame, i);
size[i] = GST_VIDEO_TILE_X_TILES (tinfo) *
GST_VIDEO_TILE_Y_TILES (tinfo) *
GST_VIDEO_FORMAT_INFO_TILE_SIZE (finfo, i);
gint pstride;
guint pheight;
pstride = GST_VIDEO_TILE_X_TILES (tinfo) <<
GST_VIDEO_FORMAT_INFO_TILE_WS (finfo);
pheight = GST_VIDEO_TILE_Y_TILES (tinfo) <<
GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
size[i] = pstride * pheight;
} else {
size[i] = GST_VIDEO_FRAME_PLANE_STRIDE (&data->frame, i) *
GST_VIDEO_FRAME_COMP_HEIGHT (&data->frame, i);
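
The tiled branch above derives the plane size from the packed stride: the x/y tile counts are shifted by the format's tile width/height shifts (TILE_WS/TILE_HS, i.e. log2 of the tile dimensions in bytes and lines) and multiplied. A small worked sketch, assuming 64x32-byte tiles (ws = 6, hs = 5):

#include <glib.h>

/* Sketch of the tile arithmetic used above. */
static gsize
tiled_plane_size (guint x_tiles, guint y_tiles, guint ws, guint hs)
{
  gsize pstride = (gsize) x_tiles << ws;   /* bytes per row of tiles */
  gsize pheight = (gsize) y_tiles << hs;   /* lines covered by the tiles */

  /* e.g. 30 x-tiles, 17 y-tiles: (30 << 6) * (17 << 5) = 1920 * 544 = 1044480 */
  return pstride * pheight;
}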
@@ -722,20 +728,6 @@ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
for (i = 0; i < group->n_mem; i++)
gst_buffer_append_memory (newbuf, group->mem[i]);
#ifdef USE_V4L2_TARGET_NV
if (!V4L2_TYPE_IS_OUTPUT(obj->type) && is_cuvid == FALSE) {
GstMapInfo map = GST_MAP_INFO_INIT;
NvBufSurface *nvbuf_surf = NULL;
gst_buffer_map (newbuf, &map, GST_MAP_READ);
nvbuf_surf = (NvBufSurface *) map.data;
if (g_queue_find(pool->allocated_surfaces_queue, nvbuf_surf) == NULL)
{
g_queue_push_tail (pool->allocated_surfaces_queue, nvbuf_surf);
}
gst_buffer_unmap (newbuf, &map);
}
#endif
} else if (newbuf == NULL) {
goto allocation_failed;
}
@@ -765,14 +757,13 @@ gst_v4l2_buffer_pool_set_config (GstBufferPool * bpool, GstStructure * config)
GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
GstV4l2Object *obj = pool->obj;
GstCaps *caps;
guint size, min_buffers = 0, max_buffers = 0;
GstAllocator *allocator = NULL;
GstAllocationParams params = { 0, };
guint size, min_buffers, max_buffers;
GstAllocator *allocator;
GstAllocationParams params;
gboolean can_allocate = FALSE;
gboolean updated = FALSE;
gboolean ret;
gst_allocation_params_init (&params);
pool->add_videometa =
gst_buffer_pool_config_has_option (config,
GST_BUFFER_POOL_OPTION_VIDEO_META);
@@ -879,9 +870,7 @@ gst_v4l2_buffer_pool_set_config (GstBufferPool * bpool, GstStructure * config)
/* Always update the config to ensure the configured size matches */
if ((!strcmp (obj->videodev, V4L2_DEVICE_PATH_NVENC) || !strcmp (obj->videodev, V4L2_DEVICE_PATH_NVENC_ALT)) &&
(obj->mode == GST_V4L2_IO_DMABUF_IMPORT)) {
/* For DMABUF import, the queue size should be kept at its maximum to avoid frame drops coming from the decoder. */
min_buffers = max_buffers = 24; /* NvMM encoder has a max queue size of 24 */
gst_buffer_pool_config_set_params(config, caps, sizeof(NvBufSurface), min_buffers,
gst_buffer_pool_config_set_params (config, caps, sizeof (NvBufSurface), min_buffers,
max_buffers);
}
else
@@ -1006,12 +995,10 @@ gst_v4l2_buffer_pool_streamoff (GstV4l2BufferPool * pool)
#endif
gint i;
GST_OBJECT_LOCK (pool);
if (!pool->streaming) {
GST_OBJECT_UNLOCK (pool);
if (!pool->streaming)
return;
}
GST_OBJECT_LOCK (pool);
switch (obj->mode) {
case GST_V4L2_IO_MMAP:
@@ -1073,7 +1060,7 @@ gst_v4l2_buffer_pool_start (GstBufferPool * bpool)
GstV4l2Object *obj = pool->obj;
GstStructure *config;
GstCaps *caps;
guint size = 0, min_buffers = 0, max_buffers = 0;
guint size, min_buffers, max_buffers;
guint max_latency, min_latency, copy_threshold = 0;
gboolean can_allocate = FALSE, ret = TRUE;
@@ -1254,32 +1241,6 @@ gst_v4l2_buffer_pool_stop (GstBufferPool * bpool)
GST_DEBUG_OBJECT (pool, "stopping pool");
#ifdef USE_V4L2_TARGET_NV
/*
* On EOS, video_fd becomes -1, which makes the VIDIOC_REQBUFS API fail; that
* call normally releases the hardware buffers internally, so the failure
* causes a memory leak.
* In the code below, NvBufSurfaces are therefore destroyed explicitly.
*/
if (pool->vallocator) {
if (pool->vallocator->obj->video_fd == -1 &&
!V4L2_TYPE_IS_OUTPUT (pool->vallocator->obj->type) &&
is_cuvid == FALSE) {
while(g_queue_get_length(pool->allocated_surfaces_queue) > 0) {
int retval = 0;
NvBufSurface* allocated_surface = NULL;
allocated_surface = g_queue_pop_head (pool->allocated_surfaces_queue);
if (allocated_surface) {
retval = NvBufSurfaceDestroy(allocated_surface);
if(retval < 0) {
GST_ERROR_OBJECT (pool, "failed to destroy nvbufsurface");
return GST_V4L2_ERROR;
}
}
}
}
}
#endif
if (pool->group_released_handler > 0) {
g_signal_handler_disconnect (pool->vallocator,
pool->group_released_handler);
@@ -1333,11 +1294,6 @@ gst_v4l2_buffer_pool_flush_start (GstBufferPool * bpool)
#ifndef USE_V4L2_TARGET_NV
gst_poll_set_flushing (pool->poll, TRUE);
#else
if (is_cuvid == TRUE)
{
v4l2_video_dec_clear_poll_interrupt (pool->obj);
}
#endif
GST_OBJECT_LOCK (pool);
@@ -1347,8 +1303,6 @@ gst_v4l2_buffer_pool_flush_start (GstBufferPool * bpool)
if (pool->other_pool)
gst_buffer_pool_set_flushing (pool->other_pool, TRUE);
GST_DEBUG_OBJECT (pool, "End flushing");
}
static void
@@ -1930,9 +1884,7 @@ gst_v4l2_buffer_pool_finalize (GObject * object)
if (pool->video_fd >= 0)
pool->obj->close (pool->video_fd);
#ifdef USE_V4L2_TARGET_NV
g_queue_free(pool->allocated_surfaces_queue);
#else
#ifndef USE_V4L2_TARGET_NV
gst_poll_free (pool->poll);
#endif
@@ -1956,10 +1908,6 @@ gst_v4l2_buffer_pool_init (GstV4l2BufferPool * pool)
#endif
g_cond_init (&pool->empty_cond);
pool->empty = TRUE;
#ifdef USE_V4L2_TARGET_NV
pool->allocated_surfaces_queue = g_queue_new();
#endif
}
static void
@@ -2084,7 +2032,7 @@ gst_v4l2_do_read (GstV4l2BufferPool * pool, GstBuffer * buf)
GstFlowReturn res;
GstV4l2Object *obj = pool->obj;
gint amount;
GstMapInfo map = GST_MAP_INFO_INIT;
GstMapInfo map;
gint toread;
toread = obj->info.size;
@@ -2613,25 +2561,5 @@ v4l2_video_dec_get_enable_frame_type_reporting (GstV4l2Object * obj,
if (ret < 0)
g_print ("Error while getting report metadata\n");
}
static void
v4l2_video_dec_clear_poll_interrupt (GstV4l2Object * obj)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret = -1;
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_SET_POLL_INTERRUPT;
control.value = 0;
ret = obj->ioctl (obj->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0)
g_print ("Error while clearing poll interrupt\n");
}
#endif

View File

@@ -3,9 +3,7 @@
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* 2009 Texas Instruments, Inc - http://www.ti.com/
*
* SPDX-FileCopyrightText: Copyright (c) 2018-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2bufferpool.h V4L2 buffer pool class
*
@@ -95,7 +93,6 @@ struct _GstV4l2BufferPool
#ifdef USE_V4L2_TARGET_NV
GstBuffer *buffers[NV_VIDEO_MAX_FRAME];
GQueue *allocated_surfaces_queue;
#else
GstBuffer *buffers[VIDEO_MAX_FRAME];
#endif

View File

@@ -1,8 +1,7 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association
* Author: ayaka <ayaka@soulik.info>
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -50,8 +49,6 @@ gst_v4l2_videnc_profile_get_type (void);
gboolean gst_v4l2_h264_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type);
gboolean set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder);
gboolean gst_v4l2_h264_enc_slice_intrarefresh (GstV4l2Object * v4l2object,
guint32 slice_count, guint32 slice_interval);
#endif
#ifdef USE_V4L2_TARGET_NV
@@ -118,7 +115,7 @@ enum
#define MAX_NUM_REFERENCE_FRAMES 8
#define DEFAULT_BIT_PACKETIZATION FALSE
#define DEFAULT_SLICE_HEADER_SPACING 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 60
#define DEFAULT_PIC_ORDER_CNT_TYPE 0
#endif
@@ -305,8 +302,6 @@ v4l2_profile_from_string (const gchar * profile)
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH;
} else if (g_str_equal (profile, "multiview-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH;
} else if (g_str_equal (profile, "constrained-high")) {
v4l2_profile = V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_HIGH;
} else {
GST_WARNING ("Unsupported profile string '%s'", profile);
}
@@ -352,8 +347,6 @@ v4l2_profile_to_string (gint v4l2_profile)
return "stereo-high";
case V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH:
return "multiview-high";
case V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_HIGH:
return "constrained-high";
default:
GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
break;
@@ -456,20 +449,14 @@ gst_v4l2_h264_enc_init (GstV4l2H264Enc * self)
self->profile = DEFAULT_PROFILE;
self->insert_sps_pps = FALSE;
self->insert_aud = FALSE;
self->enableLossless = FALSE;
self->nRefFrames = 1;
if (is_cuvid == TRUE)
{
self->extended_colorformat = FALSE;
self->nRefFrames = 0;
self->insert_vui = TRUE;
}
#if !defined(USE_V4L2_TARGET_NV_X86) && !defined(AARCH64_IS_SBSA)
self->insert_vui = FALSE;
#endif
self->enableLossless = FALSE;
if (is_cuvid == TRUE)
self->extended_colorformat = FALSE;
self->nBFrames = 0;
self->nRefFrames = 1;
self->bit_packetization = DEFAULT_BIT_PACKETIZATION;
self->slice_header_spacing = DEFAULT_SLICE_HEADER_SPACING;
self->poc_type = DEFAULT_PIC_ORDER_CNT_TYPE;
@@ -510,16 +497,26 @@ gst_v4l2_h264_enc_class_init (GstV4l2H264EncClass * klass)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI Info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_PIC_ORDER_CNT_TYPE,
g_param_spec_uint ("poc-type",
"Picture Order Count type",
"Set Picture Order Count type value",
0, 2, DEFAULT_PIC_ORDER_CNT_TYPE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
g_param_spec_boolean ("insert-vui",
"Insert H.264 VUI",
"Insert H.264 VUI(Video Usability Information) in SPS",
#if !defined(USE_V4L2_TARGET_NV_X86) && !defined(AARCH64_IS_SBSA)
FALSE,
#else
TRUE,
#endif
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_SPS_PPS,
g_param_spec_boolean ("insert-sps-pps",
@@ -561,43 +558,6 @@ gst_v4l2_h264_enc_class_init (GstV4l2H264EncClass * klass)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder",
0, MAX_NUM_REFERENCE_FRAMES, (is_cuvid == TRUE) ? 0 : DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_LOSSLESS_ENC,
g_param_spec_boolean ("enable-lossless",
"Enable Lossless encoding",
"Enable lossless encoding for YUV444",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI Info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_PIC_ORDER_CNT_TYPE,
g_param_spec_uint ("poc-type",
"Picture Order Count type",
"Set Picture Order Count type value",
0, 2, DEFAULT_PIC_ORDER_CNT_TYPE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_MV_META,
g_param_spec_boolean ("EnableMVBufferMeta",
"Enable Motion Vector Meta data",
@@ -605,12 +565,35 @@ gst_v4l2_h264_enc_class_init (GstV4l2H264EncClass * klass)
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_TWO_PASS_CBR,
g_param_spec_boolean ("EnableTwopassCBR",
"Enable Two pass CBR",
"Enable two pass CBR while encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_REFERENCE_FRAMES,
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder",
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_LOSSLESS_ENC,
g_param_spec_boolean ("enable-lossless",
"Enable Lossless encoding",
"Enable lossless encoding for YUV444",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
}
#endif
baseclass->codec_name = "H264";
@@ -646,21 +629,16 @@ gst_v4l2_h264_enc_register (GstPlugin * plugin, const gchar * basename,
static GType
gst_v4l2_videnc_profile_get_type (void)
{
static GType profile = 0;
static volatile gsize profile = 0;
static const GEnumValue profile_type[] = {
{V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE,
"GST_V4L2_H264_VIDENC_BASELINE_PROFILE",
"Baseline"},
{V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE,
"GST_V4L2_H264_VIDENC_CONSTRAINED_BASELINE_PROFILE",
"Constrained-Baseline"},
{V4L2_MPEG_VIDEO_H264_PROFILE_MAIN, "GST_V4L2_H264_VIDENC_MAIN_PROFILE",
"Main"},
{V4L2_MPEG_VIDEO_H264_PROFILE_HIGH, "GST_V4L2_H264_VIDENC_HIGH_PROFILE",
"High"},
{V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_HIGH, "GST_V4L2_H264_VIDENC_CONSTRAINED_HIGH_PROFILE",
"Constrained-High"},
{V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE, "GST_V4L2_H264_VIDENC_HIGH_444_PREDICTIVE_PROFILE",
{V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE, "GST_V4L2_H264_VIDENC_HIGH_444_PREDICTIVE",
"High444"},
{0, NULL, NULL}
};
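
The guard-variable change above (GType versus volatile gsize) exists so the registration can use GLib's once-init primitives. For reference, a minimal sketch of that idiom, assuming the profile_type table from this hunk is in scope; the type name string is illustrative:

#include <glib-object.h>

static GType
get_profile_type_once (void)
{
  static gsize profile = 0;

  if (g_once_init_enter (&profile)) {
    /* Register the enum exactly once, even with concurrent callers. */
    GType t = g_enum_register_static ("SketchV4l2VidEncProfile", profile_type);
    g_once_init_leave (&profile, t);
  }
  return (GType) profile;
}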
@@ -724,41 +702,6 @@ gst_v4l2_h264_enc_slice_header_spacing (GstV4l2Object * v4l2object,
return TRUE;
}
gboolean
gst_v4l2_h264_enc_slice_intrarefresh (GstV4l2Object * v4l2object,
guint32 slice_count, guint32 slice_interval)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_ctrl_intra_refresh cuvid_param = {1, slice_interval, slice_count};
v4l2_enc_slice_intrarefresh_param param = {slice_count};
memset (&control, 0, sizeof (control));
memset (&ctrls, 0, sizeof (ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM;
if (is_cuvid)
control.string = (gchar *)&cuvid_param;
else
control.string = (gchar *)&param;
ret = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0)
{
g_print("Error while setting slice intrarefresh params\n");
return FALSE;
}
return TRUE;
}
gboolean
set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder)
{
@@ -788,10 +731,12 @@ set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder)
}
}
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, self->insert_vui)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
if (self->insert_vui) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, 1)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
}
}
if (is_cuvid == TRUE) {
@@ -851,9 +796,9 @@ set_v4l2_h264_encoder_properties (GstVideoEncoder * encoder)
}
if (self->SliceIntraRefreshInterval) {
if (!gst_v4l2_h264_enc_slice_intrarefresh (video_enc->v4l2output,
self->SliceIntraRefreshInterval,
video_enc->idrinterval)) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM,
self->SliceIntraRefreshInterval)) {
g_print ("S_EXT_CTRLS for SLICE_INTRAREFRESH_PARAM failed\n");
return FALSE;
}

View File

@@ -1,6 +1,5 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -49,13 +48,12 @@ gst_v4l2_videnc_profile_get_type (void);
#define GST_TYPE_V4L2_VID_ENC_PROFILE (gst_v4l2_videnc_profile_get_type ())
/* prototypes */
gboolean set_v4l2_h265_encoder_properties(GstVideoEncoder * encoder);
gboolean set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder);
gboolean gst_v4l2_h265_enc_slice_header_spacing (GstV4l2Object * v4l2object,
guint32 slice_header_spacing, enum v4l2_enc_slice_length_type slice_length_type);
void set_h265_video_enc_property (GstV4l2Object * v4l2object, guint label,
gint param);
gboolean gst_v4l2_h265_enc_slice_intrarefresh (GstV4l2Object * v4l2object,
guint32 slice_count, guint32 slice_interval);
enum
{
PROP_0,
@@ -78,7 +76,7 @@ enum
#define DEFAULT_PROFILE V4L2_MPEG_VIDEO_H265_PROFILE_MAIN
#define DEFAULT_BIT_PACKETIZATION FALSE
#define DEFAULT_SLICE_HEADER_SPACING 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 0
#define DEFAULT_INTRA_REFRESH_FRAME_INTERVAL 60
#define DEFAULT_NUM_B_FRAMES 0
#define MAX_NUM_B_FRAMES 2
#define DEFAULT_NUM_REFERENCE_FRAMES 1
@@ -247,64 +245,10 @@ static gint
v4l2_level_from_string (const gchar * level)
{
gint v4l2_level = -1;
#ifdef USE_V4L2_TARGET_NV
if (g_str_equal(level, "main_1.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_1_0_MAIN_TIER;
} else if (g_str_equal(level, "high_1.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_1_0_HIGH_TIER;
} else if (g_str_equal(level, "main_2.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_2_0_MAIN_TIER;
} else if (g_str_equal(level, "high_2.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_2_0_HIGH_TIER;
} else if (g_str_equal(level, "main_2.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_2_1_MAIN_TIER;
} else if (g_str_equal(level, "high_2.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_2_1_HIGH_TIER;
} else if (g_str_equal(level, "main_3.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_3_0_MAIN_TIER;
} else if (g_str_equal(level, "high_3.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_3_0_HIGH_TIER;
} else if (g_str_equal(level, "main_3.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_3_1_MAIN_TIER;
} else if (g_str_equal(level, "high_3.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_3_1_HIGH_TIER;
} else if (g_str_equal(level, "main_4.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_4_0_MAIN_TIER;
} else if (g_str_equal(level, "high_4.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_4_0_HIGH_TIER;
} else if (g_str_equal(level, "main_4.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_4_1_MAIN_TIER;
} else if (g_str_equal(level, "high_4.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_4_1_HIGH_TIER;
} else if (g_str_equal(level, "main_5.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_5_0_MAIN_TIER;
} else if (g_str_equal(level, "high_5.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_5_0_HIGH_TIER;
} else if (g_str_equal(level, "main_5.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_5_1_MAIN_TIER;
} else if (g_str_equal(level, "high_5.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_5_1_HIGH_TIER;
} else if (g_str_equal(level, "main_5.2")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_5_2_MAIN_TIER;
} else if (g_str_equal(level, "high_5.2")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_5_2_HIGH_TIER;
} else if (g_str_equal(level, "main_6.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_6_0_MAIN_TIER;
} else if (g_str_equal(level, "high_6.0")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_6_0_HIGH_TIER;
} else if (g_str_equal(level, "main_6.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_6_1_MAIN_TIER;
} else if (g_str_equal(level, "high_6.1")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_6_1_HIGH_TIER;
} else if (g_str_equal(level, "main_6.2")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_6_2_MAIN_TIER;
} else if (g_str_equal(level, "high_6.2")) {
v4l2_level = V4L2_MPEG_VIDEO_H265_LEVEL_6_2_HIGH_TIER;
} else
{
GST_WARNING("Unsupported level string '%s'", level);
}
#endif
// TODO: Since the videodev2 header does not list H265 profiles,
// we need to add the profiles inside v4l2_nv_extensions.h
// and use them here.
return v4l2_level;
}
@@ -312,68 +256,6 @@ v4l2_level_from_string (const gchar * level)
static const gchar *
v4l2_level_to_string (gint v4l2_level)
{
#ifdef USE_V4L2_TARGET_NV
switch (v4l2_level)
{
case V4L2_MPEG_VIDEO_H265_LEVEL_1_0_MAIN_TIER:
return "main_1.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_1_0_HIGH_TIER:
return "high_1.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_2_0_MAIN_TIER:
return "main_2.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_2_0_HIGH_TIER:
return "high_2.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_2_1_MAIN_TIER:
return "main_2.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_2_1_HIGH_TIER:
return "high_2.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_3_0_MAIN_TIER:
return "main_3.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_3_0_HIGH_TIER:
return "high_3.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_3_1_MAIN_TIER:
return "main_3.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_3_1_HIGH_TIER:
return "high_3.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_4_0_MAIN_TIER:
return "main_4.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_4_0_HIGH_TIER:
return "high_4.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_4_1_MAIN_TIER:
return "main_4.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_4_1_HIGH_TIER:
return "high_4.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_5_0_MAIN_TIER:
return "main_5.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_5_0_HIGH_TIER:
return "high_5.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_5_1_MAIN_TIER:
return "main_5.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_5_1_HIGH_TIER:
return "high_5.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_5_2_MAIN_TIER:
return "main_5.2";
case V4L2_MPEG_VIDEO_H265_LEVEL_5_2_HIGH_TIER:
return "high_5.2";
case V4L2_MPEG_VIDEO_H265_LEVEL_6_0_MAIN_TIER:
return "main_6.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_6_0_HIGH_TIER:
return "high_6.0";
case V4L2_MPEG_VIDEO_H265_LEVEL_6_1_MAIN_TIER:
return "main_6.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_6_1_HIGH_TIER:
return "high_6.1";
case V4L2_MPEG_VIDEO_H265_LEVEL_6_2_MAIN_TIER:
return "main_6.2";
case V4L2_MPEG_VIDEO_H265_LEVEL_6_2_HIGH_TIER:
return "high_6.2";
default:
GST_WARNING("Unsupported V4L2 level %i", v4l2_level);
break;
}
#endif
return NULL;
}
@@ -383,22 +265,13 @@ gst_v4l2_h265_enc_init (GstV4l2H265Enc * self)
self->insert_sps_pps = FALSE;
self->profile = DEFAULT_PROFILE;
self->insert_aud = FALSE;
self->insert_vui = FALSE;
self->extended_colorformat = FALSE;
self->bit_packetization = DEFAULT_BIT_PACKETIZATION;
self->slice_header_spacing = DEFAULT_SLICE_HEADER_SPACING;
self->nRefFrames = 1;
self->nBFrames = 0;
self->enableLossless = FALSE;
if (is_cuvid == TRUE)
{
self->extended_colorformat = FALSE;
self->nRefFrames = 0;
self->insert_vui = TRUE;
}
#if !defined(USE_V4L2_TARGET_NV_X86) && !defined(AARCH64_IS_SBSA)
self->insert_vui = FALSE;
#endif
}
static void
@@ -436,17 +309,24 @@ gst_v4l2_h265_enc_class_init (GstV4l2H265EncClass * klass)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_INSERT_SPS_PPS,
g_param_spec_boolean ("insert-sps-pps",
"Insert H.265 SPS, PPS",
"Insert H.265 SPS, PPS at every IDR frame",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_INSERT_SPS_PPS,
g_param_spec_boolean ("insert-sps-pps",
"Insert H.265 SPS, PPS",
"Insert H.265 SPS, PPS at every IDR frame",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
g_object_class_install_property (gobject_class, PROP_INSERT_VUI,
g_param_spec_boolean ("insert-vui",
"Insert H.265 VUI",
"Insert H.265 VUI(Video Usability Information) in SPS",
(is_cuvid == TRUE) ? TRUE : FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INSERT_AUD,
g_param_spec_boolean ("insert-aud",
@@ -468,6 +348,28 @@ gst_v4l2_h265_enc_class_init (GstV4l2H265EncClass * klass)
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_ENABLE_MV_META,
g_param_spec_boolean ("EnableMVBufferMeta",
"Enable Motion Vector Meta data",
"Enable Motion Vector Meta data for encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class,
PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_TWO_PASS_CBR,
g_param_spec_boolean ("EnableTwopassCBR",
"Enable Two pass CBR",
"Enable two pass CBR while encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_NUM_BFRAMES,
g_param_spec_uint ("num-B-Frames",
"B Frames between two reference frames",
@@ -480,7 +382,7 @@ gst_v4l2_h265_enc_class_init (GstV4l2H265EncClass * klass)
g_param_spec_uint ("num-Ref-Frames",
"Sets the number of reference frames for encoder",
"Number of Reference Frames for encoder",
0, MAX_NUM_REFERENCE_FRAMES, (is_cuvid == TRUE) ? 0 : DEFAULT_NUM_REFERENCE_FRAMES,
0, MAX_NUM_REFERENCE_FRAMES, DEFAULT_NUM_REFERENCE_FRAMES,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
@@ -490,34 +392,6 @@ gst_v4l2_h265_enc_class_init (GstV4l2H265EncClass * klass)
"Enable lossless encoding for YUV444",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_SLICE_INTRA_REFRESH_INTERVAL,
g_param_spec_uint ("SliceIntraRefreshInterval",
"SliceIntraRefreshInterval", "Set SliceIntraRefreshInterval", 0,
G_MAXUINT, DEFAULT_INTRA_REFRESH_FRAME_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
if (is_cuvid == TRUE) {
g_object_class_install_property (gobject_class, PROP_EXTENDED_COLORFORMAT,
g_param_spec_boolean ("extended-colorformat",
"Set Extended ColorFormat",
"Set Extended ColorFormat pixel values 0 to 255 in VUI info",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
} else if (is_cuvid == FALSE) {
g_object_class_install_property (gobject_class, PROP_ENABLE_MV_META,
g_param_spec_boolean ("EnableMVBufferMeta",
"Enable Motion Vector Meta data",
"Enable Motion Vector Meta data for encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
g_object_class_install_property (gobject_class, PROP_TWO_PASS_CBR,
g_param_spec_boolean ("EnableTwopassCBR",
"Enable Two pass CBR",
"Enable two pass CBR while encoding",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
GST_PARAM_MUTABLE_READY));
}
#endif
@@ -525,7 +399,7 @@ gst_v4l2_h265_enc_class_init (GstV4l2H265EncClass * klass)
baseclass->profile_cid = V4L2_CID_MPEG_VIDEO_H265_PROFILE;
baseclass->profile_to_string = v4l2_profile_to_string;
baseclass->profile_from_string = v4l2_profile_from_string;
baseclass->level_cid = V4L2_CID_MPEG_VIDEOENC_H265_LEVEL;
//baseclass->level_cid = V4L2_CID_MPEG_VIDEO_H265_LEVEL;
baseclass->level_to_string = v4l2_level_to_string;
baseclass->level_from_string = v4l2_level_from_string;
baseclass->set_encoder_properties = set_v4l2_h265_encoder_properties;
@@ -551,7 +425,7 @@ gst_v4l2_h265_enc_register (GstPlugin * plugin, const gchar * basename,
static GType
gst_v4l2_videnc_profile_get_type (void)
{
static GType profile = 0;
static volatile gsize profile = 0;
static const GEnumValue profile_type[] = {
{V4L2_MPEG_VIDEO_H265_PROFILE_MAIN,
"GST_V4L2_H265_VIDENC_MAIN_PROFILE", "Main"},
@@ -598,41 +472,6 @@ gst_v4l2_h265_enc_slice_header_spacing (GstV4l2Object * v4l2object,
return TRUE;
}
gboolean
gst_v4l2_h265_enc_slice_intrarefresh(GstV4l2Object *v4l2object,
guint32 slice_count, guint32 slice_interval)
{
struct v4l2_ext_control control;
struct v4l2_ext_controls ctrls;
gint ret;
v4l2_ctrl_intra_refresh cuvid_param = {1, slice_interval, slice_count};
v4l2_enc_slice_intrarefresh_param param = {slice_count};
memset(&control, 0, sizeof(control));
memset(&ctrls, 0, sizeof(ctrls));
ctrls.count = 1;
ctrls.controls = &control;
ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
control.id = V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM;
if (is_cuvid)
control.string = (gchar *)&cuvid_param;
else
control.string = (gchar *)&param;
ret = v4l2object->ioctl(v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret < 0)
{
g_print("Error while setting slice intrarefresh params\n");
return FALSE;
}
return TRUE;
}
gboolean
set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder)
{
@@ -660,10 +499,12 @@ set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder)
}
}
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, self->insert_vui)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
if (self->insert_vui) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_INSERT_VUI, 1)) {
g_print ("S_EXT_CTRLS for INSERT_VUI failed\n");
return FALSE;
}
}
if (self->extended_colorformat) {
@@ -704,9 +545,9 @@ set_v4l2_h265_encoder_properties (GstVideoEncoder * encoder)
}
if (self->SliceIntraRefreshInterval) {
if (!gst_v4l2_h265_enc_slice_intrarefresh (video_enc->v4l2output,
self->SliceIntraRefreshInterval,
video_enc->idrinterval)) {
if (!set_v4l2_video_mpeg_class (video_enc->v4l2output,
V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM,
self->SliceIntraRefreshInterval)) {
g_print ("S_EXT_CTRLS for SLICE_INTRAREFRESH_PARAM failed\n");
return FALSE;
}

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
* Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -43,7 +43,6 @@ struct _GstV4l2H265Enc
GstV4l2VideoEnc parent;
gboolean insert_sps_pps;
guint profile;
guint level;
guint nBFrames;
guint nRefFrames;
gboolean insert_aud;

View File

@@ -89,6 +89,19 @@ h264_parse_nalu_header (H264NalUnit * nalu)
return TRUE;
}
static gboolean
h264_sps_copy (H264SPS * dst_sps, const H264SPS * src_sps)
{
g_return_val_if_fail (dst_sps != NULL, FALSE);
g_return_val_if_fail (src_sps != NULL, FALSE);
h264_sps_clear (dst_sps);
*dst_sps = *src_sps;
return TRUE;
}
static gboolean
h264_parser_parse_scaling_list (NalReader * nr,
guint8 scaling_lists_4x4[6][16], guint8 scaling_lists_8x8[6][64],
@@ -327,12 +340,20 @@ beach:
}
H264ParserResult
h264_parser_parse_sps (H264NalUnit * nalu,
h264_parser_parse_sps (H264NalParser * nalparser, H264NalUnit * nalu,
H264SPS * sps, gboolean parse_vui_params)
{
H264ParserResult res = h264_parse_sps (nalu, sps, parse_vui_params);
return res;
if (res == H264_PARSER_OK) {
GST_DEBUG ("adding sequence parameter set with id: %d to array", sps->id);
if (!h264_sps_copy (&nalparser->sps[sps->id], sps))
return H264_PARSER_ERROR;
nalparser->last_sps = &nalparser->sps[sps->id];
}
return res;
}
/* Parse seq_parameter_set_data() */
@@ -517,7 +538,7 @@ static gboolean
h265_parse_nalu_header (H265NalUnit * nalu)
{
guint8 *data = nalu->data + nalu->offset;
GstBitReader br = {0};
GstBitReader br;
if (nalu->size < 2)
return FALSE;
@@ -731,7 +752,7 @@ h265_parser_identify_nalu_hevc (H265Parser * parser,
const guint8 * data, guint offset, gsize size, guint8 nal_length_size,
H265NalUnit * nalu)
{
GstBitReader br = {0};
GstBitReader br;
memset (nalu, 0, sizeof (*nalu));
@@ -777,6 +798,13 @@ h265_parser_parse_sps (H265Parser * parser, H265NalUnit * nalu,
{
H265ParserResult res =
h265_parse_sps (parser, nalu, sps, parse_vui_params);
return res;
if (res == H265_PARSER_OK) {
GST_DEBUG ("adding sequence parameter set with id: %d to array", sps->id);
parser->sps[sps->id] = *sps;
parser->last_sps = &parser->sps[sps->id];
}
return res;
}

View File

@@ -215,7 +215,7 @@ H264ParserResult h264_parser_identify_nalu_unchecked (H264NalParser *nalparser,
gsize size, H264NalUnit *nalu);
H264ParserResult h264_parser_parse_sps (H264NalUnit *nalu,
H264ParserResult h264_parser_parse_sps (H264NalParser *nalparser, H264NalUnit *nalu,
H264SPS *sps, gboolean parse_vui_params);

View File

@@ -2,26 +2,21 @@
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
*
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2object.c: base class for V4L2 elements
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* This library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version. This library is distributed in the hope
* that it will be useful, but WITHOUT ANY WARRANTY; without even the
* implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
* PURPOSE. See the GNU Library General Public License for more details.
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
* USA.
*/
#ifdef HAVE_CONFIG_H
@@ -67,12 +62,6 @@ GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define ENCODED_BUFFER_SIZE (4 * 1024 * 1024)
#ifdef USE_V4L2_TARGET_NV
/* Structure to hold the video info in order to modify the contents, in case of
* the GST_VIDEO_FORMAT_I420_12LE format */
static GstVideoFormatInfo video_info;
#endif
enum
{
PROP_0,
@@ -185,7 +174,6 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = {
{V4L2_PIX_FMT_NV24M, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_YUV444_10LE, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_YUV444_12LE, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_P216M, TRUE, GST_V4L2_RAW},
#endif
/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
{V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC},
@@ -1155,7 +1143,6 @@ gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
case V4L2_PIX_FMT_NV24M:
case V4L2_PIX_FMT_YUV444_10LE:
case V4L2_PIX_FMT_YUV444_12LE:
case V4L2_PIX_FMT_P216M:
#endif
case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
@@ -1197,9 +1184,6 @@ gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
rank = YUV_BASE_RANK + 4;
break;
#ifdef USE_V4L2_TARGET_NV
case V4L2_PIX_FMT_YUV422M:
#endif
case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
rank = YUV_BASE_RANK + 8;
break;
@@ -1449,11 +1433,6 @@ gst_v4l2_object_v4l2fourcc_to_video_format (guint32 fourcc)
case V4L2_PIX_FMT_YUV444_12LE:
format = GST_VIDEO_FORMAT_Y444_12LE;
break;
case V4L2_PIX_FMT_P216M:
/* NOTE: GStreamer does not support the P216 video format (16-bit NV16) yet.
Hence, as a workaround (WAR) it is mapped to GST_VIDEO_FORMAT_NV16_10LE32 for now. */
format = GST_VIDEO_FORMAT_NV16_10LE32;
break;
#endif
case V4L2_PIX_FMT_NV12MT:
format = GST_VIDEO_FORMAT_NV12_64Z32;
@@ -1651,7 +1630,6 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc)
case V4L2_PIX_FMT_YUV444:
case V4L2_PIX_FMT_YUV444_10LE:
case V4L2_PIX_FMT_YUV444_12LE:
case V4L2_PIX_FMT_P216M:
#endif
case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
case V4L2_PIX_FMT_YVU410:
@@ -1907,9 +1885,6 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
break;
case GST_VIDEO_FORMAT_Y42B:
fourcc = V4L2_PIX_FMT_YUV422P;
#ifdef USE_V4L2_TARGET_NV
fourcc_nc = V4L2_PIX_FMT_YUV422M;
#endif
break;
case GST_VIDEO_FORMAT_NV12:
fourcc = V4L2_PIX_FMT_NV12;
@@ -1930,9 +1905,6 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
case GST_VIDEO_FORMAT_Y444_12LE:
fourcc_nc = V4L2_PIX_FMT_YUV444_12LE;
break;
case GST_VIDEO_FORMAT_NV16_10LE32:
fourcc = V4L2_PIX_FMT_P216M;
break;
#endif
case GST_VIDEO_FORMAT_NV12_64Z32:
fourcc_nc = V4L2_PIX_FMT_NV12MT;
@@ -2011,7 +1983,7 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
} else if (g_str_equal (mimetype, "image/jpeg")) {
fourcc = V4L2_PIX_FMT_JPEG;
} else if (g_str_equal (mimetype, "video/mpeg")) {
gint version = 0;
gint version;
if (gst_structure_get_int (structure, "mpegversion", &version)) {
switch (version) {
case 1:
@@ -2319,10 +2291,8 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt,
switch (transfer) {
case V4L2_XFER_FUNC_709:
if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
if (fmt->fmt.pix.height >= 2160)
cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
else if (colorspace == V4L2_COLORSPACE_SMPTE170M)
cinfo->transfer = GST_VIDEO_TRANSFER_BT601;
else
cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
break;
@@ -2338,9 +2308,6 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt,
case V4L2_XFER_FUNC_NONE:
cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
break;
case V4L2_XFER_FUNC_SMPTE2084:
cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE2084;
break;
case V4L2_XFER_FUNC_DEFAULT:
/* nothing, just use defaults for colorspace */
break;
@@ -2446,18 +2413,11 @@ gst_v4l2_object_add_interlace_mode (GstV4l2Object * v4l2object,
gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum);
}
#ifdef USE_V4L2_TARGET_NV
if (gst_value_list_get_size (&interlace_formats))
{
#endif
if (gst_v4l2src_value_simplify (&interlace_formats)
|| gst_value_list_get_size (&interlace_formats) > 0)
gst_structure_take_value (s, "interlace-mode", &interlace_formats);
else
GST_WARNING_OBJECT (v4l2object, "Failed to determine interlace mode");
#ifdef USE_V4L2_TARGET_NV
}
#endif
if (gst_v4l2src_value_simplify (&interlace_formats)
|| gst_value_list_get_size (&interlace_formats) > 0)
gst_structure_take_value (s, "interlace-mode", &interlace_formats);
else
GST_WARNING_OBJECT (v4l2object, "Failed to determine interlace mode");
g_value_unset(&interlace_formats);
return;
@@ -2766,7 +2726,7 @@ unknown_type:
static gint
sort_by_frame_size (GstStructure * s1, GstStructure * s2)
{
int w1 = 0, h1 = 0, w2 = 0, h2 = 0;
int w1, h1, w2, h2;
gst_structure_get_int (s1, "width", &w1);
gst_structure_get_int (s1, "height", &h1);
@@ -3188,7 +3148,12 @@ gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
|| !strcmp (v4l2object->videodev, V4L2_DEVICE_PATH_NVENC_ALT))) {
/* Currently, the DMABUF_IMPORT I/O mode is used on the encoder
output plane when the default mode V4L2_IO_AUTO is set */
mode = GST_V4L2_IO_DMABUF_IMPORT;
if (is_cuvid == TRUE) {
mode = GST_V4L2_IO_MMAP; //TODO make this default to dmabuf_import
} else if (is_cuvid == FALSE) {
mode = GST_V4L2_IO_DMABUF_IMPORT;
}
} else {
if (is_cuvid == TRUE){
mode = GST_V4L2_IO_MMAP;
@@ -3280,15 +3245,19 @@ gst_v4l2_object_set_stride (GstVideoInfo * info, GstVideoAlignment * align,
const GstVideoFormatInfo *finfo = info->finfo;
if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
gint x_tiles, y_tiles, tile_height, padded_height;
gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
tile_height = GST_VIDEO_FORMAT_INFO_TILE_HEIGHT (finfo, plane);
ws = GST_VIDEO_FORMAT_INFO_TILE_WS (finfo);
hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
tile_height = 1 << hs;
padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, plane,
info->height + align->padding_top + align->padding_bottom);
padded_height = GST_ROUND_UP_N (padded_height, tile_height);
x_tiles = stride / GST_VIDEO_FORMAT_INFO_TILE_STRIDE (finfo, plane);
y_tiles = (padded_height + tile_height - 1) / tile_height;
x_tiles = stride >> ws;
y_tiles = padded_height >> hs;
info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE (x_tiles, y_tiles);
} else {
info->stride[plane] = stride;
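
The tiled path above packs the x/y tile counts into a single stride value with GST_VIDEO_TILE_MAKE_STRIDE. A short sketch of the round trip, assuming the standard GStreamer video-tile macros (each count must fit the macro's 16-bit field):

#include <gst/video/video.h>

/* Sketch: pack and unpack a tiled stride the way the code above does. */
static void
tiled_stride_roundtrip (gint x_tiles, gint y_tiles)
{
  gint stride = GST_VIDEO_TILE_MAKE_STRIDE (x_tiles, y_tiles);

  g_assert (GST_VIDEO_TILE_X_TILES (stride) == x_tiles);
  g_assert (GST_VIDEO_TILE_Y_TILES (stride) == y_tiles);
}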
@@ -3382,10 +3351,12 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object,
padded_height = format->fmt.pix.height;
if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
guint tile_height;
tile_height = GST_VIDEO_FORMAT_INFO_TILE_HEIGHT (finfo,0);
/* Round-up to tile_height as drivers are not forced to do so */
padded_height = (padded_height + tile_height - 1) / tile_height * tile_height;
guint hs, tile_height;
hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
tile_height = 1 << hs;
padded_height = GST_ROUND_UP_N (padded_height, tile_height);
}
align->padding_bottom = padded_height - info->height - align->padding_top;
@@ -3497,10 +3468,9 @@ gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo,
case GST_VIDEO_FORMAT_NV16:
case GST_VIDEO_FORMAT_NV61:
case GST_VIDEO_FORMAT_NV24:
#ifdef USE_V4L2_TARGET_NV
#ifdef USE_V4L2_TARGET_NV
case GST_VIDEO_FORMAT_P010_10LE:
case GST_VIDEO_FORMAT_I420_12LE:
case GST_VIDEO_FORMAT_NV16_10LE32:
#endif
estride = (plane == 0 ? 1 : 2) *
GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, plane, stride);
@@ -3513,17 +3483,6 @@ gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo,
return estride;
}
void post_error_to_bus(GstElement *element, const gchar *error_message)
{
GError *error =
g_error_new_literal(GST_CORE_ERROR, GST_CORE_ERROR_FAILED, error_message);
gst_element_post_message
(GST_ELEMENT(element), gst_message_new_error(GST_OBJECT(element),
error, NULL));
g_error_free(error);
}
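
A typical call site for the post_error_to_bus() helper shown above; the same pattern appears in the set_parm_failed hunk further down, and the message text here is only an example:

/* Example only: surface an encoder setup failure to the application. */
gchar err_msg[100] = "ENCODER INITIALIZATION FAILED";
post_error_to_bus (v4l2object->element, err_msg);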
static gboolean
gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
gboolean try_only, GstV4l2Error * error)
@@ -3534,8 +3493,8 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
enum v4l2_field field;
guint32 pixelformat;
struct v4l2_fmtdesc *fmtdesc;
GstVideoInfo info = { 0, };
GstVideoAlignment align = { 0, };
GstVideoInfo info;
GstVideoAlignment align;
#ifndef USE_V4L2_TARGET_NV
gint width, height, fps_n, fps_d;
#else
@@ -3545,9 +3504,6 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
guint width, height, fps_n, fps_d;
GstV4l2VideoEnc *videoenc = NULL;
GstV4l2VideoDec *videodec = NULL;
gst_video_info_init (&info);
gst_video_alignment_reset (&align);
if (!strcmp (v4l2object->videodev, V4L2_DEVICE_PATH_NVENC)
|| !strcmp (v4l2object->videodev, V4L2_DEVICE_PATH_NVENC_ALT)) {
videoenc = GST_V4L2_VIDEO_ENC (v4l2object->element);
@@ -3556,7 +3512,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
videodec = GST_V4L2_VIDEO_DEC (v4l2object->element);
}
GstV4l2VideoEncClass *klass = NULL;
{
if (is_cuvid == FALSE) {
if (!strcmp (v4l2object->videodev, V4L2_DEVICE_PATH_NVENC)
|| !strcmp (v4l2object->videodev, V4L2_DEVICE_PATH_NVENC_ALT)) {
klass = GST_V4L2_VIDEO_ENC_GET_CLASS (v4l2object->element);
@@ -3763,8 +3719,8 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
stride = GST_VIDEO_TILE_X_TILES (stride) *
GST_VIDEO_FORMAT_INFO_TILE_STRIDE (info.finfo, i);
stride = GST_VIDEO_TILE_X_TILES (stride) <<
GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
}
@@ -3782,8 +3738,8 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
format.fmt.pix.field = field;
if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
stride = GST_VIDEO_TILE_X_TILES (stride) *
GST_VIDEO_FORMAT_INFO_TILE_STRIDE (info.finfo, i);
stride = GST_VIDEO_TILE_X_TILES (stride) <<
GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
/* try to ask for our preferred stride */
format.fmt.pix.bytesperline = stride;
@@ -3858,14 +3814,6 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
if (ret)
goto invalid_ctrl;
}
} else {
if ((!strcmp (v4l2object->videodev, V4L2_DEVICE_PATH_NVDEC)) &&
V4L2_TYPE_IS_OUTPUT (v4l2object->type) &&
(width == 0 || height == 0)) {
GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Invalid caps: %s", gst_caps_to_string(caps));
goto invalid_caps;
}
}
#endif
@@ -3897,144 +3845,89 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
if (ret < 0)
goto invalid_ctrl;
if (videoenc)
{
if (V4L2_TYPE_IS_OUTPUT(v4l2object->type))
{
if (strcmp(klass->codec_name, "H264") == 0 || strcmp(klass->codec_name, "H265") == 0 || strcmp(klass->codec_name, "AV1") == 0)
{
if (!klass->set_encoder_properties(&videoenc->parent))
{
g_print("set_encoder_properties failed\n");
if (is_cuvid == FALSE) {
if (videoenc) {
if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
if (strcmp (klass->codec_name, "H264") == 0
|| strcmp (klass->codec_name, "H265") == 0
|| strcmp (klass->codec_name, "AV1") == 0) {
if (!klass->set_encoder_properties (&videoenc->parent)) {
g_print ("set_encoder_properties failed\n");
return FALSE;
}
}
if (!klass->set_video_encoder_properties (&videoenc->parent)) {
g_print ("set_video_encoder_properties failed\n");
return FALSE;
}
}
if (!klass->set_video_encoder_properties(&videoenc->parent))
{
g_print("set_video_encoder_properties failed\n");
return FALSE;
}
if (is_cuvid)
{
/* CUDA PRESETS are overridden if HW presets are set.*/
if (!videoenc->hw_preset_level)
{
ctl.id = V4L2_CID_MPEG_VIDEOENC_CUDA_PRESET_ID;
ctl.value = videoenc->cudaenc_preset_id;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_CUDA_TUNING_INFO;
ctl.value = videoenc->cudaenc_tuning_info_id;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
}
ctl.id = V4L2_CID_MPEG_VIDEO_CUDA_MEM_TYPE;
ctl.value = videoenc->cudaenc_mem_type;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
v4l2_ctrl_video_constqp constqp;
constqp.constQpI = videoenc->constQpI;
constqp.constQpP = videoenc->constQpP;
constqp.constQpB = videoenc->constQpB;
ctrls.count = 1;
ctrls.controls = &ctl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_CUDA_CONSTQP;
ctl.string = (gchar *)&constqp;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
v4l2_ctrl_video_init_qp init_qp;
init_qp.IInitQP = videoenc->IInitQP;
init_qp.PInitQP = videoenc->PInitQP;
init_qp.BInitQP = videoenc->BInitQP;
ctrls.count = 1;
ctrls.controls = &ctl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_INIT_FRAME_QP;
ctl.string = (gchar *)&init_qp;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEO_MAXBITRATE;
ctl.value = videoenc->maxbitrate;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_VBVBUFSIZE;
ctl.value = videoenc->vbvbufsize;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_VBVINIT;
ctl.value = videoenc->vbvinit;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_ENABLE_AQ;
ctl.value = videoenc->aqStrength;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_ENABLE_TEMPORAL_AQ;
ctl.value = videoenc->enableTemporalAQ;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_TARGET_QUALITY;
ctl.value = videoenc->targetQuality;
ctrls.count = 1;
ctrls.controls = &ctl;
ret = v4l2object->ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
}
}
}
if (videoenc) {
if (pixelformat == V4L2_PIX_FMT_AV1) {
set_v4l2_video_mpeg_class (videoenc->v4l2capture,
V4L2_CID_MPEG_VIDEOENC_AV1_HEADERS_WITH_FRAME, videoenc->v4l2capture->Enable_headers);
}
}
if (is_cuvid == FALSE) {
if (videoenc) {
if (pixelformat == V4L2_PIX_FMT_VP8 || pixelformat == V4L2_PIX_FMT_VP9) {
set_v4l2_video_mpeg_class (videoenc->v4l2capture,
V4L2_CID_MPEG_VIDEOENC_VPX_HEADERS_WITH_FRAME, videoenc->v4l2capture->Enable_headers);
}
if (pixelformat == V4L2_PIX_FMT_AV1) {
set_v4l2_video_mpeg_class (videoenc->v4l2capture,
V4L2_CID_MPEG_VIDEOENC_AV1_HEADERS_WITH_FRAME, videoenc->v4l2capture->Enable_headers);
}
}
}
else if (is_cuvid == TRUE) {
if (videoenc) {
ctl.id = V4L2_CID_MPEG_VIDEO_CUDA_GPU_ID;
ctl.value = videoenc->cudaenc_gpu_id;
ctrls.count = 1;
ctrls.controls = &ctl ;
ret = v4l2object->ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_CUDA_PRESET_ID;
ctl.value = videoenc->cudaenc_preset_id;
ctrls.count = 1;
ctrls.controls = &ctl ;
ret = v4l2object->ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
v4l2_ctrl_video_constqp constqp;
constqp.constQpI = videoenc->constQpI;
constqp.constQpP = videoenc->constQpP;
constqp.constQpB = videoenc->constQpB;
ctrls.count = 1;
ctrls.controls = &ctl ;
ctl.id = V4L2_CID_MPEG_VIDEOENC_CUDA_CONSTQP;
ctl.string = (gchar *) &constqp;
ret = v4l2object->ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
v4l2_ctrl_video_init_qp init_qp;
init_qp.IInitQP = videoenc->IInitQP;
init_qp.PInitQP = videoenc->PInitQP;
init_qp.BInitQP = videoenc->BInitQP;
ctrls.count = 1;
ctrls.controls = &ctl ;
ctl.id = V4L2_CID_MPEG_VIDEOENC_INIT_FRAME_QP;
ctl.string = (gchar *) &init_qp;
ret = v4l2object->ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
ctl.id = V4L2_CID_MPEG_VIDEOENC_CUDA_TUNING_INFO;
ctl.value = videoenc->cudaenc_tuning_info_id;
ctrls.count = 1;
ctrls.controls = &ctl ;
ret = v4l2object->ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls);
if (ret)
goto invalid_ctrl;
}
}
#endif
@@ -4371,27 +4264,6 @@ get_parm_failed:
}
set_parm_failed:
{
if ((v4l2object->is_encode == 1) && (is_cuvid == TRUE))
{
gchar err_msg[100] = "ENCODER INITIALIZATION FAILED";
switch (errno)
{
case EINVAL:
strncpy (err_msg, "INVALID / UNSUPPORTED PARAM", 100);
break;
case ENODEV:
strncpy (err_msg, "INVALID / UNSUPPORTED / NO ENCODE DEVICE", 100);
break;
case ENOSYS:
strncpy (err_msg, "FEATURE UNIMPLEMENTED", 100);
break;
case EPERM:
strncpy (err_msg, "OPERATION NOT PERMITTED", 100);
break;
}
post_error_to_bus (v4l2object->element, err_msg);
}
GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
(_("Video device did not accept new frame rate setting.")),
GST_ERROR_SYSTEM);
@@ -4442,7 +4314,7 @@ gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info)
struct v4l2_rect *r = NULL;
GstVideoFormat format;
guint width, height;
GstVideoAlignment align = { 0, };
GstVideoAlignment align;
gst_video_info_init (info);
gst_video_alignment_reset (&align);
@@ -4602,7 +4474,7 @@ gboolean
gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps)
{
GstStructure *config;
GstCaps *oldcaps = NULL;
GstCaps *oldcaps;
gboolean ret;
if (!v4l2object->pool)
@@ -4773,7 +4645,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
GstCaps *caps;
GstBufferPool *pool = NULL, *other_pool = NULL;
GstStructure *config;
guint size = 0, min = 0, max = 0, own_min = 0;
guint size, min, max, own_min = 0;
gboolean update;
gboolean has_video_meta;
gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
@@ -5078,7 +4950,7 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
GstBufferPool *pool;
/* we need at least 2 buffers to operate */
guint size, min, max;
GstCaps *caps = NULL;
GstCaps *caps;
gboolean need_pool;
/* Set defaults allocation parameters */
@@ -5099,7 +4971,7 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
gst_object_ref (pool);
if (pool != NULL) {
GstCaps *pcaps = NULL;
GstCaps *pcaps;
GstStructure *config;
/* we had a pool, check caps */
@@ -5169,7 +5041,8 @@ set_v4l2_video_mpeg_class (GstV4l2Object * v4l2object, guint label,
if (control.id == V4L2_CID_MPEG_VIDEOENC_VIRTUALBUFFER_SIZE) {
control.string = (gchar *) &buffer_size;
} else if (control.id == V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES) {
} else if ((control.id == V4L2_CID_MPEG_VIDEOENC_SLICE_INTRAREFRESH_PARAM) ||
(control.id == V4L2_CID_MPEG_VIDEOENC_NUM_REFERENCE_FRAMES)) {
control.string = (gchar *) &params;
} else {
control.value = params;


@@ -2,8 +2,7 @@
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2023, NVIDIA CORPORATION. All rights reserved.
*
* gstv4l2object.h: base class for V4L2 elements
*
@@ -60,7 +59,6 @@ typedef struct _GstV4l2ObjectClassHelper GstV4l2ObjectClassHelper;
#define V4L2_DEVICE_PATH_NVENC "/dev/nvhost-msenc"
#define V4L2_DEVICE_PATH_NVENC_ALT "/dev/v4l2-nvenc"
#define V4L2_DEVICE_PATH_TEGRA_INFO "/sys/firmware/devicetree/base/compatible"
#define V4L2_DEVICE_INFO_SOM_EEPROM "/sys/firmware/devicetree/base/chosen/ids"
#endif
/* max frame width/height */
@@ -140,6 +138,12 @@ typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
return FALSE; \
}
#ifdef USE_V4L2_TARGET_NV
/* Structure to hold the video info in order to modify the contents in case of
* the GST_VIDEO_FORMAT_I420_12LE format */
GstVideoFormatInfo video_info;
#endif
struct _GstV4l2Object {
GstElement * element;
GstObject * dbg_obj;
@@ -228,7 +232,6 @@ struct _GstV4l2Object {
GMutex cplane_stopped_lock;
guint sei_payload_size;
void* sei_payload;
gchar *sei_uuid;
#endif
/* funcs */
@@ -374,8 +377,6 @@ gboolean gst_v4l2_get_attribute (GstV4l2Object * v4l2object, int attribute
gboolean gst_v4l2_set_attribute (GstV4l2Object * v4l2object, int attribute, const int value);
gboolean gst_v4l2_set_controls (GstV4l2Object * v4l2object, GstStructure * controls);
void post_error_to_bus(GstElement *element, const gchar *error_message);
#ifdef USE_V4L2_TARGET_NV
gboolean set_v4l2_video_mpeg_class (GstV4l2Object * v4l2object, guint label,
gint params);


@@ -1,9 +1,7 @@
/*
* Copyright (C) 2014-2023 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2024, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -19,6 +17,7 @@
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
@@ -47,11 +46,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_dec_debug);
#define ENABLE_DRAIN 1
#ifdef USE_V4L2_TARGET_NV
static gboolean
gst_v4l2_video_remove_padding (GstCapsFeatures * features,
GstStructure * structure, gpointer user_data);
typedef enum {
CAP_BUF_DYNAMIC_ALLOC_DISABLED,
CAP_BUF_DYNAMIC_ALLOC_ENABLED_FOR_FW_PLAYBACK,
@@ -74,8 +68,7 @@ gboolean default_sei_extract_data;
gint default_num_extra_surfaces;
static gboolean enable_latency_measurement = FALSE;
extern uint8_t *parse_sei_data (uint8_t *bs, guint size, uint32_t *payload_size,
char *sei_uuid_string, guint32 pixelformat);
extern uint8_t *parse_sei_data (uint8_t *bs, guint size, uint32_t *payload_size, char *sei_uuid_string);
#ifdef USE_V4L2_TARGET_NV
GstVideoCodecFrame *
@@ -143,7 +136,11 @@ gst_video_dec_capture_buffer_dynamic_allocation (void)
/* Properties specifically applicable on GPU*/
#define GST_TYPE_V4L2_VID_CUDADEC_MEM_TYPE (gst_video_cudadec_mem_type ())
#ifndef __aarch64__
#define DEFAULT_CUDADEC_MEM_TYPE V4L2_CUDA_MEM_TYPE_DEVICE
#else
#define DEFAULT_CUDADEC_MEM_TYPE V4L2_CUDA_MEM_TYPE_DEVICE
#endif
#define DEFAULT_CUDADEC_GPU_ID 0
#define MAX_CUDADEC_NUM_SURFACES 20
@@ -161,10 +158,6 @@ gst_video_cudadec_mem_type (void)
"memtype_pinned"},
{V4L2_CUDA_MEM_TYPE_UNIFIED, "Memory type Unified",
"memtype_unified"},
#if !defined(USE_V4L2_TARGET_NV_X86) && !defined(AARCH64_IS_SBSA)
{V4L2_CUDA_MEM_TYPE_SURFACE_ARRAY, "Memory type Surface Array",
"memtype_surface_array"},
#endif
{0, NULL, NULL}
};
@@ -212,9 +205,7 @@ static GstStaticPadTemplate gst_v4l2dec_sink_template =
"divxversion=(int) [4, 5], "
"width=(int) [1,MAX], " "height=(int) [1,MAX]"
";"
"video/x-av1,"
"stream-format = (string) { obu-stream },"
"alignment = (string) { frame }"
"video/x-av1"
";"
"video/x-vp8"
";" "video/x-vp9," "width=(gint) [1,MAX]," "height=(gint) [1,MAX]" ";")
@@ -252,7 +243,6 @@ enum
PROP_ENABLE_ERROR_CHECK,
PROP_ENABLE_MAX_PERFORMANCE,
PROP_OPEN_MJPEG_BLOCK,
PROP_IS_GDR_STREAM,
/*Properties exposed on dGPU only*/
PROP_CUDADEC_MEM_TYPE,
PROP_CUDADEC_GPU_ID,
@@ -403,11 +393,6 @@ gst_v4l2_video_dec_set_property_tegra (GObject * object,
self->v4l2output->open_mjpeg_block = g_value_get_boolean (value);
break;
case PROP_IS_GDR_STREAM:
self->is_gdr_stream = g_value_get_boolean (value);
self->enable_frame_type_reporting = g_value_get_boolean (value);
break;
case PROP_CAP_BUF_DYNAMIC_ALLOCATION:
self->cap_buf_dynamic_allocation = g_value_get_enum (value);
break;
@@ -459,7 +444,6 @@ gst_v4l2_video_dec_set_property_cuvid (GObject * object,
break;
case PROP_CUDADEC_LOW_LATENCY:
case PROP_DISABLE_DPB:
self->cudadec_low_latency = g_value_get_boolean (value);
break;
@@ -536,10 +520,6 @@ gst_v4l2_video_dec_get_property_tegra (GObject * object,
g_value_set_boolean (value, self->v4l2output->open_mjpeg_block);
break;
case PROP_IS_GDR_STREAM:
g_value_set_boolean (value, self->is_gdr_stream);
break;
case PROP_CAP_BUF_DYNAMIC_ALLOCATION:
g_value_set_enum (value, self->cap_buf_dynamic_allocation);
break;
@@ -590,7 +570,6 @@ gst_v4l2_video_dec_get_property_cuvid (GObject * object,
break;
case PROP_CUDADEC_LOW_LATENCY:
case PROP_DISABLE_DPB:
g_value_set_boolean (value, self->cudadec_low_latency);
break;
@@ -708,7 +687,7 @@ gst_v4l2_video_dec_start (GstVideoDecoder * decoder)
self->decoded_picture_cnt = 0;
#endif
self->hash_pts_systemtime = g_hash_table_new_full (NULL, NULL, NULL, NULL);
self->hash_pts_systemtime = g_hash_table_new(NULL, NULL);
return TRUE;
}
@@ -739,7 +718,6 @@ gst_v4l2_video_dec_stop (GstVideoDecoder * decoder)
gst_v4l2_object_stop (self->v4l2output);
gst_v4l2_object_stop (self->v4l2capture);
g_hash_table_remove_all (self->hash_pts_systemtime);
g_hash_table_destroy (self->hash_pts_systemtime);
if (self->input_state) {
@@ -832,15 +810,6 @@ gboolean set_v4l2_controls (GstV4l2VideoDec *self)
}
}
if (self->is_gdr_stream) {
if (!set_v4l2_video_mpeg_class (self->v4l2output,
V4L2_CID_MPEG_VIDEODEC_GDR_STREAM,
self->is_gdr_stream)) {
g_print ("S_EXT_CTRLS for GDR_STREAM failed\n");
return FALSE;
}
}
if (is_cuvid == TRUE) {
if (!set_v4l2_video_mpeg_class (self->v4l2output,
V4L2_CID_MPEG_VIDEO_CUDA_MEM_TYPE,
@@ -878,18 +847,38 @@ gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
goto done;
}
#else
GstV4l2BufferPool *v4l2pool = GST_V4L2_BUFFER_POOL(self->v4l2output->pool);
GstV4l2Object *obj = v4l2pool->obj;
if ((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265))
{
GST_INFO_OBJECT(self, "Pix format is h264 or h265. Bypassing set_format."
"Reset done from handle_frame");
self->output_flow = GST_FLOW_OK;
self->set_format = true;
return TRUE;
}
if (is_cuvid == TRUE) {
GstV4l2BufferPool *v4l2pool = GST_V4L2_BUFFER_POOL(self->v4l2output->pool);
GstV4l2Object *obj = v4l2pool->obj;
if ((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265))
{
GST_INFO_OBJECT(self, "pix format is h264 or h265. skipping");
self->output_flow = GST_FLOW_OK;
self->set_format = true;
return TRUE;
}
} else if (is_cuvid == FALSE) {
{
GstStructure *config;
GstCaps *oldcaps;
GstStructure *structure;
gint width = 0;
config = gst_buffer_pool_get_config(self->v4l2output->pool);
gst_buffer_pool_config_get_params(config, &oldcaps, NULL, NULL, NULL);
structure = gst_caps_get_structure(oldcaps, 0);
if (gst_structure_get_int(structure, "width", &width))
self->is_drc = TRUE;
else
self->is_drc = FALSE;
gst_structure_free(config);
}
if (self->is_drc == FALSE)
return TRUE;
else
self->idr_received = FALSE;
}
#endif
gst_video_codec_state_unref (self->input_state);
self->input_state = NULL;
@@ -921,6 +910,31 @@ gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
gst_v4l2_object_stop (self->v4l2capture);
self->output_flow = GST_FLOW_OK;
#ifdef USE_V4L2_TARGET_NV
if (is_cuvid == FALSE) {
if (self->is_drc == TRUE)
{
g_mutex_lock(&self->v4l2capture->cplane_stopped_lock);
while (self->v4l2capture->capture_plane_stopped != TRUE)
{
g_cond_wait(&self->v4l2capture->cplane_stopped_cond,
&self->v4l2capture->cplane_stopped_lock);
}
self->v4l2capture->capture_plane_stopped = FALSE;
g_mutex_unlock(&self->v4l2capture->cplane_stopped_lock);
gst_v4l2_object_close(self->v4l2output);
gst_v4l2_object_close(self->v4l2capture);
gst_v4l2_object_open(self->v4l2output);
if (!gst_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
{
g_print("gstv4l2object open shared failed\n");
if (GST_V4L2_IS_OPEN(self->v4l2output))
gst_v4l2_object_close(self->v4l2output);
return FALSE;
}
}
}
#endif
}
ret = gst_v4l2_object_set_format (self->v4l2output, state->caps, &error);
@@ -967,13 +981,6 @@ gst_v4l2_video_dec_flush (GstVideoDecoder * decoder)
gst_v4l2_object_unlock_stop (self->v4l2output);
gst_v4l2_object_unlock_stop (self->v4l2capture);
#ifdef USE_V4L2_TARGET_NV
if (is_cuvid == TRUE) {
/* Flush out low-level cuvid decoder buffers */
set_v4l2_video_mpeg_class (self->v4l2output, V4L2_CID_MPEG_VIDEODEC_FLUSH_BUFFERS, 1);
}
#endif
if (self->v4l2output->pool)
gst_v4l2_buffer_pool_flush (self->v4l2output->pool);
@@ -1291,12 +1298,10 @@ gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
if(enable_latency_measurement) /* TODO with better option */
{
g_mutex_lock(&self->pts_hashtable_lock);
gsize frame_pts = frame->pts;
gpointer in_time = g_hash_table_lookup (self->hash_pts_systemtime, GSIZE_TO_POINTER(frame_pts));
gdouble input_time = (gdouble) GPOINTER_TO_SIZE(in_time);
gpointer in_time = g_hash_table_lookup (self->hash_pts_systemtime,
&frame->pts);
gdouble input_time = *((gdouble*)in_time);
gdouble output_time = get_current_system_timestamp ();
if (output_time < input_time)
{
gdouble time = G_MAXDOUBLE - input_time;
@@ -1308,11 +1313,6 @@ gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
GST_DEBUG_OBJECT (self, "True Decode Latency = %f \n",
(output_time - input_time));
}
//Remove the hash table entry for the frame pts
g_hash_table_remove (self->hash_pts_systemtime, GSIZE_TO_POINTER(frame_pts));
g_mutex_unlock(&self->pts_hashtable_lock);
GstCaps *reference = gst_caps_new_simple ("video/x-raw",
"component_name", G_TYPE_STRING, GST_ELEMENT_NAME(self),
"frame_num", G_TYPE_INT, self->frame_num++,
@@ -1382,7 +1382,7 @@ gst_v4l2_video_remove_padding (GstCapsFeatures * features,
if (!gst_structure_get_int (structure, "height", &height))
return TRUE;
#else
guint width = 0, height = 0;
guint width, height;
if (!gst_structure_get_int (structure, "width", (gint *) & width))
return TRUE;
@@ -1468,6 +1468,7 @@ gst_h264_parse_process_nal (GstV4l2VideoDec *self, H264NalUnit * nalu)
{
guint nal_type;
H264SPS sps = { 0, };
H264NalParser nalparser;
H264ParserResult pres;
/* nothing to do for broken input */
@@ -1486,11 +1487,11 @@ gst_h264_parse_process_nal (GstV4l2VideoDec *self, H264NalUnit * nalu)
{
GST_DEBUG_OBJECT (self, "GOT SPS frame\n");
}
switch (nal_type) {
case H264_NAL_SPS:
/* reset state, everything else is obsolete */
pres = h264_parser_parse_sps (nalu, &sps, TRUE);
pres = h264_parser_parse_sps (&nalparser, nalu, &sps, TRUE);
/* arranged for a fallback sps.id, so use that one and only warn */
if (pres != H264_PARSER_OK) {
@@ -1510,8 +1511,8 @@ gst_h264_parse_process_nal (GstV4l2VideoDec *self, H264NalUnit * nalu)
static GstFlowReturn
gst_v4l2_h265_stream_parser (GstV4l2VideoDec *self, gpointer data, guint32 size)
{
H265NalUnit nalu = {0};
H265ParserResult pres = H265_PARSER_ERROR;
H265NalUnit nalu;
H265ParserResult pres;
gint current_off = 0;
guint nalu_size = 0;
GstFlowReturn ret = GST_FLOW_OK;
@@ -1534,12 +1535,18 @@ gst_v4l2_h265_stream_parser (GstV4l2VideoDec *self, gpointer data, guint32 size)
break;
case H265_PARSER_ERROR:
/* should not really occur either */
GST_WARNING_OBJECT (self, "Error parsing H.265 stream. Invalid H.265 stream");
GST_ELEMENT_ERROR (self, STREAM, FORMAT,
("Error parsing H.265 stream"), ("Invalid H.265 stream"));
ret = GST_FLOW_ERROR;
break;
case H265_PARSER_NO_NAL:
GST_WARNING_OBJECT (self, "No H.265 NAL unit found");
GST_ELEMENT_ERROR (self, STREAM, FORMAT,
("Error parsing H.265 stream"), ("No H.265 NAL unit found"));
ret = GST_FLOW_ERROR;
break;
default:
ret = GST_FLOW_ERROR;
//g_assert_not_reached ();
break;
}
@@ -1587,12 +1594,18 @@ gst_v4l2_h264_stream_parser (GstV4l2VideoDec *self, gpointer data, guint32 size)
break;
case H264_PARSER_ERROR:
/* should not really occur either */
GST_WARNING_OBJECT(self, "Error parsing H.264 stream. Invalid H.264 stream");
GST_ELEMENT_ERROR(self, STREAM, FORMAT,
("Error parsing H.264 stream"), ("Invalid H.264 stream"));
ret = GST_FLOW_ERROR;
break;
case H264_PARSER_NO_NAL:
GST_WARNING_OBJECT(self, "Error parsing H.264 stream. No H.264 NAL unit found");
GST_ELEMENT_ERROR(self, STREAM, FORMAT,
("Error parsing H.264 stream"), ("No H.264 NAL unit found"));
ret = GST_FLOW_ERROR;
break;
default:
ret = GST_FLOW_ERROR;
// g_assert_not_reached ();
break;
}
@@ -1661,7 +1674,7 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
if (((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_VP8) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_VP9)) && self->valid_vpx == FALSE)
{
GstMapInfo map = {0};
GstMapInfo map;
if (!gst_buffer_map(frame->input_buffer, &map, GST_MAP_READ))
{
GST_ERROR_OBJECT(self, "couldnt map frame input_buffer\n");
@@ -1680,51 +1693,50 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
gst_buffer_unmap (frame->input_buffer, &map);
}
}
if (((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265)) &&
!(GST_BUFFER_FLAG_IS_SET(GST_BUFFER_CAST(frame->input_buffer),
GST_BUFFER_FLAG_DELTA_UNIT)))
{
GstMapInfo map = {0};
if (!gst_buffer_map(frame->input_buffer, &map, GST_MAP_READ))
if (((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265)) &&
!(GST_BUFFER_FLAG_IS_SET(GST_BUFFER_CAST(frame->input_buffer),
GST_BUFFER_FLAG_DELTA_UNIT)))
{
GST_ERROR_OBJECT(self, "couldnt map frame input_buffer\n");
}
if (GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264)
{
if (gst_v4l2_h264_stream_parser(self, map.data, map.size) != GST_FLOW_OK)
GST_ERROR_OBJECT(self, "h264 stream parsing failed");
}
else if ((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265))
{
if (gst_v4l2_h265_stream_parser(self, map.data, map.size) != GST_FLOW_OK)
GST_ERROR_OBJECT(self, "h265 stream parsing failed");
}
gst_buffer_unmap(frame->input_buffer, &map);
if (self->old_width || self->old_height)
{
if ((self->old_width != self->current_width) ||
(self->old_height != self->current_height))
GstMapInfo map;
if (!gst_buffer_map(frame->input_buffer, &map, GST_MAP_READ))
{
GST_ERROR_OBJECT(self, "couldnt map frame input_buffer\n");
}
if (GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264)
{
if (gst_v4l2_h264_stream_parser(self, map.data, map.size) != GST_FLOW_OK)
GST_ERROR_OBJECT(self, "h264 stream parsing failed");
}
else if ((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265))
{
if (gst_v4l2_h265_stream_parser(self, map.data, map.size) != GST_FLOW_OK)
GST_ERROR_OBJECT(self, "h265 stream parsing failed");
}
gst_buffer_unmap (frame->input_buffer, &map);
if (self->old_width || self->old_height)
{
if ((self->old_width != self->current_width) ||
(self->old_height != self->current_height))
{
GST_INFO_OBJECT(self, "Decoder found new resolution. triggering DRC seq.");
GST_INFO_OBJECT(self, "Old Resolution: [%d x %d], New Resolution: [%d x %d]",
self->old_width, self->old_height, self->current_width, self->current_height);
self->old_width, self->old_height, self->current_width, self->current_height);
trigger_drc = true;
}
}
}
self->old_width = self->current_width;
self->old_height = self->current_height;
self->old_width = self->current_width;
self->old_height = self->current_height;
if (trigger_drc == true)
{
GstStructure *structure = NULL;
GstCaps *dec_sink_caps = gst_pad_get_current_caps(decoder->sinkpad);
if (dec_sink_caps != NULL)
if (trigger_drc == true)
{
GstStructure *structure = NULL;
GstCaps *dec_sink_caps = gst_pad_get_current_caps(decoder->sinkpad);
if (dec_sink_caps != NULL)
{
dec_sink_caps = gst_caps_make_writable(dec_sink_caps);
structure = gst_caps_get_structure(dec_sink_caps, 0);
gst_structure_set(structure, "width", G_TYPE_INT, self->current_width,
@@ -1734,42 +1746,12 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
* with downstream, not coded size. */
gst_caps_map_in_place(dec_sink_caps, gst_v4l2_video_remove_padding, self);
GST_DEBUG_OBJECT(self, "dec_sink_caps: %s", gst_caps_to_string(dec_sink_caps));
}
self->idr_received = FALSE;
gst_v4l2_video_dec_finish(decoder);
gst_v4l2_object_stop(self->v4l2output);
{
const GValue *framerate = NULL;
GstStructure *src_pad_st = NULL, *sink_pad_st = NULL;
GstCaps *dec_src_caps = gst_pad_get_current_caps(decoder->srcpad);
GstCaps *dec_sink_caps = gst_pad_get_current_caps(decoder->sinkpad);
if (dec_src_caps != NULL)
{
gint curr_width = self->current_width;
gint curr_height = self->current_height;
dec_src_caps = gst_caps_make_writable(dec_src_caps);
sink_pad_st = gst_caps_get_structure(dec_sink_caps, 0);
src_pad_st = gst_caps_get_structure(dec_src_caps, 0);
gst_structure_set(src_pad_st, "width", G_TYPE_INT, curr_width,
"height", G_TYPE_INT, curr_height,
NULL);
framerate = gst_structure_get_value(sink_pad_st, "framerate");
if (framerate)
gst_structure_set_value(src_pad_st, "framerate", framerate);
/* Replace coded size with visible size, we want to negotiate visible size
* with downstream, not coded size. */
gst_caps_map_in_place(dec_src_caps, gst_v4l2_video_remove_padding, self);
gst_pad_set_caps(decoder->srcpad, dec_src_caps);
GST_DEBUG_OBJECT(self, "dec_src_caps: %s", gst_caps_to_string(dec_src_caps));
gst_caps_unref(dec_src_caps);
gst_caps_unref(dec_sink_caps);
}
}
self->idr_received = FALSE;
{
gst_v4l2_video_dec_finish(decoder);
gst_v4l2_object_stop(self->v4l2output);
{
GstCaps *caps = gst_pad_get_current_caps(decoder->srcpad);
if (caps)
{
@@ -1778,76 +1760,51 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
gst_query_unref(query);
gst_caps_unref(caps);
}
}
gst_v4l2_object_stop(self->v4l2capture);
self->output_flow = GST_FLOW_OK;
}
gst_v4l2_object_stop(self->v4l2capture);
self->output_flow = GST_FLOW_OK;
g_mutex_lock(&self->v4l2capture->cplane_stopped_lock);
while (self->v4l2capture->capture_plane_stopped != TRUE)
{
g_mutex_lock(&self->v4l2capture->cplane_stopped_lock);
while (self->v4l2capture->capture_plane_stopped != TRUE)
{
g_cond_wait(&self->v4l2capture->cplane_stopped_cond,
&self->v4l2capture->cplane_stopped_lock);
}
self->v4l2capture->capture_plane_stopped = FALSE;
g_mutex_unlock(&self->v4l2capture->cplane_stopped_lock);
}
self->v4l2capture->capture_plane_stopped = FALSE;
g_mutex_unlock(&self->v4l2capture->cplane_stopped_lock);
gst_v4l2_object_close(self->v4l2output);
gst_v4l2_object_close(self->v4l2capture);
if (!gst_v4l2_object_open(self->v4l2output))
gst_v4l2_object_close(self->v4l2output);
gst_v4l2_object_close(self->v4l2capture);
if (!gst_v4l2_object_open(self->v4l2output))
GST_ERROR_OBJECT(self, "gst_v4l2_object_open (self->v4l2output) failed\n");
if (!gst_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
{
if (!gst_v4l2_object_open_shared(self->v4l2capture, self->v4l2output))
{
GST_ERROR_OBJECT(self, "gstv4l2object open shared failed\n");
if (GST_V4L2_IS_OPEN(self->v4l2output))
gst_v4l2_object_close(self->v4l2output);
return GST_FLOW_ERROR;
}
if (dec_sink_caps != NULL)
{
}
if (dec_sink_caps != NULL)
{
ret = gst_v4l2_object_set_format(self->v4l2output, dec_sink_caps, &error);
gst_caps_unref(dec_sink_caps);
}
else
{
} else
{
GST_ERROR_OBJECT(self, "Decoder sink caps == NULL");
}
}
if (ret)
if (ret)
self->input_state = gst_video_codec_state_ref(self->input_state);
else
else
gst_v4l2_error(self, &error);
set_v4l2_controls(self);
GST_INFO_OBJECT(self, "Reset Done");
trigger_drc = false;
self->set_format = false;
}
else if (self->set_format == true)
{
GstStructure *sink_pad_st = NULL, *src_pad_st = NULL;
const GValue *framerate = NULL;
GstCaps *dec_sink_caps = gst_pad_get_current_caps(decoder->sinkpad);
GstCaps *dec_src_caps = gst_pad_get_current_caps(decoder->srcpad);
if (G_UNLIKELY(dec_sink_caps != NULL) && G_UNLIKELY(dec_src_caps != NULL))
{
GST_DEBUG_OBJECT(self, "dec_sink_caps: %s", gst_caps_to_string(dec_sink_caps));
dec_src_caps = gst_caps_make_writable(dec_src_caps);
sink_pad_st = gst_caps_get_structure(dec_sink_caps, 0);
src_pad_st = gst_caps_get_structure(dec_src_caps, 0);
framerate = gst_structure_get_value(sink_pad_st, "framerate");
if (framerate)
gst_structure_set_value(src_pad_st, "framerate", framerate);
GST_DEBUG_OBJECT(self, "dec_src_caps: %s", gst_caps_to_string(dec_src_caps));
gst_pad_set_caps(decoder->srcpad, dec_src_caps);
gst_caps_unref(dec_sink_caps);
gst_caps_unref(dec_src_caps);
}
set_v4l2_controls(self);
self->set_format = false;
set_v4l2_controls(self);
GST_INFO_OBJECT(self, "Reset Done");
}
else if (self->set_format == true)
{
set_v4l2_controls(self);
}
}
}
@@ -1855,10 +1812,8 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
* a frame. In case of RTSP inputs we drop the DELTA units which are not
* decodable independently until we receive I / IDR frame.
*/
if (((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_MPEG4)) &&
self->is_gdr_stream == FALSE)
if ((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H264) ||
(GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_H265))
{
if ((GST_BUFFER_FLAG_IS_SET (GST_BUFFER_CAST(frame->input_buffer),
GST_BUFFER_FLAG_DELTA_UNIT)) &&
@@ -1877,7 +1832,7 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
// Decode only I Frames and drop others.
if (GST_BUFFER_FLAG_IS_SET (GST_BUFFER_CAST(frame->input_buffer),
GST_BUFFER_FLAG_DELTA_UNIT)) {
gst_video_decoder_release_frame (GST_VIDEO_DECODER(self), frame);
gst_video_decoder_drop_frame (decoder, frame);
return GST_FLOW_OK;
}
}
@@ -1885,12 +1840,8 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
if (enable_latency_measurement)
{
g_mutex_lock(&self->pts_hashtable_lock);
gsize frame_pts = frame->pts;
gsize current_time = get_current_system_timestamp ();
g_hash_table_insert (self->hash_pts_systemtime, GSIZE_TO_POINTER(frame_pts),
GSIZE_TO_POINTER(current_time));
g_mutex_unlock(&self->pts_hashtable_lock);
self->buffer_in_time = get_current_system_timestamp ();
g_hash_table_insert (self->hash_pts_systemtime, &frame->pts, &self->buffer_in_time);
}
if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
@@ -1906,9 +1857,9 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
GstVideoInfo info = {0};
GstVideoInfo info;
GstVideoCodecState *output_state;
GstBuffer *codec_data = NULL;
GstBuffer *codec_data;
GstCaps *acquired_caps, *available_caps, *caps, *filter;
GstStructure *st;
@@ -1916,21 +1867,11 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
codec_data = self->input_state->codec_data;
#ifdef USE_V4L2_TARGET_NV
/* For AV1 streams in a WebM container, we have observed that a codec_data field is received
along with the caps and is part of input_state, and would thus be sent to cuvidparser as the
first frame. cuvidparser does not require it; we should send the input frame directly */
if ((GST_V4L2_PIXELFORMAT(obj) == V4L2_PIX_FMT_AV1)) {
codec_data = NULL;
}
#endif
/* We are running in byte-stream mode, so we don't know the headers, but
* we need to send something, otherwise the decoder will refuse to
* initialize.
*/
if (codec_data) {
GST_DEBUG_OBJECT (self, "codec_data field is valid. Using that as input for cuvidparser");
gst_buffer_ref (codec_data);
} else {
codec_data = gst_buffer_ref (frame->input_buffer);
@@ -1987,7 +1928,6 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
ret = self->output_flow;
goto drop;
} else if (ret != GST_FLOW_OK) {
GST_DEBUG_OBJECT (self, "gst_v4l2_buffer_pool_process failed\n");
goto process_failed;
}
}
@@ -2001,27 +1941,25 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
if(!processed)
{
processed = TRUE;
/* Wait for DQEVENT for 1 sec */
/* Wait for DQEVENT for 0.6 sec */
while (dqevent_loop_count < LOOP_COUNT_TO_WAIT_FOR_DQEVENT) {
memset (&ev, 0, sizeof (ev));
retval = obj->ioctl (obj->video_fd, VIDIOC_DQEVENT, &ev);
dqevent_loop_count ++;
if (retval != 0)
{
if (errno == EINVAL) {
GST_DEBUG_OBJECT (self, "VIDIOC_DQEVENT failed. Event Type: %d\n", ev.type);
if (errno == EINVAL)
goto process_failed;
}
if (dqevent_loop_count == LOOP_COUNT_TO_WAIT_FOR_DQEVENT) {
GST_DEBUG_OBJECT (self, "Stream format not found, dropping the frame\n");
goto drop;
}
usleep (WAIT_TIME_PER_LOOP_FOR_DQEVENT); //TODO is this needed ?
continue;
}
else
break;
}
if (dqevent_loop_count == LOOP_COUNT_TO_WAIT_FOR_DQEVENT) {
g_print ("Stream format not found, dropping the frame\n");
goto drop;
}
}
else
#endif
@@ -2031,13 +1969,11 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
retval = obj->ioctl (obj->video_fd, VIDIOC_DQEVENT, &ev);
if (retval != 0)
{
if (errno == EINVAL) {
GST_DEBUG_OBJECT (self, "VIDIOC_DQEVENT failed. Event Type: %d\n", ev.type);
if (errno == EINVAL)
goto process_failed;
}
if (is_cuvid != TRUE) {
if (ev.sequence == 0) {
GST_DEBUG_OBJECT (self, "Stream format not found, dropping the frame\n");
g_print ("Stream format not found, dropping the frame\n");
goto drop;
}
}
@@ -2118,12 +2054,6 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
#ifdef USE_V4L2_TARGET_NV
if (output_state->caps)
gst_caps_unref (output_state->caps);
/* Set colorimetry information returned from the low-level tegrav4l2
* library. For now it is guarded by the is_cuvid variable to
* avoid any regression on x86.
*/
if (!is_cuvid)
output_state->info.colorimetry = info.colorimetry;
output_state->caps = gst_video_info_to_caps (&output_state->info);
GstCapsFeatures *features = gst_caps_features_new ("memory:NVMM", NULL);
gst_caps_set_features (output_state->caps, 0, features);
@@ -2185,6 +2115,37 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
}
#endif
}
#ifdef USE_V4L2_TARGET_NV
if (is_cuvid == TRUE)
{
if ((trigger_drc == false) && (self->set_format == true))
{
GstStructure *sink_pad_st = NULL, *src_pad_st = NULL;
const GValue *framerate = NULL;
GstCaps *dec_sink_caps = gst_pad_get_current_caps(decoder->sinkpad);
GstCaps *dec_src_caps = gst_pad_get_current_caps(decoder->srcpad);
if (G_UNLIKELY (dec_sink_caps != NULL) && G_UNLIKELY (dec_src_caps != NULL))
{
GST_DEBUG_OBJECT(self, "dec_sink_caps: %s", gst_caps_to_string(dec_sink_caps));
dec_src_caps = gst_caps_make_writable(dec_src_caps);
sink_pad_st = gst_caps_get_structure(dec_sink_caps, 0);
src_pad_st = gst_caps_get_structure(dec_src_caps, 0);
framerate = gst_structure_get_value(sink_pad_st, "framerate");
if (framerate)
gst_structure_set_value(src_pad_st, "framerate", framerate);
GST_DEBUG_OBJECT(self, "dec_src_caps: %s", gst_caps_to_string(dec_src_caps));
gst_pad_set_caps(decoder->srcpad, dec_src_caps);
gst_caps_unref(dec_sink_caps);
gst_caps_unref(dec_src_caps);
}
}
trigger_drc = false;
self->set_format = false;
}
#endif
task_state = gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self));
if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
@@ -2230,7 +2191,6 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
ret = GST_FLOW_OK;
goto drop;
}
GST_DEBUG_OBJECT (self, "gst_v4l2_buffer_pool_process failed\n");
goto process_failed;
}
}
@@ -2244,11 +2204,10 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
GST_BUFFER_COPY_META, 0, 0);
/* Parse SEI data from the bitstream */
if ((is_cuvid == TRUE) && (self->extract_sei_type5_data == TRUE) &&
((obj->fmtdesc->pixelformat == V4L2_PIX_FMT_H264) || (obj->fmtdesc->pixelformat == V4L2_PIX_FMT_H265)))
if ((is_cuvid == TRUE) && (self->extract_sei_type5_data == TRUE))
{
uint8_t *sei_type5_payload = NULL;
GstMapInfo map = {0};
GstMapInfo map;
if (!gst_buffer_map (tmp, &map, GST_MAP_READWRITE))
{
GST_DEBUG_OBJECT (self, "couldnt map\n");
@@ -2259,7 +2218,7 @@ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
uint32_t payload_size = 0;
uint8_t *stream_data = (uint8_t *)map.data;
sei_type5_payload = parse_sei_data (stream_data, map.size,
&payload_size, self->sei_uuid_string, obj->fmtdesc->pixelformat);
&payload_size, self->sei_uuid_string);
if (sei_type5_payload != NULL)
{
GST_DEBUG_OBJECT (self, "sei_type5_payload found\n");
@@ -2312,7 +2271,8 @@ start_task_failed:
process_failed:
{
GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
(_("Failed to process frame.")), (NULL));
(_("Failed to process frame.")),
("Maybe be due to not enough memory or failing driver"));
ret = GST_FLOW_ERROR;
goto drop;
}
@@ -2356,7 +2316,7 @@ gst_v4l2_video_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CAPS:{
GstCaps *filter = NULL, *result = NULL;
GstCaps *filter, *result = NULL;
GstPad *pad = GST_VIDEO_DECODER_SRC_PAD (decoder);
gst_query_parse_caps (query, &filter);
@@ -2482,7 +2442,7 @@ gst_v4l2_video_dec_src_event (GstVideoDecoder * decoder, GstEvent * event)
case GST_EVENT_SEEK:
{
GstFormat format;
gdouble rate = 0.0;
gdouble rate;
GstSeekFlags flags;
GstSeekType start_type, stop_type;
gint64 start, stop;
@@ -2560,7 +2520,6 @@ gst_v4l2_video_dec_finalize (GObject * object)
#ifdef USE_V4L2_TARGET_NV
g_cond_clear (&self->v4l2capture->cplane_stopped_cond);
g_mutex_clear (&self->v4l2capture->cplane_stopped_lock);
g_mutex_clear (&self->pts_hashtable_lock);
#endif
gst_v4l2_object_destroy (self->v4l2capture);
@@ -2597,8 +2556,6 @@ gst_v4l2_video_dec_init (GstV4l2VideoDec * self)
self->cudadec_low_latency = default_cudadec_low_latency;
self->idr_received = FALSE;
self->rate = 1;
self->old_width = 0;
self->old_height = 0;
self->cap_buf_dynamic_allocation = DEFAULT_CAP_BUF_DYNAMIC_ALLOCATION;
#endif
@@ -2637,7 +2594,6 @@ gst_v4l2_video_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
#ifdef USE_V4L2_TARGET_NV
g_mutex_init (&self->v4l2capture->cplane_stopped_lock);
g_cond_init (&self->v4l2capture->cplane_stopped_cond);
g_mutex_init (&self->pts_hashtable_lock);
#endif
}
@@ -2740,12 +2696,6 @@ gst_v4l2_video_dec_class_init (GstV4l2VideoDecClass * klass)
"Enable max performance", "Set to enable max performance",
DEFAULT_MAX_PERFORMANCE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_IS_GDR_STREAM,
g_param_spec_boolean ("is-gdr-stream",
"is-gdr-stream",
"Set the flag to allow GDR decode for H264/HEVC",
FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_CAP_BUF_DYNAMIC_ALLOCATION,
g_param_spec_enum ("capture-buffer-dynamic-allocation",
"Enable capture buffer dynamic allocation",
@@ -2777,12 +2727,6 @@ gst_v4l2_video_dec_class_init (GstV4l2VideoDecClass * klass)
default_cudadec_low_latency,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DISABLE_DPB,
g_param_spec_boolean ("disable-dpb",
"Disable DPB buffer",
"Set to disable DPB buffer for low latency",
DEFAULT_DISABLE_DPB, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_EXTRACT_SEI_TYPE5_DATA,
g_param_spec_boolean ("extract-sei-type5-data",
"extract-sei-type5-data",
@@ -3010,7 +2954,7 @@ void
gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
GTypeQuery type_query = { 0, };
GTypeQuery type_query;
GTypeInfo type_info = { 0, };
GType type, subtype;
GstV4l2VideoDecCData *cdata;


@@ -1,8 +1,7 @@
/*
* Copyright (C) 2014 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
* SPDX-FileCopyrightText: Copyright (c) 2018-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -49,7 +48,7 @@ G_BEGIN_DECLS
#ifdef USE_V4L2_TARGET_NV
#define GstV4l2VideoDec GstNvV4l2VideoDec
#define GstV4l2VideoDecClass GstNvV4l2VideoDecClass
#define LOOP_COUNT_TO_WAIT_FOR_DQEVENT 10
#define LOOP_COUNT_TO_WAIT_FOR_DQEVENT 6
#define WAIT_TIME_PER_LOOP_FOR_DQEVENT 100*1000
#define VP8_START_BYTE_0 0x9D
@@ -83,6 +82,7 @@ struct _GstV4l2VideoDec
guint64 frame_num;
#ifdef USE_V4L2_TARGET_NV
GHashTable* hash_pts_systemtime;
gdouble buffer_in_time;
guint64 decoded_picture_cnt;
guint32 skip_frames;
gboolean idr_received;
@@ -95,7 +95,6 @@ struct _GstV4l2VideoDec
gboolean enable_error_check;
gboolean enable_max_performance;
gboolean set_format;
gboolean is_gdr_stream;
guint32 cudadec_mem_type;
guint32 cudadec_gpu_id;
guint32 cudadec_num_surfaces;
@@ -109,7 +108,6 @@ struct _GstV4l2VideoDec
guint32 old_width;
guint32 old_height;
gboolean valid_vpx;
GMutex pts_hashtable_lock;
#endif
};


File diff suppressed because it is too large


@@ -1,8 +1,7 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association.
* Author: ayaka <ayaka@soulik.info>
* SPDX-FileCopyrightText: Copyright (c) 2018-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LGPL-2.0-only
* Copyright (c) 2018-2022, NVIDIA CORPORATION. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -55,9 +54,6 @@ struct _GstV4l2VideoEnc
#ifdef USE_V4L2_TARGET_NV
guint32 ratecontrol;
guint32 bitrate;
guint32 maxbitrate;
guint32 vbvbufsize;
guint32 vbvinit;
guint32 peak_bitrate;
guint32 idrinterval;
guint32 iframeinterval;
@@ -77,25 +73,15 @@ struct _GstV4l2VideoEnc
guint32 PInitQP;
guint32 BInitQP;
gboolean set_qpRange;
gboolean set_intrarefresh;
guint32 enableIntraRefresh;
guint32 intraRefreshPeriod;
guint32 intraRefreshCnt;
gboolean enableTemporalAQ;
guint32 aqStrength;
guint32 targetQuality;
guint32 hw_preset_level;
guint virtual_buffer_size;
gboolean measure_latency;
gboolean ratecontrol_enable;
gboolean force_idr;
gboolean force_intra;
gchar *sei_uuid;
gboolean maxperf_enable;
gboolean copy_timestamp;
FILE *tracing_file_enc;
GQueue *got_frame_pt;
guint32 cudaenc_mem_type;
guint32 cudaenc_gpu_id;
guint32 cudaenc_preset_id;
guint32 cudaenc_tuning_info_id;
@@ -105,7 +91,6 @@ struct _GstV4l2VideoEnc
gdouble buffer_in_time;
GHashTable* hash_pts_systemtime;
gboolean copy_meta;
gboolean enable_hwpreset;
#endif
/* < private > */


@@ -26,15 +26,13 @@
#include <stdio.h>
#include <string.h>
#include <glib.h>
#include "gstv4l2object.h"
#define UUID_SIZE 16
#define USER_DATA_UNREGISTERED_TYPE 5
gboolean check_uuid(uint8_t *stream, char *sei_uuid_string);
uint8_t* parse_sei_unit(uint8_t * bs_ptr, guint *size, char *sei_uuid_string);
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size,
char *sei_uuid_string, guint32 pixelformat);
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size, char *sei_uuid_string);
gboolean check_uuid(uint8_t *stream, char *sei_uuid_string)
{
@@ -88,16 +86,12 @@ uint8_t* parse_sei_unit(uint8_t * bs_ptr, guint *size, char *sei_uuid_string)
for (i = 0; i < (payload_size - UUID_SIZE); i++)
{
payload[i] = *bs_ptr;
if (strncmp (sei_uuid_string, "VST_CUSTOM_META", (UUID_SIZE-1)) != 0)
// drop emulation prevention bytes
if ((*(bs_ptr) == 0x03)
&& (*(bs_ptr - 1) == 0x00)
&& (*(bs_ptr - 2) == 0x00))
{
// drop emulation prevention bytes
if ((*(bs_ptr) == 0x03)
&& (*(bs_ptr - 1) == 0x00)
&& (*(bs_ptr - 2) == 0x00))
{
i--;
}
i--;
}
bs_ptr++;
}
@@ -109,25 +103,7 @@ uint8_t* parse_sei_unit(uint8_t * bs_ptr, guint *size, char *sei_uuid_string)
}
}
/*************************************************************
+------H264-----+
|0|1|2|3|4|5|6|7|
+-+-+-+-+-+-+-+-+
|F|NRI| Type |
+---------------+
+------------H265---------------+
|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|F| Type | LayerId | TID |
+-------------+-----------------+
*************************************************************/
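/* Illustrative sketch (hypothetical helper, not taken from this file): the
 * NAL-type check the scanner below relies on. Per the layouts above, H.264
 * keeps the NAL type in the low 5 bits of a single header byte (SEI == 0x06),
 * while H.265 keeps it in bits 1..6 of the first of two header bytes
 * (PREFIX_SEI == 0x27). */
static int is_sei_nal_header (const uint8_t *hdr, guint32 pixelformat)
{
  if (pixelformat == V4L2_PIX_FMT_H264)
    return (hdr[0] & 0x1f) == 0x06;        /* F(1) NRI(2) Type(5) */
  return ((hdr[0] >> 1) & 0x3f) == 0x27;   /* F(1) Type(6) LayerId(6) TID(3) */
}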
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size,
char *sei_uuid_string, guint32 pixelformat)
uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size, char *sei_uuid_string)
{
if (sei_uuid_string == NULL)
return NULL;
@@ -144,11 +120,8 @@ uint8_t *parse_sei_data (uint8_t *bs, uint32_t size, uint32_t *payload_size,
checklen++;
else if (checklen == 3 && *bs_ptr++ == 0x01)
checklen++;
else if (checklen == 4 && ((pixelformat == V4L2_PIX_FMT_H264) ? *bs_ptr == 0x06 : (((*bs_ptr >> 1) & 0x3f) == 0x27)))
else if (checklen == 4 && *bs_ptr++ == 0x06)
{
bs_ptr++;
if (pixelformat == V4L2_PIX_FMT_H265)
bs_ptr++;
payload = parse_sei_unit(bs_ptr, &sei_payload_size, sei_uuid_string);
checklen = 0;
if (payload != NULL)


@@ -2,7 +2,7 @@
*
* Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
* 2006 Edgard Lima <edgard.lima@gmail.com>
* Copyright (c) 2018-2024, NVIDIA CORPORATION. All rights reserved.
* Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
*
* v4l2_calls.c - generic V4L2 calls handling
*
@@ -51,22 +51,9 @@
#include "gstv4l2videodec.h"
#include "gst/gst-i18n-plugin.h"
#include <ctype.h>
#include "wsl_utils.h"
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
#ifdef USE_V4L2_TARGET_NV
void __attribute__((constructor)) gstv4l2_constructor_init(void);
static bool is_wsl_system = 0;
void __attribute__((constructor)) gstv4l2_constructor_init(void)
{
is_wsl_system = is_running_in_WSL();
}
#endif
/******************************************************
* gst_v4l2_get_capabilities():
* get the device's capturing capabilities
@@ -553,14 +540,7 @@ gst_v4l2_open (GstV4l2Object * v4l2object)
if (is_cuvid == TRUE) {
for (i = 0; i < 16; i++)
{
if (is_wsl_system) {
/* WSL system doesn't have /dev/nvidia0 node. Use /dev/null instead.
We can use a dummy node since the ioctl calls we use are not true ioctls */
GST_INFO_OBJECT(v4l2object->dbg_obj, "Running inside WSL");
g_snprintf(buf, sizeof(buf), "/dev/null");
} else {
g_snprintf(buf, sizeof(buf), "/dev/nvidia%d", i);
}
g_snprintf(buf, sizeof(buf), "/dev/nvidia%d", i);
v4l2object->video_fd =
open (buf, O_RDWR /* | O_NONBLOCK */ );
if (v4l2object->video_fd != -1)


@@ -1,41 +0,0 @@
/**
* SPDX-FileCopyrightText: Copyright (c) 2024-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LicenseRef-NvidiaProprietary
*
* NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
* property and proprietary rights in and to this material, related
* documentation and any modifications thereto. Any use, reproduction,
* disclosure or distribution of this material and related documentation
* without an express license agreement from NVIDIA CORPORATION or
* its affiliates is strictly prohibited.
*/
#include "wsl_utils.h"
bool is_running_in_WSL(void)
{
static volatile bool verified = false;
static volatile bool ret = false;
if (!verified) {
verified = true;
FILE *versionFile = fopen("/proc/version", "r");
if (versionFile != NULL) {
char versionInfo[512];
if (fgets(versionInfo, sizeof(versionInfo), versionFile) != NULL) {
for (int i=0; versionInfo[i] != '\0'; i++) {
versionInfo[i] = tolower((unsigned char)versionInfo[i]);
}
if (strstr(versionInfo, "microsoft") != NULL) {
/* Yes, Running inside WSL */
ret = true;
}
}
fclose(versionFile);
} else {
printf("ERROR: opening /proc/version failed\n");
}
}
return ret;
}


@@ -1,24 +0,0 @@
/**
* SPDX-FileCopyrightText: Copyright (c) 2024-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LicenseRef-NvidiaProprietary
*
* NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
* property and proprietary rights in and to this material, related
* documentation and any modifications thereto. Any use, reproduction,
* disclosure or distribution of this material and related documentation
* without an express license agreement from NVIDIA CORPORATION or
* its affiliates is strictly prohibited.
*/
#ifndef _WSL_UTILS_
#define _WSL_UTILS_
#include <stdio.h>
#include <stdbool.h>
#include <ctype.h>
#include <string.h>
/* Function to check if running inside Windows Subsystem For Linux (WSL) */
bool is_running_in_WSL(void);
#endif //_WSL_UTILS_


@@ -1,13 +1,11 @@
/*
* SPDX-FileCopyrightText: Copyright (c) 2019-2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
* SPDX-License-Identifier: LicenseRef-NvidiaProprietary
* Copyright (c) 2019-2023, NVIDIA CORPORATION. All rights reserved.
*
* NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
* property and proprietary rights in and to this material, related
* documentation and any modifications thereto. Any use, reproduction,
* disclosure or distribution of this material and related documentation
* without an express license agreement from NVIDIA CORPORATION or
* its affiliates is strictly prohibited.
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*/
/**
@@ -280,38 +278,6 @@ typedef enum
NVBUF_COLOR_FORMAT_NV12_12LE_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:2:0 12-bit multi-planar. */
NVBUF_COLOR_FORMAT_NV12_12LE_709_ER,
/** Specifies 8 bit GRAY scale ER - single plane */
NVBUF_COLOR_FORMAT_GRAY8_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:2 planar */
NVBUF_COLOR_FORMAT_UYVY_709,
/** Specifies BT.709 colorspace - Y/CbCr ER 4:2:2 planar */
NVBUF_COLOR_FORMAT_UYVY_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:2 planar */
NVBUF_COLOR_FORMAT_UYVY_2020,
/** Specifies 16 bit GRAY scale - single plane */
NVBUF_COLOR_FORMAT_GRAY16_LE,
/** Specifies 64 bit BGRA (B16 G16 R16 A16) interleaved */
NVBUF_COLOR_FORMAT_BGRA64_LE,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:2 multi-planar. */
NVBUF_COLOR_FORMAT_NV16_2020,
/** Specifies BT.601_ER colorspace - Y/CbCr 4:2:2 10-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_10LE_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:2 10-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_10LE_709,
/** Specifies BT.709_ER colorspace - Y/CbCr 4:2:2 10-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_10LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:2 10-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_10LE_2020,
/** Specifies BT.601 colorspace - Y/CbCr 4:2:2 12-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_12LE,
/** Specifies BT.601_ER colorspace - Y/CbCr 4:2:2 12-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_12LE_ER,
/** Specifies BT.709 colorspace - Y/CbCr 4:2:2 12-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_12LE_709,
/** Specifies BT.709_ER colorspace - Y/CbCr 4:2:2 12-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_12LE_709_ER,
/** Specifies BT.2020 colorspace - Y/CbCr 4:2:2 12-bit semi-planar. */
NVBUF_COLOR_FORMAT_NV16_12LE_2020,
NVBUF_COLOR_FORMAT_LAST
} NvBufSurfaceColorFormat;
@@ -375,9 +341,7 @@ typedef struct NvBufSurfacePlaneParamsEx
uint32_t physicaladdress[NVBUF_MAX_PLANES];
/** flags associated with planes */
uint64_t flags[NVBUF_MAX_PLANES];
/** DRM modifier for plane */
uint64_t drmModifier[NVBUF_MAX_PLANES];
/** Holds the reserved space for future use. */
void * _reserved[STRUCTURE_PADDING * NVBUF_MAX_PLANES];
} NvBufSurfacePlaneParamsEx;
@@ -400,25 +364,19 @@ typedef struct NvBufSurfacePlaneParams
uint32_t psize[NVBUF_MAX_PLANES];
/** Holds the number of bytes occupied by a pixel in each plane. */
uint32_t bytesPerPix[NVBUF_MAX_PLANES];
/** Holds the reserved space for future use. */
void * _reserved[STRUCTURE_PADDING * NVBUF_MAX_PLANES];
} NvBufSurfacePlaneParams;
/**
* Holds Chroma Subsampling parameters for NvBufSurface allocation.
* The members chromaLocHoriz and chromaLocVert accept these values:
* 0: Left horizontal or top vertical position
* 1: Center horizontal or center vertical position
* 2: Right horizontal or bottom vertical position
*/
typedef struct NvBufSurfaceChromaSubsamplingParams
{
/** location settings */
uint8_t chromaLocHoriz;
uint8_t chromaLocVert;
/** Reserved for alignment */
uint8_t _reserved[6];
} NvBufSurfaceChromaSubsamplingParams;
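/* Illustrative sketch (hypothetical helper, not part of this header): fills the
 * struct above with centered chroma siting, using the documented values
 * (0 = left/top, 1 = center, 2 = right/bottom). */
static inline void
nvbuf_css_set_center (NvBufSurfaceChromaSubsamplingParams *css)
{
  css->chromaLocHoriz = 1;   /* center horizontal position */
  css->chromaLocVert  = 1;   /* center vertical position */
}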
/**
@@ -444,8 +402,6 @@ typedef struct NvBufSurfaceCreateParams {
NvBufSurfaceLayout layout;
/** Holds the type of memory to be allocated. */
NvBufSurfaceMemType memType;
/** Holds the reserved space for future use. */
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceCreateParams;
/**
@@ -453,24 +409,24 @@ typedef struct NvBufSurfaceCreateParams {
* (Applicable for NvBufSurfaceAllocate API)
*/
typedef struct NvBufSurfaceAllocateParams {
/** Hold legacy NvBufSurface creation parameters */
/** Hold legacy NvBufSurface creation parameters */
NvBufSurfaceCreateParams params;
/** Display scan format */
/** Display scan format */
NvBufSurfaceDisplayScanFormat displayscanformat;
/** Chroma Subsampling parameters */
/** Chroma Subsampling parameters */
NvBufSurfaceChromaSubsamplingParams chromaSubsampling;
/** components tag to be used for memory allocation */
/** components tag to be used for memory allocation */
NvBufSurfaceTag memtag;
/** disable pitch padding allocation only applicable for cuda and system memory allocation
pitch would be width times bytes per pixel for the plane, for odd width it would be
multiple of 2, also note for some non standard video resolution cuda kernels may fail
/** disable pitch padding allocation only applicable for cuda and system memory allocation
pitch would be width times bytes per pixel for the plane, for odd width it would be
multiple of 2, also note for some non standard video resolution cuda kernels may fail
due to unaligned pitch
*/
*/
bool disablePitchPadding;
/** Used void* from custom param for 64 bit machine, using other uint32_t param */
/** Used void* from custom param for 64 bit machine, using other uint32_t param */
uint32_t _reservedParam;
/** Holds the reserved space for future use. */
void * _reserved[STRUCTURE_PADDING];
void * _reserved[STRUCTURE_PADDING-1];
} NvBufSurfaceAllocateParams;
/**
@@ -481,11 +437,7 @@ typedef struct NvBufSurfaceMappedAddr {
void * addr[NVBUF_MAX_PLANES];
/** Holds a pointer to a mapped EGLImage. */
void *eglImage;
/** Holds a pointer to a mapped NVRM memory */
void *nvmmPtr;
/** Holds a pointer to a mapped CUDA memory */
void *cudaPtr;
/** Holds the reserved space for future use. */
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceMappedAddr;
@@ -510,32 +462,6 @@ typedef struct NvBufSurfaceParamsEx {
void * _reserved[STRUCTURE_PADDING];
} NvBufSurfaceParamsEx;
/**
* Holds information of CUDA buffer.
* Applicable for tegra OpenRM only.
*/
typedef struct NvBufSurfaceCudaBuffer {
/**
* Holds a base pointer to allocated CUDA memory.
* It is different from dataPtr when CUDA allocated
* address is not page aligned for image buffers.
* It is same as dataPtr for other buffers.
*/
void *basePtr;
/**
* Holds a page aligned data pointer to CUDA memory for image buffers
* if CUDA allocated address is not page aligned.
* It is same as basePtr for other buffers.
*/
void *dataPtr;
/** Holds a pointer to external CUDA memory for imported CUDA buffers */
void *extMem;
/** Holds a pointer to external CUDA mipmaped array for imported CUDA buffers */
void *mipmap;
/** Reserved */
uint8_t reserved[64];
} NvBufSurfaceCudaBuffer;
/**
* Hold the information of single buffer in the batch.
*/
@@ -565,10 +491,8 @@ typedef struct NvBufSurfaceParams {
NvBufSurfaceMappedAddr mappedAddr;
/** pointers of extended parameters of single buffer in the batch.*/
NvBufSurfaceParamsEx *paramex;
/** Holds a pointer to CUDA buffer. Applicable for only CUDA Device and CUDA Host memory on tegra OpenRM.*/
NvBufSurfaceCudaBuffer *cudaBuffer;
void * _reserved[STRUCTURE_PADDING];
void * _reserved[STRUCTURE_PADDING - 1];
} NvBufSurfaceParams;
/**
@@ -589,8 +513,6 @@ typedef struct NvBufSurface {
NvBufSurfaceMemType memType;
/** Holds a pointer to an array of batched buffers. */
NvBufSurfaceParams *surfaceList;
/** Holds a flag for Imported buffer. */
bool isImportedBuf;
void * _reserved[STRUCTURE_PADDING];
} NvBufSurface;
@@ -620,23 +542,6 @@ typedef struct NvBufSurfaceMapPlaneParams
uint8_t reserved[64];
} NvBufSurfaceMapPlaneParams;
/**
* CUDA IPC memory handle for NvBufSurface
*/
typedef struct NvBufSurfaceCudaIpcMemHandle_t
{
char reserved[64];
} NvBufSurfaceCudaIpcMemHandle;
/**
* The extended map parameters NvBufSurface
*/
typedef struct NvBufSurfaceExtendedMapParams_t
{
NvBufSurfaceCudaIpcMemHandle memHandle;
void *reserved[64];
} NvBufSurfaceExtendedMapParams;
/**
* Holds buffer parameters to map the buffer received from another process.
*/
@@ -661,44 +566,9 @@ typedef struct NvBufSurfaceMapParams {
NvBufSurfaceChromaSubsamplingParams chromaSubsampling;
/** Holds plane parameters */
NvBufSurfaceMapPlaneParams planes[NVBUF_MAX_PLANES];
/** Holds the extended Map parameters */
void *extendedMapParams;
/** Holds the reserved space for future use. */
void *_reserved[STRUCTURE_PADDING];
} NvBufSurfaceMapParams;
/**
* Holds information about mapped CUDA buffer
*/
typedef struct NvBufSurfaceNvmmBuffer {
/** Holds a pointer to mapped nvmm memory */
void *dataPtr;
/** Holds a DMABUF FD */
uint64_t bufferDesc;
/** Reserved */
uint8_t reserved[64];
} NvBufSurfaceNvmmBuffer;
/**
* Defines the type of underlying kernel driver detected for GPU access.
*/
typedef enum {
NVBUF_DRIVER_TYPE_UNKNOWN = 0,
NVBUF_DRIVER_TYPE_NVGPU,
NVBUF_DRIVER_TYPE_RM
} NvBufSurfaceDriverType;
/**
* Holds information about the underlying device.
*/
typedef struct NvBufSurfaceDeviceInfo {
/** The detected device type (nvgpu, OpenRM, etc.). */
NvBufSurfaceDriverType driverType;
/** Indicates if VIC is present on the platform. */
bool isVicPresent;
/** Reserved for future use. */
uint8_t reserved[64];
} NvBufSurfaceDeviceInfo;
} NvBufSurfaceMapParams;
/**
* \brief Allocates a batch of buffers.
@@ -829,7 +699,7 @@ int NvBufSurfaceCopy (NvBufSurface *srcSurf, NvBufSurface *dstSurf);
* This function can be used to copy plane memory content from source raw buffer pointer
* to specific destination batch buffer of supported memory type.
*
* @param[in] Surf pointer to NvBufSurface structure.
* @param[in] surf pointer to NvBufSurface structure.
* @param[in] index index of buffer in the batch.
* @param[in] plane index of plane in buffer.
* @param[in] out_width aligned width of the raw data plane.
@@ -838,7 +708,7 @@ int NvBufSurfaceCopy (NvBufSurface *srcSurf, NvBufSurface *dstSurf);
*
* @return 0 for success, -1 for failure.
*/
int NvBufSurface2Raw (NvBufSurface *Surf, unsigned int index, unsigned int plane, unsigned int out_width, unsigned int out_height, unsigned char *ptr);
int NvBufSurface2Raw (NvBufSurface *Surf, unsigned int index, unsigned int plane, unsigned int outwidth, unsigned int outheight, unsigned char *ptr);
/**
* \brief Copies the raw buffer plane memory content to the NvBufSurface plane memory of a specific
@@ -852,11 +722,11 @@ int NvBufSurface2Raw (NvBufSurface *Surf, unsigned int index, unsigned int plane
* @param[in] plane index of plane in buffer.
* @param[in] in_width aligned width of the raw data plane.
* @param[in] in_height aligned height of the raw data plane.
* @param[in] Surf pointer to NvBufSurface structure.
* @param[in] surf pointer to NvBufSurface structure.
*
* @return 0 for success, -1 for failure.
*/
int Raw2NvBufSurface (unsigned char *ptr, unsigned int index, unsigned int plane, unsigned int in_width, unsigned int in_height, NvBufSurface *Surf);
int Raw2NvBufSurface (unsigned char *ptr, unsigned int index, unsigned int plane, unsigned int inwidth, unsigned int inheight, NvBufSurface *Surf);
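/* Illustrative sketch (hypothetical helper, not part of this header): round-trips
 * plane 0 of batch buffer 0 through a raw CPU buffer using the two copy helpers
 * declared above; assumes a 1-byte-per-pixel plane and requires <stdlib.h>. */
static inline int
nvbuf_roundtrip_plane0 (NvBufSurface *surf, unsigned int width, unsigned int height)
{
  unsigned char *raw = (unsigned char *) malloc ((size_t) width * height);
  int ret = -1;

  if (raw == NULL)
    return -1;
  if (NvBufSurface2Raw (surf, 0, 0, width, height, raw) == 0 &&  /* surface -> raw */
      Raw2NvBufSurface (raw, 0, 0, width, height, surf) == 0)    /* raw -> surface */
    ret = 0;
  free (raw);
  return ret;
}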
/**
* Syncs the HW memory cache for the CPU.
@@ -980,94 +850,7 @@ int NvBufSurfaceImport (NvBufSurface **out_nvbuf_surf, const NvBufSurfaceMapPara
*/
int NvBufSurfaceGetMapParams (const NvBufSurface *surf, int index, NvBufSurfaceMapParams *params);
/**
* \brief Creates a CUDA buffer from the memory of one or more
* \ref NvBufSurface buffers.
*
* Only memory type \ref NVBUF_MEM_SURFACE_ARRAY is supported.
*
* This function returns the created CUDA buffer by storing its address at
* \a surf->surfaceList->mappedAddr->cudaPtr. (\a surf is a pointer to
* an NvBufSurface. \a surfaceList is a pointer to an \ref NvBufSurfaceParams.
* \a mappedAddr is a pointer to an \ref NvBufSurfaceMappedAddr.
* \a cudaPtr is a pointer to an \ref NvBufSurfaceCudaBuffer.
*
* You can use this function in scenarios where a CUDA operation on Jetson
* hardware memory (identified by \ref NVBUF_MEM_SURFACE_ARRAY) is required.
* The NvBufSurfaceCudaBuffer struct provided by this function can be used
* to get dataPtr of CUDA memory.
*
* @param[in,out] surf A pointer to an NvBufSurface structure. The function
* stores a pointer to the created CUDA buffer in
* a descendant of this structure; see the notes above.
* @param[in] index Index of a buffer in the batch. -1 specifies all buffers
* in the batch.
*
* @return 0 for success, or -1 otherwise.
*/
int NvBufSurfaceMapCudaBuffer (NvBufSurface *surf, int index);
/**
* \brief Destroys the previously created CUDA buffer.
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index The index of a buffer in the batch. -1 specifies all
* buffers in the batch.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceUnMapCudaBuffer (NvBufSurface *surf, int index);
/**
* \brief Creates an NVMM buffer from the memory of one or more
* \ref NvBufSurface buffers.
*
* Only memory type \ref NVBUF_MEM_CUDA_DEVICE and \ref NVBUF_MEM_CUDA_PINNED
* are supported.
*
* This function returns the created NVMM buffer by storing its address at
* \a surf->surfaceList->mappedAddr->nvmmPtr. (\a surf is a pointer to
* an NvBufSurface. \a surfaceList is a pointer to an \ref NvBufSurfaceParams.
* \a mappedAddr is a pointer to an \ref NvBufSurfaceMappedAddr.
* \a nvmmPtr is a pointer to NVMM buffer of memory type \ref NVBUF_MEM_SURFACE_ARRAY.
*
* You can use this function in scenarios where an NVBUF_MEM_SURFACE_ARRAY operation
* on Jetson hardware memory identified by \ref NVBUF_MEM_CUDA_DEVICE and
* \ref NVBUF_MEM_CUDA_PINNED is required.
*
* @param[in,out] surf A pointer to an NvBufSurface structure. The function
* stores a pointer to the created NVMM buffer in
* a descendant of this structure; see the notes above.
* @param[in] index Index of a buffer in the batch. -1 specifies all buffers
* in the batch.
*
* @return 0 for success, or -1 otherwise.
*/
int NvBufSurfaceMapNvmmBuffer (NvBufSurface *surf, int index);
/**
* \brief Destroys the previously created NVMM buffer.
*
* @param[in] surf A pointer to an \ref NvBufSurface structure.
* @param[in] index The index of a buffer in the batch. -1 specifies all
* buffers in the batch.
*
* @return 0 if successful, or -1 otherwise.
*/
int NvBufSurfaceUnMapNvmmBuffer (NvBufSurface *surf, int index);
/**
* \brief Retrieves information about the underlying GPU device driver.
*
* @param[out] info Pointer to NvBufSurfaceDeviceInfo structure.
*
* @return 0 if successful, or -1 otherwise.
*
* This function attempts to determine if the system is using 'nvgpu' or
* an OpenRM-based driver by checking loaded kernel modules. It also checks
* whether VIC is present on the platform.
*/
int NvBufSurfaceGetDeviceInfo (NvBufSurfaceDeviceInfo *info);
/** @} */
#ifdef __cplusplus
}


@@ -1 +1 @@
jetson_38.2
jetson_35.6.2


File diff suppressed because it is too large