From 960561533bf99fe7d79c00ffec55a86bf3de03d8 Mon Sep 17 00:00:00 2001 From: svcmobrel-release Date: Mon, 15 Aug 2022 08:53:44 -0700 Subject: [PATCH] Updating prebuilts and/or headers 9b47978b5f3b6672dd4d6ad5ebe80c9b945a7eba - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.c 33a285339d714d5546cddb92a710e418853470aa - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.c 6bafa48f47ad43d33ee446cf86f2b1da134f7868 - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.h 599544266262509705c60ca9e8d2c8ade3bdfc30 - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.c aaafd7fd4c0214a52bf73dd2a0ba0af08c675b85 - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.h c028fa403772288daf002520356e8e18cce5cb06 - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.h a5bdf6935960973677a005d9d28a04c023f5ec6f - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.c 6380a3e21b43fdc01c6de7d3934c602af38c3d12 - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.c 87556b6e7da0ec3865546f10b7a58959cd8c6bfc - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.h 4048ce41fcd68ca284bf6146d2d32608fe69ca8a - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.h Change-Id: Id92555e07546fa0529c0acc45ce064f5097c1d64 --- commitFile.txt | 12 + nvgstapps_src/README.txt | 64 + .../nvgstcapture-1.0/nvgst_x11_common.c | 146 + .../nvgstcapture-1.0/nvgst_x11_common.h | 48 + .../nvgstcapture-1.0_README.txt | 291 + .../nvgstcapture-1.0/nvgstcapture.c | 4715 +++++++++++++++++ .../nvgstcapture-1.0/nvgstcapture.h | 686 +++ .../nvgstplayer-1.0/nvgst_asound_common.c | 48 + .../nvgstplayer-1.0/nvgst_asound_common.h | 15 + .../nvgstplayer-1.0/nvgst_x11_common.c | 142 + .../nvgstplayer-1.0/nvgst_x11_common.h | 46 + .../nvgstplayer-1.0_README.txt | 354 ++ .../nvgstplayer-1.0/nvgstplayer.c | 4050 ++++++++++++++ .../nvgstplayer-1.0/nvgstplayer.h | 435 ++ push_info.txt | 1 + 15 files changed, 
11053 insertions(+) create mode 100644 commitFile.txt create mode 100644 nvgstapps_src/README.txt create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.c create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.h create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture-1.0_README.txt create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.c create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.h create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.c create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.h create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.c create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.h create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer-1.0_README.txt create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.c create mode 100644 nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.h create mode 100644 push_info.txt diff --git a/commitFile.txt b/commitFile.txt new file mode 100644 index 0000000..f381c55 --- /dev/null +++ b/commitFile.txt @@ -0,0 +1,12 @@ +Updating prebuilts and/or headers + +9b47978b5f3b6672dd4d6ad5ebe80c9b945a7eba - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.c +33a285339d714d5546cddb92a710e418853470aa - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.c +6bafa48f47ad43d33ee446cf86f2b1da134f7868 - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.h +599544266262509705c60ca9e8d2c8ade3bdfc30 - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.c +aaafd7fd4c0214a52bf73dd2a0ba0af08c675b85 - nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.h +c028fa403772288daf002520356e8e18cce5cb06 - 
nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.h +a5bdf6935960973677a005d9d28a04c023f5ec6f - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.c +6380a3e21b43fdc01c6de7d3934c602af38c3d12 - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.c +87556b6e7da0ec3865546f10b7a58959cd8c6bfc - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.h +4048ce41fcd68ca284bf6146d2d32608fe69ca8a - nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.h diff --git a/nvgstapps_src/README.txt b/nvgstapps_src/README.txt new file mode 100644 index 0000000..e1fb88d --- /dev/null +++ b/nvgstapps_src/README.txt @@ -0,0 +1,64 @@ + +This file explains the procedure to compile NvGstApps sources for hardfp(armhf) +ARM architecture. + + +-------------------------------------------------------------------------------- + Prerequisites for nvgst-1.0 applications +-------------------------------------------------------------------------------- +For nvgstcapture-1.0 and nvgstplayer-1.0 applications: + +* You must install GStreamer-1.0 on the target board using apt-get, as follows: + + sudo apt-get install gstreamer1.0-tools gstreamer1.0-alsa \ + gstreamer1.0-plugins-base gstreamer1.0-plugins-good \ + gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly \ + gstreamer1.0-libav libgstreamer1.0-dev + + +* Download or copy the nvgstapps_src.tbz2 file on device and untar it. + tar -xpf nvgstapps_src.tbz2 + + +* Compile nvgstapps with following procedures. 
+-------------------------------------------------------------------------------- + Procedure to compile nvgstcapture-1.0: +-------------------------------------------------------------------------------- + + On the target, execute the following commands: + + sudo apt-get install libgstreamer-plugins-base1.0-dev + sudo apt-get install libegl1-mesa-dev + sudo apt-get install libx11-dev libxext-dev + + As the above steps will overwrite the sym-links to the hardware accelerated libegl + binary, to point back to the tegra version, execute the following commands: + + Export ARM application binary interface based on the Linux and ARM platform: + Linux 64bit userspace support: + export TEGRA_ARMABI=aarch64-linux-gnu + Linux 32bit userspace and ARM hardfp support: + export TEGRA_ARMABI=arm-linux-gnueabihf + + cd nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0 + gcc nvgstcapture.c nvgst_x11_common.c -o nvgstcapture-1.0 \ + $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-plugins-base-1.0 \ + gstreamer-pbutils-1.0 x11 xext gstreamer-video-1.0) -ldl + +-------------------------------------------------------------------------------- + Procedure to compile nvgstplayer-1.0: +-------------------------------------------------------------------------------- + + On the target, execute the following commands: + + cd nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0 + gcc nvgstplayer.c nvgst_x11_common.c nvgst_asound_common.c -o nvgstplayer-1.0 \ + $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-plugins-base-1.0 \ + gstreamer-pbutils-1.0 gstreamer-video-1.0 x11 xext alsa) + + +* For nvgstcapture-1.0 usage, refer to + nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture-1.0_README.txt + +* For nvgstplayer-1.0 usage, refer to + nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer-1.0_README.txt diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.c b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.c new file mode 100644 
index 0000000..c7cdb9f --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.c @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ */ + +#include "nvgst_x11_common.h" + +void +nvgst_x11_init (displayCtx * dpyCtx) +{ + dpyCtx->isDPMSdisabled = 0; + int screen = 0; + dpyCtx->mDisplay = XOpenDisplay (NULL); + if (!dpyCtx->mDisplay) + printf + ("\nCannot open display specified with DISPLAY environment variable\n"); + else { + screen = DefaultScreen (dpyCtx->mDisplay); + dpyCtx->display_width = DisplayWidth (dpyCtx->mDisplay, screen); + dpyCtx->display_height = DisplayHeight (dpyCtx->mDisplay, screen); + } +} + +void +nvgst_x11_uninit (displayCtx * dpyCtx) +{ + if (dpyCtx->window) + XDestroyWindow (dpyCtx->mDisplay, dpyCtx->window); + XCloseDisplay (dpyCtx->mDisplay); + dpyCtx->mDisplay = NULL; +} + +void +saver_off (displayCtx * dpyCtx) +{ + int nothing; + if (DPMSQueryExtension (dpyCtx->mDisplay, ¬hing, ¬hing)) { + BOOL enabled; + CARD16 powerLevel; + + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (enabled) { + DPMSDisable (dpyCtx->mDisplay); + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (enabled) { + printf ("\ncould not disable DPMS\n"); + } + } else { + printf ("\nDPMS already DISABLED\n"); + dpyCtx->isDPMSdisabled = 1; + } + } else + printf ("\nserver does not have extension for -dpms option\n"); +} + +void +saver_on (displayCtx * dpyCtx) +{ + int nothing; + if (DPMSQueryExtension (dpyCtx->mDisplay, ¬hing, ¬hing)) { + BOOL enabled; + CARD16 powerLevel; + + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (!enabled) { + if (!dpyCtx->isDPMSdisabled) + DPMSEnable (dpyCtx->mDisplay); + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (!enabled && !dpyCtx->isDPMSdisabled) { + printf ("\ncould not enable DPMS\n"); + } + } else + printf ("\nDPMS already ENABLED\n"); + } else + printf ("\nserver does not have extension for -dpms option\n"); + +} + +void +nvgst_create_window (displayCtx * dpyCtx, char *title) +{ + int screen = 0; + XTextProperty xproperty; + + if (dpyCtx->mDisplay) { + screen = DefaultScreen (dpyCtx->mDisplay); + if (!dpyCtx->width && 
!dpyCtx->height) { + dpyCtx->width = DisplayWidth (dpyCtx->mDisplay, screen); + dpyCtx->height = DisplayHeight (dpyCtx->mDisplay, screen); + } + + dpyCtx->window = XCreateSimpleWindow (dpyCtx->mDisplay, + RootWindow (dpyCtx->mDisplay, screen), + dpyCtx->x, dpyCtx->y, dpyCtx->width, dpyCtx->height, 0, 0, + BlackPixel (dpyCtx->mDisplay, screen)); + + XSetWindowBackgroundPixmap (dpyCtx->mDisplay, dpyCtx->window, None); + + if (title) { + if ((XStringListToTextProperty (((char **) &title), 1, &xproperty)) != 0) { + XSetWMName (dpyCtx->mDisplay, dpyCtx->window, &xproperty); + XFree (xproperty.value); + + } + } else + printf ("\ncan't set title to window, title NULL\n"); + + /* Tell the window manager we'd like delete client messages instead of + * being killed */ + Atom wmDeleteMessage = + XInternAtom (dpyCtx->mDisplay, "WM_DELETE_WINDOW", False); + if (wmDeleteMessage != None) { + XSetWMProtocols (dpyCtx->mDisplay, dpyCtx->window, &wmDeleteMessage, 1); + } + + XMapRaised (dpyCtx->mDisplay, dpyCtx->window); + + XSync (dpyCtx->mDisplay, 1); //discard the events for now + } else + printf ("\ncan't create window, Display NULL\n"); + +} + +void +nvgst_destroy_window (displayCtx * dpyCtx) +{ + XDestroyWindow (dpyCtx->mDisplay, dpyCtx->window); + dpyCtx->window = (Window) NULL; +} diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.h b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.h new file mode 100644 index 0000000..3d931d5 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgst_x11_common.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ */ + +#include +#include +#include +#include +#include +#include + +typedef struct +{ + Display *mDisplay; + Window window; + int width; + int height; + int display_width; + int display_height; + int x; + int y; + int isDPMSdisabled; +} displayCtx; + +void nvgst_x11_init (displayCtx * dpyCtx); +void nvgst_x11_uninit (displayCtx * dpyCtx); +void saver_off (displayCtx * dpyCtx); +void saver_on (displayCtx * dpyCtx); +void nvgst_create_window (displayCtx * dpyCtx, char *title); +void nvgst_destroy_window (displayCtx * dpyCtx); diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture-1.0_README.txt b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture-1.0_README.txt new file mode 100644 index 0000000..8179d2b --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture-1.0_README.txt @@ -0,0 +1,291 @@ +/* + * Copyright (c) 2014-2021, NVIDIA CORPORATION. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +=> PRE-REQUISITES : + +1. You must install GStreamer-1.0 on the target board using apt-get, as follows: + + sudo apt-get install gstreamer1.0-tools gstreamer1.0-alsa gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav + +2. To check the running gstreamer version, use the following command before you start using the nvgstcapture application: + + gst-inspect-1.0 --version + +3. Execute the following commands on the target board's Ubuntu command line: + + export DISPLAY=:0 + +=> NvGstCapture Usage: + +* The nvgstcapture application can capture video data and encapsulate encoded data in a container file. + +* The nvgstcapture application supports both command-line and runtime options. + +=> NvGstCapture Application Options: + + Usage: ./nvgstcapture-1.0 --help + +* Command-line options for NvArgusCamera: + + --prev-res Preview width & height.Range: 2 to 12 (5632x4224) e.g., --prev-res=3 + --cus-prev-res Custom Preview width & height e.g., --cus-prev-res=1920x1080 + --image-res Image width & height. Range: 2 to 12 (5632x4224) e.g., --image-res=3 + --video-res Video width & height. 
Range: 2 to 9 (3896x2192) e.g., --video-res=3 + --camsrc Camera Source to use (0=v4l2, 1=csi[default], 2=videotest, 3=eglstream) + -m, --mode Capture mode value (1=still 2=video) + -v, --video-enc Video encoder type (0=h264[HW] 1=vp8[HW] 2=h265[HW] 3=vp9[HW]) + -p, --hw-enc-path Frame Work type (1=V4L2[Default]) + -b, --enc-bitrate Video encoding Bit-rate(in bytes) e.g., --enc-bitrate=4000000 + --enc-controlrate Video encoding Bit-rate control method 0 = Disable, 1 = variable(Default), 2 = constant e.g., --enc-controlrate=1 + --enc-EnableTwopassCBR Enable two pass CBR while encoding 0 = Disable, 1 = Enable e.g., --enc-EnableTwopassCBR=1 + --enc-profile Video encoder profile For H.264: 0=Baseline, 1=Main, 2=High + -J, --image-enc Image encoder type (0=jpeg_SW[jpegenc] 1=jpeg_HW[nvjpegenc]) + -k, --file-type Container file type (0=mp4 1=3gp 2=mkv) + --file-name Captured file name. nvcamtest is used by default + --color-format Color format to use (0=I420,1=NV12[For CSI only and default for CSI], 2=YUY2[For V4L2 only, default for v4l2]) + --enable-meta Enable Sensor MetaData reporting + --app-profile Enable KPI profiling + --kpi-numbers Enable KPI measurement + --cap-dev-node Video capture device node (0=/dev/video0[default], 1=/dev/video1, 2=/dev/video2) e.g., --cap-dev-node=0 + --svs [For USB] (=) chain for video Preview. 
[For CSI only] use "nveglglessink" + --eglConfig EGL window Coordinates (x_pos y_pos) in that order e.g., --eglConfig="50 100" + --orientation Camera sensor orientation value + -w, --whitebalance Capture whitebalance value + --timeout Capture timeout value + --saturation Camera Saturation value + --sensor-id Camera Sensor ID value + --exposuretimerange Property to adjust exposure time range in nanoseconds e.g., --exposuretimerange="34000 358733000" + --gainrange Property to adjust gain range e.g., --gainrange="1 16" + --ispdigitalgainrange Property to adjust digital gain range e.g., --ispdigitalgainrange="1 8" + --aelock Enable AE Lock, default is disabled + --awblock Enable AWB Lock, default is disabled + --exposurecompensation Property to adjust exposure compensation e.g., --exposurecompensation=0.5 + --aeantibanding Property to set the auto exposure antibanding mode e.g., --aeantibanding=2 + --tnr-mode Property to select temporal noise reduction mode e.g., --tnr-mode=2 + --tnr-strength Property to adjust temporal noise reduction strength e.g., --tnr-strength=0.5 + --ee-mode Property to select edge enhancement mode e.g., --ee-mode=2 + --ee-strength Property to adjust edge enhancement strength e.g., --ee-strength=0.5 + --overlayConfig Overlay Configuration Options index and coordinates in (index, x_pos, y_pos, width, height) order e.g. --overlayConfig="0, 0, 0, 1280, 720" + -A, --automate Run application in automation mode + -S, --start-time Start capture after specified time in seconds. Default = 5 sec (use with --automate or -A only) + -Q, --quit-after Quit application once automation is done after specified time in seconds. Default = 0 sec (use with --automate or -A only) + -C, --count Number of iterations of automation testcase. Default = 1 (use with --automate or -A only) + -N, --num-sensors Number of sensors (use with --automate or -A only) + --capture-gap Number of milliseconds between successive image/video capture. 
Default = 250 msec (use with --automate and --capture-auto only) + --capture-time Capture video for specified time in seconds. Default = 10 sec (use with --automate and --capture-auto only) + --toggle-mode Toggle between still and video capture modes for count number of times (use with --automate or -A only) + --capture-auto Do image/video capture in automation mode for count number of times(use with --automate or -A only) + --toggle-sensor Toggle between num_sensors if given otherwise between sensor-id 0 and 1 (use with --automate or -A only) + --enum-wb Enumerate all white-balance modes for count number of times (use with --automate or -A only) + --enum-st Enumerate saturation value through 0 to 2 by a step of 0.1 for count number of times (use with --automate or -A only) + + +Supported resolutions in case of NvArgusCamera + (2) : 640x480 + (3) : 1280x720 + (4) : 1920x1080 + (5) : 2104x1560 + (6) : 2592x1944 + (7) : 2616x1472 + (8) : 3840x2160 + (9) : 3896x2192 + (10): 4208x3120 + (11): 5632x3168 + (12): 5632x4224 + +* Runtime options for NvArgusCamera: + + Help : 'h' + Quit : 'q' + Set Capture Mode: + mo: + (1): image + (2): video + Get Capture Mode: + gmo + Set sensor orientation: + so: + (0): none + (1): Rotate counter-clockwise 90 degrees + (2): Rotate 180 degrees + (3): Rotate clockwise 90 degrees + Get sensor orientation: + gso + Set Whitebalance Mode: + wb: + (0): off + (1): auto + (2): incandescent + (3): fluorescent + (4): warm-fluorescent + (5): daylight + (6): cloudy-daylight + (7): twilight + (8): shade + (9): manual + Get Whitebalance Mode: + gwb + Set Saturation (0 to 2): + st: e.g., st:1.25 + Get Saturation: + gst + Set Exposure Compensation (-2 to 2): + ec: e.g., ec:-2 + Get Exposure Compensation: + gec + Set Auto Whitebalance Lock: + awbl: e.g., awbl:0 + Get Auto Whitebalance Lock: + awbl + Set Auto Exposure Lock: + ael: e.g., ael:0 + Get Auto Exposure Lock: + gael + Set TNR Mode: + tnrm: e.g., tnrm:1 + (0): DENOISE_MODE_OFF + (1): 
DENOISE_MODE_FAST + (2): DENOISE_MODE_HIGH_QUALITY + Get TNR Mode: + gtnrm + Set TNR Strength (-1 to 1): + tnrs: e.g., tnrs:0.5 + Get TNR Strength: + gtnrs + Set EE Mode: + eem: e.g., eem:1 + (0): EDGE_ENHANCE_MODE_OFF + (1): EDGE_ENHANCE_MODE_FAST + (2): EDGE_ENHANCE_MODE_HIGH_QUALITY + Get EE Mode: + geem + Set EE Strength (-1 to 1): + ees: e.g., ees:0.5 + Get EE Strength: + gees + Set Auto Exposure Anti-Banding (0 to 3): + aeab: e.g., aeab:2 + (0): AE_ANTIBANDING_MODE_OFF + (1): AE_ANTIBANDING_MODE_AUTO + (2): AE_ANTIBANDING_MODE_50HZ + (3): AE_ANTIBANDING_MODE_60HZ + Get Auto Exposure Anti-Banding: + gaeab + Set Gain Range: + gr: e.g., gr:1 16 + Get Gain Range: + ggr + Set Exposure Time Range: + etr: e.g., etr:34000 35000 + Get Exposure Time Range: + getr + Set ISP Digital Gain Range: + dgr: e.g., dgr:2 152 + Get ISP Digital Gain Range: + gdgr + Capture: enter 'j' OR + followed by a timer (e.g., jx5000, capture after 5 seconds) OR + followed by multishot count (e.g., j:6, capture 6 images) + timer/multihot values are optional, capture defaults to single shot with timer=0s + Start Recording : enter '1' + Stop Recording : enter '0' + Video snapshot : enter '2' (While recording video) + Get Preview Resolution: + gpcr + Get Image Capture Resolution: + gicr + Get Video Capture Resolution: + gvcr + +Runtime encoder configuration options: + + Set Encoding Bit-rate(in bytes): + br: e.g., br:4000000 + Get Encoding Bit-rate(in bytes): + gbr + Set Encoding Profile(only for H.264): + ep: e.g., ep:1 + (0): Baseline + (1): Main + (2): High + Get Encoding Profile(only for H.264): + gep + Force IDR Frame on video Encoder(only for H.264): + Enter 'f' + +Runtime encoder configuration options: + + Set Encoding Bit-rate(in bytes): + br: e.g., br:4000000 + Get Encoding Bit-rate(in bytes): + gbr + Set Encoding Profile(only for H.264): + ep: e.g., ep:1 + (0): Baseline + (1): Main + (2): High + Get Encoding Profile(only for H.264): + gep + Force IDR Frame on video Encoder(only for 
H.264): + Enter 'f' + +NOTES: + +1. The nvgstcapture application generates image and video output files in the same directory as the application itself. + +2. Filenames for image and video content are in the formats "nvcamtest__s_.jpg" and + "nvcamtest__s_.mp4" respectively. + where, + is process id, + is camera sensor id + is a counter starting from 0 every time you run the application. + Rename or move files between runs to avoid overwriting results you want to save. + +3. The nvgstcapture application default supports native capture (video only) mode. + +4. Advanced features, like setting zoom, brightness, exposure, and whitebalance, are not supported with USB-Camera. + +5. The nvgstcapture application sets nvjpegenc[HW] by default for Image encoding. + +6. User can specify a sensor supported custom preview resolution. A CSI camera will output at the custom resolution + but visible preview will be capped at 1080p max. Command: + ./nvgstcapture-1.0 --prev-res=2 --cus-prev-res=1280x960 + +7. nvgstcapture-1.0 supports automation. Details of the commands can be found in help. Here are example command lines + for a few use cases: + + i) Capture 50 images with 1080p resolutions: + ./nvgstcapture-1.0 -A -C 50 --capture-auto --image-res=4 + + ii) Capture 5 720p videos, each of length 60 seconds, with a gap of 2 seconds between recordings + and quit application 10 seconds after that: + ./nvgstcapture-1.0 -A -C 5 --capture-auto --video-res=3 --capture-time=60 --mode=2 --capture-gap=2000 --quit-after=10 + +8. nvhdmioverlaysink and nvoverlaysnk has been deprecated. + +9. For a USB camera use "--cap-dev-node" command line option along with "--camsrc=0" to specify the capture node. + +10. Hardware encoder support depends on the Jetson platform. Please refer to the following section in Accelerated GStreamer User Guide for supported codecs for your platform: + Gstreamer-1.0 Installation and Setup > Gstreamer-1.0 Plugin Reference > gst-omx video encoders + +11. 
The nvgstcapture application uses gst-v4l2 encoders by default. + diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.c b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.c new file mode 100644 index 0000000..7dae829 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.c @@ -0,0 +1,4715 @@ +/* + * Copyright (c) 2014-2021, NVIDIA CORPORATION. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ */ + +#include +#include +#include "nvgstcapture.h" + +#define EGL_PRODUCER_LIBRARY "libnveglstreamproducer.so" + +#ifdef WITH_STREAMING +#include +#include +#include "gstnvrtspserver.h" +static NvGstRtspFunctions nvgst_rtsp_functions; + +static GstFlowReturn +rtsp_video_appsink_new_sample (GstAppSink * appsink, gpointer user_data); +#endif + +static gboolean check_capture_params (void); +gboolean create_capture_pipeline (void); +static gboolean create_native_capture_pipeline (void); +void destroy_capture_pipeline (void); +static void capture_init_params (void); +static void set_encoder_bitrate (guint bitrate); +static void set_encoder_profile (H264EncProfileType profile); +void set_capture_device_node (void); +static void print_help (void); +static void set_new_file_name (int muxer_type); +void restart_capture_pipeline (void); +static gboolean create_svs_bin (void); +static gboolean create_cap_bin (void); +static gboolean create_vid_enc_bin (void); +static gboolean create_img_enc_bin (void); + +static gboolean get_image_encoder (GstElement ** iencoder); +static gboolean get_video_encoder (GstElement ** vencoder); +static gboolean get_muxer (GstElement ** muxer); +static void +cam_image_captured (GstElement * fsink, + GstBuffer * buffer, GstPad * pad, gpointer udata); +static gboolean +parse_spec (const gchar * option_name, + const gchar * value, gpointer data, GError ** error); +static GstPadProbeReturn +prev_buf_prob (GstPad * pad, GstPadProbeInfo * info, gpointer u_data); +static GstPadProbeReturn +enc_buf_prob (GstPad * pad, GstPadProbeInfo * info, gpointer u_data); +gboolean get_preview_resolution (gint res); +static gboolean get_image_capture_resolution (gint res); +static gboolean get_video_capture_resolution (gint res); +static gboolean camera_need_reconfigure (int new_res, + CapturePadType current_pad); + +static CamCtx capp; +CamCtx *app; +static GMainLoop *loop; +static gboolean cintr = FALSE; +gboolean recording = FALSE; +static gboolean snapshot = 
FALSE; + +/* EGLStream Producer */ +typedef gint (*start_eglstream_producer_func) + (int producer_index, EGLDisplay * display, EGLStreamKHR * stream, + int width, int height); +typedef gint (*stop_eglstream_producer_func) (int producer_index); + +static int is_user_bitrate = 0; + +void set_saturation (gfloat dval); +void set_whitebalance (gint val); +void set_timeout(gint val); +void set_mode (gint newMode); +void set_exposure_saturation (gfloat dval); + +gboolean set_preview_resolution (int new_res); +gboolean set_image_resolution (int new_res); +gboolean set_video_resolution (int new_res); + +void start_video_capture (void); +void stop_video_capture (void); +void trigger_vsnap_capture (void); +void trigger_image_capture (void); +gboolean exit_capture (gpointer data); + +#if !GUI +static void nvgst_handle_xevents (void); +static gpointer nvgst_x_event_thread (gpointer); +#endif + +#define WIDTH_RES 176, 320, 640, 1280, 1920, 2104, 2592, 2616, 3840, 3896, 4208, 5632, 5632 +#define HEIGHT_RES 144, 240, 480, 720, 1080, 1560, 1944, 1472, 2160, 2192, 3120, 3168, 4224 + +gint prevres_width[] = { WIDTH_RES }; +gint prevres_height[] = { HEIGHT_RES }; +static gint image_capture_width[] = { WIDTH_RES }; +static gint image_capture_height[] = { HEIGHT_RES }; +static gint video_capture_width[] = { WIDTH_RES }; +static gint video_capture_height[] = { HEIGHT_RES }; + +/* MetaData structure returned by nvcamerasrc */ +typedef struct AuxBufferData { + gint64 frame_num; + gint64 timestamp; + void * sensor_data; +} AuxData; + + +/** + * a GOptionArgFunc callback function + * + * @param option_name : The name of the option being parsed + * @param value : The value to be parsed. 
+ * @param data : User data added to the GOptionGroup + * @param error : A return location for errors + */ +static gboolean +parse_spec (const gchar * option_name, + const gchar * value, gpointer data, GError ** error) +{ + if (!g_strcmp0 ("--prev-res", option_name)) { + if (TRUE != get_preview_resolution (atoi (value))) + return FALSE; + } else if (!g_strcmp0 ("--image-res", option_name)) { + if (TRUE != get_image_capture_resolution (atoi (value))) + return FALSE; + } else if (!g_strcmp0 ("--video-res", option_name)) { + if (TRUE != get_video_capture_resolution (atoi (value))) + return FALSE; + } else if (!g_strcmp0 ("--cus-prev-res", option_name)) { + gchar *str = NULL; + app->capres.cus_prev_width = atoi (value); + str = g_strrstr (value, "x"); + if (str) { + app->capres.cus_prev_height = atoi (str + 1); + app->use_cus_res = TRUE; + } else { + g_print ("\nInvalid custom preview resolution! Setting to prev_res.\n"); + app->capres.cus_prev_width = app->capres.preview_width; + app->capres.cus_prev_height = app->capres.preview_height; + } + } else if (!g_strcmp0 ("--svs", option_name)) { + app->svs = g_strdup (value); + } else if (!g_strcmp0 ("--saturation", option_name)) { + app->saturation = atof (value); + } else if (!g_strcmp0 ("--cap-dev-node", option_name)) { + g_free (app->cap_dev_node); + app->cap_dev_node = g_strdup (value); + set_capture_device_node (); + } else if (!g_strcmp0 ("--eglstream-id", option_name)) { + app->eglstream_producer_id = atoi (value); + } else if (!g_strcmp0 ("--overlayConfig", option_name)) { + g_free (app->overlayConfig); + app->overlayConfig = g_strdup (value); + } else if (!g_strcmp0 ("--eglConfig", option_name)) { + g_free (app->eglConfig); + app->eglConfig = g_strdup (value); + } else if (!g_strcmp0 ("--exposuretimerange", option_name)) { + g_free (app->exposure_timerange); + app->exposure_timerange = g_strdup (value); + } else if (!g_strcmp0 ("--gainrange", option_name)) { + g_free (app->gain_range); + app->gain_range = g_strdup 
(value); + } else if (!g_strcmp0 ("--ispdigitalgainrange", option_name)) { + g_free (app->isp_digital_gainrange); + app->isp_digital_gainrange = g_strdup (value); + } else if (!g_strcmp0 ("--exposurecompensation", option_name)) { + app->exposure_compensation = atof (value); + } else if (!g_strcmp0 ("--tnr-strength", option_name)) { + app->tnr_strength = atof (value); + } else if (!g_strcmp0 ("--ee-strength", option_name)) { + app->ee_strength = atof (value); + } + return TRUE; +} + +/** + * get the max capture resolutions + * + * @param res : resolution index + */ +static void +get_max_resolution (gint res, gint * width, gint * height) +{ + if (app->use_cus_res) { + *width = app->capres.cus_prev_width; + *height = app->capres.cus_prev_height; + } else { + *width = image_capture_width[res]; + *height = image_capture_height[res]; + } +} + +/** + * get the preview capture resolutions + * + * @param res : resolution index + */ +gboolean +get_preview_resolution (gint res) +{ + gboolean ret = TRUE; + + if ( (app->cam_src == NV_CAM_SRC_CSI) || + (app->cam_src == NV_CAM_SRC_EGLSTREAM) ) { + if ((res < MIN_CSI_RES) || (res > MAX_CSI_RES)) { + g_print ("Invalid preview resolution\n"); + return FALSE; + } + } else { + if ((res < PR_176x144) || (res > PR_1920x1080)) { + g_print ("Invalid preview resolution\n"); + return FALSE; + } + } + + app->capres.preview_width = prevres_width[res]; + app->capres.preview_height = prevres_height[res]; + app->capres.prev_res_index = res; + return ret; +} + +static gboolean +get_image_capture_resolution (gint res) +{ + gboolean ret = TRUE; + + if ( (app->cam_src == NV_CAM_SRC_CSI) || + (app->cam_src == NV_CAM_SRC_EGLSTREAM) ) { + if ((res < IR_640x480) || (res > IR_5632x4224)) { + g_print ("Invalid image capture resolution\n"); + return FALSE; + } + } else { + if ((res < IR_176x144) || (res > IR_1920x1080)) { + g_print ("Invalid image capture resolution\n"); + return FALSE; + } + } + app->capres.image_cap_width = image_capture_width[res]; + 
app->capres.image_cap_height = image_capture_height[res]; + app->capres.img_res_index = res; + + return ret; +} + +static gboolean +get_video_capture_resolution (gint res) +{ + gboolean ret = TRUE; + + if ( (app->cam_src == NV_CAM_SRC_CSI) || + (app->cam_src == NV_CAM_SRC_EGLSTREAM) ){ + if ((res < VR_640x480) || (res > VR_3896x2192)) { + g_print ("Invalid video capture resolution\n"); + return FALSE; + } + } else { + if ((res < VR_176x144) || (res > VR_1280x720)) { + g_print ("Invalid video capture resolution\n"); + return FALSE; + } + } + app->capres.video_cap_width = video_capture_width[res]; + app->capres.video_cap_height = video_capture_height[res]; + app->capres.vid_res_index = res; + + return ret; +} + +static gpointer +reset_elements (gpointer data) +{ + gst_element_set_state (app->ele.venc_q, GST_STATE_READY); + gst_element_set_state (app->ele.vid_bin, GST_STATE_READY); + gst_element_set_state (app->ele.svc_vidbin, GST_STATE_READY); + + gst_element_sync_state_with_parent (app->ele.venc_q); + gst_element_sync_state_with_parent (app->ele.vid_bin); + gst_element_sync_state_with_parent (app->ele.svc_vidbin); + + return NULL; +} + +/** + * handler on the bus + * + * @param bus : a GstBus + * @param msg : the GstMessage + * @param data : user data that has been given + */ +static GstBusSyncReply +bus_sync_handler (GstBus * bus, GstMessage * msg, gpointer data) +{ + switch (GST_MESSAGE_TYPE (msg)) { + case GST_MESSAGE_ELEMENT: + if (GST_MESSAGE_SRC (msg) == GST_OBJECT (app->ele.camera)) { + const GstStructure *structure; + structure = gst_message_get_structure (msg); + if (gst_structure_has_name (structure, "image-done")) { + NVGST_INFO_MESSAGE ("image-capture-done"); + g_mutex_lock (app->lock); + + recording = FALSE; + g_cond_signal (app->cond); + g_mutex_unlock (app->lock); + + } else if (gst_structure_has_name (structure, "video-done")) { + NVGST_INFO_MESSAGE ("video-capture-done"); + } else if (gst_structure_has_name (structure, "GstBinForwarded")) { + 
GstMessage *child_msg; + + if (gst_structure_has_field (structure, "message")) { + const GValue *val = gst_structure_get_value (structure, "message"); + if (G_VALUE_TYPE (val) == GST_TYPE_MESSAGE) { + child_msg = (GstMessage *) g_value_get_boxed (val); + if (GST_MESSAGE_TYPE (child_msg) == GST_MESSAGE_EOS && + GST_MESSAGE_SRC (child_msg) == GST_OBJECT (app->ele.vid_bin)) + { + if (app->reset_thread) + g_thread_unref (app->reset_thread); + app->reset_thread = g_thread_new (NULL, reset_elements, NULL); + + } + } + } + + } + } + return GST_BUS_PASS; + + default: + return GST_BUS_PASS; + } +} + +/** + * Handle received message + * + * @param bus : a GstBus + * @param msg : the GstMessage + * @param data : user data that has been given + */ +static gboolean +bus_call (GstBus * bus, GstMessage * msg, gpointer data) +{ + switch (GST_MESSAGE_TYPE (msg)) { + case GST_MESSAGE_ERROR: + { + GError *err = NULL; + gchar *name, *debug = NULL; + + name = gst_object_get_path_string (msg->src); + gst_message_parse_error (msg, &err, &debug); + + g_printerr ("ERROR on bus: by %s: %s\n", name, err->message); + if (debug != NULL) + g_printerr ("debug info:\n%s\n", debug); + + g_error_free (err); + g_free (debug); + g_free (name); + + app->return_value = -1; + g_main_loop_quit (loop); + } + break; + + case GST_MESSAGE_STATE_CHANGED: + { + GstState old, new_state, pending; + + gst_message_parse_state_changed (msg, &old, &new_state, &pending); + + GST_DEBUG_OBJECT (GST_OBJECT (msg->src), + "changed state from %s to %s, pending %s\n", + gst_element_state_get_name (old), gst_element_state_get_name (new_state), + gst_element_state_get_name (pending)); + if (GST_MESSAGE_SRC (msg) == GST_OBJECT (app->ele.camera) + && pending == GST_STATE_VOID_PENDING && old == GST_STATE_PAUSED + && new_state == GST_STATE_PLAYING) { + } + } + break; + + case GST_MESSAGE_EOS: + { + if (app->mode == CAPTURE_VIDEO && app->cam_src != NV_CAM_SRC_CSI) + restart_capture_pipeline (); + + if (app->mode == CAPTURE_IMAGE) 
{
+        g_mutex_lock (app->lock);
+
+        recording = FALSE;
+        g_cond_signal (app->cond);
+
+        g_mutex_unlock (app->lock);
+      }
+    }
+      break;
+
+    case GST_MESSAGE_APPLICATION:
+    {
+      const GstStructure *s;
+      s = gst_message_get_structure (msg);
+
+      if (gst_structure_has_name (s, "NvGstAppInterrupt")) {
+        g_print ("Terminating the camera pipeline ...\n");
+        g_main_loop_quit (loop);
+      }
+    }
+      break;
+
+    case GST_MESSAGE_ELEMENT:
+      break;
+
+    default:
+      break;
+  }
+  return TRUE;
+}
+
+/**
+ * Parse "x y" coordinates from str and store them as the EGL window
+ * origin in app->disp. Ignores empty tokens produced by quoting.
+ */
+static void
+set_egl_window_config (gchar *str)
+{
+  gchar **tokens;
+  gchar **temp;
+  gchar *token;
+  GArray *array;
+  guint val, x_pos, y_pos;
+
+  if (!str) {
+    g_print ("Invalid Input\n");
+    return;
+  }
+  array = g_array_new (FALSE, FALSE, sizeof (guint));
+  tokens = g_strsplit_set (str, " \"\'", -1);
+  temp = tokens;
+  while (*temp) {
+    token = *temp++;
+    if (!g_strcmp0 (token, ""))
+      continue;
+
+    val = atoi (token);
+    g_array_append_val (array, val);
+  }
+
+  if (array->len == 2) {
+    x_pos = g_array_index (array, guint, 0);
+    y_pos = g_array_index (array, guint, 1);
+    app->disp.x = x_pos;
+    app->disp.y = y_pos;
+  } else
+    g_print ("Need two values (x, y) for EGL window coordinates\n");
+
+  /* TRUE frees the element segment too; passing FALSE here leaked it
+   * because the returned segment pointer was discarded. */
+  g_array_free (array, TRUE);
+  g_strfreev (tokens);
+}
+
+/**
+ * Parse five comma/space separated overlay values (index, x, y, w, h)
+ * from str into the corresponding app->overlay_* fields.
+ */
+static void
+set_overlay_configuration (gchar * str)
+{
+  gchar **tokens;
+  gchar **temp;
+  gchar *token;
+  GArray *array;
+  guint val;
+
+  if (!str) {
+    g_print ("Invalid Input\n");
+    return;
+  }
+  /* Elements are appended and read back as guint; size the array
+   * accordingly (was sizeof (gfloat), a type mismatch). */
+  array = g_array_new (FALSE, FALSE, sizeof (guint));
+  tokens = g_strsplit_set (str, " \"\',", -1);
+  temp = tokens;
+  while (*temp) {
+    token = *temp++;
+    if (!g_strcmp0 (token, ""))
+      continue;
+
+    val = atoi (token);
+    g_array_append_val (array, val);
+  }
+
+  if (array->len == 5) {
+    app->overlay_index = g_array_index (array, guint, 0);
+    app->overlay_x_pos = g_array_index (array, guint, 1);
+    app->overlay_y_pos = g_array_index (array, guint, 2);
+    app->overlay_width = g_array_index (array, guint, 3);
+    app->overlay_height = g_array_index (array, guint,
4); + } else + g_print ("Need five values for overlay configuration\n"); + + g_array_free (array, FALSE); + g_strfreev (tokens); +} + +static void +write_vsnap_buffer (GstElement * fsink, + GstBuffer * buffer, GstPad * pad, gpointer udata) +{ + GstMapInfo info; + + if (gst_buffer_map (buffer, &info, GST_MAP_READ)) { + if (info.size) { + FILE *fp = NULL; + gchar outfile[50]; + + memset (outfile, 0, sizeof (outfile)); + sprintf (outfile, "snapshot_%ld_s%02d_%05d.jpg", (long) getpid(), + app->sensor_id, app->capture_count++); + + CALL_GUI_FUNC (show_text, "Image saved to %s", outfile); + + fp = fopen (outfile, "wb"); + if (fp == NULL) { + g_print ("Can't open file for Image Capture!\n"); + app->cap_success = FALSE; + } else { + if (info.size != fwrite (info.data, 1, info.size, fp)) { + g_print ("Can't write data in file!\n"); + app->cap_success = FALSE; + fclose (fp); + if (remove (outfile) != 0) + g_print ("Unable to delete the file\n"); + } else { + app->cap_success = TRUE; + fclose (fp); + } + } + } + + gst_buffer_unmap (buffer, &info); + g_mutex_lock (app->lock); + snapshot = FALSE; + g_cond_signal (app->cond); + g_mutex_unlock (app->lock); + } else { + NVGST_WARNING_MESSAGE ("video snapshot buffer map failed\n"); + } +} + + +void +trigger_vsnap_capture (void) +{ + if (app->mode != CAPTURE_VIDEO || recording == FALSE) { + g_print ("snapshot is only possible while recording video\n"); + return; + } + + if (app->cam_src != NV_CAM_SRC_CSI) { + g_print ("Video snapshot is supported for CSI camera only\n"); + return; + } + + snapshot = TRUE; + app->cap_success = FALSE; + gst_element_set_state (app->ele.img_sink, GST_STATE_NULL); + /* Set Video Snapshot Mode */ + g_signal_emit_by_name (G_OBJECT (app->ele.cap_tee), "take-vsnap"); + + g_mutex_lock (app->lock); + while (snapshot) { + g_cond_wait (app->cond, app->lock); + } + g_mutex_unlock (app->lock); + + /* Back to Video Mode */ + if (app->cap_success == TRUE) + g_print ("Video Snapshot Captured \n"); +} + +void 
+trigger_image_capture (void) +{ + g_mutex_lock (app->lock); + recording = TRUE; + app->cap_success = FALSE; + + app->capcount = 0; + app->native_record = GST_PAD_PROBE_OK; + + if (app->cam_src == NV_CAM_SRC_CSI) { + gst_element_set_state (app->ele.vsnap_sink, GST_STATE_NULL); + g_signal_emit_by_name (G_OBJECT (app->ele.cap_tee), "start-capture"); + } + + while (recording) { + g_cond_wait (app->cond, app->lock); + } + + g_mutex_unlock (app->lock); + + if (app->cap_success == TRUE) + g_print ("Image Captured \n"); +} + + +/** +* Reset KPI flags to start +* new measurements +*/ +static void reset_kpi_flags (void) +{ + app->frameCount = 0; + app->currentFrameTime = 0; + app->prevFrameTime = 0; + app->encFrameCount = 0; + app->currentEncFrameTime = 0; + app->prevEncFrameTime = 0; +} + +/** + * Compute preview frame rate + * @param void + */ +static void compute_frame_rate (void) +{ + gfloat avgFrameTime = 0; + gfloat frameRate = 0; + + if (app->enableKpiNumbers) { + + if (app->mode != CAPTURE_VIDEO) { + if (app->frameCount > 0) { + app->frameCount--; + } + + avgFrameTime = (app->frameCount == 0) ? 0 : + ((gfloat)(app->accumulator) / (gfloat)(app->frameCount)); + frameRate = (avgFrameTime == 0) ? 0 : (gfloat)(1000 / avgFrameTime); + g_print("\nKPI average frame rate: %.2lf\n", frameRate); + } + else { + if (app->encFrameCount > 0) { + app->encFrameCount--; + } + + avgFrameTime = (app->encFrameCount == 0) ? 0 : + ((gfloat)(app->encAccumulator) / (gfloat)(app->encFrameCount)); + frameRate = (avgFrameTime == 0) ? 
0 : (gfloat)(1000 / avgFrameTime); + g_print("\nKPI average frame rate: %.2lf\n", frameRate); + } + } +} + +void +start_video_capture (void) +{ + reset_kpi_flags (); + set_new_file_name (app->file_type); + recording = TRUE; + app->native_record = GST_PAD_PROBE_OK; + if (app->cam_src == NV_CAM_SRC_CSI) { + /* Set Video Mode */ + g_signal_emit_by_name (G_OBJECT (app->ele.cap_tee), "start-capture"); + } + CALL_GUI_FUNC (start_record); +} + +void +stop_video_capture (void) +{ + if (!app->aut.automate) { + compute_frame_rate (); + } + recording = FALSE; + app->native_record = GST_PAD_PROBE_DROP; + if (app->cam_src == NV_CAM_SRC_CSI) { + g_signal_emit_by_name (G_OBJECT (app->ele.cap_tee), "stop-capture"); + gst_pad_send_event (gst_element_get_static_pad (app->ele.venc_q, "sink"), + gst_event_new_eos ()); + } else { + gst_pad_send_event (gst_element_get_static_pad (app->ele.venc_q, "sink"), + gst_event_new_eos ()); + gst_pad_send_event (gst_element_get_static_pad (app->ele.vsink, "sink"), + gst_event_new_eos ()); + } + g_print ("\nRecording Stopped\n"); + CALL_GUI_FUNC (stop_record); +} + +void +set_mode (gint newMode) +{ + if (newMode != 1 && newMode != 2) { + newMode = NVGST_DEFAULT_CAPTURE_MODE; + g_print ("Invalid input mode, setting mode to image-capture = 1 \n"); + } + g_print ("Changing capture mode to %d\n", newMode); + g_print ("(1): image\n(2): video\n"); + + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_set (app->ele.cap_tee, "mode", newMode, NULL); + } else { + destroy_capture_pipeline (); + g_usleep (250000); + app->mode = newMode; + if (!create_capture_pipeline ()) { + app->return_value = -1; + g_main_loop_quit (loop); + } + } + app->mode = newMode; +} + +gboolean +set_preview_resolution (int new_res) +{ + GstCaps *caps = NULL; + gint width = 0, height = 0; + if (new_res == app->capres.prev_res_index) { + g_print ("\nAlready on same preview resolution\n"); + return TRUE; + } + if (!get_preview_resolution (new_res)) + return FALSE; + + g_object_get 
(app->ele.svc_prevconv_out_filter, "caps", &caps, NULL); + caps = gst_caps_make_writable (caps); + + gst_caps_set_simple (caps, "width", G_TYPE_INT, + app->capres.preview_width, "height", G_TYPE_INT, + app->capres.preview_height, NULL); + + g_object_set (app->ele.svc_prevconv_out_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + if (camera_need_reconfigure (new_res, CAPTURE_PAD_PREV)) { + g_object_get (app->ele.cap_filter, "caps", &caps, NULL); + caps = gst_caps_make_writable (caps); + + get_max_resolution (app->capres.current_max_res, &width, &height); + gst_caps_set_simple (caps, "width", G_TYPE_INT, + width, "height", G_TYPE_INT, height, NULL); + + g_object_set (app->ele.cap_filter, "caps", caps, NULL); + gst_caps_unref (caps); + } + +#if !GUI +{ + GstElement *vsink = app->ele.vsink; + + if (vsink && GST_IS_VIDEO_OVERLAY (vsink)) { + if (app->capres.preview_width < app->disp.display_width + || app->capres.preview_height < app->disp.display_height) { + app->disp.width = app->capres.preview_width; + app->disp.height = app->capres.preview_height; + } else { + app->disp.width = app->disp.display_width; + app->disp.height = app->disp.display_height; + } + g_mutex_lock (app->lock); + + if (app->disp.window) + nvgst_destroy_window (&app->disp); + nvgst_create_window (&app->disp, "nvgstcapture-1.0"); + gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), + (gulong) app->disp.window); + gst_video_overlay_expose (GST_VIDEO_OVERLAY (vsink)); + + g_mutex_unlock (app->lock); + } +} +#endif + g_print ("Preview resolution = %d x %d\n", + app->capres.preview_width, app->capres.preview_height); + + return TRUE; +} + +gboolean +set_image_resolution (int new_res) +{ + GstCaps *caps = NULL; + gint width = 0, height = 0; + if (new_res == app->capres.img_res_index) { + g_print ("\nAlready on same image capture resolution\n"); + return TRUE; + } + if (!get_image_capture_resolution (new_res)) + return FALSE; + + //configure image + g_object_get 
(app->ele.svc_imgvconv_out_filter, "caps", &caps, NULL); + caps = gst_caps_make_writable (caps); + + gst_caps_set_simple (caps, "width", G_TYPE_INT, + app->capres.image_cap_width, "height", G_TYPE_INT, + app->capres.image_cap_height, NULL); + + g_object_set (app->ele.svc_imgvconv_out_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + if (camera_need_reconfigure (new_res, CAPTURE_PAD_IMAGE)) { + g_object_get (app->ele.cap_filter, "caps", &caps, NULL); + caps = gst_caps_make_writable (caps); + + get_max_resolution (app->capres.current_max_res, &width, &height); + gst_caps_set_simple (caps, "width", G_TYPE_INT, + width, "height", G_TYPE_INT, height, NULL); + + g_object_set (app->ele.cap_filter, "caps", caps, NULL); + gst_caps_unref (caps); + } + + g_print ("Image Capture Resolution = %d x %d\n", + app->capres.image_cap_width, app->capres.image_cap_height); + return TRUE; +} + +gboolean +set_video_resolution (int new_res) +{ + GstCaps *caps = NULL; + gint width = 0, height = 0; + if (new_res == app->capres.vid_res_index) { + g_print ("\nAlready on same video capture resolution\n"); + return TRUE; + } + if (!get_video_capture_resolution (new_res)) + return FALSE; + + //configure video + g_object_get (app->ele.svc_vidvconv_out_filter, "caps", &caps, NULL); + caps = gst_caps_make_writable (caps); + + gst_caps_set_simple (caps, "width", G_TYPE_INT, + app->capres.video_cap_width, "height", G_TYPE_INT, + app->capres.video_cap_height, NULL); + + g_object_set (app->ele.svc_vidvconv_out_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + if (camera_need_reconfigure (new_res, CAPTURE_PAD_VIDEO)) { + g_object_get (app->ele.cap_filter, "caps", &caps, NULL); + caps = gst_caps_make_writable (caps); + + get_max_resolution (app->capres.current_max_res, &width, &height); + gst_caps_set_simple (caps, "width", G_TYPE_INT, + width, "height", G_TYPE_INT, height, NULL); + + g_object_set (app->ele.cap_filter, "caps", caps, NULL); + gst_caps_unref (caps); + } + g_print ("Video 
Capture Resolution = %d x %d\n", + app->capres.video_cap_width, app->capres.video_cap_height); + return TRUE; +} + +void +set_saturation (gfloat dval) +{ + app->saturation = dval; + g_object_set (G_OBJECT (app->ele.vsrc), "saturation", dval, NULL); +} + +void +set_exposure_saturation (gfloat dval) +{ + app->exposure_compensation = dval; + g_object_set (G_OBJECT (app->ele.vsrc), "exposurecompensation", dval, NULL); +} + +void +set_whitebalance (gint val) +{ + app->whitebalance = val; + g_object_set (G_OBJECT (app->ele.vsrc), "wbmode", val, NULL); +} + +void +set_timeout(gint val) +{ + app->timeout = val; + g_object_set (G_OBJECT (app->ele.vsrc), "timeout", val, NULL); +} + +static void +set_flip (gint val) +{ + app->flip_method = val; + g_object_set (G_OBJECT (app->ele.svc_prevconv), "flip-method", val, NULL); + g_object_set (G_OBJECT (app->ele.svc_imgvconv), "flip-method", val, NULL); + g_object_set (G_OBJECT (app->ele.svc_vidvconv), "flip-method", val, NULL); + g_object_set (G_OBJECT (app->ele.svc_snapconv), "flip-method", val, NULL); +} + +/** + * Handle on input commands. 
+ * + * @param ichannel : a GIOChannel + * @param cond : the condition to watch for + * @param data : user data passed + */ +static gboolean +on_input (GIOChannel * ichannel, GIOCondition cond, gpointer data) +{ + static gchar buf[256]; + int bytes_read; + gint fd; + gint val; + guint br; + gfloat dval; + gchar *gval; + + fd = g_io_channel_unix_get_fd (ichannel); + bytes_read = read (fd, buf, 256); + buf[bytes_read - 1] = '\0'; + + if (g_str_has_prefix (buf, "h")) { + print_help (); + + } else if (buf[0] == 'q') { + if (app->mode != CAPTURE_VIDEO) { + compute_frame_rate (); + } + g_main_loop_quit (loop); + + } else if (buf[0] == '1' && app->mode == CAPTURE_VIDEO && recording == FALSE) { + start_video_capture (); + g_print + ("\nRecording Started, Enter (0) to stop OR (2) to take snapshot \n"); + + } else if (buf[0] == 'f' && app->mode == CAPTURE_VIDEO && recording == TRUE) { + g_print ("Forcing IDR on video encoder\n"); + g_signal_emit_by_name (G_OBJECT (app->ele.vid_enc), "force-IDR"); + + } else if (buf[0] == '2' && app->mode == CAPTURE_VIDEO && recording == TRUE) { + trigger_vsnap_capture (); + + } else if (buf[0] == '0' && recording == TRUE) { + stop_video_capture (); + + } else if (buf[0] == 'j' && app->mode == CAPTURE_IMAGE && recording == FALSE) { + gint n = 0; + gint count = 1; + gchar *str; + gint stime = 0; + + str = g_strrstr (buf, ":"); + if (str) { + count = atoi (str + 1); + } + str = g_strrstr (buf, "x"); + if (str) { + stime = atoi (str + 1); + if (stime < 500) + stime = 500; + g_usleep (stime * 1000 - 500000); + } + + while (n++ < count) { + trigger_image_capture (); + if (app->cap_success != TRUE) + break; + if (app->return_value == -1) + break; + g_usleep (250000); + } + } else if (recording == FALSE) { + if (g_str_has_prefix (buf, "mo:")) { + gint newMode; + newMode = atoi (buf + 3); + if (newMode == app->mode) { + g_print ("Already in this mode\n"); + return TRUE; + } + set_mode (newMode); + } else if (!g_strcmp0 (buf, "gmo")) { + g_print ("mo 
= %d\n", app->mode); + g_print ("(1): image\n(2): video\n"); + } else if (g_str_has_prefix (buf, "pcr:")) { + if (app->cam_src == NV_CAM_SRC_CSI) + g_print ("-------> Not supported <------ \n"); + else { + if (TRUE != get_preview_resolution (atoi (buf + 4))) + goto invalid_input; + g_print ("w = %d, h = %d\n", app->capres.preview_width, + app->capres.preview_height); + restart_capture_pipeline (); + } + } else if (!g_strcmp0 (buf, "gpcr")) { + g_print ("w = %d, h = %d\n", app->capres.preview_width, + app->capres.preview_height); + } else if (!g_strcmp0 (buf, "gicr")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_print ("w = %d, h = %d\n", app->capres.image_cap_width, + app->capres.image_cap_height); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gvcr")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_print ("w = %d, h = %d\n", app->capres.video_cap_width, + app->capres.video_cap_height); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "so:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 3); + if (val < 0 || val > 3) { + val = NVGST_DEFAULT_FLIP_METHOD; + g_print ("Invalid input value of sensor orientation, setting orientation" + " to default = 2 \n"); + } + g_print ("sensor orientation = %d\n", val); + set_flip (val); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gso")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_print ("sensor orientation = %d\n", app->flip_method); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "wb:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 3); + if (val < 0 || val > 9) { + val = NVGST_DEFAULT_WHITEBALANCE; + g_print ("Invalid input value of white balance, setting white-balance" + " to auto-value =1 \n"); + } + g_print ("whitebalance = %d\n", val); + set_whitebalance (val); + } else { + g_print ("-------> Not 
supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gwb")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "wbmode", &val, NULL); + app->whitebalance = val; + g_print ("whitebalance = %d\n", app->whitebalance); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "st:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + dval = atof (buf + 3); + if (dval < 0 || dval > 2) { + dval = NVGST_DEFAULT_SATURATION; + g_print ("Invalid input value of saturation, setting saturation" + " to default = 1 \n"); + } + g_print ("saturation = %f\n", dval); + set_saturation (dval); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gst")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "saturation", &dval, NULL); + app->saturation = dval; + g_print ("saturation = %f\n", app->saturation); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "ec:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + dval = atof (buf + 3); + if (dval < MIN_EXPOSURE_COMPENSATION || dval > MAX_EXPOSURE_COMPENSATION) { + dval = NVGST_DEFAULT_EXPOSURE_COMPENSATION; + g_print ("Invalid input value of exposure compensation, setting " + " to default = 0 \n"); + } + g_print ("exposure compensation = %f\n", dval); + set_exposure_saturation (dval); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gec")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "exposurecompensation", &dval, NULL); + app->exposure_compensation = dval; + g_print ("exposure compensation = %f\n", app->exposure_compensation); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "ael:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 4); + if (val < 0 || val > 1) { + val = NVGST_DEFAULT_AE_LOCK; + 
g_print ("Invalid input value of AE LOCK, setting " + " to default = 0 \n"); + } + g_print ("AE Lock = %d\n", val); + app->enableAeLock = val; + g_object_set (G_OBJECT (app->ele.vsrc), "aelock", app->enableAeLock, NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gael")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "aelock", &val, NULL); + app->enableAeLock = val; + g_print ("AE Lock = %d\n", app->enableAeLock); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "awbl:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 5); + if (val < 0 || val > 1) { + val = NVGST_DEFAULT_AWB_LOCK; + g_print ("Invalid input value of AWB LOCK, setting " + " to default = 0 \n"); + } + g_print ("AWB Lock = %d\n", val); + app->enableAwbLock = val; + g_object_set (G_OBJECT (app->ele.vsrc), "awblock", app->enableAwbLock, NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gawbl")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "awblock", &val, NULL); + app->enableAwbLock = val; + g_print ("AWB Lock = %d\n", app->enableAwbLock); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "tnrm:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 5); + if (val < MIN_TNR_MODE || val > MAX_TNR_MODE) { + val = NVGST_DEFAULT_TNR_MODE; + g_print ("Invalid input value of TNR Mode, setting " + " to default = 0 \n"); + } + g_print ("TNR Mode = %d\n", val); + app->tnr_mode = val; + g_object_set (G_OBJECT (app->ele.vsrc), "tnr-mode", app->tnr_mode , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gtnrm")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "tnr-mode", &val, NULL); + app->tnr_mode = val; + g_print ("TNR Mode = 
%d\n", app->tnr_mode); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "tnrs:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + dval = atof (buf + 5); + if (dval < MIN_STRENGTH || dval > MAX_STRENGTH) { + dval = NVGST_DEFAULT_TNR_STRENGTH; + g_print ("Invalid input value of TNR Strength, setting " + " to default = 0 \n"); + } + g_print ("TNR Strength = %f\n", dval); + app->tnr_strength = dval; + g_object_set (G_OBJECT (app->ele.vsrc), "tnr-strength", app->tnr_strength , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gtnrs")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "tnr-strength", &dval, NULL); + app->tnr_strength = dval; + g_print ("TNR Strength = %f\n", app->tnr_strength); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "eem:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 4); + if (val < MIN_EE_MODE || val > MAX_EE_MODE) { + val = NVGST_DEFAULT_EE_MODE; + g_print ("Invalid input value of EE Mode, setting " + " to default = 0 \n"); + } + g_print ("EE Mode = %d\n", val); + app->ee_mode = val; + g_object_set (G_OBJECT (app->ele.vsrc), "ee-mode", app->ee_mode , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "geem")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "ee-mode", &val, NULL); + app->ee_mode = val; + g_print ("EE Mode = %d\n", app->ee_mode); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "ees:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + dval = atof (buf + 4); + if (dval < MIN_STRENGTH || dval > MAX_STRENGTH) { + dval = NVGST_DEFAULT_EE_STRENGTH; + g_print ("Invalid input value of EE Strength, setting " + " to default = 0 \n"); + } + g_print ("EE Strength = %f\n", dval); + app->ee_strength = dval; + 
g_object_set (G_OBJECT (app->ele.vsrc), "ee-strength", app->ee_strength , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gees")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "ee-strength", &dval, NULL); + app->ee_strength = dval; + g_print ("EE Strength = %f\n", app->ee_strength); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "aeab:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 5); + if (val < MIN_AE_ANTIBANDING_MODE || val > MAX_AE_ANTIBANDING_MODE) { + val = NVGST_DEFAULT_AEANTIBANDING; + g_print ("Invalid input value of AE AntiBanding Mode, setting " + " to default = 0 \n"); + } + g_print ("AE Anti Banding Mode = %d\n", val); + app->ae_antibanding = val; + g_object_set (G_OBJECT (app->ele.vsrc), "aeantibanding", app->ae_antibanding , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gaeab")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "aeantibanding", &val, NULL); + app->ae_antibanding = val; + g_print ("AE Anti Banding Mode = %d\n", app->ae_antibanding); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "gr:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + app->gain_range = g_strdup (buf+3); + g_print (" Setting Gain Range = %s\n", app->gain_range); + g_object_set (G_OBJECT (app->ele.vsrc), "gainrange", app->gain_range , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "ggr")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "gainrange", &gval, NULL); + g_print ("Gain Range = %s\n", gval); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "etr:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + app->exposure_timerange = g_strdup 
(buf+4); + g_print (" Setting Exposure Time Range = %s\n", app->exposure_timerange); + g_object_set (G_OBJECT (app->ele.vsrc), "exposuretimerange", app->exposure_timerange , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "getr")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "exposuretimerange", &gval, NULL); + g_print ("Exposure Time Range = %s\n", gval); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "dgr:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + app->isp_digital_gainrange = g_strdup (buf+4); + g_print (" Setting ISP Digital Gain Range = %s\n", app->isp_digital_gainrange); + g_object_set (G_OBJECT (app->ele.vsrc), "ispdigitalgainrange", app->isp_digital_gainrange , NULL); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gdgr")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "ispdigitalgainrange", &gval, NULL); + g_print ("ISP Digital Gain Range = %s\n", gval); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "br:")) { + is_user_bitrate = 1; + dval = atof (buf + 3); + set_encoder_bitrate (dval); + } else if (!g_strcmp0 (buf, "gbr")) { + g_object_get (G_OBJECT (app->ele.vid_enc), "bitrate", &br, NULL); + app->encset.bitrate = br; + g_print ("br = %u\n", app->encset.bitrate); + } else if (g_str_has_prefix (buf, "cdn:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_print ("-------> Not supported <------ \n"); + } else { + g_free (app->cap_dev_node); + app->cap_dev_node = g_strdup (buf + 4); + set_capture_device_node (); + g_print ("cdn = %s\n", app->vidcap_device); + restart_capture_pipeline (); + } + } else if (!g_strcmp0 (buf, "gcdn")) { + g_print ("cdn = %s\n", app->vidcap_device); + } else if (g_str_has_prefix (buf, "sid:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi 
(buf + 4); + if (val < 0) { + g_print ("Invalid value for Sensor ID, using default\n"); + val = 0; + } + if (app->sensor_id != (guint) val) { + g_print ("sensor id = %d\n", val); + app->sensor_id = val; + restart_capture_pipeline (); + } else { + g_print ("sensor id %d is already set\n", val); + } + } + else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gsid")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "sensor-id", &val, NULL); + app->sensor_id = val; + g_print ("Active Sensor ID = %d\n", app->sensor_id); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "smo:")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + val = atoi (buf + 4); + if (val < NVGST_DEFAULT_SENSOR_MODE) { + g_print ("Invalid selection for sensor mode, using default\n"); + val = NVGST_DEFAULT_SENSOR_MODE; + } + if (app->sensor_mode != (guint) val) { + g_print ("sensor mode = %d\n", val); + app->sensor_mode = val; + restart_capture_pipeline (); + } else { + g_print ("sensor mode %d is already set\n", val); + } + } + else { + g_print ("-------> Not supported <------ \n"); + } + } else if (!g_strcmp0 (buf, "gsmo")) { + if (app->cam_src == NV_CAM_SRC_CSI) { + g_object_get (G_OBJECT (app->ele.vsrc), "sensor-mode", &val, NULL); + app->sensor_mode = val; + g_print ("Active Sensor Mode = %d\n", app->sensor_mode); + } else { + g_print ("-------> Not supported <------ \n"); + } + } else if (g_str_has_prefix (buf, "ep:")) { + gint newEp; + newEp = atoi (buf + 3); + set_encoder_profile (newEp); + } else if (!g_strcmp0 (buf, "gep")) { + if (app->encset.video_enc == FORMAT_H264_HW) { + switch(app->encset.video_enc_profile) { + case PROFILE_BASELINE: + g_print("Encoder Profile = Baseline\n"); + break; + case PROFILE_MAIN: + g_print("Encoder Profile = Main\n"); + break; + case PROFILE_HIGH: + g_print("Encoder Profile = High\n"); + break; + } + } else { + g_print("Profile only supported 
with H.264\n"); + } + } + } + CALL_GUI_FUNC (trigger_refresh); + return TRUE; + +invalid_input: + g_print ("Invalid input\n"); + return TRUE; +} + +void +set_capture_device_node (void) +{ + gchar *fname = g_strndup ("/dev/video", 12); + fname = strcat (fname, app->cap_dev_node); + + if (app->vidcap_device && access (fname, F_OK) != -1) { + g_free (app->vidcap_device); + app->vidcap_device = NULL; + app->vidcap_device = g_strndup ("/dev/video", 12); + app->vidcap_device = strcat (app->vidcap_device, app->cap_dev_node); + } else { + g_print ("%s does not exist\n",fname); + } + g_free (fname); +} + +static void +set_encoder_profile (H264EncProfileType profile) +{ + const gchar * profile_name; + guint profile_id; + + if (profile < PROFILE_BASELINE || profile > PROFILE_HIGH) { + g_print("Invalid value for profile\n"); + return; + } + + if (app->encset.video_enc != FORMAT_H264_HW) { + g_print("Profile only supported for H.264 encoder\n"); + return; + } + + if (app->mode == CAPTURE_VIDEO && recording) { + g_print("Cannot set profile while recording video\n"); + return; + } + + switch(profile) { + case PROFILE_BASELINE: + profile_id = 0; + profile_name = "Baseline"; + break; + case PROFILE_MAIN: + profile_id = 2; + profile_name = "Main"; + break; + case PROFILE_HIGH: + profile_id = 4; + profile_name = "High"; + break; + } + + if (app->ele.vid_enc) { + g_object_set(G_OBJECT(app->ele.vid_enc), "profile", profile_id, NULL); + } + + app->encset.video_enc_profile = profile; + + g_print("Encoder Profile = %s\n", profile_name); +} + +static void +set_encoder_bitrate (guint bitrate) +{ + if (!app->ele.vid_enc) + g_print ("Encoder null, cannot set bitrate!\n"); + else { + if (!bitrate) { /* Set default bitrates only if user has not set anything */ + if (app->capres.vid_res_index < VR_1280x720) + bitrate = NVGST_DEFAULT_480P_ENCODER_BITRATE; + else if (app->capres.vid_res_index >= VR_1280x720 + && app->capres.vid_res_index < VR_1920x1080) + bitrate = 
NVGST_DEFAULT_720P_ENCODER_BITRATE; + else if (app->capres.vid_res_index >= VR_1920x1080 + && app->capres.vid_res_index < VR_3840x2160) + bitrate = NVGST_DEFAULT_1080P_ENCODER_BITRATE; + else if (app->capres.vid_res_index >= VR_3840x2160) + bitrate = NVGST_DEFAULT_2160P_ENCODER_BITRATE; + } + app->encset.bitrate = bitrate; + g_print ("bitrate = %u\n", app->encset.bitrate); + g_object_set (G_OBJECT (app->ele.vid_enc), "bitrate", app->encset.bitrate, + NULL); +#ifdef WITH_STREAMING + if (app->streaming_mode) + g_object_set (G_OBJECT (app->ele.colorspace_conv), "bitrate", + app->encset.bitrate, NULL); +#endif + } +} + +/** + * Print runtime command options. + * + * @param void + */ +static void +print_help (void) +{ + /* Camera Runtime Options */ + if (app->cam_src == NV_CAM_SRC_CSI) + g_print ("%s\n", app->csi_options_argus); + else if (app->cam_src == NV_CAM_SRC_V4L2) + g_print ("%s\n", app->usb_options); + else if (app->cam_src == NV_CAM_SRC_TEST) + g_print ("%s\n", app->usb_options); + g_print ("%s\n", app->encoder_options); +} + +/** + * Set encode file name. 
+ * + * @param muxer_type : container type + */ +static void +set_new_file_name (int muxer_type) +{ + gchar filename[100]; + gchar * file_ext = NULL; + switch (app->mode) { + case CAPTURE_VIDEO: + switch (muxer_type) { + case FILE_MP4: + file_ext = "mp4"; + break; + case FILE_3GP: + file_ext = "3gp"; + break; + case FILE_MKV: + file_ext = "mkv"; + break; + case FILE_H265: + file_ext = "h265"; + break; + default: + file_ext = "mp4"; + break; + } + break; + case CAPTURE_IMAGE: + switch (muxer_type) { + case FORMAT_JPEG_SW: + case FORMAT_JPEG_HW: + default: + file_ext = "jpg"; + break; + } + break; + default: + g_print ("Invalid capture Mode, cannot set filename\n"); + break; + } + + sprintf (filename, "%s_%ld_s%02d_%05d.%s", app->file_name, (long) getpid(), + app->sensor_id, app->capture_count++, file_ext); + + CALL_GUI_FUNC(set_video_file_name, filename); + + gst_element_set_state (app->ele.video_sink, GST_STATE_NULL); + g_object_set (G_OBJECT (app->ele.video_sink), "location", filename, NULL); + gst_element_set_locked_state (app->ele.video_sink, FALSE); + gst_element_set_state (app->ele.video_sink, GST_STATE_PLAYING); +} + +/** + * Create image encoder element. + * + * @param iencoder : image encoder type + */ +static gboolean +get_image_encoder (GstElement ** iencoder) +{ + switch (app->encset.image_enc) { + case FORMAT_JPEG_SW: + *iencoder = gst_element_factory_make (NVGST_SW_IMAGE_ENC, NULL); + break; + case FORMAT_JPEG_HW: + *iencoder = gst_element_factory_make (NVGST_DEFAULT_IMAGE_ENC, NULL); + break; + default: + *iencoder = gst_element_factory_make (NVGST_DEFAULT_IMAGE_ENC, NULL); + break; + } + + if (!(*iencoder)) { + app->return_value = -1; + NVGST_ERROR_MESSAGE ("Can't Create image encoder element\n"); + return FALSE; + } + + return TRUE; +} + +/** + * Create video encoder element. 
+ * + * @param vencoder : video encoder type + */ +static gboolean +get_video_encoder (GstElement ** vencoder) +{ + switch (app->encset.video_enc) { + case FORMAT_H264_HW: + *vencoder = gst_element_factory_make (NVGST_PRIMARY_V4L2_H264_VENC, NULL); + set_encoder_bitrate (app->encset.bitrate); + set_encoder_profile (app->encset.video_enc_profile); + break; + case FORMAT_VP8_HW: + *vencoder = gst_element_factory_make (NVGST_PRIMARY_V4L2_VP8_VENC, NULL); + set_encoder_bitrate (app->encset.bitrate); + break; + case FORMAT_H265_HW: + *vencoder = gst_element_factory_make (NVGST_PRIMARY_V4L2_H265_VENC, NULL); + set_encoder_bitrate (app->encset.bitrate); + break; + case FORMAT_VP9_HW: + *vencoder = gst_element_factory_make (NVGST_PRIMARY_V4L2_VP9_VENC, NULL); + set_encoder_bitrate (app->encset.bitrate); + break; + default: + *vencoder = gst_element_factory_make (NVGST_PRIMARY_V4L2_H264_VENC, NULL); + break; + } + + g_object_set (*vencoder, "control-rate", app->encset.controlrate, NULL); + if (app->encset.enabletwopassCBR) + g_object_set (*vencoder, "EnableTwopassCBR", app->encset.enabletwopassCBR, NULL); + g_print ("Encoder control-rate = %u\n", app->encset.controlrate); + g_print ("Encoder EnableTwopassCBR = %d\n", app->encset.enabletwopassCBR); + + if (!(*vencoder)) { + app->return_value = -1; + NVGST_ERROR_MESSAGE ("Can't Create video encoder element\n"); + return FALSE; + } + + return TRUE; +} + +static gboolean +get_parser (GstElement ** parser) +{ + switch (app->encset.video_enc) { + case FORMAT_H264_HW: + *parser = gst_element_factory_make (NVGST_PRIMARY_H264_PARSER, NULL); + break; + case FORMAT_H265_HW: + *parser = gst_element_factory_make (NVGST_PRIMARY_H265_PARSER, NULL); + break; + default: + *parser = gst_element_factory_make (NVGST_PRIMARY_IDENTITY, NULL); + break; + } + + return TRUE; +} + +/** + * Create muxer element. 
+ * + * @param muxer : mux file type + */ +static gboolean +get_muxer (GstElement ** muxer) +{ + if (app->encset.video_enc == FORMAT_VP9_HW) { + if (app->file_type != FILE_MKV) { + NVGST_WARNING_MESSAGE + ("VP9 is only supported format with MKV in current GST version. " + "Selecting MKV as container\n"); + app->file_type = FILE_MKV; + } + } + + app->muxer_is_identity = FALSE; + + switch (app->file_type) { + case FILE_MP4: + *muxer = gst_element_factory_make (NVGST_PRIMARY_MP4_MUXER, NULL); + break; + case FILE_3GP: + *muxer = gst_element_factory_make (NVGST_PRIMARY_3GP_MUXER, NULL); + break; + case FILE_MKV: + *muxer = gst_element_factory_make (NVGST_PRIMARY_MKV_MUXER, NULL); + break; + case FILE_H265: + *muxer = gst_element_factory_make (NVGST_PRIMARY_IDENTITY, NULL); + app->muxer_is_identity = TRUE; + break; + default: + *muxer = gst_element_factory_make (NVGST_PRIMARY_MP4_MUXER, NULL); + break; + } + + if (!(*muxer)) { + app->return_value = -1; + NVGST_ERROR_MESSAGE ("Can't Create muxer element\n"); + return FALSE; + } + + return TRUE; +} + +static gboolean +camera_need_reconfigure (int new_res, CapturePadType current_pad) +{ + int preview, video, image, temp; + if (new_res == app->capres.current_max_res) { + return FALSE; + } + + if (new_res > app->capres.current_max_res) { + app->capres.current_max_res = new_res; + return TRUE; + } + + preview = app->capres.prev_res_index; + video = app->capres.vid_res_index; + image = app->capres.img_res_index; + + temp = MAX (preview, MAX (video, image)); + + if (temp < app->capres.current_max_res) { + app->capres.current_max_res = temp; + return TRUE; + } + return FALSE; +} + +/** + * Initialize capture parameters. 
+ * + * @param void + */ +static void +capture_init_params (void) +{ + app->mode = NVGST_DEFAULT_CAPTURE_MODE; + app->file_type = NVGST_DEFAULT_FILE_TYPE; + + app->cam_src = NV_CAM_SRC_CSI; + app->cap_success = FALSE; + app->use_cus_res = FALSE; + app->svs = NULL; + + app->first_frame = FALSE; + app->enableKpiProfile = FALSE; + app->enableKpiNumbers = FALSE; + app->enableMeta = FALSE; + app->flip_method = NVGST_DEFAULT_FLIP_METHOD; + + get_preview_resolution (PR_640x480); + get_image_capture_resolution (IR_640x480); + get_video_capture_resolution (VR_640x480); + + app->encset.image_enc = NVGST_DEFAULT_IMAGE_ENCODER; + app->encset.video_enc = NVGST_DEFAULT_VIDEO_ENCODER; + app->encset.hw_enc_type = HW_V4L2_ENC; + set_encoder_bitrate (NVGST_DEFAULT_480P_ENCODER_BITRATE); + set_encoder_profile (NVGST_DEFAULT_VIDEO_ENCODER_PROFILE); + app->encset.controlrate = NVGST_DEFAULT_VIDEO_ENCODER_CONTROLRATE; + app->encset.enabletwopassCBR = NVGST_DEFAULT_VIDEO_ENCODER_TWOPASSCBR; + + app->lock = malloc(sizeof(*(app->lock))); + g_mutex_init (app->lock); + app->cond = malloc(sizeof(*(app->cond))); + g_cond_init (app->cond); + app->x_cond = malloc(sizeof(*(app->x_cond))); + g_cond_init (app->x_cond); + + app->native_record = GST_PAD_PROBE_DROP; + app->file_name = g_strdup (NVGST_DEFAULT_FILENAME); + app->vidcap_device = g_strdup (NVGST_DEFAULT_VIDCAP_DEVICE); + app->overlayConfig = NULL; + app->eglstream_producer_id = EGLSTREAM_PRODUCER_ID_SCF_CAMERA; + app->eglConfig = NULL; + app->color_format = -1; + app->color_format_csi = CAPTURE_NV12; + app->color_format_v4l2 = CAPTURE_YUY2; + + /* CSI Camera Default Property Values */ + app->whitebalance = NVGST_DEFAULT_WHITEBALANCE; + app->saturation = NVGST_DEFAULT_SATURATION; + app->sensor_id = NVGST_DEFAULT_SENSOR_ID; + app->sensor_mode = NVGST_DEFAULT_SENSOR_MODE; + app->display_id = NVGST_DEFAULT_DISPLAY_ID; + app->exposure_timerange = NULL; + app->gain_range = NULL; + app->isp_digital_gainrange = NULL; + app->enableAeLock = FALSE; + 
app->enableAwbLock = FALSE; + app->exposure_compensation = NVGST_DEFAULT_EXPOSURE_COMPENSATION; + app->ae_antibanding = NVGST_DEFAULT_AEANTIBANDING; + app->tnr_mode = NVGST_DEFAULT_TNR_MODE; + app->ee_mode = NVGST_DEFAULT_EE_MODE; + app->ee_strength = NVGST_DEFAULT_EE_STRENGTH; + app->tnr_strength = NVGST_DEFAULT_TNR_STRENGTH; + app->framerate = NVGST_DEFAULT_CAPTURE_FPS; + + /* Automation initialization */ + app->aut.automate = NVGST_DEFAULT_AUTOMATION_MODE; + app->aut.capture_start_time = NVGST_DEFAULT_CAP_START_DELAY; + app->aut.quit_time = NVGST_DEFAULT_QUIT_TIME; + app->aut.iteration_count = NVGST_DEFAULT_ITERATION_COUNT; + app->aut.capture_gap = NVGST_DEFAULT_CAPTURE_GAP; + app->aut.capture_time = NVGST_DEFAULT_CAPTURE_TIME; + app->aut.toggle_mode = NVGST_DEFAULT_TOGGLE_CAMERA_MODE; + app->aut.toggle_sensor = NVGST_DEFAULT_TOGGLE_CAMERA_SENSOR; + app->aut.toggle_sensor_modes = NVGST_DEFAULT_TOGGLE_CAMERA_SENSOR_MODES; + app->aut.num_sensors = NVGST_DEFAULT_NUM_SENSORS; + app->aut.enum_wb = NVGST_DEFAULT_ENUMERATE_WHITEBALANCE; + app->aut.enum_st = NVGST_DEFAULT_ENUMERATE_SATURATION; + app->aut.capture_auto = NVGST_DEFAULT_ENUMERATE_CAPTURE_AUTO; + + app->csi_options_argus = g_strdup ("Supported resolutions in case of ARGUS Camera\n" + " (2) : 640x480\n" + " (3) : 1280x720\n" + " (4) : 1920x1080\n" + " (5) : 2104x1560\n" + " (6) : 2592x1944\n" + " (7) : 2616x1472\n" + " (8) : 3840x2160\n" + " (9) : 3896x2192\n" + " (10): 4208x3120\n" + " (11): 5632x3168\n" + " (12): 5632x4224\n" + "\nRuntime ARGUS Camera Commands:\n\n" + " Help : 'h'\n" + " Quit : 'q'\n" + " Set Capture Mode:\n" + " mo:\n" + " (1): image\n" + " (2): video\n" + " Get Capture Mode:\n" + " gmo\n" + " Set sensor orientation:\n" + " so:\n" + " (0): none\n" + " (1): Rotate counter-clockwise 90 degrees\n" + " (2): Rotate 180 degrees\n" + " (3): Rotate clockwise 90 degrees\n" + " Get sensor orientation:\n" + " gso\n" + " Set sensor mode:\n" + " smo: e.g., smo:1\n" + " Get sensor mode:\n" + " gsmo\n" + 
" Set Whitebalance Mode:\n" + " wb:\n" + " (0): off\n" + " (1): auto\n" + " (2): incandescent\n" + " (3): fluorescent\n" + " (4): warm-fluorescent\n" + " (5): daylight\n" + " (6): cloudy-daylight\n" + " (7): twilight\n" + " (8): shade\n" + " (9): manual\n" + " Get Whitebalance Mode:\n" + " gwb\n" + " Set Saturation (0 to 2):\n" + " st: e.g., st:1.25\n" + " Get Saturation:\n" + " gst\n" + " Set Exposure Compensation (-2 to 2):\n" + " ec: e.g., ec:-2\n" + " Get Exposure Compensation:\n" + " gec\n" + " Set Auto Whitebalance Lock:\n" + " awbl: e.g., awbl:0\n" + " Get Auto Whitebalance Lock:\n" + " awbl\n" + " Set Auto Exposure Lock:\n" + " ael: e.g., ael:0\n" + " Get Auto Exposure Lock:\n" + " gael\n" + " Set TNR Mode:\n" + " tnrm: e.g., tnrm:1\n" + " (0): OFF\n" + " (1): FAST\n" + " (2): HIGH QUALITY\n" + " Get TNR Mode:\n" + " gtnrm\n" + " Set TNR Strength (-1 to 1):\n" + " tnrs: e.g., tnrs:0.5\n" + " Get TNR Strength:\n" + " gtnrs\n" + " Set EE Mode:\n" + " eem: e.g., eem:1\n" + " (0): OFF\n" + " (1): FAST\n" + " (2): HIGH QUALITY\n" + " Get EE Mode:\n" + " geem\n" + " Set EE Strength (-1 to 1):\n" + " ees: e.g., ees:0.5\n" + " Get EE Strength:\n" + " gees\n" + " Set Auto Exposure Anti-Banding (0 to 3):\n" + " aeab: e.g., aeab:2\n" + " (0): OFF\n" + " (1): MODE AUTO\n" + " (2): MODE 50HZ\n" + " (3): MODE 60HZ\n" + " Get Auto Exposure Anti-Banding:\n" + " gaeab\n" + " Set Gain Range:\n" + " gr: e.g., gr:1 16\n" + " Get Gain Range:\n" + " ggr\n" + " Set Exposure Time Range:\n" + " etr: e.g., etr:34000 35000\n" + " Get Exposure Time Range:\n" + " getr\n" + " Set ISP Digital Gain Range:\n" + " dgr: e.g., dgr:2 152\n" + " Get ISP Digital Gain Range:\n" + " gdgr\n" + " Capture: enter 'j' OR\n" + " followed by a timer (e.g., jx5000, capture after 5 seconds) OR\n" + " followed by multishot count (e.g., j:6, capture 6 images)\n" + " timer/multihot values are optional, capture defaults to single shot with timer=0s\n" + " Start Recording : enter '1'\n" + " Stop Recording : 
enter '0'\n" + " Video snapshot : enter '2' (While recording video)\n" + " Get Preview Resolution:\n" " gpcr\n" + " Get Image Capture Resolution:\n" " gicr\n" + " Get Video Capture Resolution:\n" " gvcr\n\n"); + app->csi_resolution = g_strdup ("Supported resolutions in case of NvArgusCamera\n" + " (2) : 640x480\n" + " (3) : 1280x720\n" + " (4) : 1920x1080\n" + " (5) : 2104x1560\n" + " (6) : 2592x1944\n" + " (7) : 2616x1472\n" + " (8) : 3840x2160\n" + " (9) : 3896x2192\n" + " (10): 4208x3120\n" + " (11): 5632x3168\n" + " (12): 5632x4224\n"); + + app->usb_options = g_strdup ("Runtime USB Camera Commands:\n\n" + " Help : 'h'\n" + " Quit : 'q'\n" + " Set Capture Mode:\n" + " mo:\n" + " (1): image\n" + " (2): video\n" + " Get Capture Mode:\n" + " gmo\n" + " Capture: enter 'j' OR\n" + " followed by a timer (e.g., jx5000, capture after 5 seconds) OR\n" + " followed by multishot count (e.g., j:6, capture 6 images)\n" + " timer/multihot values are optional, capture defaults to single shot with timer=0s\n" + " Start Recording : enter '1'\n" + " Stop Recording : enter '0'\n" + " Set Preview Resolution:\n" + " pcr: e.g., pcr:2\n" + " (0) : 176x144\n" + " (1) : 320x240\n" + " (2) : 640x480\n" + " (3) : 1280x720\n" + " (4) : 1920x1080\n" + " NOTE: Preview/Encode resolution will be same as Capture resolution for USB-Camera\n" + " Get Preview Resolution:\n" " gpcr\n" + " Get Image Capture Resolution:\n" " gicr\n" + " Get Video Capture Resolution:\n" " gvcr\n" + " Set Capture Device Node:\n" + " cdn: e.g., cdn:0\n" + " (0): /dev/video0\n" + " (1): /dev/video1\n" + " (2): /dev/video2\n" + " Get Capture Device Node:\n" " gcdn\n\n"); + + app->encoder_options = g_strdup ("Runtime encoder configuration options:\n\n" + " Set Encoding Bit-rate(in bytes):\n" + " br: e.g., br:4000000\n" + " Get Encoding Bit-rate(in bytes):\n" " gbr\n" + " Set Encoding Profile(only for H.264):\n" + " ep: e.g., ep:1\n" + " (0): Baseline\n" + " (1): Main\n" + " (2): High\n" + " Get Encoding Profile(only for 
H.264):\n" " gep\n" + " Force IDR Frame on video Encoder(only for H.264):\n" + " Enter 'f' \n\n"); +} + +/** + * Verification for capture parameters. + * + * @param void + */ +static gboolean +check_capture_params (void) +{ + gboolean ret = TRUE; + + if ((app->mode < 0) || + (app->capres.preview_width < 176) || + (app->capres.preview_height < 144) || + (app->encset.video_enc < FORMAT_H264_HW) || + (app->encset.image_enc < FORMAT_JPEG_SW)) + ret = FALSE; + + return ret; +} + +/** + * Write encoded image to file. + * + * @param fsink : image sink + * @param buffer : gst buffer + * @param pad : element pad + * @param udata : the gpointer to user data + */ +static void +cam_image_captured (GstElement * fsink, + GstBuffer * buffer, GstPad * pad, gpointer udata) +{ + GstMapInfo info; + + if (app->capcount == 0) { + if (gst_buffer_map (buffer, &info, GST_MAP_READ)) { + if (info.size) { + FILE *fp = NULL; + gchar outfile[100]; + gchar temp[100]; + memset (outfile, 0, sizeof (outfile)); + memset (temp, 0, sizeof (temp)); + + strncat (outfile, app->file_name, sizeof(outfile) - 1); + sprintf (outfile + strlen(outfile), "_%ld", (long) getpid()); + sprintf (temp, "_s%02d_%05d.jpg", app->sensor_id, app->capture_count++); + strcat (outfile, temp); + + CALL_GUI_FUNC (show_text, "Image saved to %s", outfile); + + fp = fopen (outfile, "wb"); + if (fp == NULL) { + g_print ("Can't open file for Image Capture!\n"); + app->cap_success = FALSE; + } else { + if (info.size != fwrite (info.data, 1, info.size, fp)) { + g_print ("Can't write data in file, No Space left on Device!\n"); + app->cap_success = FALSE; + fclose (fp); + if (remove (outfile) != 0) + g_print ("Unable to delete the file\n"); + } else { + app->cap_success = TRUE; + fclose (fp); + } + } + } + + app->capcount++; + app->native_record = GST_PAD_PROBE_DROP; + gst_buffer_unmap (buffer, &info); + + g_mutex_lock (app->lock); + recording = FALSE; + g_cond_signal (app->cond); + g_mutex_unlock (app->lock); + } else { + 
NVGST_WARNING_MESSAGE ("image buffer probe failed\n"); + } + } +} + +/** + * Buffer probe on preview. + * + * @param pad : the GstPad that is blocked + * @param info : GstPadProbeInfo + * @param udata : the gpointer to user data + */ +static GstPadProbeReturn +prev_buf_prob (GstPad * pad, GstPadProbeInfo * info, gpointer u_data) +{ + GstBuffer * buffer = NULL; + buffer = (GstBuffer *) info->data; + AuxData *sensor_metadata = NULL; + GQuark gst_buffer_metadata_quark = 0; + + gst_buffer_metadata_quark = g_quark_from_static_string ("GstBufferMetaData"); + sensor_metadata = (AuxData *) gst_mini_object_get_qdata (GST_MINI_OBJECT_CAST (buffer), + gst_buffer_metadata_quark); + + if(sensor_metadata && app->enableMeta) + GST_INFO_OBJECT (pad, "nvgstcapture: Frame %" G_GINT64_FORMAT "Timestamp %" G_GINT64_FORMAT"\n", + sensor_metadata->frame_num, sensor_metadata->timestamp); + + if (!app->first_frame && app->enableKpiNumbers) { + GET_TIMESTAMP(FIRST_FRAME); + app->first_frame = TRUE; + time_t launch_time = app->timeStampStore[FIRST_FRAME] - app->timeStampStore[APP_LAUNCH]; + g_print("\nKPI launch time in mS: %ld\n", (launch_time / 1000)); + } + + if (app->enableKpiNumbers) { + + if (app->currentFrameTime != 0) { + app->prevFrameTime = app->currentFrameTime; + } + + GET_TIMESTAMP(CURRENT_EVENT); + app->currentFrameTime = app->timeStampStore[CURRENT_EVENT]; + + if (app->prevFrameTime != 0) { + app->accumulator += (app->currentFrameTime - app->prevFrameTime) / 1000; + } + app->frameCount++; + } + + return GST_PAD_PROBE_OK; +} + +/** + * Buffer probe on preview. 
+ * + * @param pad : the GstPad that is blocked + * @param info : GstPadProbeInfo + * @param udata : the gpointer to user data + */ +static GstPadProbeReturn +enc_buf_prob (GstPad * pad, GstPadProbeInfo * info, gpointer u_data) +{ + if (app->enableKpiNumbers) { + + if (app->currentEncFrameTime != 0) { + app->prevEncFrameTime = app->currentEncFrameTime; + } + + GET_TIMESTAMP(CURRENT_EVENT); + app->currentEncFrameTime = app->timeStampStore[CURRENT_EVENT]; + + if (app->prevEncFrameTime != 0) { + app->encAccumulator += (app->currentEncFrameTime - app->prevEncFrameTime) / 1000; + } + app->encFrameCount++; + } + + return app->native_record; +} + +static gboolean +create_csi_cap_bin (void) +{ + GstPad *pad = NULL; + GstCaps *caps = NULL; + GstCapsFeatures *feature = NULL; + gint width = 0, height = 0; + gchar *str_color = NULL; + + app->use_eglstream = 0; + + if (app->cam_src == NV_CAM_SRC_CSI) + { + /* Create the capture source element */ + app->ele.vsrc = gst_element_factory_make (NVGST_VIDEO_CAPTURE_SRC_CSI_ARGUS, NULL); + if (!app->ele.vsrc) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_VIDEO_CAPTURE_SRC_CSI_ARGUS); + goto fail; + } + + /*CSI camera properties tuning */ + g_object_set (G_OBJECT (app->ele.vsrc), "wbmode", app->whitebalance, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "timeout", app->timeout, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "saturation", app->saturation, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "sensor-id", app->sensor_id, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "sensor-mode", app->sensor_mode, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "aelock", app->enableAeLock, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "awblock", app->enableAwbLock, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "exposurecompensation", app->exposure_compensation, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "aeantibanding", app->ae_antibanding, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "tnr-mode", 
app->tnr_mode , NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "ee-mode", app->ee_mode , NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "tnr-strength", app->tnr_strength, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "ee-strength", app->ee_strength, NULL); + + if (app->exposure_timerange != NULL) + g_object_set (G_OBJECT (app->ele.vsrc), "exposuretimerange", app->exposure_timerange, NULL); + + if (app->gain_range != NULL) + g_object_set (G_OBJECT (app->ele.vsrc), "gainrange", app->gain_range, NULL); + + if (app->isp_digital_gainrange != NULL) + g_object_set (G_OBJECT (app->ele.vsrc), "ispdigitalgainrange", app->isp_digital_gainrange, NULL); + + if (app->overlayConfig) + set_overlay_configuration (app->overlayConfig); + if (app->timeout > 0) { + /* Extra 3 seconds are required for ARGUS clean up */ + g_timeout_add_seconds (app->timeout + 3, exit_capture, NULL); + } + if (app->color_format != -1) + app->color_format_csi = app->color_format; + switch(app->color_format_csi){ + case CAPTURE_NV12: + str_color = "NV12"; + break; + case CAPTURE_I420: + default: + g_print("Color format not supported for NV_CAMERA_ARGUS camera\n"); + goto fail; + } + } + else if (app->cam_src == NV_CAM_SRC_EGLSTREAM) + { + /* Create the capture source element */ + app->ele.vsrc = gst_element_factory_make (NVGST_EGLSTREAM_CAPTURE_SRC, NULL); + if (!app->ele.vsrc) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_VIDEO_CAPTURE_SRC_TEST); + goto fail; + } + + // Pass display and stream + g_object_set (G_OBJECT (app->ele.vsrc), "display", app->display, NULL); + g_object_set (G_OBJECT (app->ele.vsrc), "eglstream", app->stream, NULL); + app->use_eglstream = 1; + app->cam_src = NV_CAM_SRC_CSI; + g_print ("Setting display=%p and EGLStream=%p EGLStream_Producer_ID=%d\n", + app->display, app->stream, app->eglstream_producer_id); + + if (app->overlayConfig) + set_overlay_configuration (app->overlayConfig); + } + + if (app->color_format != -1) + app->color_format_csi = 
app->color_format; + switch(app->color_format_csi){ + case CAPTURE_I420: + str_color = "I420"; + break; + case CAPTURE_NV12: + str_color = "NV12"; + break; + default: + g_print("Color format not supported for CSI camera\n"); + goto fail; + } + + app->ele.cap_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if (!app->ele.cap_filter) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + app->capres.current_max_res = + MAX (app->capres.prev_res_index, MAX (app->capres.vid_res_index, + app->capres.img_res_index)); + get_max_resolution (app->capres.current_max_res, &width, &height); + caps = + gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, str_color, + "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate", + GST_TYPE_FRACTION, app->framerate, 1, NULL); + + feature = gst_caps_features_new ("memory:NVMM", NULL); + gst_caps_set_features (caps, 0, feature); + + /* Set capture caps on capture filter */ + g_object_set (app->ele.cap_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + /* Create capture pipeline bin */ + app->ele.capbin = gst_bin_new ("cap_bin"); + gst_bin_add_many (GST_BIN (app->ele.capbin), app->ele.vsrc, + app->ele.cap_filter, NULL); + + if ((gst_element_link (app->ele.vsrc, app->ele.cap_filter)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link nvcamsrc & caps filter\n"); + goto fail; + } + + pad = gst_element_get_static_pad (app->ele.cap_filter, "src"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static src pad of capture filter\n"); + goto fail; + } + gst_element_add_pad (app->ele.capbin, gst_ghost_pad_new ("src", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + + +/** + * Create capture bin + * + * @param void : + */ +static gboolean +create_cap_bin (void) +{ + GstPad *pad = NULL; + GstCaps *caps = NULL; + gchar *str_color = NULL; + /* Create the 
capture source element */ + if (app->cam_src == NV_CAM_SRC_TEST) { + app->ele.vsrc = + gst_element_factory_make (NVGST_VIDEO_CAPTURE_SRC_TEST, NULL); + if (!app->ele.vsrc) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_VIDEO_CAPTURE_SRC_TEST); + goto fail; + } + g_object_set (G_OBJECT (app->ele.vsrc), "is-live", TRUE, NULL); + } else { + app->ele.vsrc = + gst_element_factory_make (NVGST_VIDEO_CAPTURE_SRC_V4L2, NULL); + if (!app->ele.vsrc) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_VIDEO_CAPTURE_SRC_V4L2); + goto fail; + } + g_object_set (G_OBJECT (app->ele.vsrc), "device", app->vidcap_device, NULL); + } + + app->ele.cap_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if (!app->ele.cap_filter) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + if (app->color_format != -1) + app->color_format_v4l2 = app->color_format; + + switch(app->color_format_v4l2){ + case CAPTURE_I420 : + str_color = "I420"; + break; + case CAPTURE_YUY2 : + str_color = "YUY2"; + break; + case CAPTURE_UYVY : + str_color = "UYVY"; + break; + default : + g_print("Color format is not supported on V4L2 \n"); + goto fail; + } + + caps = gst_caps_new_simple (NVGST_DEFAULT_VIDEO_MIMETYPE, + "format", G_TYPE_STRING, str_color, + "width", G_TYPE_INT, app->capres.preview_width, + "height", G_TYPE_INT, app->capres.preview_height, NULL); + + /* Set capture caps on capture filter */ + g_object_set (app->ele.cap_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + /* Create capture pipeline bin */ + app->ele.capbin = gst_bin_new ("cap_bin"); + gst_bin_add_many (GST_BIN (app->ele.capbin), app->ele.vsrc, + app->ele.cap_filter, NULL); + if (!gst_element_link_many (app->ele.vsrc, app->ele.cap_filter, + NULL)) { + NVGST_ERROR_MESSAGE_V ("Element link fail between %s & %s \n", + NVGST_VIDEO_CAPTURE_SRC_V4L2, NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + pad = 
gst_element_get_static_pad (app->ele.cap_filter, "src"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static src pad of capture filter\n"); + goto fail; + } + gst_element_add_pad (app->ele.capbin, gst_ghost_pad_new ("src", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +/** + * Create video sink bin + * + * @param void : + */ +static gboolean +create_svs_bin (void) +{ + GstPad *pad = NULL; + + /* Create render pipeline bin */ + app->ele.svsbin = gst_bin_new ("svs_bin"); + +#if GUI + app->svs = NULL; +#endif + + if (app->svs == NULL) { + switch (app->cam_src) { + case NV_CAM_SRC_CSI: + app->svs = NVGST_DEFAULT_PREVIEW_SINK_CSI; + break; + case NV_CAM_SRC_V4L2: + app->svs = NVGST_DEFAULT_PREVIEW_SINK_USB; + break; + case NV_CAM_SRC_TEST: + app->svs = NVGST_DEFAULT_PREVIEW_SINK_USB; + break; + default: + g_print ("Invalid camera source, svs not set.\n"); + } + } + + app->ele.vsink = gst_element_factory_make (app->svs, NULL); + if (!app->ele.vsink) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", app->svs); + goto fail; + } + g_object_set (G_OBJECT (app->ele.vsink), "async", FALSE, NULL); + g_object_set (G_OBJECT (app->ele.vsink), "sync", FALSE, NULL); + + /* Create the colorspace converter element */ + if (!g_strcmp0 ("ximagesink", app->svs) || + !g_strcmp0 ("xvimagesink", app->svs)) { + app->ele.colorspace_conv = + gst_element_factory_make (NVGST_DEFAULT_VIDEO_CONVERTER, NULL); + if (!app->ele.colorspace_conv) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_DEFAULT_VIDEO_CONVERTER); + goto fail; + } + gst_bin_add_many (GST_BIN (app->ele.svsbin), app->ele.colorspace_conv, + app->ele.vsink, NULL); + if (!gst_element_link (app->ele.colorspace_conv, app->ele.vsink)) { + NVGST_ERROR_MESSAGE_V ("Element link fail between %s & %s \n", + NVGST_DEFAULT_VIDEO_CONVERTER, app->svs); + goto fail; + } + pad = gst_element_get_static_pad (app->ele.colorspace_conv, "sink"); + } 
else if (!g_strcmp0 ("nveglglessink", app->svs)) { + app->ele.colorspace_conv = + gst_element_factory_make ("nvegltransform", NULL); + + if (!app->ele.colorspace_conv) { + NVGST_ERROR_MESSAGE ("Element nvegltransform creation failed \n"); + goto fail; + } + gst_bin_add_many (GST_BIN (app->ele.svsbin), app->ele.colorspace_conv, + app->ele.vsink, NULL); + if (!gst_element_link (app->ele.colorspace_conv, app->ele.vsink)) { + NVGST_ERROR_MESSAGE_V ("Element link fail between %s & %s \n", + NVGST_DEFAULT_VIDEO_CONVERTER, app->svs); + goto fail; + } + pad = gst_element_get_static_pad (app->ele.colorspace_conv, "sink"); + + if (app->eglConfig) { + set_egl_window_config (app->eglConfig); + } + } else { + gst_bin_add (GST_BIN (app->ele.svsbin), app->ele.vsink); + pad = gst_element_get_static_pad (app->ele.vsink, "sink"); + + if (app->overlayConfig) { + g_object_set (G_OBJECT (app->ele.vsink), "overlay", app->overlay_index, + NULL); + g_object_set (G_OBJECT (app->ele.vsink), "overlay-x", app->overlay_x_pos, + NULL); + g_object_set (G_OBJECT (app->ele.vsink), "overlay-y", app->overlay_y_pos, + NULL); + g_object_set (G_OBJECT (app->ele.vsink), "overlay-w", app->overlay_width, + NULL); + g_object_set (G_OBJECT (app->ele.vsink), "overlay-h", app->overlay_height, + NULL); + } + } + +#if GUI + gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (app->ele.vsink), + CALL_GUI_FUNC (get_video_window)); +#else +{ + GstElement *vsink = app->ele.vsink; + if (vsink && GST_IS_VIDEO_OVERLAY (vsink)) { + if (!app->disp.mDisplay) { + nvgst_x11_init (&app->disp); + } + + if (app->capres.preview_width < app->disp.display_width + || app->capres.preview_height < app->disp.display_height) { + app->disp.width = app->capres.preview_width; + app->disp.height = app->capres.preview_height; + } else { + app->disp.width = app->disp.display_width; + app->disp.height = app->disp.display_height; + } + g_mutex_lock (app->lock); + + if (app->disp.window) + nvgst_destroy_window (&app->disp); + 
nvgst_create_window (&app->disp, "nvgstcapture-1.0"); + app->x_event_thread = g_thread_new ("nvgst-window-event-thread", + nvgst_x_event_thread, app); + gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), + (gulong) app->disp.window); + gst_video_overlay_expose (GST_VIDEO_OVERLAY (vsink)); + + g_mutex_unlock (app->lock); + } +} +#endif + + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static sink pad\n"); + goto fail; + } + gst_element_add_pad (app->ele.svsbin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +/** + * Create preview scaling bin + * + * @param void : + */ +static gboolean +create_preview_scaling_bin (void) +{ + GstPad *sinkpad = NULL; + GstPad *srcpad = NULL; + GstCaps *caps = NULL; + GstCapsFeatures *feature = NULL; + + /* Create scaling pipeline bin */ + app->ele.svc_prebin = gst_bin_new ("svc_prev_bin"); + + app->ele.svc_prevconv = + gst_element_factory_make (NVGST_DEFAULT_VIDEO_CONVERTER_CSI, NULL); + if (!app->ele.svc_prevconv) { + NVGST_ERROR_MESSAGE_V ("svc_prev_bin Element %s creation failed \n", + NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + + g_object_set (app->ele.svc_prevconv , "flip-method", app->flip_method, NULL); + + /* Create the capsfilter element */ + { + app->ele.svc_prevconv_out_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if (!app->ele.svc_prevconv_out_filter) { + NVGST_ERROR_MESSAGE_V ("svc_prev_bin Element %s creation failed \n", + NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + caps = + gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "I420", + "width", G_TYPE_INT, app->capres.preview_width, "height", G_TYPE_INT, + app->capres.preview_height, NULL); + + if (!g_strcmp0 (app->svs, "nvhdmioverlaysink") || +#ifdef WITH_STREAMING + app->streaming_mode || +#endif + !g_strcmp0 (app->svs, "nveglglessink")) { + feature = gst_caps_features_new ("memory:NVMM", NULL); + 
gst_caps_set_features (caps, 0, feature); + } + + /* Set capture caps on capture filter */ + g_object_set (app->ele.svc_prevconv_out_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + gst_bin_add_many (GST_BIN (app->ele.svc_prebin), + app->ele.svc_prevconv_out_filter, app->ele.svc_prevconv, NULL); + if (!gst_element_link_many (app->ele.svc_prevconv, + app->ele.svc_prevconv_out_filter, NULL)) { + NVGST_ERROR_MESSAGE_V + ("svc_prev_bin Element link fail between %s & %s \n", + NVGST_DEFAULT_CAPTURE_FILTER, NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + sinkpad = gst_element_get_static_pad (app->ele.svc_prevconv, "sink"); + srcpad = + gst_element_get_static_pad (app->ele.svc_prevconv_out_filter, "src"); + } + + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("svc_prev_bin can't get static sink/src pad\n"); + goto fail; + } + gst_element_add_pad (app->ele.svc_prebin, gst_ghost_pad_new ("sink", + sinkpad)); + gst_element_add_pad (app->ele.svc_prebin, gst_ghost_pad_new ("src", srcpad)); + gst_object_unref (GST_OBJECT (sinkpad)); + gst_object_unref (GST_OBJECT (srcpad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +/** + * Create image scaling bin + * + * @param void : + */ +static gboolean +create_image_scaling_bin (void) +{ + GstPad *sinkpad = NULL; + GstPad *srcpad = NULL; + GstCaps *caps = NULL; + GstCapsFeatures *feature = NULL; + + /* Create image scaling pipeline bin */ + app->ele.svc_imgbin = gst_bin_new ("svc_img_bin"); + + app->ele.svc_imgvconv = + gst_element_factory_make (NVGST_DEFAULT_VIDEO_CONVERTER_CSI, NULL); + if (!app->ele.svc_imgvconv) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + + g_object_set (app->ele.svc_imgvconv , "flip-method", app->flip_method, NULL); + + /* Create the capsfilter element */ + { + app->ele.svc_imgvconv_out_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if 
(!app->ele.svc_imgvconv_out_filter) { + NVGST_ERROR_MESSAGE_V ("svc_img_bin Element %s creation failed \n", + NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + caps = gst_caps_new_simple ("video/x-raw", + "format", G_TYPE_STRING, "I420", + "width", G_TYPE_INT, app->capres.image_cap_width, + "height", G_TYPE_INT, app->capres.image_cap_height, NULL); + + if (app->encset.image_enc == FORMAT_JPEG_HW) { + feature = gst_caps_features_new ("memory:NVMM", NULL); + gst_caps_set_features (caps, 0, feature); + } + + /* Set capture caps on capture filter */ + g_object_set (app->ele.svc_imgvconv_out_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + gst_bin_add_many (GST_BIN (app->ele.svc_imgbin), + app->ele.svc_imgvconv_out_filter, app->ele.svc_imgvconv, NULL); + if (!gst_element_link_many (app->ele.svc_imgvconv, + app->ele.svc_imgvconv_out_filter, NULL)) { + NVGST_ERROR_MESSAGE_V ("svc_img_bin Element link fail between %s & %s \n", + NVGST_DEFAULT_CAPTURE_FILTER, NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + sinkpad = gst_element_get_static_pad (app->ele.svc_imgvconv, "sink"); + srcpad = + gst_element_get_static_pad (app->ele.svc_imgvconv_out_filter, "src"); + } + + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("svc_img_bin can't get static sink/src pad\n"); + goto fail; + } + gst_element_add_pad (app->ele.svc_imgbin, gst_ghost_pad_new ("sink", + sinkpad)); + gst_element_add_pad (app->ele.svc_imgbin, gst_ghost_pad_new ("src", srcpad)); + gst_object_unref (GST_OBJECT (sinkpad)); + gst_object_unref (GST_OBJECT (srcpad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +/** + * Create video scaling bin + * + * @param void : + */ +static gboolean +create_video_scaling_bin (void) +{ + GstPad *sinkpad = NULL; + GstPad *srcpad = NULL; + GstCaps *caps = NULL; + GstCapsFeatures *feature = NULL; + + /* Create scaling pipeline bin */ + app->ele.svc_vidbin = gst_bin_new ("svc_vid_bin"); + + app->ele.svc_vidvconv = + gst_element_factory_make 
(NVGST_DEFAULT_VIDEO_CONVERTER_CSI, NULL); + if (!app->ele.svc_vidvconv) { + NVGST_ERROR_MESSAGE_V ("svc_vid_bin Element %s creation failed \n", + NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + + g_object_set (app->ele.svc_vidvconv , "flip-method", app->flip_method, NULL); + + /* Create the capsfilter element */ + { + app->ele.svc_vidvconv_out_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if (!app->ele.svc_vidvconv_out_filter) { + NVGST_ERROR_MESSAGE_V ("svc_vid_bin Element %s creation failed \n", + NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + caps = gst_caps_new_simple ("video/x-raw", + "format", G_TYPE_STRING, "NV12", + "width", G_TYPE_INT, app->capres.video_cap_width, + "height", G_TYPE_INT, app->capres.video_cap_height, NULL); + + feature = gst_caps_features_new ("memory:NVMM", NULL); + gst_caps_set_features (caps, 0, feature); + + /* Set capture caps on capture filter */ + g_object_set (app->ele.svc_vidvconv_out_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + gst_bin_add_many (GST_BIN (app->ele.svc_vidbin), + app->ele.svc_vidvconv_out_filter, app->ele.svc_vidvconv, NULL); + if (!gst_element_link_many (app->ele.svc_vidvconv, + app->ele.svc_vidvconv_out_filter, NULL)) { + NVGST_ERROR_MESSAGE_V ("svc_vid_bin Element link fail between %s & %s \n", + NVGST_DEFAULT_CAPTURE_FILTER, NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + sinkpad = gst_element_get_static_pad (app->ele.svc_vidvconv, "sink"); + srcpad = + gst_element_get_static_pad (app->ele.svc_vidvconv_out_filter, "src"); + } + + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("svc_vid_bin can't get static sink/src pad\n"); + goto fail; + } + gst_element_add_pad (app->ele.svc_vidbin, gst_ghost_pad_new ("sink", + sinkpad)); + gst_element_add_pad (app->ele.svc_vidbin, gst_ghost_pad_new ("src", srcpad)); + gst_object_unref (GST_OBJECT (sinkpad)); + gst_object_unref (GST_OBJECT (srcpad)); + + return TRUE; + +fail: + app->return_value = -1; + return 
FALSE; +} + +/** + * Create encode bin + * + * @param void : + */ + +static gboolean +create_img_enc_bin (void) +{ + GstPad *pad = NULL; + + app->ele.img_bin = gst_bin_new ("img_bin"); + + /* Create image encode chain elements */ + app->ele.img_enc_conv = gst_element_factory_make (NVGST_DEFAULT_IMAGE_ENC_CONVERTER, NULL); + if (!app->ele.img_enc_conv) { + NVGST_ERROR_MESSAGE ("nvvidconv element could not be created for image encode.\n"); + goto fail; + } + + if (!get_image_encoder (&app->ele.img_enc)) { + NVGST_ERROR_MESSAGE ("Image encoder element could not be created.\n"); + goto fail; + } + + app->ele.img_sink = gst_element_factory_make (NVGST_DEFAULT_IENC_SINK, NULL); + if (!app->ele.img_sink) { + NVGST_ERROR_MESSAGE ("Image sink element could be created.\n"); + goto fail; + } + g_object_set (G_OBJECT (app->ele.img_sink), "signal-handoffs", TRUE, NULL); + g_signal_connect (G_OBJECT (app->ele.img_sink), "handoff", + G_CALLBACK (cam_image_captured), NULL); + + gst_bin_add_many (GST_BIN (app->ele.img_bin), app->ele.img_enc_conv, + app->ele.img_enc, app->ele.img_sink, NULL); + + if ((gst_element_link (app->ele.img_enc_conv, app->ele.img_enc)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link iconverter & iencoder\n"); + goto fail; + } + + if ((gst_element_link (app->ele.img_enc, app->ele.img_sink)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link iencoder & image_sink\n"); + goto fail; + } + + pad = gst_element_get_static_pad (app->ele.img_enc_conv, "sink"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static sink pad of iconverter\n"); + goto fail; + } + gst_element_add_pad (app->ele.img_bin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +#ifdef WITH_STREAMING +static void +rtsp_video_stream_new (GObject * media) +{ + GstCaps *appsrc_caps; + GstElement *bin; + GstElement *appsrc; + + create_capture_pipeline (); + + g_object_get (media, 
"element", &bin, NULL); + + appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (bin), "mysrc"); + app->video_streaming_ctx.appsrc = appsrc; + + gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time"); + g_object_set (G_OBJECT (appsrc), "is-live", TRUE, NULL); + + switch (app->encset.video_enc) { + case FORMAT_H264_HW: + appsrc_caps = + gst_caps_from_string + ("video/x-h264, stream-format=byte-stream, alignment=au"); + break; + case FORMAT_VP8_HW: + appsrc_caps = gst_caps_from_string ("video/x-vp8"); + break; + case FORMAT_H265_HW: + appsrc_caps = gst_caps_from_string ("video/x-h265"); + break; + case FORMAT_VP9_HW: + appsrc_caps = gst_caps_from_string ("video/x-vp9"); + break; + default: + appsrc_caps = gst_caps_from_string ("video/x-h264"); + break; + } + g_object_set (G_OBJECT (appsrc), "caps", appsrc_caps, NULL); + + gst_caps_unref (appsrc_caps); +} + +static void +rtsp_video_stream_start (void) +{ +} + +static void +rtsp_video_stream_pause (void) +{ + if (app->streaming_mode == 2) + stop_video_capture (); +} + +static void +rtsp_video_stream_resume (void) +{ + restart_capture_pipeline (); + if (app->streaming_mode == 2) + start_video_capture (); +} + +static void +rtsp_video_stream_stop (void) +{ + if (app->streaming_mode == 2) { + stop_video_capture (); + g_usleep(100000); + } + destroy_capture_pipeline (); +} + +static GstFlowReturn +rtsp_video_appsink_new_sample (GstAppSink * appsink, gpointer user_data) +{ + GstSample *sample = NULL; + GstBuffer *buffer; + GstAppSrc *appsrc = GST_APP_SRC (app->video_streaming_ctx.appsrc); + + sample = gst_app_sink_pull_sample (GST_APP_SINK (app->ele.vsink)); + + buffer = gst_sample_get_buffer (sample); + if (!buffer) + return GST_FLOW_OK; + + if (!appsrc) { + gst_sample_unref (sample); + return GST_FLOW_OK; + } + + gst_buffer_ref(buffer); + gst_sample_unref(sample); + + return gst_app_src_push_buffer (GST_APP_SRC (appsrc), buffer); +} + +static void +cb_streaming_dbin_newpad (GstElement * decodebin, GstPad * pad, 
gpointer data) +{ + GstCaps *caps = gst_pad_query_caps (pad, NULL); + const GstStructure *str = gst_caps_get_structure (caps, 0); + const gchar *name = gst_structure_get_name (str); + + if (!strncmp (name, "video", 5)) { + GstPad *sinkpad = gst_element_get_static_pad ( + app->video_streaming_ctx.streaming_file_src_conv, + "sink"); + if (!sinkpad) { + NVGST_ERROR_MESSAGE ("could not get pads to link uridecodebin & nvvidconv\n"); + goto done; + } + + if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad))) { + NVGST_ERROR_MESSAGE ("Failed to link uridecodebin & nvvidconv\n"); + goto done; + } + + gst_element_set_state (app->ele.vsink, GST_STATE_PLAYING); + gst_object_unref (sinkpad); + goto done; + } +done: + gst_caps_unref (caps); +} + +static gboolean +create_streaming_file_src_bin (void) +{ + int width, height; + gchar file_loc[256]; + GstCaps *caps; + GstCapsFeatures * feature; + GstPad *pad; + + app->ele.vsrc = gst_element_factory_make (NVGST_STREAMING_SRC_FILE, NULL); + if (!app->ele.vsrc) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_STREAMING_SRC_FILE); + goto fail; + } + + g_snprintf(file_loc, 255, "file://%s", + app->video_streaming_ctx.streaming_src_file); + g_object_set(G_OBJECT(app->ele.vsrc), "uri", file_loc, NULL); + g_signal_connect (app->ele.vsrc, "pad-added", + G_CALLBACK (cb_streaming_dbin_newpad), app); + + app->video_streaming_ctx.streaming_file_src_conv = + gst_element_factory_make (NVGST_DEFAULT_VIDEO_CONVERTER_CSI, NULL); + if (!app->video_streaming_ctx.streaming_file_src_conv) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + + app->ele.cap_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if (!app->ele.cap_filter) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + app->capres.current_max_res = + MAX (app->capres.prev_res_index, MAX (app->capres.vid_res_index, + 
app->capres.img_res_index)); + get_max_resolution (app->capres.current_max_res, &width, &height); + + caps = + gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "I420", + "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL); + + feature = gst_caps_features_new ("memory:NVMM", NULL); + gst_caps_set_features (caps, 0, feature); + g_object_set (app->ele.cap_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + app->ele.capbin = gst_bin_new ("cap_bin"); + gst_bin_add_many (GST_BIN (app->ele.capbin), app->ele.vsrc, + app->video_streaming_ctx.streaming_file_src_conv, + app->ele.cap_filter, NULL); + + if (!gst_element_link_many (app->video_streaming_ctx.streaming_file_src_conv, + app->ele.cap_filter, NULL)) { + NVGST_ERROR_MESSAGE_V ("Element link fail between %s & %s \n", + NVGST_DEFAULT_VIDEO_CONVERTER_CSI, NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + pad = gst_element_get_static_pad (app->ele.cap_filter, "src"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static src pad of capture filter\n"); + goto fail; + } + gst_element_add_pad (app->ele.capbin, gst_ghost_pad_new ("src", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +static gboolean +create_streaming_enc_bin (void) +{ + GstPad *pad = NULL; + + GstCaps *appsink_caps; + GstAppSinkCallbacks callbacks = { + NULL, NULL, rtsp_video_appsink_new_sample + }; + + app->ele.svsbin = gst_bin_new ("streaming_bin"); + + app->ele.vsink = gst_element_factory_make ("appsink", NULL); + if (!app->ele.vsink) { + NVGST_ERROR_MESSAGE ("video sink element could not be created.\n"); + goto fail; + } + + g_object_set (G_OBJECT (app->ele.vsink), + "sync", FALSE, "async", FALSE, NULL); + gst_util_set_object_arg (G_OBJECT (app->ele.vsink), "format", "time"); + + switch (app->encset.video_enc) { + case FORMAT_H264_HW: + appsink_caps = + gst_caps_from_string + ("video/x-h264, stream-format=byte-stream, alignment=au"); + break; + case 
FORMAT_VP8_HW: + appsink_caps = gst_caps_from_string ("video/x-vp8"); + break; + case FORMAT_H265_HW: + appsink_caps = gst_caps_from_string ("video/x-h265"); + break; + case FORMAT_VP9_HW: + appsink_caps = gst_caps_from_string ("video/x-vp9"); + break; + default: + appsink_caps = gst_caps_from_string ("video/x-h264"); + break; + } + g_object_set (G_OBJECT (app->ele.vsink), "caps", appsink_caps, NULL); + gst_caps_unref (appsink_caps); + + gst_app_sink_set_callbacks (GST_APP_SINK (app->ele.vsink), &callbacks, + NULL, NULL); + + if (!get_video_encoder (&app->ele.colorspace_conv)) { + NVGST_ERROR_MESSAGE ("Video encoder element could not be created.\n"); + goto fail; + } + + gst_bin_add_many (GST_BIN (app->ele.vid_bin), app->ele.colorspace_conv, + app->ele.vsink, NULL); + + if ((gst_element_link (app->ele.colorspace_conv, app->ele.vsink)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link encoder & appsink\n"); + goto fail; + } + + pad = gst_element_get_static_pad (app->ele.colorspace_conv, "sink"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static sink pad of encoder\n"); + goto fail; + } + gst_element_add_pad (app->ele.svsbin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} +#endif + +static gboolean +create_vid_enc_bin (void) +{ + GstPad *pad = NULL; + GstPad *srcpad = NULL; + GstPad *sinkpad = NULL; + GstCapsFeatures *feature = NULL; + GstCaps *caps = NULL; + + app->ele.vid_bin = gst_bin_new ("vid_bin"); + + if (app->cam_src != NV_CAM_SRC_CSI) + { + app->ele.vid_enc_conv = gst_element_factory_make ("nvvidconv", "nvvidconv"); + if (!app->ele.vid_enc_conv) { + NVGST_ERROR_MESSAGE ("nvvidconv element could not be created.\n"); + goto fail; + } + app->ele.vid_enc_cap_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + caps = + gst_caps_new_simple ("video/x-raw","format", G_TYPE_STRING, "NV12", NULL); + + feature = gst_caps_features_new 
("memory:NVMM", NULL); + gst_caps_set_features (caps, 0, feature); + + g_object_set (G_OBJECT (app->ele.vid_enc_cap_filter), "caps", caps, NULL); + gst_caps_unref (caps); + } + + app->ele.video_sink = + gst_element_factory_make (NVGST_DEFAULT_VENC_SINK, NULL); + if (!app->ele.video_sink) { + NVGST_ERROR_MESSAGE ("video sink element could not be created.\n"); + goto fail; + } + g_object_set (G_OBJECT (app->ele.video_sink), + "location", DEFAULT_LOCATION, "async", FALSE, "sync", FALSE, NULL); + + if (!get_video_encoder (&app->ele.vid_enc)) { + NVGST_ERROR_MESSAGE ("Video encoder element could not be created.\n"); + goto fail; + } + + if (!get_parser (&app->ele.parser)) { + NVGST_ERROR_MESSAGE ("Video parser element could not be created.\n"); + goto fail; + } + + if (!get_muxer (&app->ele.muxer)) { + NVGST_ERROR_MESSAGE ("Video muxer element could not be created.\n"); + goto fail; + } + + if(app->cam_src != NV_CAM_SRC_CSI) + { + gst_bin_add_many (GST_BIN (app->ele.vid_bin), app->ele.vid_enc_conv, app->ele.vid_enc_cap_filter, app->ele.vid_enc, + app->ele.parser, app->ele.muxer, app->ele.video_sink, NULL); + + if ((gst_element_link (app->ele.vid_enc_conv, app->ele.vid_enc_cap_filter)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link nvvidconv & caps filter\n"); + goto fail; + } + + if ((gst_element_link (app->ele.vid_enc_cap_filter, app->ele.vid_enc)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link caps filter & encoder \n"); + goto fail; + } + } + else + { + gst_bin_add_many (GST_BIN (app->ele.vid_bin), app->ele.vid_enc, + app->ele.parser, app->ele.muxer, app->ele.video_sink, NULL); + } + + if ((gst_element_link (app->ele.vid_enc, app->ele.parser)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link encoder & parser\n"); + goto fail; + } + + srcpad = gst_element_get_static_pad (app->ele.parser, "src"); + + + if (app->muxer_is_identity) + { + sinkpad = gst_element_get_static_pad (app->ele.muxer, "sink"); + } + else + { + sinkpad = 
gst_element_get_request_pad (app->ele.muxer, "video_%u"); + } + + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("could not get pads to link enc & muxer\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("could not link enc & muxer\n"); + goto fail; + } + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + if ((gst_element_link (app->ele.muxer, app->ele.video_sink)) != TRUE) { + NVGST_ERROR_MESSAGE ("Elements could not link muxer & video_sink\n"); + goto fail; + } + + if(app->cam_src != NV_CAM_SRC_CSI) + { + pad = gst_element_get_static_pad (app->ele.vid_enc_conv, "sink"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static sink pad of nvvidconv\n"); + goto fail; + } + } + else + { + pad = gst_element_get_static_pad (app->ele.vid_enc, "sink"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static sink pad of encoder\n"); + goto fail; + } + } + + gst_element_add_pad (app->ele.vid_bin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +static gboolean +create_video_snap_bin (void) +{ + GstPad *pad = NULL; + GstCaps *caps = NULL; + GstCapsFeatures *feature = NULL; + + app->ele.vsnap_bin = gst_bin_new ("vsnap_bin"); + + if (!get_image_encoder (&app->ele.vsnap_enc)) { + NVGST_ERROR_MESSAGE ("Image encoder element could not be created.\n"); + goto fail; + } + + app->ele.vsnap_sink = + gst_element_factory_make (NVGST_DEFAULT_IENC_SINK, NULL); + if (!app->ele.vsnap_sink) { + NVGST_ERROR_MESSAGE ("Image sink element could be created.\n"); + goto fail; + } + g_object_set (G_OBJECT (app->ele.vsnap_sink), "signal-handoffs", TRUE, NULL); + g_signal_connect (G_OBJECT (app->ele.vsnap_sink), "handoff", + G_CALLBACK (write_vsnap_buffer), NULL); + + app->ele.svc_snapconv = + gst_element_factory_make (NVGST_DEFAULT_VIDEO_CONVERTER_CSI, NULL); + if (!app->ele.svc_snapconv) { + NVGST_ERROR_MESSAGE_V ("Element %s creation 
failed \n", + NVGST_DEFAULT_VIDEO_CONVERTER_CSI); + goto fail; + } + g_object_set (app->ele.svc_snapconv , "flip-method", app->flip_method, NULL); + + app->ele.svc_snapconv_out_filter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if (!app->ele.svc_snapconv_out_filter) { + NVGST_ERROR_MESSAGE_V ("Element %s creation failed \n", + NVGST_DEFAULT_CAPTURE_FILTER); + goto fail; + } + + caps = gst_caps_new_simple ("video/x-raw", + "format", G_TYPE_STRING, "I420", + "width", G_TYPE_INT, app->capres.video_cap_width, + "height", G_TYPE_INT, app->capres.video_cap_height, NULL); + + if (app->encset.image_enc == FORMAT_JPEG_HW) { + feature = gst_caps_features_new ("memory:NVMM", NULL); + gst_caps_set_features (caps, 0, feature); + } + + /* Set capture caps on capture filter */ + g_object_set (app->ele.svc_snapconv_out_filter, "caps", caps, NULL); + gst_caps_unref (caps); + + + gst_bin_add_many (GST_BIN (app->ele.vsnap_bin), + app->ele.svc_snapconv, app->ele.svc_snapconv_out_filter, + app->ele.vsnap_enc, app->ele.vsnap_sink, NULL); + + if (!gst_element_link_many (app->ele.svc_snapconv, + app->ele.svc_snapconv_out_filter, app->ele.vsnap_enc, + app->ele.vsnap_sink, NULL)) { + NVGST_ERROR_MESSAGE ("vsnap_bin: Element link fail \n"); + goto fail; + } + + pad = gst_element_get_static_pad (app->ele.svc_snapconv, "sink"); + if (!pad) { + NVGST_ERROR_MESSAGE ("can't get static sink pad of converter \n"); + goto fail; + } + + gst_element_add_pad (app->ele.vsnap_bin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (GST_OBJECT (pad)); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +static gboolean +create_eglstream_producer_pipeline (void) +{ + GstBus *bus = NULL; + GstCaps *caps = NULL; + gchar *str_color = NULL; + GstCapsFeatures *feature = NULL; + app->ele.eglproducer_bin = gst_bin_new ("eglproducer_bin"); + + app->ele.eglproducer_pipeline = gst_pipeline_new ("capture_native_pipeline");; + if (!app->ele.eglproducer_pipeline) { + 
NVGST_ERROR_MESSAGE ("capture native pipeline creation failed \n"); + goto fail; + } + bus = gst_pipeline_get_bus (GST_PIPELINE (app->ele.eglproducer_pipeline)); + gst_bus_set_sync_handler (bus, bus_sync_handler, + app->ele.eglproducer_pipeline, NULL); + gst_bus_add_watch (bus, bus_call, NULL); + gst_object_unref (bus); + + g_object_set (app->ele.eglproducer_pipeline, "message-forward", TRUE, NULL); + + app->ele.eglproducer_videotestsrc = + gst_element_factory_make (NVGST_VIDEO_CAPTURE_SRC_TEST, NULL); + if (!app->ele.eglproducer_videotestsrc) { + NVGST_ERROR_MESSAGE ("eglproducer_bin VideoTestSrc Element creation failed.\n"); + goto fail; + } + + app->ele.eglproducer_capsfilter = + gst_element_factory_make (NVGST_DEFAULT_CAPTURE_FILTER, NULL); + if (!app->ele.eglproducer_capsfilter) { + NVGST_ERROR_MESSAGE ("eglproducer_bin capsfilter Element creation failed. \n"); + goto fail; + } + + if (app->color_format != -1) + app->color_format_csi = app->color_format; + switch(app->color_format_csi){ + case CAPTURE_I420: + str_color = "I420"; + break; + case CAPTURE_NV12: + str_color = "NV12"; + break; + default: + g_print("Color format not supported for CSI camera\n"); + goto fail; + } + + caps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, str_color, + "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480, NULL); + + g_object_set (app->ele.eglproducer_capsfilter, "caps", caps, NULL); + gst_caps_unref (caps); + + app->ele.eglproducer_videoconvert = + gst_element_factory_make (NVGST_DEFAULT_VIDEO_CONVERTER_CSI, NULL); + if (!app->ele.eglproducer_videoconvert) { + NVGST_ERROR_MESSAGE ("eglproducer_bin videoconvert Element creation failed. 
\n"); + goto fail; + } + + app->ele.eglproducer_nvvideosink = + gst_element_factory_make (NVGST_VIDEO_SINK, NULL); + + if (!app->ele.eglproducer_nvvideosink) { + NVGST_ERROR_MESSAGE ("eglproducer_bin nvvideosink Element creation failed \n"); + goto fail; + } + gst_bin_add_many (GST_BIN (app->ele.eglproducer_bin), + app->ele.eglproducer_videotestsrc, app->ele.eglproducer_capsfilter, + app->ele.eglproducer_videoconvert, app->ele.eglproducer_nvvideosink, NULL); + + if ((gst_element_link_many (app->ele.eglproducer_videotestsrc, app->ele.eglproducer_capsfilter, + app->ele.eglproducer_videoconvert, app->ele.eglproducer_nvvideosink, NULL)) != TRUE) + { + NVGST_ERROR_MESSAGE ("eglproducer_bin Elements could not be linked\n"); + goto fail; + } + + caps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, str_color, + "width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480, NULL); + feature = gst_caps_features_new ("memory:NVMM", NULL); + gst_caps_set_features (caps, 0, feature); + + g_object_set (app->ele.eglproducer_nvvideosink, "outcaps", caps, NULL); + gst_caps_unref (caps); + + // get display and stream + g_object_get (G_OBJECT (app->ele.eglproducer_nvvideosink), + "display", &app->display, NULL); + g_object_get (G_OBJECT (app->ele.eglproducer_nvvideosink), + "stream", &app->stream, NULL); + g_print ("GET display=%p and EGLStream=%p \n", app->display, app->stream); + + /* Add elements to camera pipeline */ + gst_bin_add_many (GST_BIN (app->ele.eglproducer_pipeline), + app->ele.eglproducer_bin, NULL); + + return TRUE; + +fail: + return FALSE; +} + +static gboolean +create_csi_capture_pipeline (void) +{ + GstBus *bus = NULL; + GstPad *sinkpad = NULL; + GstPad *srcpad = NULL; + + /* Create the camera pipeline */ + app->ele.camera = gst_pipeline_new ("capture_native_pipeline");; + if (!app->ele.camera) { + NVGST_ERROR_MESSAGE ("capture native pipeline creation failed \n"); + goto fail; + } + bus = gst_pipeline_get_bus (GST_PIPELINE (app->ele.camera)); + 
gst_bus_set_sync_handler (bus, bus_sync_handler, app->ele.camera, NULL); + gst_bus_add_watch (bus, bus_call, NULL); + gst_object_unref (bus); + + g_object_set (app->ele.camera, "message-forward", TRUE, NULL); + +#ifdef WITH_STREAMING + if (app->streaming_mode && app->video_streaming_ctx.streaming_src_file) { + /* Create capture chain elements */ + if (!create_streaming_file_src_bin ()) { + NVGST_ERROR_MESSAGE ("cap bin creation failed \n"); + goto fail; + } + } else { +#else + { +#endif + /* Create capture chain elements */ + if (!create_csi_cap_bin ()) { + NVGST_ERROR_MESSAGE ("cap bin creation failed \n"); + goto fail; + } + } + + /* Create encode chain elements */ + if (!create_vid_enc_bin ()) { + NVGST_ERROR_MESSAGE ("encode bin creation failed \n"); + goto fail; + } + + if (!create_img_enc_bin ()) { + NVGST_ERROR_MESSAGE ("encode bin creation failed \n"); + goto fail; + } + + if (!create_video_snap_bin ()) { + NVGST_ERROR_MESSAGE ("video snapshot bin creation failed \n"); + goto fail; + } + +#ifdef WITH_STREAMING + if (app->streaming_mode) { + if (!create_streaming_enc_bin ()) { + NVGST_ERROR_MESSAGE ("encode bin creation failed \n"); + goto fail; + } + } else { +#else + { +#endif + /* Create preview chain elements */ + if (!create_svs_bin ()) { + NVGST_ERROR_MESSAGE ("svs bin creation failed \n"); + goto fail; + } + } + + /* Create preview scaling elements */ + if (!create_preview_scaling_bin ()) { + NVGST_ERROR_MESSAGE ("preview scaling bin creation failed \n"); + goto fail; + } + + /* Create image scaling elements */ + if (!create_image_scaling_bin ()) { + NVGST_ERROR_MESSAGE ("image scaling bin creation failed \n"); + goto fail; + } + + /* Create video scaling elements */ + if (!create_video_scaling_bin ()) { + NVGST_ERROR_MESSAGE ("video scaling bin creation failed \n"); + goto fail; + } + + /* Create capture tee for capture streams */ + app->ele.cap_tee = + gst_element_factory_make ("nvtee" /*NVGST_PRIMARY_STREAM_SELECTOR */ , + NULL); + if 
(!app->ele.cap_tee) { + NVGST_ERROR_MESSAGE ("capture nvtee creation failed \n"); + goto fail; + } + + g_object_set (G_OBJECT (app->ele.cap_tee), "name", "cam_t", NULL); + g_object_set (G_OBJECT (app->ele.cap_tee), "mode", app->mode, NULL); + + /* Create preview & encode queue */ + app->ele.prev_q = gst_element_factory_make (NVGST_PRIMARY_QUEUE, NULL); + app->ele.ienc_q = gst_element_factory_make (NVGST_PRIMARY_QUEUE, NULL); + app->ele.venc_q = gst_element_factory_make (NVGST_PRIMARY_QUEUE, NULL); + app->ele.vsnap_q = gst_element_factory_make (NVGST_PRIMARY_QUEUE, NULL); + if (!app->ele.prev_q || !app->ele.ienc_q || !app->ele.venc_q + || !app->ele.vsnap_q) { + NVGST_ERROR_MESSAGE ("preview/encode queue creation failed \n"); + goto fail; + } + + /* Add elements to camera pipeline */ + gst_bin_add_many (GST_BIN (app->ele.camera), + app->ele.capbin, app->ele.vid_bin, app->ele.img_bin, app->ele.svsbin, + app->ele.svc_prebin, app->ele.svc_imgbin, app->ele.svc_vidbin, + app->ele.cap_tee, app->ele.prev_q, app->ele.ienc_q, app->ele.venc_q, + app->ele.vsnap_q, app->ele.vsnap_bin, NULL); + + /* Manually link the Tee with preview queue */ + srcpad = gst_element_get_static_pad (app->ele.cap_tee, "pre_src"); + sinkpad = gst_element_get_static_pad (app->ele.prev_q, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from cap_tee & prev_q\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link cap_tee & prev_q\n"); + goto fail; + } + + app->prev_probe_id = gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, + prev_buf_prob, NULL, NULL); + + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + /* Manually link the queue with preview scaling bin */ + srcpad = gst_element_get_static_pad (app->ele.prev_q, "src"); + sinkpad = gst_element_get_static_pad (app->ele.svc_prebin, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from prev_q & svc_prebin\n"); + 
goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link svc_prebin & prev_q\n"); + goto fail; + } + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + /* Manually link the Tee with video queue */ + srcpad = gst_element_get_static_pad (app->ele.cap_tee, "vid_src"); + sinkpad = gst_element_get_static_pad (app->ele.venc_q, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from cap_tee & enc_q\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link cap_tee & enc_q\n"); + goto fail; + } + app->enc_probe_id = gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, + enc_buf_prob, NULL, NULL); + + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + /* Manually link the video queue with video scaling */ + srcpad = gst_element_get_static_pad (app->ele.venc_q, "src"); + sinkpad = gst_element_get_static_pad (app->ele.svc_vidbin, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from video queue & video scaling\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link video queue & video scaling\n"); + goto fail; + } + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + /* Manually link the Tee with image queue */ + srcpad = gst_element_get_static_pad (app->ele.cap_tee, "img_src"); + sinkpad = gst_element_get_static_pad (app->ele.ienc_q, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from cap_tee & enc_q\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link cap_tee & enc_q\n"); + goto fail; + } + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + /* Manually link the image queue with image scaling */ + srcpad = gst_element_get_static_pad (app->ele.ienc_q, "src"); + sinkpad = gst_element_get_static_pad 
(app->ele.svc_imgbin, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from image queue & image scaling\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link image queue & image scaling\n"); + goto fail; + } + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + + /* Manually link the Tee with video snapshot queue */ + srcpad = gst_element_get_static_pad (app->ele.cap_tee, "vsnap_src"); + sinkpad = gst_element_get_static_pad (app->ele.vsnap_q, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from cap_tee & enc_q\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link cap_tee & enc_q\n"); + goto fail; + } + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + /* Manually link video snapshot queue with video snapshot bin */ + srcpad = gst_element_get_static_pad (app->ele.vsnap_q, "src"); + sinkpad = gst_element_get_static_pad (app->ele.vsnap_bin, "sink"); + if (!sinkpad || !srcpad) { + NVGST_ERROR_MESSAGE ("fail to get pads from video snapshot queue & bin\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (srcpad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link video snapshot queue & bin \n"); + goto fail; + } + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + + /* link the capture bin with Tee */ + if (!gst_element_link (app->ele.capbin, app->ele.cap_tee)) { + NVGST_ERROR_MESSAGE ("fail to link capbin & cap_tee\n"); + goto fail; + } + + /* link the preview scaling bin with svs bin */ + if (!gst_element_link (app->ele.svc_prebin, app->ele.svsbin)) { + NVGST_ERROR_MESSAGE ("fail to link svc_prebin & svsbin\n"); + goto fail; + } + + /* link the video scaling bin with encode bin */ + if (!gst_element_link (app->ele.svc_vidbin, app->ele.vid_bin)) { + NVGST_ERROR_MESSAGE ("fail to link svc_vidbin & vidbin\n"); + goto fail; + } + + /* link the image 
scaling bin with encode bin */ + if (!gst_element_link (app->ele.svc_imgbin, app->ele.img_bin)) { + NVGST_ERROR_MESSAGE ("fail to link svc_imgbin & imgbin\n"); + goto fail; + } + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +/** + * Create native capture pipeline. + * + * @param void + */ +static gboolean +create_native_capture_pipeline (void) +{ + GstBus *bus = NULL; + GstPad *sinkpad = NULL; + GstPad *tee_prev_pad = NULL; + GstPad *tee_vid_pad = NULL; + GstElement *encbin = NULL; + GstElement *enc_q = NULL; + GstPadTemplate *tee_src_pad_template = NULL; + + /* Create the camera pipeline */ + app->ele.camera = gst_pipeline_new ("capture_native_pipeline");; + if (!app->ele.camera) { + NVGST_ERROR_MESSAGE ("capture native pipeline creation failed \n"); + goto fail; + } + bus = gst_pipeline_get_bus (GST_PIPELINE (app->ele.camera)); + gst_bus_set_sync_handler (bus, bus_sync_handler, app->ele.camera, NULL); + gst_bus_add_watch (bus, bus_call, NULL); + gst_object_unref (bus); + + /* Create encode chain elements */ + if (app->mode == CAPTURE_VIDEO) { + if (!create_vid_enc_bin ()) { + NVGST_ERROR_MESSAGE ("encode bin creation failed \n"); + goto fail; + } + + app->ele.venc_q = gst_element_factory_make (NVGST_PRIMARY_QUEUE, NULL); + if (!app->ele.venc_q) { + NVGST_ERROR_MESSAGE ("video encode queue creation failed \n"); + goto fail; + } + + encbin = app->ele.vid_bin; + enc_q = app->ele.venc_q; + } else { + if (!create_img_enc_bin ()) { + NVGST_ERROR_MESSAGE ("encode bin creation failed \n"); + goto fail; + } + + app->ele.ienc_q = gst_element_factory_make (NVGST_PRIMARY_QUEUE, NULL); + if (!app->ele.ienc_q) { + NVGST_ERROR_MESSAGE ("image encode queue creation failed \n"); + goto fail; + } + encbin = app->ele.img_bin; + enc_q = app->ele.ienc_q; + } + + /* Create capture chain elements */ + if (!create_cap_bin ()) { + NVGST_ERROR_MESSAGE ("cap bin creation failed \n"); + goto fail; + } + + /* Create preview chain elements */ + if (!create_svs_bin 
()) { + NVGST_ERROR_MESSAGE ("svs bin creation failed \n"); + goto fail; + } + + /* Create capture tee for capture streams */ + app->ele.cap_tee = + gst_element_factory_make (NVGST_PRIMARY_STREAM_SELECTOR, NULL); + if (!app->ele.cap_tee) { + NVGST_ERROR_MESSAGE ("capture tee creation failed \n"); + goto fail; + } + + g_object_set (G_OBJECT (app->ele.cap_tee), "name", "cam_t", NULL); + + /* Create preview & encode queue */ + app->ele.prev_q = gst_element_factory_make (NVGST_PRIMARY_QUEUE, NULL); + + if (!app->ele.prev_q) { + NVGST_ERROR_MESSAGE ("preview queue creation failed \n"); + goto fail; + } + g_object_set (G_OBJECT (app->ele.prev_q), "max-size-time", (guint64) 0, + "max-size-bytes", 0, "max-size-buffers", 1, NULL); + + /* Add elements to camera pipeline */ + gst_bin_add_many (GST_BIN (app->ele.camera), + app->ele.capbin, encbin, app->ele.svsbin, + app->ele.cap_tee, app->ele.prev_q, enc_q, NULL); + + tee_src_pad_template = + gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (app-> + ele.cap_tee), "src_%u"); + if (!tee_src_pad_template) { + NVGST_ERROR_MESSAGE ("fail to get pads template from cap_tee\n"); + goto fail; + } + + /* Manually link the Tee with preview queue */ + tee_prev_pad = + gst_element_request_pad (app->ele.cap_tee, tee_src_pad_template, NULL, + NULL); + sinkpad = gst_element_get_static_pad (app->ele.prev_q, "sink"); + if (!sinkpad || !tee_prev_pad) { + NVGST_ERROR_MESSAGE ("fail to get pads from cap_tee & prev_q\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (tee_prev_pad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link cap_tee & prev_q\n"); + goto fail; + } + app->prev_probe_id = gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, + prev_buf_prob, NULL, NULL); + + gst_object_unref (sinkpad); + gst_object_unref (tee_prev_pad); + + /* Manually link the Tee with capture queue */ + tee_vid_pad = + gst_element_request_pad (app->ele.cap_tee, tee_src_pad_template, NULL, + NULL); + sinkpad = gst_element_get_static_pad 
(enc_q, "sink"); + if (!sinkpad || !tee_vid_pad) { + NVGST_ERROR_MESSAGE ("fail to get pads from cap_tee & enc_q\n"); + goto fail; + } + if (GST_PAD_LINK_OK != gst_pad_link (tee_vid_pad, sinkpad)) { + NVGST_ERROR_MESSAGE ("fail to link cap_tee & enc_q\n"); + goto fail; + } + + gst_object_unref (sinkpad); + gst_object_unref (tee_vid_pad); + + /* link the capture bin with Tee */ + if (!gst_element_link (app->ele.capbin, app->ele.cap_tee)) { + NVGST_ERROR_MESSAGE ("fail to link capbin & cap_tee\n"); + goto fail; + } + + /* link the preview queue with svs bin */ + if (!gst_element_link (app->ele.prev_q, app->ele.svsbin)) { + NVGST_ERROR_MESSAGE ("fail to link prev_q & svsbin\n"); + goto fail; + } + + /* link the capture queue with encode bin */ + if (!gst_element_link (enc_q, encbin)) { + NVGST_ERROR_MESSAGE ("fail to link enc_q & endbin\n"); + goto fail; + } + + /* Add buffer probe on capture queue sink pad */ + sinkpad = gst_element_get_static_pad (enc_q, "sink"); + app->enc_probe_id = + gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, enc_buf_prob, + NULL, NULL); + gst_object_unref (sinkpad); + + return TRUE; + +fail: + app->return_value = -1; + return FALSE; +} + +/** + * Destroy capture pipeline. 
+ * + * @param void + */ +void +destroy_capture_pipeline (void) +{ + recording = FALSE; + GstPad *sinkpad = NULL; + + if (!app->ele.camera) + return; + + if (GST_STATE_CHANGE_FAILURE == gst_element_set_state (app->ele.camera, + GST_STATE_NULL)) { + g_warning ("can't set camera pipeline to null\n"); + } + + if (app->use_eglstream) { + if (GST_STATE_CHANGE_FAILURE == + gst_element_set_state (app->ele.eglproducer_pipeline, GST_STATE_NULL)) { + g_warning ("can't set nvvideosink eglproducer pipeline " + "to null\n"); + } + } + + if (app->cam_src != NV_CAM_SRC_CSI) { + /* Remove buffer probe from encode queue sink pad */ + if (app->mode == CAPTURE_VIDEO) + sinkpad = gst_element_get_static_pad (app->ele.venc_q, "sink"); + else + sinkpad = gst_element_get_static_pad (app->ele.ienc_q, "sink"); + gst_pad_remove_probe (sinkpad, app->enc_probe_id); + gst_object_unref (sinkpad); + } + + if (app->cam_src == NV_CAM_SRC_CSI) { + sinkpad = gst_element_get_static_pad (app->ele.venc_q, "sink"); + gst_pad_remove_probe (sinkpad, app->enc_probe_id); + gst_object_unref (sinkpad); + } + + sinkpad = gst_element_get_static_pad (app->ele.prev_q, "sink"); + gst_pad_remove_probe (sinkpad, app->prev_probe_id); + gst_object_unref (sinkpad); + + if (app->reset_thread) + g_thread_unref (app->reset_thread); + + app->reset_thread = NULL; + + gst_object_unref (GST_OBJECT (app->ele.camera)); + app->ele.camera = NULL; + app->ele.vsrc = NULL; + app->ele.vsink = NULL; + app->ele.cap_filter = NULL; + app->ele.cap_tee = NULL; + app->ele.prev_q = NULL; + app->ele.venc_q = NULL; + app->ele.ienc_q = NULL; + app->ele.img_enc = NULL; + app->ele.vid_enc = NULL; + app->ele.muxer = NULL; + app->ele.img_sink = NULL; + app->ele.video_sink = NULL; + + app->ele.capbin = NULL; + app->ele.vid_bin = NULL; + app->ele.img_bin = NULL; + app->ele.svsbin = NULL; + + app->ele.vid_enc_conv = NULL; + app->ele.vid_enc_cap_filter = NULL; + +} + +/** + * Restart capture pipeline. 
+ * + * @param void + */ +void +restart_capture_pipeline (void) +{ + destroy_capture_pipeline (); + + g_usleep (250000); + + if (!create_capture_pipeline ()) { + app->return_value = -1; + g_main_loop_quit (loop); + } +} + + +/** + * Create capture pipeline. + * + * @param void + */ +gboolean +create_capture_pipeline (void) +{ + gboolean ret = TRUE; + + FUNCTION_START(); + + /* Check for capture parameters */ + if (!check_capture_params ()) { + NVGST_ERROR_MESSAGE ("Invalid capture parameters \n"); + goto fail; + } + + if (app->cam_src == NV_CAM_SRC_EGLSTREAM) + { + /* nvvideosink acting as EGLStreamProducer */ + if (!create_eglstream_producer_pipeline ()) { + NVGST_ERROR_MESSAGE ("eglstream_producer pipeline creation failed \n"); + goto fail; + } + ret = create_csi_capture_pipeline (); + } + else + { + /* Create capture pipeline elements */ + if (app->cam_src == NV_CAM_SRC_CSI) + ret = create_csi_capture_pipeline (); + else + ret = create_native_capture_pipeline (); + } + + if (!ret) { + NVGST_ERROR_MESSAGE ("can't create capture pipeline\n"); + goto fail; + } + + /* Capture pipeline created, now start capture */ + GST_INFO_OBJECT (app->ele.camera, "camera ready"); + + if (GST_STATE_CHANGE_FAILURE == + gst_element_set_state (app->ele.camera, GST_STATE_PLAYING)) { + NVGST_CRITICAL_MESSAGE ("can't set camera to playing\n"); + goto fail; + } + + if (app->use_eglstream) + { + /* EGLStreamProducer pipeline created, now start EGLStreamConsumer pipeline */ + GST_INFO_OBJECT (app->ele.eglproducer_pipeline, "nvvideosink eglproducer ready"); + + if (GST_STATE_CHANGE_FAILURE == + gst_element_set_state (app->ele.eglproducer_pipeline, GST_STATE_PLAYING)) { + NVGST_CRITICAL_MESSAGE ("can't set nvvideosink eglproducer pipeline " + "to playing\n"); + goto fail; + } + } + + FUNCTION_END(); + + /* Dump Capture - Playing Pipeline into the dot file + * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp" + * Run nvgstcapture-1.0 and 0.00.00.*-nvgstcapture-1.0-playing.dot + * 
file will be generated. + * Run "dot -Tpng 0.00.00.*-nvgstcapture-1.0-playing.dot > image.png" + * image.png will display the running capture pipeline. + * */ + GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN(app->ele.camera), + GST_DEBUG_GRAPH_SHOW_ALL, "nvgstcapture-1.0-playing"); + + return ret; + +fail: + app->return_value = -1; + FUNCTION_END(); + return FALSE; +} + +gboolean +exit_capture (gpointer data) +{ + compute_frame_rate (); + g_main_loop_quit (loop); + + return FALSE; +} + +#if !GUI +static void +nvgst_handle_xevents () +{ + XEvent e; + Atom wm_delete; + displayCtx *dpyCtx = &app->disp; + + /* Handle Display events */ + while (XPending (dpyCtx->mDisplay)) { + XNextEvent (dpyCtx->mDisplay, &e); + switch (e.type) { + case ClientMessage: + wm_delete = XInternAtom (dpyCtx->mDisplay, "WM_DELETE_WINDOW", 1); + if (wm_delete != None && wm_delete == (Atom) e.xclient.data.l[0]) { + GST_ELEMENT_ERROR (app->ele.camera, RESOURCE, NOT_FOUND, + ("Output window was closed"), (NULL)); + } + } + } +} + +static gpointer +nvgst_x_event_thread (gpointer data) +{ + gint64 end_time; + g_mutex_lock (app->lock); + while (app->disp.window) { + nvgst_handle_xevents (); + end_time = g_get_monotonic_time () + 1; + g_cond_wait_until (app->x_cond, app->lock, end_time); + } + g_mutex_unlock (app->lock); + return NULL; +} +#endif + +static gboolean +auto_capture (gpointer data) +{ + gint count = 0; + gfloat f_count = 0; + guint sensor_modsize; + + g_print ("\nStarting automation...\n"); + + if (app->aut.toggle_mode) { + while (app->aut.iteration_count-- > 0) { + g_usleep (1000000); + app->mode = (CAPTURE_VIDEO + 1) - app->mode; + g_object_set (app->ele.cap_tee, "mode", app->mode, NULL); + g_print ("Mode changed to : %d\n", app->mode); + g_usleep (1000000); + } + goto automation_done; + } + + if (app->aut.toggle_sensor_modes) { + g_object_get (G_OBJECT (app->ele.vsrc), "total-sensor-modes", &sensor_modsize, NULL); + count = app->aut.iteration_count; + while (count-- > 0) { + guint u_count 
= 0; + while (u_count < sensor_modsize) { + g_usleep (3000000); + app->sensor_mode = u_count; + g_print ("Sensor-mode changed to : %d\n", app->sensor_mode); + restart_capture_pipeline (); + g_usleep (3000000); + u_count++; + } + } + } + + if (app->aut.toggle_sensor) { + while (app->aut.iteration_count-- > 0) { + g_usleep (3000000); //increased sleep time so sensor change can be perceived + app->sensor_id = ( app->sensor_id + 1 ) % app->aut.num_sensors; + g_print ("Sensor-id changed to : %d\n", app->sensor_id); + if (app->aut.toggle_sensor_modes) { + g_object_get (G_OBJECT (app->ele.vsrc), "total-sensor-modes", &sensor_modsize, NULL); + guint u_count = 0; + while (u_count < sensor_modsize) { + g_usleep (3000000); + app->sensor_mode = u_count; + g_print ("Sensor-mode changed to : %d\n", app->sensor_mode); + restart_capture_pipeline (); + g_usleep (3000000); + u_count++; + } + } + else { + restart_capture_pipeline (); + g_usleep (3000000); + } + } + goto automation_done; + } + + if (app->aut.enum_wb) { + while (app->aut.iteration_count-- > 0) { + count = 0; + while (count < 9) { + g_usleep (1000000); + g_object_set (G_OBJECT (app->ele.vsrc), "wbmode", count, NULL); + g_print ("Whitebalance set to : %d\n", count); + g_usleep (1000000); + count++; + } + } + goto automation_done; + } + + if (app->aut.enum_st) { + while (app->aut.iteration_count-- > 0) { + f_count = 0; + count = 0; + while (count < 20) { //Range is from 0 to 2 + g_usleep (1000000); + g_object_set (G_OBJECT (app->ele.vsrc), "saturation", f_count, NULL); + g_print ("Saturation set to : %f\n", f_count); + g_usleep (1000000); + f_count = f_count + (gfloat) 0.1; //step is 0.1 + count++; + } + } + goto automation_done; + } + + if (app->aut.capture_auto) { + while (app->aut.iteration_count-- > 0) { + + if (app->return_value == -1) + break; + + if (app->mode == CAPTURE_IMAGE && recording == FALSE) { + trigger_image_capture (); + + } else if (app->mode == CAPTURE_VIDEO && recording == FALSE) { + { + gint i; + 
start_video_capture (); + g_print ("\nRecording Started for %d seconds\n", + app->aut.capture_time); + + for (i = 0 ; i < app->aut.capture_time; i++) + g_usleep (1000000); + + stop_video_capture (); + } + } + g_usleep (1000 * app->aut.capture_gap); + } + } + +automation_done: + + g_timeout_add_seconds (app->aut.quit_time, exit_capture, NULL); + return FALSE; +} + +static void +_intr_handler (int signum) +{ + struct sigaction action; + + NVGST_INFO_MESSAGE ("User Interrupted.. \n"); + app->return_value = -1; + memset (&action, 0, sizeof (action)); + action.sa_handler = SIG_DFL; + + sigaction (SIGINT, &action, NULL); + + cintr = TRUE; +} + + +static gboolean +check_for_interrupt (gpointer data) +{ + if (cintr) { + cintr = FALSE; + + gst_element_post_message (GST_ELEMENT (app->ele.camera), + gst_message_new_application (GST_OBJECT (app->ele.camera), + gst_structure_new ("NvGstAppInterrupt", + "message", G_TYPE_STRING, "Pipeline interrupted", NULL))); + + return FALSE; + } + return TRUE; +} + +static void +_intr_setup (void) +{ + struct sigaction action; + + memset (&action, 0, sizeof (action)); + action.sa_handler = _intr_handler; + + sigaction (SIGINT, &action, NULL); +} + +int +main (int argc, char *argv[]) +{ + GOptionContext *ctx; + GOptionGroup *group_argus; + GError *error = NULL; + +#ifdef WITH_STREAMING + void *nvgst_rtsp_lib = NULL; +#endif + + GIOChannel *channel = NULL; + + app = &capp; + memset (app, 0, sizeof (CamCtx)); + + /* Initialize capture params */ + capture_init_params (); + GOptionEntry options_argus[] = { + {"prev-res", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Preview width & height." + "Range: 2 to 12 (5632x4224) e.g., --prev-res=3", + NULL} + , + {"cus-prev-res", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Custom Preview width & height e.g., --cus-prev-res=1920x1080", + NULL} + , + {"image-res", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Image width & height. 
Range: 2 to 12 (5632x4224) e.g., --image-res=3", + NULL} + , + {"video-res", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Video width & height. Range: 2 to 9 (3896x2192) e.g., --video-res=3", + NULL} + , + {"camsrc", 0, 0, G_OPTION_ARG_INT, &app->cam_src, + "Camera Source to use (0=v4l2, 1=csi[default], 2=videotest, 3=eglstream)", NULL} + , + {"mode", 'm', 0, G_OPTION_ARG_INT, &app->mode, + "Capture mode value (1=still 2=video)", NULL} + , + {"video-enc", 'v', 0, G_OPTION_ARG_INT, &app->encset.video_enc, + "Video encoder type (0=h264[HW] 1=vp8[HW] 2=h265[HW] 3=vp9[HW])", + NULL} + , + {"hw-enc-path", 'p', 0, G_OPTION_ARG_INT, &app->encset.hw_enc_type, + "Frame Work type (1=V4L2 [Default])", + NULL} + , + {"enc-bitrate", 'b', 0, G_OPTION_ARG_INT, &app->encset.bitrate, + "Video encoding Bit-rate(in bytes) e.g., --enc-bitrate=4000000", NULL} + , + {"enc-controlrate", 0, 0, G_OPTION_ARG_INT, &app->encset.controlrate, + "Video encoding Bit-rate control method 0 = Disable, 1 = variable(Default), 2 = constant " + "e.g., --enc-controlrate=1", NULL} + , + {"enc-EnableTwopassCBR", 0, 0, G_OPTION_ARG_INT, &app->encset.enabletwopassCBR, + "Enable two pass CBR while encoding 0 = Disable, 1 = Enable " + "e.g., --enc-EnableTwopassCBR=1", NULL} + , + {"enc-profile", 0, 0, G_OPTION_ARG_INT, &app->encset.video_enc_profile, + "Video encoder profile For H.264: 0=Baseline, 1=Main, 2=High", + NULL} + , + {"image-enc", 'J', 0, G_OPTION_ARG_INT, &app->encset.image_enc, + "Image encoder type (0=jpeg_SW[jpegenc] 1=jpeg_HW[nvjpegenc])", NULL} + , + {"file-type", 'k', 0, G_OPTION_ARG_INT, &app->file_type, + "Container file type (0=mp4 1=3gp 2=mkv)", NULL} + , + {"file-name", 0, 0, G_OPTION_ARG_STRING, &app->file_name, + "Captured file name. 
nvcamtest is used by default", NULL} + , + {"color-format", 0, 0,G_OPTION_ARG_INT, &app->color_format, + "Color format to use (0=I420," + "1=NV12[For CSI only and default for CSI], 2=YUY2 and 3=UYVY [For V4L2 only, default YUY2 for v4l2])", + NULL} + , + {"enable-meta", 0, 0, G_OPTION_ARG_NONE, &app->enableMeta, + "Enable Sensor MetaData reporting", NULL} + , + {"app-profile", 0, 0, G_OPTION_ARG_NONE, &app->enableKpiProfile, + "Enable KPI profiling", + NULL} + , + {"kpi-numbers", 0, 0, G_OPTION_ARG_NONE, &app->enableKpiNumbers, + "Enable KPI measurement", + NULL} + , + {"cap-dev-node", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Video capture device node (0=/dev/video0[default], 1=/dev/video1, 2=/dev/video2) " + "e.g., --cap-dev-node=0", NULL} + , + {"svs", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "[For USB] (=) chain for video Preview. [For CSI only] use \"nveglglessink\"", + NULL} + , + {"eglConfig", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "EGL window Coordinates (x_pos y_pos) in that order " + " e.g., --eglConfig=\"50 100\"", + NULL} + , + {"orientation", 0, 0, G_OPTION_ARG_INT, &app->flip_method, + "Camera sensor orientation value", NULL} + , + {"whitebalance", 'w', 0, G_OPTION_ARG_INT, &app->whitebalance, + "Capture whitebalance value", NULL} + , + {"timeout", 0, 0, G_OPTION_ARG_INT, &app->timeout, + "Capture timeout value", NULL} + , + {"saturation", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Camera Saturation value", NULL} + , + {"sensor-id", 0, 0, G_OPTION_ARG_INT, &app->sensor_id, + "Camera Sensor ID value", NULL} + , + {"sensor-mode", 0, 0, G_OPTION_ARG_INT, &app->sensor_mode, + "Camera Sensor Mode value", NULL} + , + {"framerate", 0, 0, G_OPTION_ARG_INT, &app->framerate, + "FrameRate of sensor mode (use with --framerate)", NULL} + , + {"exposuretimerange", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Property to adjust exposure time range in nanoseconds" + " e.g., --exposuretimerange=\"34000 358733000\"", + NULL} + , + {"gainrange", 0, 0, 
G_OPTION_ARG_CALLBACK, parse_spec, + "Property to adjust gain range" + " e.g., --gainrange=\"1 16\"", + NULL} + , + {"ispdigitalgainrange", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Property to adjust digital gain range" + " e.g., --ispdigitalgainrange=\"1 8\"", + NULL} + , + {"aelock", 0, 0, G_OPTION_ARG_INT, &app->enableAeLock, + "Enable AE Lock, default is disabled", + NULL} + , + {"awblock", 0, 0, G_OPTION_ARG_INT, &app->enableAwbLock, + "Enable AWB Lock, default is disabled", + NULL} + , + {"exposurecompensation", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Property to adjust exposure compensation" + " e.g., --exposurecompensation=0.5", NULL} + , + {"aeantibanding", 0, 0, G_OPTION_ARG_INT, &app->ae_antibanding, + "Property to set the auto exposure antibanding mode" + " e.g., --aeantibanding=2", NULL} + , + {"tnr-mode", 0, 0, G_OPTION_ARG_INT, &app->tnr_mode, + "Property to select temporal noise reduction mode" + " e.g., --tnr-mode=2", NULL} + , + {"tnr-strength", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Property to adjust temporal noise reduction strength" + " e.g., --tnr-strength=0.5", NULL} + , + {"ee-mode", 0, 0, G_OPTION_ARG_INT, &app->ee_mode, + "Property to select edge enhancement mode" + " e.g., --ee-mode=2", NULL} + , + {"ee-strength", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Property to adjust edge enhancement strength" + " e.g., --ee-strength=0.5", NULL} + , + {"overlayConfig", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "Overlay Configuration Options index and coordinates in (index, x_pos, y_pos, width, height) order " + " e.g. --overlayConfig=\"0, 0, 0, 1280, 720\"", + NULL} + , + {"automate", 'A', 0, G_OPTION_ARG_NONE, &app->aut.automate, + "Run application in automation mode", NULL} + , + {"start-time", 'S', 0, G_OPTION_ARG_INT, &app->aut.capture_start_time, + "Start capture after specified time in seconds. 
Default = 5 sec (use with --automate or -A only)", + NULL} + , + {"quit-after", 'Q', 0, G_OPTION_ARG_INT, &app->aut.quit_time, + "Quit application once automation is done after specified time in seconds. Default = 0 sec (use with --automate or -A only)", + NULL} + , + {"count", 'C', 0, G_OPTION_ARG_INT, &app->aut.iteration_count, + "Number of iterations of automation testcase. Default = 1 (use with --automate or -A only)", + NULL} + , + {"num-sensors", 'N', 0, G_OPTION_ARG_INT, &app->aut.num_sensors, + "Number of sensors (use with --automate or -A only)", + NULL} + , + {"capture-gap", 0, 0, G_OPTION_ARG_INT, &app->aut.capture_gap, + "Number of milliseconds between successive image/video capture. Default = 250 msec (use with --automate and --capture-auto only)", + NULL} + , + {"capture-time", 0, 0, G_OPTION_ARG_INT, &app->aut.capture_time, + "Capture video for specified time in seconds. Default = 10 sec (use with --automate and --capture-auto only)", + NULL} + , + {"toggle-mode", 0, 0, G_OPTION_ARG_NONE, &app->aut.toggle_mode, + "Toggle between still and video capture modes for count number of times (use with --automate or -A only)", + NULL} + , + {"capture-auto", 0, 0, G_OPTION_ARG_NONE, &app->aut.capture_auto, + "Do image/video capture in automation mode for count number of times(use with --automate or -A only)", + NULL} + , + {"toggle-sensor", 0, 0, G_OPTION_ARG_NONE, &app->aut.toggle_sensor, + "Toggle between num_sensors if given otherwise between sensor-id 0 and 1 (use with --automate or -A only)", + NULL} + , + {"toggle-sensor-modes", 0, 0, G_OPTION_ARG_NONE, &app->aut.toggle_sensor_modes, + "Toggle between all sensor modes if given. 
(use with --automate or -A only)", + NULL} + , + {"enum-wb", 0, 0, G_OPTION_ARG_NONE, &app->aut.enum_wb, + "Enumerate all white-balance modes for count number of times (use with --automate or -A only)", + NULL} + , + {"enum-st", 0, 0, G_OPTION_ARG_NONE, &app->aut.enum_st, + "Enumerate saturation value through 0 to 2 by a step of 0.1 for count number of times (use with --automate or -A only)", + NULL} + , + {NULL}}; + + ctx = g_option_context_new ("Nvidia GStreamer Camera Model Test"); + g_option_context_set_description (ctx, app->csi_resolution); + group_argus = g_option_group_new ("nvarguscamersrc", "Application Options [NVArgusCamera, USB, V4L2 ONLY]:", "Show nvarguscamerasrc Options", NULL, NULL); + g_option_group_add_entries (group_argus, options_argus); + g_option_context_set_main_group (ctx, group_argus); + g_option_context_add_group (ctx, gst_init_get_option_group ()); + + if (!g_option_context_parse (ctx, &argc, &argv, &error)) { + g_option_context_free (ctx); + NVGST_ERROR_MESSAGE_V ("option parsing failed: %s", error->message); + goto done; + } + if(app->encset.bitrate != 0) + is_user_bitrate = 1; + + g_option_context_free (ctx); + + if (!app->aut.automate) + print_help (); + + gst_init (&argc, &argv); + + GET_TIMESTAMP(APP_LAUNCH); + + loop = g_main_loop_new (NULL, FALSE); + +#ifdef WITH_STREAMING + if (app->streaming_mode) { + char *payloader; + char *parser; + char pipeline[256]; + + NvGstRtspStreamCallbacks rtspcallbacks = { rtsp_video_stream_new, + rtsp_video_stream_start, rtsp_video_stream_pause, + rtsp_video_stream_resume, rtsp_video_stream_stop + }; + + nvgst_rtsp_lib = dlopen (NVGST_RTSP_LIBRARY, RTLD_NOW); + if (!nvgst_rtsp_lib) { + NVGST_ERROR_MESSAGE_V ("Error opening " NVGST_RTSP_LIBRARY ": %s", + dlerror ()); + goto done; + } + + nvgst_rtsp_server_init_fcn nvgst_rtsp_init = + (nvgst_rtsp_server_init_fcn) dlsym (nvgst_rtsp_lib, + "nvgst_rtsp_server_init"); + + if (!nvgst_rtsp_init (&nvgst_rtsp_functions)) { + NVGST_ERROR_MESSAGE ("Could not 
initialize nvgst_rtsp library"); + goto done; + } + + switch (app->encset.video_enc) { + case FORMAT_H264_HW: + payloader = "rtph264pay"; + parser = "h264parse"; + break; + case FORMAT_VP8_HW: + payloader = "rtpvp8pay"; + parser = "identity"; + break; + case FORMAT_H265_HW: + payloader = "rtph265pay"; + parser = "h265parse"; + break; + case FORMAT_VP9_HW: + payloader = "rtpvp9pay"; + parser = "identity"; + break; + default: + NVGST_ERROR_MESSAGE ("Unsupported codec for streaming"); + goto done; + } + + snprintf(pipeline, sizeof(pipeline) - 1, + "appsrc name=mysrc is-live=0 do-timestamp=1 ! %s ! %s name=pay0 pt=96", + parser, payloader); + + app->video_streaming_ctx.media_factory = + nvgst_rtsp_functions.create_stream ("/test", pipeline, &rtspcallbacks); + if (!app->video_streaming_ctx.media_factory) { + NVGST_ERROR_MESSAGE ("Could not create rtsp video stream"); + goto done; + } + + g_object_set (G_OBJECT(app->video_streaming_ctx.media_factory), "shared", TRUE, NULL); + } +#endif + + if (!app->aut.automate) { + channel = g_io_channel_unix_new (0); + g_io_add_watch (channel, G_IO_IN, on_input, NULL); + } + + _intr_setup (); + g_timeout_add (400, check_for_interrupt, NULL); + + /* Automation stuff */ + if (app->aut.automate) { + + if (app->aut.capture_start_time < 0) { + g_print ("Invalid capture start time. Can't go back in time!/" + "Not even Gstreamer! Setting default time.\n"); + app->aut.capture_start_time = NVGST_DEFAULT_CAP_START_DELAY; + } + + if (app->aut.quit_time < 0) { + g_print ("Invalid quit after time. Setting default quit time = 0.\n"); + app->aut.quit_time = NVGST_DEFAULT_QUIT_TIME; + } + + if (app->aut.capture_gap < 0) { + g_print + ("Invalid capture gap time. Setting default capture gap = 250 ms\n"); + app->aut.capture_gap = NVGST_DEFAULT_CAPTURE_GAP; + } + + if (app->aut.capture_time < 0) { + g_print ("Invalid capture time. 
Setting default capture time = 10 s\n"); + app->aut.capture_time = NVGST_DEFAULT_CAPTURE_TIME; + } + + if (app->aut.iteration_count < 1) { + g_print ("Invalid iteration count. Setting to default count = 1.\n"); + app->aut.iteration_count = NVGST_DEFAULT_ITERATION_COUNT; + } + + g_timeout_add_seconds (app->aut.capture_start_time, auto_capture, NULL); + + } + + CALL_GUI_FUNC (init, &GET_GUI_CTX (), &argc, &argv); + + /* Start capture pipeline */ +#ifdef WITH_STREAMING + if (app->streaming_mode) { + app->mode = CAPTURE_VIDEO; + g_main_loop_run (loop); + } else +#endif + if (create_capture_pipeline ()) { + NVGST_INFO_MESSAGE ("iterating capture loop ...."); + g_main_loop_run (loop); + } else + NVGST_CRITICAL_MESSAGE ("Capture Pipeline creation failed"); + + /* Out of the main loop, now clean up */ + CALL_GUI_FUNC (finish); + + destroy_capture_pipeline (); + + NVGST_INFO_MESSAGE ("Capture completed"); + +done: + if (channel) + g_io_channel_unref (channel); + +#ifdef WITH_STREAMING + if (nvgst_rtsp_lib) + dlclose (nvgst_rtsp_lib); +#endif + + if (loop) + g_main_loop_unref (loop); + +#if !GUI + g_mutex_lock (app->lock); + if (app->disp.window) + nvgst_destroy_window (&app->disp); + g_mutex_unlock (app->lock); + + g_thread_join(app->x_event_thread); + + if (app->disp.mDisplay) + nvgst_x11_uninit (&app->disp); +#endif + + if (app->lock) { + g_mutex_clear (app->lock); + app->lock = NULL; + } + + if (app->cond) { + g_cond_clear (app->cond); + app->cond = NULL; + } + + if (app->x_cond) { + g_cond_clear (app->x_cond); + app->x_cond = NULL; + } + + g_free (app->vidcap_device); + g_free (app->cap_dev_node); + g_free (app->file_name); + g_free (app->csi_options_argus); + g_free (app->overlayConfig); + g_free (app->eglConfig); + g_free (app->lock); + g_free (app->cond); + g_free (app->x_cond); + + NVGST_INFO_MESSAGE ("Camera application will now exit"); + + return ((app->return_value == -1) ? 
-1 : 0); +} diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.h b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.h new file mode 100644 index 0000000..be3b484 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstcapture-1.0/nvgstcapture.h @@ -0,0 +1,686 @@ +/* + * Copyright (c) 2014-2021, NVIDIA CORPORATION. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef _NV_GST_CAPTURE_H_ +#define _NV_GST_CAPTURE_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "gst/pbutils/pbutils.h" +#include "gst/pbutils/encoding-profile.h" +#include "gst/pbutils/encoding-target.h" +#include "nvgst_x11_common.h" + +#ifdef WITH_GUI + +#include "nvgstcapture_gui_interface.h" + +#else + +int dummy_func (void); +int dummy_func () +{ + return 0; +} +#define GUI 0 +#define CALL_GUI_FUNC(func, ...) 
dummy_func() +#define GET_GUI_CTX() NULL + +#endif + +#define FUNCTION_START() \ + time_t startTime = 0; \ + struct timeval start_tv = {0, 0}; \ + time_t endTime = 0; \ + struct timeval end_tv = {0, 0}; \ + if (gettimeofday(&start_tv, NULL) == 0) \ + startTime = start_tv.tv_sec * 1000000 + start_tv.tv_usec; \ + else \ + startTime = 0; + +#define FUNCTION_END() \ + if (gettimeofday(&end_tv, NULL) == 0) \ + endTime = end_tv.tv_sec * 1000000 + end_tv.tv_usec; \ + else \ + endTime = 0; \ + if (app->enableKpiProfile) \ + g_print("\nKPI total time for %s in mS: %ld\n", \ + __func__, ((endTime - startTime) / 1000)); + +#define GET_TIMESTAMP(PLACE) \ + if (gettimeofday(&app->timeStamp, NULL) == 0) \ + app->timeStampStore[PLACE] = (app->timeStamp.tv_sec * 1000000) + app->timeStamp.tv_usec; \ + else \ + app->timeStampStore[PLACE] = 0; + +/* CAPTURE GENERIC */ +#define NVGST_DEFAULT_CAPTURE_MODE CAPTURE_IMAGE +#define NVGST_DEFAULT_FILENAME "nvcamtest" +#define NVGST_DEFAULT_FILE_TYPE FILE_MP4 +#define NVGST_DEFAULT_LOCATION "/dev/null" +#define NVGST_DEFAULT_CAPTURE_FORMAT "I420" +#define NVGST_DEFAULT_CAPTURE_FPS 30 +#define NVGST_DEFAULT_VIDCAP_DEVICE "/dev/video0" +#define DEFAULT_LOCATION "/dev/null" +#define SUCCESS 0 + +/* PREVIEW */ +#define NVGST_DEFAULT_PREVIEW_WIDTH 640 +#define NVGST_DEFAULT_PREVIEW_HEIGHT 480 +#define NVGST_DEFAULT_RENDER_TARGET RENDER_OVERLAY + +/* IMAGE & VIDEO CAPTURE */ +#define NVGST_DEFAULT_VIDEO_MIMETYPE "video/x-raw" +#define NVGST_DEFAULT_CAPTURE_WIDTH 640 +#define NVGST_DEFAULT_CAPTURE_HEIGHT 480 +#define NVGST_DEFAULT_480P_ENCODER_BITRATE 4000000 +#define NVGST_DEFAULT_720P_ENCODER_BITRATE 8000000 +#define NVGST_DEFAULT_1080P_ENCODER_BITRATE 14000000 +#define NVGST_DEFAULT_2160P_ENCODER_BITRATE 20000000 +#define NVGST_DEFAULT_VIDEO_ENCODER_PROFILE PROFILE_HIGH +#define NVGST_DEFAULT_VIDEO_ENCODER_CONTROLRATE CONTROLRATE_VARIABLE +#define NVGST_DEFAULT_VIDEO_ENCODER_TWOPASSCBR FALSE + +#define NVGST_DEFAULT_IMAGE_ENCODER FORMAT_JPEG_HW 
+#define NVGST_DEFAULT_VIDEO_ENCODER FORMAT_H264_HW +#define NVGST_DEFAULT_FLIP_METHOD 0 + +/* CAPTURE ELEMENTS */ +#define NVGST_VIDEO_CAPTURE_SRC_TEST "videotestsrc" +#define NVGST_VIDEO_CAPTURE_SRC_V4L2 "v4l2src" +#define NVGST_VIDEO_CAPTURE_SRC_CSI_ARGUS "nvarguscamerasrc" +#define NVGST_EGLSTREAM_CAPTURE_SRC "nveglstreamsrc" +#define NVGST_VIDEO_SINK "nvvideosink" +#define NVGST_DEFAULT_VIDEO_CONVERTER "videoconvert" +#define NVGST_DEFAULT_VIDEO_CONVERTER_CSI "nvvidconv" +#define NVGST_DEFAULT_VIDEO_SCALER "videoscale" +#ifdef WITH_GUI +#define NVGST_DEFAULT_PREVIEW_SINK_CSI "nveglglessink" +#else +#define NVGST_DEFAULT_PREVIEW_SINK_CSI "nveglglessink" +#endif +#define NVGST_DEFAULT_PREVIEW_SINK_USB "xvimagesink" +#define NVGST_DEFAULT_CAPTURE_FILTER "capsfilter" +#define NVGST_DEFAULT_IMAGE_ENC "nvjpegenc" +#define NVGST_DEFAULT_IMAGE_ENC_CONVERTER "nvvidconv" +#define NVGST_SW_IMAGE_ENC "jpegenc" +#define NVGST_DEFAULT_IENC_SINK "fakesink" +#define NVGST_DEFAULT_VENC_SINK "filesink" +#define NVGST_DEFAULT_VENC_PARSE "h264parse" +#define NVGST_PRIMARY_V4L2_H264_VENC "nvv4l2h264enc" +#define NVGST_PRIMARY_V4L2_VP8_VENC "nvv4l2vp8enc" +#define NVGST_PRIMARY_V4L2_VP9_VENC "nvv4l2vp9enc" +#define NVGST_PRIMARY_V4L2_H265_VENC "nvv4l2h265enc" +#define NVGST_PRIMARY_H264_PARSER "h264parse" +#define NVGST_PRIMARY_H265_PARSER "h265parse" +#define NVGST_PRIMARY_MP4_MUXER "qtmux" +#define NVGST_PRIMARY_3GP_MUXER "3gppmux" +#define NVGST_PRIMARY_MKV_MUXER "matroskamux" +#define NVGST_PRIMARY_STREAM_SELECTOR "tee" +#define NVGST_PRIMARY_QUEUE "queue" +#define NVGST_PRIMARY_IDENTITY "identity" + +#ifdef WITH_STREAMING +#define NVGST_STREAMING_SRC_FILE "uridecodebin" +#endif + +/* CSI CAMERA DEFAULT PROPERTIES TUNING */ + +#define NVGST_DEFAULT_WHITEBALANCE 1 +#define NVGST_DEFAULT_SATURATION 1 +#define NVGST_DEFAULT_EXPOSURE_COMPENSATION 0 +#define NVGST_DEFAULT_TNR_STRENGTH -1 +#define NVGST_DEFAULT_EE_STRENGTH -1 +#define NVGST_DEFAULT_AEANTIBANDING 0 +#define 
NVGST_DEFAULT_AE_LOCK 0 +#define NVGST_DEFAULT_AWB_LOCK 0 +#define NVGST_DEFAULT_TNR_MODE 1 +#define NVGST_DEFAULT_EE_MODE 1 +#define NVGST_DEFAULT_SENSOR_ID 0 +#define NVGST_DEFAULT_SENSOR_MODE -1 +#define NVGST_DEFAULT_DISPLAY_ID 0 + +#define MIN_EXPOSURE_COMPENSATION -2 +#define MAX_EXPOSURE_COMPENSATION 2 +#define MIN_TNR_MODE 0 +#define MAX_TNR_MODE 2 +#define MIN_EE_MODE 0 +#define MAX_EE_MODE 2 +#define MIN_STRENGTH -1 +#define MAX_STRENGTH 1 +#define MIN_AE_ANTIBANDING_MODE 0 +#define MAX_AE_ANTIBANDING_MODE 3 + +/* CSI CAMERA DEFAULT AUTOMATION */ + +#define NVGST_DEFAULT_AUTOMATION_MODE FALSE +#define NVGST_DEFAULT_CAP_START_DELAY 5 +#define NVGST_DEFAULT_QUIT_TIME 0 +#define NVGST_DEFAULT_ITERATION_COUNT 1 +#define NVGST_DEFAULT_CAPTURE_GAP 250 +#define NVGST_DEFAULT_CAPTURE_TIME 10 +#define NVGST_DEFAULT_NUM_SENSORS 2 +#define NVGST_DEFAULT_TOGGLE_CAMERA_MODE FALSE +#define NVGST_DEFAULT_TOGGLE_CAMERA_SENSOR FALSE +#define NVGST_DEFAULT_TOGGLE_CAMERA_SENSOR_MODES FALSE +#define NVGST_DEFAULT_ENUMERATE_WHITEBALANCE FALSE +#define NVGST_DEFAULT_ENUMERATE_SATURATION FALSE +#define NVGST_DEFAULT_ENUMERATE_CAPTURE_AUTO FALSE + + +#define MIN_V4L2_RES PR_176x144 +#define MAX_V4L2_RES PR_1920x1080 +#define MIN_CSI_RES PR_640x480 +#define MAX_CSI_RES PR_5632x4224 + +/* DEBUG LOG LEVEL */ +#ifdef NVGST_LOG_LEVEL_DEBUG +#define NVGST_ENTER_FUNCTION() g_print("%s{", __FUNCTION__) +#define NVGST_EXIT_FUNCTION() g_print("%s}", __FUNCTION__) +#define NVGST_EXIT_FUNCTION_VIA(s) g_print("%s}['%s']", __FUNCTION__, s) +#define NVGST_DEBUG_MESSAGE(s) g_debug("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_DEBUG_MESSAGE_V(s, ...) g_debug("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_INFO_MESSAGE(s) g_message("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_INFO_MESSAGE_V(s, ...) 
g_message("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_WARNING_MESSAGE(s) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_WARNING_MESSAGE_V(s, ...) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__,__VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) g_error("<%s:%d> "s, __FUNCTION__, __LINE__,__VA_ARGS__) + +#elif defined NVGST_LOG_LEVEL_INFO +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) g_message("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_INFO_MESSAGE_V(s, ...) g_message("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_WARNING_MESSAGE(s) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_WARNING_MESSAGE_V(s, ...) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) 
g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) + +#elif defined NVGST_LOG_LEVEL_WARNING +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE(s) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_WARNING_MESSAGE_V(s, ...) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) + +#elif defined NVGST_LOG_LEVEL_CRITICAL +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE_V(s, ...) 
G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) + +#else +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_CRITICAL_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_CRITICAL_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) 
g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#endif +#define INVALID_SELECTION_ARGUS "Not a valid option for ARGUS Plugin\n" + +/* CAMERA KPI PARAMS */ +typedef enum +{ + FIRST_FRAME = 0, + APP_LAUNCH, + CURRENT_EVENT, + KPI_EVENT_SIZE, +} KpiEvents; + +/* CAMERA AUTOMATION PARAMS */ +typedef struct +{ + gint capture_start_time; + gint quit_time; + gint iteration_count; + gint capture_gap; + gint capture_time; + gint num_sensors; + gboolean automate; + gboolean toggle_mode; + gboolean toggle_sensor; + gboolean toggle_sensor_modes; + gboolean enum_wb; + gboolean enum_st; + gboolean capture_auto; +} Automate; + +/* PREVIEW RESOLUTION */ +typedef enum +{ + PR_176x144 = 0, + PR_320x240, + PR_640x480, + PR_1280x720, + PR_1920x1080, + PR_2104x1560, + PR_2592x1944, + PR_2616x1472, + PR_3840x2160, + PR_3896x2192, + PR_4208x3120, + PR_5632x3168, + PR_5632x4224, +} Prev_Res; + +/* IMAGE CAPTURE RESOLUTION */ +typedef enum +{ + IR_176x144 = 0, + IR_320x240, + IR_640x480, + IR_1280x720, + IR_1920x1080, + IR_2104x1560, + IR_2592x1944, + IR_2616x1472, + IR_3840x2160, + IR_3896x2192, + IR_4208x3120, + IR_5632x3168, + IR_5632x4224, +} Icap_Res; + +/* VIDEO CAPTURE RESOLUTION */ +typedef enum +{ + VR_176x144 = 0, + VR_320x240, + VR_640x480, + VR_1280x720, + VR_1920x1080, + VR_2104x1560, + VR_2592x1944, + VR_2616x1472, + VR_3840x2160, + VR_3896x2192, + VR_4208x3120, + VR_5632x3168, + VR_5632x4224, +} Vcap_Res; + +#define RESOLUTION_STRINGS {"176 x 144", "320 x 240", "640 x 480", \ + "1280 x 720", "1920 x 1080", "2104 x 1560", "2592 x 1944", \ + "2616 x 1472", "3840 x 2160", "3896 x 2192", "4208 x 3120", \ + "5632 x 3168", "5632 x 4224", NULL}; + +/* CAPTURE CONTAINER TYPE */ +typedef enum +{ + FILE_MP4 = 0, + FILE_3GP, + FILE_MKV, + FILE_H265 +} FileType; + +#define FILE_TYPE_STRINGS {"MP4", "3GP", "MKV", "H.265", NULL}; + +/* IMAGE ENCODE TYPE */ +typedef enum +{ + FORMAT_JPEG_SW = 0, + FORMAT_JPEG_HW +} ImageEncFormatType; + +#define IMAGE_ENCODER_STRINGS {"SW JPEG", 
"HW JPEG", NULL}; + +/* VIDEO ENCODE TYPE */ +typedef enum +{ + FORMAT_H264_HW = 0, + FORMAT_VP8_HW, + FORMAT_H265_HW, + FORMAT_VP9_HW +} VideoEncFormatType; + +#define VIDEO_ENC_STRINGS {"H.264 (HW)", "VP8 (HW)", "H.265 (HW)", "VP9 (HW)", NULL}; + +/* H264 ENCODE PROFILE TYPE */ +typedef enum +{ + PROFILE_BASELINE = 0, + PROFILE_MAIN, + PROFILE_HIGH +} H264EncProfileType; + +/* ENCODER BITRATE CONTROL METHOD */ +typedef enum +{ + CONTROLRATE_DISABLE, + CONTROLRATE_VARIABLE, + CONTROLRATE_CONSTANT +} EncControlRateType; + +/* CAPTURE MODE */ +typedef enum +{ + CAPTURE_NONE = 0, + CAPTURE_IMAGE, + CAPTURE_VIDEO +} CaptureType; + +/* CAPTURE COLOR FORMAT */ +typedef enum +{ + CAPTURE_I420, + CAPTURE_NV12, + CAPTURE_YUY2, + CAPTURE_UYVY, +} CaptureColorFormat; + +/* CAPTURE PAD TYPE */ +typedef enum +{ + CAPTURE_PAD_PREV = 0, + CAPTURE_PAD_IMAGE, + CAPTURE_PAD_VIDEO +} CapturePadType; + +typedef enum +{ + NV_CAM_SRC_V4L2, + NV_CAM_SRC_CSI, + NV_CAM_SRC_TEST, + NV_CAM_SRC_EGLSTREAM +} NvCamSrcType; + +typedef enum +{ + HW_V4L2_ENC = 1 +} HardwareEncoderType; + +/* CAMERA CAPTURE RESOLUTIONS */ +typedef struct +{ + gint preview_width; + gint preview_height; + gint cus_prev_width; + gint cus_prev_height; + gint prev_res_index; + gint image_cap_width; + gint image_cap_height; + gint img_res_index; + gint video_cap_width; + gint video_cap_height; + gint vid_res_index; + gint current_max_res; +} CamRes; + +/* CAMERA ENCODER PARAMS */ +typedef struct +{ + gint image_enc; + gint video_enc; + HardwareEncoderType hw_enc_type; + guint bitrate; + gboolean enabletwopassCBR; + EncControlRateType controlrate; + H264EncProfileType video_enc_profile; +} EncSet; + +/* CAPTURE PIPELINE ELEMENTS */ +typedef struct +{ + GstElement *camera; + GstElement *vsrc; + GstElement *vsink; + GstElement *colorspace_conv; + GstElement *cap_filter; + GstElement *cap_tee; + GstElement *prev_q; + GstElement *ienc_q; + GstElement *venc_q; + GstElement *vid_enc; + GstElement *img_enc; + GstElement 
*img_enc_conv; + GstElement *parser; + GstElement *muxer; + GstElement *img_sink; + GstElement *video_sink; + GstElement *capbin; + GstElement *vid_bin; + GstElement *img_bin; + GstElement *svsbin; + GstElement *vid_enc_conv; + GstElement *vid_enc_cap_filter; + + /* Elements for EGLStreamProducer */ + GstElement *eglproducer_pipeline; + GstElement *eglproducer_bin; + GstElement *eglproducer_videosink; + GstElement *eglproducer_nvvideosink; + GstElement *eglproducer_videotestsrc; + GstElement *eglproducer_capsfilter; + GstElement *eglproducer_videoconvert; + + /* Scaling elements for preview, image and video */ + GstElement *svc_prebin; + GstElement *svc_prevconv; + GstElement *svc_prevconv_out_filter; + GstElement *svc_imgbin; + GstElement *svc_imgvconv; + GstElement *svc_imgvconv_out_filter; + GstElement *svc_vidbin; + GstElement *svc_vidvconv; + GstElement *svc_vidvconv_out_filter; + + /* Elements for video snapshot */ + GstElement *vsnap_q; + GstElement *vsnap_bin; + GstElement *vsnap_enc; + GstElement *vsnap_sink; + GstElement *svc_snapconv; + GstElement *svc_snapconv_out_filter; +} CamPipe; + +#ifdef WITH_STREAMING +typedef struct +{ + GObject *media_factory; + GstElement *appsrc; + GstElement *streaming_file_src_conv; + gchar *streaming_src_file; +} RTSPStreamingCtx; +#endif + +/* EGLStream Producer ID */ +typedef enum +{ + EGLSTREAM_PRODUCER_ID_SCF_CAMERA = 0, + EGLSTREAM_PRODUCER_ID_MAX, +} EGLStream_Producer_ID; + +/* CAMERA CONTEX PARAMS */ +typedef struct +{ + gint mode; + gint file_type; + gint capture_count; + gint return_value; + gint capcount; + gint color_format; + gint color_format_csi; + gint color_format_v4l2; + gboolean muxer_is_identity; + + /*CSI camera features */ + gint whitebalance; + gint ae_antibanding; + gint tnr_mode; + gint ee_mode; + gint timeout; + gfloat saturation; + gfloat exposure_compensation; + gfloat tnr_strength; + gfloat ee_strength; + guint sensor_id; + guint sensor_mode; + guint framerate; + guint flip_method; + guint 
display_id; + guint overlay_index; + guint overlay_x_pos; + guint overlay_y_pos; + guint overlay_width; + guint overlay_height; + + GstPadProbeReturn native_record; + + gchar *svs; + gchar *file_name; + gchar *csi_options_argus; + gchar *csi_resolution; + gchar *usb_options; + gchar *encoder_options; + gchar *vidcap_device; + gchar *cap_dev_node; + gchar *overlayConfig; + gchar *eglConfig; + gchar *exposure_timerange; + gchar *gain_range; + gchar *isp_digital_gainrange; + + NvCamSrcType cam_src; + gboolean cap_success; + gboolean use_cus_res; + gboolean use_eglstream; + + gboolean first_frame; + time_t timeStampStore[KPI_EVENT_SIZE]; + struct timeval timeStamp; + gboolean enableKpiProfile; + gboolean enableKpiNumbers; + gboolean enableMeta; + gboolean enableAeLock; + gboolean enableAwbLock; + gulong prev_probe_id; + gulong enc_probe_id; + time_t currentFrameTime; + time_t prevFrameTime; + gulong frameCount; + time_t accumulator; + time_t currentEncFrameTime; + time_t prevEncFrameTime; + gulong encFrameCount; + time_t encAccumulator; + + GMutex *lock; + GCond *cond; + GCond *x_cond; + GThread *reset_thread; + GThread *x_event_thread; + + CamRes capres; + EncSet encset; + CamPipe ele; + displayCtx disp; + + /* EGLStream */ + EGLStream_Producer_ID eglstream_producer_id; + EGLDisplay display; + EGLStreamKHR stream; + + /* EGLStream Producer */ + guint fifosize; + gboolean enable_fifo; + + /* AUTOMATION */ + Automate aut; + +#ifdef WITH_STREAMING + gint streaming_mode; + RTSPStreamingCtx video_streaming_ctx; +#endif +} CamCtx; + +#endif + diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.c b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.c new file mode 100644 index 0000000..89a95c7 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.c @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2015-2021 NVIDIA Corporation. All rights reserved. 
+ * + * NVIDIA Corporation and its licensors retain all intellectual property + * and proprietary rights in and to this software, related documentation + * and any modifications thereto. Any use, reproduction, disclosure or + * distribution of this software and related documentation without an express + * license agreement from NVIDIA Corporation is strictly prohibited. + * + */ + +#include "nvgst_asound_common.h" + +char * +nvgst_asound_get_device () +{ + int card_num = -1, device_num = -1; + char ctl_name[15]; + char dev_name[20]; + snd_ctl_t *ctl; + snd_pcm_t *handle; + + if (snd_pcm_open (&handle, "default", SND_PCM_STREAM_PLAYBACK, 0) == 0) { + snd_pcm_close (handle); + return strdup ("default"); + } + + while (snd_card_next (&card_num) == 0 && card_num > -1) { + snprintf (ctl_name, sizeof(ctl_name)-1, "hw:%d", card_num); + ctl_name[sizeof (ctl_name)-1] = '\0'; + if (snd_ctl_open (&ctl, ctl_name, 0) < 0) + continue; + device_num = -1; + while (snd_ctl_pcm_next_device (ctl, &device_num) == 0 && device_num > -1) { + snprintf (dev_name, sizeof(dev_name)-1, "hw:%d,%d", card_num, device_num); + dev_name[sizeof (dev_name)-1] = '\0'; + if (snd_pcm_open (&handle, dev_name, SND_PCM_STREAM_PLAYBACK, 0) == 0) { + snd_pcm_close (handle); + snd_ctl_close (ctl); + return strdup (dev_name); + } + } + snd_ctl_close (ctl); + } + + return NULL; +} + diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.h b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.h new file mode 100644 index 0000000..827784e --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_asound_common.h @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2015 NVIDIA Corporation. All rights reserved. + * + * NVIDIA Corporation and its licensors retain all intellectual property + * and proprietary rights in and to this software, related documentation + * and any modifications thereto. 
Any use, reproduction, disclosure or + * distribution of this software and related documentation without an express + * license agreement from NVIDIA Corporation is strictly prohibited. + * + */ + +#include <alsa/asoundlib.h> +#include <string.h> + +char * nvgst_asound_get_device (void); diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.c b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.c new file mode 100644 index 0000000..9b1866f --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.c @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2011-2015, NVIDIA CORPORATION. All rights reserved.
+ */ + +#include "nvgst_x11_common.h" + + +Display * +nvgst_x11_init (displayCtx * dpyCtx) +{ + dpyCtx->isDPMSdisabled = 0; + dpyCtx->mDisplay = XOpenDisplay (NULL); + if (!dpyCtx->mDisplay) + printf + ("\nCannot open display specified with DISPLAY environment variable\n"); + return dpyCtx->mDisplay; +} + +void +nvgst_x11_uninit (displayCtx * dpyCtx) +{ + if (dpyCtx->window) + XDestroyWindow (dpyCtx->mDisplay, dpyCtx->window); + XCloseDisplay (dpyCtx->mDisplay); + dpyCtx->mDisplay = NULL; +} + +void +saver_off (displayCtx * dpyCtx) +{ + int nothing; + if (DPMSQueryExtension (dpyCtx->mDisplay, ¬hing, ¬hing)) { + BOOL enabled; + CARD16 powerLevel; + + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (enabled) { + DPMSDisable (dpyCtx->mDisplay); + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (enabled) { + printf ("\ncould not disable DPMS\n"); + } + } else { + printf ("\nDPMS already DISABLED\n"); + dpyCtx->isDPMSdisabled = 1; + } + } else + printf ("\nserver does not have extension for -dpms option\n"); +} + +void +saver_on (displayCtx * dpyCtx) +{ + int nothing; + if (DPMSQueryExtension (dpyCtx->mDisplay, ¬hing, ¬hing)) { + BOOL enabled; + CARD16 powerLevel; + + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (!enabled) { + if (!dpyCtx->isDPMSdisabled) + DPMSEnable (dpyCtx->mDisplay); + DPMSInfo (dpyCtx->mDisplay, &powerLevel, &enabled); + if (!enabled && !dpyCtx->isDPMSdisabled) { + printf ("\ncould not enable DPMS\n"); + } + } else + printf ("\nDPMS already ENABLED\n"); + } else + printf ("\nserver does not have extension for -dpms option\n"); + +} + +void +nvgst_create_window (displayCtx * dpyCtx, char *title) +{ + int screen = 0; + XTextProperty xproperty; + + if (dpyCtx->mDisplay) { + screen = DefaultScreen (dpyCtx->mDisplay); + if (!dpyCtx->width && !dpyCtx->height) { + dpyCtx->width = DisplayWidth (dpyCtx->mDisplay, screen); + dpyCtx->height = DisplayHeight (dpyCtx->mDisplay, screen); + } + + dpyCtx->window = XCreateSimpleWindow 
(dpyCtx->mDisplay, + RootWindow (dpyCtx->mDisplay, screen), + dpyCtx->x, dpyCtx->y, dpyCtx->width, dpyCtx->height, 0, 0, + BlackPixel (dpyCtx->mDisplay, screen)); + + XSetWindowBackgroundPixmap (dpyCtx->mDisplay, dpyCtx->window, None); + + if (title) { + if ((XStringListToTextProperty (((char **) &title), 1, &xproperty)) != 0) { + XSetWMName (dpyCtx->mDisplay, dpyCtx->window, &xproperty); + XFree (xproperty.value); + + } + } else + printf ("\ncan't set title to window, title NULL\n"); + + /* Tell the window manager we'd like delete client messages instead of + * being killed */ + Atom wmDeleteMessage = + XInternAtom (dpyCtx->mDisplay, "WM_DELETE_WINDOW", False); + if (wmDeleteMessage != None) { + XSetWMProtocols (dpyCtx->mDisplay, dpyCtx->window, &wmDeleteMessage, 1); + } + + XMapRaised (dpyCtx->mDisplay, dpyCtx->window); + + XSync (dpyCtx->mDisplay, 1); //discard the events for now + } else + printf ("\ncan't create window, Display NULL\n"); + +} + +void +nvgst_destroy_window (displayCtx * dpyCtx) +{ + XDestroyWindow (dpyCtx->mDisplay, dpyCtx->window); + dpyCtx->window = (Window) NULL; +} diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.h b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.h new file mode 100644 index 0000000..dbe1c59 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgst_x11_common.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2011-2015, NVIDIA CORPORATION. All rights reserved. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <X11/Xlib.h> +#include <X11/Xutil.h> +#include <X11/extensions/dpms.h> + +typedef struct +{ + Display *mDisplay; + Window window; + int width; + int height; + int x; + int y; + int isDPMSdisabled; +} displayCtx; + +Display *nvgst_x11_init (displayCtx * dpyCtx); +void nvgst_x11_uninit (displayCtx * dpyCtx); +void saver_off (displayCtx * dpyCtx); +void saver_on (displayCtx * dpyCtx); +void nvgst_create_window (displayCtx * dpyCtx, char *title); +void nvgst_destroy_window (displayCtx * dpyCtx); diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer-1.0_README.txt b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer-1.0_README.txt new file mode 100644 index 0000000..858ae43 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer-1.0_README.txt @@ -0,0 +1,354 @@ +/* + * Copyright (c) 2013-2015, NVIDIA CORPORATION. All rights reserved.
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + + +NvGstPlayer Usage (command: ./nvgstplayer-1.0 --help) +Gstreamer Version used (command: ./nvgstplayer-1.0 --version) +======= +=> PREREQUISITES: + +1. You must install GStreamer-1.0 on the target board using apt-get, as follows: + + sudo apt-get install gstreamer1.0-tools gstreamer1.0-alsa gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-ugly gstreamer1.0-plugins-bad gstreamer1.0-libav + +2. 
Execute the following commands on the target board's Ubuntu command line before starting the player: + + export DISPLAY=:0 + xinit & + +NvGstPlayer Usage +----------------------------------------------- +Run the nvgstplayer application with the following basic syntax: + +./nvgstplayer-1.0 -i <uri> + + nvgstplayer application supports the following options: + + -u, --urifile Path of the file containing the URIs + -i, --uri input URI + -r, --gst_script Path of the file containing the Script to be used + --automotive Enter the AutoMotive Mode, to support GstMediaPlayer functionalities in NvGstPlayer + -e, --elemfile Element(s) (Properties) file + -x, --cxpr Command sequence expression + -n, --loop Number of times to play the media + -c, --audio-track If stream has multiple audio tracks, play stream with given track no + -v, --video-track If stream has multiple video tracks, play stream with given track no + -a, --start Start of the segment in media in seconds + -d, --duration Play duration of the segment in media in seconds + --no-sync Disable AV Sync + --disable-dpms Unconditionally Disable DPMS/ScreenBlanking during operation and re-enable upon exit + --stealth Operate in stealth mode, alive even when no media is playing + --bg Operate in background mode, keyboard input will be entirely ignored + --use-playbin Use Playbin + --no-audio Disable audio + --no-video Disable video + --disable-anative Disable native audio rendering + --disable-vnative Disable native video rendering + --use-buffering Use Buffering + -l, --low-percent Low threshold for buffering to start, in % + -j, --high-percent High threshold for buffering to finish, in % + --loop-forever Play the URI(s) in loop forever + -t, --max-size-time Max. amount of time in the queue (0=automatic) + -y, --max-size-bytes Max. amount of bytes in the queue (0=automatic) + -b, --max-size-buffers Max.
amount of buffers in the queue (0=automatic) + --window-x X coordinate for player window (for non overlay rendering) + --window-y Y coordinate for player window (for non overlay rendering) + --window-width Window width (for non overlay rendering) + --window-height Window height (for non overlay rendering) + --disable-fullscreen Play video in non fullscreen mode (for nvxvimagesink) + -h, --drop-threshold-pct Permittable frames drop percentage, to be used with --stats (only for development purpose) + -k, --image-display-time Image display time in seconds + --show-tags shows tags (metadata), if available + --smartdimmer Enable smart dimmer for power saving + --stats shows stream statistics, if enabled + --stats-file File to dump stream statistics, if enabled + --svd (=) chain for video decoding + --sad (=) chain for audio decoding + --svc (=) chain for video postprocessing + --sac (=) chain for audio postprocessing + --svs (=) chain for video rendering + --sas (=) chain for audio rendering + --shttp (=) chain for http source + --srtsp (=) chain for rtsp source + --sudp (=) chain for udp source + --sfsrc (=) chain for file source + +Runtime Commands: + q quit the application + h print help + Up Key, ] goto next track + c restart current track + Down Key, [ goto previous track + spos query for position + sdur query for duration + s seek to position in seconds, eg "s5.120" + v seek to percent of the duration, eg "v54" + f seek by seconds, relative to current position eg "f23.901" + Left Key, < seek backwards by 10 seconds + Right Key, > seek forward by 10 seconds + p pause playback + r start/resume the playback + z stop the playback + i: enter a single URI + +The nvgstplayer application supports local file playback, and playback from RTSP, HTTP and UDP streams. 
+ +For example: + +./nvgstplayer-1.0 -i /home/test.mp4 +./nvgstplayer-1.0 -i file:///home/test.mp4 +./nvgstplayer-1.0 -i udp://192.168.2.5:123 + +The nvgstplayer application operates in two modes, Playbin (--use-playbin) and Decodebin. Decodebin +allows you to choose elements. +Currently only Decodebin is operational. + +A subset of options is documented below. For a complete description of available options use the --help option. + + +Audio/Video Decoder Chain (--sad/--svd) +--------------------------------------- +Specify a desired decode chain for a particular URI using --sad or --svd, with the -i option or using a URI file with the -u option. + +For example: + + --sad=”avdec_mp3” + --sad=”avdec_amrnb# variant=1 ! audioresample ! audioconvert” + --sad=”aacparse ! faad# min-latency=4000000” + --svd=”avdec_h264# prop=val# prop2=val2” + +Note the ‘#’ and ‘!’ character after every token. + + +Audio/Video PostProcess Chain (--sac/--svc) +------------------------------------------ +Specify a desired postprocess chain for a particular URI using --sac or --svc, with the -i option or using a URI file with the -u option. + +For example: + + --sac=”audioconvert ! audioresample” + --svc= “my_video_postprocess# prop1=val1 ! videoconvert ! videoscale” + +Note the ‘#’ and ‘!’ character after every token. To include this in your pipeline +use the -–disable-anative/--disable-vnative options, or, in a URI a file, +native_video=0/native_audio=0. + +Audio/Video Render Chain (--sas/--svs) +-------------------------------------- +Specify a desired render chain for a particular URI using --sas or --svs with the -i option or using a URI file with the -u option. + +For example: + + --sas=”alsasink# device=hw:0,0” + --sas=”audioconvert ! alsasink” + --svs= “xvimagesink” + --svs = “videoconvert ! videoscale ! ximagesink# sync=0” + +Note the ‘#’ and ‘!’ character after every token. 
+ + +Elements file (--elemfile/-e) +----------------------------- +Specify with --elemfile (or -e) an input file containing the sources, decoders, parsers, postprocess, and sinks that you want to use. + + +Decoders/Parsers Chain Format +----------------------------- +[capabilities] +type=, val = ‘sad’, ‘svd’ +pipe= + + +Post Process/Render chain Format +------------------------------- +[type] +pipe= + +For example: + +[video/x-h264] +type=svd +pipe=h264parse ! my_h264_dec# prop1=val1 + +[svs] +pipe=xvimagesink + +[sas] +pipe=audioconvert ! osssink# device=/dev/dsp1# latency-time=20000# + +[video/mpeg, mpegversion=4] +type=svd +pipe= ffdec_mpeg4 + +[audio/mpeg, mpegversion=1, layer=3] +type=sad +pipe= mp3parse ! mad + +[audio/mpeg, mpegversion=4] +type=sad +pipe=faad + +[svc] +pipe= videoconvert# qos=0 ! videoscale + + +Command Sequence Expression (--cxpr/-x) +--------------------------------------- +Use the --cxpr (or -x) option (as a string) to provide a series of commands in an expression format. +The following commands can be inserted in an expression: + +r play for seconds until next command in the expression +p pause for seconds until next command in the expression +z stop for seconds until next command in the expression +w do nothing for seconds until next command in the expression +s seek to absolute time seconds +v seek to absolute time in % of duration +f seek to relative time seconds from current position +> seek forward by 10 seconds +< seek behind by 10 seconds +] goto next track +[ goto previous track +c reset current track + +For example: + +r10.5,s80,w5,p7,r +2{p10,r,w7,s90.55,w2,p,w10,r2} +2{r20,s10,w5,p10, 2{s10,w6,r9}, 3{r10, p20}} +5{r8, p10, s90, w10}, 8{s10, w2, p20} + +Random cxpr: +------------ +A random command sequence expression can be generated by giving cxpr as "*" + +For example: + +./nvgstplayer-1.0 -i --cxpr=* + +This will generate a random command sequence expression (up to 100 characters). 
+ + +URI File (--urifile/-u) +----------------------- +Use the --urifile (or -u) option to specify a list of URIs. The player plays all URIs listed in the file. Any options for the URI specified in the file take precedence over the same options set by application command line arguments for that particular URI. + +Format of the URI file and the options for the URI +------------------------------------------------ +[uri1] +option1=val1 +option2=val2 +....... +........ + +[uri2] +option1=val1 +.......... +.......... + +List of all possible options for URIs inside URI file +----------------------------------------------------- +cxpr= command expression for the URI +nop= if '1' , will disable the global cxpr (if given as command line arg) +start= start playback time in seconds/ or percentage of URI duration +startper= '1' indicating that the 'start' option will be % value +duration= desired playback time in seconds +repeats= playback iterations +audio= 0/1 to disable/enable audio in the URI +video= 0/1 to disable/enable video in the URI +native_video= 0/1 to disable/enable native video rendering +native_audio= 0/1 to disable/enable native audio rendering +sync= 1/0 to enable/disable AV sync +use-buffering= 1/0 to enable/disable buffering +low-percent= low threshold for buffering in % +high-percent= high threshold for buffering in % +max-size-bytes= max size of bytes in queue for buffering +max-size-buffers= max size of buffers in queue for buffering +max-size-time= max size time in queue for buffering +image-display-time= image display time in seconds (if URI is an image file) +tags= 0/1 disable/enable tags print +svd= video decoding chain +sad= audio decoding chain +svc= video post-processing chain +sac= audio post-processing chain +svs= video rendering chain +sas= audio rendering chain +shttp= http source chain +srtsp= rtsp source chain +sudp= udp source chain +sfsrc= file source chain + + +For example, use the following as a URI file with the syntax 
"./nvgstplayer-1.0 -u TestURIfile": + +TestURIfile + +[/home/user/test.avi] +cxpr=2{p5, r, w10, s20.5, w6.6, p3, 2 {r, w10, p4}} +start=10.45 +repeats=3 +audio=0 +native_video=0 +svd=my_h264dec +svc=videoconvert ! my_pp# prop1=shuba# prop2=val2# ! videoscale + +[/home/user/test.mp3] +sas=audioconvert ! alsasink + +[/home/user/test.mp4] +nop=1 +video=0 + + +NOTES: + +1. Elements to use can be provided in several different ways: + + a. Provided in an elements file. + + b. Provided as command-line arguments applicable to all the streams in the URI file, with the –I option. Elements provided in this way override values (if present) inside an elements file. + + c. Provided in a URI file, specific for each URI, if desired. Elements provided in this way override any elements provided at the command-line or in an elements file. + +2. For Laguna-ERSS on K3.10, no amixer settings are required to enable/disable internal speaker as internal speaker is not present on the board. + + a. To enable audio through Headphones, use: + + amixer cset name="Int Spk Switch" 0 + amixer cset name='Headphone Jack Switch' 1 + + b. To disable audio through Headphones, use: + + amixer cset name='Headphone Jack Switch' 0 + + By default, audio is enabled through Headphones. + +3. The URI list can be looped forever by providing --loop-forever option: + + ./nvgstplayer-1.0 -u --loop-forever + +4. To enable hw accelerated jpeg decoding use following command: + + ./nvgstplayer-1.0 -i --svd=nvjpegdec --svs=nvoverlaysink + +5. nvhdmioverlaysink has been deprecated. Use nvoverlaysink with display-id property instead. 
+ + ./nvgstplayer-1.0 -i --svs="nvoverlaysink # display-id=" diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.c b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.c new file mode 100644 index 0000000..dc34c40 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.c @@ -0,0 +1,4050 @@ +/* + * Copyright (c) 2013-2021, NVIDIA CORPORATION. All rights reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. 
+ */ + +#include "nvgstplayer.h" + +static gboolean parse_spec (const gchar *, const gchar *, gpointer, GError **); +static void destroy_current_track (void); +static void reset_current_track (void); +static NvGstReturn setup_track (void); +gboolean goto_next_track (gpointer); +static gboolean on2_input (gpointer); +static void nvgst_handle_xevents (void); +static gpointer nvgst_x_event_thread (gpointer); +NvGstReturn exec_ops (NvGstOperation); +static gboolean build_cmlist (gchar *, attrs_s *); +static void free_cmlist (attrs_s *, gboolean); +static NvGstReturn get_next_command (attrs_s *, gchar *, gint buf_size, gboolean); +static gchar *get_random_cxpr (void); +void get_elem_cfg (gchar * file); +static void set_sync (GstElement * vsink, gboolean sync); +void quit_app (void); +void set_window_handle (Window window); + +static gboolean cintr = FALSE; +static gint timeout_id; +static GMainLoop *loop = NULL; +static guint last_n = 1; +static gint multitrack_instance = 1; + +static GThread *trd = NULL; +static gboolean trd_exit = FALSE; + +appCtx sapp, *app; +gchar *urifile = NULL, *elemfile = NULL, *cxpr = NULL; +gdouble segment_start = 0, segment_duration = 0, max_size_time = 0; +gint iteration_count = 1 ; + +GOptionEntry entries[] = { + {"version", 0, 0, G_OPTION_ARG_NONE, &sapp.version, + "Prints the version of Gstreamer used", NULL}, + {"urifile", 'u', 0, G_OPTION_ARG_STRING, &urifile, + "Path of the file containing the URIs", NULL}, + {"uri", 'i', 0, G_OPTION_ARG_STRING, &sapp.uri, "input URI", NULL}, + {"elemfile", 'e', 0, G_OPTION_ARG_STRING, &elemfile, + "Element(s) (Properties) file", NULL}, + {"cxpr", 'x', 0, G_OPTION_ARG_STRING, &cxpr, "Command sequence expression", + NULL}, + {"loop", 'n', 0, G_OPTION_ARG_INT, &sapp.attrs.repeats, + "Number of times to play the media", NULL}, + {"audio-track", 'c', 0, G_OPTION_ARG_INT, &sapp.attrs.aud_track, + "If stream have multiple audio tracks, play stream with given track no", + NULL}, + {"video-track", 'v', 0, 
G_OPTION_ARG_INT, &sapp.attrs.vid_track, + "If stream have multiple video tracks, play stream with given track no", + NULL}, + {"start", 'a', 0, G_OPTION_ARG_DOUBLE, &segment_start, + "Start of the segment in media in seconds", NULL}, + {"duration", 'd', 0, G_OPTION_ARG_DOUBLE, &segment_duration, + "Play duration of the segment in media in seconds", NULL}, + {"no-sync", 0, 0, G_OPTION_ARG_NONE, &sapp.attrs.sync, "Disable AV Sync", + NULL}, + {"disable-dpms", 0, 0, G_OPTION_ARG_NONE, &sapp.disable_dpms, + "Unconditionally Disable DPMS/ScreenBlanking during operation and re-enable upon exit", + NULL}, + {"stealth", 0, 0, G_OPTION_ARG_NONE, &sapp.stealth_mode, + "Operate in stealth mode, alive even when no media is playing", NULL}, + {"bg", 0, 0, G_OPTION_ARG_NONE, &sapp.bg_mode, + "Operate in background mode, keyboard input will be entirely ignored", + NULL}, + {"use-playbin", 0, G_OPTION_FLAG_NO_ARG, G_OPTION_ARG_CALLBACK, parse_spec, + "Use Playbin", NULL}, + {"no-audio", 0, G_OPTION_FLAG_NO_ARG, G_OPTION_ARG_CALLBACK, parse_spec, + "Disable audio", NULL}, + {"no-video", 0, G_OPTION_FLAG_NO_ARG, G_OPTION_ARG_CALLBACK, parse_spec, + "Disable video", NULL}, + {"disable-anative", 0, G_OPTION_FLAG_NO_ARG, G_OPTION_ARG_CALLBACK, + parse_spec, "Disable native audio rendering", NULL}, + {"disable-vnative", 0, G_OPTION_FLAG_NO_ARG, G_OPTION_ARG_CALLBACK, + parse_spec, "Disable native video rendering", NULL}, + {"use-buffering", 0, 0, G_OPTION_ARG_NONE, &sapp.attrs.use_buffering, + "Use Buffering", NULL}, + {"low-percent", 'l', 0, G_OPTION_ARG_INT, &sapp.attrs.low_percent, + "Low threshold for buffering to start, in %", NULL}, + {"high-percent", 'j', 0, G_OPTION_ARG_INT, &sapp.attrs.high_percent, + "High threshold for buffering to finish, in %", NULL}, + {"loop-forever", 0, 0, G_OPTION_ARG_NONE, &sapp.attrs.loop_forever, + "Play the URI(s) in loop forever", NULL}, + {"max-size-time", 't', 0, G_OPTION_ARG_DOUBLE, &max_size_time, + "Max. 
amount of time in the queue (0=automatic)", NULL}, + {"max-size-bytes", 'y', 0, G_OPTION_ARG_INT, &sapp.attrs.max_size_bytes, + "Max. amount of bytes in the queue (0=automatic)", NULL}, + {"max-size-buffers", 'b', 0, G_OPTION_ARG_INT, &sapp.attrs.max_size_buffers, + "Max. amount of buffers in the queue (0=automatic)", NULL}, + {"window-x", 0, 0, G_OPTION_ARG_INT, &sapp.disp.x, + "X coordinate for player window (for non overlay rendering)", NULL}, + {"window-y", 0, 0, G_OPTION_ARG_INT, &sapp.disp.y, + "Y coordinate for player window (for non overlay rendering)", NULL}, + {"window-width", 0, 0, G_OPTION_ARG_INT, &sapp.disp.width, + "Window width (for non overlay rendering)", NULL}, + {"window-height", 0, 0, G_OPTION_ARG_INT, &sapp.disp.height, + "Window height (for non overlay rendering)", NULL}, + {"disable-fullscreen", 0, 0, G_OPTION_ARG_NONE, + &sapp.attrs.disable_fullscreen, + "Play video in non fullscreen mode (for nveglglessink)", NULL}, + {"drop-threshold-pct", 'h', 0, G_OPTION_ARG_INT, + &sapp.attrs.drop_threshold_pct, + "Permittable frames drop percentage, to be used with --stats (only for development purpose)", + NULL}, + {"image-display-time", 'k', 0, G_OPTION_ARG_INT64, + &sapp.attrs.image_display_time, "Image display time in seconds", NULL}, + {"show-tags", 0, 0, G_OPTION_ARG_NONE, &sapp.attrs.show_tags, + "shows tags (metadata), if available", NULL}, +#ifndef WITH_GUI + {"stats", 0, 0, G_OPTION_ARG_NONE, &sapp.stats, + "shows stream statistics, if enabled", NULL}, +#endif + {"stats-file", 0, 0, G_OPTION_ARG_STRING, &sapp.stats_file, + "File to dump stream statistics, if enabled", NULL}, + {"svd", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for video decoding", NULL}, + {"sad", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for audio decoding", NULL}, + {"svc", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for video postprocessing", NULL}, + {"sac", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for audio postprocessing", 
NULL}, +#ifndef WITH_GUI + {"svs", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for video rendering", NULL}, +#endif + {"sas", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for audio rendering", NULL}, + {"shttp", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for http source", NULL}, + {"srtsp", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for rtsp source", NULL}, + {"sudp", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for udp source", NULL}, + {"sfsrc", 0, 0, G_OPTION_ARG_CALLBACK, parse_spec, + "(=) chain for file source", NULL}, + {NULL}, +}; + + +static void +fps_init (pfData_s * self) +{ + self->max_fps = -1; + self->min_fps = -1; + /* Init counters */ + self->frames_rendered = 0; + self->frames_dropped = 0; + self->frames_dropped_decoder = 0; + self->last_frames_rendered = G_GUINT64_CONSTANT (0); + self->last_frames_dropped = G_GUINT64_CONSTANT (0); + + /* init time stamps */ + self->start_ts = GST_CLOCK_TIME_NONE; + self->last_ts = GST_CLOCK_TIME_NONE; + + self->initial_fps = TRUE; + self->prev_ts = -1; + self->avg_in_diff = -1; +} + + +static void +stats_func(gdouble average_fps, guint64 frames_rendered, guint64 frames_dropped) +{ + + if (average_fps) { + printf("\n\nITERATION %d :",iteration_count); + iteration_count++ ; + + guint64 total_frames; + gdouble percent_dropped; + + total_frames = frames_rendered + frames_dropped; + percent_dropped = (gdouble) frames_dropped / (gdouble) total_frames *100; + + g_printf ("\tTotal Frames = %" G_GUINT64_FORMAT ", Frames rendered = %" + G_GUINT64_FORMAT ", Frames dropped = %" G_GUINT64_FORMAT + ", Average fps = %.2f", total_frames, frames_rendered, frames_dropped, + average_fps); + if (percent_dropped < app->attrs.drop_threshold_pct) + g_printf + ("\n\t\tPercentage frames dropped = %.2f%% which is below acceptable limit of %d%%\n\n", + percent_dropped, app->attrs.drop_threshold_pct); + else { + g_printf + ("\n\t\tPercentage frames dropped = %.2f%% which is above acceptable 
limit of %d%%\n\n", + percent_dropped, app->attrs.drop_threshold_pct); + app->return_value = -1; + } + } + +} + + +static gboolean +display_current_fps (gpointer data) +{ + pfData_s *self = (pfData_s *) data; + gdouble cur_fps, drop_rate, average_fps; + gchar msg_str[256]; + gdouble diffs, elapsed_time; + guint64 rendered_frames, dropped_frames; + GstClockTime current_ts; + + rendered_frames = g_atomic_int_get (&self->frames_rendered); + dropped_frames = g_atomic_int_get (&self->frames_dropped); + + /* if no QOS event yet */ + if ((rendered_frames + dropped_frames) == 0) { + return TRUE; + } + + current_ts = gst_util_get_timestamp (); + + diffs = (gdouble) (current_ts - self->last_ts) / GST_SECOND; + elapsed_time = (gdouble) (current_ts - self->start_ts) / GST_SECOND; + + cur_fps = (gdouble) (rendered_frames - self->last_frames_rendered) / diffs; + drop_rate = (gdouble) (dropped_frames - self->last_frames_dropped) / diffs; + + average_fps = (gdouble) rendered_frames / g_timer_elapsed (self->timer, NULL); + self->average_fps = average_fps; + + if (self->max_fps == -1 || cur_fps > self->max_fps) { + self->max_fps = cur_fps; + } + if (self->min_fps == -1 || cur_fps < self->min_fps) { + self->min_fps = cur_fps; + } + + if (drop_rate == 0.0) { + g_snprintf (msg_str, 255, + "appox.rend: %" G_GUINT64_FORMAT ", approx.drpd: %" G_GUINT64_FORMAT + ", curfps: %.2f, avgfps: %.2f, avgtsdiff: %" GST_TIME_FORMAT + ", rtime: %" GST_TIME_FORMAT, + rendered_frames, dropped_frames, cur_fps, average_fps, + GST_TIME_ARGS (self->avg_in_diff), GST_TIME_ARGS (current_ts)); + } else { + g_snprintf (msg_str, 255, + "approx.rend: %" G_GUINT64_FORMAT ", approx.drpd: %" G_GUINT64_FORMAT + ", curfps: %.2f, avgfps: %.2f, avgtsdiff: %" GST_TIME_FORMAT + ", drate: %.2f" ", rtime: %" GST_TIME_FORMAT, rendered_frames, + dropped_frames, cur_fps, average_fps, GST_TIME_ARGS (self->avg_in_diff), + drop_rate, GST_TIME_ARGS (current_ts)); + } + + if (app->pfData.file) + g_fprintf (self->file, "%s\n", 
msg_str); + + self->last_frames_rendered = rendered_frames; + self->last_frames_dropped = dropped_frames; + self->last_ts = current_ts; + + if (G_UNLIKELY (self->initial_fps && elapsed_time > 5.0)) { + self->dps_cb = g_timeout_add (DEFAULT_FPS_UPDATE_INTERVAL_MS, + display_current_fps, self); + self->initial_fps = FALSE; + return FALSE; + } + + return TRUE; +} + + +static gboolean +on_video_sink_flow (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) +{ + pfData_s *self = (pfData_s *) user_data; + GstEvent *ev = GST_PAD_PROBE_INFO_DATA (info); + + if (GST_EVENT_TYPE (ev) == GST_EVENT_QOS) { + GstClockTimeDiff jitter; + GstClockTime ts; + + gst_event_parse_qos (ev, 0, NULL, &jitter, &ts); + + if (G_LIKELY (self->prev_ts != GST_CLOCK_TIME_NONE)) { + if (G_LIKELY (self->prev_ts < ts)) { + if (G_UNLIKELY (ts - self->prev_ts > GST_SECOND)) { + self->prev_ts = -1; + self->avg_in_diff = -1; + g_atomic_int_inc (&self->frames_rendered); + } else { + if (self->avg_in_diff != GST_CLOCK_TIME_NONE) + self->avg_in_diff = CALC_RUNNING_AVERAGE (self->avg_in_diff, ts - self->prev_ts, 8); //windowsize=8 + else + self->avg_in_diff = ts - self->prev_ts; + + //g_print ("%"G_GINT64_FORMAT" ", jitter); + //g_print ("%"G_GINT64_FORMAT" ", self->max_latency); + //g_print ("%"G_GINT64_FORMAT"\n", self->avg_in_diff); + + if (GST_CLOCK_TIME_IS_VALID (self->max_latency) && + jitter > + (GstClockTimeDiff) (self->max_latency + self->avg_in_diff)) { + g_atomic_int_inc (&self->frames_dropped); + } else { + g_atomic_int_inc (&self->frames_rendered); + } + } + } else { + self->prev_ts = -1; + self->avg_in_diff = -1; + g_atomic_int_inc (&self->frames_rendered); + } + + } else { + g_atomic_int_inc (&self->frames_rendered); + } + + self->prev_ts = ts; + } + + return TRUE; +} + + +static gboolean +is_valid_number (gchar * str, gboolean neg, gboolean dec) +{ + gboolean ret = FALSE; + + if (str) { + if (g_ascii_isdigit (*str) || + (neg && (*str == '-')) || (dec && (*str == '.'))) { + gboolean 
was_digit = FALSE; + + if (*str == '.') + dec = FALSE; + else + was_digit = g_ascii_isdigit (*str); + + str++; + if (*str != '\0') { + while (g_ascii_isdigit (*str) || G_UNLIKELY (dec && *str == '.')) { + if (G_UNLIKELY (*str == '.')) { + dec = FALSE; + was_digit = FALSE; + } else + was_digit = TRUE; + + str++; + } + + if (*str == '\0' && was_digit) + ret = TRUE; + + } else if (was_digit) + ret = TRUE; + } + } + + return ret; +} + +void +quit_app () +{ + + NVGST_INFO_MESSAGE ("quitting the app"); + + if (!app->bg_mode) { + if (!trd_exit) { + trd_exit = TRUE; + g_thread_join (trd); + } + } + + g_main_loop_quit (loop); +} + +static gboolean +on_input (GIOChannel * ichannel, GIOCondition cond, gpointer data) +{ + inAttrs *in = app->input; + gboolean yes, yes1; + NvGstReturn res = NVGST_RET_SUCCESS; + static gchar tbuffer[256]; + + yes1 = (!data && in->operation_mode == NVGST_CMD_SCRIPT) || (data + && in->operation_mode == NVGST_CMD_USER); + yes = yes1 && app->pipeline && app->running && !app->image_eos + && !app->got_eos; + + if (data == NULL) { + res = get_next_command (&in->attrs, tbuffer, sizeof(tbuffer), in->postpone); + in->postpone = FALSE; + + if (res == NVGST_RET_END) { + //g_print("\n in->operation_mode = NVGST_CMD_USER\n"); + in->operation_mode = NVGST_CMD_USER; + goto done; + } + } else { + GQueue *que = (GQueue *) data; + gchar *str = g_queue_pop_head (que); + strncpy (tbuffer, str, (sizeof (tbuffer)-1)); + tbuffer[sizeof (tbuffer)-1] = '\0'; + g_free (str); + } + + NVGST_DEBUG_MESSAGE_V ("\ngot a command %s <%d %d %d>\n", tbuffer, + app->image_eos, app->got_eos, app->running); + + if (!g_strcmp0 (tbuffer, "h")) { + g_print ("%s\n", app->extra_options); + + } else if (!g_strcmp0 (tbuffer, "q")) { + quit_app (); + } else if (g_str_has_prefix (tbuffer, "w") && yes1) { + in->interval = atof (tbuffer + 1) * GST_USECOND; + res = exec_ops (NVGST_OPS_WAIT); + + } else if (g_str_has_prefix (tbuffer, "z") && app->running) { + in->interval = atof (tbuffer + 1) * 
GST_USECOND; + res = exec_ops (NVGST_OPS_STOP); + + } else if (g_str_has_prefix (tbuffer, "u:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "i:")) { + g_free (app->uri); + + app->uri = g_strdup (g_strstrip (tbuffer + 2)); + app->uriTotal = 1; + app->uriCount = 0; + + goto_next_track (app); + + } else if (g_str_has_prefix (tbuffer, "e:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "x:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "nos:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "sth:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "upb:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "noa:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "nov:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "dan:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "dvn:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "ubf:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "tag:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "a")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "d")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "n")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "l")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "j")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "t")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "y")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "b")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "k")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "svd:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "sad:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "svc:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "sac:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "svs:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "sas:")) { + /* TODO */ + + } 
else if (g_str_has_prefix (tbuffer, "shttp:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "srtsp:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "sudp:")) { + /* TODO */ + + } else if (g_str_has_prefix (tbuffer, "sfsrc:")) { + /* TODO */ + + } else if (app->cur_operation == NVGST_OPS_NONE) { + if (g_str_has_prefix (tbuffer, "c") || g_str_has_prefix (tbuffer, "]") || + g_str_has_prefix (tbuffer, "[")) { + + if (g_str_has_prefix (tbuffer, "c")) { + app->uriCount--; + + } else if (g_str_has_prefix (tbuffer, "[")) { + app->uriCount -= 2; + } + + if (app->uriCount < 0) + app->uriCount = 0; + + goto_next_track (app); + goto ret; + + } else if (g_str_has_prefix (tbuffer, "r") && yes1 && app->pipeline) { + GstState state, pending; + in->interval = atof (tbuffer + 1) * GST_USECOND; + + if (app->buffering) { + if (in->operation_mode == NVGST_CMD_SCRIPT) + in->postpone = TRUE; + + } else if (gst_element_get_state (app->pipeline, &state, &pending, + GST_CLOCK_TIME_NONE) + != GST_STATE_CHANGE_FAILURE && state < GST_STATE_PLAYING + && state > GST_STATE_NULL && pending == GST_STATE_VOID_PENDING) { + + if (state == GST_STATE_READY) { + GstStateChangeReturn ret; + NVGST_INFO_MESSAGE ("pausing"); + + in->pending_play = TRUE; + app->cur_operation = NVGST_OPS_PAUSE; + if ((ret = gst_element_set_state (app->pipeline, GST_STATE_PAUSED)) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + gst_element_state_get_name (GST_STATE_PAUSED)); + res = NVGST_RET_ERR; + CALL_GUI_FUNC (set_playback_status, STATUS_ERROR); + } else { + if (ret == GST_STATE_CHANGE_NO_PREROLL) + app->is_live = TRUE; + res = NVGST_RET_ASYNC; + } + } else { + res = exec_ops (NVGST_OPS_PLAY); + } + } else { + res = NVGST_RET_INVALID; + } + + } else if (g_str_has_prefix (tbuffer, "p") && yes1 && app->pipeline) { + GstState state, pending; + in->interval = atof (tbuffer + 1) * GST_USECOND; + + if (app->buffering) { + if (in->operation_mode == 
NVGST_CMD_SCRIPT) + in->postpone = TRUE; + + } else if (gst_element_get_state (app->pipeline, &state, &pending, + GST_CLOCK_TIME_NONE) + == GST_STATE_CHANGE_SUCCESS && state > GST_STATE_NULL + && state != GST_STATE_PAUSED && pending == GST_STATE_VOID_PENDING) { + res = exec_ops (NVGST_OPS_PAUSE); + } else + res = NVGST_RET_ERR; + + } else if (!g_strcmp0 (tbuffer, "spos") && app->running) { + GstClockTimeDiff pos; + GstFormat format = GST_FORMAT_TIME; + + if (gst_element_query_position (app->pipeline, format, &pos)) + g_print ("Position: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (pos)); + else + g_print ("Position: Query Failed\n"); + + } else if (!g_strcmp0 (tbuffer, "sdur") && app->running) { + GstClockTimeDiff dur = GST_CLOCK_TIME_NONE; + GstFormat format = GST_FORMAT_TIME; + + if (gst_element_query_duration (app->pipeline, format, &dur) + && GST_CLOCK_TIME_IS_VALID (dur)) + g_print ("Duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (dur)); + else + g_print ("Duration: Query Failed\n"); + + } else if (yes) { + if (app->buffering && in->operation_mode == NVGST_CMD_SCRIPT) { + in->postpone = TRUE; + + } else if (g_str_has_prefix (tbuffer, "f") && + is_valid_number (tbuffer + 1, TRUE, TRUE)) { + GstClockTimeDiff pos; + GstFormat format = GST_FORMAT_TIME; + + if (gst_element_query_position (app->pipeline, format, &pos) && + format == GST_FORMAT_TIME) { + GstClockTimeDiff tpos = atof (tbuffer + 1) * GST_SECOND; + + in->interval = pos + tpos; + res = exec_ops (NVGST_OPS_SEEK); + } else { + g_print ("cannot seek\n"); + res = NVGST_RET_ERR; + } + } else if (g_str_has_prefix (tbuffer, "<")) { + GstClockTimeDiff pos; + GstFormat format = GST_FORMAT_TIME; + + if (gst_element_query_position (app->pipeline, format, &pos) && + format == GST_FORMAT_TIME) { + + if ((GstClockTime) pos > 10000000000ULL) + in->interval = pos - 10000000000ULL; + else + in->interval = 0; + + res = exec_ops (NVGST_OPS_SEEK); + } else { + g_print ("cannot seek\n"); + res = NVGST_RET_ERR; + } + } else if 
(g_str_has_prefix (tbuffer, ">")) { + GstClockTimeDiff pos; + GstFormat format = GST_FORMAT_TIME; + + if (gst_element_query_position (app->pipeline, format, &pos) && + format == GST_FORMAT_TIME) { + GstClockTimeDiff dur; + in->interval = pos + 10000000000ULL; + + if (gst_element_query_duration (app->pipeline, format, &dur) + && GST_CLOCK_TIME_IS_VALID (dur)) { + if (dur < (GstClockTimeDiff) in->interval) { + app->got_eos = TRUE; + goto_next_track (app); + } else { + res = exec_ops (NVGST_OPS_SEEK); + } + } else { + res = exec_ops (NVGST_OPS_SEEK); + } + } else { + g_print ("cannot seek\n"); + res = NVGST_RET_ERR; + } + } else if (g_str_has_prefix (tbuffer, "s") && + is_valid_number (tbuffer + 1, FALSE, TRUE)) { + in->interval = ABS (atof (tbuffer + 1)) * GST_SECOND; + res = exec_ops (NVGST_OPS_SEEK); + + } else if (g_str_has_prefix (tbuffer, "v") && + is_valid_number (tbuffer + 1, FALSE, FALSE)) { + GstFormat format = GST_FORMAT_TIME; + GstClockTimeDiff dur = GST_CLOCK_TIME_NONE; + + in->interval = ABS (atoi (tbuffer + 1)); + + if (gst_element_query_duration (app->pipeline, format, &dur) + && GST_CLOCK_TIME_IS_VALID (dur)) { + in->interval = gst_util_uint64_scale (dur, in->interval, 100); + res = exec_ops (NVGST_OPS_SEEK); + } else { + g_print ("cannot seek\n"); + res = NVGST_RET_ERR; + } + + } else { + res = NVGST_RET_INVALID; + } + } else { + res = NVGST_RET_INVALID; + } + } else { + res = NVGST_RET_INVALID; + } + +done: + if (res < NVGST_RET_SUCCESS) { + if (res == NVGST_RET_ERR) + g_print ("command execution failed\n"); + else if (res != NVGST_RET_END) + g_print ("cannot process the command, mode: %d, cur_operation = %d\n", + in->operation_mode, app->cur_operation); + + in->interval = 0; + if (res != NVGST_RET_INVALID) { + in->pending_play = FALSE; + app->cur_operation = NVGST_OPS_NONE; + } + } + + if (in->postpone) { + app->cmd_id = g_timeout_add (2000, on2_input, NULL); + } else if (res != NVGST_RET_ASYNC) { + if (in->operation_mode == NVGST_CMD_SCRIPT && 
!data) { + app->cmd_id = g_timeout_add (in->interval, on2_input, NULL); + } + } + +ret: + return TRUE; +} + + +NvGstReturn +exec_ops (NvGstOperation operation) +{ + NvGstReturn ret = NVGST_RET_SUCCESS; + inAttrs *in = app->input; + + app->cur_operation = operation; + + switch (operation) { + case NVGST_OPS_STOP:{ + reset_current_track (); + } + break; + + case NVGST_OPS_SEEK:{ + GstClockTime end = GST_CLOCK_TIME_NONE; + GstClockTime seekPos; + GstSeekFlags flags = GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT; + + seekPos = in->interval; + in->interval = 0; + + NVGST_INFO_MESSAGE_V ("seeking to %" GST_TIME_FORMAT, + GST_TIME_ARGS (seekPos)); + + if (GST_CLOCK_TIME_IS_VALID (in->attrs.segment_duration)) { + GstClockTimeDiff pos; + GstFormat format = GST_FORMAT_TIME; + + if (gst_element_query_position (app->pipeline, format, &pos) && + format == GST_FORMAT_TIME) { + flags |= GST_SEEK_FLAG_SEGMENT; + app->accum_time += (pos - app->last_seek_time); + + NVGST_DEBUG_MESSAGE_V ("segment_duration %" GST_TIME_FORMAT, + GST_TIME_ARGS (in->attrs.segment_duration)); + NVGST_DEBUG_MESSAGE_V ("accum_time: %" GST_TIME_FORMAT, + GST_TIME_ARGS (app->accum_time)); + + if (app->accum_time < in->attrs.segment_duration) { + end = seekPos + (in->attrs.segment_duration - app->accum_time); + app->last_seek_time = seekPos; + } else { + end = seekPos + 1; + } + } + } + + NVGST_DEBUG_MESSAGE_V ("end: %" GST_TIME_FORMAT, GST_TIME_ARGS (end)); + + ret = gst_element_seek (app->seekElement, 1.0, GST_FORMAT_TIME, flags, + GST_SEEK_TYPE_SET, seekPos, GST_SEEK_TYPE_SET, end); + + if (!ret) { + NVGST_CRITICAL_MESSAGE ("seek failed"); + ret = NVGST_RET_ERR; + CALL_GUI_FUNC (set_playback_status, STATUS_ERROR); + } else { + ret = NVGST_RET_ASYNC; + } + } + break; + + case NVGST_OPS_PAUSE:{ + NVGST_INFO_MESSAGE ("pausing"); + + if (gst_element_set_state (app->pipeline, GST_STATE_PAUSED) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + 
gst_element_state_get_name (GST_STATE_PAUSED)); + ret = NVGST_RET_ERR; + CALL_GUI_FUNC (set_playback_status, STATUS_ERROR); + } else { + ret = NVGST_RET_ASYNC; + } + } + break; + + case NVGST_OPS_PLAY:{ + NVGST_INFO_MESSAGE ("playing"); + + if (gst_element_set_state (app->pipeline, GST_STATE_PLAYING) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + gst_element_state_get_name (GST_STATE_PLAYING)); + ret = NVGST_RET_ERR; + CALL_GUI_FUNC (set_playback_status, STATUS_ERROR); + } else { + ret = NVGST_RET_ASYNC; + + if (app->stats) { + pfData_s *self = &app->pfData; + if (app->stats_file) + g_fprintf (self->file, "playing from rtime %" GST_TIME_FORMAT "\n", + GST_TIME_ARGS (gst_util_get_timestamp ())); + + g_assert (app->pfData.dps_cb == 0 + && !GST_CLOCK_TIME_IS_VALID (app->pfData.start_ts)); + self->last_ts = self->start_ts = gst_util_get_timestamp (); + self->dps_cb = g_timeout_add (INITIAL_FPS_UPDATE_INTERVAL_MS, + display_current_fps, self); + + if (self->timer) + g_timer_continue (self->timer); + else + self->timer = g_timer_new (); + } + } + } + break; + + case NVGST_OPS_WAIT:{ + + } + break; + + default: + g_print ("invalid command\n"); + ret = NVGST_RET_INVALID; + break; + } + + if (ret != NVGST_RET_ASYNC) + app->cur_operation = NVGST_OPS_NONE; + + return ret; +} + + +gboolean +goto_next_track (gpointer data) +{ + NvGstReturn ret = NVGST_RET_SUCCESS; + inAttrs *in = app->input; + + if (!app->in_error && app->got_eos && --in->attrs.repeats > 0) { + NVGST_DEBUG_MESSAGE ("resetting the track"); + app->uriCount--; + + /* WAR for bug 200071832 */ + destroy_current_track (); + app->cur_operation = NVGST_OPS_NONE; + + app->input->operation_mode = NVGST_CMD_SCRIPT; + in->attrs.lplist = in->attrs.lplist_head; + in->attrs.cmlist = in->attrs.cmlist_head; + + in->postpone = FALSE; + + } else { + NVGST_DEBUG_MESSAGE ("destroying the track"); + destroy_current_track (); + } + +if(app->stats) + 
stats_func(app->pfData.average_fps, app->pfData.frames_rendered, app->pfData.frames_dropped); + + NVGST_INFO_MESSAGE_V ("uriCount: %d, uriTotal: %d", (gint) app->uriCount, + (gint) app->uriTotal); + + if (app->uriCount++ >= (gint) app->uriTotal) { + NVGST_INFO_MESSAGE ("done playing all URIs"); + if (app->attrs.loop_forever) { + NVGST_INFO_MESSAGE ("Looping over the URI List \n"); + app->uriCount = 1; + ret = setup_track (); + } else { + ret = NVGST_RET_END; + } + } else { + ret = setup_track (); + } + + if (ret == NVGST_RET_END && !GUI) { + quit_app (); + } else if (ret == NVGST_RET_ERR) { + app->cmd_id = g_idle_add (goto_next_track, app); + } + + return FALSE; +} + + +static void +_error_msg (GstMessage * message) +{ + GError *error = NULL; + gchar *elm_name, *debug = NULL; + + elm_name = gst_object_get_path_string (message->src); + gst_message_parse_error (message, &error, &debug); + + CALL_GUI_FUNC (show_error, error->message); + + g_printerr ("Error by %s: %s\n", elm_name, error->message); + if (debug != NULL) + g_printerr ("debug info:\n%s\n", debug); + + g_error_free (error); + g_free (debug); + g_free (elm_name); + + return; +} + + +static void +_tag_info (const GstTagList * list, const gchar * tag, gpointer data) +{ + gint tagCount, index; + + tagCount = gst_tag_list_get_tag_size (list, tag); + + for (index = 0; index < tagCount; index++) { + gchar *pStr; + + if (gst_tag_get_type (tag) == G_TYPE_STRING) { + if (!gst_tag_list_get_string_index (list, tag, index, &pStr)) + g_assert_not_reached (); + } else if (gst_tag_get_type (tag) == GST_TYPE_BUFFER) { + GstBuffer *buf; + + buf = + gst_value_get_buffer (gst_tag_list_get_value_index (list, tag, + index)); + if (buf) { + pStr = + g_strdup_printf ("Buffer of %zu bytes", gst_buffer_get_size (buf)); + } else { + pStr = g_strdup ("NULL buffer"); + } + + } else { + pStr = + g_strdup_value_contents (gst_tag_list_get_value_index (list, tag, + index)); + } + + if (index == 0) { + g_print ("%16s: %s\n", 
gst_tag_get_nick (tag), pStr); + } else { + g_print ("%16s: %s\n", "", pStr); + } + + g_free (pStr); + } + + return; +} + + +static gboolean +image_stop (gpointer data) +{ + app->got_eos = TRUE; + + goto_next_track (app); + + return FALSE; +} + + +static gboolean +bus_call (GstBus * bus, GstMessage * msg, gpointer data) +{ + inAttrs *in = app->input; + gboolean in_error = FALSE; + + switch (GST_MESSAGE_TYPE (msg)) { + case GST_MESSAGE_ERROR:{ + _error_msg (msg); + in_error = TRUE; + app->return_value = -1; + } + + case GST_MESSAGE_SEGMENT_DONE:{ + if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_SEGMENT_DONE) + NVGST_DEBUG_MESSAGE ("segment done"); + } + + case GST_MESSAGE_EOS:{ + if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS) + NVGST_INFO_MESSAGE ("eos, END OF STREAM"); + CALL_GUI_FUNC (set_playback_status, STATUS_STOPPED); + if (!app->in_error) { + app->in_error = in_error; + goto_next_track (app); + } + } + break; + + case GST_MESSAGE_QOS:{ + if (app->stats) { + GstElement *src = (GstElement *) GST_MESSAGE_SRC (msg); + GstElementFactory *factory = gst_element_get_factory (src); + const gchar *klass = gst_element_factory_get_klass (factory); + if (strstr (klass, "Decode") && strstr (klass, "Video")) { + guint64 frames_dropped; + gst_message_parse_qos_stats (msg, NULL, NULL, &frames_dropped); + if (frames_dropped > app->pfData.frames_dropped_decoder) { + g_atomic_int_inc (&app->pfData.frames_dropped); + app->pfData.frames_dropped_decoder++; + } + } + } + NVGST_DEBUG_MESSAGE ("QoS, frame dropped"); + } + break; + + case GST_MESSAGE_WARNING:{ + GError *gerror; + gchar *debug; + gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (msg)); + + gst_message_parse_warning (msg, &gerror, &debug); + NVGST_WARNING_MESSAGE_V ("WARNING on bus from %s: %s", name, + gerror->message); + if (debug) { + NVGST_WARNING_MESSAGE_V ("debug info:\n%s\n", debug); + } + g_error_free (gerror); + g_free (debug); + g_free (name); + } + break; + + case GST_MESSAGE_ELEMENT:{ + const GstStructure 
*str = gst_message_get_structure (msg); + + if (gst_structure_has_name (str, "decoder-status")) { + guint DecodedMBs, ConcealedMBs, FrameDecodeTime; + + const gchar *decoder_error_str = + gst_structure_get_string (str, "DecodeErrorString"); + gst_structure_get_uint (str, "DecodedMBs", &DecodedMBs); + gst_structure_get_uint (str, "ConcealedMBs", &ConcealedMBs); + gst_structure_get_uint (str, "FrameDecodeTime", &FrameDecodeTime); + g_print + ("\n-----> DecodeError = %s, DecodedMBs = %u, ConcealedMBs = %u, FrameDecodeTime = %u <-----\n", + decoder_error_str, DecodedMBs, ConcealedMBs, FrameDecodeTime); + } + } + break; + + case GST_MESSAGE_TAG:{ + GstTagList *tags; + gst_message_parse_tag (msg, &tags); + CALL_GUI_FUNC (handle_stream_tags, tags); + if (in->attrs.show_tags) { + gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (msg)); + + g_print ("\n===================== TAGS ======================== \n"); + g_print ("TAG INFO from \"%s\"\n", name); + + gst_tag_list_foreach (tags, _tag_info, NULL); + g_free (name); + g_print ("===================================================\n"); + } + gst_tag_list_free (tags); + } + break; + + case GST_MESSAGE_INFO:{ + GError *gerror; + gchar *debug; + gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (msg)); + + gst_message_parse_info (msg, &gerror, &debug); + if (debug) { + NVGST_INFO_MESSAGE_V ("INFO on bus by %s:\n%s\n", name, debug); + } + + g_error_free (gerror); + g_free (debug); + g_free (name); + } + break; + + case GST_MESSAGE_BUFFERING:{ + gint percent; + gboolean busy = FALSE; + + gst_message_parse_buffering (msg, &percent); + g_print ("buffering.. 
%d\r", percent); + + if (app->cur_operation || app->got_eos) { + busy = TRUE; + } + + /* live pipeline */ + if (app->is_live) + break; + + if (percent == 100) { + app->buffering = FALSE; + + if (!busy && app->target_state == GST_STATE_PLAYING) { + NVGST_INFO_MESSAGE + ("buffering complete, setting the pipeline to PLAYING.."); + + if (gst_element_set_state (app->pipeline, GST_STATE_PLAYING) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + gst_element_state_get_name (GST_STATE_PLAYING)); + } + } + + app->target_state = GST_STATE_VOID_PENDING; + + } else { + if (!busy && app->buffering == FALSE) { + GstState state, pending; + + if (gst_element_get_state (app->pipeline, &state, &pending, + GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE ("failed to query the pipeline for state"); + /* undefined behaviour follows */ + + } else { + g_assert (state > GST_STATE_READY); + g_assert (pending == GST_STATE_VOID_PENDING); + app->target_state = state; + + if (state == GST_STATE_PLAYING) { + NVGST_INFO_MESSAGE + ("buffering start, setting the pipeline to PAUSED.."); + + if (gst_element_set_state (app->pipeline, GST_STATE_PAUSED) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + gst_element_state_get_name (GST_STATE_PAUSED)); + } + } + } + } + + app->buffering = TRUE; + } + } + break; + + case GST_MESSAGE_LATENCY:{ + NVGST_INFO_MESSAGE ("redistribute the latency..."); + gst_bin_recalculate_latency (GST_BIN (app->pipeline)); + } + break; + + case GST_MESSAGE_STATE_CHANGED:{ + GstState old, new, pending; + inAttrs *in = app->input; + + gst_message_parse_state_changed (msg, &old, &new, &pending); + + NVGST_DEBUG_MESSAGE_V + ("element %s changed state from %s to %s, pending %s", + GST_OBJECT_NAME (msg->src), gst_element_state_get_name (old), + gst_element_state_get_name (new), + gst_element_state_get_name (pending)); + + if (GST_MESSAGE_SRC (msg) == 
GST_OBJECT (app->pipeline) && + pending == GST_STATE_VOID_PENDING) { + if (app->cur_operation) { + NvGstOperation cur = app->cur_operation; + gboolean done = FALSE; + + if (app->got_eos) { + /* leave everything and just wait for the eos to appear on the bus */ + NVGST_WARNING_MESSAGE + ("***************** VERY RARE SITUATION, got eos while state change"); + cur = app->cur_operation = NVGST_OPS_NONE; + in->operation_mode = NVGST_CMD_NONE; + } + + if (cur == NVGST_OPS_PLAY && new == GST_STATE_PLAYING) { + done = TRUE; + + } else if (cur == NVGST_OPS_PAUSE && new == GST_STATE_PAUSED + && old == GST_STATE_PLAYING) { + if (app->stats) { + g_timer_stop (app->pfData.timer); + display_current_fps (&app->pfData); + if (g_main_context_find_source_by_id (NULL, app->pfData.dps_cb)) + g_source_remove (app->pfData.dps_cb); + app->pfData.dps_cb = 0; + + app->pfData.last_ts = GST_CLOCK_TIME_NONE; + app->pfData.start_ts = GST_CLOCK_TIME_NONE; + app->pfData.prev_ts = -1; + app->pfData.initial_fps = TRUE; + app->pfData.last_frames_rendered = app->pfData.frames_rendered; + app->pfData.last_frames_dropped = app->pfData.frames_dropped; + if (app->pfData.file) + g_fprintf (app->pfData.file, + "paused at rtime %" GST_TIME_FORMAT "\n", + GST_TIME_ARGS (gst_util_get_timestamp ())); + } + + if (app->unpause) { + g_usleep (2500000); + + NVGST_INFO_MESSAGE ("unpausing"); + + if (gst_element_set_state (app->pipeline, GST_STATE_PLAYING) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + gst_element_state_get_name (GST_STATE_PLAYING)); + } + } else + done = TRUE; + + } else if (cur == NVGST_OPS_PAUSE && new == GST_STATE_PAUSED + && old == GST_STATE_READY) { + if (app->no_more_pads || app->is_live) { + GstFormat format = GST_FORMAT_TIME; + in->duration = GST_CLOCK_TIME_NONE; + app->running = TRUE; + + if (app->stats) { + GstClockTimeDiff duration; + GstFormat format = GST_FORMAT_TIME; + + fps_init (&app->pfData); + + if (gst_element_query_duration 
(app->pipeline, format, + &duration)) + if (format == GST_FORMAT_TIME + && GST_CLOCK_TIME_IS_VALID (duration) && app->pfData.file) + g_fprintf (app->pfData.file, + "Duration: %" GST_TIME_FORMAT "\n", + GST_TIME_ARGS (duration)); + + if (app->vrender_pad && app->pfData.file) { + GstCaps *caps = gst_pad_get_current_caps (app->vrender_pad); + if (caps) { + gchar *str = gst_caps_to_string (caps); + g_fprintf (app->pfData.file, "Video Render Format: %s\n", + str); + g_free (str); + gst_caps_unref (caps); + } + } + + if (app->arender_pad && app->pfData.file) { + GstCaps *caps = gst_pad_get_current_caps (app->arender_pad); + if (caps) { + gchar *str = gst_caps_to_string (caps); + g_fprintf (app->pfData.file, "Audio Render Format: %s\n", + str); + g_free (str); + gst_caps_unref (caps); + } + } + } + + if (!app->image_eos) { + if (!(gst_element_query_duration (app->pipeline, format, + &in->duration) && format == GST_FORMAT_TIME + && GST_CLOCK_TIME_IS_VALID (in->duration))) { + NVGST_WARNING_MESSAGE ("failed to query duration in time"); + in->duration = GST_CLOCK_TIME_NONE; + } else if (in->attrs.startPer) { + in->attrs.segment_start = gst_util_uint64_scale (in->duration, + in->attrs.segment_start, 100); + } + } else { + app->image_eos = + g_timeout_add_seconds (in->attrs.image_display_time, + image_stop, NULL); + in->pending_play = FALSE; + in->attrs.segment_start = 0; + in->attrs.segment_duration = GST_CLOCK_TIME_NONE; + } + + if (GST_CLOCK_TIME_IS_VALID (in->attrs.segment_duration) + || in->attrs.segment_start > 0) { + GstSeekFlags flags = + GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT; + GstClockTime end; + gboolean res; + + if (GST_CLOCK_TIME_IS_VALID (in->attrs.segment_duration)) { + flags |= GST_SEEK_FLAG_SEGMENT; + end = in->attrs.segment_start + in->attrs.segment_duration; + + } else { + end = GST_CLOCK_TIME_NONE; + } + + app->cur_operation = NVGST_OPS_SEEK; + + res = + gst_element_seek (app->seekElement, 1.0, GST_FORMAT_TIME, + flags, GST_SEEK_TYPE_SET, 
in->attrs.segment_start, + GST_SEEK_TYPE_SET, end); + + if (!res) { + NVGST_CRITICAL_MESSAGE ("seek failed"); + done = TRUE; + } + + } else { + in->attrs.segment_start = 0; + in->attrs.segment_duration = GST_CLOCK_TIME_NONE; + done = TRUE; + } + } + } else if (cur == NVGST_OPS_SEEK) { + if (new > GST_STATE_READY && old == GST_STATE_PAUSED) + done = TRUE; + in->interval = 0; + } + + if (done) { + app->cur_operation = NVGST_OPS_NONE; + + if (in->pending_play) { + GstStateChangeReturn rt; + + in->pending_play = FALSE; + app->cur_operation = NVGST_OPS_PLAY; + + if (app->stats) { + pfData_s *self = &app->pfData; + if (app->pfData.file) + g_fprintf (self->file, + "playing from rtime %" GST_TIME_FORMAT "\n", + GST_TIME_ARGS (gst_util_get_timestamp ())); + + g_assert (self->dps_cb == 0 + && !GST_CLOCK_TIME_IS_VALID (self->start_ts)); + self->last_ts = self->start_ts = gst_util_get_timestamp (); + self->dps_cb = g_timeout_add (INITIAL_FPS_UPDATE_INTERVAL_MS, + display_current_fps, self); + + if (self->timer) + g_timer_continue (self->timer); + else + self->timer = g_timer_new (); + } + + rt = gst_element_set_state (app->pipeline, GST_STATE_PLAYING); + + /* Dump Playing Pipeline into the dot file + * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp" + * Run nvgstplayer-1.0 and 0.00.00.*-nvgstplayer-1.0-playing.dot + * file will be generated. + * Run "dot -Tpng 0.00.00.*-nvgstplayer-1.0-playing.dot > image.png" + * image.png will display the running pipeline. 
+ * */ + GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (app->pipeline), + GST_DEBUG_GRAPH_SHOW_ALL, "nvgstplayer-1.0-playing"); + + if (rt == GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + gst_element_state_get_name (GST_STATE_PLAYING)); + app->cur_operation = NVGST_OPS_NONE; + } + } + + if (app->cur_operation == NVGST_OPS_NONE) { + if (in->operation_mode == NVGST_CMD_SCRIPT) { + app->cmd_id = g_timeout_add (in->interval, on2_input, NULL); + } + } + } + } + if (new == GST_STATE_PLAYING) + CALL_GUI_FUNC (set_playback_status, STATUS_PLAYING); + else if (new == GST_STATE_PAUSED) + CALL_GUI_FUNC (set_playback_status, STATUS_PAUSED); + else + CALL_GUI_FUNC (set_playback_status, STATUS_STOPPED); + } + } + break; + + case GST_MESSAGE_APPLICATION:{ + const GstStructure *s; + s = gst_message_get_structure (msg); + + if (gst_structure_has_name (s, "NvGstAppInterrupt")) { + g_print ("Handling the interrupt ...\n"); + + if (!app->bg_mode) { + if (!trd_exit) { + trd_exit = TRUE; + g_thread_join (trd); + } + } + g_main_loop_quit (loop); + + } else if (gst_structure_has_name (s, "NvGstAppVideoBinFailure")) { + g_print ("Handling Video Bin failure...\n"); + + if (!app->bg_mode) { + if (!trd_exit) { + trd_exit = TRUE; + g_thread_join (trd); + } + } + g_main_loop_quit (loop); + } + } + break; + + default: + break; + } + + return TRUE; +} + +static gchar ** +get_keys (GstCaps * caps, gchar * str, gchar * xstr) +{ + gchar **keys = NULL; + if (app->pathCfg) { + inAttrs *in = app->input; + + if (g_key_file_has_key (app->pathCfg, in->uri, str, NULL)) { + g_key_file_set_list_separator (app->pathCfg, '!'); + keys = + g_key_file_get_string_list (app->pathCfg, in->uri, str, NULL, NULL); + g_key_file_set_list_separator (app->pathCfg, ';'); + } + } + + if (!keys && xstr) { + keys = g_strsplit_set (xstr, "!", -1); + } + + if (!keys && app->elemCfg) { + if (g_key_file_has_group (app->elemCfg, str)) { + g_key_file_set_list_separator (app->elemCfg, 
'!'); + keys = g_key_file_get_string_list (app->elemCfg, str, "pipe", NULL, NULL); + g_key_file_set_list_separator (app->elemCfg, ';'); + } + + /* Only for Decoders */ + if (!keys && caps) { + gchar **pgp = app->elem_gps; + gchar *val; + + while (*pgp) { + if ((val = g_key_file_get_value (app->elemCfg, *pgp, "type", NULL)) + && (!g_strcmp0 (val, "svd") || !g_strcmp0 (val, "sad"))) { + GstCaps *pcaps; + + /*TODO* convert | to ! */ + pcaps = gst_caps_from_string (*pgp); + + g_free (val); + val = NULL; + + if (pcaps) { + if (gst_caps_can_intersect (pcaps, caps)) { + g_key_file_set_list_separator (app->elemCfg, '!'); + keys = + g_key_file_get_string_list (app->elemCfg, *pgp, "pipe", NULL, + NULL); + g_key_file_set_list_separator (app->elemCfg, ';'); + gst_caps_unref (pcaps); + break; + } + gst_caps_unref (pcaps); + } + } + pgp++; + } + } + } + + if (!keys) { + gchar *elems = g_hash_table_lookup (app->htable, str); + if (elems) { + keys = g_strsplit_set (elems, "!", -1); + } + } + + return keys; +} + + +static GstElement * +create_element (GstCaps * caps, gchar * str, gchar * xstr, gchar ** skeys) +{ + GstElement *element = NULL, *previous = NULL; + GstElement *bin = NULL; + gchar **vkey = NULL, **keys = NULL; + gint count = 0; + + if (!skeys) { + keys = get_keys (caps, str, xstr); + } else { + keys = skeys; + } + + if (keys) { + vkey = keys; + while (*vkey) { + gchar **tokens = g_strsplit_set (*vkey, "#", -1); + gchar **vtoken = tokens; + + g_strstrip (*vtoken); + element = gst_element_factory_make (tokens[0], NULL); + if (!element) { + g_strfreev (tokens); + goto fail; + } + + if (count > 0) { + if (count == 1) { + bin = (GstElement *) gst_bin_new (str); + + if (!gst_bin_add ((GstBin *) bin, previous)) { + gst_object_unref (previous); + gst_object_unref (element); + g_strfreev (tokens); + goto fail; + } +#if 0 + if (!GST_OBJECT_FLAG_IS_SET (previous, GST_ELEMENT_IS_SOURCE)) +#endif + { + GstPad *pad = gst_element_get_static_pad (previous, "sink"); + if (!pad) + pad = 
gst_element_get_static_pad (previous, "video_sink"); + if (pad) { + gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (pad); + } else { + NVGST_CRITICAL_MESSAGE_V ("failed to get sink pad from %s", + GST_ELEMENT_NAME (previous)); + gst_object_unref (element); + goto fail; + } + } + } + + if (!gst_bin_add ((GstBin *) bin, element)) { + g_strfreev (tokens); + gst_object_unref (element); + goto fail; + } + + if (!gst_element_link (previous, element)) { + NVGST_CRITICAL_MESSAGE_V ("failed to link %s to %s", + GST_ELEMENT_NAME (previous), GST_ELEMENT_NAME (element)); + goto fail; + } + } else { + bin = element; + } + + previous = element; + element = NULL; + vtoken++; + + while (*vtoken) { + GParamSpec *param_spec; + GValue prop_value = { 0 }; + gchar *prop[2]; + gchar *temp; + + prop[0] = g_strdup (*vtoken); + prop[1] = g_strdup (strstr (*vtoken, "=") + 1); + if ((temp = strstr (prop[0], "="))) + *temp = 0; + + g_strstrip (prop[0]); + g_strstrip (prop[1]); + + param_spec = + g_object_class_find_property (G_OBJECT_GET_CLASS (previous), + prop[0]); + + if (!param_spec) { + NVGST_WARNING_MESSAGE_V + ("property %s does not exist in element %s, ignoring", prop[0], + tokens[0]); + g_strfreev (prop); + vtoken++; + continue; + } + + g_value_init (&prop_value, param_spec->value_type); + + if (prop[1] && gst_value_deserialize (&prop_value, prop[1])) { + NVGST_INFO_MESSAGE_V + ("setting property \"%s\" having val=\"%s\" on %s\n", prop[0], + prop[1], tokens[0]); + g_object_set_property (G_OBJECT (previous), prop[0], &prop_value); + } else { + NVGST_WARNING_MESSAGE_V ("could not read value for property %s\n", + prop[0]); + } + + g_free (prop[0]); + g_free (prop[1]); + vtoken++; + } + + g_strfreev (tokens); + vkey++; + count++; + } + } + + if (previous == NULL) + goto fail; + + if (app->stats) { + if (GST_OBJECT_FLAG_IS_SET (previous, GST_ELEMENT_FLAG_SINK) + && !g_strcmp0 (str, NVGST_VIDEO_SINK)) { + pfData_s *self = &app->pfData; + + fps_init (self); + 
+ /* FIXME: assuming all videosinks are derived from basesink, which normally are */ + self->max_latency = GST_CLOCK_TIME_NONE; + g_object_set (G_OBJECT (previous), "qos", TRUE, NULL); + g_object_get (G_OBJECT (previous), "max-lateness", &self->max_latency, + NULL); + + app->vrender_pad = gst_element_get_static_pad (previous, "sink"); + + gst_pad_add_probe (app->vrender_pad, GST_PAD_PROBE_TYPE_EVENT_BOTH, + (GstPadProbeCallback) (on_video_sink_flow), (gpointer) self, NULL); + + } else if (GST_OBJECT_FLAG_IS_SET (previous, GST_ELEMENT_FLAG_SINK) + && !g_strcmp0 (str, NVGST_AUDIO_SINK)) { + + app->arender_pad = gst_element_get_static_pad (previous, "sink"); + } + } + + if (count > 1 && !GST_OBJECT_FLAG_IS_SET (previous, GST_ELEMENT_FLAG_SINK)) { + GstPad *pad = gst_element_get_static_pad (previous, "src"); + if (pad) { + gst_element_add_pad (bin, gst_ghost_pad_new ("src", pad)); + gst_object_unref (pad); + } else { + NVGST_CRITICAL_MESSAGE_V ("failed to get src pad from %s", + GST_ELEMENT_NAME (previous)); + goto fail; + } + } + +done: + if (!skeys && keys) + g_strfreev (keys); + + return bin; + +fail: + if (bin) { + gst_object_unref (bin); + bin = NULL; + } + goto done; +} + + +static gboolean +gst_caps_is_raw (GstElement * dbin, GstCaps * icaps) +{ + GstCaps *caps = NULL; + gboolean res = FALSE; + + g_object_get (dbin, "caps", &caps, NULL); + + if (caps) { + res = gst_caps_can_intersect (caps, icaps); + gst_caps_unref (caps); + } + + return res; +} + +static void +set_sync (GstElement * vsink, gboolean sync) +{ + if (GST_IS_BIN (vsink)) { + GValue vvsink = G_VALUE_INIT; + GstIterator *it = gst_bin_iterate_sinks (GST_BIN (vsink)); + GObject *obj = NULL; + + if (it && gst_iterator_next (it, &vvsink) == GST_ITERATOR_OK) { + obj = g_value_get_object (&vvsink); + g_object_set (obj, "sync", sync, NULL); + } + + if (obj) + gst_object_unref (obj); + + if (it) + gst_iterator_free (it); + + } else { + g_object_set (G_OBJECT (vsink), "sync", sync, NULL); + } +} + +void 
+set_window_handle (Window window) +{ + GstElement *vsink = app->vsink; + + if (GST_IS_BIN (vsink)) { + GValue vvsink = G_VALUE_INIT; + GstIterator *it = gst_bin_iterate_sinks (GST_BIN (app->vsink)); + GObject *obj = NULL; + + if (it && gst_iterator_next (it, &vvsink) == GST_ITERATOR_OK) { + obj = g_value_get_object (&vvsink); + vsink = GST_ELEMENT (obj); + } + + if (obj) + gst_object_unref (obj); + + if (it) + gst_iterator_free (it); + } + + if (GST_IS_VIDEO_OVERLAY (vsink)) { + gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), + (gulong) window); + gst_video_overlay_expose (GST_VIDEO_OVERLAY (vsink)); + } +} + +static GstElement * +create_video_pipeline (GstCaps * caps, GstPad * dbin_pad) +{ + GstElement *bin = NULL; + GstElement *vdec = NULL, *gate = NULL; + GstElement *vsink = NULL, *vconv = NULL; + GstElement *queue = NULL; + GstPad *pad; + NvGstPlayFlags use_conv = app->input->attrs.flags; + inAttrs *in = app->input; + + if (app->vpipe) { + bin = app->vpipe; + + } else { + bin = gst_bin_new ("video_bin"); + + if (!gst_caps_is_raw (app->vdbin, caps) && !in->dbin_video_decoders) { + vdec = create_element (caps, NVGST_VIDEO_DEC, app->svd, in->video_dec); + if (!vdec) { + NVGST_CRITICAL_MESSAGE ("failed to create video decoder pipe"); + goto fail; + } + + if (!gst_bin_add (GST_BIN (bin), vdec)) { + NVGST_CRITICAL_MESSAGE ("failed to add decoder pipe to video_bin"); + gst_object_unref (vdec); + goto fail; + } + } + + queue = gst_element_factory_make ("queue", NULL); + if (!queue) { + NVGST_CRITICAL_MESSAGE ("failed to create element: queue"); + goto fail; + } + + if (!gst_bin_add (GST_BIN (bin), queue)) { + NVGST_CRITICAL_MESSAGE ("failed to add queue to video_bin"); + gst_object_unref (queue); + goto fail; + } + vsink = create_element (NULL, NVGST_VIDEO_SINK, app->svs, NULL); + if (!vsink) { + NVGST_CRITICAL_MESSAGE_V ("failed to create element: %s", + NVGST_VIDEO_SINK); + goto fail; + } + + app->vsink = vsink; + + if (!gst_bin_add (GST_BIN (bin), 
vsink)) { + NVGST_CRITICAL_MESSAGE_V ("failed to add %s to video_bin", + GST_ELEMENT_NAME (vsink)); + gst_object_unref (vsink); + goto fail; + } + + set_sync (vsink, in->attrs.sync); + GstStructure *str = gst_caps_get_structure (caps, 0); + const gchar *name = gst_structure_get_name (str); + if (!(use_conv & NVGST_PLAY_FLAG_NATIVE_VIDEO) && g_strcmp0(name, "image/jpeg")) { + vconv = create_element (NULL, NVGST_VIDEO_CONV, app->svc, NULL); + if (!vconv) { + NVGST_CRITICAL_MESSAGE_V ("failed to create element: %s", + NVGST_VIDEO_CONV); + goto fail; + } + + if (!gst_bin_add (GST_BIN (bin), vconv)) { + NVGST_CRITICAL_MESSAGE_V ("failed to add %s to video_bin", + GST_ELEMENT_NAME (vconv)); + gst_object_unref (vconv); + goto fail; + } + + if (vdec) { + gst_element_link_many (vdec, vconv, queue, vsink, NULL); + gate = vdec; + } else { + gst_element_link_many (vconv, queue, vsink, NULL); + gate = vconv; + } + } else { + if (vdec) { + if (!gst_element_link_many (vdec, queue, vsink, NULL)) { + NVGST_CRITICAL_MESSAGE_V + ("failed to link %s to %s, try --disable-vnative", + GST_ELEMENT_NAME (vdec), GST_ELEMENT_NAME (vsink)); + goto fail; + } + gate = vdec; + } else { + gst_element_link (queue, vsink); + gate = queue; + } + } + + pad = gst_element_get_static_pad (gate, "sink"); + if (!pad && (gate == vconv)) + pad = gst_element_get_static_pad (gate, "video_sink"); + if (pad) { + gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (pad); + } else { + NVGST_CRITICAL_MESSAGE_V ("failed to get sink pad from %s", + GST_ELEMENT_NAME (gate)); + } + + if (!app->seekElement) + app->seekElement = gst_object_ref (vsink); + } + +done: + return GST_ELEMENT (bin); + +fail: + if (bin) { + gst_object_unref (bin); + bin = NULL; + } + goto done; +} + + +static GstElement * +create_audio_pipeline (GstCaps * caps) +{ + GstElement *bin = NULL; + GstElement *adec = NULL, *gate = NULL; + GstElement *asink = NULL, *aconv = NULL; + GstElement *cfilter = NULL; + GstPad *pad, 
*asink_pad; + GstCaps *asink_caps, *filtercaps; + NvGstPlayFlags use_conv = app->input->attrs.flags; + inAttrs *in = app->input; + + if (app->apipe) { + bin = app->apipe; + + } else { + bin = gst_bin_new ("audio_bin"); + + if (!gst_caps_is_raw (app->adbin, caps) && !in->dbin_audio_decoders) { + adec = create_element (caps, NVGST_AUDIO_DEC, app->sad, in->audio_dec); + if (!adec) { + NVGST_CRITICAL_MESSAGE ("failed to create audio decoder pipe"); + goto fail; + } + + if (!gst_bin_add (GST_BIN (bin), adec)) { + NVGST_CRITICAL_MESSAGE ("failed to add decoder pipe to audio_bin"); + gst_object_unref (adec); + goto fail; + } + } + + asink = create_element (NULL, NVGST_AUDIO_SINK, app->sas, NULL); + if (!asink) { + NVGST_CRITICAL_MESSAGE_V ("failed to create element: %s", + NVGST_AUDIO_SINK); + goto fail; + } + app->asink = asink; + + if (!gst_bin_add (GST_BIN (bin), asink)) { + NVGST_CRITICAL_MESSAGE_V ("failed to add %s to audio_bin", + GST_ELEMENT_NAME (asink)); + gst_object_unref (asink); + goto fail; + } + + set_sync (asink, in->attrs.sync); + + asink_pad = gst_element_get_static_pad (app->asink, "sink"); + if (asink_pad) { + gboolean intersect = TRUE; + asink_caps = gst_pad_get_pad_template_caps (asink_pad); + intersect = gst_caps_can_intersect (asink_caps, caps); +#ifdef NVGST_TARGET_TEGRA + GstStructure *str = gst_caps_get_structure (caps, 0); + const gchar *format = gst_structure_get_string (str, "format"); + guint64 channel_mask = 0x0; + if (gst_structure_has_field (str, "channel-mask")) + channel_mask = + gst_value_get_bitmask (gst_structure_get_value (str, + "channel-mask")); + if (format) { + if (!intersect || (strstr (format, "F")) || (strstr (format, "U")) + || (channel_mask != 0x3)) { + use_conv = app->input->attrs.flags &= ~NVGST_PLAY_FLAG_NATIVE_AUDIO; + } + } +#else + if (!intersect) { + use_conv = app->input->attrs.flags &= ~NVGST_PLAY_FLAG_NATIVE_AUDIO; + } +#endif + gst_caps_unref (asink_caps); + } + gst_object_unref (asink_pad); + + if (!(use_conv & 
NVGST_PLAY_FLAG_NATIVE_AUDIO)) { + aconv = create_element (NULL, NVGST_AUDIO_CONV, app->sac, NULL); + if (!aconv) { + g_print ("failed to create element: %s", NVGST_AUDIO_CONV); + goto fail; + } + + if (!gst_bin_add (GST_BIN (bin), aconv)) { + g_print ("failed to add %s to audio_bin", GST_ELEMENT_NAME (aconv)); + goto fail; + } + + cfilter = gst_element_factory_make ("capsfilter", NULL); + if (!cfilter) { + g_print ("failed to create element: capsfilter"); + goto fail; + } + + filtercaps = + gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, "S16LE", + NULL); + + g_object_set (G_OBJECT (cfilter), "caps", filtercaps, NULL); + gst_caps_unref (filtercaps); + + if (!gst_bin_add (GST_BIN (bin), cfilter)) { + g_print ("failed to add cfilter to audio_bin"); + goto fail; + } + + if (adec) { + gst_element_link_many (adec, aconv, cfilter, asink, NULL); + gate = adec; + } else { + gst_element_link_many (aconv, cfilter, asink, NULL); + gate = aconv; + } + } else { + if (adec) { + if (!gst_element_link (adec, asink)) { + NVGST_CRITICAL_MESSAGE_V + ("failed to link %s to %s, try --disable-anative", + GST_ELEMENT_NAME (adec), GST_ELEMENT_NAME (asink)); + goto fail; + } + gate = adec; + } else { + gate = asink; + } + } + + pad = gst_element_get_static_pad (gate, "sink"); + if (pad) { + gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (pad); + } else { + NVGST_CRITICAL_MESSAGE_V ("failed to get sink pad from %s", + GST_ELEMENT_NAME (gate)); + } + + if (!app->seekElement) + app->seekElement = gst_object_ref (asink); + } + +done: + return GST_ELEMENT (bin); + +fail: + if (bin) { + gst_object_unref (bin); + bin = NULL; + } + app->return_value = -1; + goto done; +} + + +static void +cb_newpad (GstElement * decodebin, GstPad * pad, gpointer data) +{ + inAttrs *in = app->input; + GstCaps *caps = gst_pad_query_caps (pad, NULL); + const GstStructure *str = gst_caps_get_structure (caps, 0); + const gchar *name = gst_structure_get_name (str); + 
GstElement **dec = NULL; + GstElement *sink = NULL; + GstPad *sinkpad; + gboolean create = FALSE; + gint *stryp = NULL; + { + gchar *str = gst_caps_to_string (caps); + NVGST_INFO_MESSAGE_V ("creating the pipe for \"%s\"", str); + g_free (str); + } + + if (!strncmp (name, "video", 5)) { + if ((in->attrs.flags & NVGST_PLAY_FLAG_VIDEO) && multitrack_instance) { + + stryp = &app->vstream_select; + if (app->vstream_select == -1) + app->vstream_select = app->vstreams - 1; + app->vpipe = create_video_pipeline (caps, pad); + dec = &app->vpipe; + create = TRUE; + if (app->vstreams > 1) + multitrack_instance = 0; + } else if (!GUI) { + sink = gst_element_factory_make ("fakesink", NULL); + g_object_set (G_OBJECT (sink), "sync", in->attrs.sync, NULL); + dec = &sink; + } + + } else if (!strncmp (name, "image", 5)) { + if ((in->attrs.flags & NVGST_PLAY_FLAG_VIDEO) && multitrack_instance) { + stryp = &app->vstream_select; + + if (app->vstream_select == -1) + app->vstream_select = app->vstreams - 1; + app->vpipe = create_video_pipeline (caps, pad); + dec = &app->vpipe; + create = TRUE; + if (app->vstreams > 1) + multitrack_instance = 0; + } else if (!GUI) { + sink = gst_element_factory_make ("fakesink", NULL); + g_object_set (G_OBJECT (sink), "sync", in->attrs.sync, NULL); + dec = &sink; + } + + } else if (!strncmp (name, "audio", 5)) { + if (in->attrs.flags & NVGST_PLAY_FLAG_AUDIO) { + stryp = &app->astream_select; + if (app->astream_select == -1) { + app->apipe = create_audio_pipeline (caps); + app->astream_select = app->astreams + 1; + dec = &app->apipe; + create = TRUE; + } else if (app->astreams == (app->astream_select - 1)) { + app->apipe = create_audio_pipeline (caps); + dec = &app->apipe; + create = TRUE; + } + + app->astreams++; + } else if (!GUI) { + sink = gst_element_factory_make ("fakesink", NULL); + g_object_set (G_OBJECT (sink), "sync", in->attrs.sync, NULL); + dec = &sink; + } + } + + if (dec && *dec && stryp) { + if (g_object_is_floating (*dec)) { + + if (!strncmp 
(name, "image", 5) || !strncmp (name, "video", 5)) { + if (gst_element_set_state (*dec, GST_STATE_READY) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("element: %s state change failure to %s", + GST_ELEMENT_NAME (*dec), + gst_element_state_get_name (GST_STATE_PAUSED)); + + } else if ((app->disp.mDisplay && app->vsink + && !app->attrs.disable_fullscreen) || GUI) { + Window window; + if (GUI) { + window = (Window) CALL_GUI_FUNC (get_video_window); + } else { + nvgst_create_window (&app->disp, APPLICATION_NAME); + app->x_event_thread = g_thread_new ("nvgst-window-event-thread", + nvgst_x_event_thread, app); + window = app->disp.window; + } + set_window_handle (window); + + } + } + if (gst_element_set_state (*dec, GST_STATE_PAUSED) == + GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("element: %s state change failure to %s", + GST_ELEMENT_NAME (*dec), + gst_element_state_get_name (GST_STATE_PAUSED)); + gst_object_unref (*dec); + *dec = NULL; + if (app->attrs.aud_track == -1) + *stryp = -1; + else + *stryp = -2; + + gst_element_post_message (GST_ELEMENT (app->pipeline), + gst_message_new_application (GST_OBJECT (app->pipeline), + gst_structure_new ("NvGstAppVideoBinFailure", + "message", G_TYPE_STRING, + "Video Pipeline state change to Paused failed", NULL))); + + goto done; + } + + if (!gst_bin_add (GST_BIN (app->pipeline), *dec)) { + NVGST_CRITICAL_MESSAGE_V ("could not add element: %s to pipeline", + GST_ELEMENT_NAME (*dec)); + gst_object_unref (*dec); + *dec = NULL; + if (app->attrs.aud_track == -1) + *stryp = -1; + else + *stryp = -2; + + goto done; + } + } + + sinkpad = gst_element_get_static_pad (*dec, "sink"); + if (sinkpad) { + if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad))) { + NVGST_CRITICAL_MESSAGE_V + ("could not link \"%s\" to the (decode+)render pipeline \"%s\"", + name, GST_ELEMENT_NAME (*dec)); + gst_object_unref (sinkpad); + gst_element_set_state (GST_ELEMENT (*dec), GST_STATE_NULL); + gst_bin_remove (GST_BIN 
(app->pipeline), *dec); + *dec = NULL; + if (app->attrs.aud_track == -1) + *stryp = -1; + else + *stryp = -2; + + } else + gst_object_unref (sinkpad); + } else { + NVGST_CRITICAL_MESSAGE_V ("failed to get sink pad from %s", + GST_ELEMENT_NAME (*dec)); + gst_element_set_state (GST_ELEMENT (*dec), GST_STATE_NULL); + gst_bin_remove (GST_BIN (app->pipeline), *dec); + *dec = NULL; + if (app->attrs.aud_track == -1) + *stryp = -1; + else + *stryp = -2; + + } + + } else { + NVGST_WARNING_MESSAGE_V ("decoder pipeline for \"%s\" not created <%d>", + name, create); + + if (create) { + NVGST_CRITICAL_MESSAGE ("failed to create/activate the decode pipeline"); + } + } + if (((app->astreams > 1) || (app->vstreams > 1))) { + g_print ("\n\nStream have %d audio tracks and %d video tracks. \n" + "By default it is picking first track \n" + "If want to switch track then give play a stream with an option \n" + "--audio-track or --video-track track no.\n\n", app->astreams, + app->vstreams); + } + +done: + gst_caps_unref (caps); +} + + +static void +bin_element_added (GstElement * dbin, GstElement * element, gpointer * app) +{ + GstElementFactory *factory = gst_element_get_factory (element); + const gchar *klass = gst_element_factory_get_klass (factory); + if (strstr (klass, "Decode") && strstr (klass, "Video")) { + g_object_set (G_OBJECT (element), "full-frame", FALSE, NULL); + g_signal_handlers_disconnect_by_func (dbin, bin_element_added, app); + } + return; +} + + +static NvGstAutoplugSelectResult +autoplug_select (GstElement * dbin, GstPad * pad, GstCaps * caps, + GstElementFactory * factory, gpointer data) +{ + inAttrs *in = app->input; + NvGstAutoplugSelectResult ret = NVGST_AUTOPLUG_SELECT_TRY; + const gchar *klass = gst_element_factory_get_klass (factory); + + if (strstr (klass, "Demux")) { + app->found_demuxer = TRUE; + if (!strcmp ((GST_OBJECT_NAME (factory)), "mpegtsdemux")) { + g_signal_connect (dbin, "element-added", G_CALLBACK (bin_element_added), + app); + } + } + + /* we 
are only interested in decoders */ + if (strstr (klass, "Decode")) { + if ((strstr (klass, "Audio"))) { + + CALL_GUI_FUNC (set_decoder_caps, caps); + + if (in->attrs.flags & NVGST_PLAY_FLAG_AUDIO) { + if (!in->audio_dec && !in->dbin_audio_decoders) { + in->audio_dec = get_keys (caps, NVGST_AUDIO_DEC, app->sad); + if (in->audio_dec) { + in->dbin_audio_decoders = FALSE; + ret = NVGST_AUTOPLUG_SELECT_EXPOSE; + } else { + in->dbin_audio_decoders = TRUE; + } + } + } else { + + ret = NVGST_AUTOPLUG_SELECT_EXPOSE; + } + + if (app->stats && app->pfData.file) { + GstCaps *scaps = gst_caps_copy (caps); + GstStructure *str = gst_caps_get_structure (scaps, 0); + gst_structure_remove_field (str, "codec_data"); + g_fprintf (app->pfData.file, "Audio Codec: %s\n", + gst_caps_to_string (scaps)); + gst_caps_unref (scaps); + } + + } else if ((strstr (klass, "Video")) || (strstr (klass, "Image"))) { + + CALL_GUI_FUNC (set_decoder_caps, caps); + + if ((in->attrs.flags & NVGST_PLAY_FLAG_VIDEO) && app->vstreams < 1) { + if (!in->video_dec && !in->dbin_video_decoders) { + GstStructure *str = gst_caps_get_structure (caps, 0); + const gchar *name = gst_structure_get_name (str); + /* nvv4l2decoder does not support jpeg/png, h263, wmv format */ + if (!app->svd && g_strcmp0(name, "image/jpeg") && + g_strcmp0(name, "image/png") && + g_strcmp0(name, "video/x-wmv") && + g_strcmp0(name, "video/x-h263")) + app->svd = g_strconcat (NVGST_DEFAULT_VIDEO_DEC, NULL); + in->video_dec = get_keys (caps, NVGST_VIDEO_DEC, app->svd); + + if (in->video_dec) { + in->dbin_video_decoders = FALSE; + ret = NVGST_AUTOPLUG_SELECT_EXPOSE; + } else { + in->dbin_video_decoders = TRUE; + if (CALL_GUI_FUNC (skip_decoder, factory)) + ret = NVGST_AUTOPLUG_SELECT_SKIP; + } + } + } else { + ret = NVGST_AUTOPLUG_SELECT_EXPOSE; + } + + /* image decoders under video class won't be considered, if considered, will race to EOS */ + if (strstr (klass, "Image") && !app->found_demuxer) { + NVGST_DEBUG_MESSAGE ("standalone image 
display"); + app->image_eos = 1; + } + + if (app->stats && app->pfData.file) { + GstCaps *scaps = gst_caps_copy (caps); + GstStructure *str = gst_caps_get_structure (scaps, 0); + gst_structure_remove_field (str, "codec_data"); + g_fprintf (app->pfData.file, "Video Codec: %s\n", + gst_caps_to_string (scaps)); + gst_caps_unref (scaps); + } + app->vstreams++; + } + } + + if (ret == NVGST_AUTOPLUG_SELECT_TRY) { + gchar *str = gst_caps_to_string (caps); + NVGST_DEBUG_MESSAGE_V ("%s: %s:\n\"%s\"", GST_OBJECT_NAME (factory), klass, + str); + g_free (str); + } + + return ret; +} + + +static void +no_more_pads (GstElement * element, gpointer data) +{ + NVGST_DEBUG_MESSAGE_V ("last pad: by %s\n", GST_ELEMENT_NAME (element)); + app->no_more_pads = TRUE; + + if ((app->astream_select < 0 && app->vstream_select < 0) + || ((app->astream_select > app->astreams) + && (app->vstream_select > app->vstreams))) { + GST_ELEMENT_ERROR (app->pipeline, STREAM, DECODE, + ("The media stream is empty, i.e., it has no audio or video to play!"), + (NULL)); + } else { + if (app->astream_select == -2) + NVGST_CRITICAL_MESSAGE ("Failed to create user selected audio track"); + else if (app->astream_select > app->astreams) + NVGST_CRITICAL_MESSAGE + ("Creation of audio pipeline failed : User selected audio track number is exceeding total number of audio tracks in the stream"); + + if (app->vstream_select == -2) + NVGST_CRITICAL_MESSAGE ("Failed to create user selected video track"); + else if (app->vstream_select > app->vstreams) + NVGST_CRITICAL_MESSAGE + ("Creation of video pipeline failed : User selected video track number is exceeding total number of video tracks in the stream"); + + } +} + + +static void +on_pad_added (GstElement * element, GstPad * pad, gpointer data) +{ + inAttrs *in = app->input; + GstPad *sinkpad; + GstElement **dbin = NULL; + GstStateChangeReturn rt = GST_STATE_CHANGE_SUCCESS; + GstCaps *caps = gst_pad_query_caps (pad, NULL); + const GstStructure *str = 
gst_caps_get_structure (caps, 0); + const gchar *type = gst_structure_get_string (str, "media"); + + if (g_strcmp0 (type, "audio")) { + dbin = &app->vdbin; + } else if (g_strcmp0 (type, "video")) { + dbin = &app->adbin; + } else { + NVGST_WARNING_MESSAGE ("unknown rtp payload type"); + } + + g_mutex_lock (&app->dbin_lock); + if (dbin) { + if (!*dbin) { + *dbin = gst_element_factory_make ("decodebin", NULL); + if (!*dbin) { + NVGST_CRITICAL_MESSAGE ("failed to create decodebin"); + } + + g_signal_connect (*dbin, "autoplug-select", G_CALLBACK (autoplug_select), + app); + if (GUI) + g_signal_connect (*dbin, "autoplug-sort", + G_CALLBACK (CALL_GUI_FUNC (get_autoplug_sort_callback)), + GET_GUI_CTX ()); + g_signal_connect (*dbin, "pad-added", G_CALLBACK (cb_newpad), app); + g_signal_connect (*dbin, "no-more-pads", G_CALLBACK (no_more_pads), app); + g_object_set (G_OBJECT (*dbin), "use-buffering", in->attrs.use_buffering, + "low-percent", in->attrs.low_percent, "high-percent", + in->attrs.high_percent, "max-size-time", in->attrs.max_size_time, + "max-size-bytes", in->attrs.max_size_bytes, "max-size-buffers", + in->attrs.max_size_buffers, NULL); + + rt = gst_element_set_state (*dbin, GST_STATE_PLAYING); + if (rt == GST_STATE_CHANGE_FAILURE) { + NVGST_CRITICAL_MESSAGE_V ("pipeline state change failure to %s", + gst_element_state_get_name (GST_STATE_PLAYING)); + gst_object_unref (*dbin); + *dbin = NULL; + } + + if (!gst_bin_add (GST_BIN (app->pipeline), *dbin)) { + NVGST_CRITICAL_MESSAGE ("could not add decodebin to pipeline"); + gst_object_unref (*dbin); + *dbin = NULL; + } + } + + sinkpad = gst_element_get_static_pad (*dbin, "sink"); + if (sinkpad) { + if (GST_PAD_LINK_FAILED (gst_pad_link (pad, sinkpad))) { + if (!gst_pad_is_linked (sinkpad)) { + NVGST_CRITICAL_MESSAGE_V ("could not link source: %s to decodebin", + GST_ELEMENT_NAME (element)); + gst_element_set_state (GST_ELEMENT (*dbin), GST_STATE_NULL); + gst_bin_remove (GST_BIN (app->pipeline), *dbin); + *dbin = NULL; 
+ } else { + NVGST_WARNING_MESSAGE_V + ("stream has multiple %s tracks; picking first one", type); + } + gst_object_unref (sinkpad); + } else + gst_object_unref (sinkpad); + } else if (*dbin) { + NVGST_CRITICAL_MESSAGE_V ("failed to get sink pad from %s", + GST_ELEMENT_NAME (*dbin)); + gst_element_set_state (GST_ELEMENT (*dbin), GST_STATE_NULL); + gst_bin_remove (GST_BIN (app->pipeline), *dbin); + *dbin = NULL; + } + } + g_mutex_unlock (&app->dbin_lock); + + gst_caps_unref (caps); + return; +} + + +static void +reset_current_track (void) +{ + app->running = FALSE; + + if (app->image_eos) { + if (g_main_context_find_source_by_id (NULL, app->image_eos)) + g_source_remove (app->image_eos); + app->image_eos = 0; + } + + if (app->pipeline) { + GstStateChangeReturn ret; + + ret = gst_element_set_state (app->pipeline, GST_STATE_READY); + g_assert (ret != GST_STATE_CHANGE_ASYNC); + + if (app->stats) { + g_timer_stop (app->pfData.timer); + display_current_fps (&app->pfData); + + if (g_main_context_find_source_by_id (NULL, app->pfData.dps_cb)) + g_source_remove (app->pfData.dps_cb); + app->pfData.dps_cb = 0; + if (app->pfData.file) { + g_fprintf (app->pfData.file, "Total Running Time: %f seconds\n", + g_timer_elapsed (app->pfData.timer, NULL)); + fflush (app->pfData.file); + } + g_timer_destroy (app->pfData.timer); + app->pfData.timer = NULL; + } + + g_usleep (500000); + } + + g_mutex_lock (&app->window_lock); + if (app->disp.window) + nvgst_destroy_window (&app->disp); + g_mutex_unlock (&app->window_lock); + + if (app->x_event_thread) + g_thread_join (app->x_event_thread); + app->x_event_thread = NULL; + + app->in_error = FALSE; + app->got_eos = FALSE; + app->no_more_pads = FALSE; + app->found_demuxer = FALSE; + app->is_live = FALSE; + app->buffering = FALSE; + app->image_eos = 0; + app->pre_dbin_lp = FALSE; + app->last_seek_time = 0; + app->accum_time = 0; + app->vstreams = 0; + app->astreams = 0; + + return; +} + + +static void +destroy_current_track (void) +{ + inAttrs 
*in = app->input; + if (multitrack_instance == 0) + multitrack_instance = 1; + + app->running = FALSE; + + if (app->bus_id) { + if (g_main_context_find_source_by_id (NULL, app->bus_id)) + g_source_remove (app->bus_id); + app->bus_id = 0; + } + + if (app->cmd_id) { + if (g_main_context_find_source_by_id (NULL, app->cmd_id)) + g_source_remove (app->cmd_id); + app->cmd_id = 0; + } + + reset_current_track (); + in->postpone = FALSE; + + app->cur_operation = NVGST_OPS_NONE; + + if (app->seekElement) { + gst_object_unref (app->seekElement); + app->seekElement = NULL; + } + + if (app->vrender_pad) { + gst_object_unref (app->vrender_pad); + app->vrender_pad = NULL; + } + + if (app->arender_pad) { + gst_object_unref (app->arender_pad); + app->arender_pad = NULL; + } + + if (app->pipeline) { + GstStateChangeReturn ret; + CALL_GUI_FUNC (set_current_pipeline, NULL); + ret = gst_element_set_state (app->pipeline, GST_STATE_NULL); + g_assert (ret != GST_STATE_CHANGE_ASYNC); + gst_object_unref (app->pipeline); + app->pipeline = NULL; + app->vpipe = NULL; + app->apipe = NULL; + app->source = NULL; + app->vsink = NULL; + app->asink = NULL; + app->vdbin = NULL; + app->adbin = NULL; + } + + g_strfreev (in->audio_dec); + in->audio_dec = NULL; + + g_strfreev (in->video_dec); + in->video_dec = NULL; + + in->dbin_audio_decoders = FALSE; + in->dbin_video_decoders = FALSE; + app->astream_select = -1; + app->vstream_select = -1; + + g_free (in->uri); + in->uri = NULL; + + free_cmlist (&in->attrs, in->selfexpr); + + memset (in, 0, sizeof (inAttrs)); + + return; +} + + +static void +get_uri_details (gint i) +{ + GKeyFile *kf = app->pathCfg; + inAttrs *in = app->input; + gchar **gp = app->uriGroups; + gchar *str; + static int loop_count = 0; + + app->unpause = FALSE; + in->attrs = app->attrs; + in->selfexpr = FALSE; + in->operation_mode = NVGST_CMD_SCRIPT; + + if (app->uri) { + in->attrs.repeats = in->attrs.repeats - loop_count; + loop_count++; + in->uri = g_strdup (app->uri); + } else if (!kf) 
{ + in->uri = g_strdup (gp[i]); + } else { + gint key = 0, repeats = 0; + gdouble keyd; + gdouble start, dur; + GError *err = NULL; + + in->uri = g_strdup (gp[i]); + + NVGST_INFO_MESSAGE_V ("\n\n\n loading the configuration for uri: %s\n", + in->uri); + + str = g_key_file_get_string (kf, gp[i], NVCXPR, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", NVCXPR, + err->message); + g_error_free (err); + err = NULL; + } else if (str) { + in->attrs.lplist_head = in->attrs.cmlist_head = NULL; + if ('*' == *str) { + g_free (str); + str = get_random_cxpr (); + } + if (build_cmlist (str, &in->attrs)) { + in->selfexpr = TRUE; + } else { + in->attrs = app->attrs; + in->selfexpr = FALSE; + } + g_free (str); + } + + key = g_key_file_get_integer (kf, gp[i], NVNOP, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", NVNOP, + err->message); + g_error_free (err); + err = NULL; + } else { + if (key) { + free_cmlist (&in->attrs, in->selfexpr); + build_cmlist ("r", &in->attrs); + in->selfexpr = TRUE; + } + } + + key = g_key_file_get_integer (kf, gp[i], NVSTARTPER, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVSTARTPER, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.startPer = key; + } + + start = g_key_file_get_double (kf, gp[i], NVSTART, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", NVSTART, + err->message); + g_error_free (err); + err = NULL; + } else { + if (in->attrs.startPer) + in->attrs.segment_start = ABS (start); + else + in->attrs.segment_start = ABS (start) * GST_SECOND; + } + + dur = g_key_file_get_double (kf, gp[i], NVDURATION, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVDURATION, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.segment_duration = ABS (dur) * GST_SECOND; + if (!in->attrs.segment_duration) + 
in->attrs.segment_duration = GST_CLOCK_TIME_NONE; + } + + repeats = g_key_file_get_integer (kf, gp[i], NVREPEATS, NULL); + repeats = repeats > 0 ? repeats : app->attrs.repeats; + in->attrs.repeats = repeats; + + in->attrs.repeats = in->attrs.repeats - loop_count; + loop_count++; + if (repeats - loop_count == 0) + loop_count = 0; + + key = g_key_file_get_integer (kf, gp[i], NVAUDIO, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", NVAUDIO, + err->message); + g_error_free (err); + err = NULL; + } else { + if (key && app->have_alsa_sinks) + in->attrs.flags |= NVGST_PLAY_FLAG_AUDIO; + else + in->attrs.flags &= ~NVGST_PLAY_FLAG_AUDIO; + } + + key = g_key_file_get_integer (kf, gp[i], NVVIDEO, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", NVVIDEO, + err->message); + g_error_free (err); + err = NULL; + } else { + if (key) + in->attrs.flags |= NVGST_PLAY_FLAG_VIDEO; + else + in->attrs.flags &= ~NVGST_PLAY_FLAG_VIDEO; + } + + key = g_key_file_get_integer (kf, gp[i], NVNATIVE_AUDIO, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVNATIVE_AUDIO, err->message); + g_error_free (err); + err = NULL; + } else { + if (key) + in->attrs.flags |= NVGST_PLAY_FLAG_NATIVE_AUDIO; + else + in->attrs.flags &= ~NVGST_PLAY_FLAG_NATIVE_AUDIO; + } + + key = g_key_file_get_integer (kf, gp[i], NVNATIVE_VIDEO, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVNATIVE_VIDEO, err->message); + g_error_free (err); + err = NULL; + } else { + if (key) + in->attrs.flags |= NVGST_PLAY_FLAG_NATIVE_VIDEO; + else + in->attrs.flags &= ~NVGST_PLAY_FLAG_NATIVE_VIDEO; + } + + key = g_key_file_get_integer (kf, gp[i], NVSYNC, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", NVSYNC, + err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.sync = key; + } + + key = g_key_file_get_integer (kf, 
gp[i], NVUSE_BUFFERING, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVUSE_BUFFERING, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.sync = key; + } + + key = g_key_file_get_integer (kf, gp[i], NVLOW_PERCENT, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVLOW_PERCENT, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.low_percent = (ABS (key) < 99) ? ABS (key) : 10; + } + + key = g_key_file_get_integer (kf, gp[i], NVHIGH_PERCENT, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVHIGH_PERCENT, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.high_percent = + (ABS (key) > in->attrs.low_percent) ? ABS (key) : 99; + } + + keyd = g_key_file_get_double (kf, gp[i], NVMAX_SIZE_TIME, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVMAX_SIZE_TIME, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.max_size_time = ABS (keyd) * GST_SECOND; + } + + key = g_key_file_get_integer (kf, gp[i], NVMAX_SIZE_BYTES, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVMAX_SIZE_BYTES, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.max_size_bytes = ABS (key); + } + + key = g_key_file_get_integer (kf, gp[i], NVMAX_SIZE_BUFFERS, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVMAX_SIZE_BUFFERS, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.max_size_buffers = ABS (key); + } + + key = g_key_file_get_integer (kf, gp[i], NVIMAGE_DISPLAY_TIME, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", + NVIMAGE_DISPLAY_TIME, err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.image_display_time = (ABS (key) > 1) ? 
ABS (key) : 2; + } + + key = g_key_file_get_integer (kf, gp[i], NVTAGS, &err); + if (err) { + NVGST_DEBUG_MESSAGE_V ("error while reading %s from keyfile: %s", NVTAGS, + err->message); + g_error_free (err); + err = NULL; + } else { + in->attrs.show_tags = key; + } + } + app->astream_select = app->attrs.aud_track; + app->vstream_select = app->attrs.vid_track; +} + + +static GstBusSyncReply +bus_sync_handler (GstBus * bus, GstMessage * msg, gpointer data) +{ + if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS) { + NVGST_DEBUG_MESSAGE ("got eos from pipeline "); + app->got_eos = TRUE; + } + + return GST_BUS_PASS; +} + + +static NvGstReturn +setup_track (void) +{ + GstBus *bus; + GstElement *pipeline = NULL, *source = NULL, *dbin = NULL; + inAttrs *input = app->input; + NvGstReturn ret = NVGST_RET_SUCCESS; + + CALL_GUI_FUNC (setup_new_track, app->uriCount); + + if (!app->pipeline) { + get_uri_details (app->uriCount - 1); + + if (input->attrs.flags & NVGST_PLAY_FLAG_PLAYBIN) { + GstElement *vsink = NULL, *asink = NULL; + gint buf_size; + GstClockTime buf_time; + gchar *uri = NULL; + + pipeline = gst_element_factory_make ("playbin", NULL); + if (!pipeline) { + NVGST_CRITICAL_MESSAGE ("failed to create playbin"); + goto fail; + } + app->pipeline = pipeline; + + CALL_GUI_FUNC (set_current_pipeline, pipeline); + + vsink = create_element (NULL, NVGST_VIDEO_SINK, app->svs, NULL); + if (!vsink) { + NVGST_WARNING_MESSAGE_V ("failed to create %s", NVGST_VIDEO_SINK); + } + app->vsink = vsink; + + asink = create_element (NULL, NVGST_AUDIO_SINK, app->sas, NULL); + if (!asink) { + NVGST_WARNING_MESSAGE_V ("failed to create %s", NVGST_AUDIO_SINK); + } + app->asink = asink; + + + if (!g_str_has_prefix (input->uri, "file://")) { + if (!(g_str_has_prefix (input->uri, "rtsp://") || + g_str_has_prefix (input->uri, "http://") || + g_str_has_prefix (input->uri, "udp://"))) { + uri = g_strconcat ("file://", input->uri, NULL); + } else + uri = g_strdup (input->uri); + } else + uri = g_strdup 
(input->uri); + + if (input->attrs.use_buffering) { + + input->attrs.flags |= NVGST_PLAY_FLAG_BUFFERING; + } + buf_size = + (input->attrs.max_size_bytes > 0) ? input->attrs.max_size_bytes : -1; + + buf_time = (GST_CLOCK_TIME_IS_VALID (input->attrs.max_size_time) + && input->attrs.max_size_time) ? input->attrs. + max_size_time : (GstClockTime) - 1; + + g_object_set (G_OBJECT (pipeline), "video-sink", vsink, "flags", + input->attrs.flags & (NVGST_PLAY_FLAG_PLAYBIN - 1), "audio-sink", + asink, "uri", uri, "buffer-size", buf_size, "buffer-duration", + buf_time, NULL); + + g_free (uri); + asink = NULL; + app->seekElement = gst_object_ref (pipeline); + app->no_more_pads = TRUE; + } else { + pipeline = gst_pipeline_new ("player"); + if (!pipeline) { + NVGST_CRITICAL_MESSAGE ("failed to create pipeline"); + goto fail; + } + app->pipeline = pipeline; + CALL_GUI_FUNC (set_current_pipeline, pipeline); + + if (g_str_has_prefix (input->uri, "http://")) { + source = create_element (NULL, NVGST_HTTP_SRC, app->shttp, NULL); + } else if (g_str_has_prefix (input->uri, "rtsp://")) { + source = create_element (NULL, NVGST_RTSP_SRC, app->srtsp, NULL); + } else if (g_str_has_prefix (input->uri, "udp://")) { + source = create_element (NULL, NVGST_UDP_SRC, app->sudp, NULL); + } else { + source = create_element (NULL, NVGST_FILE_SRC, app->sfsrc, NULL); + } + + if (!source) { + NVGST_CRITICAL_MESSAGE ("failed to create source\n"); + goto fail; + } else + app->source = source; + + if (g_str_has_prefix (input->uri, "rtsp://")) { + g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), app); + + app->pre_dbin_lp = TRUE; + + g_object_set (G_OBJECT (source), "location", input->uri, NULL); + g_object_set (G_OBJECT (source), "buffer-mode", BUFFER_MODE_SLAVE, + NULL); + + if (!gst_bin_add (GST_BIN (pipeline), source)) { + NVGST_CRITICAL_MESSAGE_V ("failed to add source: %s to pipeline", + GST_ELEMENT_NAME (source)); + gst_object_unref (source); + goto fail; + } + + } else { + if 
(g_str_has_prefix (input->uri, "udp://")) + g_object_set (G_OBJECT (source), "uri", input->uri, NULL); + else if (g_str_has_prefix (input->uri, "file://")) + g_object_set (G_OBJECT (source), "location", input->uri + 7, NULL); + else + g_object_set (G_OBJECT (source), "location", input->uri, NULL); + + if (!gst_bin_add (GST_BIN (pipeline), source)) { + NVGST_CRITICAL_MESSAGE_V ("failed to add source: %s to pipeline", + GST_ELEMENT_NAME (source)); + gst_object_unref (source); + goto fail; + } + + dbin = gst_element_factory_make ("decodebin", NULL); + if (!dbin) { + NVGST_CRITICAL_MESSAGE ("failed to create decodebin"); + goto fail; + } + + if (!gst_bin_add (GST_BIN (pipeline), dbin)) { + NVGST_CRITICAL_MESSAGE ("failed to add decodebin to pipeline"); + gst_object_unref (dbin); + goto fail; + } + g_signal_connect (dbin, "autoplug-select", G_CALLBACK (autoplug_select), + app); + if (GUI) + g_signal_connect (dbin, "autoplug-sort", + G_CALLBACK (CALL_GUI_FUNC (get_autoplug_sort_callback)), + GET_GUI_CTX ()); + g_signal_connect (dbin, "pad-added", G_CALLBACK (cb_newpad), app); + g_signal_connect (dbin, "no-more-pads", G_CALLBACK (no_more_pads), app); + g_object_set (G_OBJECT (dbin), "use-buffering", + input->attrs.use_buffering, "low-percent", input->attrs.low_percent, + "high-percent", input->attrs.high_percent, "max-size-time", + input->attrs.max_size_time, "max-size-bytes", + input->attrs.max_size_bytes, "max-size-buffers", + input->attrs.max_size_buffers, NULL); + + if (!gst_element_link (source, dbin)) { + NVGST_CRITICAL_MESSAGE_V ("failed to link source %s to decodebin", + GST_ELEMENT_NAME (source)); + goto fail; + } + } + + } + app->adbin = app->vdbin = dbin; + + bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); + gst_bus_set_sync_handler (bus, NULL, app, NULL); + gst_bus_set_sync_handler (bus, bus_sync_handler, app, NULL); + app->bus_id = gst_bus_add_watch (bus, bus_call, app); + gst_object_unref (bus); + + } else { + pipeline = app->pipeline; + } + + if 
(app->stats && app->pfData.file) { + gchar *u = input->uri; + g_fprintf (app->pfData.file, "\n\n\n%s\n", input->uri); + while (*u++ != '\0') + fputc ('-', app->pfData.file); + fputc ('\n', app->pfData.file); + } + + if (gst_element_set_state (pipeline, GST_STATE_READY) != + GST_STATE_CHANGE_SUCCESS) { + ret = NVGST_RET_ERR; + } else + app->cmd_id = g_timeout_add (0, on2_input, NULL); + + +done: + return ret; + +fail: + ret = NVGST_RET_ERR; + goto done; +} + + +static void +_intr_handler (int signum) +{ + struct sigaction action; + + g_print ("User Interrupted.. \n"); + app->return_value = -1; + + memset (&action, 0, sizeof (action)); + action.sa_handler = SIG_DFL; + + sigaction (SIGINT, &action, NULL); + + cintr = TRUE; +} + + +static gboolean +check_for_interrupt (gpointer data) +{ + if (cintr) { + cintr = FALSE; + + if (app->pipeline) { + gst_element_post_message (GST_ELEMENT (app->pipeline), + gst_message_new_application (GST_OBJECT (app->pipeline), + gst_structure_new ("NvGstAppInterrupt", + "message", G_TYPE_STRING, "Pipeline interrupted", NULL))); + } else { + /** Hackish **/ + GstMessage *msg = gst_message_new_custom (GST_MESSAGE_APPLICATION, + NULL, gst_structure_new ("NvGstAppInterrupt", + "message", G_TYPE_STRING, "Pipeline interrupted", NULL)); + bus_call (NULL, msg, NULL); + gst_message_unref (msg); + } + + return FALSE; + } + + return TRUE; +} + + +static void +_intr_setup (void) +{ + struct sigaction action; + + memset (&action, 0, sizeof (action)); + action.sa_handler = _intr_handler; + + sigaction (SIGINT, &action, NULL); +} + + +static gboolean +parse_spec (const gchar * option_name, const gchar * value, + gpointer data, GError ** error) +{ + if (!g_strcmp0 ("--use-playbin", option_name)) { + app->attrs.flags |= NVGST_PLAY_FLAG_PLAYBIN; + } else if (!g_strcmp0 ("--no-audio", option_name)) { + app->attrs.flags &= ~NVGST_PLAY_FLAG_AUDIO; + } else if (!g_strcmp0 ("--no-video", option_name)) { + app->attrs.flags &= ~NVGST_PLAY_FLAG_VIDEO; + } else if 
(!g_strcmp0 ("--disable-anative", option_name)) { + app->attrs.flags &= ~NVGST_PLAY_FLAG_NATIVE_AUDIO; + } else if (!g_strcmp0 ("--disable-vnative", option_name)) { + app->attrs.flags &= ~NVGST_PLAY_FLAG_NATIVE_VIDEO; + } else if (!g_strcmp0 ("--sas", option_name)) { + g_free (app->sas); + app->sas = g_strdup (value); + + } else if (!g_strcmp0 ("--svs", option_name)) { + g_free (app->svs); + app->svs = g_strdup (value); + + } else if (!g_strcmp0 ("--sac", option_name)) { + g_free (app->sac); + app->sac = g_strdup (value); + + } else if (!g_strcmp0 ("--svc", option_name)) { + g_free (app->svc); + app->svc = g_strdup (value); + + } else if (!g_strcmp0 ("--shttp", option_name)) { + g_free (app->shttp); + app->shttp = g_strdup (value); + + } else if (!g_strcmp0 ("--srtsp", option_name)) { + g_free (app->srtsp); + app->srtsp = g_strdup (value); + + } else if (!g_strcmp0 ("--sfsrc", option_name)) { + g_free (app->sfsrc); + app->sfsrc = g_strdup (value); + + } else if (!g_strcmp0 ("--sad", option_name)) { + g_free (app->sad); + app->sad = g_strdup (value); + + } else if (!g_strcmp0 ("--svd", option_name)) { + g_free (app->svd); + app->svd = g_strdup (value); + } + + return TRUE; +} + +static int +kbhit (void) +{ + struct timeval tv; + fd_set rdfs; + + tv.tv_sec = 0; + tv.tv_usec = 300000; + + FD_ZERO (&rdfs); + FD_SET (STDIN_FILENO, &rdfs); + + select (STDIN_FILENO + 1, &rdfs, NULL, NULL, &tv); + return FD_ISSET (STDIN_FILENO, &rdfs); +} + +static void +changemode (int dir) +{ + static struct termios oldt, newt; + + if (dir == 1) { + tcgetattr (STDIN_FILENO, &oldt); + newt = oldt; + newt.c_lflag &= ~(ICANON); + tcsetattr (STDIN_FILENO, TCSANOW, &newt); + } else + tcsetattr (STDIN_FILENO, TCSANOW, &oldt); +} + +static gboolean +on2_input (gpointer data) +{ + on_input (NULL, 0, data); + + return FALSE; +} + +static void +nvgst_handle_xevents () +{ + XEvent e; + Atom wm_delete; + displayCtx *dpyCtx = &app->disp; + + /* Handle Display events */ + while (XPending 
(dpyCtx->mDisplay)) { + XNextEvent (dpyCtx->mDisplay, &e); + switch (e.type) { + case ClientMessage: + wm_delete = XInternAtom (dpyCtx->mDisplay, "WM_DELETE_WINDOW", 1); + if (wm_delete != None && wm_delete == (Atom) e.xclient.data.l[0]) { + GST_ELEMENT_ERROR (app->pipeline, RESOURCE, NOT_FOUND, + ("Output window was closed"), (NULL)); + } + if (app->attrs.loop_forever) + app->attrs.loop_forever = 0; + } + } +} + +static gpointer +nvgst_x_event_thread (gpointer data) +{ + g_mutex_lock (&app->window_lock); + while (app->disp.window) { + nvgst_handle_xevents (); + g_mutex_unlock (&app->window_lock); + g_usleep (G_USEC_PER_SEC / 20); + g_mutex_lock (&app->window_lock); + } + g_mutex_unlock (&app->window_lock); + return NULL; +} + + +static gpointer +on_input_thread (gpointer data) +{ + GQueue *que = g_queue_new (); + gchar *buffer = NULL; + int i = 0; + + changemode (1); + + while (!trd_exit) { + if (kbhit ()) { + if (buffer == NULL) + buffer = g_malloc (256); + + buffer[i] = getchar (); + + if (buffer[i] == 27) { + i = 0; + /* TODO: check the ip queue first */ + NVGST_DEBUG_MESSAGE ("\nESC: awaiting one more press to quit\n"); + buffer[0] = getchar (); + if (buffer[0] == 91) { + buffer[0] = getchar (); + switch (buffer[0]) { + case 65: + buffer[0] = ']'; + break; + case 66: + buffer[0] = '['; + break; + case 67: + buffer[0] = '>'; + break; + case 68: + buffer[0] = '<'; + break; + } + } else if (buffer[0] == 27) { + buffer[0] = 'q'; + } + } + + if (buffer[i] != 127) { + if (buffer[i] == 10 || + (!i && (buffer[0] == 'h' || buffer[0] == 'q' + || buffer[0] == 'c' || buffer[0] == 'r' + || buffer[0] == 'p' || buffer[0] == 'z' + || buffer[0] == '[' || buffer[0] == ']' + || buffer[0] == '<' || buffer[0] == '>'))) { + if (buffer[i] == 10) + buffer[i] = 0; + else + buffer[++i] = 0; + i = 0; + + if (g_queue_is_empty (que)) { + g_queue_push_tail (que, buffer); + g_timeout_add (20, on2_input, que); + } + + buffer = NULL; + + } else + i++; + + } else { + if (i > 0) + i--; + } + } 
+ } + + changemode (0); + + while ((buffer = g_queue_pop_head (que))) { + g_free (buffer); + } + + g_queue_free (que); + + return NULL; +} + +static void +build_hash_table (GHashTable * htable) +{ + g_hash_table_insert (htable, NVGST_AUDIO_CONV, NVGST_DEFAULT_AUDIO_CONV); + g_hash_table_insert (htable, NVGST_VIDEO_CONV, NVGST_DEFAULT_VIDEO_CONV); + g_hash_table_insert (htable, NVGST_AUDIO_SINK, NVGST_DEFAULT_AUDIO_SINK); + g_hash_table_insert (htable, NVGST_VIDEO_SINK, NVGST_DEFAULT_VIDEO_SINK); + g_hash_table_insert (htable, NVGST_FILE_SRC, NVGST_DEFAULT_FILE_SRC); + g_hash_table_insert (htable, NVGST_RTSP_SRC, NVGST_DEFAULT_RTSP_SRC); + g_hash_table_insert (htable, NVGST_HTTP_SRC, NVGST_DEFAULT_HTTP_SRC); + g_hash_table_insert (htable, NVGST_UDP_SRC, NVGST_DEFAULT_UDP_SRC); +} + +static NvGstReturn +get_next_command (attrs_s * t, gchar * buffer, gint buf_size, gboolean reuse) +{ + NvGstReturn ret = NVGST_RET_SUCCESS; + GList *cml = t->cmlist; + + if (cml == NULL) { + ret = NVGST_RET_END; + + } else { + sCm *cm = (sCm *) cml->data; + strncpy (buffer, cm->id, buf_size - 1); + } + + if (!reuse) { + if (cml) { + sCm *cm = (sCm *) cml->data; + + if (cm->list) { + GList *list = cm->list; + + do { + sLp *lp = list->data; + + if (++lp->x > lp->n) { + lp->x = 1; + list = g_list_next (list); + if (list == NULL) { + cml = g_list_next (cml); + } + + } else { + cml = lp->c; + list = NULL; + } + } while (list); + + } else { + cml = g_list_next (cml); + } + } + } + + t->cmlist = cml; + + return ret; +} + + +static guint +parse_symbol (GScanner * scanner, attrs_s * t) +{ + guint symbol, next_token; + GList *l_lplist, *l_cmlist; + sLp *lp; + sCm *cm; + + /* expect a valid symbol */ + g_scanner_get_next_token (scanner); + symbol = scanner->token; + + switch (symbol) { + case G_TOKEN_LEFT_CURLY:{ + lp = g_new0 (sLp, 1); + lp->n = last_n; + lp->x = 1; + lp->c = NULL; + last_n = 1; + t->lplist_head = g_list_append (t->lplist_head, lp); + + next_token = g_scanner_peek_next_token 
(scanner); + if (next_token) { + //TODO: return G_TOKEN_ERROR; + } + } + break; + + case G_TOKEN_RIGHT_CURLY:{ + if (t->lplist_head) { + l_lplist = g_list_last (t->lplist_head); + if (l_lplist && t->cmlist_head) { + l_cmlist = g_list_last (t->cmlist_head); + + if (l_cmlist) { + sCm *cm = (sCm *) l_cmlist->data; + cm->list = g_list_append (cm->list, l_lplist->data); + l_lplist->data = NULL; + t->lplist_head = g_list_delete_link (t->lplist_head, l_lplist); + } else { + return G_TOKEN_ERROR; + } + } else { + return G_TOKEN_ERROR; + } + } else { + return G_TOKEN_ERROR; + } + + next_token = g_scanner_peek_next_token (scanner); + if (next_token) { + //TODO: return G_TOKEN_ERROR; + } + } + break; + + case G_TOKEN_FLOAT:{ + last_n = scanner->value.v_float; + next_token = g_scanner_peek_next_token (scanner); + if (next_token) { + //TODO: return G_TOKEN_ERROR; + } + } + break; + + case G_TOKEN_IDENTIFIER:{ + cm = g_new0 (sCm, 1); + cm->id = g_strdup (scanner->value.v_identifier); + + t->cmlist_head = g_list_append (t->cmlist_head, cm); + l_cmlist = g_list_last (t->cmlist_head); + + if (t->lplist_head) { + l_lplist = g_list_last (t->lplist_head); + while (l_lplist && ((sLp *) l_lplist->data)->c == NULL) { + ((sLp *) l_lplist->data)->c = l_cmlist; + l_lplist = g_list_previous (l_lplist); + } + } + + next_token = g_scanner_peek_next_token (scanner); + if (next_token) { + //TODO: return G_TOKEN_ERROR; + } + } + break; + + default: + return G_TOKEN_ERROR; + } + + return G_TOKEN_NONE; +} + + +static void +_freelp_func (gpointer data, gpointer udata) +{ + g_free (data); + return; +} + + +static void +_freecm_func (gpointer data, gpointer udata) +{ + sCm *cm = (sCm *) data; + + g_free (cm->id); + g_free (cm); + + return; +} + + +static void +free_cmlist (attrs_s * attrs, gboolean force) +{ + if (force) { + g_list_foreach (attrs->cmlist_head, _freecm_func, NULL); + g_list_foreach (attrs->lplist_head, _freelp_func, NULL); + } + + attrs->cmlist_head = NULL; + attrs->lplist_head = NULL; 
+ + attrs->cmlist = NULL; + attrs->lplist = NULL; + + return; +} + + +static gboolean +build_cmlist (gchar * text, attrs_s * attrs) +{ + GScanner *data; + guint result; + gboolean res = TRUE; + + data = g_scanner_new (NULL); + + data->config->numbers_2_int = TRUE; + data->config->int_2_float = TRUE; + data->config->scan_identifier_1char = TRUE; + data->config->symbol_2_token = TRUE; + + data->config->cset_skip_characters = + g_strconcat (data->config->cset_skip_characters, " ", NULL); + data->config->cset_identifier_nth = + g_strconcat (data->config->cset_identifier_nth, ".", "-", "[", "]", + "<", ">", NULL); + data->config->cset_identifier_first = + g_strconcat (data->config->cset_identifier_first, "[", "]", "<", ">", + NULL); + + g_scanner_input_text (data, text, strlen (text)); + + data->input_name = text; + + do { + result = parse_symbol (data, attrs); + + g_scanner_peek_next_token (data); + } + while (result == G_TOKEN_NONE && + data->next_token != G_TOKEN_EOF && data->next_token != G_TOKEN_ERROR); + + if (result != G_TOKEN_NONE) { + g_scanner_unexp_token (data, result, NULL, "symbol", NULL, NULL, TRUE); + res = FALSE; + /* TODO free all list */ + } + + attrs->lplist = attrs->lplist_head; + attrs->cmlist = attrs->cmlist_head; + + /* finish parsing */ + g_scanner_destroy (data); + +#if 1 + { + int k = 0; + GList *cml = attrs->cmlist; + while (cml) { + sCm *cm = (sCm *) cml->data; + + printf ("%d %s\n", ++k, cm->id); + + if (cm->list) { + GList *list = cm->list; + + do { + sLp *lp = list->data; + + if (++lp->x > lp->n) { + lp->x = 1; + list = g_list_next (list); + if (list == NULL) { + cml = g_list_next (cml); + } + + } else { + cml = lp->c; + list = NULL; + } + } while (list); + + } else { + cml = g_list_next (cml); + } + } + } +#endif + return res; +} + +gchar * +get_random_cxpr (void) +{ + GRand *cxpr_rand = NULL; + gint32 cmd_num = 0; + gdouble time_num = 0; + GString *rand_string = NULL; + + /*Local macros defines only for this function */ +#define 
MAX_RANDOM_STR_LENGTH 100 +#define MIN_SEEK_RANGE 0.0 +#define MAX_SEEK_RANGE 50.0 + +#define MIN_TIME_RANGE 5.0 +#define MAX_TIME_RANGE 20.0 + + cxpr_rand = g_rand_new (); + if (!cxpr_rand) { + NVGST_CRITICAL_MESSAGE ("Error in allocating memory !!! "); + return NULL; + } + + rand_string = g_string_new ("r"); + if (!rand_string) { + NVGST_CRITICAL_MESSAGE ("Error in allocating memory !!! "); + return NULL; + } + + + while (rand_string->len < MAX_RANDOM_STR_LENGTH) { + + rand_string = g_string_append (rand_string, " "); + cmd_num = g_rand_int_range (cxpr_rand, 1, 11); + + switch (cmd_num) { + case 1: //maps to resume + rand_string = g_string_append (rand_string, "r"); + break; + case 2: //maps to pause + rand_string = g_string_append (rand_string, "p"); + break; + + case 3: //maps to stop + rand_string = g_string_append (rand_string, "z"); + break; + + case 4: //maps to play-seek (r s w) + time_num = + g_rand_double_range (cxpr_rand, MIN_SEEK_RANGE, MAX_SEEK_RANGE); + g_string_append_printf (rand_string, "r s%.2f", time_num); + + time_num = + g_rand_double_range (cxpr_rand, MIN_TIME_RANGE, MAX_TIME_RANGE); + g_string_append_printf (rand_string, " w%.2f", time_num); + break; + + case 5: //maps to wait w + time_num = + g_rand_double_range (cxpr_rand, MIN_TIME_RANGE, MAX_TIME_RANGE); + g_string_append_printf (rand_string, "w%.2f", time_num); + break; + + case 6: //maps to play-seek-percentage (r v w) + time_num = g_rand_double_range (cxpr_rand, 0.0, 100.0); + g_string_append_printf (rand_string, "r v%.2f", time_num); + + time_num = + g_rand_double_range (cxpr_rand, MIN_TIME_RANGE, MAX_TIME_RANGE); + g_string_append_printf (rand_string, " w%.2f", time_num); + break; + + case 7: //maps to absolute seek s + time_num = + g_rand_double_range (cxpr_rand, MIN_SEEK_RANGE, MAX_SEEK_RANGE); + g_string_append_printf (rand_string, "s%.2f", time_num); + break; + + case 8: //maps to percentage seek v + time_num = g_rand_double_range (cxpr_rand, 0.0, 100.0); + g_string_append_printf 
(rand_string, "v%.2f", time_num); + break; + + case 9: //maps to > + rand_string = g_string_append (rand_string, ">"); + break; + + case 10: //maps to < + rand_string = g_string_append (rand_string, "<"); + break; + + default: + continue; + } + } + + /*End it with r */ + rand_string = g_string_append (rand_string, " r"); + + NVGST_INFO_MESSAGE_V ("Random expression generated is %s\n", + rand_string->str); + + g_rand_free (cxpr_rand); + return g_string_free (rand_string, FALSE); +} + +void +get_elem_cfg (gchar * file) +{ + GError *error = NULL; + + g_free (app->elem_file); + app->elem_file = NULL; + + if (app->elemCfg) + g_key_file_free (app->elemCfg); + app->elemCfg = NULL; + + if (file) { + const GKeyFileFlags flags = G_KEY_FILE_KEEP_COMMENTS + | G_KEY_FILE_KEEP_TRANSLATIONS; + app->elemCfg = g_key_file_new (); + if (!g_key_file_load_from_file (app->elemCfg, file, flags, &error)) { + NVGST_WARNING_MESSAGE_V ("failed to load elem file: err: %s", + error->message); + g_error_free (error); + g_key_file_free (app->elemCfg); + app->elemCfg = NULL; + app->return_value = -1; + } + + g_strfreev (app->elem_gps); + app->elem_gps = g_key_file_get_groups (app->elemCfg, NULL); + app->elem_file = strdup (file); + } +} + +int +main (int argc, char *argv[]) +{ + char stats[50]; + GOptionContext *ctx = NULL; + GOptionGroup *group = NULL; + GError *error = NULL; + char *alsa_device = NULL; + + app = &sapp; + memset (app, 0, sizeof (appCtx)); + app->extra_options = g_strdup ("Runtime Commands:\n \ + " " q quit the application\n \ + " " h print help\n \ + " + " Up Key, ] goto next track\n \ + " + " c restart current track\n \ + " + " Down Key, [ goto previous track\n \ + " + " spos query for position\n \ + " " sdur query for duration\n \ + " " s seek to position in seconds, eg \"s5.120\"\n \ + " " v seek to percent of the duration, eg \"v54\"\n \ + " " f seek by seconds, relative to current position eg \"f23.901\"\n \ + " + " Left Key, < seek backwards by 10 seconds\n \ + " + " Right 
Key, > seek forward by 10 seconds\n \ + " + " p pause playback\n \ + " " r start/resume the playback\n \ + " " z stop the playback\n \ + " " i: enter a single URI\n"); + + ctx = g_option_context_new ("Nvidia GStreamer Model Test"); + group = g_option_group_new ("Cotigao", NULL, NULL, NULL, NULL); + g_option_group_add_entries (group, entries); + g_option_context_set_description (ctx, app->extra_options); + g_option_context_set_main_group (ctx, group); + g_option_context_add_group (ctx, gst_init_get_option_group ()); + + app->attrs.flags = NVGST_PLAY_FLAG_AUDIO; + app->attrs.flags |= NVGST_PLAY_FLAG_VIDEO; + app->attrs.flags |= NVGST_PLAY_FLAG_NATIVE_AUDIO; + app->attrs.flags |= NVGST_PLAY_FLAG_NATIVE_VIDEO; + app->attrs.repeats = 1; + app->attrs.segment_duration = GST_CLOCK_TIME_NONE; + app->attrs.low_percent = 10; + app->attrs.high_percent = 99; + app->attrs.image_display_time = 5; + app->stealth_mode = FALSE; + app->bg_mode = FALSE; + app->disable_dpms = FALSE; + app->disp.mDisplay = NULL; + app->attrs.aud_track = -1; + app->attrs.vid_track = -1; + app->attrs.disable_fullscreen = FALSE; + app->version = FALSE; + g_mutex_init (&app->dbin_lock); + /* Default permittable frames droped percentage is 2 incase not specified at run-time */ + app->attrs.drop_threshold_pct = 2; + + g_mutex_init (&app->window_lock); + + if (!g_option_context_parse (ctx, &argc, &argv, &error)) { + g_option_context_free (ctx); + g_print ("ERROR-<%d>: %s\n", (int) strlen (app->extra_options), + error->message); + goto done; + } + + if (app->version) { + g_print ("\nGstreamer Version ==> %s\n\n", gst_version_string ()); + goto done; + } + + alsa_device = nvgst_asound_get_device (); + if (!alsa_device) { + g_print + ("No audio playback devices found. 
Audio playback through alsa has been disabled\n"); + if ((app->sas && strstr (app->sas, "alsasink")) || (!app->sas + && !strncmp (NVGST_DEFAULT_AUDIO_SINK, "alsasink", 8))) { + app->attrs.flags &= ~NVGST_PLAY_FLAG_AUDIO; + } + app->have_alsa_sinks = FALSE; + } else if (g_strcmp0 (alsa_device, "default")) { + gchar sas[256]; + if (app->sas && strstr (app->sas, "alsasink") + && !strstr (app->sas, "device")) { + g_sprintf (sas, "%s # device = %s", app->sas, alsa_device); + g_free (app->sas); + app->sas = g_strdup (sas); + } else if (!app->sas && !strncmp (NVGST_DEFAULT_AUDIO_SINK, "alsasink", 8)) { + g_sprintf (sas, "%s # device = %s", NVGST_DEFAULT_AUDIO_SINK, + alsa_device); + app->sas = g_strdup (sas); + } + free (alsa_device); + app->have_alsa_sinks = TRUE; + } + + app->disp.mDisplay = nvgst_x11_init (&app->disp); + + g_set_application_name (APPLICATION_NAME); + + g_print ("%s\n", app->extra_options); + + g_option_context_free (ctx); + + app->input = g_malloc0 (sizeof (inAttrs)); + + loop = g_main_loop_new (NULL, FALSE); + + if (app->stats) { + if (app->stats_file) + app->pfData.file = fopen (app->stats_file, "w"); + else { + snprintf (stats, sizeof(stats)-1, "gst_statistics_%ld.txt", (long) getpid()); + stats[sizeof (stats)-1] = '\0'; + app->pfData.file = fopen (stats, "w"); + } + if (app->pfData.file == NULL) { + g_print ("File can not be opened for stats : %s\n", strerror (errno)); + app->return_value = -1; + goto done; + } + } + + if (app->disable_dpms && app->disp.mDisplay) { + saver_off (&app->disp); + } + + if (app->uri) { + app->uriTotal = 1; + + } else if (urifile) { + const GKeyFileFlags flags = + G_KEY_FILE_KEEP_COMMENTS | G_KEY_FILE_KEEP_TRANSLATIONS; + + app->pathCfg = g_key_file_new (); + if (!g_key_file_load_from_file (app->pathCfg, urifile, flags, &error)) { + NVGST_WARNING_MESSAGE_V ("failed to load uri file: err: %s", + error->message); + g_error_free (error); + g_key_file_free (app->pathCfg); + app->pathCfg = NULL; + app->return_value = -1; + 
goto done; + } else { + app->uriGroups = g_key_file_get_groups (app->pathCfg, &app->uriTotal); + } + } + + g_free (urifile); + + if (elemfile) { + get_elem_cfg (elemfile); + g_free (elemfile); + } + + timeout_id = g_timeout_add (400, check_for_interrupt, app); + + app->astream_select = app->attrs.aud_track; + app->vstream_select = app->attrs.vid_track; + app->attrs.repeats = app->attrs.repeats > 0 ? app->attrs.repeats : 1; + app->attrs.sync = !app->attrs.sync; + app->attrs.low_percent = + (ABS (app->attrs.low_percent) < 99) ? ABS (app->attrs.low_percent) : 10; + app->attrs.high_percent = + (ABS (app->attrs.high_percent) > + app->attrs.low_percent) ? ABS (app->attrs.high_percent) : 99; + app->attrs.max_size_time = ABS (max_size_time) * GST_SECOND; + app->attrs.max_size_bytes = ABS (app->attrs.max_size_bytes); + app->attrs.max_size_buffers = ABS (app->attrs.max_size_buffers); + app->attrs.image_display_time = + app->attrs.image_display_time > 4 ? app->attrs.image_display_time : 5; + app->attrs.segment_duration = ABS (segment_duration) * GST_SECOND; + if (!app->attrs.segment_duration) + app->attrs.segment_duration = GST_CLOCK_TIME_NONE; + if (app->attrs.startPer) + app->attrs.segment_start = ABS (segment_start); + else + app->attrs.segment_start = ABS (segment_start) * GST_SECOND; + + + if (cxpr) { + if ('*' == *cxpr) { + g_free (cxpr); + cxpr = get_random_cxpr (); + } + build_cmlist (cxpr, &app->attrs); + g_free (cxpr); + } else { + g_assert (build_cmlist ("r", &app->attrs)); + } + + + _intr_setup (); + + app->htable = g_hash_table_new (g_str_hash, g_str_equal); + build_hash_table (app->htable); + if (!app->bg_mode) + trd = g_thread_new ("on-input-thread", on_input_thread, app); + + CALL_GUI_FUNC (init, argc, argv); + + /* Start rolling! 
*/ + g_idle_add (goto_next_track, app); + + NVGST_INFO_MESSAGE ("iterating..."); + + g_main_loop_run (loop); + + /* Out of the main loop, clean up nicely */ + CALL_GUI_FUNC (destroy); + + destroy_current_track (); + + g_print ("Playback completed!\n"); +done: + + if (app->pfData.file) + fclose (app->pfData.file); + + if (app->pathCfg) + g_key_file_free (app->pathCfg); + + if (app->elemCfg) { + g_key_file_free (app->elemCfg); + + g_strfreev (app->elem_gps); + } + + g_free (app->uri); + + g_strfreev (app->uriGroups); + + app->astream_select = -1; + app->vstream_select = -1; + + g_free (app->svd); + g_free (app->sad); + g_free (app->svc); + g_free (app->sac); + g_free (app->svs); + g_free (app->sas); + g_free (app->shttp); + g_free (app->srtsp); + g_free (app->sudp); + g_free (app->sfsrc); + + if (app->attrs.cmlist) + free_cmlist (&app->attrs, TRUE); + + if (app->htable) + g_hash_table_unref (app->htable); + + g_free (app->extra_options); + g_free (app->input); + + if (loop) + g_main_loop_unref (loop); + + if (app->disable_dpms) + saver_on (&app->disp); + + if (app->disp.mDisplay) + nvgst_x11_uninit (&app->disp); + + g_mutex_clear (&app->window_lock); + g_mutex_clear (&app->dbin_lock); + + g_print ("Application will now exit!\n"); + + return ((app->return_value == -1) ? -1 : 0); +} diff --git a/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.h b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.h new file mode 100644 index 0000000..77c5287 --- /dev/null +++ b/nvgstapps_src/nvgst_sample_apps/nvgstplayer-1.0/nvgstplayer.h @@ -0,0 +1,435 @@ +/* + * Copyright (c) 2013-2019, NVIDIA CORPORATION. All rights reserved. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ + +#ifndef __NVGSTPLAYER_H__ +#define __NVGSTPLAYER_H__ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "nvgst_x11_common.h" +#include "nvgst_asound_common.h" + +#ifdef WITH_GUI + +#include "nvgstplayer_gui_interface.h" + +#else + +gpointer dummy_func (void); +gpointer +dummy_func () +{ + return NULL; +} + +#define GUI 0 +#define CALL_GUI_FUNC(func, ...) 
dummy_func() +#define GET_GUI_CTX() NULL +#define GUI_CALLBACKS "nvgstplayer.h" +#endif + +#define APPLICATION_NAME "nvgstplayer" +#define NVSTARTPER "startper" +#define NVSTART "start" +#define NVDURATION "duration" +#define NVCXPR "cxpr" +#define NVREPEATS "repeats" +#define NVAUDIO "audio" +#define NVVIDEO "video" +#define NVNATIVE_AUDIO "native_audio" +#define NVNATIVE_VIDEO "native_video" +#define NVNOP "nop" +#define NVSYNC "sync" +#define NVUSE_BUFFERING "use_buffering" +#define NVLOW_PERCENT "low_percent" +#define NVHIGH_PERCENT "high_percent" +#define NVMAX_SIZE_TIME "max_size_time" +#define NVMAX_SIZE_BYTES "max_size_bytes" +#define NVMAX_SIZE_BUFFERS "max_size_buffers" +#define NVIMAGE_DISPLAY_TIME "image_display_time" +#define NVTAGS "tags" + +#define NVGST_AUDIO_DEC "sad" +#define NVGST_VIDEO_DEC "svd" +#define NVGST_AUDIO_CONV "sac" +#define NVGST_VIDEO_CONV "svc" +#define NVGST_AUDIO_SINK "sas" +#define NVGST_VIDEO_SINK "svs" +#define NVGST_FILE_SRC "sfsrc" +#define NVGST_RTSP_SRC "srtsp" +#define NVGST_HTTP_SRC "shttp" +#define NVGST_UDP_SRC "sudp" + +#define NVGST_DEFAULT_AUDIO_CONV "audioconvert ! audioresample" +#define NVGST_DEFAULT_VIDEO_CONV "nvvidconv" +#define NVGST_DEFAULT_AUDIO_SINK "alsasink" +#define NVGST_DEFAULT_VIDEO_SINK "nv3dsink" +#define NVGST_DEFAULT_VIDEO_DEC "nvv4l2decoder" +#define NVGST_DEFAULT_FILE_SRC "filesrc" +#define NVGST_DEFAULT_RTSP_SRC "rtspsrc" +#define NVGST_DEFAULT_HTTP_SRC "souphttpsrc" +#define NVGST_DEFAULT_UDP_SRC "udpsrc" + +#ifdef NVGST_LOG_LEVEL_DEBUG +#define NVGST_ENTER_FUNCTION() g_print("%s{", __FUNCTION__) +#define NVGST_EXIT_FUNCTION() g_print("%s}", __FUNCTION__) +#define NVGST_EXIT_FUNCTION_VIA(s) g_print("%s}['%s']", __FUNCTION__, s) +#define NVGST_DEBUG_MESSAGE(s) g_debug("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_DEBUG_MESSAGE_V(s, ...) 
g_debug("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_INFO_MESSAGE(s) g_message("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_INFO_MESSAGE_V(s, ...) g_message("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_WARNING_MESSAGE(s) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_WARNING_MESSAGE_V(s, ...) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__,__VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) g_error("<%s:%d> "s, __FUNCTION__, __LINE__,__VA_ARGS__) + +#elif defined NVGST_LOG_LEVEL_INFO +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) g_message("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_INFO_MESSAGE_V(s, ...) g_message("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_WARNING_MESSAGE(s) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_WARNING_MESSAGE_V(s, ...) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) 
do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) + +#elif defined NVGST_LOG_LEVEL_WARNING +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE(s) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_WARNING_MESSAGE_V(s, ...) g_warning("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) + +#elif defined NVGST_LOG_LEVEL_CRITICAL +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE_V(s, ...) 
G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_CRITICAL_MESSAGE(s) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_CRITICAL_MESSAGE_V(s, ...) do {\ + g_critical("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__);\ + app->return_value = -1;\ + } while (0) +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) + +#else +#define NVGST_ENTER_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION() G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_EXIT_FUNCTION_VIA(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_DEBUG_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_INFO_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_WARNING_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_CRITICAL_MESSAGE(s) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_CRITICAL_MESSAGE_V(s, ...) G_STMT_START{ (void)0; }G_STMT_END +#define NVGST_ERROR_MESSAGE(s) g_error("<%s:%d> "s, __FUNCTION__, __LINE__) +#define NVGST_ERROR_MESSAGE_V(s, ...) 
g_error("<%s:%d> "s, __FUNCTION__, __LINE__, __VA_ARGS__) +#endif + +#define INITIAL_FPS_UPDATE_INTERVAL_MS 400 +#ifdef WITH_GUI +#define DEFAULT_FPS_UPDATE_INTERVAL_MS 400 +#else +#define DEFAULT_FPS_UPDATE_INTERVAL_MS 5000 +#endif + +#define CALC_RUNNING_AVERAGE(avg,val,size) (((val) + ((size)-1) * (avg)) / (size)) + + +typedef enum +{ + NVGST_RET_ASYNC = 1, + NVGST_RET_SUCCESS = 0, + NVGST_RET_ERR = -1, + NVGST_RET_END = -2, + NVGST_RET_INVALID = -3 +} NvGstReturn; + + +typedef enum +{ + NVGST_AUTOPLUG_SELECT_TRY = 0, + NVGST_AUTOPLUG_SELECT_EXPOSE, + NVGST_AUTOPLUG_SELECT_SKIP +} NvGstAutoplugSelectResult; + + +typedef enum +{ + NVGST_PLAY_FLAG_VIDEO = (1 << 0), + NVGST_PLAY_FLAG_AUDIO = (1 << 1), + NVGST_PLAY_FLAG_TEXT = (1 << 2), + NVGST_PLAY_FLAG_VIS = (1 << 3), + NVGST_PLAY_FLAG_SOFT_VOLUME = (1 << 4), + NVGST_PLAY_FLAG_NATIVE_AUDIO = (1 << 5), + NVGST_PLAY_FLAG_NATIVE_VIDEO = (1 << 6), + NVGST_PLAY_FLAG_DOWNLOAD = (1 << 7), + NVGST_PLAY_FLAG_BUFFERING = (1 << 8), + NVGST_PLAY_FLAG_DEINTERLACE = (1 << 9), + /* added */ + NVGST_PLAY_FLAG_PLAYBIN = (1 << 10) +} NvGstPlayFlags; + + +typedef enum +{ + NVGST_OPS_NONE, + NVGST_OPS_PAUSE, + NVGST_OPS_PLAY, + NVGST_OPS_SEEK, + NVGST_OPS_WAIT, + NVGST_OPS_STOP +} NvGstOperation; + + +typedef enum +{ + NVGST_CMD_NONE, + NVGST_CMD_SCRIPT, + NVGST_CMD_USER +} NvGstOpMode; + + +typedef enum +{ + BUFFER_MODE_NONE = 0, + BUFFER_MODE_SLAVE = 1, + BUFFER_MODE_BUFFER = 2, + BUFFER_MODE_AUTO = 3 +} JitterBufferMode; + + +typedef struct +{ + gint repeats; + gboolean startPer; + GstClockTimeDiff segment_start; + GstClockTimeDiff segment_duration; + NvGstPlayFlags flags; + gboolean sync; + gboolean use_buffering; + gboolean disable_fullscreen; + gint low_percent; + gint high_percent; + gint aud_track; + gint vid_track; + gint drop_threshold_pct; + gboolean loop_forever; + gint max_size_buffers; + gint max_size_bytes; + GstClockTime max_size_time; + gint image_display_time; + gboolean show_tags; + GList *lplist_head; + GList 
*cmlist_head; + GList *lplist; + GList *cmlist; +} attrs_s; + + +typedef struct +{ + gchar *uri; + NvGstOpMode operation_mode; + GstClockTimeDiff duration; + gchar **audio_dec; + gchar **video_dec; + attrs_s attrs; + gboolean selfexpr; + gboolean pending_play; + guint64 interval; + gboolean postpone; + gboolean dbin_audio_decoders; + gboolean dbin_video_decoders; +} inAttrs; + + +typedef struct +{ + FILE *file; + + guint frames_rendered, frames_dropped, frames_dropped_decoder; + guint64 last_frames_rendered, last_frames_dropped; + + GstClockTime start_ts; + GstClockTime last_ts; + + gdouble max_fps; + gdouble min_fps; + gdouble average_fps; + + GTimer *timer; + guint dps_cb; + GstClockTime prev_ts; + GstClockTime avg_in_diff; + GstClockTime max_latency; + gboolean initial_fps; +} pfData_s; + + +typedef struct +{ + inAttrs *input; + gchar *extra_options; + GKeyFile *pathCfg; + GKeyFile *elemCfg; + GHashTable *htable; + NvGstOperation cur_operation; + attrs_s attrs; + GstElement *pipeline; + GstElement *source; + GstElement *vpipe; + GstElement *apipe; + GstElement *vsink; + GstElement *asink; + GstElement *adbin; + GstElement *vdbin; + GstElement *seekElement; + gboolean no_more_pads; + gint cmd_id; + gint bus_id; + gint uriCount; + gsize uriTotal; + gchar **uriGroups; + gboolean version; + gboolean have_alsa_sinks; + gboolean found_demuxer; + gboolean got_eos; + gboolean is_live; + gboolean in_error; + gboolean pre_dbin_lp; + gboolean unpause; + gboolean buffering; + gboolean running; + guint image_eos; + gint return_value; + GstState target_state; + GstClockTimeDiff last_seek_time; + GstClockTimeDiff accum_time; + GstClockTime seekPos; + gchar *uri; + gchar **elem_gps; + gboolean stealth_mode; + gboolean bg_mode; + gchar *svd; + gchar *sad; + gchar *svc; + gchar *sac; + gchar *svs; + gchar *sas; + gchar *shttp; + gchar *srtsp; + gchar *sudp; + gchar *sfsrc; + gint astreams; + gint vstreams; + gint astream_select; + gint vstream_select; + gboolean disable_dpms; + 
displayCtx disp; + GThread *x_event_thread; + GMutex window_lock; + + /* stats */ + gboolean stats; + gchar *stats_file; + pfData_s pfData; + GstPad *vrender_pad; + GstPad *arender_pad; + gchar *elem_file; + GMutex dbin_lock; + +} appCtx; + + +typedef struct +{ + guint x; + guint n; + GList *c; +} sLp; + + +typedef struct +{ + gchar *id; + GList *list; +} sCm; +#endif diff --git a/push_info.txt b/push_info.txt new file mode 100644 index 0000000..392a48b --- /dev/null +++ b/push_info.txt @@ -0,0 +1 @@ +jetson_35.1