From 96377180a4228c2b8baa490f8c4b70b2daf4b45d Mon Sep 17 00:00:00 2001 From: Marco Randazzo Date: Mon, 17 Jun 2024 11:45:39 +0200 Subject: [PATCH 1/3] added preliminary version of yarp-gstreamer plugins video sink updated to use binary data improvements, working version bugfix Improved yarpVideoSink (tested) yarpvideosource test fix Improved Log components in yarpvideosource, yarpvideosink added cond variabiale in yarpvideosource improved CMakeLists.txt to detect missing dependencies fixed issue renamed and added documentation added yarp opencv display fixed yarplaserscannergui not dependent on qt5 improved display added gstreamerplugin videopassthrough Improved debug messages with verbosity level in other gstreamer plugins add fixes to yarpvideopassthrough added gestreamer carrier removed h264 carrier updated gstreamer/h264 carrier documentation --- .ci/initial-cache.gh.linux.cmake | 2 +- cmake/YarpFindDependencies.cmake | 7 +- doc/001_installation/3_install_linux.md | 2 +- doc/090_tutorials.dox | 2 +- ..._howto.dox => carrier_gstreamer_howto.dox} | 115 ++-- .../module_gstreamerplugins.dox | 124 ++++ scripts/admin/update-license | 2 +- src/CMakeLists.txt | 11 +- src/carriers/CMakeLists.txt | 2 +- src/carriers/gstreamer_carrier/CMakeLists.txt | 61 ++ .../gstreamer_carrier/GstreamerCarrier.cpp | 193 +++++++ .../gstreamer_carrier/GstreamerCarrier.h | 75 +++ .../gstreamer_carrier/GstreamerDecoder.cpp | 232 ++++++++ .../gstreamer_carrier/GstreamerDecoder.h | 64 +++ .../gstreamer_carrier/GstreamerStream.cpp | 231 ++++++++ .../gstreamer_carrier/GstreamerStream.h | 112 ++++ src/carriers/h264_carrier/CMakeLists.txt | 66 --- src/carriers/h264_carrier/H264Carrier.cpp | 220 -------- src/carriers/h264_carrier/H264Carrier.h | 117 ---- src/carriers/h264_carrier/H264Decoder.cpp | 534 ------------------ src/carriers/h264_carrier/H264Decoder.h | 55 -- .../h264_carrier/H264LogComponent.cpp | 13 - src/carriers/h264_carrier/H264LogComponent.h | 13 - 
src/carriers/h264_carrier/H264Stream.cpp | 231 -------- src/carriers/h264_carrier/H264Stream.h | 51 -- src/yarpgstreamerplugins/CMakeLists.txt | 11 + .../videopassthrough/CMakeLists.txt | 48 ++ .../videopassthrough/yarpVideoPassthrough.cpp | 265 +++++++++ .../videopassthrough/yarpVideoPassthrough.h | 14 + .../videosink/CMakeLists.txt | 48 ++ .../videosink/yarpVideoSink.cpp | 330 +++++++++++ .../videosink/yarpVideoSink.h | 38 ++ .../videosource/CMakeLists.txt | 48 ++ .../videosource/yarpVideoSource.cpp | 398 +++++++++++++ .../videosource/yarpVideoSource.h | 117 ++++ src/yarplaserscannergui/CMakeLists.txt | 31 +- src/yarpopencvdisplay/CMakeLists.txt | 50 ++ src/yarpopencvdisplay/main.cpp | 197 +++++++ 38 files changed, 2736 insertions(+), 1394 deletions(-) rename doc/module_carriers/{carrier_h264_howto.dox => carrier_gstreamer_howto.dox} (64%) create mode 100644 doc/module_gstreamerplugins/module_gstreamerplugins.dox create mode 100644 src/carriers/gstreamer_carrier/CMakeLists.txt create mode 100644 src/carriers/gstreamer_carrier/GstreamerCarrier.cpp create mode 100644 src/carriers/gstreamer_carrier/GstreamerCarrier.h create mode 100644 src/carriers/gstreamer_carrier/GstreamerDecoder.cpp create mode 100644 src/carriers/gstreamer_carrier/GstreamerDecoder.h create mode 100644 src/carriers/gstreamer_carrier/GstreamerStream.cpp create mode 100644 src/carriers/gstreamer_carrier/GstreamerStream.h delete mode 100644 src/carriers/h264_carrier/CMakeLists.txt delete mode 100644 src/carriers/h264_carrier/H264Carrier.cpp delete mode 100644 src/carriers/h264_carrier/H264Carrier.h delete mode 100644 src/carriers/h264_carrier/H264Decoder.cpp delete mode 100644 src/carriers/h264_carrier/H264Decoder.h delete mode 100644 src/carriers/h264_carrier/H264LogComponent.cpp delete mode 100644 src/carriers/h264_carrier/H264LogComponent.h delete mode 100644 src/carriers/h264_carrier/H264Stream.cpp delete mode 100644 src/carriers/h264_carrier/H264Stream.h create mode 100644 
src/yarpgstreamerplugins/CMakeLists.txt create mode 100644 src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt create mode 100644 src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp create mode 100644 src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h create mode 100644 src/yarpgstreamerplugins/videosink/CMakeLists.txt create mode 100644 src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp create mode 100644 src/yarpgstreamerplugins/videosink/yarpVideoSink.h create mode 100644 src/yarpgstreamerplugins/videosource/CMakeLists.txt create mode 100644 src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp create mode 100644 src/yarpgstreamerplugins/videosource/yarpVideoSource.h create mode 100644 src/yarpopencvdisplay/CMakeLists.txt create mode 100644 src/yarpopencvdisplay/main.cpp diff --git a/.ci/initial-cache.gh.linux.cmake b/.ci/initial-cache.gh.linux.cmake index f440b2bb365..0ee9931b4a4 100644 --- a/.ci/initial-cache.gh.linux.cmake +++ b/.ci/initial-cache.gh.linux.cmake @@ -24,7 +24,7 @@ set(ENABLE_yarpcar_portmonitor ON CACHE BOOL "") set(ENABLE_yarppm_depthimage_to_mono ON CACHE BOOL "") set(ENABLE_yarppm_depthimage_to_rgb ON CACHE BOOL "") set(ENABLE_yarppm_segmentationimage_to_rgb ON CACHE BOOL "") -set(ENABLE_yarpcar_h264 ON CACHE BOOL "" ON CACHE BOOL "") +set(ENABLE_yarpcar_gstreamer ON CACHE BOOL "" ON CACHE BOOL "") set(ENABLE_yarpcar_unix_stream ON CACHE BOOL "") set(ENABLE_yarppm_image_compression_ffmpeg ON CACHE BOOL "") set(ENABLE_yarppm_sound_compression_mp3 ON CACHE BOOL "") diff --git a/cmake/YarpFindDependencies.cmake b/cmake/YarpFindDependencies.cmake index c47eb391a48..71561a5c746 100644 --- a/cmake/YarpFindDependencies.cmake +++ b/cmake/YarpFindDependencies.cmake @@ -558,7 +558,11 @@ yarp_dependent_option( ) yarp_dependent_option( YARP_COMPILE_yarplaserscannergui "Do you want to compile yarplaserscannergui?" 
ON - "YARP_COMPILE_EXECUTABLES;YARP_COMPILE_GUIS;YARP_HAS_Qt5;YARP_HAS_OpenCV" OFF + "YARP_COMPILE_EXECUTABLES;YARP_COMPILE_GUIS;YARP_HAS_OpenCV" OFF +) +yarp_dependent_option( + YARP_COMPILE_yarpopencvdisplay "Do you want to compile yarpopencvdisplay?" ON + "YARP_COMPILE_EXECUTABLES;YARP_COMPILE_GUIS;YARP_HAS_OpenCV" OFF ) yarp_dependent_option( YARP_COMPILE_yarpviz "Do you want to compile yarpviz?" ON @@ -684,6 +688,7 @@ yarp_print_feature(YARP_COMPILE_yarpdataplayer 2 "Compile yarpdataplayer${YARP_C yarp_print_feature("YARP_COMPILE_yarpdataplayer AND YARP_HAS_OpenCV" 3 "yarpdataplayer video support") yarp_print_feature(YARP_COMPILE_yarpmotorgui 2 "Compile yarpmotorgui${YARP_COMPILE_yarpmotorgui_disable_reason}") yarp_print_feature(YARP_COMPILE_yarplaserscannergui 2 "Compile yarplaserscannergui${YARP_COMPILE_yarplaserscannergui_disable_reason}") +yarp_print_feature(YARP_COMPILE_yarpopencvdisplay 2 "Compile yarpopencvdisplay${YARP_COMPILE_yarpopencvdisplay_disable_reason}") yarp_print_feature(YARP_COMPILE_yarpbatterygui 2 "Compile yarpbatterygui${YARP_COMPILE_yarpbatterygui_disable_reason}") yarp_print_feature(YARP_COMPILE_yarpviz 2 "Compile yarpviz${YARP_COMPILE_yarpviz_disable_reason}") diff --git a/doc/001_installation/3_install_linux.md b/doc/001_installation/3_install_linux.md index a15d86540ce..4ffff6d6d55 100644 --- a/doc/001_installation/3_install_linux.md +++ b/doc/001_installation/3_install_linux.md @@ -168,7 +168,7 @@ sudo apt-get install libjpeg-dev ### GStreamer {#install_gstreamer_debian} -GStreamer is required to enable the h264 carrier +GStreamer is required to enable the gstreamer carrier ~~~{.sh} sudo apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \ diff --git a/doc/090_tutorials.dox b/doc/090_tutorials.dox index d576a89798c..7f12faf6c17 100644 --- a/doc/090_tutorials.dox +++ b/doc/090_tutorials.dox @@ -49,7 +49,7 @@ Here are a collection of tutorials on various topics in YARP. 
- \ref yarp_pointcloud - \ref yarp_code_examples - \ref using_cmake -- \ref carrier_h264_howto +- \ref carrier_gstreamer_howto \section tutorial_protocols Communication protocol details: - \ref yarp_protocol diff --git a/doc/module_carriers/carrier_h264_howto.dox b/doc/module_carriers/carrier_gstreamer_howto.dox similarity index 64% rename from doc/module_carriers/carrier_h264_howto.dox rename to doc/module_carriers/carrier_gstreamer_howto.dox index ad540d362f6..6c321b8dbb9 100644 --- a/doc/module_carriers/carrier_h264_howto.dox +++ b/doc/module_carriers/carrier_gstreamer_howto.dox @@ -1,6 +1,6 @@ /** \ingroup carriers_examples -\defgroup carrier_h264_howto h264 carrier +\defgroup carrier_gstreamer_howto Gstreamer carrier \tableofcontents @@ -13,7 +13,7 @@ This document contains a brief introduction to Gstreamer tool and explains how t \note They are still work in progress and should be considered experimental. Please report any problems. \section gstreamer_introduction Gstreamer: brief introduction -Gstreamer is a free framework for media applications; it provides a set of plugins that let the user to build applications by connecting them as in a pipeline. It has been ported to a wide range of operating systems, processors and compilers. Also Nvidia developed plugins for its platforms and we employ its h264 encode in order to take advantage from its hardware codec. +Gstreamer is a free framework for media applications; it provides a set of plugins that let the user to build applications by connecting them as in a pipeline. It has been ported to a wide range of operating systems, compilers and processors, including Nvidia GPUs. A Gstreamer application is composed by a chain of elements, the base construction block of a Gstreamer application. An element takes an input stream from previous element in the chain, carries out its function, like encode, and passes the modified stream to the next element. Usually each element is a plugin. 
@@ -21,15 +21,15 @@ The user can develop application in two way: the first consists in write an appl \verbatim -gst-launch-1.0 -v videotestsrc ! ‘video/x-raw, format=(string)I420, width=(int)640, height=(int)480’ ! x264enc ! h264parse ! avdec_h264 ! autovideosink \endverbatim +gst-launch-1.0 -v videotestsrc ! ‘video/x-raw, format=(string)I420, width=(int)640, height=(int)480’ ! x264enc ! h264parse ! avdec_h264 ! autovideosink +\endverbatim This command creates a source video test with the properties specified in this string “video/x-raw, format=(string)I420, width=(int)640, height=(int)480”; after it is encoded in h264, then decoded and shown. Each element of this pipeline, except the property element, is plugins dynamically loaded. The videotestsrc element lets the user to see a stream without using camera. -The previous command works on Linux, but since Gstreamer is platform independent, we can launch the same command on Windows taking care to change only hardware dependent plugin. So the same command on Window is: +The previous command works on Linux, but since Gstreamer is platform independent, we can launch the same command on Windows taking care to change only hardware dependent plugin. So the same command on Windows is: \verbatim -gst-launch-1.0 -v videotestsrc ! “video/x-raw, format=(string)I420, width=(int)640, height=(int)480” ! - openh264enc ! h264parse ! avdec_h264 ! autovideosink +gst-launch-1.0 -v videotestsrc ! “video/x-raw, format=(string)I420, width=(int)640, height=(int)480” ! openh264enc ! h264parse ! avdec_h264 ! autovideosink \endverbatim It’s important to notice that the changed element is the encoder (openh264enc), while the decoder is the same. This because the decoder belongs to the plugin that wraps libav library, a cross-platform library to convert stream in a wide range of multimedia formats. 
[see \ref references chapter] @@ -37,20 +37,15 @@ It’s important to notice that the changed element is the encoder (openh264enc) Please see \ref notes section about commands in this tutorial. -\section how_to_stream_h264 How to stream in h264 +\section how_to_stream_h264 How to stream using h264 encoder The server grabs images from cameras, so it needs to run on where cameras are connected. The server is a Gstreamer command pipeline, while the client could be a yarp or a Gstreamer application connected to the robot’s network. -Since Gstreamer is wide spread framework for media application, wrap it into yarp it is not interesting and is worthless. -Instead is more intriguing that a yarp application can read “standard” streams using the h264 carrier. -For these reasons, the server streamer is a native Gstreamer pipeline, while the client side has been developed in yarp like a carrier. - \subsection server_side Server side: The server application consists in the following Gstreamer command: \verbatim -gst-launch-1.0 -v v4l2src device="/dev/video1" ! ‘video/x-raw, width=1280, height=480, format=(string)I420’ ! - omxh264enc ! h264parse ! rtph264pay pt=96 config-interval=5 ! udpsink host=224.0.0.1 auto-multicast=true port=33000 +gst-launch-1.0 -v v4l2src device="/dev/video1" ! ‘video/x-raw, width=1280, height=480, format=(string)I420’ ! omxh264enc ! h264parse ! rtph264pay pt=96 config-interval=5 ! udpsink host=224.0.0.1 auto-multicast=true port=33000 \endverbatim @@ -62,48 +57,31 @@ gst-launch-1.0 -v v4l2src device="/dev/video1" ! ‘video/x-raw, width=1280, hei \li udpsink: this is the last element and sends out the stream. In this case we use multicast, but it is possible to send the stream using unicast in this way: udpsink host=IP_ADDRESS_OF_CLIENT port=NOT_WELL_KNOWN_PORT_NUMBER -Currently is not available a Gstreamer application that implements the pipeline. In the near future, it could be developed in order to make easier configure the server. 
- - \subsection client_side Client side -The client can read the stream using Gstreamer native command or yarp. +The client can read the stream using Gstreamer native command: -In the first case the Gstreamer command is: \verbatim -gst-launch-1.0 -v udpsrc multicast-group=224.0.0.1 auto-multicast=true port=3000 caps="application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! - rtph264depay ! h264parse ! avdec_h264! autovideosink +gst-launch-1.0 -v udpsrc multicast-group=224.0.0.1 auto-multicast=true port=3000 caps="application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! avdec_h264 ! autovideosink \endverbatim -If you want use a yarp application to read the stream you need to: -\verbatim -1) install Gstreamer (see \ref how_to_install_gstreamer ) -2) compile yarp with “ENABLE_yarpcar_h264” option enabled. -3) register the stream server to the yarp server: “yarp name register h264 ” -4) run your application -5) connect client and server port using h264 carrier: “yarp connect h264” -\endverbatim - -\subsection some_options Some options +\subsection some_options Some options and extra notes \li set the frame rate : in server side exist the parameter framerate=30/1, that configures the framerate to which grab images. Insert in in property element: 'video/x-raw, format=(string)I420, width=(int)640, height=(int)480, framerate=30/1' \li In some cases could be useful that server streams video at constant rate. You can achieve this adding this parameter to the encoder plugin: control-rate=2 bitrate=5000000 -\li remove the jitter: in the client it is possible adding the rtpjitterbuffer plugin in this way: - If you want to remove the jitter in h264 yarp carrier, please add parameter “+removeJitter.1” in connect command. (Note that the syntax is the usual used to specify parameters to yarp carriers). 
Therefore the connect command to your application could be: - \verbatim - yarp connect h264+removeJitter.1 - \endverbatim -\li The yarp carrier lets you to \b crop each frame specifying the number of pixel to crop on each side of image in carrier parameter in connection command. For example if you want to crop 200 pixel on top the command appears like: - \verbatimyarp connect h264+cropTop.200.\endverbatim - Instead, if you want to use native Gstreamer client the plugin “videocrop” performs this operation. +\li You can use the native Gstreamer plugin “videocrop” to crop the video: \verbatim - gst-launch-1.0 -v udpsrc port=33000 caps="application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! - rtph264depay ! h264parse ! avdec_h264 ! videocrop left=10, right=30, top=50, bottom=50 ! autovideosink + gst-launch-1.0 -v udpsrc port=33000 caps="application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! avdec_h264 ! videocrop left=10, right=30, top=50, bottom=50 ! autovideosink \endverbatim - +\li fakevideosink plugin can be used instead of autovideosink to test the pipeline without displaying anything. +\li videotestsrc followed by the format specifier can be used to generate a test image +\li filesrc / filesink plugins can used to read from/write to file. +\li other useful encoder plugins: x264,x265,avenc_mjpeg. Suggested decoders: avdec_h265,avdec_h265,avdec_mjpeg +\li official plugins list: https://gstreamer.freedesktop.org/documentation/plugins_doc.html?gi-language=c +\li another common format for video/x-raw instead of I420 is RGB \section how_to_install_gstreamer How to install Gstreamer -Currently we are using 1.8.3 version +Currently we are using 1.24.4 version. 
\subsection ubuntu On Ubuntu \li Packages required to build @@ -116,6 +94,16 @@ Currently we are using 1.8.3 version - gstreamer1.0-libav (for avdec_h264) \li Useful packages but not required - gstreamer1.0-tools + +\verbatim +sudo apt-get install libgstreamer1.0-dev \ +libgstreamer-plugins-base1.0-dev \ +gstreamer1.0-plugins-base \ +gstreamer1.0-plugins-good \ +gstreamer1.0-plugins-bad \ +gstreamer1.0-libav \ +gstreamer1.0-tools +\endverbatim \subsection windows On windows You need to download both the main package and the devel package from here: @@ -135,37 +123,54 @@ Installation of grstreamer devel package: \li Add in path environment variable the path to executable (Usually is C:\\gstreamer\\1.0\\x86_64\\bin) \subsection check_installation Verify your installation -First of all, you need to verify if Gstreamer has been installed successfully by using these commands: - +You can verify the installation by running a simple test application composed by a server and a client : \subsubsection server Server side (example on Windows) \verbatim -gst-launch-1.0 -v videotestsrc ! "video/x-raw, format=(string)I420, width=(int)640, height=(int)480" ! - openh264enc ! h264parse ! rtph264pay pt=96 config-interval=5 ! udpsink host= port= +gst-launch-1.0 -v videotestsrc ! "video/x-raw, format=(string)I420, width=(int)640, height=(int)480" ! openh264enc ! h264parse ! rtph264pay pt=96 config-interval=5 ! udpsink host= port= \endverbatim \subsubsection client Client side \verbatim -gst-launch-1.0 -v udpsrc port= caps="application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! - rtph264depay ! h264parse ! avdec_h264! autovideosink +gst-launch-1.0 -v udpsrc port= caps="application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! avdec_h264 ! autovideosink \endverbatim -After you can substitute the client side with a yarp application, for example yarpview. 
-So, after yarp server had been launched, please run: +\section yarp_usage Usage with yarp -\li yarpview -\li yarp name register /gst h264 -\li yarp connect /gst /yarpview/img:i h264 +If you want use a yarp application to read the stream you need to: +-# install Gstreamer (see \ref how_to_install_gstreamer ) +-# compile yarp with “ENABLE_yarpcar_gstreamer” option enabled. +-# Run the server as mentioned above. +-# register a fake port to the yarp server, in this example called gstreamer_src: +\verbatim +yarp name register /gstreamer_src gstreamer +\endverbatim +-# set up your decoding pipeline by setting an environment variable with a string containing a string. +For example on windows: +\verbatim +set GSTREAMER_ENV=udpsrc port= caps="application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! h264parse ! avdec_h264 +\endverbatim +on linux: +\verbatim +export GSTREAMER_ENV="udpsrc port=15000 caps=\"application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! h264parse ! avdec_h264" +\endverbatim +-# run your application (e.g. yarpview, or your own application). This must be executed after setting the environment variable, i.e. the environment variable should be accessible to the executable. +\verbatim +yarpview --name /view +\endverbatim +-# connect client and server port using gstreamer carrier: +\verbatim +yarp connect /gstreamer_src /view gstreamer+pipelineEnv.GSTREAMER_ENV +\endverbatim Now on yarp view you can see the following image, where in the bottom right box there is snow pattern. \image html h264GstVideoTestSrc.png "" \section notes Notes -\subsection tricks Tricks -\li On Ubuntu 18.04 the plugin “autovideosink” has a bug, please use “xvimagesink” \li On Windows the property element uses ‘ instead of “ +\li Another completely different way to use Yarp with Gstreamer is to use `Yarp Gstreamer Plugins`. They also allow to feed a yarp image inside a gstreamer pipeline. 
See documentation: \ref gstreamerplugins_module \section references References [1] Gstreamer documentation diff --git a/doc/module_gstreamerplugins/module_gstreamerplugins.dox b/doc/module_gstreamerplugins/module_gstreamerplugins.dox new file mode 100644 index 00000000000..157ee0e15f2 --- /dev/null +++ b/doc/module_gstreamerplugins/module_gstreamerplugins.dox @@ -0,0 +1,124 @@ +/** +\defgroup gstreamerplugins_module Gstreamer plugins + +\tableofcontents + + +\section gstreamer_intro What are gstreamer plugins? + +GStreamer plugins are components that extend the functionality of the GStreamer multimedia framework. +GStreamer (https://gstreamer.freedesktop.orgl) is an open-source pipeline-based multimedia framework that allows the creation of various types of media-handling components, such as audio and video playback, recording, streaming, and editing. +GStreamer itself is designed to be highly modular. The core framework provides the basic functionalities and defines the API, while plugins are used to implement the actual media processing capabilities. These plugins include: +- Source Plugins: Read data from various sources (e.g., files, cameras, network streams). +- Sink Plugins: Write data to various destinations (e.g., display, file, network). +- Filter/Effect Plugins: Process data (e.g., decode, encode, apply effects, convert formats). +- Parser and Demuxer Plugins: Interpret and split container formats into individual streams. +- Muxer Plugins: Combine multiple media streams into a single container format. + +Plugins can be combined into pipelines, which are sequences of plugins where the output of one plugin is fed into the input of the next. +An example of pipeline which converts a test input video in grayscale and then displays it on the screen is shown below: +\verbatim +gst-launch-1.0 videotestsrc ! videoconvert ! videobalance saturation=0.0 ! 
autovideosink +\endverbatim + +Similarly to `yarprobotinterface`, `gst-launch-1.0` is a stand-alone executable which loads and manages the plugins, which have the form of dynamic libraries. +Capabilities of a specific plugin can be displayed by the `gst-inspect-1.0` command. In order to properly execute, the capabilities of plugins belonging to a pipeline must match, so that the output (source pad) of a plugin can be fed into the input (sink pad) of the next plugin. +\verbatim +gst-inspect-1.0 videobalance +\endverbatim + +\section yarp_plugins Yarp plugins for gstreamer +Yarp and gstreamer can interoperate through two Yarp plugins, `yarpVideoSource` which allows to connect a yarp video stream to gstreamer and `yarpVideoSink` which allows to connect a gstreamer video to yarp. + +\section yarpVideoSource yarpVideoSource plugin +This plugin opens a yarp input port which can accept a yarp stream and propagate it to a Gstreamer pipeline. The yarpVideoSource plugin must be the first step of a gstreamer pipeline. +The plugin accepts the following options: +- `localPortName` the name of the yarp port opened by the plugin. +- `remotePortName` If specified, the plugin automatically creates a yarp connection from the `remotePortName` to the `localPortName`, using the protocol specified by `connectionProtocol`. +- `connectionProtocol` the yarp carrier to be used to perform the automatic connection mentioned above. +- `portType` if set to `rgb`, the plugin selects a `yarp::sig::ImageOf` input. If set to `bin`, the plugin accepts custom binary data, specifically designed to communicate only with +a yarpVideoSink plugin. This is required if you are transmitting/receiving encoded images (see examples below) + +The yarpVideoSource plugin is currently able to handle the following gstreamer streams: x-raw(rgb), h264, h265. Check the plugins caps (with `gst-inspect-1.0`) for further details. + +\section yarpVideoSink yarpVideoSink plugin +This plugin opens a yarp output port.
It receives a stream from the gstreamer pipeline and broadcasts it to the yarp network. The yarpVideoSink plugin must be the final step of a gstreamer pipeline. +The plugin accepts the following options: +- `localPortName` the name of the yarp port opened by the plugin. +- `remotePortName` If specified, the plugin automatically creates a yarp connection from the `localPortName` to `remotePortName`, using the protocol specified by `connectionProtocol`. +- `connectionProtocol` the yarp carrier to be used to perform the automatic connection mentioned above. +- `portType` if set to `rgb`, the plugin selects a `yarp::sig::ImageOf` input. If set to `bin`, the plugin accepts custom binary data, specifically designed to communicate only with +a yarpVideoSink plugin. This is required if you are transmitting/receiving encoded images (see examples below) + +The yarpVideoSink plugin is currently able to handle the following gstreamer streams: x-raw(rgb), h264, h265. Check the plugins caps (with `gst-inspect-1.0`) for further details. + +\section yarp_gstreamer_examples Some examples: + +Example 1: Feeding a yarp image into gstreamer: +\verbatim +yarpdev --device fakeFrameGrabber --width 640 --height 480 --period 0.33 +gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" ! videoconvert ! videobalance saturation=0.0 ! autovideosink +yarp connect /grabber /gstreamer:i +\endverbatim + +Example 2: Stream a gstreamer video to yarp: +\verbatim +gst-launch-1.0 videotestsrc ! videoconvert ! yarpVideoSink --localPortName="/grabber:o" +yarpview --name /view +yarp connect /grabber:o /view +\endverbatim + +Example 3: Load a video file and display it +\verbatim +gst-launch-1.0 filesrc location=your_video_file.mp4 ! decodebin ! autovideosink +\endverbatim + +Example 4: Encode a video and send it to a yarp port +\verbatim +gst-launch-1.0 videotestsrc ! videoconvert ! h_264 ! 
yarpVideoSink --localPortName="/grabber:o" portType="bin" +\endverbatim + +Example 5: Receive an encoded video and display it +\verbatim +gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" --portType="bin" ! h_264dev ! videoconvert ! autovideosink +\endverbatim + +Example 6: Receive a yarp video, encode it, transmit it, receive it, decode it, send to a yarp port +\verbatim +gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" ! h_264 ! yarpVideoSink --localPortName="/grabber:o" portType="bin" +gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" --portType="bin" ! h_264dev ! videoconvert ! yarpVideoSink --localPortName="/grabber:o" +\endverbatim + + +\section yarp_plugins_gstreamer_install What do I need to use yarp plugins for gstreamer? +Gstreamer plugins for yarp can be optionally compiled (default is off) if the yarp gstreamer dependencies are satisfied. +On Ubuntu: +\verbatim +sudo apt-get install \ +libgstreamer1.0-dev \ +libgstreamer-plugins-base1.0-dev \ +gstreamer1.0-plugins-base \ +gstreamer1.0-plugins-good \ +gstreamer1.0-plugins-bad \ +gstreamer1.0-libav \ +gstreamer1.0-tools +\endverbatim + +On windows: +You can download the official gstreamer binary packages from: +https://gstreamer.freedesktop.org/data/pkg/windows/ + +NB: After compiling the plugins, in order to have gstreamer executables (`gst-launch-1.0`, `gst-inspect-1.0`) find the yarp gstreamer plugins, you need to add the path where the plugins have been built to the environment variable `GST_PLUGIN_PATH`, i.e. +On Linux: +\verbatim +export GST_PLUGIN_PATH=$GST_PLUGIN_PATH:/usr/local/src/yarp/build/bin +\endverbatim +On windows: +\verbatim +set GST_PLUGIN_PATH=%GST_PLUGIN_PATH%;C:\yarp\build\bin\Release +\endverbatim +*/ + +\section yarp_plugins_gstreamer_notes Extra notes: +Yarp also has a specific carrier to receive video streams from gstreamer.
This is a completely different (but less flexible) strategy which does not requires the receiver side to use the gst-launch-1.0 executable. +See documentation:\ref carrier_gstreamer_howto \ No newline at end of file diff --git a/scripts/admin/update-license b/scripts/admin/update-license index 79f0f988b11..fe3cda0abae 100755 --- a/scripts/admin/update-license +++ b/scripts/admin/update-license @@ -141,7 +141,7 @@ GPL-2.0 or later, GPL-3.0 or later, or Apache-2.0 License: [libdc1394](http://damien.douxchamps.net/ieee1394/libdc1394/) or, if the library is not available, links statically a few files taken from [libdc1394](http://libdc1394.git.sourceforge.net/git/gitweb.cgi?p=libdc1394/libdc1394;a=blob_plain;f=libdc1394/dc1394/bayer.c;hb=HEAD). - + The \`h264\` carrier uses glib (LGPLv2.1 or later) and gstreamer (LGPLv2.1 or + + The \`gstreamer\` carrier uses glib (LGPLv2.1 or later) and gstreamer (LGPLv2.1 or later). + The \`mpi\` carrier uses MPI (license dependent on the implementation). 
+ The \`dynamixelAX12Ftdi\` device uses diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 2fdc237b1f0..9c29c1ba143 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -56,8 +56,6 @@ if(YARP_COMPILE_EXECUTABLES) add_subdirectory(yarpdataplayer-console) add_subdirectory(yarpdatadumper) - add_subdirectory(yarpDeviceParamParserGenerator) - # Qt5 GUIs add_subdirectory(yarpview) add_subdirectory(yarpscope) @@ -67,10 +65,17 @@ if(YARP_COMPILE_EXECUTABLES) add_subdirectory(yarpmotorgui) add_subdirectory(yarpbatterygui) add_subdirectory(yarpmobilebasegui) - add_subdirectory(yarplaserscannergui) add_subdirectory(yarpviz) add_subdirectory(yarpaudiocontrolgui) add_subdirectory(yarpllmgui) + + # Other GUIs + add_subdirectory(yarplaserscannergui) + add_subdirectory(yarpopencvdisplay) + + #other + add_subdirectory(yarpgstreamerplugins) + add_subdirectory(yarpDeviceParamParserGenerator) endif() # Robot Testing Framework addons diff --git a/src/carriers/CMakeLists.txt b/src/carriers/CMakeLists.txt index 892280eeff8..5a59a0f7416 100644 --- a/src/carriers/CMakeLists.txt +++ b/src/carriers/CMakeLists.txt @@ -17,9 +17,9 @@ yarp_begin_plugin_library(yarpcar add_subdirectory(bayer_carrier) add_subdirectory(priority_carrier) add_subdirectory(portmonitor_carrier) - add_subdirectory(h264_carrier) add_subdirectory(unix) add_subdirectory(websocket) + add_subdirectory(gstreamer_carrier) yarp_end_plugin_library(yarpcar QUIET) add_library(YARP::yarpcar ALIAS yarpcar) diff --git a/src/carriers/gstreamer_carrier/CMakeLists.txt b/src/carriers/gstreamer_carrier/CMakeLists.txt new file mode 100644 index 00000000000..b8093896293 --- /dev/null +++ b/src/carriers/gstreamer_carrier/CMakeLists.txt @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: 2024 Istituto Italiano di Tecnologia (IIT) +# SPDX-License-Identifier: BSD-3-Clause + +yarp_prepare_plugin(gstreamer + CATEGORY carrier + TYPE GstreamerCarrier + INCLUDE GstreamerCarrier.h + DEPENDS 
"YARP_HAS_GObject;YARP_HAS_GLIB2;YARP_HAS_GStreamer;YARP_HAS_GStreamerPluginsBase" +) + +if(NOT SKIP_gstreamer) + yarp_add_plugin(yarp_gstreamer) + + if(YARP_HAS_ACE) + target_compile_definitions(yarp_gstreamer PRIVATE YARP_HAS_ACE) + target_link_libraries(yarp_gstreamer PRIVATE ACE::ACE) + endif() + + target_sources(yarp_gstreamer + PRIVATE + GstreamerCarrier.h + GstreamerCarrier.cpp + GstreamerStream.h + GstreamerStream.cpp + GstreamerDecoder.h + GstreamerDecoder.cpp + ) + + target_link_libraries(yarp_gstreamer + PRIVATE + YARP::YARP_os + YARP::YARP_sig + ) + list(APPEND YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS + YARP_os + YARP_sig + ) + + #Gstreamer stuff + target_link_libraries(yarp_gstreamer PRIVATE ${GOBJECT_LIBRARIES}) + target_include_directories(yarp_gstreamer SYSTEM PRIVATE ${GOBJECT_INCLUDE_DIR}) + target_link_libraries(yarp_gstreamer PRIVATE ${GLIB2_LIBRARIES}) + target_include_directories(yarp_gstreamer SYSTEM PRIVATE ${GLIB2_INCLUDE_DIR}) + target_include_directories(yarp_gstreamer SYSTEM PRIVATE ${GSTREAMER_INCLUDE_DIRS}) + target_link_libraries(yarp_gstreamer PRIVATE ${GSTREAMER_LIBRARY}) + target_include_directories(yarp_gstreamer SYSTEM PRIVATE ${GSTREAMER_app_INCLUDE_DIR}) + target_link_libraries(yarp_gstreamer PRIVATE ${GSTREAMER_APP_LIBRARY}) + + yarp_install( + TARGETS yarp_gstreamer + EXPORT YARP_${YARP_PLUGIN_MASTER} + COMPONENT ${YARP_PLUGIN_MASTER} + LIBRARY DESTINATION ${YARP_DYNAMIC_PLUGINS_INSTALL_DIR} + ARCHIVE DESTINATION ${YARP_STATIC_PLUGINS_INSTALL_DIR} + YARP_INI DESTINATION ${YARP_PLUGIN_MANIFESTS_INSTALL_DIR} + ) + + set(YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS ${YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS} PARENT_SCOPE) + + set_property(TARGET yarp_gstreamer PROPERTY FOLDER "Plugins/Carrier") +endif() diff --git a/src/carriers/gstreamer_carrier/GstreamerCarrier.cpp b/src/carriers/gstreamer_carrier/GstreamerCarrier.cpp new file mode 100644 index 00000000000..3c81c3a46c4 --- /dev/null +++ 
b/src/carriers/gstreamer_carrier/GstreamerCarrier.cpp @@ -0,0 +1,193 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + +#include "GstreamerCarrier.h" +#include "GstreamerStream.h" + +#include +#include +#include +#include +#include +#include + +#include + +using namespace yarp::os; + +YARP_LOG_COMPONENT(GSTREAMER_CARRIER, + "yarp.carrier.gstreamer.gstreamerCarrier", + yarp::os::Log::minimumPrintLevel(), + yarp::os::Log::LogTypeReserved, + yarp::os::Log::printCallback(), + nullptr) + +GstreamerCarrier::GstreamerCarrier() = default; + +yarp::os::Carrier* GstreamerCarrier::create() const +{ + return new GstreamerCarrier(); +} + +std::string GstreamerCarrier::getName() const +{ + return "gstreamer"; +} + +bool GstreamerCarrier::checkHeader(const Bytes& header) +{ + return true; +} + +void GstreamerCarrier::getHeader(Bytes& header) const +{ +} + +void GstreamerCarrier::setParameters(const Bytes& header) +{ + YARP_UNUSED(header); +} + +bool GstreamerCarrier::isConnectionless() const +{ + return true; +} + + +bool GstreamerCarrier::respondToHeader(ConnectionState& proto) +{ + auto* stream = new GstreamerStream(); + yAssert(stream != nullptr); + + Contact remote = proto.getStreams().getRemoteAddress(); + bool ok = stream->open(this->pipeline_string,remote); + if (!ok) { + delete stream; + return false; + } + + int myPort = stream->getLocalAddress().getPort(); + proto.takeStreams(stream); + + return true; +} + +bool GstreamerCarrier::expectReplyToHeader(ConnectionState& proto) +{ + auto* stream = new GstreamerStream(); + if (stream == nullptr) { + return false; + } + + yarp::os::Contact remote = proto.getStreams().getRemoteAddress(); + bool ok = stream->open(this->pipeline_string,remote); + + // std::cout << "Remote contact info: host=" << proto.getRoute().getToContact().getHost() << " port= " << proto.getRoute().getToContact().getPort() < + +/** + * GstreamerCarrier + */ +class 
GstreamerCarrier : + public yarp::os::Carrier +{ + std::string pipeline_string; + +public: + GstreamerCarrier(); + + Carrier* create() const override; + + std::string getName() const override; + + bool checkHeader(const yarp::os::Bytes& header) override; + void getHeader(yarp::os::Bytes& header) const override; + void setParameters(const yarp::os::Bytes& header) override; + bool requireAck() const override { return false; } + bool isConnectionless() const override; + bool respondToHeader(yarp::os::ConnectionState& proto) override; + bool expectReplyToHeader(yarp::os::ConnectionState& proto) override; + + /// + bool isPush() const override { return false; } + bool canAccept() const override { return true; } + bool canOffer() const override { return true; } + bool isTextMode() const override { return false; } + bool canEscape() const override { return false; } + + bool supportReply() const override { return false; } + + bool isLocal() const override { return false; } + + bool prepareSend(yarp::os::ConnectionState& proto) override; + bool sendHeader(yarp::os::ConnectionState& proto) override; + bool expectSenderSpecifier(yarp::os::ConnectionState& proto) override; + bool expectExtraHeader(yarp::os::ConnectionState& proto) override; + bool isActive() const override + { + return true; + } + + bool expectAck(yarp::os::ConnectionState& proto) override; + bool write(yarp::os::ConnectionState& proto, yarp::os::SizedWriter& writer) override; + bool reply(yarp::os::ConnectionState& proto, yarp::os::SizedWriter& writer) override; + std::string toString() const override; + bool expectIndex(yarp::os::ConnectionState& proto) override; + + bool sendAck(yarp::os::ConnectionState& proto) override; + + yarp::os::Face* createFace() const override; +}; + + +/*example: +gst-launch-1.0 -v videotestsrc ! "video/x-raw, format=(string)RGB, width=(int)640, height=(int)480" ! videoconvert ! openh264enc ! h264parse ! rtph264pay pt=96 config-interval=5 ! 
udpsink host=127.0.0.1 port=15000 +gst-launch-1.0 -v videotestsrc ! "video/x-raw, format=(string)I420, width=(int)640, height=(int)480" ! openh264enc ! h264parse ! udpsink host = 127.0.0.1 port = 15000 +yarp name register /gst gstreamer 127.0.0.1 15000 +yarp read /ccc //DEBUG +yarp connect /gst /ccc gstreamer + +*/ + +#endif diff --git a/src/carriers/gstreamer_carrier/GstreamerDecoder.cpp b/src/carriers/gstreamer_carrier/GstreamerDecoder.cpp new file mode 100644 index 00000000000..5a40d1ed9cc --- /dev/null +++ b/src/carriers/gstreamer_carrier/GstreamerDecoder.cpp @@ -0,0 +1,232 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + +#include "GstreamerDecoder.h" + +#include + + +#include +#include + +#include +#include +#include + +//#define debug_time 1 + +#ifdef debug_time + #include + #define DBG_TIME_PERIOD_PRINTS 10 //10 sec +#endif + +using namespace yarp::sig; +using namespace yarp::os; + +YARP_LOG_COMPONENT(GSTREAMER_DECODER, + "yarp.carrier.gstreamer.GstDecoder", + yarp::os::Log::minimumPrintLevel(), + yarp::os::Log::LogTypeReserved, + yarp::os::Log::printCallback(), + nullptr) + +//------------------------------------------------------------------- +//--------------- CALLBACK FUNCTIONS ------------------------------- +//------------------------------------------------------------------- + +GstFlowReturn new_sample_func(GstAppSink *appsink, gpointer user_data) +{ + static yarp::sig::ImageOf curr_frame; +#ifdef debug_time + static bool isFirst = true; + double start_time = Time::now(); + double end_time=0; + + static double last_call; + static double sumOf_timeBetweenCalls = 0; + static double sumOf_timeOfNewSampleFunc = 0; + static uint32_t count=0; + #define MAX_COUNT 100 + + + if(!isFirst) + sumOf_timeBetweenCalls+=(start_time -last_call); + + last_call = start_time; +#endif + + data_for_gst_callback* dec_data = (data_for_gst_callback*)user_data; + dec_data->img_pointer = 
&curr_frame; + dec_data->sem_pointer_stream->wait(); + + GstSample* sample = gst_app_sink_pull_sample(appsink); + if (!sample) + { + yCWarning(GSTREAMER_DECODER, "could not take a sample!"); + return GST_FLOW_OK; + } + + GstCaps *caps = gst_sample_get_caps (sample); + if(!caps) + { + yCError(GSTREAMER_DECODER, "could not get caps of sample!"); + return GST_FLOW_OK; + } + GstStructure *struc = gst_caps_get_structure(caps, 0); + if(!struc) + { + yCError(GSTREAMER_DECODER, "could not get struct of caps!"); + return GST_FLOW_OK; + } + gint width, height; + gboolean res; + res = gst_structure_get_int(struc, "width", &width); + if(!res) + { + yCError(GSTREAMER_DECODER, "could not get width!"); + return GST_FLOW_ERROR; + } + + res = gst_structure_get_int(struc, "height", &height); + if(!res) + { + yCError(GSTREAMER_DECODER, "GSTREAMER: could not get height!"); + return GST_FLOW_ERROR; + } + yCTrace(GSTREAMER_DECODER, "Image has size %d x %d", width, height); + + GstBuffer *buffer = gst_sample_get_buffer(sample); + GstMapInfo map; + if(!gst_buffer_map(buffer, &map, GST_MAP_READ)) + { + yCError(GSTREAMER_DECODER, "could not get map!"); + return GST_FLOW_ERROR; + } + + dec_data->mutex_pointer->lock(); + curr_frame.resize(width, height); + size_t map_size = map.size; + size_t img_size = width * height * 3; + size_t img_size2 = curr_frame.getRawImageSize(); + unsigned char* ydata_ptr = curr_frame.getRawImage(); + memcpy(ydata_ptr, map.data, map_size); + + dec_data->mutex_pointer->unlock(); + gst_buffer_unmap(buffer, &map); + + gst_sample_unref(sample); + dec_data->sem_pointer_gst->post(); + +#ifdef debug_time + end_time = Time::now(); + sumOf_timeOfNewSampleFunc += (end_time-start_time); + count++; + isFirst=false; + + if(count>=MAX_COUNT) + { + yCDebug(H264CARRIER, + "On %d times: NewSampleFunc is long %.6f sec and sleeps %.6f sec", + MAX_COUNT, + (sumOf_timeOfNewSampleFunc/MAX_COUNT), + (sumOf_timeBetweenCalls/MAX_COUNT) ); + count = 0; + isFirst = true; + 
sumOf_timeBetweenCalls = 0; + sumOf_timeOfNewSampleFunc = 0; + } +#endif + + return GST_FLOW_OK; +} + + + + +//---------------------------------------------------------------------- + +GstYarpDecoder::GstYarpDecoder(std::mutex* _m, yarp::os::Semaphore* _s, GstYarpDecoder_cfgParamters& config) +{ + m_pointer_mutex = _m; + m_gst_cbk_data.mutex_pointer = m_pointer_mutex; + m_gst_cbk_data.sem_pointer_gst = &m_semaphore_reading_from_gst; + m_gst_cbk_data.sem_pointer_stream = &m_semaphore_reading_from_stream; +} + +bool GstYarpDecoder::init(std::string pipeline_string) +{ + gst_init(nullptr, nullptr); + + // Create the pipeline and add an appsink to it + pipeline_string += " ! videoconvert ! video/x-raw,format=RGB ! appsink name = myappsink "; + + yCInfo(GSTREAMER_DECODER) << "Using the following pipeline string:" << pipeline_string; + + GError* error_out = nullptr; + m_pipeline = gst_parse_launch(pipeline_string.c_str(), &error_out); + if (m_pipeline == nullptr) + { + yCError(GSTREAMER_DECODER) << "Pipeline syntax failure(1):" << pipeline_string; + return false; + } + if (error_out) + { + yCError(GSTREAMER_DECODER) << "Pipeline syntax failure(2):" << pipeline_string << error_out->message; + g_clear_error(&error_out); + return false; + } + + //configure to appsink to use the new_sample_func callback + GstElement* appsink = gst_bin_get_by_name(GST_BIN(m_pipeline), "myappsink"); + if (appsink == nullptr) + { + yCError(GSTREAMER_DECODER) << "Pipeline syntax failure(3):" << pipeline_string; + return false; + } + g_object_set(appsink, "emit-signals", false, NULL); + GstAppSinkCallbacks callbacks = {nullptr, nullptr, new_sample_func}; + gst_app_sink_set_callbacks(GST_APP_SINK(appsink), &callbacks, &m_gst_cbk_data, nullptr); + + yCDebug(GSTREAMER_DECODER) << "init ok"; + return true; +} + +bool GstYarpDecoder::start() +{ + GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING); + if (ret == GST_STATE_CHANGE_FAILURE) + { + yCDebug(GSTREAMER_DECODER) << 
"pipeline failed to start!"; + return false; + } + yCDebug(GSTREAMER_DECODER) << "pipeline started!"; + this->prepareNextFrame(); + return true; +} + +bool GstYarpDecoder::stop() +{ + gst_element_set_state(m_pipeline, GST_STATE_NULL); + gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)), nullptr, nullptr, nullptr); + yCDebug(GSTREAMER_DECODER) << "deleting pipeline"; + gst_object_unref(GST_OBJECT(m_pipeline)); + return true; +} + +GstYarpDecoder::~GstYarpDecoder() +{ + stop(); +} + +ImageOf* GstYarpDecoder::getLastFrame() +{ + m_semaphore_reading_from_gst.wait(); + return m_gst_cbk_data.img_pointer; +} + +void GstYarpDecoder::prepareNextFrame() +{ + m_semaphore_reading_from_stream.post(); +} diff --git a/src/carriers/gstreamer_carrier/GstreamerDecoder.h b/src/carriers/gstreamer_carrier/GstreamerDecoder.h new file mode 100644 index 00000000000..11b1d136ca4 --- /dev/null +++ b/src/carriers/gstreamer_carrier/GstreamerDecoder.h @@ -0,0 +1,64 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + +#ifndef YARP_GSTREAMERDECODER_H +#define YARP_GSTREAMERDECODER_H + +#include +#include +#include +#include + +#include +#include +#include + +struct GstYarpDecoder_cfgParamters +{ + GstYarpDecoder_cfgParamters() : + remotePort(-1) + {} + + int remotePort; // the port on which the server send data +}; + +//--------------------------------------------------------------------------------------------- +struct data_for_gst_callback +{ + data_for_gst_callback() = default; + + std::mutex* mutex_pointer = nullptr; + yarp::sig::ImageOf* img_pointer = nullptr; + yarp::os::Semaphore* sem_pointer_gst = nullptr; + yarp::os::Semaphore* sem_pointer_stream = nullptr; +}; + +//--------------------------------------------------------------------------------------------- +class GstYarpDecoder +{ +private: + GstElement* m_pipeline = nullptr; + + data_for_gst_callback m_gst_cbk_data; + + 
yarp::sig::ImageOf* m_pointer_frame = nullptr; + std::mutex* m_pointer_mutex = nullptr; + yarp::os::Semaphore m_semaphore_reading_from_gst = 0; + yarp::os::Semaphore m_semaphore_reading_from_stream; + + + GstYarpDecoder_cfgParamters m_cfg; + +public: + GstYarpDecoder(std::mutex* m, yarp::os::Semaphore* s, GstYarpDecoder_cfgParamters& config); + ~GstYarpDecoder(); + bool init(std::string pipeline_string); + bool start(); + bool stop(); + yarp::sig::ImageOf* getLastFrame(); + void prepareNextFrame(); +}; + +#endif diff --git a/src/carriers/gstreamer_carrier/GstreamerStream.cpp b/src/carriers/gstreamer_carrier/GstreamerStream.cpp new file mode 100644 index 00000000000..edec6a5a934 --- /dev/null +++ b/src/carriers/gstreamer_carrier/GstreamerStream.cpp @@ -0,0 +1,231 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + +#include "GstreamerStream.h" + +#include +#include + +#include +#include + +#include +#include +#include +#include + +#include +#include + +using namespace yarp::os::impl; +using namespace yarp::os; + + +YARP_LOG_COMPONENT(GSTREAMER_STREAM, + "yarp.carrier.gstreamer.gstreamerStream", + yarp::os::Log::minimumPrintLevel(), + yarp::os::Log::LogTypeReserved, + yarp::os::Log::printCallback(), + nullptr) + +bool GstreamerStream::open(const std::string& pipeline_string, const Contact& remote) +{ + Contact local = Contact("127.0.0.1", -1); + return open(pipeline_string, local, remote); +} + +bool GstreamerStream::open(const std::string& pipeline_string, const Contact& local, const Contact& remote) +{ + m_localAddress = local; + m_remoteAddress = remote; + + //yCDebug(GSTREAMER_STREAM, "Update: GstreamerStream::open() from %s to %s", m_localAddress.toURI().c_str(), m_remoteAddress.toURI().c_str()); + + GstYarpDecoder_cfgParamters params; + m_decoder = new GstYarpDecoder(&m_mutex, &m_sema, params); + + bool ret = false; + ret = m_decoder->init(pipeline_string); + if (!ret) + { + return 
false; + } + ret = m_decoder->start(); + if (!ret) + { + return false; + } + + return true; +} + +GstreamerStream::~GstreamerStream() +{ + closeMain(); +} + +void GstreamerStream::interrupt() +{ + bool act = false; + m_mutex.lock(); + if ((!m_closed) && (!m_interrupting)) + { + act = true; + m_interrupting = true; + m_closed = true; + } + m_mutex.unlock(); + + // wait for interruption to be done + if (m_interrupting) + { + while (m_interrupting) + { + yCDebug(GSTREAMER_STREAM, "waiting for interrupt to be finished..."); + yarp::os::SystemClock::delaySystem(0.1); + } + } +} + +void GstreamerStream::closeMain() +{ + if (m_decoder) + { + m_decoder->stop(); + + delete m_decoder; + m_decoder = nullptr; + } +} + +bool GstreamerStream::isOk() const +{ + return true; +} + +//once that you enter here, it is guaranteed that frame will be not modified until prepareNextFrame() is called. +yarp::sig::ImageOf* GstreamerStream::getFrame() +{ + //create a fake frame, just for test + if (m_debug_test_image_generation) + { + static yarp::sig::ImageOf frame; + frame.resize(640, 480); + static int val = 0; + if (val) { + memset(frame.getRawImage(), 90, frame.getRawImageSize()); + val = 0; + } else { + memset(frame.getRawImage(), 200, frame.getRawImageSize()); + val = 1; + } + return &frame; + } + + //get the frame from the decoder. + //this call is blocking until the frame is ready. 
+ return m_decoder->getLastFrame(); +} + +void GstreamerStream::prepareNextFrame() +{ + if (m_debug_test_image_generation) + { + return; + } + return m_decoder->prepareNextFrame(); +} + +yarp::conf::ssize_t GstreamerStream::read(Bytes& b) +{ + size_t bl = b.length(); + + if (m_enum_phases == enum_phases::PHASE_0_GET_IMG) + { + m_pointer_last_frame = getFrame(); + m_enum_phases = enum_phases::PHASE_1_PREPARE_HEADER; + } + if (m_enum_phases == enum_phases::PHASE_1_PREPARE_HEADER) + { + m_imgHeader.setFromImage(*m_pointer_last_frame); + m_enum_phases = enum_phases::PHASE_2_SEND_HEADER; + } + if (m_enum_phases == enum_phases::PHASE_2_SEND_HEADER) + { + sendHeaderLabel: + // starting to send the header + if (m_remaining == 0) + { + // cursor is set to the header, which has lenght = remaining + m_cursor = (char*)(&m_imgHeader); + m_remaining = sizeof(m_imgHeader); + } + + size_t bytestobecopied = m_remaining; + if (bytestobecopied > bl) + { + bytestobecopied = bl; + memcpy(b.get(), m_cursor, bytestobecopied); + m_remaining -= bytestobecopied; + m_cursor += bytestobecopied; + return bytestobecopied; + } + else + { + memcpy(b.get(), m_cursor, bytestobecopied); + m_remaining = 0; + m_enum_phases = enum_phases::PHASE_3_SEND_IMAGE; + return bytestobecopied; + } + } + if (m_enum_phases == enum_phases::PHASE_3_SEND_IMAGE) + { + // starting to send the image + if (m_remaining == 0) { + // cursor is set to the image, which has lenght = remaining + m_cursor = (char*)(m_pointer_last_frame->getRawImage()); + m_remaining = m_pointer_last_frame->getRawImageSize(); + } + + size_t bytestobecopied = m_remaining; + if (bytestobecopied > bl) + { + bytestobecopied = bl; + memcpy(b.get(), m_cursor, bytestobecopied); + m_remaining -= bytestobecopied; + m_cursor += bytestobecopied; + return bytestobecopied; + } + else + { + memcpy(b.get(), m_cursor, bytestobecopied); + m_remaining = 0; + m_enum_phases = enum_phases::PHASE_0_GET_IMG; + this->prepareNextFrame(); + return bytestobecopied; + } + } 
+ + // unreachable code + yError("Logic bug 2"); + yAssert(false); + return -1; +} + +void GstreamerStream::write(const Bytes& b) +{ +} + +void GstreamerStream::reset() +{ +} + +void GstreamerStream::beginPacket() +{ +} + +void GstreamerStream::endPacket() +{ +} diff --git a/src/carriers/gstreamer_carrier/GstreamerStream.h b/src/carriers/gstreamer_carrier/GstreamerStream.h new file mode 100644 index 00000000000..7d383b004e0 --- /dev/null +++ b/src/carriers/gstreamer_carrier/GstreamerStream.h @@ -0,0 +1,112 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + +#ifndef YARP_GSTREAMERTREAM_H +#define YARP_GSTREAMERTREAM_H + +#include +#include +#include +#include "GstreamerDecoder.h" + +#include +#include + +#include +#include + +class GstreamerStream : + public yarp::os::TwoWayStream, + public yarp::os::InputStream, + public yarp::os::OutputStream +{ +private: + GstYarpDecoder* m_decoder = nullptr; + +public: + GstreamerStream() : + m_closed(false), + m_interrupting(false), + m_mutex() + { + } + + virtual bool open(const std::string& pipeline_string, const yarp::os::Contact& remote); + + virtual bool open(const std::string& pipeline_string, const yarp::os::Contact& local, const yarp::os::Contact& remote); + + virtual ~GstreamerStream(); + + InputStream& getInputStream() override + { + return *this; + } + + OutputStream& getOutputStream() override + { + return *this; + } + + const yarp::os::Contact& getLocalAddress() const override + { + return m_localAddress; + } + + const yarp::os::Contact& getRemoteAddress() const override + { + return m_remoteAddress; + } + + yarp::sig::ImageOf* getFrame(); + void prepareNextFrame(); + + void interrupt() override; + + void close() override + { + closeMain(); + } + + virtual void closeMain(); + + bool isOk() const override; + + using yarp::os::InputStream::read; + yarp::conf::ssize_t read(yarp::os::Bytes& b) override; + + using 
yarp::os::OutputStream::write; + void write(const yarp::os::Bytes& b) override; + + void reset() override; + + void beginPacket() override; + + void endPacket() override; + +private: + bool m_closed, m_interrupting; + + yarp::sig::ImageOf* m_pointer_last_frame=nullptr; + yarp::sig::ImageNetworkHeader m_imgHeader; + enum class enum_phases + { + PHASE_0_GET_IMG = 0, + PHASE_1_PREPARE_HEADER = 1, + PHASE_2_SEND_HEADER = 2, + PHASE_3_SEND_IMAGE = 3 + } m_enum_phases = enum_phases::PHASE_0_GET_IMG; + + bool m_debug_test_image_generation = false; + + yarp::os::Contact m_localAddress, m_remoteAddress; + std::mutex m_mutex; + yarp::os::Semaphore m_sema; + + char* m_cursor = nullptr; + size_t m_remaining = 0; + +}; + +#endif diff --git a/src/carriers/h264_carrier/CMakeLists.txt b/src/carriers/h264_carrier/CMakeLists.txt deleted file mode 100644 index 38087b713eb..00000000000 --- a/src/carriers/h264_carrier/CMakeLists.txt +++ /dev/null @@ -1,66 +0,0 @@ -# SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) -# SPDX-License-Identifier: BSD-3-Clause - -yarp_prepare_plugin(h264 - CATEGORY carrier - TYPE H264Carrier - INCLUDE H264Carrier.h - DEPENDS "YARP_HAS_GObject;YARP_HAS_GLIB2;YARP_HAS_GStreamer;YARP_HAS_GStreamerPluginsBase" -) - -if(NOT SKIP_h264) - yarp_add_plugin(yarp_h264) - - target_sources(yarp_h264 - PRIVATE - H264Carrier.h - H264Carrier.cpp - H264Stream.h - H264Stream.cpp - H264Decoder.cpp - H264Decoder.h - H264LogComponent.cpp - H264LogComponent.h - ) - - target_link_libraries(yarp_h264 - PRIVATE - YARP::YARP_os - YARP::YARP_sig - ) - list(APPEND YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS - YARP_os - YARP_sig - ) - - # GObject is required by GStreamer - target_link_libraries(yarp_h264 PRIVATE ${GOBJECT_LIBRARIES}) - target_include_directories(yarp_h264 SYSTEM PRIVATE ${GOBJECT_INCLUDE_DIR}) -# list(APPEND YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS GObject) (not using targets) - - # GLIB2 is required by GStreamer - target_link_libraries(yarp_h264 
PRIVATE ${GLIB2_LIBRARIES}) - target_include_directories(yarp_h264 SYSTEM PRIVATE ${GLIB2_INCLUDE_DIR}) -# list(APPEND YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS GLIB2) (not using targets) - - target_include_directories(yarp_h264 SYSTEM PRIVATE ${GSTREAMER_INCLUDE_DIRS}) - target_link_libraries(yarp_h264 PRIVATE ${GSTREAMER_LIBRARY}) -# list(APPEND YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS GSTREAMER) (not using targets) - - target_include_directories(yarp_h264 SYSTEM PRIVATE ${GSTREAMER_app_INCLUDE_DIR}) - target_link_libraries(yarp_h264 PRIVATE ${GSTREAMER_APP_LIBRARY}) -# list(APPEND YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS GStreamerPluginsBase) (not using targets) - - yarp_install( - TARGETS yarp_h264 - EXPORT YARP_${YARP_PLUGIN_MASTER} - COMPONENT ${YARP_PLUGIN_MASTER} - LIBRARY DESTINATION ${YARP_DYNAMIC_PLUGINS_INSTALL_DIR} - ARCHIVE DESTINATION ${YARP_STATIC_PLUGINS_INSTALL_DIR} - YARP_INI DESTINATION ${YARP_PLUGIN_MANIFESTS_INSTALL_DIR} - ) - - set(YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS ${YARP_${YARP_PLUGIN_MASTER}_PRIVATE_DEPS} PARENT_SCOPE) - - set_property(TARGET yarp_h264 PROPERTY FOLDER "Plugins/Carrier") -endif() diff --git a/src/carriers/h264_carrier/H264Carrier.cpp b/src/carriers/h264_carrier/H264Carrier.cpp deleted file mode 100644 index ddb6c8e84e6..00000000000 --- a/src/carriers/h264_carrier/H264Carrier.cpp +++ /dev/null @@ -1,220 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#include "H264Carrier.h" -#include "H264Stream.h" -#include "H264LogComponent.h" -#include -#include -#include -#include -#include - - -using namespace yarp::os; -using namespace yarp::sig; - - - -std::string H264Carrier::getName() const -{ - return "h264"; -} - -bool H264Carrier::isConnectionless() const -{ - return true; -} - -bool H264Carrier::canAccept() const -{ - return true; -} - -bool H264Carrier::canOffer() const -{ - return true; -} - -bool H264Carrier::isTextMode() const -{ - 
return false; -} - -bool H264Carrier::canEscape() const -{ - return false; -} - -void H264Carrier::handleEnvelope(const std::string& envelope) -{ - this->envelope = envelope; -} - -bool H264Carrier::requireAck() const -{ - return false; -} - -bool H264Carrier::supportReply() const -{ - return false; -} - -bool H264Carrier::isLocal() const -{ - return false; -} - -// this is important - flips expected flow of messages -bool H264Carrier::isPush() const -{ - return false; -} - -std::string H264Carrier::toString() const -{ - return "h264_carrier"; -} - -void H264Carrier::getHeader(Bytes& header) const -{ -} - -bool H264Carrier::checkHeader(const Bytes& header) -{ - return true; -} - -void H264Carrier::setParameters(const Bytes& header) -{ - // no parameters - no carrier variants - yCTrace(H264CARRIER, "setParameters"); -} - - -static int getIntParam(Name &n, const char *param) -{ - bool hasField; - std::string strValue = n.getCarrierModifier(param, &hasField); - Value *v = Value::makeValue(strValue); - int intvalue = 0; - if((hasField) && v->isInt32()) - { - intvalue = v->asInt32(); - } - - delete v; - return intvalue; -} - -// Now, the initial hand-shaking -bool H264Carrier::prepareSend(ConnectionState& proto) -{ - //get all parameters of this carrier - Name n(proto.getRoute().getCarrierName() + "://test"); - - cfgParams.crop.left = getIntParam(n, "cropLeft"); - cfgParams.crop.right = getIntParam(n, "cropRight"); - cfgParams.crop.top = getIntParam(n, "cropTop"); - cfgParams.crop.bottom = getIntParam(n, "cropBottom"); - cfgParams.fps_max = getIntParam(n, "max_fps"); - cfgParams.removeJitter = (getIntParam(n, "removeJitter") > 0) ? 
true : false; - return true; -} - -bool H264Carrier::sendHeader(ConnectionState& proto) -{ - yCTrace(H264CARRIER, "sendHeader"); - return true; -} - -bool H264Carrier::expectSenderSpecifier(ConnectionState& proto) -{ - yCTrace(H264CARRIER, "expectSenderSpecifier"); - return true; -} - -bool H264Carrier::expectExtraHeader(ConnectionState& proto) -{ - yCTrace(H264CARRIER, "expectExtraHeader"); - return true; -} - -bool H264Carrier::respondToHeader(ConnectionState& proto) -{ - yCTrace(H264CARRIER, "respondToHeader"); - return true; -} - -bool H264Carrier::expectReplyToHeader(ConnectionState& proto) -{ - // I'm the receiver... - - cfgParams.remotePort = proto.getRoute().getToContact().getPort(); - - auto* stream = new H264Stream(cfgParams); - if (stream==nullptr) { return false; } - - yarp::os::Contact remote = proto.getStreams().getRemoteAddress(); - bool ok = stream->open(remote); - - //std::cout << "Remote contact info: host=" << proto.getRoute().getToContact().getHost() << " port= " << proto.getRoute().getToContact().getPort() <start(); - - proto.takeStreams(stream); - return true; -} - -bool H264Carrier::isActive() const -{ - return true; -} - -bool H264Carrier::write(ConnectionState& proto, SizedWriter& writer) -{ - //I should not be here: the carried doesn't perform writing - return false; -} - -bool H264Carrier::reply(ConnectionState& proto, SizedWriter& writer) -{ - return false; -} - -bool H264Carrier::sendIndex(ConnectionState& proto, SizedWriter& writer) -{ - return true; -} - -bool H264Carrier::expectIndex(ConnectionState& proto) -{ - return true; -} - -bool H264Carrier::sendAck(ConnectionState& proto) -{ - return true; -} - -bool H264Carrier::expectAck(ConnectionState& proto) -{ - return true; -} - -std::string H264Carrier::getBootstrapCarrierName() const -{ - return {}; -} - -yarp::os::Face* H264Carrier::createFace() const -{ - return new yarp::os::impl::FakeFace(); -} diff --git a/src/carriers/h264_carrier/H264Carrier.h 
b/src/carriers/h264_carrier/H264Carrier.h deleted file mode 100644 index b9257b595af..00000000000 --- a/src/carriers/h264_carrier/H264Carrier.h +++ /dev/null @@ -1,117 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#ifndef H264CARRIER_INC -#define H264CARRIER_INC - -#include -#include -#include "H264Decoder.h" - - -/** - * \ingroup carriers_lists - * A carrier for receiving frames compressed in h264 over rtp. - * This carrier uses gstreamer libraries (libgstreamer1.0-dev and libgstreamer-plugins-base1.0-dev) to read rtp packets and to decode the h264 stream. - * - * Use this carrier in the following way: - * - suppose there is a server that streams video frames to IP x.x.x.x and to port p: - * register this port to yarp by yarp command "yarp name register" in this way: yarp name register /serverH264Stream h264 x.x.x.x p - * - you need to connect your client port (for example /yarpview/img:i ) to /serverH264Stream port by h264 carrier to get the video stream: - * yarp connect /serverH264Stream /yarpview/img:i h264 - * You can configure the carrier to crop frames by passing parameters to the carrier with usual syntax: +pramName.paramValue: - * - +cropLeft.100 ==> the carrier crops 100 pxel from left side - * - +cropRight.100 ==> the carrier crops 100 pxel from right side - * - +cropTop.100 ==> the carrier crops 100 pxel from top side - * - +cropBottom.100 ==> the carrier crops 100 pxel from bottom side - * - +removeJitter.1 ==> the carrier removes the jitter. If you put 0, the jitter is not removed (default behaviour). - * - +verbose.1 ==> enables verbose mode (default is not verbose) (+verbose.0 disables it.) 
- */ - -class H264Carrier : - public yarp::os::Carrier -{ -private: - std::string envelope; - h264Decoder_cfgParamters cfgParams; -public: - H264Carrier() - {} - - Carrier *create() const override - { - return new H264Carrier(); - } - - std::string getName() const override; - - bool isConnectionless() const override; - - bool canAccept() const override; - - bool canOffer() const override; - - bool isTextMode() const override; - - bool canEscape() const override; - - void handleEnvelope(const std::string& envelope) override; - - bool requireAck() const override; - - bool supportReply() const override; - - bool isLocal() const override; - - // this is important - flips expected flow of messages - bool isPush() const override; - - std::string toString() const override; - - void getHeader(yarp::os::Bytes& header) const override; - - bool checkHeader(const yarp::os::Bytes& header) override; - - void setParameters(const yarp::os::Bytes& header) override; - - - // Now, the initial hand-shaking - - bool prepareSend(yarp::os::ConnectionState& proto) override; - - bool sendHeader(yarp::os::ConnectionState& proto) override; - - bool expectSenderSpecifier(yarp::os::ConnectionState& proto) override; - - bool expectExtraHeader(yarp::os::ConnectionState& proto) override; - - bool respondToHeader(yarp::os::ConnectionState& proto) override; - - bool expectReplyToHeader(yarp::os::ConnectionState& proto) override; - - bool isActive() const override; - - - // Payload time! 
- - bool write(yarp::os::ConnectionState& proto, yarp::os::SizedWriter& writer) override; - - bool reply(yarp::os::ConnectionState& proto, yarp::os::SizedWriter& writer) override; - - virtual bool sendIndex(yarp::os::ConnectionState& proto, yarp::os::SizedWriter& writer); - - bool expectIndex(yarp::os::ConnectionState& proto) override; - - bool sendAck(yarp::os::ConnectionState& proto) override; - - bool expectAck(yarp::os::ConnectionState& proto) override; - - std::string getBootstrapCarrierName() const override; - - yarp::os::Face* createFace() const override; - -}; - -#endif diff --git a/src/carriers/h264_carrier/H264Decoder.cpp b/src/carriers/h264_carrier/H264Decoder.cpp deleted file mode 100644 index e742d107ff4..00000000000 --- a/src/carriers/h264_carrier/H264Decoder.cpp +++ /dev/null @@ -1,534 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#include "H264Decoder.h" -#include "H264LogComponent.h" - -#include - - -#include -#include - -#include -#include -#include -#include - -//#define debug_time 1 - -#ifdef debug_time - #include - #define DBG_TIME_PERIOD_PRINTS 10 //10 sec -#endif - -using namespace yarp::sig; -using namespace yarp::os; - - -struct data_for_gst_callback -{ - data_for_gst_callback() = default; - - std::mutex *m{nullptr}; - ImageOf *img{nullptr}; - bool isNew{false}; - Semaphore *s{nullptr}; - bool isReq{false}; -}; -//------------------------------------------------------------------- -//--------------- CALLBACK FUNCTIONS ------------------------------- -//------------------------------------------------------------------- - -/* -static GstBusSyncReply bus_call (GstBus *bus, GstMessage *msg, gpointer data) -{ - GstElement *pipeline = (GstElement *) data; - - switch (GST_MESSAGE_TYPE (msg)) - { - - case GST_MESSAGE_EOS: - { - yCTrace(H264CARRIER, "End of stream"); - gst_element_set_state (pipeline, GST_STATE_NULL); - // g_main_loop_quit (loop); - break; - 
} - - case GST_MESSAGE_ERROR: - { - gchar *debug; - GError *error; - - gst_message_parse_error (msg, &error, &debug); - g_free (debug); - - yCError(H264CARRIER, "GSTREAMER: Error: %s", error->message); - g_error_free (error); - - gst_element_set_state (pipeline, GST_STATE_NULL); - break; - } - default: - { - yCTrace("GSTREAMER: I received message of type %d", GST_MESSAGE_TYPE (msg)); - break; - } - } - - return GST_BUS_PASS; -} -*/ - -static gboolean link_videosrc2nextWithCaps(GstElement *e1, GstElement *e2) -{ - gboolean link_ok; - GstCaps *caps; - -/* -// "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96, a-framerate=(string)30" - caps = gst_caps_new_simple("application/x-rtp", - "media", G_TYPE_STRING, "video", - "clock-rate", G_TYPE_INT, 90000, - "encoding-name", G_TYPE_STRING, "H264", - "payload", G_TYPE_INT, 96, - "a-framerate", G_TYPE_STRING, "30", - NULL); -*/ -// "application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96" - caps = gst_caps_new_simple("application/x-rtp", - "media", G_TYPE_STRING, "video", - "encoding-name", G_TYPE_STRING, "H264", - "payload", G_TYPE_INT, 96, - NULL); - - - link_ok = gst_element_link_filtered(e1, e2, caps); - if(!link_ok) - { - yCError(H264CARRIER) << "H264Decoder-GSTREAMER: link_videosrc2nextWithCaps failed"; - } - else - { - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: link_videosrc2nextWithCaps OK"; - } - - return (link_ok); -} - - - -static gboolean link_convert2next(GstElement *e1, GstElement *e2) -{ - gboolean link_ok; - GstCaps *caps; - - caps = gst_caps_new_simple("video/x-raw", - "format", G_TYPE_STRING, "RGB", - NULL); - - - link_ok = gst_element_link_filtered(e1, e2, caps); - - if(!link_ok) - { - yCError(H264CARRIER) << "H264Decoder-GSTREAMER: link_convert2next failed"; - } - else - { - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: link_convert2next OK"; - } - - return (link_ok); -} - - -GstFlowReturn new_sample(GstAppSink 
*appsink, gpointer user_data) -{ -#ifdef debug_time - static bool isFirst = true; - double start_time = Time::now(); - double end_time=0; - - static double last_call; - static double sumOf_timeBetweenCalls = 0; - static double sumOf_timeOfNewSampleFunc = 0; - static uint32_t count=0; - #define MAX_COUNT 100 - - - if(!isFirst) - sumOf_timeBetweenCalls+=(start_time -last_call); - - last_call = start_time; - - -#endif - - auto* dec_data = (data_for_gst_callback*)user_data; - - GstSample *sample = nullptr; - g_signal_emit_by_name (appsink, "pull-sample", &sample, NULL); - if(!sample) - { - yCWarning(H264CARRIER, "GSTREAMER: could not take a sample!"); - return GST_FLOW_OK; - } - - GstCaps *caps = gst_sample_get_caps (sample); - if(!caps) - { - yCError(H264CARRIER, "GSTREAMER: could not get caps of sample!"); - return GST_FLOW_ERROR; - } - GstStructure *struc = gst_caps_get_structure(caps, 0); - if(!struc) - { - yCError(H264CARRIER, "GSTREAMER: could not get struct of caps!"); - return GST_FLOW_ERROR; - } - gint width, height; - gboolean res; - res = gst_structure_get_int(struc, "width", &width); - if(!res) - { - yCError(H264CARRIER, "GSTREAMER: could not get width!"); - return GST_FLOW_ERROR; - } - - res = gst_structure_get_int(struc, "height", &height); - if(!res) - { - yCError(H264CARRIER, "GSTREAMER: could not get height!"); - return GST_FLOW_ERROR; - } - yCTrace(H264CARRIER, "Image has size %d x %d", width, height); - - GstBuffer *buffer = gst_sample_get_buffer(sample); - GstMapInfo map; - if(!gst_buffer_map(buffer, &map, GST_MAP_READ)) - { - yCError(H264CARRIER, "GSTREAMER: could not get map!"); - return GST_FLOW_ERROR; - } - //HERE I GET MY IMAGE!!!! - //DO SOMETHING... 
- //ImageOf &yframebuff = yarp_stuff_ptr->yport_ptr->prepare(); - dec_data->m->lock(); - dec_data->isNew = true; - dec_data->img->resize(width, height); - - unsigned char *ydata_ptr = dec_data->img->getRawImage(); - memcpy(ydata_ptr, map.data, width*height*3); - - dec_data->m->unlock(); - gst_buffer_unmap(buffer, &map); - - gst_sample_unref(sample); - if (dec_data->isReq) { - dec_data->s->post(); - } - - -#ifdef debug_time - end_time = Time::now(); - sumOf_timeOfNewSampleFunc += (end_time-start_time); - count++; - isFirst=false; - - if(count>=MAX_COUNT) - { - yCDebug(H264CARRIER, - "On %d times: NewSampleFunc is long %.6f sec and sleeps %.6f sec", - MAX_COUNT, - (sumOf_timeOfNewSampleFunc/MAX_COUNT), - (sumOf_timeBetweenCalls/MAX_COUNT) ); - count = 0; - isFirst = true; - sumOf_timeBetweenCalls = 0; - sumOf_timeOfNewSampleFunc = 0; - } - - -#endif - - - return GST_FLOW_OK; - -} - - - - - - - -//---------------------------------------------------------------------- - - - - - - - - -class H264DecoderHelper -{ -public: - //GMainLoop *loop; - - GstElement *pipeline; - GstElement *source; - GstElement *sink; - GstElement *jitterBuff; - GstElement *rtpDepay; - GstElement *parser; - GstElement *convert; - GstElement *decoder; - GstElement *sizeChanger; - - data_for_gst_callback gst_cbk_data; - - GstBus *bus; //maybe can be moved in function where i use it - guint bus_watch_id; - - ImageOf myframe; - - H264DecoderHelper(std::mutex* m_ptr, Semaphore* s_ptr) : - pipeline(nullptr), - source(nullptr), - sink(nullptr), - jitterBuff(nullptr), - rtpDepay(nullptr), - parser(nullptr), - convert(nullptr), - decoder(nullptr), - sizeChanger(nullptr), - bus(nullptr), - bus_watch_id(0) - { - gst_cbk_data.m = m_ptr; - gst_cbk_data.img = &myframe; - gst_cbk_data.s = s_ptr; - } - ~H264DecoderHelper(){;} - - - bool istantiateElements(h264Decoder_cfgParamters &cfgParams) - { - gst_init(nullptr, nullptr); - pipeline = gst_pipeline_new ("video-player"); - source = gst_element_factory_make 
("udpsrc", "video-source"); - rtpDepay = gst_element_factory_make ("rtph264depay", "rtp-depay"); - parser = gst_element_factory_make ("h264parse", "parser"); - decoder = gst_element_factory_make ("avdec_h264", "decoder"); - sizeChanger = gst_element_factory_make ("videocrop", "cropper"); - convert = gst_element_factory_make ("videoconvert", "convert"); //because use RGB space - sink = gst_element_factory_make ("appsink", "video-output"); - - if (!pipeline || !source || !rtpDepay || !parser || !decoder || !convert || !sink || !sizeChanger) - { - yCError(H264CARRIER) << "H264Decoder-GSTREAMER: one element could not be created. Exiting."; - return false; - } - if (cfgParams.removeJitter) - { - jitterBuff = gst_element_factory_make("rtpjitterbuffer", "jitterBuffer"); - if (!jitterBuff) - { - yCError(H264CARRIER) << "H264Decoder-GSTREAMER: rtpjitterbuffer could not be created. Exiting."; - return false; - } - } - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: istantiateElements OK"; - - return true; - } - - bool configureElements(h264Decoder_cfgParamters &cfgParams) //maybe i can make callbak configurable in the future..... - { - // 1) configure source port - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to configure source port with value" << cfgParams.remotePort; - g_object_set(source, "port", cfgParams.remotePort, NULL); - yCDebug(H264CARRIER) << "H264Decoder-GSTREAMER: configured source port with" << cfgParams.remotePort; - - // 2) configure callback on new frame - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to configure appsink.... "; - //I decided to use callback mechanism because it should have less overhead - g_object_set( sink, "emit-signals", false, NULL ); - - GstAppSinkCallbacks cbs; // Does this need to be kept alive? 
- - // Set Video Sink callback methods - cbs.eos = nullptr; - cbs.new_preroll = nullptr; - cbs.new_sample = &new_sample; - gst_app_sink_set_callbacks( GST_APP_SINK( sink ), &cbs, &gst_cbk_data, nullptr ); - - /* //3) add watch ( a message handler) - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - //bus_watch_id = gst_bus_add_watch (bus, bus_call, loop); - gst_object_unref (bus); - - gst_bus_set_sync_handler(bus, bus_call, pipeline, NULL); - gst_object_unref (bus); - */ - - //videocrop - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to set new size: left" << cfgParams.crop.left << "right=" << cfgParams.crop.right << "top=" << cfgParams.crop.top << "bottom" << cfgParams.crop.bottom; - g_object_set(G_OBJECT(sizeChanger), "left", cfgParams.crop.left, "right", cfgParams.crop.right, "top", cfgParams.crop.top, "bottom", cfgParams.crop.bottom, NULL); - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: set new size: left" << cfgParams.crop.left << "right=" << cfgParams.crop.right << "top=" << cfgParams.crop.top << "bottom" << cfgParams.crop.bottom; - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: configureElements OK"; - return true; - - } - - bool linkElements() - { - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to add elements to pipeline..... "; - /* we add all elements into the pipeline */ - gst_bin_add_many (GST_BIN (pipeline), - source, rtpDepay, parser, decoder, sizeChanger, convert, sink, NULL); - - gboolean result; - - if (jitterBuff != nullptr) - { - result = gst_bin_add(GST_BIN(pipeline), jitterBuff); - if (!result) { yCError(H264CARRIER) << "H264Decoder: Error adding jitterBuff to the bin"; return false; } - } - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: elements have been added in pipeline!"; - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link_convert2next..... 
"; - result = link_convert2next(convert, sink); - if (!result) { yCError(H264CARRIER) << "H264Decoder: Error linking converter to sink "; return false; } - - /* autovideosrc ! "video/x-raw, width=640, height=480, format=(string)I420" ! videoconvert ! 'video/x-raw, format=(string)RGB' ! yarpdevice ! glimagesink */ - - if (jitterBuff) - { - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link videosrc to rtpjitterBuffer....."; - result = link_videosrc2nextWithCaps(source, jitterBuff); - if (!result){ yCError(H264CARRIER) << "H264Decoder: Error linking videosrc to rtpjitterBuffer "; return false;} - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link jitterBuff to rtpDapay....."; - result = gst_element_link(jitterBuff, rtpDepay); - if (!result) { yCError(H264CARRIER) << "H264Decoder: Error linking jitterBuff to rtpDapay "; return false; } - - } - else - { - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to videosrc to rtpDepay"; - result = link_videosrc2nextWithCaps(source, rtpDepay); - if (!result) { yCError(H264CARRIER) << "H264Decoder: Error linking videosrc to rtpDepay "; return false; } - - } - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link all other elements....."; - gst_element_link_many(rtpDepay, parser, decoder, sizeChanger, convert, NULL); - - yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: linkElements OK"; - return true; - } - - -}; - - - -#define GET_HELPER(x) (*((H264DecoderHelper*)(x))) - -H264Decoder::H264Decoder(h264Decoder_cfgParamters &config) : - sysResource(new H264DecoderHelper(&mutex, &semaphore)), - cfg(config) -{ -} - -bool H264Decoder::init() -{ - H264DecoderHelper &helper = GET_HELPER(sysResource); - if(!helper.istantiateElements(cfg)) - { - yCError(H264CARRIER) << "H264Decoder: Error in istantiateElements"; - return false; - } - - if(!helper.configureElements(cfg)) - { - yCError(H264CARRIER) << "Error in configureElements"; - return false; - } - - if(!helper.linkElements()) - { - yCError(H264CARRIER) << 
"Error in linkElements"; - return false; - } - - yCDebug(H264CARRIER) << "H264Decoder-GSTREAMER: init ok"; - return true; - -} - - -bool H264Decoder::start() -{ - H264DecoderHelper &helper = GET_HELPER(sysResource); - gst_element_set_state (helper.pipeline, GST_STATE_PLAYING); - yCDebug(H264CARRIER) << "H264Decoder: pipeline started!"; - - return true; - -} - -bool H264Decoder::stop() -{ - H264DecoderHelper &helper = GET_HELPER(sysResource); - gst_element_set_state (helper.pipeline, GST_STATE_NULL); - gst_bus_set_sync_handler(gst_pipeline_get_bus (GST_PIPELINE (helper.pipeline)), nullptr, nullptr, nullptr); - yCDebug(H264CARRIER) << "H264Decoder: deleting pipeline"; - gst_object_unref (GST_OBJECT (helper.pipeline)); - return true; -} - -H264Decoder::~H264Decoder() -{ - stop(); - delete &GET_HELPER(sysResource); - - -} - -ImageOf & H264Decoder::getLastFrame() -{ - H264DecoderHelper &helper = GET_HELPER(sysResource); - helper.gst_cbk_data.isNew = false; - helper.gst_cbk_data.isReq = false; - return helper.myframe; -} - -bool H264Decoder::newFrameIsAvailable() -{ - H264DecoderHelper &helper = GET_HELPER(sysResource); - return helper.gst_cbk_data.isNew; -} - -int H264Decoder::getLastFrameSize() -{ - H264DecoderHelper &helper = GET_HELPER(sysResource); - return (helper.myframe.width() * helper.myframe.height() * 3); -} - -void H264Decoder::setReq() -{ - H264DecoderHelper &helper = GET_HELPER(sysResource); - helper.gst_cbk_data.isReq = true; - -} diff --git a/src/carriers/h264_carrier/H264Decoder.h b/src/carriers/h264_carrier/H264Decoder.h deleted file mode 100644 index 2cc367c8297..00000000000 --- a/src/carriers/h264_carrier/H264Decoder.h +++ /dev/null @@ -1,55 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#ifndef H264DECODER_INC -#define H264DECODER_INC - -#include -#include -#include - -struct h264Decoder_cfgParamters -{ - h264Decoder_cfgParamters() : - crop{0,0,0,0}, - 
fps_max(0), - remotePort(-1) - {} - - struct - { - int left; //number of pixel to crop from left - int right; //number of pixel to crop from right - int top; //number of pixel to crop from top - int bottom; //number of pixel to crop from bottom - } crop; - - int fps_max; //max value of fps. it is imposed by gstreamer - int remotePort; // the port on which the server send data - bool removeJitter; //If true, the carrier reorders and removes duplicate RTP packets as they are received from a network source. -}; - -class H264Decoder -{ -private: - void *sysResource; - h264Decoder_cfgParamters cfg; - -public: - std::mutex mutex ; //==>create functions to work with it - yarp::os::Semaphore semaphore; - - H264Decoder(h264Decoder_cfgParamters &config); - ~H264Decoder(); - bool init(); - bool start(); - bool stop(); - yarp::sig::ImageOf& getLastFrame(); - int getLastFrameSize(); - bool newFrameIsAvailable(); - void setReq(); -}; - -#endif diff --git a/src/carriers/h264_carrier/H264LogComponent.cpp b/src/carriers/h264_carrier/H264LogComponent.cpp deleted file mode 100644 index 457589d81ad..00000000000 --- a/src/carriers/h264_carrier/H264LogComponent.cpp +++ /dev/null @@ -1,13 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#include "H264LogComponent.h" - -YARP_LOG_COMPONENT(H264CARRIER, - "yarp.carrier.h264", - yarp::os::Log::minimumPrintLevel(), - yarp::os::Log::LogTypeReserved, - yarp::os::Log::printCallback(), - nullptr) diff --git a/src/carriers/h264_carrier/H264LogComponent.h b/src/carriers/h264_carrier/H264LogComponent.h deleted file mode 100644 index 7b38a5c54d7..00000000000 --- a/src/carriers/h264_carrier/H264LogComponent.h +++ /dev/null @@ -1,13 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#ifndef YARP_H264LOGCOMPONENT_H -#define YARP_H264LOGCOMPONENT_H - -#include - 
-YARP_DECLARE_LOG_COMPONENT(H264CARRIER) - -#endif // YARP_H264LOGCOMPONENT_H diff --git a/src/carriers/h264_carrier/H264Stream.cpp b/src/carriers/h264_carrier/H264Stream.cpp deleted file mode 100644 index 3fb69ef19fd..00000000000 --- a/src/carriers/h264_carrier/H264Stream.cpp +++ /dev/null @@ -1,231 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#include "H264Stream.h" -#include "H264LogComponent.h" - -#include -#include -#include - -#include -#include - - -//#define debug_time 1 - -#ifdef debug_time - #include - #define DBG_TIME_PERIOD_PRINTS 10 //10 sec -#endif - - -using namespace yarp::os; -using namespace yarp::sig; - -H264Stream::H264Stream(h264Decoder_cfgParamters &config) : - delegate(nullptr), - phase(0), - cursor(nullptr), - remaining(0), - decoder(nullptr), - cfg(config) -{} - -H264Stream::~H264Stream() -{ - delete decoder; - delete delegate; -} - - - -bool H264Stream::setStream(yarp::os::impl::DgramTwoWayStream *stream) -{ - delegate = stream; - if(nullptr == delegate) - { - return false; - } - return true; -} - -void H264Stream::start() -{ - decoder = new H264Decoder(this->cfg); - decoder->init(); - decoder->start(); -} - -InputStream& H264Stream::getInputStream() -{ - return *this; -} - -OutputStream& H264Stream::getOutputStream() -{ - return *this; -} - -//using yarp::os::OutputStream::write; - - -//using yarp::os::InputStream::read; - -bool H264Stream::setReadEnvelopeCallback(InputStream::readEnvelopeCallbackType callback, void* data) -{ - return true; -} - -yarp::conf::ssize_t H264Stream::read(Bytes& b) -{ - -#ifdef debug_time - static bool isFirst = true; - double start_time = Time::now(); - double start_timeCopy; - double end_time=0; - static double last_call; - static double sumOf_timeBetweenCalls=0; - - static double sumOf_timeOnMutex = 0; - static double sumOf_timeOfCopyPerPahse[5] ={0}; - static uint32_t count=0; - static uint32_t countPerPhase[5]={0}; 
- #define MAX_COUNT 100 - - - if(isFirst) - { - last_call = start_time; - isFirst = false; - } - else - { - sumOf_timeBetweenCalls+=(start_time -last_call); - last_call = start_time; - } - - -#endif - - if (remaining==0) - { - if (phase==1) - { - phase = 2; - cursor = (char*)(img.getRawImage()); - remaining = img.getRawImageSize(); - } else if (phase==3) - { - phase = 4; - cursor = nullptr; - remaining = 0; - } else - { - phase = 0; - } - } - while (phase==0) - { - decoder->mutex.lock(); - int len = 0; - if(decoder->newFrameIsAvailable()) - { - ImageOf & img_dec = decoder->getLastFrame(); - img.copy(img_dec); - len = decoder->getLastFrameSize(); - decoder->mutex.unlock(); - #ifdef debug_time - end_time = Time::now(); - sumOf_timeOnMutex +=(end_time - start_time); - count++; - if(count>=MAX_COUNT) - { - yCDebug(H264CARRIER, - "STREAM On %d times: timeOnMutex is long %.6f sec", - MAX_COUNT, (sumOf_timeOnMutex/MAX_COUNT) ); - for(int x=0; x<5; x++) - { - yCDebug(H264CARRIER, - "STREAM: phase:%d, count=%u, time=%.6f sec", - x, - countPerPhase[x], - ((countPerPhase[x]==0) ? 
0: sumOf_timeOfCopyPerPahse[x]/countPerPhase[x]) ); - countPerPhase[x] = 0; - sumOf_timeOfCopyPerPahse[x] = 0; - } - yCDebug(H264CARRIER, "sleep=%.6f", sumOf_timeBetweenCalls/count); - yCDebug(H264CARRIER); - count = 0; - isFirst = true; - sumOf_timeOnMutex = 0; - sumOf_timeBetweenCalls = 0; - } - #endif - - } - else - { - yCTrace(H264CARRIER, "h264Stream::read has been called but no frame is available!!"); - phase = 0; - remaining = 0; - cursor = nullptr; - decoder->setReq(); - decoder->mutex.unlock(); - decoder->semaphore.waitWithTimeout(1); - return 0; - } - - yCTrace(H264CARRIER, "Length is \"%d\"", len); - - imgHeader.setFromImage(img); - phase = 1; - cursor = (char*)(&imgHeader); - remaining = sizeof(imgHeader); - } - - if (remaining>0) - { - size_t allow = remaining; - if (b.length()getInputStream().read(b); - yCTrace(H264CARRIER, "Read %zu bytes", result); - if (result>0) - { - remaining-=result; - yCTrace(H264CARRIER, "%zu bytes of meat", result); - return result; - } - } - } - return -1; -} - - -void H264Stream::write(const Bytes& b) -{ - delegate->getOutputStream().write(b); -} diff --git a/src/carriers/h264_carrier/H264Stream.h b/src/carriers/h264_carrier/H264Stream.h deleted file mode 100644 index d60330adee8..00000000000 --- a/src/carriers/h264_carrier/H264Stream.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) - * SPDX-License-Identifier: BSD-3-Clause - */ - -#ifndef H264STREAM_INC -#define H264STREAM_INC - -#include -#include -#include -#include "H264Decoder.h" -#include - - -class H264Stream : - public yarp::os::impl::DgramTwoWayStream -{ -private: - - DgramTwoWayStream *delegate; - yarp::sig::ImageOf img; - yarp::sig::ImageNetworkHeader imgHeader; - int phase; - char *cursor; - size_t remaining; - H264Decoder *decoder; - h264Decoder_cfgParamters cfg; -public: - H264Stream(h264Decoder_cfgParamters &config); - - virtual ~H264Stream(); - - bool setStream(yarp::os::impl::DgramTwoWayStream 
*stream); - - void start(); - - InputStream& getInputStream() override; - OutputStream& getOutputStream() override; - - using yarp::os::OutputStream::write; - void write(const yarp::os::Bytes& b) override; - - using yarp::os::InputStream::read; - yarp::conf::ssize_t read(yarp::os::Bytes& b) override; - - bool setReadEnvelopeCallback(InputStream::readEnvelopeCallbackType callback, void* data) override; - -}; - -#endif diff --git a/src/yarpgstreamerplugins/CMakeLists.txt b/src/yarpgstreamerplugins/CMakeLists.txt new file mode 100644 index 00000000000..28cdf0e562d --- /dev/null +++ b/src/yarpgstreamerplugins/CMakeLists.txt @@ -0,0 +1,11 @@ +yarp_dependent_option (YARP_COMPILE_gstreamerplugins "Do you want to compile gstreamerplugins?" OFF + "YARP_HAS_GObject;YARP_HAS_GLIB2;YARP_HAS_GStreamer;YARP_HAS_GStreamerPluginsBase" OFF +) + +if(YARP_COMPILE_gstreamerplugins) + +add_subdirectory(videosource) +add_subdirectory(videosink) +add_subdirectory(videopassthrough) + +endif() \ No newline at end of file diff --git a/src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt b/src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt new file mode 100644 index 00000000000..e17c8bb58a2 --- /dev/null +++ b/src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt @@ -0,0 +1,48 @@ +cmake_minimum_required(VERSION 3.10) + +project(gstyarpvideopassthrough LANGUAGES CXX) + +# Include the directories for the required packages +include_directories(${GSTREAMER_INCLUDE_DIRS} ${YARP_INCLUDE_DIRS}) + +# Set the source files +set(SOURCES yarpVideoPassthrough.cpp yarpVideoPassthrough.h) + +# Set the output directory +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) + +# Add the library target +add_library(gstyarpvideopassthrough SHARED ${SOURCES}) + +# Link the required libraries +target_link_libraries(gstyarpvideopassthrough PRIVATE ${GOBJECT_LIBRARIES}) +target_include_directories(gstyarpvideopassthrough SYSTEM PRIVATE ${GOBJECT_INCLUDE_DIR}) + 
+target_link_libraries(gstyarpvideopassthrough PRIVATE ${GLIB2_LIBRARIES}) +target_include_directories(gstyarpvideopassthrough SYSTEM PRIVATE ${GLIB2_INCLUDE_DIR}) + +target_link_libraries(gstyarpvideopassthrough PRIVATE ${GSTREAMER_LIBRARY}) +target_include_directories(gstyarpvideopassthrough SYSTEM PRIVATE ${GSTREAMER_INCLUDE_DIRS}) + +target_link_libraries(gstyarpvideopassthrough PRIVATE ${GSTREAMER_APP_LIBRARY}) +target_include_directories(gstyarpvideopassthrough SYSTEM PRIVATE ${GSTREAMER_app_INCLUDE_DIR}) + +target_link_libraries(gstyarpvideopassthrough PRIVATE + YARP::YARP_os + YARP::YARP_sig + YARP::YARP_dev + YARP::YARP_init + gstvideo-1.0 +) + +# Set the properties for the shared library +set_target_properties(gstyarpvideopassthrough PROPERTIES +# VERSION 1.0 +# SOVERSION 1 + LIBRARY_OUTPUT_NAME "gstyarpvideopassthrough" +) + +# Install the plugin to the specified directory +install(TARGETS gstyarpvideopassthrough + LIBRARY DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/gstreamer-1.0 +) diff --git a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp new file mode 100644 index 00000000000..bf143135894 --- /dev/null +++ b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp @@ -0,0 +1,265 @@ +#include "yarp/os/Time.h" +#include "yarp/os/Network.h" +#include "yarp/os/Log.h" +#include "yarp/os/LogStream.h" + +#include +#include +#include +#include + +#include "yarpVideoPassthrough.h" + +#include +#include +#include + +enum +{ + PROP_0, + PROP_YARPNAME, + PROP_YARPVERBOSELEVEL +}; + +GST_DEBUG_CATEGORY_STATIC(yarp_video_passthrough_debug); +#define GST_CAT_DEFAULT yarp_video_passthrough_debug + +// Define the structures for the class and instance +typedef struct _GstYarpVideoPassthrough +{ + GstVideoFilter parent; + yarp::os::Network* yarpnet = nullptr; + std::string s_name; + int verbosity_level = 0; + double prev_time = yarp::os::Time::now(); + int frame_counter=0; +} 
GstYarpVideoPassthrough; + +typedef struct _GstYarpVideoPassthroughClass +{ + GstVideoFilterClass parent_class; +} GstYarpVideoPassthroughClass; + +G_DEFINE_TYPE(GstYarpVideoPassthrough, gst_yarp_video_passthrough, GST_TYPE_VIDEO_FILTER) +#define GST_MY_PLUGIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), gst_yarp_video_passthrough_get_type(), GstYarpVideoPassthrough)) + +// G_DEFINE_TYPE(GstYarpVideoPassthrough, gst_yarp_video_passthrough, GST_TYPE_VIDEO_SINK) + +// #define GST_TYPE_GRAY_FILTER (gst_gray_filter_get_type()) + +// G_DECLARE_FINAL_TYPE(GstYarpVideoPassthrough, gst_yarp_video_passthrough, GST, GRAY_FILTER, GstBaseTransform) + +/* Pads */ + +#define MY_SOURCE_CAPS \ + "video/x-raw, " \ + "format=(string){RGB,I420,NV12,YUY2};" \ + "video/x-h264, " \ + "stream-format=(string){avc,byte-stream}," \ + "alignment=(string){au,nal};" \ + "video/x-h265, " \ + "stream-format=(string){avc,byte-stream}," \ + "alignment=(string){au,nal}" + +static GstStaticPadTemplate gst_yarp_video_passthrough_sink_template = GST_STATIC_PAD_TEMPLATE("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS(MY_SOURCE_CAPS)); + +static GstStaticPadTemplate gst_yarp_video_passthrough_src_template = GST_STATIC_PAD_TEMPLATE("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS(MY_SOURCE_CAPS)); + +/* Function prototypes */ +static GstFlowReturn gst_yarp_video_passthrough_transform_frame(GstVideoFilter* filter, GstVideoFrame* inframe, GstVideoFrame* outframe); +static void gst_yarp_video_passthrough_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec); +static void gst_yarp_video_passthrough_get_property(GObject* object, guint prop_id, GValue* value, GParamSpec* pspec); +//static gboolean gst_yarp_video_passthrough_start(GstBaseTransform* elem); +//static gboolean gst_yarp_video_passthrough_stop(GstBaseTransform* elem); + + /* Initialize the class */ +static void gst_yarp_video_passthrough_class_init(GstYarpVideoPassthroughClass* klass) +{ + GObjectClass* 
gobject_class = (GObjectClass*)klass; + GstElementClass* gstelement_class = GST_ELEMENT_CLASS(klass); + GstVideoFilterClass* video_filter_class = GST_VIDEO_FILTER_CLASS(klass); + video_filter_class->transform_frame = GST_DEBUG_FUNCPTR(gst_yarp_video_passthrough_transform_frame); + // GstBaseTransformClass* gstbase_class = GST_BASE_TRANSFORM_CLASS(klass); + + gst_element_class_add_pad_template(GST_ELEMENT_CLASS(klass), + gst_static_pad_template_get(&gst_yarp_video_passthrough_sink_template)); + + gst_element_class_add_pad_template(GST_ELEMENT_CLASS(klass), + gst_static_pad_template_get(&gst_yarp_video_passthrough_src_template)); + + gst_element_class_set_static_metadata(gstelement_class, + "YARP Test Sink", + "Sink/Video", + "Sinks", + "Your Name "); + + gobject_class->set_property = gst_yarp_video_passthrough_set_property; + gobject_class->get_property = gst_yarp_video_passthrough_get_property; + //video_filter_class->start = gst_yarp_video_passthrough_start; + // gstbase_class->stop = gst_yarp_video_passthrough_stop; + g_object_class_install_property(gobject_class, PROP_YARPNAME, g_param_spec_string("yarpname", "yarpname (string)", "Name of the component", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_YARPVERBOSELEVEL, g_param_spec_int("yarpverbose", "yarpverbose (int)", "Verbosity level", 0,100, 0, G_PARAM_READWRITE)); + + GST_DEBUG_CATEGORY_INIT(yarp_video_passthrough_debug, "yarpvideopassthrough", 0, "Yarp Video Passthrough"); +} + +static void gst_yarp_video_passthrough_init(GstYarpVideoPassthrough* filter) +{ + filter->yarpnet = new yarp::os::Network; +} + +static void gst_yarp_video_passthrough_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec) +{ + _GstYarpVideoPassthrough* self = GST_MY_PLUGIN(object); + + switch (prop_id) { + case PROP_YARPNAME: + self->s_name = (g_value_get_string(value)); + yCInfo(YVP_COMP, "set name: %s", self->s_name.c_str()); + break; + case PROP_YARPVERBOSELEVEL: + 
self->verbosity_level = (g_value_get_int(value)); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } + yTrace(); +} + +static void gst_yarp_video_passthrough_get_property(GObject* object, guint prop_id, GValue* value, GParamSpec* pspec) +{ + _GstYarpVideoPassthrough* self = GST_MY_PLUGIN(object); + + switch (prop_id) { + case PROP_YARPNAME: + g_value_set_string(value, self->s_name.c_str()); + yCInfo(YVP_COMP, "get name: %s", self->s_name.c_str()); + break; + case PROP_YARPVERBOSELEVEL: + g_value_set_int(value, self->verbosity_level); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } +} + +/* Frame methods */ +static GstFlowReturn gst_yarp_video_passthrough_transform_frame(GstVideoFilter* filter, GstVideoFrame* inframe, GstVideoFrame* outframe) +{ + if (inframe == nullptr || inframe->buffer ==nullptr) + { + printf ("----------------\n"); + return GST_FLOW_ERROR; + } + if (outframe == nullptr || outframe->buffer == nullptr) + { + printf ("44444444444444444\n"); + return GST_FLOW_ERROR; + } + + _GstYarpVideoPassthrough* self = GST_MY_PLUGIN(filter); + + double time1 = yarp::os::Time::now(); + gst_video_frame_copy(outframe, inframe); + double time2 = yarp::os::Time::now(); + + double diff = time1 - self->prev_time; + self->prev_time = time1; + + switch (self->verbosity_level) + { + case 0: + break; + case 1: + //yCInfo(YVP_COMP, "%s Timestamp: %+3.3f %+3.3f", s_name.c_str(), time1, time2 - time1); + printf("%s %d Timestamp: %+3.3f %+3.3f\n", self->s_name.c_str(), self->frame_counter, time1, time2 - time1); + break; + case 2: + //yCInfo(YVP_COMP, "%s Timestamp: %+3.3f %+3.3f %+3.3f", s_name.c_str(), time1, time2 - time1, diff); + printf("%s %d Timestamp: %+3.3f %+3.3f %+3.3f\n", self->s_name.c_str(), self->frame_counter, time1, time2 - time1, diff); + break; + case 3: + if (diff > 0.04) + //yCInfo(YVP_COMP, "%s Timestamp: %+3.3f %+3.3f %+3.3f", s_name.c_str(), time1, time2 - time1, 
diff); + printf("%s %d Timestamp: %+3.3f %+3.3f %+3.3f\n", self->s_name.c_str(), self->frame_counter, time1, time2 - time1, diff); + break; + case 4: + if (diff > 0.035 || diff < 0.025) + // yCInfo(YVP_COMP, "%s Timestamp: %+3.3f %+3.3f %+3.3f", s_name.c_str(), time1, time2 - time1, diff); + printf("%s %d Timestamp: %+3.3f %+3.3f %+3.3f\n", self->s_name.c_str(), self->frame_counter, time1, time2 - time1, diff); + break; + case 5: + if (self->frame_counter%(30*10)==0) + printf("%s %d Timestamp: %+3.3f %+3.3f %+3.3f\n", self->s_name.c_str(), self->frame_counter, time1, time2 - time1, diff); + break; + } + + + + + + + /* + GstMapInfo info; + gst_buffer_map(buf, &info, GST_MAP_WRITE); + + guint8* data = info.data; + for (guint i = 0; i < info.size; i += 3) { + guint8 gray = (data[i] + data[i + 1] + data[i + 2]) / 3; + data[i] = data[i + 1] = data[i + 2] = gray; + } + + gst_buffer_unmap(buf, &info); + */ + + self->frame_counter++; + + return GST_FLOW_OK; +} + +/* Start/Stop methods */ +static gboolean gst_yarp_video_passthrough_start(GstBaseTransform* elem) +{ + return TRUE; +} + +static gboolean gst_yarp_video_passthrough_stop(GstBaseTransform* elem) +{ + _GstYarpVideoPassthrough* self = GST_MY_PLUGIN(elem); + if (self->yarpnet) + { + delete self->yarpnet; + self->yarpnet = nullptr; + } + return TRUE; +} + + /* Register the plugin */ +static gboolean yarp_video_passthrough_init(GstPlugin* plugin) +{ + return gst_element_register(plugin, "yarpvideopassthrough", GST_RANK_NONE, gst_yarp_video_passthrough_get_type()); +} + +#define PACKAGE "CCC" + +GST_PLUGIN_DEFINE( + GST_VERSION_MAJOR, + GST_VERSION_MINOR, + yarpvideopassthrough, + "Grayscale video filter", + yarp_video_passthrough_init, + "1.0", + "LGPL", + "GStreamer", + "https://gstreamer.freedesktop.org/") + + +///---------------------------------------------------------------------- diff --git a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h 
b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h new file mode 100644 index 00000000000..282a785bc36 --- /dev/null +++ b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h @@ -0,0 +1,14 @@ +#ifndef GST_YARP_VIDEO_PASSTHROUGH_H +#define GST_YARP_VIDEO_PASSTHROUGH_H + +#include +#include +#include + +#include +#include +#include + +YARP_LOG_COMPONENT(YVP_COMP, "yarp.gstreamerplugin.yarpvideopassthrough") + +#endif // GST_YARP_VIDEO_PASSTHROUGH_H diff --git a/src/yarpgstreamerplugins/videosink/CMakeLists.txt b/src/yarpgstreamerplugins/videosink/CMakeLists.txt new file mode 100644 index 00000000000..de3e8af205d --- /dev/null +++ b/src/yarpgstreamerplugins/videosink/CMakeLists.txt @@ -0,0 +1,48 @@ +cmake_minimum_required(VERSION 3.10) + +project(gstyarpvideosink LANGUAGES CXX) + +# Include the directories for the required packages +include_directories(${GSTREAMER_INCLUDE_DIRS} ${YARP_INCLUDE_DIRS}) + +# Set the source files +set(SOURCES yarpVideoSink.cpp yarpVideoSink.h) + +# Set the output directory +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) + +# Add the library target +add_library(gstyarpvideosink SHARED ${SOURCES}) + +# Link the required libraries +target_link_libraries(gstyarpvideosink PRIVATE ${GOBJECT_LIBRARIES}) +target_include_directories(gstyarpvideosink SYSTEM PRIVATE ${GOBJECT_INCLUDE_DIR}) + +target_link_libraries(gstyarpvideosink PRIVATE ${GLIB2_LIBRARIES}) +target_include_directories(gstyarpvideosink SYSTEM PRIVATE ${GLIB2_INCLUDE_DIR}) + +target_link_libraries(gstyarpvideosink PRIVATE ${GSTREAMER_LIBRARY}) +target_include_directories(gstyarpvideosink SYSTEM PRIVATE ${GSTREAMER_INCLUDE_DIRS}) + +target_link_libraries(gstyarpvideosink PRIVATE ${GSTREAMER_APP_LIBRARY}) +target_include_directories(gstyarpvideosink SYSTEM PRIVATE ${GSTREAMER_app_INCLUDE_DIR}) + +target_link_libraries(gstyarpvideosink PRIVATE + YARP::YARP_os + YARP::YARP_sig + YARP::YARP_dev + YARP::YARP_init + gstvideo-1.0 +) + +# Set the 
properties for the shared library +set_target_properties(gstyarpvideosink PROPERTIES +# VERSION 1.0 +# SOVERSION 1 + LIBRARY_OUTPUT_NAME "gstyarpvideosink" +) + +# Install the plugin to the specified directory +install(TARGETS gstyarpvideosink + LIBRARY DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/gstreamer-1.0 +) diff --git a/src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp b/src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp new file mode 100644 index 00000000000..35a137e4529 --- /dev/null +++ b/src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp @@ -0,0 +1,330 @@ +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "yarpVideoSink.h" + +GST_DEBUG_CATEGORY_STATIC(yarp_video_sink_debug); +#define GST_CAT_DEFAULT yarp_video_sink_debug + +/* Structure to hold all relevant information for this element */ +typedef struct _GstYarpVideoSink +{ + GstVideoSink parent; +} GstYarpVideoSink; + +typedef struct _GstYarpVideoSinkClass +{ + GstVideoSinkClass parent_class; +} GstYarpVideoSinkClass; + + +G_DEFINE_TYPE(GstYarpVideoSink, gst_yarp_video_sink, GST_TYPE_VIDEO_SINK); + +/* Yarp stuff */ +yarp_handler_class* yarp_handler = nullptr; + +enum +{ + PROP_0, + PROP_LOCAL_PORTNAME, + PROP_REMOTE_PORTNAME, + PROP_CONNECTION_PROTO, + PROP_PORT_TYPE, + PROP_YARPVERBOSELEVEL +}; + +/* Function prototypes */ +static GstFlowReturn gst_yarp_video_sink_show_frame(GstVideoSink* sink, GstBuffer* buf); +static void gst_yarp_video_sink_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec); +static void gst_yarp_video_sink_get_property(GObject* object, guint prop_id, GValue* value, GParamSpec* pspec); +static gboolean gst_yarp_video_sink_start(GstBaseSink* sink); +static gboolean gst_yarp_video_sink_stop(GstBaseSink* sink); + +/* Initialize the class */ +static void gst_yarp_video_sink_class_init(GstYarpVideoSinkClass* klass) +{ + GObjectClass* gobject_class = (GObjectClass*)klass; + GstElementClass* 
gstelement_class = GST_ELEMENT_CLASS(klass); + GstVideoSinkClass* gstvideo_sink_class = GST_VIDEO_SINK_CLASS(klass); + GstBaseSinkClass* gstbase_sink_class = GST_BASE_SINK_CLASS(klass); + + gobject_class->set_property = gst_yarp_video_sink_set_property; + gobject_class->get_property = gst_yarp_video_sink_get_property; + gstbase_sink_class->start = gst_yarp_video_sink_start; + gstbase_sink_class->stop = gst_yarp_video_sink_stop; + + gst_element_class_set_static_metadata(gstelement_class, + "YARP Test Sink", + "Sink/Video", + "Sinks", + "Your Name "); + + gstvideo_sink_class->show_frame = GST_DEBUG_FUNCPTR(gst_yarp_video_sink_show_frame); + + g_object_class_install_property(gobject_class, PROP_LOCAL_PORTNAME, g_param_spec_string("localPortname", "localPortname (string)", "Name of the local port", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_REMOTE_PORTNAME, g_param_spec_string("remotePortname", "remotePortname (string)", "Name of the remote port to perform automatic connection (disabled by default)", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_CONNECTION_PROTO, g_param_spec_string("connectionProtocol", "connectionProtocol (string)", "Name of the protocol to performa automatic conenction (disabled by default)", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_PORT_TYPE, g_param_spec_string("portType", "portType (string)", "(default rgb)", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_YARPVERBOSELEVEL, g_param_spec_int("yarpverbose", "yarpverbose (int)", "Verbosity level", 0, 100, 0, G_PARAM_READWRITE)); + + GST_DEBUG_CATEGORY_INIT(yarp_video_sink_debug, "yarpvideosink", 0, "Yarp Video Sink"); + + //Define the sink capabilities + GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE( + "sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ( + "video/x-raw," + "format=(string){RGB};" + "video/x-h264," + 
"stream-format=(string){avc,byte-stream}," + "alignment=(string){au,nal};" + "video/x-h265," + "stream-format=(string){avc,byte-stream}," + "alignment=(string){au,nal}" + )); + gst_element_class_add_static_pad_template(gstelement_class, &sink_pad_template); +} + +static void gst_yarp_video_sink_init(GstYarpVideoSink* sink) +{ + yarp_handler = new yarp_handler_class; + yTrace(); +} + +/* Set/get Property methods */ +static void gst_yarp_video_sink_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec) +{ + switch (prop_id) + { + case PROP_YARPVERBOSELEVEL: + yarp_handler->verbosity_level = (g_value_get_int(value)); + break; + case PROP_LOCAL_PORTNAME: + yarp_handler->output_port_name = std::string(g_value_dup_string(value)); + yCInfo(YVSNK_COMP) << "Local port name set to:" << yarp_handler->output_port_name; + break; + case PROP_REMOTE_PORTNAME: + yarp_handler->remote_port_name = std::string(g_value_dup_string(value)); + yCInfo(YVSNK_COMP) << "Remote port name set to:" << yarp_handler->remote_port_name; + break; + case PROP_CONNECTION_PROTO: + yarp_handler->connection_protocol = std::string(g_value_dup_string(value)); + yCInfo(YVSNK_COMP) << "Connection protocol set to:" << yarp_handler->connection_protocol; + break; + case PROP_PORT_TYPE: + if (strcmp(g_value_dup_string(value), "rgb")==0) + { + yarp_handler->port_type = yarp_handler_class::port_type_enum::RGB_TYPE; + yCInfo(YVSNK_COMP) << "Port type (on request) = rgb"; + } + else if (strcmp(g_value_dup_string(value), "bin")==0) + { + yarp_handler->port_type = yarp_handler_class::port_type_enum::BINARY_TYPE; + yCInfo(YVSNK_COMP) << "Port type (on request) = bin"; + } + else + { + yarp_handler->port_type = yarp_handler_class::port_type_enum::RGB_TYPE; + yCInfo(YVSNK_COMP) << "Port type (default value) = rgb"; + } + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } + yTrace(); +} + +static void gst_yarp_video_sink_get_property(GObject* object, 
guint prop_id, GValue* value, GParamSpec* pspec) +{ + switch (prop_id) + { + case PROP_YARPVERBOSELEVEL: + g_value_set_int(value, yarp_handler->verbosity_level); + break; + case PROP_LOCAL_PORTNAME: + g_value_set_string(value, yarp_handler->output_port_name.c_str()); + break; + case PROP_REMOTE_PORTNAME: + g_value_set_string(value, yarp_handler->remote_port_name.c_str()); + break; + case PROP_CONNECTION_PROTO: + g_value_set_string(value, yarp_handler->connection_protocol.c_str()); + break; + case PROP_PORT_TYPE: + if (yarp_handler->port_type == yarp_handler_class::port_type_enum::RGB_TYPE) + { + g_value_set_string(value, "rgb"); + } + else if (yarp_handler->port_type == yarp_handler_class::port_type_enum::BINARY_TYPE) + { + g_value_set_string(value, "bin"); + } + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } +} + +/* Start/Stop methods */ +static gboolean gst_yarp_video_sink_start(GstBaseSink* sink) +{ + bool b = yarp_handler->output_port.open(yarp_handler->output_port_name); + if (!b) + { + yCError(YVSNK_COMP) << "Unable to open port: " << yarp_handler->output_port_name; + return FALSE; + } + if (!yarp_handler->remote_port_name.empty()) + { + b = yarp::os::Network::connect(yarp_handler->output_port_name, yarp_handler->remote_port_name, yarp_handler->connection_protocol); + if (!b) + { + yCError(YVSNK_COMP) << "Could not perform connection between: " << yarp_handler->output_port_name + << " and " << yarp_handler->remote_port_name << " via " << yarp_handler->connection_protocol; + return FALSE; + } + } + return TRUE; +} + +static gboolean gst_yarp_video_sink_stop(GstBaseSink* sink) +{ + yarp_handler->output_port.close(); + + if (yarp_handler) + { + delete yarp_handler; + yarp_handler = nullptr; + } + return TRUE; +} + +/* Frame methods */ +static GstFlowReturn gst_yarp_video_sink_show_frame(GstVideoSink* sink, GstBuffer* buf) +{ + GstMapInfo info; + if (gst_buffer_map(buf, &info, GST_MAP_READ)) + { + if 
(yarp_handler->verbosity_level>0) + { + yCDebug(YVSNK_COMP) << "Received frame of size: " << info.size << " bytes"; + } + if (yarp_handler->output_port.getOutputCount() > 0) + { + //Get info about the stream + GstCaps *caps = gst_pad_get_current_caps(GST_BASE_SINK_PAD(sink)); + if (caps==nullptr) + { + yCError(YVSNK_COMP) << "gst_pad_get_current_caps() failed"; + return GST_FLOW_ERROR; + } + const GstStructure* structure = gst_caps_get_structure(caps,0); + const gchar* format = gst_structure_get_name(structure); + + //if the stream contains rgb frames... + if (g_str_has_prefix (format, "video/x-raw")) + { + if (yarp_handler->port_type == yarp_handler_class::port_type_enum::RGB_TYPE) + { + int width = 0; + int height = 0; + gst_structure_get_int(structure, "width", &width); + gst_structure_get_int(structure, "height", &height); + //yCDebug(YVSNK_COMP) << width << height; + + static yarp::sig::ImageOf data; + //data.resize(width, height); + data.setQuantum(8); + data.setExternal(info.data, width, height); + yarp_handler->output_port.write(data); + } + else if (yarp_handler->port_type == yarp_handler_class::port_type_enum::BINARY_TYPE) + { + yarp::os::Bottle bot; + yarp::os::Value data_val(info.data,info.size); + bot.addInt64(info.size); + bot.add(data_val); + yarp_handler->output_port.write(bot); + } + else + { + yCError(YVSNK_COMP) << "Invalid yarp output format. Please choose either yarp image or binary."; + } + } + //if the stream contains encoded binary data + else if (g_str_has_prefix (format, "video/x-h264") || + g_str_has_prefix (format, "video/x-h265")) + { + if (yarp_handler->port_type == yarp_handler_class::port_type_enum::BINARY_TYPE) + { + yarp::os::Bottle bot; + yarp::os::Value data_val(info.data, info.size); + bot.addInt64(info.size); + bot.add(data_val); + yarp_handler->output_port.write(bot); + } + else + { + yCError(YVSNK_COMP) << "The input frame is encoded. 
Only binary output is allowed on the yarp port."; + } + } + else + { + yCError(YVSNK_COMP) << "Invalid input stream. Only video/x-raw or video/x-h264 video/x-h265 is currently implemented."; + } + } + + gst_buffer_unmap(buf, &info); + } + return GST_FLOW_OK; +} + +/* Register the plugin */ +static gboolean yarp_video_sink_plugin_init(GstPlugin* plugin) +{ + return gst_element_register(plugin, "yarpvideosink", GST_RANK_NONE, gst_yarp_video_sink_get_type()); +} + +#define VERSION "1.0" +#define PACKAGE "BBB" + +GST_PLUGIN_DEFINE( + GST_VERSION_MAJOR, + GST_VERSION_MINOR, + yarpvideosink, + "Yarp Video Sink", + yarp_video_sink_plugin_init, + "1.0", + "LGPL", + "GStreamer", + "https://gstreamer.freedesktop.org/") + +// TO test it: +// export GST_PLUGIN_PATH=$GST_PLUGIN_PATH:C:\Software\iCubSoftware\yarp\build\bin\Release +// set GST_PLUGIN_PATH=%GST_PLUGIN_PATH%;C:\Software\iCubSoftware\yarp\build\bin\Release +// gst-inspect-1.0 yarpvideosinkd +// gst-launch-1.0 videotestsource !yarpvideosinkd +// gst-launch-1.0 yarptestsourced localPortname="/aaa" !videoconvert !x264enc !h264parse !avdec_h264 !videoconvert !autovideosink + +// grabber_yarp -> -> fast_tcp -> *porta_rgb_2_gs_image* -> sink +// telecamera -> h264enc -> blob2porta -> fast_tcp -> porta_blob_2_gs_image -> 264dec -> sink + +/* +C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstreamer-1.0.lib +C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstapp-1.0.lib +C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstbase-1.0.lib +C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstbase-1.0.lib +C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstvideo-1.0.lib +*/ diff --git a/src/yarpgstreamerplugins/videosink/yarpVideoSink.h b/src/yarpgstreamerplugins/videosink/yarpVideoSink.h new file mode 100644 index 00000000000..5ab44b688c2 --- /dev/null +++ b/src/yarpgstreamerplugins/videosink/yarpVideoSink.h @@ -0,0 +1,38 @@ +#ifndef GST_YARP_VIDEO_SINK_H +#define GST_YARP_VIDEO_SINK_H + +#include +#include +#include + +#include +#include +#include + 
+YARP_LOG_COMPONENT(YVSNK_COMP, "yarp.gstreamerplugin.yarpvideosink") + +class yarp_handler_class +{ +public: + enum port_type_enum + { + RGB_TYPE = 0, + BINARY_TYPE = 1 + } port_type; + yarp::os::Network yarpnet; + yarp::os::Port output_port; + std::string output_port_name = "/gstreamer/yarp_plugin:o"; + std::string remote_port_name = ""; + std::string connection_protocol = "fast_tcp"; + int verbosity_level = 0; + +public: + yarp_handler_class() + { + } + virtual ~yarp_handler_class() + { + } +}; + +#endif // GST_YARP_VIDEO_SINK_H \ No newline at end of file diff --git a/src/yarpgstreamerplugins/videosource/CMakeLists.txt b/src/yarpgstreamerplugins/videosource/CMakeLists.txt new file mode 100644 index 00000000000..e9ebbc61e1a --- /dev/null +++ b/src/yarpgstreamerplugins/videosource/CMakeLists.txt @@ -0,0 +1,48 @@ +cmake_minimum_required(VERSION 3.10) + +project(gstyarpvideosource LANGUAGES CXX) + +# Include the directories for the required packages +include_directories(${GSTREAMER_INCLUDE_DIRS} ${YARP_INCLUDE_DIRS}) + +# Set the source files +set(SOURCES yarpVideoSource.cpp yarpVideoSource.h) + +# Set the output directory +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) + +# Add the library target +add_library(gstyarpvideosource SHARED ${SOURCES}) + +# Link the required libraries +target_link_libraries(gstyarpvideosource PRIVATE ${GOBJECT_LIBRARIES}) +target_include_directories(gstyarpvideosource SYSTEM PRIVATE ${GOBJECT_INCLUDE_DIR}) + +target_link_libraries(gstyarpvideosource PRIVATE ${GLIB2_LIBRARIES}) +target_include_directories(gstyarpvideosource SYSTEM PRIVATE ${GLIB2_INCLUDE_DIR}) + +target_link_libraries(gstyarpvideosource PRIVATE ${GSTREAMER_LIBRARY}) +target_include_directories(gstyarpvideosource SYSTEM PRIVATE ${GSTREAMER_INCLUDE_DIRS}) + +target_link_libraries(gstyarpvideosource PRIVATE ${GSTREAMER_APP_LIBRARY}) +target_include_directories(gstyarpvideosource SYSTEM PRIVATE ${GSTREAMER_app_INCLUDE_DIR}) + 
+target_link_libraries(gstyarpvideosource PRIVATE + YARP::YARP_os + YARP::YARP_sig + YARP::YARP_dev + YARP::YARP_init + gstvideo-1.0 +) + +# Set the properties for the shared library +set_target_properties(gstyarpvideosource PROPERTIES +# VERSION 1.0 +# SOVERSION 1 + LIBRARY_OUTPUT_NAME "gstyarpvideosource" +) + +# Install the plugin to the specified directory +install(TARGETS gstyarpvideosource + LIBRARY DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/gstreamer-1.0 +) diff --git a/src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp b/src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp new file mode 100644 index 00000000000..7c4c6437e82 --- /dev/null +++ b/src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp @@ -0,0 +1,398 @@ +#include "yarp/os/Time.h" +#include "yarp/os/Network.h" +#include "yarp/os/Log.h" +#include "yarp/os/LogStream.h" + +#include +#include +#include +#include + +#include "yarpVideoSource.h" + +GST_DEBUG_CATEGORY_STATIC(gst_yarp_video_source_debug); +#define GST_CAT_DEFAULT gst_yarp_video_source_debug + +/* Structure to hold all relevant information for this element */ +typedef struct _GstYarpVideoSource +{ + GstPushSrc parent; + GstVideoInfo info; +} GstYarpVideoSource; + +typedef struct _GstYarpVideoSourceClass +{ + GstPushSrcClass parent_class; +} GstYarpVideoSourceClass; + +G_DEFINE_TYPE(GstYarpVideoSource, gst_yarp_video_source, GST_TYPE_PUSH_SRC); +//#define GST_TYPE_YARP_VIDEO_SOURCE (gst_yarp_video_source_get_type()) +//G_DECLARE_FINAL_TYPE(GstYarpVideoSource, gst_yarp_test_source, GST, YARP_VIDEO_SOURCE, GstPushSrc) + + +/* Yarp stuff */ +yarp_handler_class* yarp_handler = nullptr; + +enum +{ + PROP_0, + PROP_LOCAL_PORTNAME, + PROP_REMOTE_PORTNAME, + PROP_CONNECTION_PROTO, + PROP_PORT_TYPE, + PROP_YARPVERBOSELEVEL +}; + +/* Function prototypes */ +static void gst_yarp_video_source_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec); +static void gst_yarp_video_source_get_property(GObject* object, 
guint prop_id, GValue* value, GParamSpec* pspec); +static GstCaps* gst_yarp_video_source_get_caps(GstBaseSrc* src, GstCaps* filter); +static gboolean gst_yarp_video_source_set_caps(GstBaseSrc* src, GstCaps* caps); +static gboolean gst_yarp_video_source_start(GstBaseSrc* src); +static gboolean gst_yarp_video_source_stop(GstBaseSrc* src); + +static GstFlowReturn gst_yarp_video_source_fill(GstPushSrc* src, GstBuffer* buf); +static GstFlowReturn gst_yarp_video_source_alloc(GstPushSrc* src, GstBuffer** buf); + + +#define MY_SOURCE_CAPS \ + "video/x-raw, " \ + "width = (int) 640, " \ + "height = (int) 480, " \ + "format=(string){RGB};" \ + "video/x-h264, " \ + "stream-format=(string){avc,byte-stream}," \ + "alignment=(string){au,nal};" \ + "video/x-h265, " \ + "stream-format=(string){avc,byte-stream}," \ + "alignment=(string){au,nal}" + +/* +#define MY_SOURCE_CAPS \ + "video/x-raw, " \ + "format=(string) {RGB}, " //\ +// "width = (int) 640, " \ +// "height = (int) 480 " +*/ + +#define VTS_VIDEO_CAPS GST_VIDEO_CAPS_MAKE(GST_VIDEO_FORMATS_ALL) MY_SOURCE_CAPS + +/* +#define VTS_VIDEO_CAPS GST_VIDEO_CAPS_MAKE(GST_VIDEO_FORMATS_ALL) "," \ + "multiview-mode = { mono, left, right }" \ + ";" \ + "video/x-bayer, format=(string) { bggr, rggb, grbg, gbrg }, " \ + "width = " GST_VIDEO_SIZE_RANGE ", " \ + "height = " GST_VIDEO_SIZE_RANGE ", " \ + "framerate = " GST_VIDEO_FPS_RANGE ", " \ + "multiview-mode = { mono, left, right }" +*/ + +static GstStaticPadTemplate gst_video_test_src_template = GST_STATIC_PAD_TEMPLATE("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS(MY_SOURCE_CAPS)); + +/* Initialize the class */ +static void gst_yarp_video_source_class_init(GstYarpVideoSourceClass* klass) +{ + GObjectClass* gobject_class = (GObjectClass*)klass; + GstElementClass* gstelement_class = (GstElementClass*)klass; + GstBaseSrcClass* gstbasesrc_class = (GstBaseSrcClass*)klass; + GstPushSrcClass* gstpushsrc_class = (GstPushSrcClass*)klass; + + gobject_class->set_property = 
gst_yarp_video_source_set_property; + gobject_class->get_property = gst_yarp_video_source_get_property; + + gst_element_class_set_static_metadata(gstelement_class, + "YARP Test Source", + "Source/Video", + "Generates a video alternating between green and red", + "Your Name "); + + gstbasesrc_class->get_caps = gst_yarp_video_source_get_caps; + gstbasesrc_class->set_caps = gst_yarp_video_source_set_caps; + gstbasesrc_class->start = gst_yarp_video_source_start; + gstbasesrc_class->stop = gst_yarp_video_source_stop; + + //gstpushsrc_class->create = gst_yarp_video_source_create; + gstpushsrc_class->fill = gst_yarp_video_source_fill; + gstpushsrc_class->alloc = gst_yarp_video_source_alloc; + + g_object_class_install_property(gobject_class, PROP_LOCAL_PORTNAME, g_param_spec_string("localPortname", "localPortname (string)", "Name of the local port", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_REMOTE_PORTNAME, g_param_spec_string("remotePortname", "remotePortname (string)", "Name of the remote port to perform automatic connection (disabled by default)", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_CONNECTION_PROTO, g_param_spec_string("connectionProtocol", "connectionProtocol (string)", "Name of the protocol to performa automatic conenction (disabled by default)", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_PORT_TYPE, g_param_spec_string("portType", "portType (string)", "(default rgb)", NULL, G_PARAM_READWRITE)); + g_object_class_install_property(gobject_class, PROP_YARPVERBOSELEVEL, g_param_spec_int("yarpverbose", "yarpverbose (int)", "Verbosity level", 0, 100, 0, G_PARAM_READWRITE)); + + GST_DEBUG_CATEGORY_INIT(gst_yarp_video_source_debug, "yarpvideosource", 0, "YARP Video Source"); + + gst_element_class_add_static_pad_template(gstelement_class, &gst_video_test_src_template); +} + +static void gst_yarp_video_source_init(GstYarpVideoSource* src) +{ + yarp_handler = 
new yarp_handler_class; + yTrace(); + gst_base_src_set_format(GST_BASE_SRC(src), GST_FORMAT_TIME); +} + +/* Set/get Property methods */ +static void gst_yarp_video_source_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec) +{ + switch (prop_id) + { + case PROP_YARPVERBOSELEVEL: + yarp_handler->verbosity_level = (g_value_get_int(value)); + break; + case PROP_LOCAL_PORTNAME: + yarp_handler->input_port_name = std::string(g_value_dup_string(value)); + yCInfo(YVS_COMP) << "Local port name set to:" << yarp_handler->input_port_name; + break; + case PROP_REMOTE_PORTNAME: + yarp_handler->remote_port_name = std::string(g_value_dup_string(value)); + yCInfo(YVS_COMP) << "Remote port name set to:" << yarp_handler->remote_port_name; + break; + case PROP_CONNECTION_PROTO: + yarp_handler->connection_protocol = std::string(g_value_dup_string(value)); + yCInfo(YVS_COMP) << "Connection protocol set to:" << yarp_handler->connection_protocol; + break; + case PROP_PORT_TYPE: + if (strcmp(g_value_dup_string(value), "rgb") == 0) + { + yarp_handler->input_port_reader.set_port_type( yarp_handler_class::port_type_enum::RGB_TYPE ); + yCInfo(YVS_COMP) << "Port type (on request) = rgb"; + } + else if (strcmp(g_value_dup_string(value), "bin") == 0) + { + yarp_handler->input_port_reader.set_port_type(yarp_handler_class::port_type_enum::BINARY_TYPE); + yCInfo(YVS_COMP) << "Port type (on request) = bin"; + } + else + { + yarp_handler->input_port_reader.set_port_type(yarp_handler_class::port_type_enum::RGB_TYPE); + yCInfo(YVS_COMP) << "Port type (default value) = rgb"; + } + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } + yTrace(); +} + +static void gst_yarp_video_source_get_property(GObject* object, guint prop_id, GValue* value, GParamSpec* pspec) +{ + switch (prop_id) + { + case PROP_YARPVERBOSELEVEL: + g_value_set_int(value, yarp_handler->verbosity_level); + break; + case PROP_LOCAL_PORTNAME: + 
g_value_set_string(value, yarp_handler->input_port_name.c_str()); + break; + case PROP_REMOTE_PORTNAME: + g_value_set_string(value, yarp_handler->remote_port_name.c_str()); + break; + case PROP_CONNECTION_PROTO: + g_value_set_string(value, yarp_handler->connection_protocol.c_str()); + break; + case PROP_PORT_TYPE: + if (yarp_handler->input_port_reader.get_port_type() == yarp_handler_class::port_type_enum::RGB_TYPE) + { + g_value_set_string(value, "rgb"); + } + else if (yarp_handler->input_port_reader.get_port_type() == yarp_handler_class::port_type_enum::BINARY_TYPE) + { + g_value_set_string(value, "bin"); + } + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } +} + +static GstCaps* gst_yarp_video_source_get_caps(GstBaseSrc* src, GstCaps* filter) +{ + yTrace(); + + GstCaps* caps = gst_caps_from_string(MY_SOURCE_CAPS); + + if (filter) + { + GstCaps* intersection; + intersection = gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + return intersection; + } + else + { + return caps; + } +} + +static gboolean gst_yarp_video_source_set_caps(GstBaseSrc* src, GstCaps* caps) +{ + GstYarpVideoSource* yarp_src; + yarp_src = (GstYarpVideoSource*)(src); + + return gst_video_info_from_caps(&yarp_src->info, caps); +} + +/* Start/Stop methods */ +static gboolean gst_yarp_video_source_start(GstBaseSrc* src) +{ + //open YARP port + bool b = yarp_handler->input_port.open(yarp_handler->input_port_name); + if (!b) + { + yCError(YVS_COMP) << "Unable to open port: " << yarp_handler->input_port_name; + return FALSE; + } + if (!yarp_handler->remote_port_name.empty()) + { + b = yarp::os::Network::connect(yarp_handler->remote_port_name, yarp_handler->input_port_name, yarp_handler->connection_protocol); + if (!b) + { + yCError(YVS_COMP) << "Could not perform connection between: " << yarp_handler->remote_port_name + << " and " << yarp_handler->input_port_name << " via " << yarp_handler->connection_protocol; + 
return FALSE; + } + } + + return TRUE; +} + +static gboolean gst_yarp_video_source_stop(GstBaseSrc* src) +{ + // Close YARP port + yarp_handler->input_port.close(); + + if (yarp_handler) + { + delete yarp_handler; + yarp_handler = nullptr; + } + return TRUE; +} + +/* Frame methods */ +static GstFlowReturn gst_yarp_video_source_fill(GstPushSrc* src, GstBuffer* buf) +{ + //yCInfo(YVS_COMP) << ">>Fill"; + GstMapInfo map; + gst_buffer_map(buf, &map, GST_MAP_WRITE); + + GstYarpVideoSource* yarp_src = (GstYarpVideoSource*)(src); + guint gst_size = yarp_src->info.width * yarp_src->info.height * 3; // RGB format + + yarp_handler->input_port_reader.image_mutex.lock(); + if (yarp_handler->input_port_reader.get_port_type() == yarp_handler_class::port_type_enum::RGB_TYPE) + { + if (yarp_handler->input_port_reader.image_size == gst_size) + { + //yCDebug(YVS_COMP) << "bcopy" << yarp_handler->input_port_reader.image_size << gst_size; + memcpy(map.data, yarp_handler->input_port_reader.image_buffer, yarp_handler->input_port_reader.image_size); + //yCDebug(YVS_COMP) << "acopy"; + } + else if (yarp_handler->input_port_reader.image_size == 0) + { + yCError(YVS_COMP) << "No image received yet (rgb image mode)"; + } + else + { + yCError(YVS_COMP) << "size mismatch! 
gst:" << gst_size << "vs yarp:" << yarp_handler->input_port_reader.image_size; + } + } + else if (yarp_handler->input_port_reader.get_port_type() == yarp_handler_class::port_type_enum::BINARY_TYPE) + { + if (yarp_handler->input_port_reader.image_size != 0) + { + //yCDebug() << "bbinary"; + memcpy(map.data, yarp_handler->input_port_reader.image_buffer, yarp_handler->input_port_reader.image_size); + //yCDebug() << "abinary"; + } + else if (yarp_handler->input_port_reader.image_size == 0) + { + yCError(YVS_COMP) << "No image received yet (binary mode)"; + } + } + else + { + yCError(YVS_COMP) << "unreachable"; + //unreachable code + } + + yarp_handler->input_port_reader.image_mutex.unlock(); + + gst_buffer_unmap(buf, &map); + + return GST_FLOW_OK; +} + +static GstFlowReturn gst_yarp_video_source_alloc(GstPushSrc* src, GstBuffer** buf) +{ + if (yarp_handler->input_port.getInputCount() == 0) + { + yCInfo(YVS_COMP) << "Waiting port connection.."; + } + std::unique_lock lk(yarp_handler->input_port_reader.cvar_mutex); + yarp_handler->input_port_reader.cvar.wait(lk, [] + { return yarp_handler->input_port_reader.frame_ready; }); + yarp_handler->input_port_reader.frame_ready = false; + + GstYarpVideoSource* yarp_src = (GstYarpVideoSource*)(src); + yarp_src->info.width=640; //<<<<<<<<<<<<<<<<<<<<<<<< + yarp_src->info.height=480; //<<<<<<<<<<<<<<<<<<<<<<<< + guint size = yarp_src->info.width * yarp_src->info.height * 3; // RGB format + + GstBuffer* buffer = gst_buffer_new_allocate(NULL, size, NULL); + *buf = buffer; + + return GST_FLOW_OK; +} + +/* Register the plugin */ +static gboolean yarp_video_source_plugin_init(GstPlugin* plugin) +{ + return gst_element_register(plugin, "yarpvideosource", GST_RANK_NONE, gst_yarp_video_source_get_type()); +} + +#define VERSION "1.0" +#define PACKAGE "AAA" + +GST_PLUGIN_DEFINE( + GST_VERSION_MAJOR, + GST_VERSION_MINOR, + yarpvideosource, + "YARP Video Source", + yarp_video_source_plugin_init, + VERSION, + "LGPL", + "GStreamer", + 
"http://gstreamer.net/") + + + // TO test it: +// export GST_PLUGIN_PATH=$GST_PLUGIN_PATH:C:\Software\iCubSoftware\yarp\build\bin\Release +// set GST_PLUGIN_PATH=%GST_PLUGIN_PATH%;C:\Software\iCubSoftware\yarp\build\bin\Release +// gst-inspect-1.0 yarpvideosource +// gst-launch-1.0 yarpvideosource localPortname="/aaa" portType = "rgb" ! videoconvert ! autovideosink +// gst-launch-1.0 yarpvideosource localPortname="/aaa" ! videoconvert ! autovideosink +//gst-launch-1.0 yarpvideosource ! "video/x-raw, format=(string)I420, width=(int)640, height=(int)480" ! autovideosink +//gst-launch-1.0 yarpvideosource ! "video/x-raw, format=(string)I420, width=(int)640, height=(int)480" ! autovideosink +// gst-launch-1.0 yarpvideosource ! glimagesink +//gst-launch-1.0 yarpvideosource localPortname="/aaa" !videoconvert !x264enc !h264parse !avdec_h264 !videoconvert !autovideosink + +//grabber_yarp -> -> fast_tcp -> *porta_rgb_2_gs_image* -> sink +//telecamera -> h264enc -> blob2porta -> fast_tcp -> porta_blob_2_gs_image -> 264dec -> sink + + /* + C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstreamer-1.0.lib + C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstapp-1.0.lib + C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstbase-1.0.lib + C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstbase-1.0.lib + C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstvideo-1.0.lib + */ + diff --git a/src/yarpgstreamerplugins/videosource/yarpVideoSource.h b/src/yarpgstreamerplugins/videosource/yarpVideoSource.h new file mode 100644 index 00000000000..94d3485ab28 --- /dev/null +++ b/src/yarpgstreamerplugins/videosource/yarpVideoSource.h @@ -0,0 +1,117 @@ +#ifndef GST_YARP_VIDEO_SOURCE_H +#define GST_YARP_VIDEO_SOURCE_H + +#include + +#include +#include +#include + +#include +#include +#include + +YARP_LOG_COMPONENT(YVS_COMP, "yarp.gstreamerplugin.yarpvideosource") + +class yarp_handler_class +{ + public: + enum port_type_enum + { + RGB_TYPE = 0, + BINARY_TYPE = 1 + }; + + class input_reader : public yarp::os::PortReader + { + 
port_type_enum type=port_type_enum::RGB_TYPE; + yarp::sig::ImageOf priv_data_image; + yarp::os::Bottle priv_data_bottle; + + public: + std::mutex image_mutex; + std::mutex cvar_mutex; + std::condition_variable cvar; + bool frame_ready = false; + unsigned char* image_buffer=nullptr; + size_t image_size=0; + double timestamp=0; + + port_type_enum get_port_type() + { + return type; + } + + void set_port_type(port_type_enum t) + { + type = t; + } + + public: + virtual bool read(yarp::os::ConnectionReader& connection) override + { + std::unique_lock lk(cvar_mutex); + frame_ready = false; + + bool ret=true; + + if (type == port_type_enum::RGB_TYPE) + { + image_mutex.lock(); + ret = priv_data_image.read(connection); + image_buffer = priv_data_image.getRawImage(); + image_size = priv_data_image.getRawImageSize(); + timestamp = yarp::os::Time::now(); + frame_ready = true; + cvar.notify_all(); + + image_mutex.unlock(); + } + else if (type == port_type_enum::BINARY_TYPE) + { + image_mutex.lock(); + ret = priv_data_bottle.read(connection); + priv_data_bottle.get(0).asInt64(); + image_buffer = (unsigned char*) priv_data_bottle.get(1).asBlob(); + image_size = priv_data_bottle.get(1).asBlobLength(); + timestamp = yarp::os::Time::now(); + frame_ready = true; + cvar.notify_all(); + image_mutex.unlock(); + } + else + { + ret=false; + } + + if (ret==false) + { + yCError(YVS_COMP) << "Data type conversion failed in read(yarp::os::ConnectionReader&)"; + } + return true; + } + + input_reader() = default; + }; + +public: + yarp::os::Network yarpnet; + yarp::os::Port input_port; + input_reader input_port_reader; + std::string input_port_name = "/gstreamer/yarp_plugin:i"; + std::string remote_port_name = ""; + std::string connection_protocol = "fast_tcp"; + int verbosity_level = 0; + +public: + yarp_handler_class() + { + input_port_reader.set_port_type(yarp_handler_class::port_type_enum::RGB_TYPE); + input_port.setReader(input_port_reader); + } + virtual ~yarp_handler_class() + { + } +}; + 
+#endif // GST_YARP_VIDEO_SOURCE_H diff --git a/src/yarplaserscannergui/CMakeLists.txt b/src/yarplaserscannergui/CMakeLists.txt index 6eb851e6821..39612f1e1ee 100644 --- a/src/yarplaserscannergui/CMakeLists.txt +++ b/src/yarplaserscannergui/CMakeLists.txt @@ -3,21 +3,11 @@ if(YARP_COMPILE_yarplaserscannergui) - include(YarpUseQt5) include(YarpMacOSUtilities) - - add_executable(yarplaserscannergui WIN32) - - # set(yarplaserscannergui_SRCS main.cpp gui.cpp ) - # set(yarplaserscannergui_HDRS gui.h ) - # set(yarplaserscannergui_QRC_FILES res.qrc) - # set(yarplaserscannergui_UI_FILES gui.ui) + add_executable(yarplaserscannergui) set(yarplaserscannergui_SRCS main.cpp) - # qt5_add_resources(yarplaserscannergui_QRC_GEN_SRCS ${yarplaserscannergui_QRC_FILES}) - qt5_wrap_ui(yarplaserscannergui_UI_GEN_SRCS ${yarplaserscannerguii_UI_FILES}) - source_group( TREE "${CMAKE_CURRENT_SOURCE_DIR}" PREFIX "Source Files" @@ -28,28 +18,11 @@ if(YARP_COMPILE_yarplaserscannergui) PREFIX "Header Files" FILES ${yarplaserscannergui_HDRS} ) - source_group( - "Resources Files" - FILES ${yarplaserscannergui_QRC_FILES} - ) - source_group( - "UI Files" - FILES ${yarplaserscannergui_UI_FILES} - ) - source_group( - "Generated Files" - FILES ${yarpLaserScannergui_QRC_GEN_SRCS} - ${yarplaserscannergui_UI_GEN_SRCS} - ) target_sources(yarplaserscannergui PRIVATE ${yarplaserscannergui_SRCS} ${yarplaserscannergui_HDRS} - ${yarplaserscannergui_QRC_FILES} - ${yarplaserscannergui_QRC_GEN_SRCS} - ${yarplaserscannergui_UI_FILES} - ${yarplaserscannergui_UI_GEN_SRCS} ) target_link_libraries(yarplaserscannergui @@ -58,8 +31,6 @@ if(YARP_COMPILE_yarplaserscannergui) YARP::YARP_init YARP::YARP_sig YARP::YARP_dev - Qt5::Widgets - Qt5::Gui ) target_include_directories(yarplaserscannergui PRIVATE ${OpenCV_INCLUDE_DIRS}) diff --git a/src/yarpopencvdisplay/CMakeLists.txt b/src/yarpopencvdisplay/CMakeLists.txt new file mode 100644 index 00000000000..1ebcd82ce41 --- /dev/null +++ b/src/yarpopencvdisplay/CMakeLists.txt @@ 
-0,0 +1,50 @@ +# SPDX-FileCopyrightText: 2024 Istituto Italiano di Tecnologia (IIT) +# SPDX-License-Identifier: BSD-3-Clause + +if(YARP_COMPILE_yarpopencvdisplay) + + include(YarpMacOSUtilities) + add_executable(yarpopencvdisplay) + + set(yarpopencvdisplay_SRCS main.cpp) + + source_group( + TREE "${CMAKE_CURRENT_SOURCE_DIR}" + PREFIX "Source Files" + FILES ${yarpopencvdisplay_SRCS} + ) + source_group( + TREE "${CMAKE_CURRENT_SOURCE_DIR}" + PREFIX "Header Files" + FILES ${yarpopencvdisplay_HDRS} + ) + + + target_sources(yarpopencvdisplay + PRIVATE + ${yarpopencvdisplay_SRCS} + ${yarpopencvdisplay_HDRS} + ) + + target_link_libraries(yarpopencvdisplay + PRIVATE + YARP::YARP_os + YARP::YARP_init + YARP::YARP_sig + YARP::YARP_dev + ) + + target_include_directories(yarpopencvdisplay PRIVATE ${OpenCV_INCLUDE_DIRS}) + target_link_libraries(yarpopencvdisplay PRIVATE ${OpenCV_LIBS}) + + install(TARGETS yarpopencvdisplay COMPONENT utilities DESTINATION ${CMAKE_INSTALL_BINDIR}) + + set_property(TARGET yarpopencvdisplay PROPERTY FOLDER "Graphical User Interfaces") + + yarp_macos_duplicate_and_add_bundle( + TARGET yarpopencvdisplay + INSTALL_COMPONENT utilities + INSTALL_DESTINATION "${CMAKE_INSTALL_BINDIR}" + ) + +endif() diff --git a/src/yarpopencvdisplay/main.cpp b/src/yarpopencvdisplay/main.cpp new file mode 100644 index 00000000000..c40dc48005a --- /dev/null +++ b/src/yarpopencvdisplay/main.cpp @@ -0,0 +1,197 @@ +/* + * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: LGPL-2.1-or-later + */ + +#define _USE_MATH_DEFINES + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace yarp::os; +using namespace yarp::sig; + +CvFont font; +CvFont fontBig; + +const CvScalar color_bwhite = cvScalar(200,200,255); +const CvScalar color_white = 
cvScalar(255,255,255); +const CvScalar color_red = cvScalar(0,0,255); +const CvScalar color_yellow = cvScalar(0,255,255); +const CvScalar color_black = cvScalar(0,0,0); +const CvScalar color_gray = cvScalar(100,100,100); + +struct stats_struct +{ + double interval_time = 0; + double copy_time = 0; +} stats; + +void drawImageExtraStuff(IplImage* img) +{ + //cvLine(img,cvPoint(0,0),cvPoint(img->width,img->height),color_black); + + char buff[20]; + snprintf(buff, 20, "%5.3f", stats.interval_time); + cvPutText(img, buff, cvPoint(20,20), &font, cvScalar(90, 90, 90, 0)); + +} + +void display_help() +{ + yInfo() << "Available options:"; + yInfo() << "--verbose"; + yInfo() << "--remote "; + yInfo() << "--local "; + yInfo() << "--carrier "; +} + +int main(int argc, char *argv[]) +{ + yarp::os::Network yarp(yarp::os::YARP_CLOCK_SYSTEM); + + ResourceFinder rf; + rf.setDefaultConfigFile("yarpopencvdisplay.ini"); + rf.configure(argc, argv); + if (rf.check("help")) + { + display_help(); + return 0; + } + + bool verbose = rf.check("verbose", Value(false), "verbose [0/1]").asBool(); + std::string remote = rf.check("remote", Value(""), "remote port name").asString(); + std::string carrier = rf.check("carrier", Value("fast_tcp"), "carrier name").asString(); + std::string local = rf.check("local", Value("/yarpopencvdisplay:i"), "local port name").asString(); + + BufferedPort > inputPort; + if (inputPort.open(local) == false) + { + yError() << "Failed to open port" << local; + return 0; + } + if (!remote.empty()) + { + if (yarp::os::Network::connect(remote, local, carrier)) + { + yInfo() << "Successfully connected to port:" << remote; + } + else + { + yError() << "Failed to connect to port:" << remote; + return 0; + } + } + + std::string window_name = "yarpopencvdisplay: " + local; + IplImage* iplimg = nullptr; + cvNamedWindow(window_name.c_str(),CV_WINDOW_AUTOSIZE); + cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 0.4, 0.4, 0, 1, CV_AA); + cvInitFont(&fontBig, CV_FONT_HERSHEY_SIMPLEX, 
0.8, 0.8, 0, 1, CV_AA); + + bool exit = false; + + while(!exit) + { + void *v = cvGetWindowHandle(window_name.c_str()); + if (v == nullptr) + { + exit = true; + break; + } + + //Receive image and draw + { + auto* imgport = inputPort.read(false); + if (imgport) + { + static double old = yarp::os::Time::now(); + double now = yarp::os::Time::now(); + stats.interval_time = now - old; + old = yarp::os::Time::now(); + + if (iplimg == nullptr) + { + iplimg = cvCreateImage(cvSize(imgport->width(), imgport->height()), IPL_DEPTH_8U, 3); + } + else if (iplimg->width != imgport->width() || iplimg->height != imgport->height()) + + { + cvReleaseImage(&iplimg); + iplimg = cvCreateImage(cvSize(imgport->width(), imgport->height()), IPL_DEPTH_8U, 3); + } + + double a = yarp::os::Time::now(); + for (int y = 0; y < imgport->height(); y++) { + for (int x = 0; x < imgport->width(); x++) { + PixelRgb& yarpPixel = imgport->pixel(x, y); + CvScalar cvPixel = cvScalar(yarpPixel.b, yarpPixel.g, yarpPixel.r); + cvSet2D(iplimg, y, x, cvPixel); + } + } + double b = yarp::os::Time::now(); + stats.copy_time = b - a; + } + if (verbose) + { + yDebug("copytime: %5.3f frameintervale %5.3f", stats.copy_time, stats.interval_time); + drawImageExtraStuff(iplimg); + } + cvShowImage(window_name.c_str(), iplimg); + } + + SystemClock::delaySystem(0.001); + + //if ESC is pressed, exit. + int keypressed = cvWaitKey(2); //wait 2ms. 
Lower values do not work under Linux + keypressed &= 0xFF; //this mask is required in Linux systems + if (keypressed == 27) + { + exit = true; + } + if(keypressed == 'w') + { + } + if(keypressed == 's') + { + } + if(keypressed == 'v' ) + { + verbose= (!verbose); + if (verbose) { + yInfo("verbose mode is now ON"); + } else { + yInfo("verbose mode is now OFF"); + } + } + } + + inputPort.close(); + + cvDestroyAllWindows(); + + if (iplimg) + { + cvReleaseImage(&iplimg); + } +} From cc0dbe35fe471ec63d1d60931174c25ea4707a00 Mon Sep 17 00:00:00 2001 From: Marco Randazzo Date: Mon, 26 Aug 2024 16:57:35 +0200 Subject: [PATCH 2/3] style fix --- src/CMakeLists.txt | 4 +-- src/carriers/gstreamer_carrier/CMakeLists.txt | 4 +-- src/yarpgstreamerplugins/CMakeLists.txt | 5 ++- .../videopassthrough/CMakeLists.txt | 3 ++ .../videopassthrough/yarpVideoPassthrough.cpp | 7 ++++- .../videopassthrough/yarpVideoPassthrough.h | 5 +++ .../videosink/CMakeLists.txt | 3 ++ .../videosink/yarpVideoSink.cpp | 5 +++ .../videosink/yarpVideoSink.h | 7 ++++- .../videosource/CMakeLists.txt | 3 ++ .../videosource/yarpVideoSource.cpp | 31 ++++--------------- .../videosource/yarpVideoSource.h | 5 +++ 12 files changed, 50 insertions(+), 32 deletions(-) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 9c29c1ba143..13ed8b014e3 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -71,8 +71,8 @@ if(YARP_COMPILE_EXECUTABLES) # Other GUIs add_subdirectory(yarplaserscannergui) - add_subdirectory(yarpopencvdisplay) - + add_subdirectory(yarpopencvdisplay) + #other add_subdirectory(yarpgstreamerplugins) add_subdirectory(yarpDeviceParamParserGenerator) diff --git a/src/carriers/gstreamer_carrier/CMakeLists.txt b/src/carriers/gstreamer_carrier/CMakeLists.txt index b8093896293..9d358627fc4 100644 --- a/src/carriers/gstreamer_carrier/CMakeLists.txt +++ b/src/carriers/gstreamer_carrier/CMakeLists.txt @@ -22,8 +22,8 @@ if(NOT SKIP_gstreamer) GstreamerCarrier.cpp GstreamerStream.h GstreamerStream.cpp - 
GstreamerDecoder.h - GstreamerDecoder.cpp + GstreamerDecoder.h + GstreamerDecoder.cpp ) target_link_libraries(yarp_gstreamer diff --git a/src/yarpgstreamerplugins/CMakeLists.txt b/src/yarpgstreamerplugins/CMakeLists.txt index 28cdf0e562d..187bd90c027 100644 --- a/src/yarpgstreamerplugins/CMakeLists.txt +++ b/src/yarpgstreamerplugins/CMakeLists.txt @@ -1,3 +1,6 @@ +# SPDX-FileCopyrightText: 2024 Istituto Italiano di Tecnologia (IIT) +# SPDX-License-Identifier: BSD-3-Clause + yarp_dependent_option (YARP_COMPILE_gstreamerplugins "Do you want to compile gstreamerplugins?" OFF "YARP_HAS_GObject;YARP_HAS_GLIB2;YARP_HAS_GStreamer;YARP_HAS_GStreamerPluginsBase" OFF ) @@ -8,4 +11,4 @@ add_subdirectory(videosource) add_subdirectory(videosink) add_subdirectory(videopassthrough) -endif() \ No newline at end of file +endif() diff --git a/src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt b/src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt index e17c8bb58a2..8f888dbe3ff 100644 --- a/src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt +++ b/src/yarpgstreamerplugins/videopassthrough/CMakeLists.txt @@ -1,3 +1,6 @@ +# SPDX-FileCopyrightText: 2024 Istituto Italiano di Tecnologia (IIT) +# SPDX-License-Identifier: BSD-3-Clause + cmake_minimum_required(VERSION 3.10) project(gstyarpvideopassthrough LANGUAGES CXX) diff --git a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp index bf143135894..9071052db29 100644 --- a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp +++ b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp @@ -1,3 +1,8 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + #include "yarp/os/Time.h" #include "yarp/os/Network.h" #include "yarp/os/Log.h" @@ -244,7 +249,7 @@ static gboolean gst_yarp_video_passthrough_stop(GstBaseTransform* elem) /* Register the 
plugin */ static gboolean yarp_video_passthrough_init(GstPlugin* plugin) -{ +{ return gst_element_register(plugin, "yarpvideopassthrough", GST_RANK_NONE, gst_yarp_video_passthrough_get_type()); } diff --git a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h index 282a785bc36..3c97b4982b0 100644 --- a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h +++ b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.h @@ -1,3 +1,8 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + #ifndef GST_YARP_VIDEO_PASSTHROUGH_H #define GST_YARP_VIDEO_PASSTHROUGH_H diff --git a/src/yarpgstreamerplugins/videosink/CMakeLists.txt b/src/yarpgstreamerplugins/videosink/CMakeLists.txt index de3e8af205d..9841aca6513 100644 --- a/src/yarpgstreamerplugins/videosink/CMakeLists.txt +++ b/src/yarpgstreamerplugins/videosink/CMakeLists.txt @@ -1,3 +1,6 @@ +# SPDX-FileCopyrightText: 2024 Istituto Italiano di Tecnologia (IIT) +# SPDX-License-Identifier: BSD-3-Clause + cmake_minimum_required(VERSION 3.10) project(gstyarpvideosink LANGUAGES CXX) diff --git a/src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp b/src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp index 35a137e4529..da14176e783 100644 --- a/src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp +++ b/src/yarpgstreamerplugins/videosink/yarpVideoSink.cpp @@ -1,3 +1,8 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + #include #include #include diff --git a/src/yarpgstreamerplugins/videosink/yarpVideoSink.h b/src/yarpgstreamerplugins/videosink/yarpVideoSink.h index 5ab44b688c2..b0914a66f6c 100644 --- a/src/yarpgstreamerplugins/videosink/yarpVideoSink.h +++ b/src/yarpgstreamerplugins/videosink/yarpVideoSink.h @@ -1,3 +1,8 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto 
Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + #ifndef GST_YARP_VIDEO_SINK_H #define GST_YARP_VIDEO_SINK_H @@ -35,4 +40,4 @@ class yarp_handler_class } }; -#endif // GST_YARP_VIDEO_SINK_H \ No newline at end of file +#endif // GST_YARP_VIDEO_SINK_H diff --git a/src/yarpgstreamerplugins/videosource/CMakeLists.txt b/src/yarpgstreamerplugins/videosource/CMakeLists.txt index e9ebbc61e1a..14c589e14eb 100644 --- a/src/yarpgstreamerplugins/videosource/CMakeLists.txt +++ b/src/yarpgstreamerplugins/videosource/CMakeLists.txt @@ -1,3 +1,6 @@ +# SPDX-FileCopyrightText: 2024 Istituto Italiano di Tecnologia (IIT) +# SPDX-License-Identifier: BSD-3-Clause + cmake_minimum_required(VERSION 3.10) project(gstyarpvideosource LANGUAGES CXX) diff --git a/src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp b/src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp index 7c4c6437e82..74d1c7a353f 100644 --- a/src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp +++ b/src/yarpgstreamerplugins/videosource/yarpVideoSource.cpp @@ -1,3 +1,8 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + #include "yarp/os/Time.h" #include "yarp/os/Network.h" #include "yarp/os/Log.h" @@ -255,7 +260,7 @@ static gboolean gst_yarp_video_source_start(GstBaseSrc* src) b = yarp::os::Network::connect(yarp_handler->remote_port_name, yarp_handler->input_port_name, yarp_handler->connection_protocol); if (!b) { - yCError(YVS_COMP) << "Could not perform connection between: " << yarp_handler->remote_port_name + yCError(YVS_COMP) << "Could not perform connection between: " << yarp_handler->remote_port_name << " and " << yarp_handler->input_port_name << " via " << yarp_handler->connection_protocol; return FALSE; } @@ -372,27 +377,3 @@ GST_PLUGIN_DEFINE( "LGPL", "GStreamer", "http://gstreamer.net/") - - - // TO test it: -// export 
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:C:\Software\iCubSoftware\yarp\build\bin\Release -// set GST_PLUGIN_PATH=%GST_PLUGIN_PATH%;C:\Software\iCubSoftware\yarp\build\bin\Release -// gst-inspect-1.0 yarpvideosource -// gst-launch-1.0 yarpvideosource localPortname="/aaa" portType = "rgb" ! videoconvert ! autovideosink -// gst-launch-1.0 yarpvideosource localPortname="/aaa" ! videoconvert ! autovideosink -//gst-launch-1.0 yarpvideosource ! "video/x-raw, format=(string)I420, width=(int)640, height=(int)480" ! autovideosink -//gst-launch-1.0 yarpvideosource ! "video/x-raw, format=(string)I420, width=(int)640, height=(int)480" ! autovideosink -// gst-launch-1.0 yarpvideosource ! glimagesink -//gst-launch-1.0 yarpvideosource localPortname="/aaa" !videoconvert !x264enc !h264parse !avdec_h264 !videoconvert !autovideosink - -//grabber_yarp -> -> fast_tcp -> *porta_rgb_2_gs_image* -> sink -//telecamera -> h264enc -> blob2porta -> fast_tcp -> porta_blob_2_gs_image -> 264dec -> sink - - /* - C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstreamer-1.0.lib - C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstapp-1.0.lib - C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstbase-1.0.lib - C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstbase-1.0.lib - C:\Software\gstreamer\1.0\msvc_x86_64\lib\gstvideo-1.0.lib - */ - diff --git a/src/yarpgstreamerplugins/videosource/yarpVideoSource.h b/src/yarpgstreamerplugins/videosource/yarpVideoSource.h index 94d3485ab28..8318682e4e1 100644 --- a/src/yarpgstreamerplugins/videosource/yarpVideoSource.h +++ b/src/yarpgstreamerplugins/videosource/yarpVideoSource.h @@ -1,3 +1,8 @@ +/* + * SPDX-FileCopyrightText: 2024-2024 Istituto Italiano di Tecnologia (IIT) + * SPDX-License-Identifier: BSD-3-Clause + */ + #ifndef GST_YARP_VIDEO_SOURCE_H #define GST_YARP_VIDEO_SOURCE_H From a376db43b8ec8f650f3e540d0b28bbb15ab0a54e Mon Sep 17 00:00:00 2001 From: Marco Randazzo Date: Tue, 27 Aug 2024 17:28:52 +0200 Subject: [PATCH 3/3] documentation update yarpvideopassthrough plugin update --- 
.../module_gstreamerplugins.dox | 57 ++++++++------ .../videopassthrough/yarpVideoPassthrough.cpp | 78 +++++++++---------- 2 files changed, 69 insertions(+), 66 deletions(-) diff --git a/doc/module_gstreamerplugins/module_gstreamerplugins.dox b/doc/module_gstreamerplugins/module_gstreamerplugins.dox index 157ee0e15f2..a54cf5a652e 100644 --- a/doc/module_gstreamerplugins/module_gstreamerplugins.dox +++ b/doc/module_gstreamerplugins/module_gstreamerplugins.dox @@ -28,42 +28,49 @@ gst-inspect-1.0 videobalance \endverbatim \section yarp_plugins Yarp plugins for gstreamer -Yarp and gstreamer can be interoperate through two Yarp plugins, `yarpVideoSource` which allows to connect a yarp video stream to gstreamer and `yarpVideoSink` which allows to connect a gstreamer video to yarp. +Yarp and gstreamer can interoperate through two Yarp plugins, `yarpvideosource` which allows to connect a yarp video stream to gstreamer and `yarpvideosink` which allows to connect a gstreamer video to yarp. -\section yarpVideoSource yarpVideoSource plugin -This plugin opens a yarp input port which can accept a yarp stream and propagate it to a Gstreamer pipeline. The yarpVideoSource plugin must be the final step of a gstreamer pipeline. +\section yarpVideoSource yarpvideosource plugin +This plugin opens a yarp input port which can accept a yarp stream and propagate it to a Gstreamer pipeline. The yarpvideosource plugin must be the first step of a gstreamer pipeline. The plugin accepts the following options: -- `localPortName` the name of the yarp port opened by the plugin. 
+- `remotePortname` If specified, the plugin automatically creates a yarp connection from the `remotePortname` to the `localPortname`, using the protocol specified by `connectionProtocol`. - `connectionProtocol` the yarp carrier to be used to perform the automatic connection mentioned above. - `portType` if set to `rgb`, the plugins selects a `yarp::sig::ImageOf` input. If set to `bin`, the plugin accepts a custom binary data, specifically designed to communicate only with -a yarpVideoSink plugin. This is required if you are transmitting/receiving encoded images (see examples below) +a yarpvideosink plugin. This is required if you are transmitting/receiving encoded images (see examples below) -The yarpVideoSource plugin is currently able to handle the following gstreamer streams: x-raw(rgb), h264, h265. Check the plugins caps (with `gst-inspect-1.0`) for further details. +The yarpvideosource plugin is currently able to handle the following gstreamer streams: x-raw(rgb), h264, h265. Check the plugins caps (with `gst-inspect-1.0`) for further details. -\section yarpVideoSink yarpVideoSink plugin -This plugin opens a yarp output port. It receives a stream from the gstreamer pipeline and broadcast it to the yarp network. The yarpVideoSink plugin must be the final step of a gstreamer pipeline. +\section yarpVideoSink yarpvideosink plugin +This plugin opens a yarp output port. It receives a stream from the gstreamer pipeline and broadcasts it to the yarp network. The yarpvideosink plugin must be the final step of a gstreamer pipeline. The plugin accepts the following options: -- `localPortName` the name of the yarp port opened by the plugin. 
+- `remotePortname` If specified, the plugin automatically creates a yarp connection from the `localPortname` to `remotePortname`, using the protocol specified by `connectionProtocol`. - `connectionProtocol` the yarp carrier to be used to perform the automatic connection mentioned above. - `portType` if set to `rgb`, the plugins selects a `yarp::sig::ImageOf` input. If set to `bin`, the plugin accepts a custom binary data, specifically designed to communicate only with -a yarpVideoSink plugin. This is required if you are transmitting/receiving encoded images (see examples below) +a yarpvideosink plugin. This is required if you are transmitting/receiving encoded images (see examples below) -The yarpVideoSource plugin is currently able to handle the following gstreamer streams: x-raw(rgb), h264, h265. Check the plugins caps (with `gst-inspect-1.0`) for further details. +The yarpvideosource plugin is currently able to handle the following gstreamer streams: x-raw(rgb), h264, h265. Check the plugins caps (with `gst-inspect-1.0`) for further details. + +\section yarpVideoPassthrough yarpvideopassthrough plugin +This plugin can be used for debug purposes. It simply receives data and passes it to the next element of the pipeline. +It can be used as a template to perform modifications to the frame buffer (e.g. filtering) or to print the timestamp and monitor the pipeline latency. +The plugin accepts the following options: +- `yarpname` the name of the element, if more than a single yarpvideopassthrough plugin is used in the pipeline. +- `yarpverbose` an integer > 0 will activate timestamp stats. 0 (default) disables the debug print. \section yarp_gstreamer_examples Some examples: -Example 1: Feeding a yarp image into gstreamer: +Example 1: Feeding a yarp image into gstreamer \verbatim yarpdev --device fakeFrameGrabber --width 640 --height 480 --period 0.33 -gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" ! videoconvert ! videobalance saturation=0.0 ! 
autovideosink +gst-launch-1.0 yarpvideosource localPortname="/gstreamer:i" ! videoconvert ! videobalance saturation=0.0 ! autovideosink yarp connect /grabber /gstreamer:i \endverbatim -Example 2: Stream a gstreamer video to yarp: +Example 2: Stream a gstreamer video to yarp \verbatim -gst-launch-1.0 videotestsrc ! videoconvert ! yarpVideoSink --localPortName="/grabber:o" +gst-launch-1.0 videotestsrc ! videoconvert ! yarpvideosink localPortname="/grabber:o" yarpview --name /view yarp connect /gstreamer:o /view \endverbatim @@ -75,22 +82,26 @@ gst-launch-1.0 filesrc location=your_video_file.mp4 ! decodebin ! autovideosink Example 4: Encode a video and send it to a yarp port \verbatim -gst-launch-1.0 videotestsrc ! videoconvert ! h_264 ! yarpVideoSink --localPortName="/grabber:o" portType="bin" +gst-launch-1.0 videotestsrc ! videoconvert ! openh264enc ! yarpvideosink localPortname="/grabber:o" portType="bin" \endverbatim Example 5: Receive an encoded video and display it \verbatim -gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" --portType="bin" ! h_264dev ! videoconvert ! autovideosink +gst-launch-1.0 yarpvideosource localPortname="/gstreamer:i" portType="bin" ! avdec_h264 ! videoconvert ! autovideosink \endverbatim -Example 5: Receive an yarp video, encode it, transmit it, received it, decode it, send to a yarp port +Example 6: Receive an yarp video, encode it, transmit it, received it, decode it, send to a yarp port \verbatim -gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" | h_264 ! yarpVideoSink --localPortName="/grabber:o" portType="bin" -gst-launch-1.0 yarpVideoSource --localPortName="/gstreamer:i" --portType="bin" ! h_264dev ! videoconvert ! arpVideoSink --localPortName="/grabber:o" +gst-launch-1.0 yarpvideosource localPortname="/gstreamer:i" | openh264enc ! yarpvideosink localPortname="/grabber:o" portType="bin" +gst-launch-1.0 yarpvideosource localPortname="/gstreamer:i" portType="bin" ! avdec_h264 ! videoconvert ! 
yarpvideosink localPortname="/grabber:o" \endverbatim +Example 7: Usage of a passthrough plugin to print the time required to encode/decode a video frame +\verbatim +gst-launch-1.0 videotestsrc ! yarpvideopassthrough yarpverbose=2 yarpname="before encoding" ! openh264enc ! avdec_h264 ! yarpvideopassthrough yarpverbose=2 yarpname="after decoding" ! autovideosink +\endverbatim -\section yarp_plugins_gstreamer_install need What do I need to use yarp plugins for gstreamer? +\section yarp_plugins_gstreamer_install How to use yarp plugins for gstreamer Gstreamer plugins for yarp can be optionally compiled (default is off) if the yarp gstreamer dependencies are satisfied. On Ubuntu: \verbatim diff --git a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp index 9071052db29..704996085ad 100644 --- a/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp +++ b/src/yarpgstreamerplugins/videopassthrough/yarpVideoPassthrough.cpp @@ -32,7 +32,7 @@ GST_DEBUG_CATEGORY_STATIC(yarp_video_passthrough_debug); // Define the structures for the class and instance typedef struct _GstYarpVideoPassthrough { - GstVideoFilter parent; + GstBaseTransform parent; yarp::os::Network* yarpnet = nullptr; std::string s_name; int verbosity_level = 0; @@ -42,20 +42,13 @@ typedef struct _GstYarpVideoPassthrough typedef struct _GstYarpVideoPassthroughClass { - GstVideoFilterClass parent_class; + GstBaseTransformClass parent_class; } GstYarpVideoPassthroughClass; -G_DEFINE_TYPE(GstYarpVideoPassthrough, gst_yarp_video_passthrough, GST_TYPE_VIDEO_FILTER) +G_DEFINE_TYPE(GstYarpVideoPassthrough, gst_yarp_video_passthrough, GST_TYPE_BASE_TRANSFORM) #define GST_MY_PLUGIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), gst_yarp_video_passthrough_get_type(), GstYarpVideoPassthrough)) -// G_DEFINE_TYPE(GstYarpVideoPassthrough, gst_yarp_video_passthrough, GST_TYPE_VIDEO_SINK) - -// #define GST_TYPE_GRAY_FILTER 
(gst_gray_filter_get_type()) - -// G_DECLARE_FINAL_TYPE(GstYarpVideoPassthrough, gst_yarp_video_passthrough, GST, GRAY_FILTER, GstBaseTransform) - /* Pads */ - #define MY_SOURCE_CAPS \ "video/x-raw, " \ "format=(string){RGB,I420,NV12,YUY2};" \ @@ -77,10 +70,12 @@ static GstStaticPadTemplate gst_yarp_video_passthrough_src_template = GST_STATIC GST_STATIC_CAPS(MY_SOURCE_CAPS)); /* Function prototypes */ -static GstFlowReturn gst_yarp_video_passthrough_transform_frame(GstVideoFilter* filter, GstVideoFrame* inframe, GstVideoFrame* outframe); +static GstFlowReturn gst_yarp_video_passthrough_transform_frame(GstBaseTransform* base, GstBuffer* inbuf, GstBuffer* outbuf); static void gst_yarp_video_passthrough_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec); static void gst_yarp_video_passthrough_get_property(GObject* object, guint prop_id, GValue* value, GParamSpec* pspec); -//static gboolean gst_yarp_video_passthrough_start(GstBaseTransform* elem); +static GstFlowReturn my_passthrough_prepare_output_buffer(GstBaseTransform* trans, GstBuffer* inbuf, GstBuffer** outbuf); + + //static gboolean gst_yarp_video_passthrough_start(GstBaseTransform* elem); //static gboolean gst_yarp_video_passthrough_stop(GstBaseTransform* elem); /* Initialize the class */ @@ -88,8 +83,9 @@ static void gst_yarp_video_passthrough_class_init(GstYarpVideoPassthroughClass* { GObjectClass* gobject_class = (GObjectClass*)klass; GstElementClass* gstelement_class = GST_ELEMENT_CLASS(klass); - GstVideoFilterClass* video_filter_class = GST_VIDEO_FILTER_CLASS(klass); - video_filter_class->transform_frame = GST_DEBUG_FUNCPTR(gst_yarp_video_passthrough_transform_frame); + GstBaseTransformClass* video_filter_class = GST_BASE_TRANSFORM_CLASS(klass); + video_filter_class->transform = GST_DEBUG_FUNCPTR(gst_yarp_video_passthrough_transform_frame); + video_filter_class->prepare_output_buffer = GST_DEBUG_FUNCPTR(my_passthrough_prepare_output_buffer); // GstBaseTransformClass* 
gstbase_class = GST_BASE_TRANSFORM_CLASS(klass); gst_element_class_add_pad_template(GST_ELEMENT_CLASS(klass), @@ -125,7 +121,7 @@ static void gst_yarp_video_passthrough_set_property(GObject* object, guint prop_ switch (prop_id) { case PROP_YARPNAME: - self->s_name = (g_value_get_string(value)); + self->s_name = std::string(g_value_dup_string(value)); yCInfo(YVP_COMP, "set name: %s", self->s_name.c_str()); break; case PROP_YARPVERBOSELEVEL: @@ -157,28 +153,39 @@ static void gst_yarp_video_passthrough_get_property(GObject* object, guint prop_ } /* Frame methods */ -static GstFlowReturn gst_yarp_video_passthrough_transform_frame(GstVideoFilter* filter, GstVideoFrame* inframe, GstVideoFrame* outframe) +static GstFlowReturn my_passthrough_prepare_output_buffer(GstBaseTransform* trans, GstBuffer* inbuf, GstBuffer** outbuf) { - if (inframe == nullptr || inframe->buffer ==nullptr) - { - printf ("----------------\n"); - return GST_FLOW_ERROR; - } - if (outframe == nullptr || outframe->buffer == nullptr) - { - printf ("44444444444444444\n"); + // Create a new output buffer with the same size as the input buffer + *outbuf = gst_buffer_new_allocate(NULL, gst_buffer_get_size(inbuf), NULL); + + // Check for allocation failure + if (*outbuf == NULL) { + GST_ERROR_OBJECT(trans, "Failed to allocate output buffer."); return GST_FLOW_ERROR; } - _GstYarpVideoPassthrough* self = GST_MY_PLUGIN(filter); + return GST_FLOW_OK; +} +static GstFlowReturn gst_yarp_video_passthrough_transform_frame(GstBaseTransform* base, GstBuffer* inbuf, GstBuffer* outbuf) +{ + _GstYarpVideoPassthrough* self = GST_MY_PLUGIN(base); + + //get memory + GstMapInfo in_map; + GstMapInfo out_map; + gst_buffer_map(inbuf, &in_map, GST_MAP_READ); + gst_buffer_map(outbuf, &out_map, GST_MAP_WRITE); + + //copy data double time1 = yarp::os::Time::now(); - gst_video_frame_copy(outframe, inframe); + memcpy(out_map.data, in_map.data, in_map.size); double time2 = yarp::os::Time::now(); double diff = time1 - self->prev_time; 
self->prev_time = time1; + //print stats switch (self->verbosity_level) { case 0: @@ -207,23 +214,8 @@ static GstFlowReturn gst_yarp_video_passthrough_transform_frame(GstVideoFilter* break; } - - - - - - /* - GstMapInfo info; - gst_buffer_map(buf, &info, GST_MAP_WRITE); - - guint8* data = info.data; - for (guint i = 0; i < info.size; i += 3) { - guint8 gray = (data[i] + data[i + 1] + data[i + 2]) / 3; - data[i] = data[i + 1] = data[i + 2] = gray; - } - - gst_buffer_unmap(buf, &info); - */ + gst_buffer_unmap(inbuf, &in_map); + gst_buffer_unmap(outbuf, &out_map); self->frame_counter++;