1 /*M///////////////////////////////////////////////////////////////////////////////////////
2 //
3 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4 //
5 //  By downloading, copying, installing or using the software you agree to this license.
6 //  If you do not agree to this license, do not download, install,
7 //  copy or use the software.
8 //
9 //
10 //                        Intel License Agreement
11 //                For Open Source Computer Vision Library
12 //
13 // Copyright (C) 2008, 2011, Nils Hasler, all rights reserved.
14 // Third party copyrights are property of their respective owners.
15 //
16 // Redistribution and use in source and binary forms, with or without modification,
17 // are permitted provided that the following conditions are met:
18 //
19 //   * Redistribution's of source code must retain the above copyright notice,
20 //     this list of conditions and the following disclaimer.
21 //
22 //   * Redistribution's in binary form must reproduce the above copyright notice,
23 //     this list of conditions and the following disclaimer in the documentation
24 //     and/or other materials provided with the distribution.
25 //
26 //   * The name of Intel Corporation may not be used to endorse or promote products
27 //     derived from this software without specific prior written permission.
28 //
29 // This software is provided by the copyright holders and contributors "as is" and
30 // any express or implied warranties, including, but not limited to, the implied
31 // warranties of merchantability and fitness for a particular purpose are disclaimed.
32 // In no event shall the Intel Corporation or contributors be liable for any direct,
33 // indirect, incidental, special, exemplary, or consequential damages
34 // (including, but not limited to, procurement of substitute goods or services;
35 // loss of use, data, or profits; or business interruption) however caused
36 // and on any theory of liability, whether in contract, strict liability,
37 // or tort (including negligence or otherwise) arising in any way out of
38 // the use of this software, even if advised of the possibility of such damage.
39 //
40 //M*/
41 
42 /*!
43  * \file cap_gstreamer.cpp
44  * \author Nils Hasler <hasler@mpi-inf.mpg.de>
45  *         Max-Planck-Institut Informatik
46  * \author Dirk Van Haerenborgh <vhdirk@gmail.com>
47  *
48  * \brief Use GStreamer to read/write video
49  */
50 #include "precomp.hpp"
51 #include <unistd.h>
52 #include <string.h>
53 #include <gst/gst.h>
54 #include <gst/gstbuffer.h>
55 #include <gst/video/video.h>
56 #include <gst/app/gstappsink.h>
57 #include <gst/app/gstappsrc.h>
58 #include <gst/riff/riff-media.h>
59 #include <gst/pbutils/missing-plugins.h>
60 
#define VERSION_NUM(major, minor, micro) ((major) * 1000000 + (minor) * 1000 + (micro))
#define FULL_GST_VERSION VERSION_NUM(GST_VERSION_MAJOR, GST_VERSION_MINOR, GST_VERSION_MICRO)
63 
64 #if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
65 #include <gst/pbutils/encoding-profile.h>
66 //#include <gst/base/gsttypefindhelper.h>
67 #endif
68 
69 
70 #ifdef NDEBUG
71 #define CV_WARN(message)
72 #else
73 #define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
74 #endif
75 
76 #if GST_VERSION_MAJOR == 0
77 #define COLOR_ELEM "ffmpegcolorspace"
78 #elif FULL_GST_VERSION < VERSION_NUM(1,5,0)
79 #define COLOR_ELEM "videoconvert"
80 #else
81 #define COLOR_ELEM "autovideoconvert"
82 #endif
83 
84 void toFraction(double decimal, double &numerator, double &denominator);
85 void handleMessage(GstElement * pipeline);
86 
87 
88 static cv::Mutex gst_initializer_mutex;
89 
90 /*!
91  * \brief The gst_initializer class
92  * Initializes gstreamer once in the whole process
93  */
94 class gst_initializer
95 {
96 public:
    static void init()
98     {
99         gst_initializer_mutex.lock();
100         static gst_initializer init;
101         gst_initializer_mutex.unlock();
102     }
103 private:
    gst_initializer()
105     {
106         gst_init(NULL, NULL);
107 //        gst_debug_set_active(1);
108 //        gst_debug_set_colored(1);
109 //        gst_debug_set_default_threshold(GST_LEVEL_INFO);
110     }
111 };
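
// Note: gst_initializer::init() is called at the top of CvCapture_GStreamer::open() and
// CvVideoWriter_GStreamer::open() below, so users of this backend never have to call
// gst_init() themselves. A minimal sketch of the pattern (illustrative only):
//
//     gst_initializer::init();                                   // safe to call more than once
//     GstElement* pipe = gst_parse_launch("videotestsrc ! fakesink", NULL);
//     // ... GStreamer is guaranteed to be initialized here ...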
112 
113 /*!
114  * \brief The CvCapture_GStreamer class
115  * Use GStreamer to capture video
116  */
117 class CvCapture_GStreamer : public CvCapture
118 {
119 public:
    CvCapture_GStreamer() { init(); }
    virtual ~CvCapture_GStreamer() { close(); }
122 
123     virtual bool open( int type, const char* filename );
124     virtual void close();
125 
126     virtual double getProperty(int) const;
127     virtual bool setProperty(int, double);
128     virtual bool grabFrame();
129     virtual IplImage* retrieveFrame(int);
130 
131 protected:
132     void init();
133     bool reopen();
134     bool isPipelinePlaying();
135     void startPipeline();
136     void stopPipeline();
137     void restartPipeline();
138     void setFilter(const char* prop, int type, int v1, int v2 = 0);
139     void removeFilter(const char *filter);
140     static void newPad(GstElement *myelement,
141                        GstPad     *pad,
142                        gpointer    data);
143     GstElement*   pipeline;
144     GstElement*   uridecodebin;
145     GstElement*   color;
146     GstElement*   sink;
147 #if GST_VERSION_MAJOR > 0
148     GstSample*    sample;
149     GstMapInfo*   info;
150 #endif
151     GstBuffer*    buffer;
152     GstCaps*      caps;
153     IplImage*     frame;
154     gint64        duration;
155     gint          width;
156     gint          height;
157     double        fps;
158 };
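
// A minimal usage sketch for this class (illustrative only; in practice it is driven
// through the CvCapture interface created by cvCreateCapture_GStreamer() below):
//
//     CvCapture_GStreamer cap;
//     if (cap.open(CV_CAP_GSTREAMER_FILE, "video.avi"))
//     {
//         while (cap.grabFrame())                    // pulls the next sample from the appsink
//         {
//             IplImage* img = cap.retrieveFrame(0);  // wraps the buffer; valid until the next grab
//             // ... process img ...
//         }
//     }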
159 
160 /*!
161  * \brief CvCapture_GStreamer::init
162  * inits the class
163  */
void CvCapture_GStreamer::init()
165 {
166     pipeline = NULL;
167     uridecodebin = NULL;
168     color = NULL;
169     sink = NULL;
170 #if GST_VERSION_MAJOR > 0
171     sample = NULL;
172     info = new GstMapInfo;
173 #endif
174     buffer = NULL;
175     caps = NULL;
176     frame = NULL;
177     duration = -1;
178     width = -1;
179     height = -1;
180     fps = -1;
181 }
182 
183 /*!
184  * \brief CvCapture_GStreamer::close
185  * Closes the pipeline and destroys all instances
186  */
void CvCapture_GStreamer::close()
188 {
189     if (isPipelinePlaying())
190         this->stopPipeline();
191 
192     if(pipeline) {
193         gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
194         gst_object_unref(GST_OBJECT(pipeline));
195         pipeline = NULL;
196     }
197 
198     duration = -1;
199     width = -1;
200     height = -1;
201     fps = -1;
202 }
203 
/*!
 * \brief CvCapture_GStreamer::grabFrame
 * \return success
 * Grabs a sample from the pipeline, to be consumed by a subsequent call to retrieveFrame().
 * The pipeline is started if it was not running yet.
 */
bool CvCapture_GStreamer::grabFrame()
211 {
212     if(!pipeline)
213         return false;
214 
215     // start the pipeline if it was not in playing state yet
216     if(!this->isPipelinePlaying())
217         this->startPipeline();
218 
219     // bail out if EOS
220     if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
221         return false;
222 
223 #if GST_VERSION_MAJOR == 0
224     if(buffer)
225         gst_buffer_unref(buffer);
226 
227     buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
228 #else
229     if(sample)
230         gst_sample_unref(sample);
231 
232     sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
233 
234     if(!sample)
235         return false;
236 
237     buffer = gst_sample_get_buffer(sample);
238 #endif
239 
240     if(!buffer)
241         return false;
242 
243     return true;
244 }
245 
/*!
 * \brief CvCapture_GStreamer::retrieveFrame
 * \return IplImage pointer. [Transfer Full]
 * Retrieves the previously grabbed buffer and wraps it in an IplImage structure.
 */
IplImage * CvCapture_GStreamer::retrieveFrame(int)
252 {
253     if(!buffer)
254         return 0;
255 
256     //construct a frame header if we did not have any yet
257     if(!frame)
258     {
259 #if GST_VERSION_MAJOR == 0
260         GstCaps* buffer_caps = gst_buffer_get_caps(buffer);
261 #else
262         GstCaps* buffer_caps = gst_sample_get_caps(sample);
263 #endif
        // bail out if no caps
265         assert(gst_caps_get_size(buffer_caps) == 1);
266         GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
267 
268         // bail out if width or height are 0
269         if(!gst_structure_get_int(structure, "width", &width) ||
270                 !gst_structure_get_int(structure, "height", &height))
271         {
272             gst_caps_unref(buffer_caps);
273             return 0;
274         }
275 
276         int depth = 3;
277 #if GST_VERSION_MAJOR > 0
278         depth = 0;
279         const gchar* name = gst_structure_get_name(structure);
280         const gchar* format = gst_structure_get_string(structure, "format");
281 
282         if (!name || !format)
283             return 0;
284 
285         // we support 3 types of data:
286         //     video/x-raw, format=BGR   -> 8bit, 3 channels
287         //     video/x-raw, format=GRAY8 -> 8bit, 1 channel
288         //     video/x-bayer             -> 8bit, 1 channel
289         // bayer data is never decoded, the user is responsible for that
290         // everything is 8 bit, so we just test the caps for bit depth
291 
292         if (strcasecmp(name, "video/x-raw") == 0)
293         {
294             if (strcasecmp(format, "BGR") == 0) {
295                 depth = 3;
296             }
297             else if(strcasecmp(format, "GRAY8") == 0){
298                 depth = 1;
299             }
300         }
301         else if (strcasecmp(name, "video/x-bayer") == 0)
302         {
303             depth = 1;
304         }
305 #endif
306         if (depth > 0) {
307             frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
308         } else {
309             gst_caps_unref(buffer_caps);
310             return 0;
311         }
312 
313         gst_caps_unref(buffer_caps);
314     }
315 
316     // gstreamer expects us to handle the memory at this point
317     // so we can just wrap the raw buffer and be done with it
318 #if GST_VERSION_MAJOR == 0
319     frame->imageData = (char *)GST_BUFFER_DATA(buffer);
320 #else
    // the data ptr in GstMapInfo is only valid for the lifetime of the mapping.
    // TODO: check if reusing the mapinfo object is ok.

    gboolean success = gst_buffer_map(buffer, info, (GstMapFlags)GST_MAP_READ);
325     if (!success){
326         //something weird went wrong here. abort. abort.
327         //fprintf(stderr,"GStreamer: unable to map buffer");
328         return 0;
329     }
330     frame->imageData = (char*)info->data;
331     gst_buffer_unmap(buffer,info);
332 #endif
333 
334     return frame;
335 }
336 
337 
338 /*!
339  * \brief CvCapture_GStreamer::isPipelinePlaying
 * \return true if the pipeline is currently playing.
 */
bool CvCapture_GStreamer::isPipelinePlaying()
343 {
344     GstState current, pending;
345     GstClockTime timeout = 5*GST_SECOND;
346     if(!GST_IS_ELEMENT(pipeline)){
347         return false;
348     }
349 
350     GstStateChangeReturn ret = gst_element_get_state(GST_ELEMENT(pipeline),&current, &pending, timeout);
351     if (!ret){
352         //fprintf(stderr, "GStreamer: unable to query pipeline state\n");
353         return false;
354     }
355 
356     return current == GST_STATE_PLAYING;
357 }
358 
359 /*!
360  * \brief CvCapture_GStreamer::startPipeline
361  * Start the pipeline by setting it to the playing state
362  */
void CvCapture_GStreamer::startPipeline()
364 {
365     CV_FUNCNAME("icvStartPipeline");
366 
367     __BEGIN__;
368 
369     //fprintf(stderr, "relinked, pausing\n");
370     GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
371     if (status == GST_STATE_CHANGE_ASYNC)
372     {
373         // wait for status update
374         GstState st1;
375         GstState st2;
376         status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
377     }
378     if (status == GST_STATE_CHANGE_FAILURE)
379     {
380         handleMessage(pipeline);
381         gst_object_unref(pipeline);
382         pipeline = NULL;
383         CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
384         return;
385     }
386 
387     //printf("state now playing\n");
388     handleMessage(pipeline);
389     __END__;
390 }
391 
392 
393 /*!
394  * \brief CvCapture_GStreamer::stopPipeline
395  * Stop the pipeline by setting it to NULL
396  */
void CvCapture_GStreamer::stopPipeline()
398 {
399     CV_FUNCNAME("icvStopPipeline");
400 
401     __BEGIN__;
402 
403     //fprintf(stderr, "restarting pipeline, going to ready\n");
404     if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) ==
405             GST_STATE_CHANGE_FAILURE) {
406         CV_ERROR(CV_StsError, "GStreamer: unable to stop pipeline\n");
407         gst_object_unref(pipeline);
408         pipeline = NULL;
409         return;
410     }
411     __END__;
412 }
413 
414 /*!
415  * \brief CvCapture_GStreamer::restartPipeline
416  * Restart the pipeline
417  */
void CvCapture_GStreamer::restartPipeline()
419 {
420     handleMessage(pipeline);
421 
422     this->stopPipeline();
423     this->startPipeline();
424 }
425 
426 
427 /*!
428  * \brief CvCapture_GStreamer::setFilter
429  * \param prop the property name
430  * \param type glib property type
431  * \param v1 the value
 * \param v2 second value, if the property type requires one (0 otherwise)
 * Filter the output formats by setting appsink caps properties
 */
void CvCapture_GStreamer::setFilter(const char *prop, int type, int v1, int v2)
436 {
437     //printf("GStreamer: setFilter \n");
438     if(!caps || !( GST_IS_CAPS (caps) ))
439     {
440         if(type == G_TYPE_INT)
441         {
442 #if GST_VERSION_MAJOR == 0
443             caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL);
444 #else
445             caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL);
446 #endif
447         }
448         else
449         {
450 #if GST_VERSION_MAJOR == 0
451             caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL);
452 #else
453             caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL);
454 #endif
455         }
456     }
457     else
458     {
459 #if GST_VERSION_MAJOR > 0
460         if (! gst_caps_is_writable(caps))
461             caps = gst_caps_make_writable (caps);
462 #endif
463         if(type == G_TYPE_INT){
464             gst_caps_set_simple(caps, prop, type, v1, NULL);
465         }else{
466             gst_caps_set_simple(caps, prop, type, v1, v2, NULL);
467         }
468     }
469 
470 #if GST_VERSION_MAJOR > 0
471     caps = gst_caps_fixate(caps);
472 #endif
473 
474     gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
475     //printf("filtering with %s\n", gst_caps_to_string(caps));
476 }
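
// For illustration: on a GStreamer 1.x build, a call such as setFilter("width", G_TYPE_INT, 640)
// results in appsink caps roughly equivalent to the string below (a sketch; the exact
// serialization may differ between GStreamer versions):
//
//     video/x-raw, format=(string)BGR, width=(int)640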
477 
478 
479 /*!
480  * \brief CvCapture_GStreamer::removeFilter
481  * \param filter filter to remove
482  * remove the specified filter from the appsink template caps
483  */
void CvCapture_GStreamer::removeFilter(const char *filter)
485 {
486     if(!caps)
487         return;
488 
489 #if GST_VERSION_MAJOR > 0
490     if (! gst_caps_is_writable(caps))
491         caps = gst_caps_make_writable (caps);
492 #endif
493 
494     GstStructure *s = gst_caps_get_structure(caps, 0);
495     gst_structure_remove_field(s, filter);
496 
497     gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
498 }
499 
500 /*!
 * \brief CvCapture_GStreamer::newPad link dynamic pad
 * \param pad
 * \param data
 * decodebin creates pads based on stream information, which is not known upfront.
 * On receiving the pad-added signal, we connect it to the colorspace conversion element.
 */
void CvCapture_GStreamer::newPad(GstElement * /*elem*/,
508                                  GstPad     *pad,
509                                  gpointer    data)
510 {
511     GstPad *sinkpad;
512     GstElement *color = (GstElement *) data;
513 
514     sinkpad = gst_element_get_static_pad (color, "sink");
515     if (!sinkpad){
516         //fprintf(stderr, "Gstreamer: no pad named sink\n");
517         return;
518     }
519 
520     gst_pad_link (pad, sinkpad);
521     gst_object_unref (sinkpad);
522 }
523 
/*!
 * \brief CvCapture_GStreamer::open Open the given file with gstreamer
 * \param type CvCapture type. One of CV_CAP_GSTREAMER_*
 * \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
 * \return boolean. Specifies if opening was successful.
 *
 * In case of CV_CAP_GSTREAMER_V4L(2), a pipeline is constructed as follows:
 *    v4l2src ! COLOR_ELEM ! appsink
 *
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - a URI:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval
 *  is larger than the framerate period. (Unlike the URI or manual pipeline description, which
 *  assume a live source)
 *
 *  The pipeline is only started when the first frame is grabbed, because setting pipeline
 *  properties would be really slow if we needed to restart the pipeline over and over again.
 *
 *  TODO: the 'type' parameter is arguably unneeded. For v4l2, the filename 'v4l2:///dev/video0'
 *  can be used. I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually
 *  still using v4l (v1)?
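 *
 *  As an illustration (not exhaustive, and assuming the relevant GStreamer plugins are
 *  installed), all of the following strings are accepted by this function:
 *
 *      capture->open(CV_CAP_GSTREAMER_FILE, "video.avi");
 *      capture->open(CV_CAP_GSTREAMER_FILE, "file:///path/to/video.avi");
 *      capture->open(CV_CAP_GSTREAMER_FILE, "v4l2:///dev/video0");
 *      capture->open(CV_CAP_GSTREAMER_FILE, "videotestsrc ! videoconvert ! appsink name=opencvsink");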
 */
bool CvCapture_GStreamer::open( int type, const char* filename )
556 {
557     CV_FUNCNAME("cvCaptureFromCAM_GStreamer");
558 
559     __BEGIN__;
560 
561     gst_initializer::init();
562 
563     bool file = false;
564     bool stream = false;
565     bool manualpipeline = false;
566     char *uri = NULL;
567     uridecodebin = NULL;
568     GstElementFactory * testfac;
569     GstStateChangeReturn status;
570 
571     if (type == CV_CAP_GSTREAMER_V4L){
572         testfac = gst_element_factory_find("v4lsrc");
573         if (!testfac){
574             return false;
575         }
576         g_object_unref(G_OBJECT(testfac));
577         filename = "v4lsrc ! " COLOR_ELEM " ! appsink";
578     }
579     if (type == CV_CAP_GSTREAMER_V4L2){
580         testfac = gst_element_factory_find("v4l2src");
581         if (!testfac){
582             return false;
583         }
584         g_object_unref(G_OBJECT(testfac));
585         filename = "v4l2src ! " COLOR_ELEM " ! appsink";
586     }
587 
588 
589     // test if we have a valid uri. If so, open it with an uridecodebin
590     // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
593     if(!gst_uri_is_valid(filename))
594     {
595         uri = realpath(filename, NULL);
596         stream = false;
597         if(uri)
598         {
599             uri = g_filename_to_uri(uri, NULL, NULL);
600             if(uri)
601             {
602                 file = true;
603             }
604             else
605             {
606                 CV_WARN("GStreamer: Error opening file\n");
607                 close();
608                 return false;
609             }
610         }
611         else
612         {
613             GError *err = NULL;
614             uridecodebin = gst_parse_launch(filename, &err);
615             if(!uridecodebin)
616             {
617                 fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
618                 return false;
619             }
620             stream = true;
621             manualpipeline = true;
622         }
623     } else {
624         stream = true;
625         uri = g_strdup(filename);
626     }
627 
628     bool element_from_uri = false;
629     if(!uridecodebin)
630     {
631         // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
632         // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
633         // capture properties will not work.
634         // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
635         gchar * protocol = gst_uri_get_protocol(uri);
636         if (!strcasecmp(protocol , "v4l2"))
637         {
638 #if GST_VERSION_MAJOR == 0
639             uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
640 #else
641             uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
642 #endif
643             element_from_uri = true;
644         }else{
645             uridecodebin = gst_element_factory_make("uridecodebin", NULL);
646             g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
647         }
648         g_free(protocol);
649 
650         if(!uridecodebin) {
651             //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
652             close();
653             return false;
654         }
655     }
656 
657     if(manualpipeline)
658     {
659         GstIterator *it = NULL;
660 #if GST_VERSION_MAJOR == 0
661         it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
662         if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
663             CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
664             return false;
665         }
666 #else
667         it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));
668 
669         gboolean done = FALSE;
670         GstElement *element = NULL;
671         gchar* name = NULL;
672         GValue value = G_VALUE_INIT;
673 
674         while (!done) {
675           switch (gst_iterator_next (it, &value)) {
676             case GST_ITERATOR_OK:
677               element = GST_ELEMENT (g_value_get_object (&value));
678               name = gst_element_get_name(element);
679               if (name){
680                 if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
681                   sink = GST_ELEMENT ( gst_object_ref (element) );
682                   done = TRUE;
683                 }
684                 g_free(name);
685               }
686               g_value_unset (&value);
687 
688               break;
689             case GST_ITERATOR_RESYNC:
690               gst_iterator_resync (it);
691               break;
692             case GST_ITERATOR_ERROR:
693             case GST_ITERATOR_DONE:
694               done = TRUE;
695               break;
696           }
697         }
698         gst_iterator_free (it);
699 
700 
701         if (!sink){
702             CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
703             return false;
704         }
705 #endif
706         pipeline = uridecodebin;
707     }
708     else
709     {
710         pipeline = gst_pipeline_new(NULL);
        // COLOR_ELEM (ffmpegcolorspace on 0.10, videoconvert on 1.0-1.4, autovideoconvert on 1.5+)
        // automatically selects the correct colorspace conversion based on caps.
713         color = gst_element_factory_make(COLOR_ELEM, NULL);
714         sink = gst_element_factory_make("appsink", NULL);
715 
716         gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
717 
718         if(element_from_uri) {
719             if(!gst_element_link(uridecodebin, color)) {
                CV_ERROR(CV_StsError, "GStreamer: cannot link uridecodebin -> color\n");
721                 gst_object_unref(pipeline);
722                 pipeline = NULL;
723                 return false;
724             }
725         }else{
726             g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
727         }
728 
729         if(!gst_element_link(color, sink)) {
730             CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
731             gst_object_unref(pipeline);
732             pipeline = NULL;
733             return false;
734         }
735     }
736 
    //TODO: is a maximum of 1 buffer really enough?
738     gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
739     gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
740     //do not emit signals: all calls will be synchronous and blocking
741     gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);
742 
743 #if GST_VERSION_MAJOR == 0
744     caps = gst_caps_new_simple("video/x-raw-rgb",
745                                "bpp",        G_TYPE_INT, 24,
746                                "red_mask",   G_TYPE_INT, 0x0000FF,
747                                "green_mask", G_TYPE_INT, 0x00FF00,
748                                "blue_mask",  G_TYPE_INT, 0xFF0000,
749                                NULL);
750 #else
751     // support 1 and 3 channel 8 bit data, as well as bayer (also  1 channel, 8bit)
752     caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
753 #endif
754     gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
755     gst_caps_unref(caps);
756 
757     // For video files only: set pipeline to PAUSED state to get its duration
758     if (file)
759     {
760         status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED);
761         if (status == GST_STATE_CHANGE_ASYNC)
762         {
763             // wait for status update
764             GstState st1;
765             GstState st2;
766             status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
767         }
768         if (status == GST_STATE_CHANGE_FAILURE)
769         {
770             handleMessage(pipeline);
771             gst_object_unref(pipeline);
772             pipeline = NULL;
773             CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
774             return false;
775         }
776 
777         GstFormat format;
778 
779         format = GST_FORMAT_DEFAULT;
780 #if GST_VERSION_MAJOR == 0
781         if(!gst_element_query_duration(sink, &format, &duration))
782 #else
783         if(!gst_element_query_duration(sink, format, &duration))
784 #endif
785         {
786             handleMessage(pipeline);
787             CV_WARN("GStreamer: unable to query duration of stream");
788             duration = -1;
789         }
790 
791         GstPad* pad = gst_element_get_static_pad(color, "src");
792 #if GST_VERSION_MAJOR == 0
793         GstCaps* buffer_caps = gst_pad_get_caps(pad);
794 #else
795         GstCaps* buffer_caps = gst_pad_get_current_caps(pad);
796 #endif
797         const GstStructure *structure = gst_caps_get_structure (buffer_caps, 0);
798 
799         if (!gst_structure_get_int (structure, "width", &width))
800         {
801             CV_WARN("Cannot query video width\n");
802         }
803 
804         if (!gst_structure_get_int (structure, "height", &height))
805         {
            CV_WARN("Cannot query video height\n");
807         }
808 
809         gint num = 0, denom=1;
810         if(!gst_structure_get_fraction(structure, "framerate", &num, &denom))
811         {
812             CV_WARN("Cannot query video fps\n");
813         }
814 
815         fps = (double)num/(double)denom;
816 
817          // GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
818     }
819     else
820     {
821         duration = -1;
822         width = -1;
823         height = -1;
824         fps = -1;
825     }
826 
827     __END__;
828 
829     return true;
830 }
831 
/*!
 * \brief CvCapture_GStreamer::getProperty retrieve the requested property from the pipeline
 * \param propId requested property
 * \return property value
 *
 * There are two ways the properties can be retrieved. For seek-based properties we can query the pipeline.
 * For frame-based properties, we use the caps of the last received sample. This means that some properties
 * are not available until the first frame has been received.
 */
double CvCapture_GStreamer::getProperty( int propId ) const
842 {
843     GstFormat format;
844     gint64 value;
845     gboolean status;
846 
847 #if GST_VERSION_MAJOR == 0
848 #define FORMAT &format
849 #else
850 #define FORMAT format
851 #endif
852 
853     if(!pipeline) {
854         CV_WARN("GStreamer: no pipeline");
855         return false;
856     }
857 
858     switch(propId) {
859     case CV_CAP_PROP_POS_MSEC:
860         format = GST_FORMAT_TIME;
861         status = gst_element_query_position(sink, FORMAT, &value);
862         if(!status) {
863             CV_WARN("GStreamer: unable to query position of stream");
864             return false;
865         }
        return value * 1e-6; // nanoseconds to milliseconds
867     case CV_CAP_PROP_POS_FRAMES:
868         format = GST_FORMAT_DEFAULT;
869         status = gst_element_query_position(sink, FORMAT, &value);
870         if(!status) {
871             CV_WARN("GStreamer: unable to query position of stream");
872             return false;
873         }
874         return value;
875     case CV_CAP_PROP_POS_AVI_RATIO:
876         format = GST_FORMAT_PERCENT;
877         status = gst_element_query_position(sink, FORMAT, &value);
878         if(!status) {
879             CV_WARN("GStreamer: unable to query position of stream");
880             return false;
881         }
882         return ((double) value) / GST_FORMAT_PERCENT_MAX;
883     case CV_CAP_PROP_FRAME_WIDTH:
884         return width;
885     case CV_CAP_PROP_FRAME_HEIGHT:
886         return height;
887     case CV_CAP_PROP_FPS:
888         return fps;
889     case CV_CAP_PROP_FOURCC:
890         break;
891     case CV_CAP_PROP_FRAME_COUNT:
892         return duration;
893     case CV_CAP_PROP_FORMAT:
894     case CV_CAP_PROP_MODE:
895     case CV_CAP_PROP_BRIGHTNESS:
896     case CV_CAP_PROP_CONTRAST:
897     case CV_CAP_PROP_SATURATION:
898     case CV_CAP_PROP_HUE:
899     case CV_CAP_PROP_GAIN:
900     case CV_CAP_PROP_CONVERT_RGB:
901         break;
902     case CV_CAP_GSTREAMER_QUEUE_LENGTH:
903         if(!sink) {
904             CV_WARN("GStreamer: there is no sink yet");
905             return false;
906         }
907         return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
908     default:
909         CV_WARN("GStreamer: unhandled property");
910         break;
911     }
912 
913 #undef FORMAT
914 
915     return false;
916 }
917 
918 /*!
 * \brief CvCapture_GStreamer::setProperty
 * \param propId
 * \param value
 * \return success
 * Sets the desired property identified by propId to the given value. If the pipeline is running,
 * it is briefly stopped and restarted after the property has been set.
 */
bool CvCapture_GStreamer::setProperty( int propId, double value )
927 {
928     GstFormat format;
929     GstSeekFlags flags;
930 
931     if(!pipeline) {
932         CV_WARN("GStreamer: no pipeline");
933         return false;
934     }
935 
936     bool wasPlaying = this->isPipelinePlaying();
937     if (wasPlaying)
938         this->stopPipeline();
939 
940 
941     switch(propId) {
942     case CV_CAP_PROP_POS_MSEC:
943         format = GST_FORMAT_TIME;
944         flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
945         if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
946                                     flags, (gint64) (value * GST_MSECOND))) {
947             CV_WARN("GStreamer: unable to seek");
948         }
949         break;
950     case CV_CAP_PROP_POS_FRAMES:
951         format = GST_FORMAT_DEFAULT;
952         flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
953         if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
954                                     flags, (gint64) value)) {
955             CV_WARN("GStreamer: unable to seek");
956         }
957         break;
958     case CV_CAP_PROP_POS_AVI_RATIO:
959         format = GST_FORMAT_PERCENT;
960         flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
961         if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
962                                     flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
963             CV_WARN("GStreamer: unable to seek");
964         }
965         break;
966     case CV_CAP_PROP_FRAME_WIDTH:
967         if(value > 0)
968             setFilter("width", G_TYPE_INT, (int) value, 0);
969         else
970             removeFilter("width");
971         break;
972     case CV_CAP_PROP_FRAME_HEIGHT:
973         if(value > 0)
974             setFilter("height", G_TYPE_INT, (int) value, 0);
975         else
976             removeFilter("height");
977         break;
978     case CV_CAP_PROP_FPS:
        if(value > 0) {
            double num=0, denom = 1;
            toFraction(value, num, denom);
            setFilter("framerate", GST_TYPE_FRACTION, (int) num, (int) denom);
983         } else
984             removeFilter("framerate");
985         break;
986     case CV_CAP_PROP_FOURCC:
987     case CV_CAP_PROP_FRAME_COUNT:
988     case CV_CAP_PROP_FORMAT:
989     case CV_CAP_PROP_MODE:
990     case CV_CAP_PROP_BRIGHTNESS:
991     case CV_CAP_PROP_CONTRAST:
992     case CV_CAP_PROP_SATURATION:
993     case CV_CAP_PROP_HUE:
994     case CV_CAP_PROP_GAIN:
995     case CV_CAP_PROP_CONVERT_RGB:
996         break;
997     case CV_CAP_GSTREAMER_QUEUE_LENGTH:
998         if(!sink)
999             break;
1000         gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
1001         break;
1002     default:
1003         CV_WARN("GStreamer: unhandled property");
1004     }
1005 
1006     if (wasPlaying)
1007         this->startPipeline();
1008 
1009     return false;
1010 }
1011 
1012 /*!
1013  * \brief cvCreateCapture_GStreamer
1014  * \param type
1015  * \param filename
1016  * \return
1017  */
CvCapture* cvCreateCapture_GStreamer(int type, const char* filename )
1019 {
1020     CvCapture_GStreamer* capture = new CvCapture_GStreamer;
1021 
1022     if( capture->open( type, filename ))
1023         return capture;
1024 
1025     delete capture;
1026     return 0;
1027 }
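
/*
 * A minimal sketch of driving this backend through the old C API (illustrative only;
 * the property constants are the standard CV_CAP_PROP_* values handled above):
 *
 *     CvCapture* cap = cvCreateCapture_GStreamer(CV_CAP_GSTREAMER_FILE, "video.avi");
 *     if (cap)
 *     {
 *         double fps = cvGetCaptureProperty(cap, CV_CAP_PROP_FPS);
 *         cvSetCaptureProperty(cap, CV_CAP_PROP_POS_FRAMES, 100); // seek to frame 100
 *         IplImage* img = cvQueryFrame(cap);  // grab + retrieve in one call
 *         // ... use fps and img ...
 *         cvReleaseCapture(&cap);
 *     }
 */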
1028 
1029 
1030 /*!
1031  * \brief The CvVideoWriter_GStreamer class
1032  * Use Gstreamer to write video
1033  */
1034 class CvVideoWriter_GStreamer : public CvVideoWriter
1035 {
1036 public:
    CvVideoWriter_GStreamer() { init(); }
    virtual ~CvVideoWriter_GStreamer() { close(); }
1039 
1040     virtual bool open( const char* filename, int fourcc,
1041                        double fps, CvSize frameSize, bool isColor );
1042     virtual void close();
1043     virtual bool writeFrame( const IplImage* image );
1044 protected:
1045     void init();
1046     const char* filenameToMimetype(const char* filename);
1047     GstElement* pipeline;
1048     GstElement* source;
1049     GstElement* encodebin;
1050     GstElement* file;
1051 
1052     GstBuffer* buffer;
1053     int input_pix_fmt;
1054     int num_frames;
1055     double framerate;
1056 };
1057 
1058 /*!
1059  * \brief CvVideoWriter_GStreamer::init
1060  * initialise all variables
1061  */
void CvVideoWriter_GStreamer::init()
1063 {
1064     pipeline = NULL;
1065     source = NULL;
1066     encodebin = NULL;
1067     file = NULL;
1068     buffer = NULL;
1069 
1070     num_frames = 0;
1071     framerate = 0;
1072 }
1073 
1074 /*!
1075  * \brief CvVideoWriter_GStreamer::close
1076  * ends the pipeline by sending EOS and destroys the pipeline and all
1077  * elements afterwards
1078  */
void CvVideoWriter_GStreamer::close()
1080 {
1081     GstStateChangeReturn status;
1082     if (pipeline)
1083     {
1084         handleMessage(pipeline);
1085 
1086         if (gst_app_src_end_of_stream(GST_APP_SRC(source)) != GST_FLOW_OK)
1087         {
1088             CV_WARN("Cannot send EOS to GStreamer pipeline\n");
1089             return;
1090         }
1091 
1092         //wait for EOS to trickle down the pipeline. This will let all elements finish properly
1093         GstBus* bus = gst_element_get_bus(pipeline);
1094         GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
1095         if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR)
1096         {
1097             CV_WARN("Error during VideoWriter finalization\n");
1098             return;
1099         }
1100 
1101         if(msg != NULL)
1102         {
1103             gst_message_unref(msg);
1104             g_object_unref(G_OBJECT(bus));
1105         }
1106 
1107         status = gst_element_set_state (pipeline, GST_STATE_NULL);
1108         if (status == GST_STATE_CHANGE_ASYNC)
1109         {
1110             // wait for status update
1111             GstState st1;
1112             GstState st2;
1113             status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
1114         }
1115         if (status == GST_STATE_CHANGE_FAILURE)
1116         {
1117             handleMessage (pipeline);
1118             gst_object_unref (GST_OBJECT (pipeline));
1119             pipeline = NULL;
1120             CV_WARN("Unable to stop gstreamer pipeline\n");
1121             return;
1122         }
1123 
1124         gst_object_unref (GST_OBJECT (pipeline));
1125         pipeline = NULL;
1126     }
1127 }
1128 
1129 
1130 /*!
1131  * \brief CvVideoWriter_GStreamer::filenameToMimetype
1132  * \param filename
1133  * \return mimetype
 * Returns a container mime type for a given filename by looking at its extension
 */
const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
1137 {
1138     //get extension
1139     const char *ext = strrchr(filename, '.');
1140     if(!ext || ext == filename) return NULL;
1141     ext += 1; //exclude the dot
1142 
1143     // return a container mime based on the given extension.
    // gstreamer's function returns too many possibilities, which is not useful to us
1145 
1146     //return the appropriate mime
1147     if (strncasecmp(ext,"avi", 3) == 0)
1148         return (const char*)"video/x-msvideo";
1149 
1150     if (strncasecmp(ext,"mkv", 3) == 0 || strncasecmp(ext,"mk3d",4) == 0  || strncasecmp(ext,"webm",4) == 0 )
1151         return (const char*)"video/x-matroska";
1152 
1153     if (strncasecmp(ext,"wmv", 3) == 0)
1154         return (const char*)"video/x-ms-asf";
1155 
1156     if (strncasecmp(ext,"mov", 3) == 0)
1157         return (const char*)"video/x-quicktime";
1158 
1159     if (strncasecmp(ext,"ogg", 3) == 0 || strncasecmp(ext,"ogv", 3) == 0)
1160         return (const char*)"application/ogg";
1161 
1162     if (strncasecmp(ext,"rm", 3) == 0)
1163         return (const char*)"vnd.rn-realmedia";
1164 
1165     if (strncasecmp(ext,"swf", 3) == 0)
1166         return (const char*)"application/x-shockwave-flash";
1167 
1168     if (strncasecmp(ext,"mp4", 3) == 0)
1169         return (const char*)"video/x-quicktime, variant=(string)iso";
1170 
1171     //default to avi
1172     return (const char*)"video/x-msvideo";
1173 }
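
// e.g. filenameToMimetype("output.mkv") yields "video/x-matroska",
//      filenameToMimetype("clip.mp4")   yields "video/x-quicktime, variant=(string)iso",
//      and an unknown extension falls back to "video/x-msvideo" (AVI).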
1174 
1175 /*!
1176  * \brief CvVideoWriter_GStreamer::open
1177  * \param filename filename to output to
1178  * \param fourcc desired codec fourcc
1179  * \param fps desired framerate
1180  * \param frameSize the size of the expected frames
1181  * \param is_color color or grayscale
1182  * \return success
1183  *
 * We support 2 modes of operation. Either the user enters a filename and a fourcc
 * code, or enters a manual pipeline description like in CvCapture_GStreamer.
 * In the latter case, we just push frames on the appsrc with appropriate caps.
 * In the former case, we try to deduce the correct container from the filename,
 * and the correct encoder from the fourcc profile.
 *
 * If the file extension is not recognized, an AVI container is used.
 *
 */
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
1194                                     double fps, CvSize frameSize, bool is_color )
1195 {
1196     CV_FUNCNAME("CvVideoWriter_GStreamer::open");
1197 
1198     // check arguments
1199     assert (filename);
1200     assert (fps > 0);
1201     assert (frameSize.width > 0  &&  frameSize.height > 0);
1202 
1203     // init gstreamer
1204     gst_initializer::init();
1205 
1206     // init vars
1207     bool manualpipeline = true;
1208     int  bufsize = 0;
1209     GError *err = NULL;
1210     const char* mime = NULL;
1211     GstStateChangeReturn stateret;
1212 
1213     GstCaps* caps = NULL;
1214     GstCaps* videocaps = NULL;
1215 
1216 #if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
1217     GstCaps* containercaps = NULL;
1218     GstEncodingContainerProfile* containerprofile = NULL;
1219     GstEncodingVideoProfile* videoprofile = NULL;
1220 #endif
1221 
1222     GstIterator* it = NULL;
1223     gboolean done = FALSE;
1224     GstElement *element = NULL;
1225     gchar* name = NULL;
1226 
1227 #if GST_VERSION_MAJOR == 0
1228     GstElement* splitter = NULL;
1229     GstElement* combiner = NULL;
1230 #endif
1231 
1232     // we first try to construct a pipeline from the given string.
1233     // if that fails, we assume it is an ordinary filename
1234 
1235     __BEGIN__;
1236 
1237     encodebin = gst_parse_launch(filename, &err);
1238     manualpipeline = (encodebin != NULL);
1239 
1240     if(manualpipeline)
1241     {
1242 #if GST_VERSION_MAJOR == 0
1243         it = gst_bin_iterate_sources(GST_BIN(encodebin));
1244         if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
1246             return false;
1247         }
1248 #else
1249         it = gst_bin_iterate_sources (GST_BIN(encodebin));
1250         GValue value = G_VALUE_INIT;
1251 
1252         while (!done) {
1253           switch (gst_iterator_next (it, &value)) {
1254             case GST_ITERATOR_OK:
1255               element = GST_ELEMENT (g_value_get_object (&value));
1256               name = gst_element_get_name(element);
1257               if (name){
1258                 if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
1259                   source = GST_ELEMENT ( gst_object_ref (element) );
1260                   done = TRUE;
1261                 }
1262                 g_free(name);
1263               }
1264               g_value_unset (&value);
1265 
1266               break;
1267             case GST_ITERATOR_RESYNC:
1268               gst_iterator_resync (it);
1269               break;
1270             case GST_ITERATOR_ERROR:
1271             case GST_ITERATOR_DONE:
1272               done = TRUE;
1273               break;
1274           }
1275         }
1276         gst_iterator_free (it);
1277 
1278         if (!source){
1279             CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
1280             return false;
1281         }
1282 #endif
1283         pipeline = encodebin;
1284     }
1285     else
1286     {
1287         pipeline = gst_pipeline_new (NULL);
1288 
1289         // we just got a filename and a fourcc code.
1290         // first, try to guess the container from the filename
1291         //encodebin = gst_element_factory_make("encodebin", NULL);
1292 
        // proxy old fourcc ids that no longer exist. These were used in previous OpenCV
        // versions, but do not even exist in gstreamer any more
1295         if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
1296         if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
1297         if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');
1298 
1299 
1300         //create encoder caps from fourcc
1301 
1302         videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
1303         if (!videocaps){
1304             CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
1305         }
1306 
1307         //create container caps from file extension
1308         mime = filenameToMimetype(filename);
1309         if (!mime) {
1310             CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
1311         }
1312 
1313 #if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
1314         containercaps = gst_caps_from_string(mime);
1315 
1316         //create encodebin profile
1317         containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
1318         videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
1319         gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
1320 #endif
1321 
1322         //create pipeline elements
1323         encodebin = gst_element_factory_make("encodebin", NULL);
1324 
1325 #if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
1326         g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
1327 #endif
1328         source = gst_element_factory_make("appsrc", NULL);
1329         file = gst_element_factory_make("filesink", NULL);
1330         g_object_set(G_OBJECT(file), "location", filename, NULL);
1331     }
1332 
1333     if (is_color)
1334     {
1335         input_pix_fmt = GST_VIDEO_FORMAT_BGR;
1336         bufsize = frameSize.width * frameSize.height * 3;
1337 
1338 #if GST_VERSION_MAJOR == 0
1339         caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
1340                                          frameSize.width,
1341                                          frameSize.height,
1342                                          int(fps), 1,
1343                                          1, 1);
1344 #else
1345         caps = gst_caps_new_simple("video/x-raw",
1346                                    "format", G_TYPE_STRING, "BGR",
1347                                    "width", G_TYPE_INT, frameSize.width,
1348                                    "height", G_TYPE_INT, frameSize.height,
1349                                    "framerate", GST_TYPE_FRACTION, int(fps), 1,
1350                                    NULL);
1351         caps = gst_caps_fixate(caps);
1352 
1353 #endif
1354 
1355     }
1356     else
1357     {
1358 #if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
1359         input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
1360         bufsize = frameSize.width * frameSize.height;
1361 
1362 #if GST_VERSION_MAJOR == 0
1363         caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
1364                                          frameSize.width,
1365                                          frameSize.height,
1366                                          int(fps), 1,
1367                                          1, 1);
1368 #else
1369         caps = gst_caps_new_simple("video/x-raw",
1370                                    "format", G_TYPE_STRING, "GRAY8",
1371                                    "width", G_TYPE_INT, frameSize.width,
1372                                    "height", G_TYPE_INT, frameSize.height,
1373                                    "framerate", GST_TYPE_FRACTION, int(fps), 1,
1374                                    NULL);
1375         caps = gst_caps_fixate(caps);
1376 #endif
1377 #else
1378         CV_Assert(!"Gstreamer 0.10.29 or newer is required for grayscale input");
1379 #endif
1380     }
1381 
1382     gst_app_src_set_caps(GST_APP_SRC(source), caps);
1383     gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
1384     gst_app_src_set_size (GST_APP_SRC(source), -1);
1385 
1386     g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
1387     g_object_set(G_OBJECT(source), "block", 1, NULL);
1388     g_object_set(G_OBJECT(source), "is-live", 0, NULL);
1389 
1390 
1391     if(!manualpipeline)
1392     {
1393         g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
1394         gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
1395         if(!gst_element_link_many(source, encodebin, file, NULL)) {
1396             CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
1397         }
1398     }
1399 
1400 #if GST_VERSION_MAJOR == 0
    // HACK: remove streamsplitter and streamcombiner from
    // encodebin pipeline to prevent early EOF event handling
    // We always fetch BGR or gray-scale frames, so the combiner -> splitter
    // edge in the graph is useless.
1405     it = gst_bin_iterate_recurse (GST_BIN(encodebin));
1406     while (!done) {
1407       switch (gst_iterator_next (it, (void**)&element)) {
1408         case GST_ITERATOR_OK:
1409           name = gst_element_get_name(element);
1410           if (strstr(name, "streamsplitter"))
1411             splitter = element;
1412           else if (strstr(name, "streamcombiner"))
1413             combiner = element;
1414           break;
1415         case GST_ITERATOR_RESYNC:
1416           gst_iterator_resync (it);
1417           break;
1418         case GST_ITERATOR_ERROR:
1419           done = true;
1420           break;
1421         case GST_ITERATOR_DONE:
1422           done = true;
1423           break;
1424       }
1425     }
1426 
1427     gst_iterator_free (it);
1428 
1429     if (splitter && combiner)
1430     {
1431         gst_element_unlink(splitter, combiner);
1432 
1433         GstPad* src  = gst_element_get_pad(combiner, "src");
1434         GstPad* sink = gst_element_get_pad(combiner, "encodingsink");
1435 
1436         GstPad* srcPeer = gst_pad_get_peer(src);
1437         GstPad* sinkPeer = gst_pad_get_peer(sink);
1438 
1439         gst_pad_unlink(sinkPeer, sink);
1440         gst_pad_unlink(src, srcPeer);
1441 
1442         gst_pad_link(sinkPeer, srcPeer);
1443 
1444         src = gst_element_get_pad(splitter, "encodingsrc");
1445         sink = gst_element_get_pad(splitter, "sink");
1446 
1447         srcPeer = gst_pad_get_peer(src);
1448         sinkPeer = gst_pad_get_peer(sink);
1449 
1450         gst_pad_unlink(sinkPeer, sink);
1451         gst_pad_unlink(src, srcPeer);
1452 
1453         gst_pad_link(sinkPeer, srcPeer);
1454     }
1455 #endif
1456 
1457     stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
1458     if(stateret  == GST_STATE_CHANGE_FAILURE) {
1459         handleMessage(pipeline);
1460         CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
1461     }
1462 
1463     framerate = fps;
1464     num_frames = 0;
1465 
1466     handleMessage(pipeline);
1467 
1468     __END__;
1469 
1470     return true;
1471 }
1472 
1473 
1474 /*!
1475  * \brief CvVideoWriter_GStreamer::writeFrame
1476  * \param image
1477  * \return
1478  * Pushes the given frame on the pipeline.
1479  * The timestamp for the buffer is generated from the framerate set in open
1480  * and ensures a smooth video
1481  */
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::writeFrame");
1485 
1486     GstClockTime duration, timestamp;
1487     GstFlowReturn ret;
1488     int size;
1489 
1490     __BEGIN__;
1491 
1492     handleMessage(pipeline);
1493 
1494     if (input_pix_fmt == GST_VIDEO_FORMAT_BGR) {
1495         if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
1496             CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
1497         }
1498     }
1499 #if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
1500     else if (input_pix_fmt == GST_VIDEO_FORMAT_GRAY8) {
1501         if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
1502             CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
1503         }
1504     }
1505 #endif
1506     else {
1507         CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs BGR or grayscale images\n");
1508         return false;
1509     }
1510 
1511     size = image->imageSize;
1512     duration = ((double)1/framerate) * GST_SECOND;
1513     timestamp = num_frames * duration;
1514 
1515     //gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy
1516 #if GST_VERSION_MAJOR == 0
1517     buffer = gst_buffer_try_new_and_alloc (size);
1518     if (!buffer)
1519     {
1520         CV_ERROR(CV_StsBadSize, "Cannot create GStreamer buffer");
1521     }
1522 
1523     memcpy(GST_BUFFER_DATA (buffer), (guint8*)image->imageData, size);
1524     GST_BUFFER_DURATION(buffer) = duration;
1525     GST_BUFFER_TIMESTAMP(buffer) = timestamp;
1526 #else
1527     buffer = gst_buffer_new_allocate (NULL, size, NULL);
1528     GstMapInfo info;
    gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_WRITE);
1530     memcpy(info.data, (guint8*)image->imageData, size);
1531     gst_buffer_unmap(buffer, &info);
1532     GST_BUFFER_DURATION(buffer) = duration;
1533     GST_BUFFER_PTS(buffer) = timestamp;
1534     GST_BUFFER_DTS(buffer) = timestamp;
1535 #endif
    //set the current frame number
1537     GST_BUFFER_OFFSET(buffer) =  num_frames;
1538 
1539     ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
1540     if (ret != GST_FLOW_OK) {
1541         CV_WARN("Error pushing buffer to GStreamer pipeline");
1542         return false;
1543     }
1544 
1545     //GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
1546 
1547     ++num_frames;
1548 
1549     __END__;
1550 
1551     return true;
1552 }
1553 
1554 /*!
1555  * \brief cvCreateVideoWriter_GStreamer
1556  * \param filename
1557  * \param fourcc
1558  * \param fps
1559  * \param frameSize
1560  * \param isColor
1561  * \return
1562  * Constructor
1563  */
CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps,
                                             CvSize frameSize, int isColor )
1566 {
1567     CvVideoWriter_GStreamer* wrt = new CvVideoWriter_GStreamer;
1568     if( wrt->open(filename, fourcc, fps,frameSize, isColor))
1569         return wrt;
1570 
1571     delete wrt;
1572     return 0;
1573 }
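
/*
 * A minimal sketch of the two supported ways to open a writer (illustrative only; the
 * manual pipeline assumes the x264 and matroska plugins are available):
 *
 *     // 1. filename + fourcc: container is deduced from the extension, encoder from the fourcc
 *     CvVideoWriter* wr1 = cvCreateVideoWriter_GStreamer("out.avi", CV_FOURCC('M','J','P','G'),
 *                                                        30, cvSize(640, 480), 1);
 *
 *     // 2. manual pipeline description: frames are pushed into the named appsrc
 *     CvVideoWriter* wr2 = cvCreateVideoWriter_GStreamer(
 *         "appsrc ! videoconvert ! x264enc ! matroskamux ! filesink location=out.mkv",
 *         0, 30, cvSize(640, 480), 1);
 *
 *     cvWriteFrame(wr1, img);   // img: 8-bit, 3-channel BGR IplImage of the declared size
 *     cvReleaseVideoWriter(&wr1);
 */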
1574 
1575 // utility functions
1576 
1577 /*!
1578  * \brief toFraction
1579  * \param decimal
1580  * \param numerator
1581  * \param denominator
1582  * Split a floating point value into numerator and denominator
1583  */
void toFraction(double decimal, double &numerator, double &denominator)
{
    double dummy;
    double whole;
    decimal = modf (decimal, &whole);
    for (denominator = 1; denominator<=100; denominator++){
        if (modf(denominator * decimal, &dummy) < 0.001f)
            break;
    }
    numerator = (whole + decimal) * denominator; // put the whole part back in
}
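
// e.g. (with exactly representable inputs) toFraction(25.0, num, denom) yields num = 25, denom = 1,
//      and toFraction(7.25, num, denom) yields num = 29, denom = 4.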
1595 
1596 
1597 /*!
1598  * \brief handleMessage
1599  * Handles gstreamer bus messages. Mainly for debugging purposes and ensuring clean shutdown on error
1600  */
void handleMessage(GstElement * pipeline)
1602 {
1603     CV_FUNCNAME("handlemessage");
1604 
1605     GError *err = NULL;
1606     gchar *debug = NULL;
1607     GstBus* bus = NULL;
1608     GstStreamStatusType tp;
1609     GstElement * elem = NULL;
1610     GstMessage* msg  = NULL;
1611 
1612     __BEGIN__;
1613     bus = gst_element_get_bus(pipeline);
1614 
1615     while(gst_bus_have_pending(bus)) {
1616         msg = gst_bus_pop(bus);
1617 
1618         //printf("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));
1619 
1620         if(gst_is_missing_plugin_message(msg))
1621         {
1622             CV_ERROR(CV_StsError, "GStreamer: your gstreamer installation is missing a required plugin\n");
1623         }
1624         else
1625         {
1626             switch (GST_MESSAGE_TYPE (msg)) {
1627             case GST_MESSAGE_STATE_CHANGED:
1628                 GstState oldstate, newstate, pendstate;
1629                 gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
1630                 //fprintf(stderr, "state changed from %s to %s (pending: %s)\n", gst_element_state_get_name(oldstate),
1631                 //                gst_element_state_get_name(newstate), gst_element_state_get_name(pendstate));
1632                 break;
1633             case GST_MESSAGE_ERROR:
1634                 gst_message_parse_error(msg, &err, &debug);
1635                 fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
1636                                 gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);
1637 
1638                 g_error_free(err);
1639                 g_free(debug);
1640 
1641                 gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
1642                 break;
1643             case GST_MESSAGE_EOS:
1644                 //fprintf(stderr, "reached the end of the stream.");
1645                 break;
1646             case GST_MESSAGE_STREAM_STATUS:
1647                 gst_message_parse_stream_status(msg,&tp,&elem);
1648                 //fprintf(stderr, "stream status: elem %s, %i\n", GST_ELEMENT_NAME(elem), tp);
1649                 break;
1650             default:
1651                 //fprintf(stderr, "unhandled message %s\n",GST_MESSAGE_TYPE_NAME(msg));
1652                 break;
1653             }
1654         }
1655         gst_message_unref(msg);
1656     }
1657 
1658     gst_object_unref(GST_OBJECT(bus));
1659 
    __END__;
1661 }
1662