Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/videoio/src/cap_gstreamer.cpp
16354 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// Intel License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2008, 2011, Nils Hasler, all rights reserved.
14
// Third party copyrights are property of their respective owners.
15
//
16
// Redistribution and use in source and binary forms, with or without modification,
17
// are permitted provided that the following conditions are met:
18
//
19
// * Redistribution's of source code must retain the above copyright notice,
20
// this list of conditions and the following disclaimer.
21
//
22
// * Redistribution's in binary form must reproduce the above copyright notice,
23
// this list of conditions and the following disclaimer in the documentation
24
// and/or other materials provided with the distribution.
25
//
26
// * The name of Intel Corporation may not be used to endorse or promote products
27
// derived from this software without specific prior written permission.
28
//
29
// This software is provided by the copyright holders and contributors "as is" and
30
// any express or implied warranties, including, but not limited to, the implied
31
// warranties of merchantability and fitness for a particular purpose are disclaimed.
32
// In no event shall the Intel Corporation or contributors be liable for any direct,
33
// indirect, incidental, special, exemplary, or consequential damages
34
// (including, but not limited to, procurement of substitute goods or services;
35
// loss of use, data, or profits; or business interruption) however caused
36
// and on any theory of liability, whether in contract, strict liability,
37
// or tort (including negligence or otherwise) arising in any way out of
38
// the use of this software, even if advised of the possibility of such damage.
39
//
40
//M*/
41
42
/*!
43
* \file cap_gstreamer.cpp
44
* \author Nils Hasler <[email protected]>
45
* Max-Planck-Institut Informatik
46
* \author Dirk Van Haerenborgh <[email protected]>
47
*
48
* \brief Use GStreamer to read/write video
49
*/
50
#include "precomp.hpp"
51
#include <iostream>
52
using namespace std;
53
#ifndef _MSC_VER
54
#include <unistd.h>
55
#endif
56
#include <string.h>
57
#include <gst/gst.h>
58
#include <gst/gstbuffer.h>
59
#include <gst/video/video.h>
60
#include <gst/app/gstappsink.h>
61
#include <gst/app/gstappsrc.h>
62
#include <gst/riff/riff-media.h>
63
#include <gst/pbutils/missing-plugins.h>
64
65
#define VERSION_NUM(major, minor, micro) (major * 1000000 + minor * 1000 + micro)
66
#define FULL_GST_VERSION VERSION_NUM(GST_VERSION_MAJOR, GST_VERSION_MINOR, GST_VERSION_MICRO)
67
68
#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
69
#include <gst/pbutils/encoding-profile.h>
70
//#include <gst/base/gsttypefindhelper.h>
71
#endif
72
73
74
#ifdef NDEBUG
75
#define CV_WARN(message)
76
#else
77
#define CV_WARN(message) fprintf(stderr, "OpenCV | GStreamer warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
78
#endif
79
80
#if GST_VERSION_MAJOR == 0
81
#define COLOR_ELEM "ffmpegcolorspace"
82
#define COLOR_ELEM_NAME "ffmpegcsp"
83
#else
84
#define COLOR_ELEM "videoconvert"
85
#define COLOR_ELEM_NAME COLOR_ELEM
86
#endif
87
88
#if defined(_WIN32) || defined(_WIN64)
89
#if defined(__MINGW32__)
90
// MinGW lacks POSIX realpath(); emulate it with the CRT's _fullpath().
// Note: unlike POSIX realpath(), _fullpath() does not require the path to exist.
inline char *realpath(const char *path, char *resolved_path)
{
    return _fullpath(resolved_path,path,PATH_MAX);
}
94
#endif
95
#define snprintf _snprintf
96
#define vsnprintf _vsnprintf
97
#define strcasecmp _stricmp
98
#define strncasecmp _strnicmp
99
#include <sys/stat.h>
100
#endif
101
102
void toFraction(double decimal, double &numerator, double &denominator);
103
void handleMessage(GstElement * pipeline);
104
105
using namespace cv;
106
107
static cv::Mutex gst_initializer_mutex;
108
109
/*!
110
* \brief The gst_initializer class
111
* Initializes gstreamer once in the whole process
112
*/
113
class gst_initializer
114
{
115
public:
116
static void init()
117
{
118
gst_initializer_mutex.lock();
119
static gst_initializer init;
120
gst_initializer_mutex.unlock();
121
}
122
private:
123
gst_initializer()
124
{
125
gst_init(NULL, NULL);
126
guint major, minor, micro, nano;
127
gst_version(&major, &minor, &micro, &nano);
128
if (GST_VERSION_MAJOR != major)
129
{
130
CV_WARN("incompatible gstreamer version");
131
}
132
// gst_debug_set_active(1);
133
// gst_debug_set_colored(1);
134
// gst_debug_set_default_threshold(GST_LEVEL_INFO);
135
}
136
};
137
138
// Map an OpenCV capture property id to the matching v4l2src GObject property
// name; returns an empty string when there is no mapping.
inline static string get_gst_propname(int propId)
{
    if (propId == CV_CAP_PROP_BRIGHTNESS)
        return "brightness";
    if (propId == CV_CAP_PROP_CONTRAST)
        return "contrast";
    if (propId == CV_CAP_PROP_SATURATION)
        return "saturation";
    if (propId == CV_CAP_PROP_HUE)
        return "hue";
    return string();
}
149
150
inline static bool is_gst_element_exists(const std::string & name)
151
{
152
GstElementFactory * testfac = gst_element_factory_find(name.c_str());
153
if (!testfac)
154
return false;
155
g_object_unref(G_OBJECT(testfac));
156
return true;
157
}
158
159
//==================================================================================================
160
161
// GStreamer-backed implementation of cv::IVideoCapture.
// Owns a GStreamer pipeline terminated by an appsink and pulls frames from it.
class GStreamerCapture CV_FINAL : public IVideoCapture
{
private:
    GstElement* pipeline;      // top-level bin; NULL while the capture is closed
    GstElement* v4l2src;       // v4l source element, if one was found (manual pipelines)
    GstElement* sink;          // appsink frames are pulled from
#if GST_VERSION_MAJOR > 0
    GstSample* sample;         // last sample pulled by grabFrame()
#else
    void * sample; // unused
    GstBuffer* buffer;         // last buffer pulled by grabFrame() (0.10 API)
#endif
    GstCaps* caps;             // caps restriction applied to the appsink
    gint64 duration;           // stream duration (frames), -1 when unknown
    gint width;                // negotiated frame width, -1 until known
    gint height;               // negotiated frame height, -1 until known
    gint channels;             // channel count of the negotiated format
    double fps;                // negotiated frame rate, -1 until known
    bool isPosFramesSupported; // pipeline answers frame-position queries
    bool isPosFramesEmulated;  // frame position is counted in software instead
    gint64 emulatedFrameNumber;// software frame counter (when emulated)
    bool isOutputByteBuffer;   // raw byte-stream output (e.g. undecoded jpeg)

public:
    GStreamerCapture();
    ~GStreamerCapture();
    virtual bool grabFrame() CV_OVERRIDE;
    virtual bool retrieveFrame(int /*unused*/, OutputArray dst) CV_OVERRIDE;
    virtual double getProperty(int propId) const CV_OVERRIDE;
    virtual bool setProperty(int propId, double value) CV_OVERRIDE;
    virtual bool isOpened() const CV_OVERRIDE;
    virtual int getCaptureDomain() CV_OVERRIDE { return cv::CAP_GSTREAMER; }
    bool open(int id);
    bool open(const String &filename_);
    static void newPad(GstElement * /*elem*/, GstPad *pad, gpointer data);

protected:
    bool determineFrameDims(Size & sz);
    bool isPipelinePlaying();
    void startPipeline();
    void stopPipeline();
    void restartPipeline();
    void setFilter(const char *prop, int type, int v1, int v2);
    void removeFilter(const char *filter);
};
206
207
/*!
 * \brief GStreamerCapture constructor
 * Default-constructs an unopened capture: all GStreamer handles NULL and all
 * stream metrics marked unknown (-1 / 0 / false).
 */
GStreamerCapture::GStreamerCapture() :
    pipeline(NULL), v4l2src(NULL), sink(NULL), sample(NULL),
#if GST_VERSION_MAJOR == 0
    buffer(NULL),
#endif
    caps(NULL),
    duration(-1), width(-1), height(-1), channels(0), fps(-1),
    isPosFramesSupported(false),
    isPosFramesEmulated(false),
    emulatedFrameNumber(-1),
    isOutputByteBuffer(false)
{
}
224
225
/*!
226
* \brief CvCapture_GStreamer::close
227
* Closes the pipeline and destroys all instances
228
*/
229
GStreamerCapture::~GStreamerCapture()
230
{
231
if (isPipelinePlaying())
232
stopPipeline();
233
if (pipeline && GST_IS_ELEMENT(pipeline))
234
{
235
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
236
gst_object_unref(GST_OBJECT(pipeline));
237
}
238
}
239
240
/*!
241
* \brief CvCapture_GStreamer::grabFrame
242
* \return
243
* Grabs a sample from the pipeline, awaiting consumation by retreiveFrame.
244
* The pipeline is started if it was not running yet
245
*/
246
bool GStreamerCapture::grabFrame()
247
{
248
if(!pipeline)
249
return false;
250
251
// start the pipeline if it was not in playing state yet
252
if(!this->isPipelinePlaying())
253
this->startPipeline();
254
255
// bail out if EOS
256
if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
257
return false;
258
259
#if GST_VERSION_MAJOR == 0
260
if(buffer)
261
gst_buffer_unref(buffer);
262
buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
263
if(!buffer)
264
return false;
265
#else
266
if(sample)
267
gst_sample_unref(sample);
268
sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
269
if(!sample)
270
return false;
271
#endif
272
273
if (isPosFramesEmulated)
274
emulatedFrameNumber++;
275
276
return true;
277
}
278
279
/*!
280
* \brief CvCapture_GStreamer::retrieveFrame
281
* \return IplImage pointer. [Transfer Full]
282
* Retrieve the previously grabbed buffer, and wrap it in an IPLImage structure
283
*/
284
bool GStreamerCapture::retrieveFrame(int, OutputArray dst)
285
{
286
#if GST_VERSION_MAJOR == 0
287
if (!buffer)
288
return false;
289
#else
290
if(!sample)
291
return false;
292
#endif
293
Size sz;
294
if (!determineFrameDims(sz))
295
return false;
296
297
// gstreamer expects us to handle the memory at this point
298
// so we can just wrap the raw buffer and be done with it
299
#if GST_VERSION_MAJOR == 0
300
Mat src(sz, CV_8UC1, (uchar*)GST_BUFFER_DATA(buffer));
301
src.copyTo(dst);
302
#else
303
GstBuffer * buf = gst_sample_get_buffer(sample);
304
if (!buf)
305
return false;
306
GstMapInfo info;
307
if (!gst_buffer_map(buf, &info, GST_MAP_READ))
308
{
309
//something weird went wrong here. abort. abort.
310
CV_WARN("Failed to map GStreamerbuffer to system memory");
311
return false;
312
}
313
314
{
315
Mat src;
316
if (isOutputByteBuffer)
317
src = Mat(Size(info.size, 1), CV_8UC1, info.data);
318
else
319
src = Mat(sz, CV_MAKETYPE(CV_8U, channels), info.data);
320
CV_Assert(src.isContinuous());
321
src.copyTo(dst);
322
}
323
gst_buffer_unmap(buf, &info);
324
#endif
325
326
return true;
327
}
328
329
/*!
 * \brief GStreamerCapture::determineFrameDims
 * \param sz receives the buffer size in pixels (height already scaled by 1.5
 *        for planar YUV formats whose buffer carries extra chroma rows)
 * \return false when the caps of the last grabbed sample cannot be interpreted
 * Reads width/height/format from the sample caps and updates the cached
 * width/height/channels members accordingly.
 */
bool GStreamerCapture::determineFrameDims(Size &sz)
{
#if GST_VERSION_MAJOR == 0
    // NOTE(review): gst_buffer_get_caps() returns a new reference that is
    // never released on any path here -- confirm and add gst_caps_unref()
    // if 0.10 support is still required.
    GstCaps * frame_caps = gst_buffer_get_caps(buffer);
#else
    GstCaps * frame_caps = gst_sample_get_caps(sample);
#endif
    // Fix: guard against NULL caps before GST_CAPS_IS_SIMPLE dereferences
    // them (gst_sample_get_caps may return NULL).
    if (!frame_caps || !GST_CAPS_IS_SIMPLE(frame_caps))
        return false;

    GstStructure* structure = gst_caps_get_structure(frame_caps, 0);

    // bail out if width or height are missing
    if (!gst_structure_get_int(structure, "width", &width)
        || !gst_structure_get_int(structure, "height", &height))
        return false;

    sz = Size(width, height);

#if GST_VERSION_MAJOR > 0
    const gchar* name = gst_structure_get_name(structure);

    if (!name)
        return false;

    // we support 11 types of data:
    //     video/x-raw, format=BGR   -> 8bit, 3 channels
    //     video/x-raw, format=GRAY8 -> 8bit, 1 channel
    //     video/x-raw, format=UYVY  -> 8bit, 2 channel
    //     video/x-raw, format=YUY2  -> 8bit, 2 channel
    //     video/x-raw, format=YVYU  -> 8bit, 2 channel
    //     video/x-raw, format=NV12  -> 8bit, 1 channel (height is 1.5x larger than true height)
    //     video/x-raw, format=NV21  -> 8bit, 1 channel (height is 1.5x larger than true height)
    //     video/x-raw, format=YV12  -> 8bit, 1 channel (height is 1.5x larger than true height)
    //     video/x-raw, format=I420  -> 8bit, 1 channel (height is 1.5x larger than true height)
    //     video/x-bayer             -> 8bit, 1 channel
    //     image/jpeg                -> 8bit, mjpeg: buffer_size x 1 x 1
    // bayer data is never decoded, the user is responsible for that
    // everything is 8 bit, so we just test the caps for bit depth
    if (strcasecmp(name, "video/x-raw") == 0)
    {
        const gchar* format = gst_structure_get_string(structure, "format");
        if (!format)
            return false;
        if (strcasecmp(format, "BGR") == 0)
        {
            channels = 3;
        }
        else if( (strcasecmp(format, "UYVY") == 0) || (strcasecmp(format, "YUY2") == 0) || (strcasecmp(format, "YVYU") == 0) )
        {
            channels = 2;
        }
        else if( (strcasecmp(format, "NV12") == 0) || (strcasecmp(format, "NV21") == 0) || (strcasecmp(format, "YV12") == 0) || (strcasecmp(format, "I420") == 0) )
        {
            channels = 1;
            sz.height = sz.height * 3 / 2;  // planar YUV: buffer holds 1.5x rows
        }
        else if(strcasecmp(format, "GRAY8") == 0)
        {
            channels = 1;
        }
    }
    else if (strcasecmp(name, "video/x-bayer") == 0)
    {
        channels = 1;
    }
    else if(strcasecmp(name, "image/jpeg") == 0)
    {
        // the correct size will be set once the first frame arrives
        channels = 1;
        isOutputByteBuffer = true;
    }
#else
    // we support only video/x-raw, format=BGR -> 8bit, 3 channels
    channels = 3;
#endif
    return true;
}
408
409
/*!
410
* \brief CvCapture_GStreamer::isPipelinePlaying
411
* \return if the pipeline is currently playing.
412
*/
413
bool GStreamerCapture::isPipelinePlaying()
414
{
415
GstState current, pending;
416
GstClockTime timeout = 5*GST_SECOND;
417
GstStateChangeReturn ret = gst_element_get_state(pipeline, &current, &pending, timeout);
418
if (!ret)
419
{
420
CV_WARN("GStreamer: unable to query pipeline state");
421
return false;
422
}
423
return current == GST_STATE_PLAYING;
424
}
425
426
/*!
427
* \brief CvCapture_GStreamer::startPipeline
428
* Start the pipeline by setting it to the playing state
429
*/
430
void GStreamerCapture::startPipeline()
431
{
432
//fprintf(stderr, "relinked, pausing\n");
433
GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
434
if (status == GST_STATE_CHANGE_ASYNC)
435
{
436
// wait for status update
437
status = gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
438
}
439
if (status == GST_STATE_CHANGE_FAILURE)
440
{
441
handleMessage(pipeline);
442
gst_object_unref(pipeline);
443
pipeline = NULL;
444
CV_WARN("GStreamer: unable to start pipeline");
445
return;
446
}
447
448
if (isPosFramesEmulated)
449
emulatedFrameNumber = 0;
450
451
//printf("state now playing\n");
452
handleMessage(pipeline);
453
}
454
455
/*!
456
* \brief CvCapture_GStreamer::stopPipeline
457
* Stop the pipeline by setting it to NULL
458
*/
459
void GStreamerCapture::stopPipeline()
460
{
461
//fprintf(stderr, "restarting pipeline, going to ready\n");
462
if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
463
{
464
CV_WARN("GStreamer: unable to stop pipeline");
465
gst_object_unref(pipeline);
466
pipeline = NULL;
467
}
468
}
469
470
/*!
 * \brief GStreamerCapture::restartPipeline
 * Restart the pipeline: drain any pending bus messages, then stop and
 * start it again.
 */
void GStreamerCapture::restartPipeline()
{
    handleMessage(pipeline);

    this->stopPipeline();
    this->startPipeline();
}
481
482
/*!
483
* \brief CvCapture_GStreamer::setFilter
484
* \param prop the property name
485
* \param type glib property type
486
* \param v1 the value
487
* \param v2 second value of property type requires it, else NULL
488
* Filter the output formats by setting appsink caps properties
489
*/
490
void GStreamerCapture::setFilter(const char *prop, int type, int v1, int v2)
491
{
492
//printf("GStreamer: setFilter \n");
493
if(!caps || !( GST_IS_CAPS (caps) ))
494
{
495
if(type == G_TYPE_INT)
496
{
497
#if GST_VERSION_MAJOR == 0
498
caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL);
499
#else
500
caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL);
501
#endif
502
}
503
else
504
{
505
#if GST_VERSION_MAJOR == 0
506
caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL);
507
#else
508
caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL);
509
#endif
510
}
511
}
512
else
513
{
514
#if GST_VERSION_MAJOR > 0
515
if (! gst_caps_is_writable(caps))
516
caps = gst_caps_make_writable (caps);
517
#endif
518
if(type == G_TYPE_INT){
519
gst_caps_set_simple(caps, prop, type, v1, NULL);
520
}else{
521
gst_caps_set_simple(caps, prop, type, v1, v2, NULL);
522
}
523
}
524
525
#if GST_VERSION_MAJOR > 0
526
caps = gst_caps_fixate(caps);
527
#endif
528
529
gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
530
//printf("filtering with %s\n", gst_caps_to_string(caps));
531
}
532
533
/*!
534
* \brief CvCapture_GStreamer::removeFilter
535
* \param filter filter to remove
536
* remove the specified filter from the appsink template caps
537
*/
538
void GStreamerCapture::removeFilter(const char *filter)
539
{
540
if(!caps)
541
return;
542
543
#if GST_VERSION_MAJOR > 0
544
if (! gst_caps_is_writable(caps))
545
caps = gst_caps_make_writable (caps);
546
#endif
547
548
GstStructure *s = gst_caps_get_structure(caps, 0);
549
gst_structure_remove_field(s, filter);
550
551
gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
552
}
553
554
/*!
555
* \brief CvCapture_GStreamer::newPad link dynamic padd
556
* \param pad
557
* \param data
558
* decodebin creates pads based on stream information, which is not known upfront
559
* on receiving the pad-added signal, we connect it to the colorspace conversion element
560
*/
561
void GStreamerCapture::newPad(GstElement *, GstPad *pad, gpointer data)
562
{
563
GstPad *sinkpad;
564
GstElement *color = (GstElement *) data;
565
566
sinkpad = gst_element_get_static_pad (color, "sink");
567
if (!sinkpad){
568
//fprintf(stderr, "Gstreamer: no pad named sink\n");
569
return;
570
}
571
572
gst_pad_link (pad, sinkpad);
573
gst_object_unref (sinkpad);
574
}
575
576
// A capture is "open" exactly when a pipeline instance exists.
bool GStreamerCapture::isOpened() const
{
    return pipeline != NULL;
}
580
581
/*!
582
* \brief CvCapture_GStreamer::open Open the given file with gstreamer
583
* \param type CvCapture type. One of CV_CAP_GSTREAMER_*
584
* \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
585
* \return boolean. Specifies if opening was successful.
586
*
587
* In case of CV_CAP_GSTREAMER_V4L(2), a pipelin is constructed as follows:
588
* v4l2src ! autoconvert ! appsink
589
*
590
*
591
* The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
592
*
593
* - a normal filesystem path:
594
* e.g. video.avi or /path/to/video.avi or C:\\video.avi
595
* - an uri:
596
* e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
597
* - a gstreamer pipeline description:
598
* e.g. videotestsrc ! videoconvert ! appsink
599
* the appsink name should be either 'appsink0' (the default) or 'opencvsink'
600
*
601
* When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval
602
* larger than the framerate period. (Unlike the uri or manual pipeline description, which assume
603
* a live source)
604
*
605
* The pipeline will only be started whenever the first frame is grabbed. Setting pipeline properties
606
* is really slow if we need to restart the pipeline over and over again.
607
*
608
* TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used.
609
* I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
610
*
611
*/
612
bool GStreamerCapture::open(int id)
613
{
614
gst_initializer::init();
615
616
if (!is_gst_element_exists("v4l2src"))
617
return false;
618
std::ostringstream desc;
619
desc << "v4l2src device=/dev/video" << id
620
<< " ! " << COLOR_ELEM
621
<< " ! appsink";
622
return open(desc.str());
623
}
624
625
bool GStreamerCapture::open(const String &filename_)
{
    gst_initializer::init();

    const gchar * filename = filename_.c_str();

    bool file = false;
    //bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    GstElement* uridecodebin = NULL;
    GstElement* color = NULL;
    GstStateChangeReturn status;

    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if (!gst_uri_is_valid(filename))
    {
#ifdef _MSC_VER
        uri = new char[2048];
        DWORD pathSize = GetFullPathName(filename, 2048, uri, NULL);
        struct stat buf;
        if (pathSize == 0 || stat(uri, &buf) != 0)
        {
            delete[] uri;
            uri = NULL;
        }
#else
        uri = realpath(filename, NULL);
#endif
        //stream = false;
        if(uri)
        {
            // NOTE(review): the path buffer held in 'uri' is overwritten here
            // without being released (new[]/malloc result), and the string
            // returned by g_filename_to_uri() is never g_free()d either --
            // confirm and plug these leaks.
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(uri)
            {
                file = true;
            }
            else
            {
                CV_WARN("GStreamer: Error opening file\n");
                CV_WARN(filename);
                CV_WARN(uri);
                return false;
            }
        }
        else
        {
            // Not an existing file: try to interpret the string as a
            // gst-launch style pipeline description.
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin)
            {
                fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                return false;
            }
            //stream = true;
            manualpipeline = true;
        }
    }
    else
    {
        //stream = true;
        uri = g_strdup(filename);
    }

    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
#if GST_VERSION_MAJOR == 0
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
#else
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
#endif
            element_from_uri = true;
        }
        else
        {
            uridecodebin = gst_element_factory_make("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
        }
        g_free(protocol);

        if(!uridecodebin)
        {
            CV_WARN("Can not parse GStreamer URI bin");
            return false;
        }
    }

    if (manualpipeline)
    {
        // Walk the user-supplied pipeline and pick out the elements we need:
        // the appsink, the colorspace converter and (optionally) a v4l source.
        GstIterator *it = gst_bin_iterate_elements(GST_BIN(uridecodebin));

        GstElement *element = NULL;
        gboolean done = false;
        gchar* name = NULL;
#if GST_VERSION_MAJOR > 0
        GValue value = G_VALUE_INIT;
#endif

        while (!done)
        {
#if GST_VERSION_MAJOR > 0
            switch (gst_iterator_next (it, &value))
            {
                case GST_ITERATOR_OK:
                    element = GST_ELEMENT (g_value_get_object (&value));
#else
            switch (gst_iterator_next (it, (gpointer *)&element))
            {
                case GST_ITERATOR_OK:
#endif
                    name = gst_element_get_name(element);
                    if (name)
                    {
                        // Elements are recognized by (sub)name match.
                        if (strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL)
                        {
                            sink = GST_ELEMENT ( gst_object_ref (element) );
                        }
                        else if (strstr(name, COLOR_ELEM_NAME) != NULL)
                        {
                            color = GST_ELEMENT ( gst_object_ref (element) );
                        }
                        else if (strstr(name, "v4l") != NULL)
                        {
                            v4l2src = GST_ELEMENT ( gst_object_ref (element) );
                        }
                        g_free(name);

                        done = sink && color && v4l2src;
                    }
#if GST_VERSION_MAJOR > 0
                    g_value_unset (&value);
#endif

                    break;
                case GST_ITERATOR_RESYNC:
                    gst_iterator_resync (it);
                    break;
                case GST_ITERATOR_ERROR:
                case GST_ITERATOR_DONE:
                    done = TRUE;
                    break;
            }
        }
        gst_iterator_free (it);

        if (!sink)
        {
            CV_WARN("GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }

        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new(NULL);
        // videoconvert (in 0.10: ffmpegcolorspace, in 1.x autovideoconvert)
        // automatically selects the correct colorspace conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);

        if(element_from_uri)
        {
            if(!gst_element_link(uridecodebin, color))
            {
                CV_WARN("cannot link color -> sink");
                gst_object_unref(pipeline);
                pipeline = NULL;
                return false;
            }
        }
        else
        {
            // decodebin exposes pads late; link them in the pad-added callback.
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }

        if(!gst_element_link(color, sink))
        {
            CV_WARN("GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            pipeline = NULL;
            return false;
        }
    }

    //TODO: is 1 single buffer really high enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
//    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), FALSE);
//    gst_base_sink_set_sync(GST_BASE_SINK(sink), FALSE);

#if GST_VERSION_MAJOR == 0
    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "bpp",        G_TYPE_INT, 24,
                               "red_mask",   G_TYPE_INT, 0x0000FF,
                               "green_mask", G_TYPE_INT, 0x00FF00,
                               "blue_mask",  G_TYPE_INT, 0xFF0000,
                               NULL);
#else

    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg");

    if(manualpipeline){
        // If the upstream elements cannot produce any of our preferred
        // formats, fall back to the common YUV layouts instead.
        GstPad* sink_pad = gst_element_get_static_pad(sink, "sink");
        GstCaps* peer_caps = gst_pad_peer_query_caps(sink_pad,NULL);
        if (!gst_caps_can_intersect(caps, peer_caps)) {
            gst_caps_unref(caps);
            caps = gst_caps_from_string("video/x-raw, format=(string){UYVY,YUY2,YVYU,NV12,NV21,YV12,I420}");
        }
        gst_object_unref(sink_pad);
        gst_caps_unref(peer_caps);
    }

#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);

    {
        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init");

        // Files are only PAUSED here; the pipeline is fully started on the
        // first grab so that property changes before that stay cheap.
        status = gst_element_set_state(GST_ELEMENT(pipeline),
                                       file ? GST_STATE_PAUSED : GST_STATE_PLAYING);
        if (status == GST_STATE_CHANGE_ASYNC)
        {
            // wait for status update
            status = gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
        }
        if (status == GST_STATE_CHANGE_FAILURE)
        {
            GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error");
            handleMessage(pipeline);
            gst_object_unref(pipeline);
            pipeline = NULL;
            CV_WARN("GStreamer: unable to start pipeline\n");
            return false;
        }

        GstFormat format;

        format = GST_FORMAT_DEFAULT;
#if GST_VERSION_MAJOR == 0
        if(!gst_element_query_duration(sink, &format, &duration))
#else
        if(!gst_element_query_duration(sink, format, &duration))
#endif
        {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query duration of stream");
            duration = -1;
        }

        handleMessage(pipeline);

        // Read frame geometry and rate from the negotiated sink caps.
        GstPad* pad = gst_element_get_static_pad(sink, "sink");
#if GST_VERSION_MAJOR == 0
        GstCaps* buffer_caps = gst_pad_get_caps(pad);
#else
        GstCaps* buffer_caps = gst_pad_get_current_caps(pad);
#endif
        const GstStructure *structure = gst_caps_get_structure (buffer_caps, 0);

        if (!gst_structure_get_int (structure, "width", &width))
        {
            CV_WARN("Cannot query video width\n");
        }

        if (!gst_structure_get_int (structure, "height", &height))
        {
            CV_WARN("Cannot query video height\n");
        }

        gint num = 0, denom=1;
        if(!gst_structure_get_fraction(structure, "framerate", &num, &denom))
        {
            CV_WARN("Cannot query video fps\n");
        }

        fps = (double)num/(double)denom;

        {
            // Probe whether the pipeline can answer frame-position queries;
            // if not, CAP_PROP_POS_FRAMES is emulated by counting grabs.
            GstFormat format_;
            gint64 value_ = -1;
            gboolean status_;

            format_ = GST_FORMAT_DEFAULT;
#if GST_VERSION_MAJOR == 0
#define FORMAT &format_
#else
#define FORMAT format_
#endif
            status_ = gst_element_query_position(sink, FORMAT, &value_);
#undef FORMAT
            if (!status_ || value_ != 0 || duration < 0)
            {
                CV_WARN(cv::format("Cannot query video position: status=%d value=%lld duration=%lld\n",
                                   (int)status_, (long long int)value_, (long long int)duration).c_str());
                isPosFramesSupported = false;
                isPosFramesEmulated = true;
                emulatedFrameNumber = 0;
            }
            else
                isPosFramesSupported = true;
        }

        GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
    }

    return true;
}
948
949
/*!
 * \brief GStreamerCapture::getProperty retrieve the requested property from the pipeline
 * \param propId requested property
 * \return property value, or 0 when the property is unavailable/unsupported
 *
 * There are two ways the properties can be retrieved. For seek-based properties we can query the pipeline.
 * For frame-based properties, we use the caps of the last received sample. This means that some properties
 * are not available until a first frame was received.
 */
double GStreamerCapture::getProperty(int propId) const
{
    GstFormat format;
    gint64 value;
    gboolean status;

    // GStreamer 0.10 takes the format by pointer, 1.x by value.
#if GST_VERSION_MAJOR == 0
#define FORMAT &format
#else
#define FORMAT format
#endif

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return 0;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query position of stream");
            return 0;
        }
        return value * 1e-6; // nano seconds to milli seconds
    case CV_CAP_PROP_POS_FRAMES:
        if (!isPosFramesSupported)
        {
            // Fall back to the software frame counter when the pipeline
            // cannot answer frame-position queries.
            if (isPosFramesEmulated)
                return emulatedFrameNumber;
            return 0; // TODO getProperty() "unsupported" value should be changed
        }
        format = GST_FORMAT_DEFAULT;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query position of stream");
            return 0;
        }
        return value;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query position of stream");
            return 0;
        }
        return ((double) value) / GST_FORMAT_PERCENT_MAX;
    case CV_CAP_PROP_FRAME_WIDTH:
        return width;
    case CV_CAP_PROP_FRAME_HEIGHT:
        return height;
    case CV_CAP_PROP_FPS:
        return fps;
    case CV_CAP_PROP_FRAME_COUNT:
        return duration;
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
        // Color controls are only available when a v4l2 source element was
        // identified while opening the pipeline.
        if (v4l2src)
        {
            string propName = get_gst_propname(propId);
            if (!propName.empty())
            {
                gint32 val = 0;
                g_object_get(G_OBJECT(v4l2src), propName.c_str(), &val, NULL);
                return static_cast<double>(val);
            }
        }
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink)
        {
            CV_WARN("there is no sink yet");
            return 0;
        }
        return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
    default:
        CV_WARN("GStreamer: unhandled property");
        break;
    }

#undef FORMAT

    return 0;
}
1048
1049
/*!
1050
* \brief CvCapture_GStreamer::setProperty
1051
* \param propId
1052
* \param value
1053
* \return success
1054
* Sets the desired property id with val. If the pipeline is running,
1055
* it is briefly stopped and started again after the property was set
1056
*/
1057
bool GStreamerCapture::setProperty(int propId, double value)
1058
{
1059
const GstSeekFlags flags = (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);
1060
1061
if(!pipeline)
1062
{
1063
CV_WARN("no pipeline");
1064
return false;
1065
}
1066
1067
bool wasPlaying = this->isPipelinePlaying();
1068
if (wasPlaying)
1069
this->stopPipeline();
1070
1071
switch(propId)
1072
{
1073
case CV_CAP_PROP_POS_MSEC:
1074
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_TIME,
1075
flags, (gint64) (value * GST_MSECOND))) {
1076
handleMessage(pipeline);
1077
CV_WARN("GStreamer: unable to seek");
1078
}
1079
else
1080
{
1081
if (isPosFramesEmulated)
1082
{
1083
if (value == 0)
1084
{
1085
emulatedFrameNumber = 0;
1086
return true;
1087
}
1088
else
1089
{
1090
isPosFramesEmulated = false; // reset frame counter emulation
1091
}
1092
}
1093
}
1094
break;
1095
case CV_CAP_PROP_POS_FRAMES:
1096
{
1097
if (!isPosFramesSupported)
1098
{
1099
if (isPosFramesEmulated)
1100
{
1101
if (value == 0)
1102
{
1103
restartPipeline();
1104
emulatedFrameNumber = 0;
1105
return true;
1106
}
1107
}
1108
return false;
1109
CV_WARN("unable to seek");
1110
}
1111
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_DEFAULT,
1112
flags, (gint64) value)) {
1113
handleMessage(pipeline);
1114
CV_WARN("GStreamer: unable to seek");
1115
break;
1116
}
1117
// wait for status update
1118
gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1119
return true;
1120
}
1121
case CV_CAP_PROP_POS_AVI_RATIO:
1122
if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_PERCENT,
1123
flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
1124
handleMessage(pipeline);
1125
CV_WARN("GStreamer: unable to seek");
1126
}
1127
else
1128
{
1129
if (isPosFramesEmulated)
1130
{
1131
if (value == 0)
1132
{
1133
emulatedFrameNumber = 0;
1134
return true;
1135
}
1136
else
1137
{
1138
isPosFramesEmulated = false; // reset frame counter emulation
1139
}
1140
}
1141
}
1142
break;
1143
case CV_CAP_PROP_FRAME_WIDTH:
1144
if(value > 0)
1145
setFilter("width", G_TYPE_INT, (int) value, 0);
1146
else
1147
removeFilter("width");
1148
break;
1149
case CV_CAP_PROP_FRAME_HEIGHT:
1150
if(value > 0)
1151
setFilter("height", G_TYPE_INT, (int) value, 0);
1152
else
1153
removeFilter("height");
1154
break;
1155
case CV_CAP_PROP_FPS:
1156
if(value > 0) {
1157
double num=0, denom = 1;
1158
toFraction(value, num, denom);
1159
setFilter("framerate", GST_TYPE_FRACTION, value, denom);
1160
} else
1161
removeFilter("framerate");
1162
break;
1163
case CV_CAP_PROP_BRIGHTNESS:
1164
case CV_CAP_PROP_CONTRAST:
1165
case CV_CAP_PROP_SATURATION:
1166
case CV_CAP_PROP_HUE:
1167
if (v4l2src)
1168
{
1169
string propName = get_gst_propname(propId);
1170
if (!propName.empty())
1171
{
1172
gint32 val = cv::saturate_cast<gint32>(value);
1173
g_object_set(G_OBJECT(v4l2src), propName.c_str(), &val, NULL);
1174
return true;
1175
}
1176
}
1177
return false;
1178
case CV_CAP_PROP_GAIN:
1179
case CV_CAP_PROP_CONVERT_RGB:
1180
break;
1181
case CV_CAP_GSTREAMER_QUEUE_LENGTH:
1182
{
1183
if(!sink)
1184
{
1185
CV_WARN("there is no sink yet");
1186
return false;
1187
}
1188
gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
1189
return true;
1190
}
1191
default:
1192
CV_WARN("GStreamer: unhandled property");
1193
}
1194
1195
if (wasPlaying)
1196
this->startPipeline();
1197
1198
return false;
1199
}
1200
1201
1202
Ptr<IVideoCapture> cv::createGStreamerCapture(const String& filename)
{
    // Build a GStreamer-backed capture for the given filename / pipeline
    // description; hand back an empty pointer when it cannot be opened.
    Ptr<GStreamerCapture> result = makePtr<GStreamerCapture>();
    if (!result || !result->open(filename))
        return Ptr<IVideoCapture>();
    return result;
}
1209
1210
Ptr<IVideoCapture> cv::createGStreamerCapture(int index)
{
    // Build a GStreamer-backed capture for the camera with the given index;
    // hand back an empty pointer when the device cannot be opened.
    Ptr<GStreamerCapture> result = makePtr<GStreamerCapture>();
    if (!result || !result->open(index))
        return Ptr<IVideoCapture>();
    return result;
}
1217
1218
//==================================================================================================
1219
1220
/*!
1221
* \brief The CvVideoWriter_GStreamer class
1222
* Use Gstreamer to write video
1223
*/
1224
class CvVideoWriter_GStreamer : public CvVideoWriter
{
public:
    // Start in a safe "not opened" state: all handles null, counters zero.
    CvVideoWriter_GStreamer()
        : pipeline(0), source(0), encodebin(0), file(0), buffer(0), input_pix_fmt(0),
          num_frames(0), framerate(0)
    {
    }
    virtual ~CvVideoWriter_GStreamer() CV_OVERRIDE { close(); }

    int getCaptureDomain() const CV_OVERRIDE { return cv::CAP_GSTREAMER; }

    // Build the writer pipeline, either from a manual gst-launch style
    // description or from filename + fourcc (container deduced from the
    // extension, encoder from the fourcc).
    virtual bool open( const char* filename, int fourcc,
                       double fps, CvSize frameSize, bool isColor );
    // Send EOS, wait for it to propagate, then tear the pipeline down.
    virtual void close();
    // Copy one frame into a GstBuffer and push it on the appsrc.
    virtual bool writeFrame( const IplImage* image ) CV_OVERRIDE;
protected:
    // Map a filename extension to a container mime type (NULL if none).
    const char* filenameToMimetype(const char* filename);
    GstElement* pipeline;    // complete writer pipeline (owned)
    GstElement* source;      // appsrc element frames are pushed into
    GstElement* encodebin;   // encoder/muxer bin (or the whole manual pipeline)
    GstElement* file;        // filesink (automatic mode only)

    GstBuffer* buffer;       // last buffer handed to gst_app_src_push_buffer
    int input_pix_fmt;       // GstVideoFormat expected from the caller
    int num_frames;          // frames written so far; drives the timestamps
    double framerate;        // fps given to open(); used for buffer durations
};
1252
1253
/*!
1254
* \brief CvVideoWriter_GStreamer::close
1255
* ends the pipeline by sending EOS and destroys the pipeline and all
1256
* elements afterwards
1257
*/
1258
void CvVideoWriter_GStreamer::close()
1259
{
1260
GstStateChangeReturn status;
1261
if (pipeline)
1262
{
1263
handleMessage(pipeline);
1264
1265
if (gst_app_src_end_of_stream(GST_APP_SRC(source)) != GST_FLOW_OK)
1266
{
1267
CV_WARN("Cannot send EOS to GStreamer pipeline\n");
1268
return;
1269
}
1270
1271
//wait for EOS to trickle down the pipeline. This will let all elements finish properly
1272
GstBus* bus = gst_element_get_bus(pipeline);
1273
GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
1274
if (!msg || GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR)
1275
{
1276
CV_WARN("Error during VideoWriter finalization\n");
1277
if(msg != NULL)
1278
{
1279
gst_message_unref(msg);
1280
g_object_unref(G_OBJECT(bus));
1281
}
1282
return;
1283
}
1284
1285
gst_message_unref(msg);
1286
g_object_unref(G_OBJECT(bus));
1287
1288
status = gst_element_set_state (pipeline, GST_STATE_NULL);
1289
if (status == GST_STATE_CHANGE_ASYNC)
1290
{
1291
// wait for status update
1292
GstState st1;
1293
GstState st2;
1294
status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
1295
}
1296
if (status == GST_STATE_CHANGE_FAILURE)
1297
{
1298
handleMessage (pipeline);
1299
gst_object_unref (GST_OBJECT (pipeline));
1300
pipeline = NULL;
1301
CV_WARN("Unable to stop gstreamer pipeline\n");
1302
return;
1303
}
1304
1305
gst_object_unref (GST_OBJECT (pipeline));
1306
pipeline = NULL;
1307
}
1308
}
1309
1310
1311
/*!
1312
* \brief CvVideoWriter_GStreamer::filenameToMimetype
1313
* \param filename
1314
* \return mimetype
1315
* Resturns a container mime type for a given filename by looking at it's extension
1316
*/
1317
const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
{
    // Locate the extension: the text after the last dot. A missing dot, or
    // a dot as the very first character, means no usable extension.
    const char *ext = strrchr(filename, '.');
    if (!ext || ext == filename)
        return NULL;
    ++ext; // skip the dot itself

    // Explicit extension -> container mime table. GStreamer's own lookup
    // returns too many possibilities to be useful here. The compare length
    // mirrors the original prefix matching exactly.
    static const struct { const char *ext; size_t len; const char *mime; } table[] = {
        { "avi",  3, "video/x-msvideo" },
        { "mkv",  3, "video/x-matroska" },
        { "mk3d", 4, "video/x-matroska" },
        { "webm", 4, "video/x-matroska" },
        { "wmv",  3, "video/x-ms-asf" },
        { "mov",  3, "video/x-quicktime" },
        { "ogg",  3, "application/ogg" },
        { "ogv",  3, "application/ogg" },
        { "rm",   3, "vnd.rn-realmedia" },
        { "swf",  3, "application/x-shockwave-flash" },
        { "mp4",  3, "video/x-quicktime, variant=(string)iso" },
    };

    for (size_t i = 0; i < sizeof(table) / sizeof(table[0]); ++i)
    {
        if (strncasecmp(ext, table[i].ext, table[i].len) == 0)
            return table[i].mime;
    }

    // Unknown extension: fall back to the AVI container.
    return "video/x-msvideo";
}
1355
1356
/*!
1357
* \brief CvVideoWriter_GStreamer::open
1358
* \param filename filename to output to
1359
* \param fourcc desired codec fourcc
1360
* \param fps desired framerate
1361
* \param frameSize the size of the expected frames
1362
* \param is_color color or grayscale
1363
* \return success
1364
*
1365
* We support 2 modes of operation. Either the user enters a filename and a fourcc
1366
* code, or enters a manual pipeline description like in CvVideoCapture_Gstreamer.
1367
* In the latter case, we just push frames on the appsink with appropriate caps.
1368
* In the former case, we try to deduce the correct container from the filename,
1369
* and the correct encoder from the fourcc profile.
1370
*
1371
* If the file extension did was not recognize, an avi container is used
1372
*
1373
*/
1374
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    // check arguments
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0 && frameSize.height > 0);

    // init gstreamer
    gst_initializer::init();

    // init vars
    bool manualpipeline = true;
    int bufsize = 0;
    GError *err = NULL;
    const char* mime = NULL;
    GstStateChangeReturn stateret;

    GstCaps* caps = NULL;
    GstCaps* videocaps = NULL;

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
    // Encoding profiles (used to configure encodebin) exist from 0.10.32 on.
    GstCaps* containercaps = NULL;
    GstEncodingContainerProfile* containerprofile = NULL;
    GstEncodingVideoProfile* videoprofile = NULL;
#endif

    GstIterator* it = NULL;
    gboolean done = FALSE;
    GstElement *element = NULL;
    gchar* name = NULL;

#if GST_VERSION_MAJOR == 0
    GstElement* splitter = NULL;
    GstElement* combiner = NULL;
#endif

    // we first try to construct a pipeline from the given string.
    // if that fails, we assume it is an ordinary filename

    // NOTE(review): `err` is neither inspected nor freed; a failed parse is
    // detected only through the NULL return value.
    encodebin = gst_parse_launch(filename, &err);
    manualpipeline = (encodebin != NULL);

    if(manualpipeline)
    {
#if GST_VERSION_MAJOR == 0
        // 0.x API: take the first source element of the bin as the appsrc.
        it = gst_bin_iterate_sources(GST_BIN(encodebin));
        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_WARN("GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        // 1.x API: walk all source elements and pick the one whose name
        // contains "opencvsrc" or "appsrc" — that is where frames are pushed.
        it = gst_bin_iterate_sources (GST_BIN(encodebin));
        GValue value = G_VALUE_INIT;

        while (!done) {
            switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
                element = GST_ELEMENT (g_value_get_object (&value));
                name = gst_element_get_name(element);
                if (name){
                    if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
                        // Keep our own reference: `value` is unset below.
                        source = GST_ELEMENT ( gst_object_ref (element) );
                        done = TRUE;
                    }
                    g_free(name);
                }
                g_value_unset (&value);

                break;
            case GST_ITERATOR_RESYNC:
                gst_iterator_resync (it);
                break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
                done = TRUE;
                break;
            }
        }
        gst_iterator_free (it);

        if (!source){
            CV_WARN("GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#endif
        pipeline = encodebin;
    }
    else
    {
        // Automatic mode: build appsrc ! encodebin ! filesink ourselves.
        pipeline = gst_pipeline_new (NULL);

        // we just got a filename and a fourcc code.
        // first, try to guess the container from the filename
        //encodebin = gst_element_factory_make("encodebin", NULL);

        //proxy old non existing fourcc ids. These were used in previous opencv versions,
        //but do not even exist in gstreamer any more
        if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
        if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
        if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');


        //create encoder caps from fourcc

        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
        if (!videocaps){
            CV_WARN("Gstreamer Opencv backend does not support this codec.");
            return false;
        }

        //create container caps from file extension
        mime = filenameToMimetype(filename);
        if (!mime) {
            CV_WARN("Gstreamer Opencv backend does not support this file type.");
            return false;
        }

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        containercaps = gst_caps_from_string(mime);

        //create encodebin profile
        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
#endif

        //create pipeline elements
        encodebin = gst_element_factory_make("encodebin", NULL);

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
#endif
        source = gst_element_factory_make("appsrc", NULL);
        file = gst_element_factory_make("filesink", NULL);
        g_object_set(G_OBJECT(file), "location", filename, NULL);
    }

    // Choose the caps describing the frames we will push on the appsrc.
    if (fourcc == CV_FOURCC('M','J','P','G') && frameSize.height == 1)
    {
#if GST_VERSION_MAJOR > 0
        // Pass-through mode: the caller supplies already-encoded JPEG data
        // as a 1-pixel-high 8UC1 "image".
        input_pix_fmt = GST_VIDEO_FORMAT_ENCODED;
        caps = gst_caps_new_simple("image/jpeg",
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#else
        CV_WARN("Gstreamer 0.10 Opencv backend does not support writing encoded MJPEG data.");
        return false;
#endif
    }
    else if(is_color)
    {
        input_pix_fmt = GST_VIDEO_FORMAT_BGR;
        bufsize = frameSize.width * frameSize.height * 3;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);

#endif

    }
    else
    {
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
        input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
        bufsize = frameSize.width * frameSize.height;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY8",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
#else
        CV_Error(Error::StsError,
                 "Gstreamer 0.10.29 or newer is required for grayscale input");
#endif
    }

    // Configure the appsrc: fixed caps, stream mode, unknown total size,
    // time-based format, blocking pushes, non-live source.
    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_size (GST_APP_SRC(source), -1);

    g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(source), "block", 1, NULL);
    g_object_set(G_OBJECT(source), "is-live", 0, NULL);


    if(!manualpipeline)
    {
        // Wire up the automatic pipeline we assembled above.
        g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
        gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
        if(!gst_element_link_many(source, encodebin, file, NULL)) {
            CV_WARN("GStreamer: cannot link elements\n");
            return false;
        }
    }

#if GST_VERSION_MAJOR == 0
    // HACK: remove streamsplitter and streamcombiner from
    // encodebin pipeline to prevent early EOF event handling
    // We always fetch BGR or gray-scale frames, so combiner->spliter
    // endge in graph is useless.
    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
    while (!done) {
        switch (gst_iterator_next (it, (void**)&element)) {
        case GST_ITERATOR_OK:
            name = gst_element_get_name(element);
            if (strstr(name, "streamsplitter"))
                splitter = element;
            else if (strstr(name, "streamcombiner"))
                combiner = element;
            break;
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync (it);
            break;
        case GST_ITERATOR_ERROR:
            done = true;
            break;
        case GST_ITERATOR_DONE:
            done = true;
            break;
        }
    }

    gst_iterator_free (it);

    if (splitter && combiner)
    {
        gst_element_unlink(splitter, combiner);

        // Re-route the pads so both elements are bypassed entirely.
        GstPad* src = gst_element_get_pad(combiner, "src");
        GstPad* sink = gst_element_get_pad(combiner, "encodingsink");

        GstPad* srcPeer = gst_pad_get_peer(src);
        GstPad* sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);

        src = gst_element_get_pad(splitter, "encodingsrc");
        sink = gst_element_get_pad(splitter, "sink");

        srcPeer = gst_pad_get_peer(src);
        sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);
    }
#endif

    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline");

    // Start the pipeline; frames are accepted once it reaches PLAYING.
    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        CV_WARN("GStreamer: cannot put pipeline to play\n");
        return false;
    }

    framerate = fps;
    num_frames = 0;

    handleMessage(pipeline);

    return true;
}
1667
1668
1669
/*!
1670
* \brief CvVideoWriter_GStreamer::writeFrame
1671
* \param image
1672
* \return
1673
* Pushes the given frame on the pipeline.
1674
* The timestamp for the buffer is generated from the framerate set in open
1675
* and ensures a smooth video
1676
*/
1677
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
1678
{
1679
GstClockTime duration, timestamp;
1680
GstFlowReturn ret;
1681
int size;
1682
1683
handleMessage(pipeline);
1684
1685
#if GST_VERSION_MAJOR > 0
1686
if (input_pix_fmt == GST_VIDEO_FORMAT_ENCODED) {
1687
if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U || image->height != 1) {
1688
CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U, nChannels = 1 and height = 1.");
1689
return false;
1690
}
1691
}
1692
else
1693
#endif
1694
if(input_pix_fmt == GST_VIDEO_FORMAT_BGR) {
1695
if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
1696
CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
1697
return false;
1698
}
1699
}
1700
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
1701
else if (input_pix_fmt == GST_VIDEO_FORMAT_GRAY8) {
1702
if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
1703
CV_WARN("cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
1704
return false;
1705
}
1706
}
1707
#endif
1708
else {
1709
CV_WARN("cvWriteFrame() needs BGR or grayscale images\n");
1710
return false;
1711
}
1712
1713
size = image->imageSize;
1714
duration = ((double)1/framerate) * GST_SECOND;
1715
timestamp = num_frames * duration;
1716
1717
//gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy
1718
#if GST_VERSION_MAJOR == 0
1719
buffer = gst_buffer_try_new_and_alloc (size);
1720
if (!buffer)
1721
{
1722
CV_WARN("Cannot create GStreamer buffer");
1723
}
1724
1725
memcpy(GST_BUFFER_DATA (buffer), (guint8*)image->imageData, size);
1726
GST_BUFFER_DURATION(buffer) = duration;
1727
GST_BUFFER_TIMESTAMP(buffer) = timestamp;
1728
#else
1729
buffer = gst_buffer_new_allocate (NULL, size, NULL);
1730
GstMapInfo info;
1731
gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ);
1732
memcpy(info.data, (guint8*)image->imageData, size);
1733
gst_buffer_unmap(buffer, &info);
1734
GST_BUFFER_DURATION(buffer) = duration;
1735
GST_BUFFER_PTS(buffer) = timestamp;
1736
GST_BUFFER_DTS(buffer) = timestamp;
1737
#endif
1738
//set the current number in the frame
1739
GST_BUFFER_OFFSET(buffer) = num_frames;
1740
1741
ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
1742
if (ret != GST_FLOW_OK) {
1743
CV_WARN("Error pushing buffer to GStreamer pipeline");
1744
return false;
1745
}
1746
1747
//GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
1748
1749
++num_frames;
1750
1751
return true;
1752
}
1753
1754
/*!
1755
* \brief cvCreateVideoWriter_GStreamer
1756
* \param filename
1757
* \param fourcc
1758
* \param fps
1759
* \param frameSize
1760
* \param isColor
1761
* \return
1762
* Constructor
1763
*/
1764
CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps,
                                             CvSize frameSize, int isColor )
{
    // Allocate a writer and hand it back only if it opens successfully;
    // otherwise destroy it and signal failure with a null pointer.
    CvVideoWriter_GStreamer* writer = new CvVideoWriter_GStreamer;
    if (!writer->open(filename, fourcc, fps, frameSize, isColor))
    {
        delete writer;
        return 0;
    }
    return writer;
}
1774
1775
// utility functions
1776
1777
/*!
1778
* \brief toFraction
1779
* \param decimal
1780
* \param numerator
1781
* \param denominator
1782
* Split a floating point value into numerator and denominator
1783
*/
1784
/*!
 * \brief toFraction
 * \param decimal value to convert
 * \param numerator [out] numerator of the resulting fraction
 * \param denominator [out] denominator of the resulting fraction (1..100)
 * Split a floating point value into numerator and denominator such that
 * numerator / denominator ~= decimal.
 */
void toFraction(double decimal, double &numerator, double &denominator)
{
    double dummy;
    double whole;
    // Only the fractional part drives the denominator search.
    double frac = modf (decimal, &whole);
    // Find the smallest denominator <= 100 that turns the fractional part
    // into (almost) an integer. Accept remainders just *below* an integer
    // too: binary rounding makes e.g. 100 * 0.97 come out as 96.9999...,
    // which the old `< 0.001` test wrongly rejected.
    for (denominator = 1; denominator <= 100; denominator++){
        double rem = modf(denominator * frac, &dummy);
        if (rem < 0.001 || rem > 0.999)
            break;
    }
    // BUGFIX: the integer part of `decimal` used to be discarded, so e.g.
    // 29.97 produced the fraction 97/100 instead of 2997/100. Add it back,
    // scaled by the denominator.
    numerator = denominator * frac + whole * denominator;
}
1795
1796
1797
/*!
1798
* \brief handleMessage
1799
* Handles gstreamer bus messages. Mainly for debugging purposes and ensuring clean shutdown on error
1800
*/
1801
void handleMessage(GstElement * pipeline)
{
    GError *err = NULL;
    gchar *debug = NULL;
    GstBus* bus = NULL;
    GstStreamStatusType tp;
    GstElement * elem = NULL;
    GstMessage* msg = NULL;

    bus = gst_element_get_bus(pipeline);

    // Drain every pending message so warnings and errors do not pile up
    // on the bus between calls.
    while(gst_bus_have_pending(bus)) {
        msg = gst_bus_pop(bus);

        //printf("\t\tGot %s message\n", GST_MESSAGE_TYPE_NAME(msg));

        if(gst_is_missing_plugin_message(msg))
        {
            CV_WARN("your gstreamer installation is missing a required plugin\n");
        }
        else
        {
            switch (GST_MESSAGE_TYPE (msg)) {
            case GST_MESSAGE_STATE_CHANGED:
                // State transitions are only interesting for debugging
                // (see the commented trace below).
                GstState oldstate, newstate, pendstate;
                gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
                //fprintf(stderr, "\t\t%s: state changed from %s to %s (pending: %s)\n",
                //                gst_element_get_name(GST_MESSAGE_SRC (msg)),
                //                gst_element_state_get_name(oldstate),
                //                gst_element_state_get_name(newstate), gst_element_state_get_name(pendstate));
                break;
            case GST_MESSAGE_ERROR:
                gst_message_parse_error(msg, &err, &debug);
                //fprintf(stderr, "\t\tGStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
                //                gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);

                g_error_free(err);
                g_free(debug);

                // On error: shut the pipeline down to ensure a clean state.
                gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
                break;
            case GST_MESSAGE_EOS:
                //fprintf(stderr, "\t\treached the end of the stream.");
                break;
            case GST_MESSAGE_STREAM_STATUS:
                gst_message_parse_stream_status(msg,&tp,&elem);
                //fprintf(stderr, "\t\tstream status: elem %s, %i\n", GST_ELEMENT_NAME(elem), tp);
                break;
            default:
                //fprintf(stderr, "\t\tunhandled message %s\n",GST_MESSAGE_TYPE_NAME(msg));
                break;
            }
        }
        gst_message_unref(msg);
    }

    gst_object_unref(GST_OBJECT(bus));
}
1859
1860