1 /*M///////////////////////////////////////////////////////////////////////////////////////
2 //
3 // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4 //
5 // By downloading, copying, installing or using the software you agree to this license.
6 // If you do not agree to this license, do not download, install,
7 // copy or use the software.
8 //
9 //
10 // Intel License Agreement
11 // For Open Source Computer Vision Library
12 //
13 // Copyright (C) 2000, Intel Corporation, all rights reserved.
14 // Third party copyrights are property of their respective owners.
15 //
16 // Redistribution and use in source and binary forms, with or without modification,
17 // are permitted provided that the following conditions are met:
18 //
19 // * Redistribution's of source code must retain the above copyright notice,
20 // this list of conditions and the following disclaimer.
21 //
22 // * Redistribution's in binary form must reproduce the above copyright notice,
23 // this list of conditions and the following disclaimer in the documentation
24 // and/or other materials provided with the distribution.
25 //
26 // * The name of Intel Corporation may not be used to endorse or promote products
27 // derived from this software without specific prior written permission.
28 //
29 // This software is provided by the copyright holders and contributors "as is" and
30 // any express or implied warranties, including, but not limited to, the implied
31 // warranties of merchantability and fitness for a particular purpose are disclaimed.
32 // In no event shall the Intel Corporation or contributors be liable for any direct,
33 // indirect, incidental, special, exemplary, or consequential damages
34 // (including, but not limited to, procurement of substitute goods or services;
35 // loss of use, data, or profits; or business interruption) however caused
36 // and on any theory of liability, whether in contract, strict liability,
37 // or tort (including negligence or otherwise) arising in any way out of
38 // the use of this software, even if advised of the possibility of such damage.
39 //
40 //M*/
41 #include "precomp.hpp"
42 #if (defined WIN32 || defined _WIN32) && defined HAVE_MSMF
43 /*
44 Media Foundation-based Video Capturing module is based on
45 videoInput library by Evgeny Pereguda:
46 http://www.codeproject.com/Articles/559437/Capturing-of-video-from-web-camera-on-Windows-7-an
Originally licensed under The Code Project Open License (CPOL) 1.02:
48 http://www.codeproject.com/info/cpol10.aspx
49 */
// Windows 8 is required for some of the formats used below; otherwise the code could baseline on a lower Windows version.
51 #if WINVER < _WIN32_WINNT_WIN7
52 #undef WINVER
53 #define WINVER _WIN32_WINNT_WIN7
54 #endif
55 #if defined _MSC_VER && _MSC_VER >= 1600
56 #define HAVE_CONCURRENCY
57 #endif
58 #include <windows.h>
59 #include <guiddef.h>
60 #include <mfidl.h>
61 #include <Mfapi.h>
62 #include <mfplay.h>
63 #include <mfobjects.h>
64 #include <tchar.h>
65 #include <strsafe.h>
66 #include <Mfreadwrite.h>
67 #include <new>
68 #include <map>
69 #include <vector>
70 #include <string>
71 #include <stdio.h>
72 #include <stdarg.h>
73 #include <string.h>
74
75 #ifdef _MSC_VER
76 #pragma warning(disable:4503)
77 #pragma comment(lib, "mfplat")
78 #pragma comment(lib, "mf")
79 #pragma comment(lib, "mfuuid")
80 #pragma comment(lib, "Strmiids")
81 #pragma comment(lib, "Mfreadwrite")
82 #if (WINVER >= 0x0602) // Available since Win 8
83 #pragma comment(lib, "MinCore_Downlevel")
84 #endif
85 #endif
86
87 #include <mferror.h>
88
89 #ifdef WINRT
90 // for ComPtr usage
91 #include <wrl/client.h>
92 #ifdef __cplusplus_winrt
93 #include <agile.h>
94 #include <vccorlib.h>
95 #endif
96
97 #include <wrl\async.h>
98 #include <wrl\implements.h>
99 #include <wrl\module.h>
100 #include <wrl\wrappers\corewrappers.h>
101 #include <windows.media.capture.h>
102 #include <windows.devices.enumeration.h>
103 #ifdef HAVE_CONCURRENCY
104 #include <concrt.h>
105 #ifndef __cplusplus_winrt
106 __declspec(noreturn) void __stdcall __abi_WinRTraiseException(long);
107
inline void __abi_ThrowIfFailed(long __hrArg)
109 {
110 if (__hrArg < 0)
111 {
112 __abi_WinRTraiseException(__hrArg);
113 }
114 }
115
116 struct Guid
117 {
118 public:
119 Guid();
120 Guid(__rcGUID_t);
121 operator ::__rcGUID_t();
122 bool Equals(Guid __guidArg);
123 bool Equals(__rcGUID_t __guidArg);
124 Guid(unsigned int __aArg, unsigned short __bArg, unsigned short __cArg, unsigned __int8 __dArg,
125 unsigned __int8 __eArg, unsigned __int8 __fArg, unsigned __int8 __gArg, unsigned __int8 __hArg,
126 unsigned __int8 __iArg, unsigned __int8 __jArg, unsigned __int8 __kArg);
127 Guid(unsigned int __aArg, unsigned short __bArg, unsigned short __cArg, const unsigned __int8* __dArg);
128 private:
129 unsigned long __a;
130 unsigned short __b;
131 unsigned short __c;
132 unsigned char __d;
133 unsigned char __e;
134 unsigned char __f;
135 unsigned char __g;
136 unsigned char __h;
137 unsigned char __i;
138 unsigned char __j;
139 unsigned char __k;
140 };
141
static_assert(sizeof(Guid) == sizeof(::_GUID), "Incorrect size for Guid");
static_assert(sizeof(__rcGUID_t) == sizeof(::_GUID), "Incorrect size for __rcGUID_t");
144
145 ////////////////////////////////////////////////////////////////////////////////
inline Guid::Guid() : __a(0), __b(0), __c(0), __d(0), __e(0), __f(0), __g(0), __h(0), __i(0), __j(0), __k(0)
147 {
148 }
149
inline Guid::Guid(__rcGUID_t __guid) :
151 __a(reinterpret_cast<const __s_GUID&>(__guid).Data1),
152 __b(reinterpret_cast<const __s_GUID&>(__guid).Data2),
153 __c(reinterpret_cast<const __s_GUID&>(__guid).Data3),
154 __d(reinterpret_cast<const __s_GUID&>(__guid).Data4[0]),
155 __e(reinterpret_cast<const __s_GUID&>(__guid).Data4[1]),
156 __f(reinterpret_cast<const __s_GUID&>(__guid).Data4[2]),
157 __g(reinterpret_cast<const __s_GUID&>(__guid).Data4[3]),
158 __h(reinterpret_cast<const __s_GUID&>(__guid).Data4[4]),
159 __i(reinterpret_cast<const __s_GUID&>(__guid).Data4[5]),
160 __j(reinterpret_cast<const __s_GUID&>(__guid).Data4[6]),
161 __k(reinterpret_cast<const __s_GUID&>(__guid).Data4[7])
162 {
163 }
164
inline Guid::operator ::__rcGUID_t()
166 {
167 return reinterpret_cast<__rcGUID_t>(*this);
168 }
169
inline bool Guid::Equals(Guid __guidArg)
171 {
172 return *this == __guidArg;
173 }
174
inline bool Guid::Equals(__rcGUID_t __guidArg)
176 {
177 return *this == static_cast< Guid>(__guidArg);
178 }
179
inline bool operator==(Guid __aArg, Guid __bArg)
181 {
182 auto __a = reinterpret_cast<unsigned long*>(&__aArg);
183 auto __b = reinterpret_cast<unsigned long*>(&__bArg);
184
185 return (__a[0] == __b[0] && __a[1] == __b[1] && __a[2] == __b[2] && __a[3] == __b[3]);
186 }
187
inline bool operator!=(Guid __aArg, Guid __bArg)
189 {
190 return !(__aArg == __bArg);
191 }
192
inline bool operator<(Guid __aArg, Guid __bArg)
194 {
195 auto __a = reinterpret_cast<unsigned long*>(&__aArg);
196 auto __b = reinterpret_cast<unsigned long*>(&__bArg);
197
198 if (__a[0] != __b[0])
199 {
200 return __a[0] < __b[0];
201 }
202
203 if (__a[1] != __b[1])
204 {
205 return __a[1] < __b[1];
206 }
207
208 if (__a[2] != __b[2])
209 {
210 return __a[2] < __b[2];
211 }
212
213 if (__a[3] != __b[3])
214 {
215 return __a[3] < __b[3];
216 }
217
218 return false;
219 }
220
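// The operators above reinterpret the 16-byte GUID as four unsigned longs and compare
// them field by field, which yields a strict weak ordering. A hypothetical use (not
// present in this module) would be keying an ordered container by Guid:
//
//   std::map<Guid, int> subtypeIndex;
//   subtypeIndex[__winrt_GUID_NULL] = 0;   // relies on operator< defined above
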
inline Guid::Guid(unsigned int __aArg, unsigned short __bArg, unsigned short __cArg, unsigned __int8 __dArg,
222 unsigned __int8 __eArg, unsigned __int8 __fArg, unsigned __int8 __gArg, unsigned __int8 __hArg,
223 unsigned __int8 __iArg, unsigned __int8 __jArg, unsigned __int8 __kArg) :
224 __a(__aArg), __b(__bArg), __c(__cArg), __d(__dArg), __e(__eArg), __f(__fArg), __g(__gArg), __h(__hArg), __i(__iArg), __j(__jArg), __k(__kArg)
225 {
226 }
227
inline Guid::Guid(unsigned int __aArg, unsigned short __bArg, unsigned short __cArg, const unsigned __int8 __dArg[8]) :
229 __a(__aArg), __b(__bArg), __c(__cArg)
230 {
231 __d = __dArg[0];
232 __e = __dArg[1];
233 __f = __dArg[2];
234 __g = __dArg[3];
235 __h = __dArg[4];
236 __i = __dArg[5];
237 __j = __dArg[6];
238 __k = __dArg[7];
239 }
240
241 __declspec(selectany) Guid __winrt_GUID_NULL(0x00000000, 0x0000, 0x0000, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00);
242
243 //
//// Don't want to define the real IUnknown from unknown.h here. That would mean if the user has
245 //// any broken code that uses it, compile errors will take the form of e.g.:
246 //// predefined C++ WinRT types (compiler internal)(41) : see declaration of 'IUnknown::QueryInterface'
247 //// This is not helpful. If they use IUnknown, we still need to point them to the actual unknown.h so
248 //// that they can see the original definition.
249 ////
250 //// For WinRT, we'll instead have a parallel COM interface hierarchy for basic interfaces starting with _.
251 //// The type mismatch is not an issue. COM passes types through GUID / void* combos - the original type
252 //// doesn't come into play unless the user static_casts an implementation type to one of these, but
253 //// the WinRT implementation types are hidden.
254 __interface __declspec(uuid("00000000-0000-0000-C000-000000000046")) __abi_IUnknown
255 {
256 public:
257 virtual long __stdcall __abi_QueryInterface(Guid&, void**) = 0;
258 virtual unsigned long __stdcall __abi_AddRef() = 0;
259 virtual unsigned long __stdcall __abi_Release() = 0;
260 };
261 #endif
262 #include "ppltasks_winrt.h"
263 #endif
264 #else
265 #include <comdef.h>
266 #endif
267
268 struct IMFMediaType;
269 #ifndef WINRT
270 struct IMFActivate;
271 struct IMFMediaSource;
272 #endif
273 struct IMFAttributes;
274
275 namespace
276 {
277
template <class T> void SafeRelease(T **ppT)
279 {
280 if (*ppT)
281 {
282 (*ppT)->Release();
283 *ppT = NULL;
284 }
285 }
286
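// Illustrative sketch (not part of this module): SafeRelease() is the usual COM teardown
// idiom -- release the interface exactly once and null the pointer so later calls become
// harmless no-ops. A hypothetical caller:
//
//   IMFMediaSource *pSource = NULL;
//   // ... obtain pSource from an activation object or QueryInterface ...
//   SafeRelease(&pSource);   // calls pSource->Release() and leaves pSource == NULL
//   SafeRelease(&pSource);   // safe: does nothing for a NULL pointer
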
287 #ifdef _DEBUG
288 /// Class for printing info into console
289 class DPO
290 {
291 public:
292 ~DPO(void);
293 static DPO& getInstance();
294 void printOut(const wchar_t *format, ...);
295 void setVerbose(bool state);
296 bool verbose;
297 private:
298 DPO(void);
299 };
300 #define DebugPrintOut(...) DPO::getInstance().printOut(__VA_ARGS__)
301 #else
302 #define DebugPrintOut(...) void()
303 #endif
304
305 #include "cap_msmf.hpp"
306
// Structure for collecting info about the video media types supported by the current video device
308 struct MediaType
309 {
310 unsigned int MF_MT_FRAME_SIZE;
311 unsigned int height;
312 unsigned int width;
313 unsigned int MF_MT_YUV_MATRIX;
314 unsigned int MF_MT_VIDEO_LIGHTING;
315 int MF_MT_DEFAULT_STRIDE; // stride is negative if image is bottom-up
316 unsigned int MF_MT_VIDEO_CHROMA_SITING;
317 GUID MF_MT_AM_FORMAT_TYPE;
318 wchar_t *pMF_MT_AM_FORMAT_TYPEName;
319 unsigned int MF_MT_FIXED_SIZE_SAMPLES;
320 unsigned int MF_MT_VIDEO_NOMINAL_RANGE;
321 unsigned int MF_MT_FRAME_RATE_NUMERATOR;
322 unsigned int MF_MT_FRAME_RATE_DENOMINATOR;
323 unsigned int MF_MT_PIXEL_ASPECT_RATIO;
324 unsigned int MF_MT_PIXEL_ASPECT_RATIO_low;
325 unsigned int MF_MT_ALL_SAMPLES_INDEPENDENT;
326 unsigned int MF_MT_FRAME_RATE_RANGE_MIN;
327 unsigned int MF_MT_FRAME_RATE_RANGE_MIN_low;
328 unsigned int MF_MT_SAMPLE_SIZE;
329 unsigned int MF_MT_VIDEO_PRIMARIES;
330 unsigned int MF_MT_INTERLACE_MODE;
331 unsigned int MF_MT_FRAME_RATE_RANGE_MAX;
332 unsigned int MF_MT_FRAME_RATE_RANGE_MAX_low;
333 GUID MF_MT_MAJOR_TYPE;
334 GUID MF_MT_SUBTYPE;
335 wchar_t *pMF_MT_MAJOR_TYPEName;
336 wchar_t *pMF_MT_SUBTYPEName;
337 MediaType();
338 ~MediaType();
339 void Clear();
340 };
341
342 /// Class for parsing info from IMFMediaType into the local MediaType
343 class FormatReader
344 {
345 public:
346 static MediaType Read(IMFMediaType *pType);
347 ~FormatReader(void);
348 private:
349 FormatReader(void);
350 };
351
352 DWORD WINAPI MainThreadFunction( LPVOID lpParam );
353 typedef void(*emergensyStopEventCallback)(int, void *);
354
355 class RawImage
356 {
357 public:
358 ~RawImage(void);
// Creates an instance of the class
360 static long CreateInstance(RawImage **ppRImage,unsigned int size);
361 void setCopy(const BYTE * pSampleBuffer);
362 void fastCopy(const BYTE * pSampleBuffer);
363 unsigned char * getpPixels();
364 bool isNew();
365 unsigned int getSize();
366 private:
367 bool ri_new;
368 unsigned int ri_size;
369 unsigned char *ri_pixels;
370 RawImage(unsigned int size);
371 };
372
373 class ImageGrabberCallback : public IMFSampleGrabberSinkCallback
374 {
375 public:
376 void pauseGrabbing();
377 void resumeGrabbing();
378 RawImage *getRawImage();
379 // IMFClockStateSink methods
380 STDMETHODIMP OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset);
381 STDMETHODIMP OnClockStop(MFTIME hnsSystemTime);
382 STDMETHODIMP OnClockPause(MFTIME hnsSystemTime);
383 STDMETHODIMP OnClockRestart(MFTIME hnsSystemTime);
384 STDMETHODIMP OnClockSetRate(MFTIME hnsSystemTime, float flRate);
385 // IMFSampleGrabberSinkCallback methods
386 STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock);
387 STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
388 LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
389 DWORD dwSampleSize);
390 STDMETHODIMP OnShutdown();
391
392 const HANDLE ig_hFrameReady;
393 const HANDLE ig_hFrameGrabbed;
394 const HANDLE ig_hFinish;
395 protected:
396 ImageGrabberCallback(bool synchronous);
397 bool ig_RIE;
398 bool ig_Close;
399 bool ig_Synchronous;
400 long m_cRef;
401
402 RawImage *ig_RIFirst;
403 RawImage *ig_RISecond;
404 RawImage *ig_RIOut;
405 private:
406 ImageGrabberCallback& operator=(const ImageGrabberCallback&); // Declared to fix compilation warning.
407 };
408
409 #ifdef WINRT
410 extern const __declspec(selectany) WCHAR RuntimeClass_CV_ImageGrabberWinRT[] = L"cv.ImageGrabberWinRT";
411
412 class ImageGrabberWinRT :
413 public Microsoft::WRL::RuntimeClass<
414 Microsoft::WRL::RuntimeClassFlags< Microsoft::WRL::RuntimeClassType::WinRtClassicComMix>,
415 IMFSampleGrabberSinkCallback>, public ImageGrabberCallback
416 {
417 InspectableClass(RuntimeClass_CV_ImageGrabberWinRT, BaseTrust)
418 public:
419 ImageGrabberWinRT(bool synchronous);
420 ~ImageGrabberWinRT(void);
421
422 HRESULT initImageGrabber(MAKE_WRL_REF(_MediaCapture) pSource,
423 GUID VideoFormat);
424 HRESULT startGrabbing(MAKE_WRL_REF(_AsyncAction)* action);
425 HRESULT stopGrabbing(MAKE_WRL_REF(_AsyncAction)* action);
426 // IMFClockStateSink methods
STDMETHODIMP OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset) { return ImageGrabberCallback::OnClockStart(hnsSystemTime, llClockStartOffset); }
STDMETHODIMP OnClockStop(MFTIME hnsSystemTime) { return ImageGrabberCallback::OnClockStop(hnsSystemTime); }
STDMETHODIMP OnClockPause(MFTIME hnsSystemTime) { return ImageGrabberCallback::OnClockPause(hnsSystemTime); }
STDMETHODIMP OnClockRestart(MFTIME hnsSystemTime) { return ImageGrabberCallback::OnClockRestart(hnsSystemTime); }
STDMETHODIMP OnClockSetRate(MFTIME hnsSystemTime, float flRate) { return ImageGrabberCallback::OnClockSetRate(hnsSystemTime, flRate); }
// IMFSampleGrabberSinkCallback methods
STDMETHODIMP OnSetPresentationClock(IMFPresentationClock* pClock) { return ImageGrabberCallback::OnSetPresentationClock(pClock); }
STDMETHODIMP OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
DWORD dwSampleSize) { return ImageGrabberCallback::OnProcessSample(guidMajorMediaType, dwSampleFlags, llSampleTime, llSampleDuration, pSampleBuffer, dwSampleSize); }
STDMETHODIMP OnShutdown() { return ImageGrabberCallback::OnShutdown(); }
// Creates an instance of the class
439 static HRESULT CreateInstance(ImageGrabberWinRT **ppIG, bool synchronous = false);
440 private:
441 MAKE_WRL_AGILE_REF(_MediaCapture) ig_pMedCapSource;
442 MediaSink* ig_pMediaSink;
443 };
444 #endif
445
// Class for grabbing images from the video stream
447 class ImageGrabber : public ImageGrabberCallback
448 {
449 public:
450 ~ImageGrabber(void);
451 HRESULT initImageGrabber(IMFMediaSource *pSource);
452 HRESULT startGrabbing(void);
453 void stopGrabbing();
454 // IUnknown methods
455 STDMETHODIMP QueryInterface(REFIID iid, void** ppv);
456 STDMETHODIMP_(ULONG) AddRef();
457 STDMETHODIMP_(ULONG) Release();
// Creates an instance of the class
459 static HRESULT CreateInstance(ImageGrabber **ppIG, unsigned int deviceID, bool synchronous = false);
460
461 private:
462 unsigned int ig_DeviceID;
463
464 IMFMediaSource *ig_pSource;
465 IMFMediaSession *ig_pSession;
466 IMFTopology *ig_pTopology;
467 ImageGrabber(unsigned int deviceID, bool synchronous);
468 HRESULT CreateTopology(IMFMediaSource *pSource, IMFActivate *pSinkActivate, IMFTopology **ppTopo);
469 HRESULT AddSourceNode(IMFTopology *pTopology, IMFMediaSource *pSource,
470 IMFPresentationDescriptor *pPD, IMFStreamDescriptor *pSD, IMFTopologyNode **ppNode);
471 HRESULT AddOutputNode(IMFTopology *pTopology, IMFActivate *pActivate, DWORD dwId, IMFTopologyNode **ppNode);
472
ImageGrabber& operator=(const ImageGrabber&); // Declared to fix compilation error.
474 };
475
/// Class for controlling the thread that grabs raw data from the video device
477 class ImageGrabberThread
478 {
479 friend DWORD WINAPI MainThreadFunction( LPVOID lpParam );
480 public:
481 ~ImageGrabberThread(void);
482 static HRESULT CreateInstance(ImageGrabberThread **ppIGT, IMFMediaSource *pSource, unsigned int deviceID, bool synchronious = false);
483 void start();
484 void stop();
485 void setEmergencyStopEvent(void *userData, void(*func)(int, void *));
486 ImageGrabber *getImageGrabber();
487 protected:
488 virtual void run();
489 private:
490 ImageGrabberThread(IMFMediaSource *pSource, unsigned int deviceID, bool synchronious);
491 HANDLE igt_Handle;
492 DWORD igt_ThreadIdArray;
493 ImageGrabber *igt_pImageGrabber;
494 emergensyStopEventCallback igt_func;
495 void *igt_userData;
496 bool igt_stop;
497 unsigned int igt_DeviceID;
498 };
499
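// Typical lifecycle of the grabbing thread, as a hedged sketch based only on the interface
// above (error handling omitted; start() presumably spawns MainThreadFunction, which runs
// the ImageGrabber). Not part of this module:
//
//   ImageGrabberThread *pThread = NULL;
//   if (SUCCEEDED(ImageGrabberThread::CreateInstance(&pThread, pSource, deviceID)))
//   {
//       pThread->setEmergencyStopEvent(userData, onDeviceLost); // optional callback
//       pThread->start();
//       // ... consume frames via pThread->getImageGrabber()->getRawImage() ...
//       pThread->stop();
//       delete pThread;
//   }
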
// Structure for collecting info about one parameter of the current video device
501 struct Parametr
502 {
503 long CurrentValue;
504 long Min;
505 long Max;
506 long Step;
507 long Default;
508 long Flag;
509 Parametr();
510 };
511
// Structure for collecting info about 17 parameters of the current video device
513 struct CamParametrs
514 {
515 Parametr Brightness;
516 Parametr Contrast;
517 Parametr Hue;
518 Parametr Saturation;
519 Parametr Sharpness;
520 Parametr Gamma;
521 Parametr ColorEnable;
522 Parametr WhiteBalance;
523 Parametr BacklightCompensation;
524 Parametr Gain;
525 Parametr Pan;
526 Parametr Tilt;
527 Parametr Roll;
528 Parametr Zoom;
529 Parametr Exposure;
530 Parametr Iris;
531 Parametr Focus;
532 };
533
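// Illustrative sketch (not part of this module): camera properties travel as one
// CamParametrs snapshot, so the usual pattern is read-modify-write. A hypothetical
// example using the videoDevice interface declared later in this file:
//
//   CamParametrs params = device.getParametrs();
//   params.Brightness.CurrentValue = params.Brightness.Max;   // stay inside the reported [Min, Max] range
//   device.setParametrs(params);
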
534 typedef std::wstring String;
535 typedef std::vector<int> vectorNum;
536 typedef std::map<String, vectorNum> SUBTYPEMap;
537 typedef std::map<UINT64, SUBTYPEMap> FrameRateMap;
538 typedef void(*emergensyStopEventCallback)(int, void *);
539
/// Class for controlling a video device
541 class videoDevice
542 {
543 public:
544 videoDevice(void);
545 ~videoDevice(void);
546 void closeDevice();
547 CamParametrs getParametrs();
548 void setParametrs(CamParametrs parametrs);
549 void setEmergencyStopEvent(void *userData, void(*func)(int, void *));
550 #ifdef WINRT
551 long readInfoOfDevice(MAKE_WRL_REF(_IDeviceInformation) pDevice, unsigned int Num);
void waitForDevice()
553 {
554 if (vd_pAction) {
555 #ifdef HAVE_CONCURRENCY
556 CREATE_TASK DEFINE_RET_TYPE(void)(vd_pAction).wait();
557 #endif
558 vd_pAction = nullptr;
559 }
560 }
561 #else
562 long readInfoOfDevice(IMFActivate *pActivate, unsigned int Num);
563 #endif
564 wchar_t *getName();
565 int getCountFormats();
566 unsigned int getWidth();
567 unsigned int getHeight();
568 unsigned int getFrameRate() const;
569 MediaType getFormat(unsigned int id);
570 bool setupDevice(unsigned int w, unsigned int h, unsigned int idealFramerate = 0);
571 bool setupDevice(unsigned int id);
572 bool isDeviceSetup();
573 bool isDeviceMediaSource();
574 bool isDeviceRawDataSource();
575 bool isFrameNew();
576 IMFMediaSource *getMediaSource();
577 RawImage *getRawImageOut();
578 private:
579 enum typeLock
580 {
581 MediaSourceLock,
582 RawDataLock,
583 OpenLock
584 } vd_LockOut;
585 wchar_t *vd_pFriendlyName;
586 ImageGrabberThread *vd_pImGrTh;
587 CamParametrs vd_PrevParametrs;
588 unsigned int vd_Width;
589 unsigned int vd_Height;
590 unsigned int vd_FrameRate;
591 unsigned int vd_CurrentNumber;
592 bool vd_IsSetuped;
593 std::map<UINT64, FrameRateMap> vd_CaptureFormats;
594 std::vector<MediaType> vd_CurrentFormats;
595 IMFMediaSource *vd_pSource;
596 #ifdef WINRT
597 MAKE_WRL_AGILE_REF(_MediaCapture) vd_pMedCap;
598 EventRegistrationToken vd_cookie;
599 ImageGrabberWinRT *vd_pImGr;
600 MAKE_WRL_REF(_AsyncAction) vd_pAction;
601 #ifdef HAVE_CONCURRENCY
602 Concurrency::critical_section vd_lock;
603 #endif
604 #endif
605 emergensyStopEventCallback vd_func;
606 void *vd_userData;
607 HRESULT enumerateCaptureFormats(IMFMediaSource *pSource);
608 long setDeviceFormat(IMFMediaSource *pSource, unsigned long dwFormatIndex);
609 void buildLibraryofTypes();
610 int findType(unsigned int size, unsigned int frameRate = 0);
611 #ifdef WINRT
612 HRESULT enumerateCaptureFormats(MAKE_WRL_REF(_MediaCapture) pSource);
613 long setDeviceFormat(MAKE_WRL_REF(_MediaCapture) pSource, unsigned long dwFormatIndex, MAKE_WRL_REF(_AsyncAction)* pAction);
614 long resetDevice(MAKE_WRL_REF(_IDeviceInformation) pDevice);
615 #ifdef HAVE_CONCURRENCY
616 long checkDevice(_DeviceClass devClass, DEFINE_TASK<void>* pTask, MAKE_WRL_REF(_IDeviceInformation)* ppDevice);
617 #endif
618 #else
619 long resetDevice(IMFActivate *pActivate);
620 long checkDevice(IMFAttributes *pAttributes, IMFActivate **pDevice);
621 #endif
622 long initDevice();
623 };
624
/// Class for managing the list of video devices
626 class videoDevices
627 {
628 public:
629 ~videoDevices(void);
630 #ifdef WINRT
631 long initDevices(_DeviceClass devClass);
void waitInit() {
633 if (vds_enumTask) {
634 #ifdef HAVE_CONCURRENCY
635 CREATE_TASK DEFINE_RET_TYPE(void)(vds_enumTask).wait();
636 #endif
637 vds_enumTask = nullptr;
638 }
639 }
640 #else
641 long initDevices(IMFAttributes *pAttributes);
642 #endif
643 static videoDevices& getInstance();
644 videoDevice *getDevice(unsigned int i);
645 unsigned int getCount();
646 void clearDevices();
647 private:
648 UINT32 count;
649 #ifdef WINRT
650 MAKE_WRL_REF(_AsyncAction) vds_enumTask;
651 #endif
652 std::vector<videoDevice *> vds_Devices;
653 videoDevices(void);
654 };
655
// Class for creating the Media Foundation context
657 class Media_Foundation
658 {
659 public:
660 virtual ~Media_Foundation(void);
661 static Media_Foundation& getInstance();
662 bool buildListOfDevices();
663 private:
664 Media_Foundation(void);
665 };
666
/// The only visible class for controlling video devices; implemented as a singleton
668 class videoInput
669 {
670 public:
671 virtual ~videoInput(void);
// Returns the static instance of the videoInput class
static videoInput& getInstance();
// Closes the video device with deviceID
void closeDevice(int deviceID);
// Sets the callback function (with userData) for emergency events, for example removal of the video device with deviceID
void setEmergencyStopEvent(int deviceID, void *userData, void(*func)(int, void *));
// Closes all devices
void closeAllDevices();
// Returns the parameters of the video device with deviceID
CamParametrs getParametrs(int deviceID);
// Sets the parameters of the video device with deviceID
void setParametrs(int deviceID, CamParametrs parametrs);
// Returns the number of existing video devices, optionally listing them in the console
unsigned int listDevices(bool silent = false);
// Returns the number of formats supported by the video device with deviceID
unsigned int getCountFormats(int deviceID) const;
// Returns the width of the image delivered by the video device with deviceID
unsigned int getWidth(int deviceID) const;
// Returns the height of the image delivered by the video device with deviceID
unsigned int getHeight(int deviceID) const;
// Returns the frame rate delivered by the video device with deviceID
unsigned int getFrameRate(int deviceID) const;
// Returns the name of the video device with deviceID
wchar_t *getNameVideoDevice(int deviceID);
// Returns the Media Foundation MediaSource interface of the video device with deviceID
IMFMediaSource *getMediaSource(int deviceID);
// Returns the format with the given id supported by the video device with deviceID
MediaType getFormat(int deviceID, int unsigned id);
// Checks whether suitable video devices exist
bool isDevicesAcceable();
// Checks whether the video device with deviceID is in use
bool isDeviceSetup(int deviceID);
// Checks whether the MediaSource of the video device with deviceID is in use
bool isDeviceMediaSource(int deviceID);
// Checks whether raw pixel data from the video device with deviceID is in use
bool isDeviceRawDataSource(int deviceID);
#ifdef _DEBUG
// Sets whether info is printed to the console
static void setVerbose(bool state);
#endif
// Initializes the video device with deviceID using the media type with the given id
bool setupDevice(int deviceID, unsigned int id = 0);
// Initializes the video device with deviceID using width w, height h and fps idealFramerate
bool setupDevice(int deviceID, unsigned int w, unsigned int h, unsigned int idealFramerate = 30);
// Checks whether a new frame has been received from the video device with deviceID
bool isFrameNew(int deviceID);
#ifdef WINRT
void waitForDevice(int deviceID);
#endif
// Writes the raw pixel data of the video device with deviceID, optionally swapping red/blue (flipRedAndBlue) and flipping vertically (flipImage)
bool getPixels(int deviceID, unsigned char * pixels, bool flipRedAndBlue = false, bool flipImage = false);
static void processPixels(unsigned char * src, unsigned char * dst, unsigned int width, unsigned int height, unsigned int bpp, bool bRGB, bool bFlip);
724 private:
725 bool accessToDevices;
726 videoInput(void);
727 void updateListOfDevices();
728 };
729
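// Hedged usage sketch (illustrative only, not part of this module): videoInput is a
// singleton, so callers go through getInstance(). A minimal capture loop, assuming
// device 0 exists, a 640x480 mode is available, and frames are delivered as 3 bytes
// per pixel (RGB24), might look like:
//
//   videoInput& VI = videoInput::getInstance();
//   if (VI.listDevices(true) > 0 && VI.setupDevice(0, 640, 480, 30))
//   {
//       std::vector<unsigned char> frame(VI.getWidth(0) * VI.getHeight(0) * 3);
//       if (VI.isFrameNew(0))
//           VI.getPixels(0, frame.data(), false, true);   // no R/B swap, flip vertically
//       VI.closeDevice(0);
//   }
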
730 #ifdef _DEBUG
DPO::DPO(void):verbose(true)
732 {
733 }
734
DPO::~DPO(void)
736 {
737 }
738
DPO& DPO::getInstance()
740 {
741 static DPO instance;
742 return instance;
743 }
744
void DPO::printOut(const wchar_t *format, ...)
746 {
747 if(verbose)
748 {
749 int i = 0;
750 wchar_t *p = NULL;
751 va_list args;
752 va_start(args, format);
753 if( ::IsDebuggerPresent() )
754 {
755 WCHAR szMsg[512];
756 ::StringCchVPrintfW(szMsg, sizeof(szMsg)/sizeof(szMsg[0]), format, args);
757 ::OutputDebugStringW(szMsg);
758 }
759 else
760 {
761 if(wcscmp(format, L"%i"))
762 {
763 i = va_arg (args, int);
764 }
765 if(wcscmp(format, L"%s"))
766 {
767 p = va_arg (args, wchar_t *);
768 }
769 wprintf(format, i,p);
770 }
771 va_end (args);
772 }
773 }
774
void DPO::setVerbose(bool state)
776 {
777 verbose = state;
778 }
779 #endif
780
781 LPCWSTR GetGUIDNameConstNew(const GUID& guid);
782 HRESULT GetGUIDNameNew(const GUID& guid, WCHAR **ppwsz);
783 HRESULT LogAttributeValueByIndexNew(IMFAttributes *pAttr, DWORD index);
784 HRESULT SpecialCaseAttributeValueNew(GUID guid, const PROPVARIANT& var, MediaType &out);
785
unsigned int *GetParametr(GUID guid, MediaType &out)
787 {
788 if(guid == MF_MT_YUV_MATRIX)
789 return &(out.MF_MT_YUV_MATRIX);
790 if(guid == MF_MT_VIDEO_LIGHTING)
791 return &(out.MF_MT_VIDEO_LIGHTING);
792 if(guid == MF_MT_DEFAULT_STRIDE)
793 return (unsigned int*)&(out.MF_MT_DEFAULT_STRIDE);
794 if(guid == MF_MT_VIDEO_CHROMA_SITING)
795 return &(out.MF_MT_VIDEO_CHROMA_SITING);
796 if(guid == MF_MT_VIDEO_NOMINAL_RANGE)
797 return &(out.MF_MT_VIDEO_NOMINAL_RANGE);
798 if(guid == MF_MT_ALL_SAMPLES_INDEPENDENT)
799 return &(out.MF_MT_ALL_SAMPLES_INDEPENDENT);
800 if(guid == MF_MT_FIXED_SIZE_SAMPLES)
801 return &(out.MF_MT_FIXED_SIZE_SAMPLES);
802 if(guid == MF_MT_SAMPLE_SIZE)
803 return &(out.MF_MT_SAMPLE_SIZE);
804 if(guid == MF_MT_VIDEO_PRIMARIES)
805 return &(out.MF_MT_VIDEO_PRIMARIES);
806 if(guid == MF_MT_INTERLACE_MODE)
807 return &(out.MF_MT_INTERLACE_MODE);
808 return NULL;
809 }
810
HRESULT LogAttributeValueByIndexNew(IMFAttributes *pAttr, DWORD index, MediaType &out)
812 {
813 WCHAR *pGuidName = NULL;
814 WCHAR *pGuidValName = NULL;
815 GUID guid = { 0 };
816 PROPVARIANT var;
817 PropVariantInit(&var);
818 HRESULT hr = pAttr->GetItemByIndex(index, &guid, &var);
819 if (FAILED(hr))
820 {
821 goto done;
822 }
823 hr = GetGUIDNameNew(guid, &pGuidName);
824 if (FAILED(hr))
825 {
826 goto done;
827 }
828 hr = SpecialCaseAttributeValueNew(guid, var, out);
829 unsigned int *p;
830 if (FAILED(hr))
831 {
832 goto done;
833 }
834 if (hr == S_FALSE)
835 {
836 switch (var.vt)
837 {
838 case VT_UI4:
839 p = GetParametr(guid, out);
840 if(p)
841 {
842 *p = var.ulVal;
843 }
844 break;
845 case VT_UI8:
846 break;
847 case VT_R8:
848 break;
849 case VT_CLSID:
850 if(guid == MF_MT_AM_FORMAT_TYPE)
851 {
852 hr = GetGUIDNameNew(*var.puuid, &pGuidValName);
853 if (SUCCEEDED(hr))
854 {
855 out.MF_MT_AM_FORMAT_TYPE = *var.puuid;
856 out.pMF_MT_AM_FORMAT_TYPEName = pGuidValName;
857 pGuidValName = NULL;
858 }
859 }
860 if(guid == MF_MT_MAJOR_TYPE)
861 {
862 hr = GetGUIDNameNew(*var.puuid, &pGuidValName);
863 if (SUCCEEDED(hr))
864 {
865 out.MF_MT_MAJOR_TYPE = *var.puuid;
866 out.pMF_MT_MAJOR_TYPEName = pGuidValName;
867 pGuidValName = NULL;
868 }
869 }
870 if(guid == MF_MT_SUBTYPE)
871 {
872 hr = GetGUIDNameNew(*var.puuid, &pGuidValName);
873 if (SUCCEEDED(hr))
874 {
875 out.MF_MT_SUBTYPE = *var.puuid;
876 out.pMF_MT_SUBTYPEName = pGuidValName;
877 pGuidValName = NULL;
878 }
879 }
880 break;
881 case VT_LPWSTR:
882 break;
883 case VT_VECTOR | VT_UI1:
884 break;
885 case VT_UNKNOWN:
886 break;
887 default:
888 break;
889 }
890 }
891 done:
892 CoTaskMemFree(pGuidName);
893 CoTaskMemFree(pGuidValName);
894 PropVariantClear(&var);
895 return hr;
896 }
897
HRESULT GetGUIDNameNew(const GUID& guid, WCHAR **ppwsz)
899 {
900 HRESULT hr = S_OK;
901 WCHAR *pName = NULL;
902 LPCWSTR pcwsz = GetGUIDNameConstNew(guid);
903 if (pcwsz)
904 {
905 size_t cchLength = 0;
906 hr = StringCchLengthW(pcwsz, STRSAFE_MAX_CCH, &cchLength);
907 if (FAILED(hr))
908 {
909 goto done;
910 }
911 pName = (WCHAR*)CoTaskMemAlloc((cchLength + 1) * sizeof(WCHAR));
912 if (pName == NULL)
913 {
914 hr = E_OUTOFMEMORY;
915 goto done;
916 }
917 hr = StringCchCopyW(pName, cchLength + 1, pcwsz);
918 if (FAILED(hr))
919 {
920 goto done;
921 }
922 }
923 else
924 {
925 hr = StringFromCLSID(guid, &pName);
926 }
927 done:
928 if (FAILED(hr))
929 {
930 *ppwsz = NULL;
931 CoTaskMemFree(pName);
932 }
933 else
934 {
935 *ppwsz = pName;
936 }
937 return hr;
938 }
939
void LogUINT32AsUINT64New(const PROPVARIANT& var, UINT32 &uHigh, UINT32 &uLow)
941 {
942 Unpack2UINT32AsUINT64(var.uhVal.QuadPart, &uHigh, &uLow);
943 }
944
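// Several Media Foundation attributes (MF_MT_FRAME_SIZE, MF_MT_FRAME_RATE,
// MF_MT_PIXEL_ASPECT_RATIO, ...) pack two 32-bit values into one UINT64 with the
// high DWORD first and the low DWORD second; the helper above forwards to
// Unpack2UINT32AsUINT64 from mfapi.h. Illustrative sketch of the same unpacking:
//
//   UINT64 packed = var.uhVal.QuadPart;              // e.g. MF_MT_FRAME_SIZE
//   UINT32 width  = (UINT32)(packed >> 32);          // high part
//   UINT32 height = (UINT32)(packed & 0xFFFFFFFF);   // low part
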
float OffsetToFloatNew(const MFOffset& offset)
946 {
947 return offset.value + (static_cast<float>(offset.fract) / 65536.0f);
948 }
949
HRESULT LogVideoAreaNew(const PROPVARIANT& var)
951 {
952 if (var.caub.cElems < sizeof(MFVideoArea))
953 {
954 return S_OK;
955 }
956 return S_OK;
957 }
958
HRESULT SpecialCaseAttributeValueNew(GUID guid, const PROPVARIANT& var, MediaType &out)
960 {
961 if (guid == MF_MT_DEFAULT_STRIDE)
962 {
963 out.MF_MT_DEFAULT_STRIDE = var.intVal;
964 } else
965 if (guid == MF_MT_FRAME_SIZE)
966 {
967 UINT32 uHigh = 0, uLow = 0;
968 LogUINT32AsUINT64New(var, uHigh, uLow);
969 out.width = uHigh;
970 out.height = uLow;
971 out.MF_MT_FRAME_SIZE = out.width * out.height;
972 }
973 else
974 if (guid == MF_MT_FRAME_RATE)
975 {
976 UINT32 uHigh = 0, uLow = 0;
977 LogUINT32AsUINT64New(var, uHigh, uLow);
978 out.MF_MT_FRAME_RATE_NUMERATOR = uHigh;
979 out.MF_MT_FRAME_RATE_DENOMINATOR = uLow;
980 }
981 else
982 if (guid == MF_MT_FRAME_RATE_RANGE_MAX)
983 {
984 UINT32 uHigh = 0, uLow = 0;
985 LogUINT32AsUINT64New(var, uHigh, uLow);
986 out.MF_MT_FRAME_RATE_RANGE_MAX = uHigh;
987 out.MF_MT_FRAME_RATE_RANGE_MAX_low = uLow;
988 }
989 else
990 if (guid == MF_MT_FRAME_RATE_RANGE_MIN)
991 {
992 UINT32 uHigh = 0, uLow = 0;
993 LogUINT32AsUINT64New(var, uHigh, uLow);
994 out.MF_MT_FRAME_RATE_RANGE_MIN = uHigh;
995 out.MF_MT_FRAME_RATE_RANGE_MIN_low = uLow;
996 }
997 else
998 if (guid == MF_MT_PIXEL_ASPECT_RATIO)
999 {
1000 UINT32 uHigh = 0, uLow = 0;
1001 LogUINT32AsUINT64New(var, uHigh, uLow);
1002 out.MF_MT_PIXEL_ASPECT_RATIO = uHigh;
1003 out.MF_MT_PIXEL_ASPECT_RATIO_low = uLow;
1004 }
1005 else
1006 {
1007 return S_FALSE;
1008 }
1009 return S_OK;
1010 }
1011
1012 #ifndef IF_EQUAL_RETURN
1013 #define IF_EQUAL_RETURN(param, val) if(val == param) return L#val
1014 #endif
1015
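// The macro above both compares and stringizes: L#val turns the symbol into a wide string
// literal. For example, IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE); expands to
//   if (MF_MT_FRAME_SIZE == guid) return L"MF_MT_FRAME_SIZE";
// so GetGUIDNameConstNew() below maps well-known GUIDs to their printable names.
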
LPCWSTR GetGUIDNameConstNew(const GUID& guid)
1017 {
1018 IF_EQUAL_RETURN(guid, MF_MT_MAJOR_TYPE);
1020 IF_EQUAL_RETURN(guid, MF_MT_SUBTYPE);
1021 IF_EQUAL_RETURN(guid, MF_MT_ALL_SAMPLES_INDEPENDENT);
1022 IF_EQUAL_RETURN(guid, MF_MT_FIXED_SIZE_SAMPLES);
1023 IF_EQUAL_RETURN(guid, MF_MT_COMPRESSED);
1024 IF_EQUAL_RETURN(guid, MF_MT_SAMPLE_SIZE);
1025 IF_EQUAL_RETURN(guid, MF_MT_WRAPPED_TYPE);
1026 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_NUM_CHANNELS);
1027 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_SECOND);
1028 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FLOAT_SAMPLES_PER_SECOND);
1029 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_AVG_BYTES_PER_SECOND);
1030 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BLOCK_ALIGNMENT);
1031 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_BITS_PER_SAMPLE);
1032 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_VALID_BITS_PER_SAMPLE);
1033 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_SAMPLES_PER_BLOCK);
1034 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_CHANNEL_MASK);
1035 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_FOLDDOWN_MATRIX);
1036 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKREF);
1037 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_PEAKTARGET);
1038 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGREF);
1039 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_WMADRC_AVGTARGET);
1040 IF_EQUAL_RETURN(guid, MF_MT_AUDIO_PREFER_WAVEFORMATEX);
1041 IF_EQUAL_RETURN(guid, MF_MT_AAC_PAYLOAD_TYPE);
1042 IF_EQUAL_RETURN(guid, MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION);
1043 IF_EQUAL_RETURN(guid, MF_MT_FRAME_SIZE);
1044 IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE);
1045 IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MAX);
1046 IF_EQUAL_RETURN(guid, MF_MT_FRAME_RATE_RANGE_MIN);
1047 IF_EQUAL_RETURN(guid, MF_MT_PIXEL_ASPECT_RATIO);
1048 IF_EQUAL_RETURN(guid, MF_MT_DRM_FLAGS);
1049 IF_EQUAL_RETURN(guid, MF_MT_PAD_CONTROL_FLAGS);
1050 IF_EQUAL_RETURN(guid, MF_MT_SOURCE_CONTENT_HINT);
1051 IF_EQUAL_RETURN(guid, MF_MT_VIDEO_CHROMA_SITING);
1052 IF_EQUAL_RETURN(guid, MF_MT_INTERLACE_MODE);
1053 IF_EQUAL_RETURN(guid, MF_MT_TRANSFER_FUNCTION);
1054 IF_EQUAL_RETURN(guid, MF_MT_VIDEO_PRIMARIES);
1055 IF_EQUAL_RETURN(guid, MF_MT_CUSTOM_VIDEO_PRIMARIES);
1056 IF_EQUAL_RETURN(guid, MF_MT_YUV_MATRIX);
1057 IF_EQUAL_RETURN(guid, MF_MT_VIDEO_LIGHTING);
1058 IF_EQUAL_RETURN(guid, MF_MT_VIDEO_NOMINAL_RANGE);
1059 IF_EQUAL_RETURN(guid, MF_MT_GEOMETRIC_APERTURE);
1060 IF_EQUAL_RETURN(guid, MF_MT_MINIMUM_DISPLAY_APERTURE);
1061 IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_APERTURE);
1062 IF_EQUAL_RETURN(guid, MF_MT_PAN_SCAN_ENABLED);
1063 IF_EQUAL_RETURN(guid, MF_MT_AVG_BITRATE);
1064 IF_EQUAL_RETURN(guid, MF_MT_AVG_BIT_ERROR_RATE);
1065 IF_EQUAL_RETURN(guid, MF_MT_MAX_KEYFRAME_SPACING);
1066 IF_EQUAL_RETURN(guid, MF_MT_DEFAULT_STRIDE);
1067 IF_EQUAL_RETURN(guid, MF_MT_PALETTE);
1068 IF_EQUAL_RETURN(guid, MF_MT_USER_DATA);
1069 IF_EQUAL_RETURN(guid, MF_MT_AM_FORMAT_TYPE);
1070 IF_EQUAL_RETURN(guid, MF_MT_MPEG_START_TIME_CODE);
1071 IF_EQUAL_RETURN(guid, MF_MT_MPEG2_PROFILE);
1072 IF_EQUAL_RETURN(guid, MF_MT_MPEG2_LEVEL);
1073 IF_EQUAL_RETURN(guid, MF_MT_MPEG2_FLAGS);
1074 IF_EQUAL_RETURN(guid, MF_MT_MPEG_SEQUENCE_HEADER);
1075 IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_0);
1076 IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_0);
1077 IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_SRC_PACK_1);
1078 IF_EQUAL_RETURN(guid, MF_MT_DV_AAUX_CTRL_PACK_1);
1079 IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_SRC_PACK);
1080 IF_EQUAL_RETURN(guid, MF_MT_DV_VAUX_CTRL_PACK);
1081 IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_HEADER);
1082 IF_EQUAL_RETURN(guid, MF_MT_ARBITRARY_FORMAT);
1083 IF_EQUAL_RETURN(guid, MF_MT_IMAGE_LOSS_TOLERANT);
1084 IF_EQUAL_RETURN(guid, MF_MT_MPEG4_SAMPLE_DESCRIPTION);
1085 IF_EQUAL_RETURN(guid, MF_MT_MPEG4_CURRENT_SAMPLE_ENTRY);
1086 IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_4CC);
1087 IF_EQUAL_RETURN(guid, MF_MT_ORIGINAL_WAVE_FORMAT_TAG);
1088 // Media types
1089 IF_EQUAL_RETURN(guid, MFMediaType_Audio);
1090 IF_EQUAL_RETURN(guid, MFMediaType_Video);
1091 IF_EQUAL_RETURN(guid, MFMediaType_Protected);
1092 IF_EQUAL_RETURN(guid, MFMediaType_SAMI);
1093 IF_EQUAL_RETURN(guid, MFMediaType_Script);
1094 IF_EQUAL_RETURN(guid, MFMediaType_Image);
1095 IF_EQUAL_RETURN(guid, MFMediaType_HTML);
1096 IF_EQUAL_RETURN(guid, MFMediaType_Binary);
1097 IF_EQUAL_RETURN(guid, MFMediaType_FileTransfer);
1098 IF_EQUAL_RETURN(guid, MFVideoFormat_AI44); // FCC('AI44')
1099 IF_EQUAL_RETURN(guid, MFVideoFormat_ARGB32); // D3DFMT_A8R8G8B8
1100 IF_EQUAL_RETURN(guid, MFVideoFormat_AYUV); // FCC('AYUV')
1101 IF_EQUAL_RETURN(guid, MFVideoFormat_DV25); // FCC('dv25')
1102 IF_EQUAL_RETURN(guid, MFVideoFormat_DV50); // FCC('dv50')
1103 IF_EQUAL_RETURN(guid, MFVideoFormat_DVH1); // FCC('dvh1')
1104 IF_EQUAL_RETURN(guid, MFVideoFormat_DVSD); // FCC('dvsd')
1105 IF_EQUAL_RETURN(guid, MFVideoFormat_DVSL); // FCC('dvsl')
1106 IF_EQUAL_RETURN(guid, MFVideoFormat_H264); // FCC('H264')
1107 IF_EQUAL_RETURN(guid, MFVideoFormat_I420); // FCC('I420')
1108 IF_EQUAL_RETURN(guid, MFVideoFormat_IYUV); // FCC('IYUV')
1109 IF_EQUAL_RETURN(guid, MFVideoFormat_M4S2); // FCC('M4S2')
1110 IF_EQUAL_RETURN(guid, MFVideoFormat_MJPG);
1111 IF_EQUAL_RETURN(guid, MFVideoFormat_MP43); // FCC('MP43')
1112 IF_EQUAL_RETURN(guid, MFVideoFormat_MP4S); // FCC('MP4S')
1113 IF_EQUAL_RETURN(guid, MFVideoFormat_MP4V); // FCC('MP4V')
1114 IF_EQUAL_RETURN(guid, MFVideoFormat_MPG1); // FCC('MPG1')
1115 IF_EQUAL_RETURN(guid, MFVideoFormat_MSS1); // FCC('MSS1')
1116 IF_EQUAL_RETURN(guid, MFVideoFormat_MSS2); // FCC('MSS2')
1117 IF_EQUAL_RETURN(guid, MFVideoFormat_NV11); // FCC('NV11')
1118 IF_EQUAL_RETURN(guid, MFVideoFormat_NV12); // FCC('NV12')
1119 IF_EQUAL_RETURN(guid, MFVideoFormat_P010); // FCC('P010')
1120 IF_EQUAL_RETURN(guid, MFVideoFormat_P016); // FCC('P016')
1121 IF_EQUAL_RETURN(guid, MFVideoFormat_P210); // FCC('P210')
1122 IF_EQUAL_RETURN(guid, MFVideoFormat_P216); // FCC('P216')
1123 IF_EQUAL_RETURN(guid, MFVideoFormat_RGB24); // D3DFMT_R8G8B8
1124 IF_EQUAL_RETURN(guid, MFVideoFormat_RGB32); // D3DFMT_X8R8G8B8
1125 IF_EQUAL_RETURN(guid, MFVideoFormat_RGB555); // D3DFMT_X1R5G5B5
1126 IF_EQUAL_RETURN(guid, MFVideoFormat_RGB565); // D3DFMT_R5G6B5
1127 IF_EQUAL_RETURN(guid, MFVideoFormat_RGB8);
1128 IF_EQUAL_RETURN(guid, MFVideoFormat_UYVY); // FCC('UYVY')
1129 IF_EQUAL_RETURN(guid, MFVideoFormat_v210); // FCC('v210')
1130 IF_EQUAL_RETURN(guid, MFVideoFormat_v410); // FCC('v410')
1131 IF_EQUAL_RETURN(guid, MFVideoFormat_WMV1); // FCC('WMV1')
1132 IF_EQUAL_RETURN(guid, MFVideoFormat_WMV2); // FCC('WMV2')
1133 IF_EQUAL_RETURN(guid, MFVideoFormat_WMV3); // FCC('WMV3')
1134 IF_EQUAL_RETURN(guid, MFVideoFormat_WVC1); // FCC('WVC1')
1135 IF_EQUAL_RETURN(guid, MFVideoFormat_Y210); // FCC('Y210')
1136 IF_EQUAL_RETURN(guid, MFVideoFormat_Y216); // FCC('Y216')
1137 IF_EQUAL_RETURN(guid, MFVideoFormat_Y410); // FCC('Y410')
1138 IF_EQUAL_RETURN(guid, MFVideoFormat_Y416); // FCC('Y416')
1139 IF_EQUAL_RETURN(guid, MFVideoFormat_Y41P);
1140 IF_EQUAL_RETURN(guid, MFVideoFormat_Y41T);
1141 IF_EQUAL_RETURN(guid, MFVideoFormat_YUY2); // FCC('YUY2')
1142 IF_EQUAL_RETURN(guid, MFVideoFormat_YV12); // FCC('YV12')
1143 IF_EQUAL_RETURN(guid, MFVideoFormat_YVYU);
1144 IF_EQUAL_RETURN(guid, MFAudioFormat_PCM); // WAVE_FORMAT_PCM
1145 IF_EQUAL_RETURN(guid, MFAudioFormat_Float); // WAVE_FORMAT_IEEE_FLOAT
1146 IF_EQUAL_RETURN(guid, MFAudioFormat_DTS); // WAVE_FORMAT_DTS
1147 IF_EQUAL_RETURN(guid, MFAudioFormat_Dolby_AC3_SPDIF); // WAVE_FORMAT_DOLBY_AC3_SPDIF
1148 IF_EQUAL_RETURN(guid, MFAudioFormat_DRM); // WAVE_FORMAT_DRM
1149 IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV8); // WAVE_FORMAT_WMAUDIO2
1150 IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudioV9); // WAVE_FORMAT_WMAUDIO3
1151 IF_EQUAL_RETURN(guid, MFAudioFormat_WMAudio_Lossless); // WAVE_FORMAT_WMAUDIO_LOSSLESS
1152 IF_EQUAL_RETURN(guid, MFAudioFormat_WMASPDIF); // WAVE_FORMAT_WMASPDIF
1153 IF_EQUAL_RETURN(guid, MFAudioFormat_MSP1); // WAVE_FORMAT_WMAVOICE9
1154 IF_EQUAL_RETURN(guid, MFAudioFormat_MP3); // WAVE_FORMAT_MPEGLAYER3
1155 IF_EQUAL_RETURN(guid, MFAudioFormat_MPEG); // WAVE_FORMAT_MPEG
1156 IF_EQUAL_RETURN(guid, MFAudioFormat_AAC); // WAVE_FORMAT_MPEG_HEAAC
1157 IF_EQUAL_RETURN(guid, MFAudioFormat_ADTS); // WAVE_FORMAT_MPEG_ADTS_AAC
1158 return NULL;
1159 }
1160
FormatReader::FormatReader(void)
1162 {
1163 }
1164
MediaType FormatReader::Read(IMFMediaType *pType)
1166 {
1167 UINT32 count = 0;
1168 MediaType out;
1169 HRESULT hr = pType->LockStore();
1170 if (FAILED(hr))
1171 {
1172 return out;
1173 }
1174 hr = pType->GetCount(&count);
1175 if (FAILED(hr))
1176 {
1177 return out;
1178 }
1179 for (UINT32 i = 0; i < count; i++)
1180 {
1181 hr = LogAttributeValueByIndexNew(pType, i, out);
1182 if (FAILED(hr))
1183 {
1184 break;
1185 }
1186 }
1187 hr = pType->UnlockStore();
1188 if (FAILED(hr))
1189 {
1190 return out;
1191 }
1192 return out;
1193 }
1194
FormatReader::~FormatReader(void)
1196 {
1197 }
1198
1199 #define CHECK_HR(x) if (FAILED(x)) { goto done; }
1200
ImageGrabberCallback::ImageGrabberCallback(bool synchronous):
1202 m_cRef(1),
1203 ig_RIE(true),
1204 ig_Close(false),
1205 ig_Synchronous(synchronous),
1206 ig_hFrameReady(synchronous ? CreateEvent(NULL, FALSE, FALSE, NULL): 0),
1207 ig_hFrameGrabbed(synchronous ? CreateEvent(NULL, FALSE, TRUE, NULL): 0),
1208 ig_hFinish(CreateEvent(NULL, TRUE, FALSE, NULL))
1209 {}
1210
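// In synchronous mode the constructor above creates two auto-reset events that, judging by
// their names and initial states, implement a producer/consumer handshake: ig_hFrameGrabbed
// starts signaled (the grabber may write the buffer), ig_hFrameReady starts unsignaled
// (no frame yet), and ig_hFinish is a manual-reset "session ended" flag. A hypothetical
// consumer-side sketch of that handshake (not the actual implementation in this file):
//
//   HANDLE h[2] = { grabber->ig_hFrameReady, grabber->ig_hFinish };
//   if (WaitForMultipleObjects(2, h, FALSE, INFINITE) == WAIT_OBJECT_0)
//   {
//       RawImage *ri = grabber->getRawImage();   // copy out the freshly written frame
//       SetEvent(grabber->ig_hFrameGrabbed);     // let the grabber write the next one
//   }
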
ImageGrabber::ImageGrabber(unsigned int deviceID, bool synchronous):
1212 ImageGrabberCallback(synchronous),
1213 ig_DeviceID(deviceID),
1214 ig_pSource(NULL),
1215 ig_pSession(NULL),
1216 ig_pTopology(NULL)
1217 {}
1218
ImageGrabber::~ImageGrabber(void)
1220 {
1221 if (ig_pSession)
1222 {
1223 ig_pSession->Shutdown();
1224 }
1225
1226 CloseHandle(ig_hFinish);
1227
1228 if (ig_Synchronous)
1229 {
1230 CloseHandle(ig_hFrameReady);
1231 CloseHandle(ig_hFrameGrabbed);
1232 }
1233
1234 SafeRelease(&ig_pSession);
1235 SafeRelease(&ig_pTopology);
1236
1237 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: Destroying instance of the ImageGrabber class\n", ig_DeviceID);
1238 }
1239
1240 #ifdef WINRT
1241
ImageGrabberWinRT::ImageGrabberWinRT(bool synchronous):
1243 ImageGrabberCallback(synchronous),
1244 ig_pMediaSink(NULL)
1245 {
1246 ig_pMedCapSource = nullptr;
1247 }
1248
ImageGrabberWinRT::~ImageGrabberWinRT(void)
1250 {
// stop must already have been performed and completed by the object owner
1252 if (ig_pMediaSink != NULL) {
1253 ((IMFMediaSink*)ig_pMediaSink)->Shutdown();
1254 }
1255 SafeRelease(&ig_pMediaSink);
1256 RELEASE_AGILE_WRL(ig_pMedCapSource)
1257
1258 CloseHandle(ig_hFinish);
1259
1260 if (ig_Synchronous)
1261 {
1262 CloseHandle(ig_hFrameReady);
1263 CloseHandle(ig_hFrameGrabbed);
1264 }
1265
1266 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE: Destroying instance of the ImageGrabberWinRT class\n");
1267 }
1268
HRESULT ImageGrabberWinRT::initImageGrabber(MAKE_WRL_REF(_MediaCapture) pSource,
1270 GUID VideoFormat)
1271 {
1272 HRESULT hr;
1273 MAKE_WRL_OBJ(_VideoDeviceController) pDevCont;
1274 WRL_PROP_GET(pSource, VideoDeviceController, pDevCont, hr)
1275 if (FAILED(hr)) return hr;
1276 GET_WRL_OBJ_FROM_OBJ(_MediaDeviceController, pMedDevCont, pDevCont, hr)
1277 if (FAILED(hr)) return hr;
1278 MAKE_WRL_OBJ(_MediaEncodingProperties) pMedEncProps;
1279 WRL_METHOD(pMedDevCont, GetMediaStreamProperties, pMedEncProps, hr, WRL_ENUM_GET(_MediaStreamType, MediaStreamType, VideoPreview))
1280 if (FAILED(hr)) return hr;
1281 GET_WRL_OBJ_FROM_OBJ(_VideoEncodingProperties, pVidProps, pMedEncProps, hr);
1282 if (FAILED(hr)) return hr;
1283 _ComPtr<IMFMediaType> pType = NULL;
1284 hr = MediaSink::ConvertPropertiesToMediaType(DEREF_AS_NATIVE_WRL_OBJ(ABI::Windows::Media::MediaProperties::IMediaEncodingProperties, pMedEncProps), &pType);
1285 if (FAILED(hr)) return hr;
1286 MediaType MT = FormatReader::Read(pType.Get());
1287 unsigned int sizeRawImage = 0;
1288 if(VideoFormat == MFVideoFormat_RGB24)
1289 {
1290 sizeRawImage = MT.MF_MT_FRAME_SIZE * 3;
1291 }
1292 else if(VideoFormat == MFVideoFormat_RGB32)
1293 {
1294 sizeRawImage = MT.MF_MT_FRAME_SIZE * 4;
1295 }
1296 sizeRawImage = MT.MF_MT_SAMPLE_SIZE;
1297 CHECK_HR(hr = RawImage::CreateInstance(&ig_RIFirst, sizeRawImage));
1298 CHECK_HR(hr = RawImage::CreateInstance(&ig_RISecond, sizeRawImage));
1299 ig_RIOut = ig_RISecond;
1300 ig_pMedCapSource = pSource;
1301 done:
1302 return hr;
1303 }
1304
HRESULT ImageGrabberWinRT::stopGrabbing(MAKE_WRL_REF(_AsyncAction)* action)
1306 {
1307 HRESULT hr = S_OK;
1308 if (ig_pMedCapSource != nullptr) {
1309 GET_WRL_OBJ_FROM_REF(_MediaCaptureVideoPreview, imedPrevCap, DEREF_AGILE_WRL_OBJ(ig_pMedCapSource), hr)
1310 if (FAILED(hr)) return hr;
1311 MAKE_WRL_REF(_AsyncAction) pAction;
1312 WRL_METHOD_BASE(imedPrevCap, StopPreviewAsync, pAction, hr)
1313 if (SUCCEEDED(hr)) {
1314 #ifdef HAVE_CONCURRENCY
1315 DEFINE_TASK<void> _task = CREATE_TASK DEFINE_RET_TYPE(void)(pAction);
1316 *action = reinterpret_cast<MAKE_WRL_REF(_AsyncAction)>(BEGIN_CREATE_ASYNC(void, _task, this)
1317 HRESULT hr = S_OK;
1318 _task.wait();
1319 SafeRelease(&ig_pMediaSink);
1320 SetEvent(ig_hFinish);
1321 END_CREATE_ASYNC(hr));
1322 #else
1323 *action = nullptr;
1324 #endif
1325 }
1326 }
1327 return hr;
1328 }
1329
HRESULT ImageGrabberWinRT::startGrabbing(MAKE_WRL_REF(_AsyncAction)* action)
1331 {
1332 HRESULT hr = S_OK;
1333 GET_WRL_OBJ_FROM_REF(_MediaCaptureVideoPreview, imedPrevCap, DEREF_AGILE_WRL_OBJ(ig_pMedCapSource), hr)
1334 if (FAILED(hr)) return hr;
1335 ACTIVATE_OBJ(RuntimeClass_Windows_Foundation_Collections_PropertySet, _PropertySet, pSet, hr)
1336 if (FAILED(hr)) return hr;
1337 GET_WRL_OBJ_FROM_OBJ(_Map, spSetting, pSet, hr)
1338 if (FAILED(hr)) return hr;
1339 ACTIVATE_STATIC_OBJ(RuntimeClass_Windows_Foundation_PropertyValue, MAKE_WRL_OBJ(_PropertyValueStatics), spPropVal, hr)
1340 if (FAILED(hr)) return hr;
1341 _ObjectObj pVal;
1342 boolean bReplaced;
1343 WRL_METHOD(spPropVal, CreateUInt32, pVal, hr, (unsigned int)WRL_ENUM_GET(_MediaStreamType, MediaStreamType, VideoPreview))
1344 if (FAILED(hr)) return hr;
1345 WRL_METHOD(spSetting, Insert, bReplaced, hr, DEREF_WRL_OBJ(_StringReference(MF_PROP_VIDTYPE)), DEREF_WRL_OBJ(pVal))
1346 if (FAILED(hr)) return hr;
1347 WRL_METHOD(spSetting, Insert, bReplaced, hr, DEREF_WRL_OBJ(_StringReference(MF_PROP_SAMPLEGRABBERCALLBACK)), reinterpret_cast<_Object>(this))
1348 if (FAILED(hr)) return hr;
1349 MAKE_WRL_OBJ(_VideoDeviceController) pDevCont;
1350 WRL_PROP_GET(ig_pMedCapSource, VideoDeviceController, pDevCont, hr)
1351 if (FAILED(hr)) return hr;
1352 GET_WRL_OBJ_FROM_OBJ(_MediaDeviceController, pMedDevCont, pDevCont, hr)
1353 if (FAILED(hr)) return hr;
1354 MAKE_WRL_OBJ(_MediaEncodingProperties) pMedEncProps;
1355 WRL_METHOD(pMedDevCont, GetMediaStreamProperties, pMedEncProps, hr, WRL_ENUM_GET(_MediaStreamType, MediaStreamType, VideoPreview))
1356 if (FAILED(hr)) return hr;
1357 GET_WRL_OBJ_FROM_OBJ(_VideoEncodingProperties, pVidProps, pMedEncProps, hr);
1358 if (FAILED(hr)) return hr;
1359 ACTIVATE_OBJ(RuntimeClass_Windows_Media_MediaProperties_MediaEncodingProfile, _MediaEncodingProfile, pEncProps, hr)
1360 if (FAILED(hr)) return hr;
1361 WRL_PROP_PUT(pEncProps, Video, DEREF_WRL_OBJ(pVidProps), hr)
1362 if (FAILED(hr)) return hr;
1363 WRL_METHOD(spSetting, Insert, bReplaced, hr, DEREF_WRL_OBJ(_StringReference(MF_PROP_VIDENCPROPS)), DEREF_WRL_OBJ(pVidProps))
1364 if (SUCCEEDED(hr)) {
1365 //can start/stop multiple times with same MediaCapture object if using activatable class
1366 WRL_METHOD(imedPrevCap, _StartPreviewToCustomSinkIdAsync, *action, hr, DEREF_WRL_OBJ(pEncProps), DEREF_WRL_OBJ(_StringReference(RuntimeClass_CV_MediaSink)), DEREF_WRL_OBJ(pSet))
1367 if (FAILED(hr) && hr == REGDB_E_CLASSNOTREG) {
1368 hr = Microsoft::WRL::Make<MediaSink>().CopyTo(&ig_pMediaSink);
1369 if (FAILED(hr)) return hr;
1370 hr = ((ABI::Windows::Media::IMediaExtension*)ig_pMediaSink)->SetProperties(DEREF_AS_NATIVE_WRL_OBJ(ABI::Windows::Foundation::Collections::IPropertySet, pSet));
1371 if (FAILED(hr)) return hr;
1372 WRL_METHOD(imedPrevCap, StartPreviewToCustomSinkAsync, *action, hr, DEREF_WRL_OBJ(pEncProps), reinterpret_cast<MAKE_WRL_REF(_MediaExtension)>(ig_pMediaSink))
1373 }
1374 }
1375 return hr;
1376 }
1377
HRESULT ImageGrabberWinRT::CreateInstance(ImageGrabberWinRT **ppIG, bool synchronous)
1379 {
1380 *ppIG = Microsoft::WRL::Make<ImageGrabberWinRT>(synchronous).Detach();
if (*ppIG == NULL)
1382 {
1383 return E_OUTOFMEMORY;
1384 }
1385 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE: Creating instance of ImageGrabberWinRT\n");
1386 return S_OK;
1387 }
1388 #endif
1389
HRESULT ImageGrabber::initImageGrabber(IMFMediaSource *pSource)
1391 {
1392 _ComPtr<IMFActivate> pSinkActivate = NULL;
1393 _ComPtr<IMFMediaType> pType = NULL;
1394 _ComPtr<IMFPresentationDescriptor> pPD = NULL;
1395 _ComPtr<IMFStreamDescriptor> pSD = NULL;
1396 _ComPtr<IMFMediaTypeHandler> pHandler = NULL;
1397 _ComPtr<IMFMediaType> pCurrentType = NULL;
1398 MediaType MT;
1399 // Clean up.
1400 if (ig_pSession)
1401 {
1402 ig_pSession->Shutdown();
1403 }
1404 SafeRelease(&ig_pSession);
1405 SafeRelease(&ig_pTopology);
1406 ig_pSource = pSource;
1407 HRESULT hr = pSource->CreatePresentationDescriptor(&pPD);
1408 if (FAILED(hr))
1409 {
1410 goto err;
1411 }
1412 BOOL fSelected;
1413 hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
1414 if (FAILED(hr)) {
1415 goto err;
1416 }
1417 hr = pSD->GetMediaTypeHandler(&pHandler);
1418 if (FAILED(hr)) {
1419 goto err;
1420 }
1421 DWORD cTypes = 0;
1422 hr = pHandler->GetMediaTypeCount(&cTypes);
1423 if (FAILED(hr)) {
1424 goto err;
1425 }
1426 if(cTypes > 0)
1427 {
1428 hr = pHandler->GetCurrentMediaType(&pCurrentType);
1429 if (FAILED(hr)) {
1430 goto err;
1431 }
1432 MT = FormatReader::Read(pCurrentType.Get());
1433 }
1434 err:
1435 CHECK_HR(hr);
1436 CHECK_HR(hr = RawImage::CreateInstance(&ig_RIFirst, MT.MF_MT_SAMPLE_SIZE));
1437 CHECK_HR(hr = RawImage::CreateInstance(&ig_RISecond, MT.MF_MT_SAMPLE_SIZE));
1438 ig_RIOut = ig_RISecond;
1439 // Configure the media type that the Sample Grabber will receive.
1440 // Setting the major and subtype is usually enough for the topology loader
1441 // to resolve the topology.
1442 CHECK_HR(hr = MFCreateMediaType(pType.GetAddressOf()));
1443 CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MT.MF_MT_MAJOR_TYPE));
1444 CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, MT.MF_MT_SUBTYPE));
1445 // Create the sample grabber sink.
1446 CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pType.Get(), this, pSinkActivate.GetAddressOf()));
1447 // To run as fast as possible, set this attribute (requires Windows 7):
1448 CHECK_HR(hr = pSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
1449 // Create the Media Session.
1450 CHECK_HR(hr = MFCreateMediaSession(NULL, &ig_pSession));
1451 // Create the topology.
1452 CHECK_HR(hr = CreateTopology(pSource, pSinkActivate.Get(), &ig_pTopology));
1453 done:
1454 // Clean up.
1455 if (FAILED(hr))
1456 {
1457 if (ig_pSession)
1458 {
1459 ig_pSession->Shutdown();
1460 }
1461 SafeRelease(&ig_pSession);
1462 SafeRelease(&ig_pTopology);
1463 }
1464 return hr;
1465 }
1466
void ImageGrabber::stopGrabbing()
1468 {
1469 if(ig_pSession)
1470 ig_pSession->Stop();
1471 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: Stopping of of grabbing of images\n", ig_DeviceID);
1472 }
1473
HRESULT ImageGrabber::startGrabbing(void)
1475 {
1476 PROPVARIANT var;
1477 PropVariantInit(&var);
1478 HRESULT hr = ig_pSession->SetTopology(0, ig_pTopology);
1479 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: Start Grabbing of the images\n", ig_DeviceID);
1480 hr = ig_pSession->Start(&GUID_NULL, &var);
1481 for(;;)
1482 {
1483 _ComPtr<IMFMediaEvent> pEvent = NULL;
1484 HRESULT hrStatus = S_OK;
1485 MediaEventType met;
1486 if(!ig_pSession) break;
1487 hr = ig_pSession->GetEvent(0, &pEvent);
1488 if(!SUCCEEDED(hr))
1489 {
1490 hr = S_OK;
1491 goto done;
1492 }
1493 hr = pEvent->GetStatus(&hrStatus);
1494 if(!SUCCEEDED(hr))
1495 {
1496 hr = S_OK;
1497 goto done;
1498 }
1499 hr = pEvent->GetType(&met);
1500 if(!SUCCEEDED(hr))
1501 {
1502 hr = S_OK;
1503 goto done;
1504 }
1505 if (met == MESessionEnded)
1506 {
1507 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: MESessionEnded\n", ig_DeviceID);
1508 ig_pSession->Stop();
1509 break;
1510 }
1511 if (met == MESessionStopped)
1512 {
1513 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: MESessionStopped \n", ig_DeviceID);
1514 break;
1515 }
1516 #if (WINVER >= 0x0602) // Available since Win 8
1517 if (met == MEVideoCaptureDeviceRemoved)
1518 {
1519 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: MEVideoCaptureDeviceRemoved \n", ig_DeviceID);
1520 break;
1521 }
1522 #endif
1523 if ((met == MEError) || (met == MENonFatalError))
1524 {
1525 pEvent->GetStatus(&hrStatus);
1526 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: MEError | MENonFatalError: %u\n", ig_DeviceID, hrStatus);
1527 break;
1528 }
1529 }
1530 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: Finish startGrabbing \n", ig_DeviceID);
1531
1532 done:
1533 SetEvent(ig_hFinish);
1534
1535 return hr;
1536 }
1537
void ImageGrabberCallback::pauseGrabbing()
1539 {
1540 }
1541
void ImageGrabberCallback::resumeGrabbing()
1543 {
1544 }
1545
HRESULT ImageGrabber::CreateTopology(IMFMediaSource *pSource, IMFActivate *pSinkActivate, IMFTopology **ppTopo)
1547 {
1548 IMFTopology* pTopology = NULL;
1549 _ComPtr<IMFPresentationDescriptor> pPD = NULL;
1550 _ComPtr<IMFStreamDescriptor> pSD = NULL;
1551 _ComPtr<IMFMediaTypeHandler> pHandler = NULL;
1552 _ComPtr<IMFTopologyNode> pNode1 = NULL;
1553 _ComPtr<IMFTopologyNode> pNode2 = NULL;
1554 HRESULT hr = S_OK;
1555 DWORD cStreams = 0;
1556 CHECK_HR(hr = MFCreateTopology(&pTopology));
1557 CHECK_HR(hr = pSource->CreatePresentationDescriptor(pPD.GetAddressOf()));
1558 CHECK_HR(hr = pPD->GetStreamDescriptorCount(&cStreams));
1559 for (DWORD i = 0; i < cStreams; i++)
1560 {
1561 // Look for the first selected video stream and connect it to the sample grabber sink.
1562 BOOL fSelected = FALSE;
1563 GUID majorType;
1564 CHECK_HR(hr = pPD->GetStreamDescriptorByIndex(i, &fSelected, &pSD));
1565 CHECK_HR(hr = pSD->GetMediaTypeHandler(&pHandler));
1566 CHECK_HR(hr = pHandler->GetMajorType(&majorType));
1567 if (majorType == MFMediaType_Video && fSelected)
1568 {
1569 CHECK_HR(hr = AddSourceNode(pTopology, pSource, pPD.Get(), pSD.Get(), pNode1.GetAddressOf()));
1570 CHECK_HR(hr = AddOutputNode(pTopology, pSinkActivate, 0, pNode2.GetAddressOf()));
1571 CHECK_HR(hr = pNode1->ConnectOutput(0, pNode2.Get(), 0));
1572 break;
1573 }
1574 else
1575 {
1576 CHECK_HR(hr = pPD->DeselectStream(i));
1577 }
1578 }
1579 *ppTopo = pTopology;
1580 (*ppTopo)->AddRef();
1581
1582 done:
1583 return hr;
1584 }
1585
1586 HRESULT ImageGrabber::AddSourceNode(
1587 IMFTopology *pTopology, // Topology.
1588 IMFMediaSource *pSource, // Media source.
1589 IMFPresentationDescriptor *pPD, // Presentation descriptor.
1590 IMFStreamDescriptor *pSD, // Stream descriptor.
1591 IMFTopologyNode **ppNode) // Receives the node pointer.
1592 {
1593 _ComPtr<IMFTopologyNode> pNode = NULL;
1594 HRESULT hr = S_OK;
1595 CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_SOURCESTREAM_NODE, pNode.GetAddressOf()));
1596 CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_SOURCE, pSource));
1597 CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPD));
1598 CHECK_HR(hr = pNode->SetUnknown(MF_TOPONODE_STREAM_DESCRIPTOR, pSD));
1599 CHECK_HR(hr = pTopology->AddNode(pNode.Get()));
1600 // Return the pointer to the caller.
1601 *ppNode = pNode.Get();
1602 (*ppNode)->AddRef();
1603
1604 done:
1605 return hr;
1606 }
1607
1608 HRESULT ImageGrabber::AddOutputNode(
1609 IMFTopology *pTopology, // Topology.
1610 IMFActivate *pActivate, // Media sink activation object.
1611 DWORD dwId, // Identifier of the stream sink.
1612 IMFTopologyNode **ppNode) // Receives the node pointer.
1613 {
1614 _ComPtr<IMFTopologyNode> pNode = NULL;
1615 HRESULT hr = S_OK;
1616 CHECK_HR(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, pNode.GetAddressOf()));
1617 CHECK_HR(hr = pNode->SetObject(pActivate));
1618 CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_STREAMID, dwId));
1619 CHECK_HR(hr = pNode->SetUINT32(MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE));
1620 CHECK_HR(hr = pTopology->AddNode(pNode.Get()));
1621 // Return the pointer to the caller.
1622 *ppNode = pNode.Get();
1623 (*ppNode)->AddRef();
1624
1625 done:
1626 return hr;
1627 }
1628
1629 HRESULT ImageGrabber::CreateInstance(ImageGrabber **ppIG, unsigned int deviceID, bool synchronious)
1630 {
1631 *ppIG = new (std::nothrow) ImageGrabber(deviceID, synchronious);
1632 if (*ppIG == NULL)
1633 {
1634 return E_OUTOFMEMORY;
1635 }
1636 DebugPrintOut(L"IMAGEGRABBER VIDEODEVICE %i: Creating instance of ImageGrabber\n", deviceID);
1637 return S_OK;
1638 }
1639
1640 STDMETHODIMP ImageGrabber::QueryInterface(REFIID riid, void** ppv)
1641 {
1642 HRESULT hr = E_NOINTERFACE;
1643 *ppv = NULL;
1644 if(riid == IID_IUnknown || riid == IID_IMFSampleGrabberSinkCallback)
1645 {
1646 *ppv = static_cast<IMFSampleGrabberSinkCallback *>(this);
1647 hr = S_OK;
1648 }
1649 if(riid == IID_IMFClockStateSink)
1650 {
1651 *ppv = static_cast<IMFClockStateSink *>(this);
1652 hr = S_OK;
1653 }
1654 if(SUCCEEDED(hr))
1655 {
1656 reinterpret_cast<IUnknown *>(*ppv)->AddRef();
1657 }
1658 return hr;
1659 }
1660
1661 STDMETHODIMP_(ULONG) ImageGrabber::AddRef()
1662 {
1663 return InterlockedIncrement(&m_cRef);
1664 }
1665
1666 STDMETHODIMP_(ULONG) ImageGrabber::Release()
1667 {
1668 ULONG cRef = InterlockedDecrement(&m_cRef);
1669 if (cRef == 0)
1670 {
1671 delete this;
1672 }
1673 return cRef;
1674 }
1675
1676 STDMETHODIMP ImageGrabberCallback::OnClockStart(MFTIME hnsSystemTime, LONGLONG llClockStartOffset)
1677 {
1678 (void)hnsSystemTime;
1679 (void)llClockStartOffset;
1680 return S_OK;
1681 }
1682
1683 STDMETHODIMP ImageGrabberCallback::OnClockStop(MFTIME hnsSystemTime)
1684 {
1685 (void)hnsSystemTime;
1686 return S_OK;
1687 }
1688
1689 STDMETHODIMP ImageGrabberCallback::OnClockPause(MFTIME hnsSystemTime)
1690 {
1691 (void)hnsSystemTime;
1692 return S_OK;
1693 }
1694
1695 STDMETHODIMP ImageGrabberCallback::OnClockRestart(MFTIME hnsSystemTime)
1696 {
1697 (void)hnsSystemTime;
1698 return S_OK;
1699 }
1700
1701 STDMETHODIMP ImageGrabberCallback::OnClockSetRate(MFTIME hnsSystemTime, float flRate)
1702 {
1703 (void)flRate;
1704 (void)hnsSystemTime;
1705 return S_OK;
1706 }
1707
1708 STDMETHODIMP ImageGrabberCallback::OnSetPresentationClock(IMFPresentationClock* pClock)
1709 {
1710 (void)pClock;
1711 return S_OK;
1712 }
1713
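// OnProcessSample: called by the sample grabber sink for every captured frame. The
// frame is copied into one of two RawImage buffers (ig_RIFirst / ig_RISecond) and
// ig_RIOut is pointed at the freshly written buffer; in asynchronous mode the ig_RIE
// flag is toggled so the next sample lands in the other buffer, while in synchronous
// mode ig_hFrameReady is signalled instead.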
1714 STDMETHODIMP ImageGrabberCallback::OnProcessSample(REFGUID guidMajorMediaType, DWORD dwSampleFlags,
1715 LONGLONG llSampleTime, LONGLONG llSampleDuration, const BYTE * pSampleBuffer,
1716 DWORD dwSampleSize)
1717 {
1718 (void)guidMajorMediaType;
1719 (void)llSampleTime;
1720 (void)dwSampleFlags;
1721 (void)llSampleDuration;
1722 (void)dwSampleSize;
1723
1724 HANDLE tmp[] = {ig_hFinish, ig_hFrameGrabbed, NULL};
1725
1726 DWORD status = WaitForMultipleObjects(2, tmp, FALSE, INFINITE);
1727 if (status == WAIT_OBJECT_0)
1728 {
1729 DebugPrintOut(L"OnProcessFrame called after ig_hFinish event\n");
1730 return S_OK;
1731 }
1732
1733 if(ig_RIE)
1734 {
1735 ig_RIFirst->fastCopy(pSampleBuffer);
1736 ig_RIOut = ig_RIFirst;
1737 }
1738 else
1739 {
1740 ig_RISecond->fastCopy(pSampleBuffer);
1741 ig_RIOut = ig_RISecond;
1742 }
1743
1744 if (ig_Synchronous)
1745 {
1746 SetEvent(ig_hFrameReady);
1747 }
1748 else
1749 {
1750 ig_RIE = !ig_RIE;
1751 }
1752
1753 return S_OK;
1754 }
1755
1756 STDMETHODIMP ImageGrabberCallback::OnShutdown()
1757 {
1758 SetEvent(ig_hFinish);
1759 return S_OK;
1760 }
1761
1762 RawImage *ImageGrabberCallback::getRawImage()
1763 {
1764 return ig_RIOut;
1765 }
1766
1767 DWORD WINAPI MainThreadFunction( LPVOID lpParam )
1768 {
1769 ImageGrabberThread *pIGT = (ImageGrabberThread *)lpParam;
1770 pIGT->run();
1771 return 0;
1772 }
1773
1774 HRESULT ImageGrabberThread::CreateInstance(ImageGrabberThread **ppIGT, IMFMediaSource *pSource, unsigned int deviceID, bool synchronious)
1775 {
1776 *ppIGT = new (std::nothrow) ImageGrabberThread(pSource, deviceID, synchronious);
1777 if (*ppIGT == NULL)
1778 {
1779 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Memory cannot be allocated\n", deviceID);
1780 return E_OUTOFMEMORY;
1781 }
1782 else
1783 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Creating instance of ImageGrabberThread\n", deviceID);
1784 return S_OK;
1785 }
1786
1787 ImageGrabberThread::ImageGrabberThread(IMFMediaSource *pSource, unsigned int deviceID, bool synchronious) :
1788 igt_func(NULL),
1789 igt_Handle(NULL),
1790 igt_stop(false)
1791 {
1792 HRESULT hr = ImageGrabber::CreateInstance(&igt_pImageGrabber, deviceID, synchronious);
1793 igt_DeviceID = deviceID;
1794 if(SUCCEEDED(hr))
1795 {
1796 hr = igt_pImageGrabber->initImageGrabber(pSource);
1797 if(!SUCCEEDED(hr))
1798 {
1799 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: There is a problem with initialization of the instance of the ImageGrabber class\n", deviceID);
1800 }
1801 else
1802 {
1803 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Initialization of instance of the ImageGrabber class\n", deviceID);
1804 }
1805 }
1806 else
1807 {
1808 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: There is a problem with creation of the instance of the ImageGrabber class\n", deviceID);
1809 }
1810 }
1811
1812 void ImageGrabberThread::setEmergencyStopEvent(void *userData, void(*func)(int, void *))
1813 {
1814 if(func)
1815 {
1816 igt_func = func;
1817 igt_userData = userData;
1818 }
1819 }
1820
1821 ImageGrabberThread::~ImageGrabberThread(void)
1822 {
1823 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Destroying ImageGrabberThread\n", igt_DeviceID);
1824 if (igt_Handle)
1825 WaitForSingleObject(igt_Handle, INFINITE);
1826 delete igt_pImageGrabber;
1827 }
1828
1829 void ImageGrabberThread::stop()
1830 {
1831 igt_stop = true;
1832 if(igt_pImageGrabber)
1833 {
1834 igt_pImageGrabber->stopGrabbing();
1835 }
1836 }
1837
1838 void ImageGrabberThread::start()
1839 {
1840 igt_Handle = CreateThread(
1841 NULL, // default security attributes
1842 0, // use default stack size
1843 MainThreadFunction, // thread function name
1844 this, // argument to thread function
1845 0, // use default creation flags
1846 &igt_ThreadIdArray); // returns the thread identifier
1847 }
1848
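// run: thread body started by ImageGrabberThread::start(). It blocks inside
// ImageGrabber::startGrabbing() until the session finishes; if grabbing ends without
// stop() having been requested, the registered emergency-stop callback (if any) is
// invoked.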
1849 void ImageGrabberThread::run()
1850 {
1851 if(igt_pImageGrabber)
1852 {
1853 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Thread for grabbing images is started\n", igt_DeviceID);
1854 HRESULT hr = igt_pImageGrabber->startGrabbing();
1855 if(!SUCCEEDED(hr))
1856 {
1857 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: There is a problem with starting the process of grabbing\n", igt_DeviceID);
1858 }
1859 }
1860 else
1861 {
1862 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: The thread finished without grabbing having been started\n", igt_DeviceID);
1863 }
1864 if(!igt_stop)
1865 {
1866 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Emergency Stop thread\n", igt_DeviceID);
1867 if(igt_func)
1868 {
1869 igt_func(igt_DeviceID, igt_userData);
1870 }
1871 }
1872 else
1873 DebugPrintOut(L"IMAGEGRABBERTHREAD VIDEODEVICE %i: Finish thread\n", igt_DeviceID);
1874 }
1875
1876 ImageGrabber *ImageGrabberThread::getImageGrabber()
1877 {
1878 return igt_pImageGrabber;
1879 }
1880
1881 Media_Foundation::Media_Foundation(void)
1882 {
1883 HRESULT hr = MFStartup(MF_VERSION);
1884 if(!SUCCEEDED(hr))
1885 {
1886 DebugPrintOut(L"MEDIA FOUNDATION: Media Foundation cannot be initialized\n");
1887 }
1888 }
1889
1890 Media_Foundation::~Media_Foundation(void)
1891 {
1892 HRESULT hr = MFShutdown();
1893 if(!SUCCEEDED(hr))
1894 {
1895 DebugPrintOut(L"MEDIA FOUNDATION: Resources cannot be released\n");
1896 }
1897 }
1898
1899 bool Media_Foundation::buildListOfDevices()
1900 {
1901 HRESULT hr = S_OK;
1902 #ifdef WINRT
1903 videoDevices *vDs = &videoDevices::getInstance();
1904 hr = vDs->initDevices(WRL_ENUM_GET(_DeviceClass, DeviceClass, VideoCapture));
1905 #else
1906 _ComPtr<IMFAttributes> pAttributes = NULL;
1907 CoInitialize(NULL);
1908 hr = MFCreateAttributes(pAttributes.GetAddressOf(), 1);
1909 if (SUCCEEDED(hr))
1910 {
1911 hr = pAttributes->SetGUID(
1912 MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
1913 MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
1914 );
1915 }
1916 if (SUCCEEDED(hr))
1917 {
1918 videoDevices *vDs = &videoDevices::getInstance();
1919 hr = vDs->initDevices(pAttributes.Get());
1920 }
1921 #endif
1922 if (FAILED(hr))
1923 {
1924 DebugPrintOut(L"MEDIA FOUNDATION: The access to the video cameras denied\n");
1925 }
1926
1927 return (SUCCEEDED(hr));
1928 }
1929
1930 Media_Foundation& Media_Foundation::getInstance()
1931 {
1932 static Media_Foundation instance;
1933 return instance;
1934 }
1935
1936 RawImage::RawImage(unsigned int size): ri_new(false), ri_pixels(NULL)
1937 {
1938 ri_size = size;
1939 ri_pixels = new unsigned char[size];
1940 memset((void *)ri_pixels,0,ri_size);
1941 }
1942
1943 bool RawImage::isNew()
1944 {
1945 return ri_new;
1946 }
1947
1948 unsigned int RawImage::getSize()
1949 {
1950 return ri_size;
1951 }
1952
1953 RawImage::~RawImage(void)
1954 {
1955 delete []ri_pixels;
1956 ri_pixels = NULL;
1957 }
1958
1959 long RawImage::CreateInstance(RawImage **ppRImage,unsigned int size)
1960 {
1961 *ppRImage = new (std::nothrow) RawImage(size);
1962 if (*ppRImage == NULL)
1963 {
1964 return E_OUTOFMEMORY;
1965 }
1966 return S_OK;
1967 }
1968
1969 void RawImage::setCopy(const BYTE * pSampleBuffer)
1970 {
1971 memcpy(ri_pixels, pSampleBuffer, ri_size);
1972 ri_new = true;
1973 }
1974
1975 void RawImage::fastCopy(const BYTE * pSampleBuffer)
1976 {
1977 memcpy(ri_pixels, pSampleBuffer, ri_size);
1978 ri_new = true;
1979 }
1980
1981 unsigned char * RawImage::getpPixels()
1982 {
1983 ri_new = false;
1984 return ri_pixels;
1985 }
1986
1987 videoDevice::videoDevice(void): vd_IsSetuped(false), vd_LockOut(OpenLock), vd_pFriendlyName(NULL),
1988 vd_Width(0), vd_Height(0), vd_FrameRate(0), vd_pSource(NULL), vd_pImGrTh(NULL), vd_func(NULL), vd_userData(NULL)
1989 {
1990 #ifdef WINRT
1991 vd_pMedCap = nullptr;
1992 vd_cookie.value = 0;
1993 vd_pImGr = NULL;
1994 vd_pAction = nullptr;
1995 #endif
1996 }
1997
1998 void videoDevice::setParametrs(CamParametrs parametrs)
1999 {
2000 if(vd_IsSetuped)
2001 {
2002 if(vd_pSource)
2003 {
2004 Parametr *pParametr = (Parametr *)(&parametrs);
2005 Parametr *pPrevParametr = (Parametr *)(&vd_PrevParametrs);
2006 IAMVideoProcAmp *pProcAmp = NULL;
2007 HRESULT hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcAmp));
2008 if (SUCCEEDED(hr))
2009 {
2010 for(unsigned int i = 0; i < 10; i++)
2011 {
2012 if(pPrevParametr[i].CurrentValue != pParametr[i].CurrentValue || pPrevParametr[i].Flag != pParametr[i].Flag)
2013 hr = pProcAmp->Set(VideoProcAmp_Brightness + i, pParametr[i].CurrentValue, pParametr[i].Flag);
2014 }
2015 pProcAmp->Release();
2016 }
2017 IAMCameraControl *pProcControl = NULL;
2018 hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcControl));
2019 if (SUCCEEDED(hr))
2020 {
2021 for(unsigned int i = 0; i < 7; i++)
2022 {
2023 if(pPrevParametr[10 + i].CurrentValue != pParametr[10 + i].CurrentValue || pPrevParametr[10 + i].Flag != pParametr[10 + i].Flag)
2024 hr = pProcControl->Set(CameraControl_Pan+i, pParametr[10 + i].CurrentValue, pParametr[10 + i].Flag);
2025 }
2026 pProcControl->Release();
2027 }
2028 vd_PrevParametrs = parametrs;
2029 }
2030 }
2031 }
2032
2033 CamParametrs videoDevice::getParametrs()
2034 {
2035 CamParametrs out;
2036 if(vd_IsSetuped)
2037 {
2038 if(vd_pSource)
2039 {
2040 Parametr *pParametr = (Parametr *)(&out);
2041 IAMVideoProcAmp *pProcAmp = NULL;
2042 HRESULT hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcAmp));
2043 if (SUCCEEDED(hr))
2044 {
2045 for(unsigned int i = 0; i < 10; i++)
2046 {
2047 Parametr temp;
2048 hr = pProcAmp->GetRange(VideoProcAmp_Brightness+i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag);
2049 if (SUCCEEDED(hr))
2050 {
2051 temp.CurrentValue = temp.Default;
2052 pParametr[i] = temp;
2053 }
2054 }
2055 pProcAmp->Release();
2056 }
2057 IAMCameraControl *pProcControl = NULL;
2058 hr = vd_pSource->QueryInterface(IID_PPV_ARGS(&pProcControl));
2059 if (SUCCEEDED(hr))
2060 {
2061 for(unsigned int i = 0; i < 7; i++)
2062 {
2063 Parametr temp;
2064 hr = pProcControl->GetRange(CameraControl_Pan+i, &temp.Min, &temp.Max, &temp.Step, &temp.Default, &temp.Flag);
2065 if (SUCCEEDED(hr))
2066 {
2067 temp.CurrentValue = temp.Default;
2068 pParametr[10 + i] = temp;
2069 }
2070 }
2071 pProcControl->Release();
2072 }
2073 }
2074 }
2075 return out;
2076 }
2077
2078 #ifdef WINRT
2079 long videoDevice::resetDevice(MAKE_WRL_REF(_IDeviceInformation) pDevice)
2080 #else
2081 long videoDevice::resetDevice(IMFActivate *pActivate)
2082 #endif
2083 {
2084 HRESULT hr = E_FAIL;
2085 vd_CurrentFormats.clear();
2086 if(vd_pFriendlyName)
2087 CoTaskMemFree(vd_pFriendlyName);
2088 vd_pFriendlyName = NULL;
2089 #ifdef WINRT
2090 if (pDevice)
2091 {
2092 ACTIVATE_OBJ(RuntimeClass_Windows_Media_Capture_MediaCapture, _MediaCapture, pIMedCap, hr)
2093 if (FAILED(hr)) return hr;
2094 ACTIVATE_OBJ(RuntimeClass_Windows_Media_Capture_MediaCaptureInitializationSettings, _MediaCaptureInitializationSettings, pCapInitSet, hr)
2095 if (FAILED(hr)) return hr;
2096 _StringObj str;
2097 WRL_PROP_GET(pDevice, Name, *REF_WRL_OBJ(str), hr)
2098 if (FAILED(hr)) return hr;
2099 unsigned int length = 0;
2100 PCWSTR wstr = WindowsGetStringRawBuffer(reinterpret_cast<HSTRING>(DEREF_WRL_OBJ(str)), &length);
2101 vd_pFriendlyName = (wchar_t*)CoTaskMemAlloc((length + 1) * sizeof(wchar_t));
2102 wcscpy(vd_pFriendlyName, wstr);
2103 WRL_PROP_GET(pDevice, Id, *REF_WRL_OBJ(str), hr)
2104 if (FAILED(hr)) return hr;
2105 WRL_PROP_PUT(pCapInitSet, VideoDeviceId, DEREF_WRL_OBJ(str), hr)
2106 if (FAILED(hr)) return hr;
2107 WRL_PROP_PUT(pCapInitSet, StreamingCaptureMode, WRL_ENUM_GET(_StreamingCaptureMode, StreamingCaptureMode, Video), hr)
2108 if (FAILED(hr)) return hr;
2109 MAKE_WRL_REF(_AsyncAction) pAction;
2110 WRL_METHOD(DEREF_WRL_OBJ(pIMedCap), _InitializeWithSettingsAsync, pAction, hr, DEREF_WRL_OBJ(pCapInitSet))
2111 #ifdef HAVE_CONCURRENCY
2112 DEFINE_TASK<void> _task = CREATE_TASK DEFINE_RET_TYPE(void)(pAction);
2113 if (FAILED(hr)) return hr;
2114 MAKE_WRL_AGILE_REF(_MediaCapture) pAgileMedCap;
2115 pAgileMedCap = PREPARE_TRANSFER_WRL_OBJ(pIMedCap);
2116 Concurrency::critical_section::scoped_lock _LockHolder(vd_lock);
2117 MAKE_WRL_REF(_AsyncAction) pOldAction = vd_pAction;
2118 SAVE_CURRENT_CONTEXT(context);
2119 vd_pAction = reinterpret_cast<MAKE_WRL_REF(_AsyncAction)>(BEGIN_CREATE_ASYNC(void, _task, pOldAction, context, &pAgileMedCap, this)
2120 HRESULT hr = S_OK;
2121 if (pOldAction) CREATE_TASK DEFINE_RET_TYPE(void)(pOldAction).wait();
2122 _task.wait();
2123 if (SUCCEEDED(hr)) {
2124 //all camera capture calls only in original context
2125 BEGIN_CALL_IN_CONTEXT(hr, context, pAgileMedCap, this)
2126 enumerateCaptureFormats(DEREF_AGILE_WRL_OBJ(pAgileMedCap));
2127 END_CALL_IN_CONTEXT_BASE
2128 }
2129 buildLibraryofTypes();
2130 RELEASE_AGILE_WRL(pAgileMedCap)
2131 END_CREATE_ASYNC(hr));
2132 #endif
2133 }
2134 #else
2135 if(pActivate)
2136 {
2137 IMFMediaSource *pSource = NULL;
2138 hr = pActivate->GetAllocatedString(
2139 MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
2140 &vd_pFriendlyName,
2141 NULL
2142 );
2143 hr = pActivate->ActivateObject(
2144 __uuidof(IMFMediaSource),
2145 (void**)&pSource
2146 );
2147 enumerateCaptureFormats(pSource);
2148 buildLibraryofTypes();
2149 SafeRelease(&pSource);
2150 if(FAILED(hr))
2151 {
2152 vd_pFriendlyName = NULL;
2153 DebugPrintOut(L"VIDEODEVICE %i: IMFMediaSource interface cannot be created \n", vd_CurrentNumber);
2154 }
2155 }
2156 #endif
2157 return hr;
2158 }
2159
2160 #ifdef WINRT
2161 long videoDevice::readInfoOfDevice(MAKE_WRL_REF(_IDeviceInformation) pDevice, unsigned int Num)
2162 {
2163 HRESULT hr = -1;
2164 vd_CurrentNumber = Num;
2165 hr = resetDevice(pDevice);
2166 return hr;
2167 }
2168 #else
2169 long videoDevice::readInfoOfDevice(IMFActivate *pActivate, unsigned int Num)
2170 {
2171 vd_CurrentNumber = Num;
2172 return resetDevice(pActivate);
2173 }
2174 #endif
2175
2176 #ifdef WINRT
2177 #ifdef HAVE_CONCURRENCY
2178 long videoDevice::checkDevice(_DeviceClass devClass, DEFINE_TASK<void>* pTask, MAKE_WRL_REF(_IDeviceInformation)* ppDevice)
2179 {
2180 HRESULT hr = S_OK;
2181 ACTIVATE_STATIC_OBJ(RuntimeClass_Windows_Devices_Enumeration_DeviceInformation, MAKE_WRL_OBJ(_DeviceInformationStatics), pDevStat, hr)
2182 if (FAILED(hr)) return hr;
2183 MAKE_WRL_REF(_AsyncOperation<MAKE_WRL_REF(_DeviceInformationCollection)>) pAction;
2184 WRL_METHOD(pDevStat, _FindAllAsyncDeviceClass, pAction, hr, devClass)
2185 if (SUCCEEDED(hr)) {
2186 *pTask = CREATE_TASK DEFINE_RET_TYPE(void)([pAction, &ppDevice, this]() -> DEFINE_RET_FORMAL(void) {
2187 HRESULT hr = S_OK;
2188 MAKE_WRL_OBJ(_VectorView<MAKE_WRL_REF(_DeviceInformation)>) pVector =
2189 CREATE_TASK DEFINE_RET_TYPE(MAKE_WRL_REF(_VectorView<MAKE_WRL_REF(_DeviceInformation)>))(pAction).get();
2190 UINT32 count = 0;
2191 if (SUCCEEDED(hr)) WRL_PROP_GET(pVector, Size, count, hr)
2192 if (SUCCEEDED(hr) && count > 0) {
2193 for (UINT32 i = 0; i < count; i++) {
2194 MAKE_WRL_OBJ(_IDeviceInformation) pDevice;
2195 WRL_METHOD(pVector, GetAt, pDevice, hr, i)
2196 if (SUCCEEDED(hr)) {
2197 _StringObj str;
2198 unsigned int length = 0;
2199 WRL_PROP_GET(pDevice, Name, *REF_WRL_OBJ(str), hr)
2200 PCWSTR wstr = WindowsGetStringRawBuffer(reinterpret_cast<HSTRING>(DEREF_WRL_OBJ(str)), &length);
2201 if (wcscmp(wstr, vd_pFriendlyName) == 0) {
2202 *ppDevice = PREPARE_TRANSFER_WRL_OBJ(pDevice);
2203 }
2204 }
2205 }
2206 }
2207 RET_VAL_BASE;
2208 });
2209 }
2210 return hr;
2211 }
2212 #endif
2213 #else
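// checkDevice: re-enumerates the video capture sources and verifies that the device at
// index vd_CurrentNumber still carries the friendly name recorded by readInfoOfDevice();
// on success an AddRef'd IMFActivate is returned via pDevice.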
2214 long videoDevice::checkDevice(IMFAttributes *pAttributes, IMFActivate **pDevice)
2215 {
2216 IMFActivate **ppDevices = NULL;
2217 UINT32 count;
2218 wchar_t *newFriendlyName = NULL;
2219 HRESULT hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count);
2220 if (SUCCEEDED(hr))
2221 {
2222 if(count > 0)
2223 {
2224 if(count > vd_CurrentNumber)
2225 {
2226 hr = ppDevices[vd_CurrentNumber]->GetAllocatedString(
2227 MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
2228 &newFriendlyName,
2229 NULL
2230 );
2231 if (SUCCEEDED(hr))
2232 {
2233 if(wcscmp(newFriendlyName, vd_pFriendlyName) != 0)
2234 {
2235 DebugPrintOut(L"VIDEODEVICE %i: Chosen device cannot be found \n", vd_CurrentNumber);
2236 hr = E_INVALIDARG;
2237 pDevice = NULL;
2238 }
2239 else
2240 {
2241 *pDevice = ppDevices[vd_CurrentNumber];
2242 (*pDevice)->AddRef();
2243 }
2244 }
2245 else
2246 {
2247 DebugPrintOut(L"VIDEODEVICE %i: Name of device cannot be retrieved \n", vd_CurrentNumber);
2248 }
2249 }
2250 else
2251 {
2252 DebugPrintOut(L"VIDEODEVICE %i: Requested device index exceeds the number of available devices \n", vd_CurrentNumber);
2253 hr = E_INVALIDARG;
2254 }
2255 for(UINT32 i = 0; i < count; i++)
2256 {
2257 SafeRelease(&ppDevices[i]);
2258 }
2259 CoTaskMemFree(ppDevices); // the array itself is allocated by MFEnumDeviceSources
2260 }
2261 else
2262 hr = E_FAIL;
2263 }
2264 else
2265 {
2266 DebugPrintOut(L"VIDEODEVICE %i: List of DeviceSources cannot be enumerated \n", vd_CurrentNumber);
2267 }
2268 return hr;
2269 }
2270 #endif
2271
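// initDevice: (re)creates the media source for the previously selected device. In the
// WinRT build this asynchronously initializes a MediaCapture object; in the desktop
// build it activates an IMFMediaSource from the matching IMFActivate.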
2272 long videoDevice::initDevice()
2273 {
2274 HRESULT hr = S_OK;
2275 CoInitialize(NULL);
2276 #ifdef WINRT
2277 #ifdef HAVE_CONCURRENCY
2278 Concurrency::critical_section::scoped_lock _LockHolder(vd_lock);
2279 MAKE_WRL_REF(_AsyncAction) pOldAction = vd_pAction;
2280 SAVE_CURRENT_CONTEXT(context);
2281 vd_pAction = reinterpret_cast<MAKE_WRL_REF(_AsyncAction)>(BEGIN_CREATE_ASYNC(void, pOldAction, context, this)
2282 HRESULT hr;
2283 if (pOldAction) CREATE_TASK DEFINE_RET_TYPE(void)(pOldAction).wait();
2284 DEFINE_TASK<void> pTask;
2285 MAKE_WRL_OBJ(_IDeviceInformation) pDevInfo;
2286 hr = checkDevice(WRL_ENUM_GET(_DeviceClass, DeviceClass, VideoCapture), &pTask, REF_WRL_OBJ(pDevInfo));
2287 if (SUCCEEDED(hr)) pTask.wait();
2288 if (SUCCEEDED(hr)) {
2289 DEFINE_TASK<void> _task;
2290 BEGIN_CALL_IN_CONTEXT(hr, context, pDevInfo, &_task, context, this)
2291 HRESULT hr;
2292 ACTIVATE_OBJ(RuntimeClass_Windows_Media_Capture_MediaCapture, _MediaCapture, pIMedCap, hr)
2293 if (SUCCEEDED(hr)) {
2294 RELEASE_WRL(vd_pMedCap);
2295 vd_pMedCap = PREPARE_TRANSFER_WRL_OBJ(pIMedCap);
2296 ACTIVATE_OBJ(RuntimeClass_Windows_Media_Capture_MediaCaptureInitializationSettings, _MediaCaptureInitializationSettings, pCapInitSet, hr)
2297 _StringObj str;
2298 if (SUCCEEDED(hr)) {
2299 WRL_PROP_GET(pDevInfo, Id, *REF_WRL_OBJ(str), hr)
2300 if (SUCCEEDED(hr)) {
2301 WRL_PROP_PUT(pCapInitSet, VideoDeviceId, DEREF_WRL_OBJ(str), hr)
2302 }
2303 }
2304 if (SUCCEEDED(hr))
2305 WRL_PROP_PUT(pCapInitSet, StreamingCaptureMode, WRL_ENUM_GET(_StreamingCaptureMode, StreamingCaptureMode, Video), hr)
2306 if (SUCCEEDED(hr)) reinterpret_cast<ABI::Windows::Media::Capture::IMediaCapture*>(DEREF_AGILE_WRL_OBJ(vd_pMedCap))->add_Failed(Microsoft::WRL::Callback<ABI::Windows::Media::Capture::IMediaCaptureFailedEventHandler>([this, context](ABI::Windows::Media::Capture::IMediaCapture*, ABI::Windows::Media::Capture::IMediaCaptureFailedEventArgs*) -> HRESULT {
2307 HRESULT hr;
2308 BEGIN_CALL_IN_CONTEXT(hr, context, this)
2309 closeDevice();
2310 END_CALL_IN_CONTEXT_BASE
2311 return hr;
2312 }).Get(), &vd_cookie);
2313 MAKE_WRL_OBJ(_AsyncAction) pAction;
2314 if (SUCCEEDED(hr)) WRL_METHOD(vd_pMedCap, _InitializeWithSettingsAsync, *REF_WRL_OBJ(pAction), hr, DEREF_WRL_OBJ(pCapInitSet))
2315 if (SUCCEEDED(hr)) _task = CREATE_TASK DEFINE_RET_TYPE(void)(DEREF_WRL_OBJ(pAction));
2316 }
2317 END_CALL_IN_CONTEXT(hr)
2318 _task.wait();
2319 }
2320 END_CREATE_ASYNC(hr));
2321 #endif
2322 #else
2323 _ComPtr<IMFAttributes> pAttributes = NULL;
2324 IMFActivate *vd_pActivate = NULL;
2325 hr = MFCreateAttributes(pAttributes.GetAddressOf(), 1);
2326 if (SUCCEEDED(hr))
2327 {
2328 hr = pAttributes->SetGUID(
2329 MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
2330 MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
2331 );
2332 }
2333 if (SUCCEEDED(hr))
2334 {
2335 hr = checkDevice(pAttributes.Get(), &vd_pActivate);
2336 if (SUCCEEDED(hr) && vd_pActivate)
2337 {
2338 SafeRelease(&vd_pSource);
2339 hr = vd_pActivate->ActivateObject(
2340 __uuidof(IMFMediaSource),
2341 (void**)&vd_pSource
2342 );
2343 if (SUCCEEDED(hr))
2344 {
2345 }
2346 SafeRelease(&vd_pActivate);
2347 }
2348 else
2349 {
2350 DebugPrintOut(L"VIDEODEVICE %i: Device does not exist \n", vd_CurrentNumber);
2351 }
2352 }
2353 else
2354 {
2355 DebugPrintOut(L"VIDEODEVICE %i: The attributes for the video capture devices cannot be created \n", vd_CurrentNumber);
2356 }
2357 #endif
2358 return hr;
2359 }
2360
2361 MediaType videoDevice::getFormat(unsigned int id)
2362 {
2363 if(id < vd_CurrentFormats.size())
2364 {
2365 return vd_CurrentFormats[id];
2366 }
2367 else return MediaType();
2368 }
2369 int videoDevice::getCountFormats()
2370 {
2371 return (int)vd_CurrentFormats.size();
2372 }
2373 void videoDevice::setEmergencyStopEvent(void *userData, void(*func)(int, void *))
2374 {
2375 vd_func = func;
2376 vd_userData = userData;
2377 }
2378 void videoDevice::closeDevice()
2379 {
2380 if(vd_IsSetuped)
2381 {
2382 vd_IsSetuped = false;
2383
2384 #ifdef WINRT
2385 #ifdef HAVE_CONCURRENCY
2386 if (DEREF_AGILE_WRL_OBJ(vd_pMedCap)) {
2387 MAKE_WRL_REF(_AsyncAction) action;
2388 Concurrency::critical_section::scoped_lock _LockHolder(vd_lock);
2389 MAKE_WRL_REF(_AsyncAction) pOldAction = vd_pAction;
2390 vd_pImGr->stopGrabbing(&action);
2391 reinterpret_cast<ABI::Windows::Media::Capture::IMediaCapture*>(DEREF_AGILE_WRL_OBJ(vd_pMedCap))->remove_Failed(vd_cookie);
2392 vd_cookie.value = 0;
2393 vd_pAction = reinterpret_cast<MAKE_WRL_REF(_AsyncAction)>(BEGIN_CREATE_ASYNC(void, action, pOldAction, this)
2394 HRESULT hr = S_OK;
2395 if (pOldAction) CREATE_TASK DEFINE_RET_TYPE(void)(pOldAction).wait();
2396 CREATE_TASK DEFINE_RET_TYPE(void)(action).wait();
2397 RELEASE_WRL(vd_pMedCap)
2398 if(vd_LockOut == RawDataLock) {
2399 delete vd_pImGr;
2400 }
2401 vd_pImGr = NULL;
2402 vd_LockOut = OpenLock;
2403 END_CREATE_ASYNC(hr));
2404 return;
2405 }
2406 #endif
2407 #endif
2408
2409 vd_pSource->Shutdown();
2410 SafeRelease(&vd_pSource);
2411 if(vd_LockOut == RawDataLock)
2412 {
2413 vd_pImGrTh->stop();
2414 Sleep(500);
2415 delete vd_pImGrTh;
2416 }
2417 vd_pImGrTh = NULL;
2418 vd_LockOut = OpenLock;
2419 DebugPrintOut(L"VIDEODEVICE %i: Device is stopped \n", vd_CurrentNumber);
2420 }
2421 }
2422 unsigned int videoDevice::getWidth()
2423 {
2424 if(vd_IsSetuped)
2425 return vd_Width;
2426 else
2427 return 0;
2428 }
2429 unsigned int videoDevice::getHeight()
2430 {
2431 if(vd_IsSetuped)
2432 return vd_Height;
2433 else
2434 return 0;
2435 }
2436
2437 unsigned int videoDevice::getFrameRate() const
2438 {
2439 if(vd_IsSetuped)
2440 return vd_FrameRate;
2441 else
2442 return 0;
2443 }
2444
2445 IMFMediaSource *videoDevice::getMediaSource()
2446 {
2447 IMFMediaSource *out = NULL;
2448 if(vd_LockOut == OpenLock)
2449 {
2450 vd_LockOut = MediaSourceLock;
2451 out = vd_pSource;
2452 }
2453 return out;
2454 }
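// findType: looks up a format index in vd_CaptureFormats, which maps
// frame size (width*height) -> frame rate -> subtype name -> format indices.
// If the requested size is absent the largest available size is used; with
// frameRate == 0 the highest frame rate wins, otherwise the highest rate not
// exceeding the request. Returns -1 when no suitable format exists.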
2455 int videoDevice::findType(unsigned int size, unsigned int frameRate)
2456 {
2457 // For required frame size look for the suitable video format.
2458 // If not found, get the format for the largest available frame size.
2459 FrameRateMap FRM;
2460 std::map<UINT64, FrameRateMap>::const_iterator fmt;
2461 fmt = vd_CaptureFormats.find(size);
2462 if( fmt != vd_CaptureFormats.end() )
2463 FRM = fmt->second;
2464 else if( !vd_CaptureFormats.empty() )
2465 FRM = vd_CaptureFormats.rbegin()->second;
2466
2467 if( FRM.empty() )
2468 return -1;
2469
2470 UINT64 frameRateMax = 0; SUBTYPEMap STMMax;
2471 if(frameRate == 0)
2472 {
2473 std::map<UINT64, SUBTYPEMap>::iterator f = FRM.begin();
2474 for(; f != FRM.end(); f++)
2475 {
2476 // Looking for highest possible frame rate.
2477 if((*f).first >= frameRateMax)
2478 {
2479 frameRateMax = (*f).first;
2480 STMMax = (*f).second;
2481 }
2482 }
2483 }
2484 else
2485 {
2486 std::map<UINT64, SUBTYPEMap>::iterator f = FRM.begin();
2487 for(; f != FRM.end(); f++)
2488 {
2489 // Looking for a frame rate higher than the one found so far but not higher than demanded.
2490 if( (*f).first >= frameRateMax && (*f).first <= frameRate )
2491 {
2492 frameRateMax = (*f).first;
2493 STMMax = (*f).second;
2494 }
2495 }
2496 }
2497 // Get first (default) item from the list if no suitable frame rate found.
2498 if( STMMax.empty() )
2499 STMMax = FRM.begin()->second;
2500
2501 // Check if there are any format types on the list.
2502 if( STMMax.empty() )
2503 return -1;
2504
2505 vectorNum VN = STMMax.begin()->second;
2506 if( VN.empty() )
2507 return -1;
2508
2509 return VN[0];
2510 }
2511
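// buildLibraryofTypes: indexes the MFVideoFormat_RGB24 entries of vd_CurrentFormats
// into the nested vd_CaptureFormats map used by findType(), keyed by frame size, then
// frame rate, then subtype name.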
2512 void videoDevice::buildLibraryofTypes()
2513 {
2514 unsigned int size;
2515 unsigned int framerate;
2516 std::vector<MediaType>::iterator i = vd_CurrentFormats.begin();
2517 int count = 0;
2518 for(; i != vd_CurrentFormats.end(); i++)
2519 {
2520 // Count only supported video formats.
2521 if( (*i).MF_MT_SUBTYPE == MFVideoFormat_RGB24 )
2522 {
2523 size = (*i).MF_MT_FRAME_SIZE;
2524 framerate = (*i).MF_MT_FRAME_RATE_NUMERATOR / (*i).MF_MT_FRAME_RATE_DENOMINATOR;
2525 FrameRateMap FRM = vd_CaptureFormats[size];
2526 SUBTYPEMap STM = FRM[framerate];
2527 String subType((*i).pMF_MT_SUBTYPEName);
2528 vectorNum VN = STM[subType];
2529 VN.push_back(count);
2530 STM[subType] = VN;
2531 FRM[framerate] = STM;
2532 vd_CaptureFormats[size] = FRM;
2533 }
2534 count++;
2535 }
2536 }
2537
2538 #ifdef WINRT
2539 long videoDevice::setDeviceFormat(MAKE_WRL_REF(_MediaCapture) pSource, unsigned long dwFormatIndex, MAKE_WRL_REF(_AsyncAction)* pAction)
2540 {
2541 HRESULT hr;
2542 MAKE_WRL_OBJ(_VideoDeviceController) pDevCont;
2543 WRL_PROP_GET(pSource, VideoDeviceController, pDevCont, hr)
2544 if (FAILED(hr)) return hr;
2545 GET_WRL_OBJ_FROM_OBJ(_MediaDeviceController, pMedDevCont, pDevCont, hr)
2546 if (FAILED(hr)) return hr;
2547 MAKE_WRL_OBJ(_VectorView<MAKE_WRL_REF(_MediaEncodingProperties)>) pVector;
2548 WRL_METHOD(pMedDevCont, GetAvailableMediaStreamProperties, pVector, hr, WRL_ENUM_GET(_MediaStreamType, MediaStreamType, VideoPreview))
2549 if (FAILED(hr)) return hr;
2550 MAKE_WRL_OBJ(_MediaEncodingProperties) pMedEncProps;
2551 WRL_METHOD(pVector, GetAt, pMedEncProps, hr, dwFormatIndex)
2552 if (FAILED(hr)) return hr;
2553 WRL_METHOD(pMedDevCont, SetMediaStreamPropertiesAsync, *pAction, hr, WRL_ENUM_GET(_MediaStreamType, MediaStreamType, VideoPreview), DEREF_WRL_OBJ(pMedEncProps))
2554 return hr;
2555 }
2556 #endif
2557
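// setDeviceFormat: selects the capture format by walking the presentation descriptor
// -> first stream descriptor -> media type handler and calling SetCurrentMediaType()
// with the native media type at dwFormatIndex.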
2558 long videoDevice::setDeviceFormat(IMFMediaSource *pSource, unsigned long dwFormatIndex)
2559 {
2560 _ComPtr<IMFPresentationDescriptor> pPD = NULL;
2561 _ComPtr<IMFStreamDescriptor> pSD = NULL;
2562 _ComPtr<IMFMediaTypeHandler> pHandler = NULL;
2563 _ComPtr<IMFMediaType> pType = NULL;
2564 HRESULT hr = pSource->CreatePresentationDescriptor(pPD.GetAddressOf());
2565 if (FAILED(hr))
2566 {
2567 goto done;
2568 }
2569 BOOL fSelected;
2570 hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, pSD.GetAddressOf());
2571 if (FAILED(hr))
2572 {
2573 goto done;
2574 }
2575 hr = pSD->GetMediaTypeHandler(pHandler.GetAddressOf());
2576 if (FAILED(hr))
2577 {
2578 goto done;
2579 }
2580 hr = pHandler->GetMediaTypeByIndex((DWORD)dwFormatIndex, pType.GetAddressOf());
2581 if (FAILED(hr))
2582 {
2583 goto done;
2584 }
2585 hr = pHandler->SetCurrentMediaType(pType.Get());
2586
2587 done:
2588 return hr;
2589 }
2590
2591 bool videoDevice::isDeviceSetup()
2592 {
2593 return vd_IsSetuped;
2594 }
2595
2596 RawImage * videoDevice::getRawImageOut()
2597 {
2598 if(!vd_IsSetuped) return NULL;
2599 #ifdef WINRT
2600 if(vd_pImGr) return vd_pImGr->getRawImage();
2601 #endif
2602 if(vd_pImGrTh)
2603 return vd_pImGrTh->getImageGrabber()->getRawImage();
2604 else
2605 {
2606 DebugPrintOut(L"VIDEODEVICE %i: The instance of ImageGrabberThread class does not exist \n", vd_CurrentNumber);
2607 }
2608 return NULL;
2609 }
2610
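// isFrameNew: on the first call transitions the device from OpenLock to RawDataLock
// and lazily starts the grabbing machinery (an ImageGrabberThread, or an
// ImageGrabberWinRT in the WinRT build); afterwards it simply reports whether the most
// recent RawImage has not been read yet.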
2611 bool videoDevice::isFrameNew()
2612 {
2613 if(!vd_IsSetuped) return false;
2614 if(vd_LockOut == RawDataLock || vd_LockOut == OpenLock)
2615 {
2616 if(vd_LockOut == OpenLock)
2617 {
2618 vd_LockOut = RawDataLock;
2619
2620 //must already be closed
2621 #ifdef WINRT
2622 if (DEREF_AGILE_WRL_OBJ(vd_pMedCap)) {
2623 MAKE_WRL_REF(_AsyncAction) action;
2624 if (FAILED(ImageGrabberWinRT::CreateInstance(&vd_pImGr))) return false;
2625 if (FAILED(vd_pImGr->initImageGrabber(DEREF_AGILE_WRL_OBJ(vd_pMedCap), MFVideoFormat_RGB24)) || FAILED(vd_pImGr->startGrabbing(&action))) {
2626 delete vd_pImGr;
2627 return false;
2628 }
2629 #ifdef HAVE_CONCURRENCY
2630 Concurrency::critical_section::scoped_lock _LockHolder(vd_lock);
2631 MAKE_WRL_REF(_AsyncAction) pOldAction = vd_pAction;
2632 DEFINE_TASK<void> _task = CREATE_TASK DEFINE_RET_TYPE(void)(action);
2633 vd_pAction = reinterpret_cast<MAKE_WRL_REF(_AsyncAction)>(BEGIN_CREATE_ASYNC(void, _task, pOldAction, this)
2634 HRESULT hr = S_OK;
2635 if (pOldAction) CREATE_TASK DEFINE_RET_TYPE(void)(pOldAction).wait();
2636 _task.wait();
2637 END_CREATE_ASYNC(hr));
2638 #endif
2639 return true;
2640 }
2641 #endif
2642 HRESULT hr = ImageGrabberThread::CreateInstance(&vd_pImGrTh, vd_pSource, vd_CurrentNumber);
2643 if(FAILED(hr))
2644 {
2645 DebugPrintOut(L"VIDEODEVICE %i: The instance of ImageGrabberThread class cannot be created.\n", vd_CurrentNumber);
2646 return false;
2647 }
2648 vd_pImGrTh->setEmergencyStopEvent(vd_userData, vd_func);
2649 vd_pImGrTh->start();
2650 return true;
2651 }
2652 #ifdef WINRT
2653 if(vd_pImGr)
2654 return vd_pImGr->getRawImage()->isNew();
2655 #endif
2656 if(vd_pImGrTh)
2657 return vd_pImGrTh->getImageGrabber()->getRawImage()->isNew();
2658 }
2659 return false;
2660 }
2661
2662 bool videoDevice::isDeviceMediaSource()
2663 {
2664 if(vd_LockOut == MediaSourceLock) return true;
2665 return false;
2666 }
2667
2668 bool videoDevice::isDeviceRawDataSource()
2669 {
2670 if(vd_LockOut == RawDataLock) return true;
2671 return false;
2672 }
2673
2674 bool videoDevice::setupDevice(unsigned int id)
2675 {
2676 if(!vd_IsSetuped)
2677 {
2678 HRESULT hr = initDevice();
2679 if(SUCCEEDED(hr))
2680 {
2681 #ifdef WINRT
2682 #ifdef HAVE_CONCURRENCY
2683 Concurrency::critical_section::scoped_lock _LockHolder(vd_lock);
2684 MAKE_WRL_REF(_AsyncAction) pOldAction = vd_pAction;
2685 SAVE_CURRENT_CONTEXT(context);
2686 vd_pAction = reinterpret_cast<MAKE_WRL_REF(_AsyncAction)>(BEGIN_CREATE_ASYNC(void, pOldAction, context, id, this)
2687 HRESULT hr;
2688 if (pOldAction) CREATE_TASK DEFINE_RET_TYPE(void)(pOldAction).wait();
2689 #endif
2690 #endif
2691 vd_Width = vd_CurrentFormats[id].width;
2692 vd_Height = vd_CurrentFormats[id].height;
2693 vd_FrameRate = vd_CurrentFormats[id].MF_MT_FRAME_RATE_NUMERATOR /
2694 vd_CurrentFormats[id].MF_MT_FRAME_RATE_DENOMINATOR;
2695 #ifdef WINRT
2696 #ifdef HAVE_CONCURRENCY
2697 if (DEREF_AGILE_WRL_OBJ(vd_pMedCap)) {
2698 DEFINE_TASK<void> _task;
2699 BEGIN_CALL_IN_CONTEXT(hr, context, id, &_task, this)
2700 MAKE_WRL_REF(_AsyncAction) pAction;
2701 HRESULT hr = setDeviceFormat(DEREF_AGILE_WRL_OBJ(vd_pMedCap), (DWORD) id, &pAction);
2702 if (SUCCEEDED(hr)) _task = CREATE_TASK DEFINE_RET_TYPE(void)(pAction);
2703 END_CALL_IN_CONTEXT(hr)
2704 if (SUCCEEDED(hr)) _task.wait();
2705 } else
2706 #endif
2707 #endif
2708 hr = setDeviceFormat(vd_pSource, (DWORD) id);
2709 vd_IsSetuped = (SUCCEEDED(hr));
2710 if(vd_IsSetuped)
2711 DebugPrintOut(L"\n\nVIDEODEVICE %i: Device is set up \n", vd_CurrentNumber);
2712 vd_PrevParametrs = getParametrs();
2713 #ifdef WINRT
2714 #ifdef HAVE_CONCURRENCY
2715 END_CREATE_ASYNC(hr));
2716 #endif
2717 return true;
2718 #else
2719 return vd_IsSetuped;
2720 #endif
2721 }
2722 else
2723 {
2724 DebugPrintOut(L"VIDEODEVICE %i: Interface IMFMediaSource cannot be obtained \n", vd_CurrentNumber);
2725 return false;
2726 }
2727 }
2728 else
2729 {
2730 DebugPrintOut(L"VIDEODEVICE %i: Device is already set up \n", vd_CurrentNumber);
2731 return false;
2732 }
2733 }
2734
2735 bool videoDevice::setupDevice(unsigned int w, unsigned int h, unsigned int idealFramerate)
2736 {
2737 int id = findType(w * h, idealFramerate);
2738 if( id < 0 )
2739 return false;
2740
2741 return setupDevice(id);
2742 }
2743
2744 wchar_t *videoDevice::getName()
2745 {
2746 return vd_pFriendlyName;
2747 }
2748
2749 videoDevice::~videoDevice(void)
2750 {
2751 closeDevice();
2752 #ifdef WINRT
2753 RELEASE_WRL(vd_pMedCap)
2754 #endif
2755 SafeRelease(&vd_pSource);
2756 if(vd_pFriendlyName)
2757 CoTaskMemFree(vd_pFriendlyName);
2758 }
2759
2760 #ifdef WINRT
2761 HRESULT videoDevice::enumerateCaptureFormats(MAKE_WRL_REF(_MediaCapture) pSource)
2762 {
2763 HRESULT hr;
2764 MAKE_WRL_OBJ(_VideoDeviceController) pDevCont;
2765 WRL_PROP_GET(pSource, VideoDeviceController, pDevCont, hr)
2766 if (FAILED(hr)) return hr;
2767 GET_WRL_OBJ_FROM_OBJ(_MediaDeviceController, pMedDevCont, pDevCont, hr)
2768 if (FAILED(hr)) return hr;
2769 MAKE_WRL_OBJ(_VectorView<MAKE_WRL_REF(_MediaEncodingProperties)>) pVector;
2770 WRL_METHOD(pMedDevCont, GetAvailableMediaStreamProperties, pVector, hr, WRL_ENUM_GET(_MediaStreamType, MediaStreamType, VideoPreview))
2771 if (FAILED(hr)) return hr;
2772 UINT32 count;
2773 WRL_PROP_GET(pVector, Size, count, hr)
2774 if (FAILED(hr)) return hr;
2775 for (UINT32 i = 0; i < count; i++) {
2776 MAKE_WRL_OBJ(_MediaEncodingProperties) pMedEncProps;
2777 WRL_METHOD(pVector, GetAt, pMedEncProps, hr, i)
2778 if (FAILED(hr)) return hr;
2779 _ComPtr<IMFMediaType> pType = NULL;
2780 hr = MediaSink::ConvertPropertiesToMediaType(DEREF_AS_NATIVE_WRL_OBJ(ABI::Windows::Media::MediaProperties::IMediaEncodingProperties, pMedEncProps), &pType);
2781 if (FAILED(hr)) return hr;
2782 MediaType MT = FormatReader::Read(pType.Get());
2783 vd_CurrentFormats.push_back(MT);
2784 }
2785 return hr;
2786 }
2787 #endif
2788
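// enumerateCaptureFormats: enumerates every native media type exposed by the first
// video stream of the source and stores each one as a MediaType record in
// vd_CurrentFormats.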
2789 HRESULT videoDevice::enumerateCaptureFormats(IMFMediaSource *pSource)
2790 {
2791 _ComPtr<IMFPresentationDescriptor> pPD = NULL;
2792 _ComPtr<IMFStreamDescriptor> pSD = NULL;
2793 _ComPtr<IMFMediaTypeHandler> pHandler = NULL;
2794 _ComPtr<IMFMediaType> pType = NULL;
2795 HRESULT hr = pSource->CreatePresentationDescriptor(pPD.GetAddressOf());
2796 if (FAILED(hr))
2797 {
2798 goto done;
2799 }
2800 BOOL fSelected;
2801 hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, pSD.GetAddressOf());
2802 if (FAILED(hr))
2803 {
2804 goto done;
2805 }
2806 hr = pSD->GetMediaTypeHandler(pHandler.GetAddressOf());
2807 if (FAILED(hr))
2808 {
2809 goto done;
2810 }
2811 DWORD cTypes = 0;
2812 hr = pHandler->GetMediaTypeCount(&cTypes);
2813 if (FAILED(hr))
2814 {
2815 goto done;
2816 }
2817 for (DWORD i = 0; i < cTypes; i++)
2818 {
2819 hr = pHandler->GetMediaTypeByIndex(i, pType.GetAddressOf());
2820 if (FAILED(hr))
2821 {
2822 goto done;
2823 }
2824 MediaType MT = FormatReader::Read(pType.Get());
2825 vd_CurrentFormats.push_back(MT);
2826 }
2827
2828 done:
2829 return hr;
2830 }
2831
2832 videoDevices::videoDevices(void): count(0)
2833 {
2834 #ifdef WINRT
2835 vds_enumTask = nullptr;
2836 #endif
2837 }
2838
2839 void videoDevices::clearDevices()
2840 {
2841 std::vector<videoDevice *>::iterator i = vds_Devices.begin();
2842 for(; i != vds_Devices.end(); ++i)
2843 delete (*i);
2844 vds_Devices.clear();
2845 }
2846
2847 videoDevices::~videoDevices(void)
2848 {
2849 clearDevices();
2850 }
2851
2852 videoDevice * videoDevices::getDevice(unsigned int i)
2853 {
2854 if(i >= vds_Devices.size())
2855 {
2856 return NULL;
2857 }
2858 // i is unsigned, so it can never be negative; the size check above is sufficient.
2862 return vds_Devices[i];
2863 }
2864
2865 #ifdef WINRT
2866 long videoDevices::initDevices(_DeviceClass devClass)
2867 {
2868 HRESULT hr = S_OK;
2869 ACTIVATE_STATIC_OBJ(RuntimeClass_Windows_Devices_Enumeration_DeviceInformation, MAKE_WRL_OBJ(_DeviceInformationStatics), pDevStat, hr)
2870 if (FAILED(hr)) return hr;
2871 MAKE_WRL_REF(_AsyncOperation<MAKE_WRL_REF(_DeviceInformationCollection)>) pAction;
2872 WRL_METHOD(pDevStat, _FindAllAsyncDeviceClass, pAction, hr, devClass)
2873 if (SUCCEEDED(hr)) {
2874 #ifdef HAVE_CONCURRENCY
2875 SAVE_CURRENT_CONTEXT(context);
2876 vds_enumTask = reinterpret_cast<MAKE_WRL_REF(_AsyncAction)>(BEGIN_CREATE_ASYNC(void, pAction, context, this)
2877 HRESULT hr = S_OK;
2878 MAKE_WRL_OBJ(_VectorView<MAKE_WRL_REF(_DeviceInformation)>) pVector =
2879 CREATE_TASK DEFINE_RET_TYPE(MAKE_WRL_REF(_VectorView<MAKE_WRL_REF(_DeviceInformation)>))(pAction).get();
2880 if (SUCCEEDED(hr)) WRL_PROP_GET(pVector, Size, count, hr)
2881 if (SUCCEEDED(hr) && count > 0) {
2882 for (UINT32 i = 0; i < count; i++) {
2883 videoDevice *vd = new videoDevice;
2884 MAKE_WRL_OBJ(_IDeviceInformation) pDevice;
2885 WRL_METHOD(pVector, GetAt, pDevice, hr, i)
2886 if (SUCCEEDED(hr)) {
2887 BEGIN_CALL_IN_CONTEXT(hr, context, vd, pDevice, i)
2888 vd->readInfoOfDevice(DEREF_WRL_OBJ(pDevice), i);
2889 END_CALL_IN_CONTEXT_BASE
2890 vds_Devices.push_back(vd);
2891 }
2892 }
2893 }
2894 END_CREATE_ASYNC(hr));
2895 #endif
2896 }
2897 return hr;
2898 }
2899 #else
2900 long videoDevices::initDevices(IMFAttributes *pAttributes)
2901 {
2902 clearDevices();
2903 IMFActivate **ppDevices = NULL;
2904 HRESULT hr = MFEnumDeviceSources(pAttributes, &ppDevices, &count);
2905 if (SUCCEEDED(hr))
2906 {
2907 if(count > 0)
2908 {
2909 for(UINT32 i = 0; i < count; i++)
2910 {
2911 videoDevice *vd = new videoDevice;
2912 vd->readInfoOfDevice(ppDevices[i], i);
2913 vds_Devices.push_back(vd);
2914 SafeRelease(&ppDevices[i]);
2915 }
2916 CoTaskMemFree(ppDevices); // the array itself is allocated by MFEnumDeviceSources
2917 }
2918 else
2919 hr = E_INVALIDARG;
2920 }
2921 else
2922 {
2923 DebugPrintOut(L"VIDEODEVICES: The instances of the videoDevice class cannot be created\n");
2924 }
2925 return hr;
2926 }
2927 #endif
2928
2929 unsigned int videoDevices::getCount()
2930 {
2931 return (unsigned int)vds_Devices.size();
2932 }
2933
2934 videoDevices& videoDevices::getInstance()
2935 {
2936 static videoDevices instance;
2937 return instance;
2938 }
2939
2940 Parametr::Parametr()
2941 {
2942 CurrentValue = 0;
2943 Min = 0;
2944 Max = 0;
2945 Step = 0;
2946 Default = 0;
2947 Flag = 0;
2948 }
2949
2950 MediaType::MediaType()
2951 {
2952 pMF_MT_AM_FORMAT_TYPEName = NULL;
2953 pMF_MT_MAJOR_TYPEName = NULL;
2954 pMF_MT_SUBTYPEName = NULL;
2955 Clear();
2956 }
2957
2958 MediaType::~MediaType()
2959 {
2960 Clear();
2961 }
2962
2963 void MediaType::Clear()
2964 {
2965 MF_MT_FRAME_SIZE = 0;
2966 height = 0;
2967 width = 0;
2968 MF_MT_YUV_MATRIX = 0;
2969 MF_MT_VIDEO_LIGHTING = 0;
2970 MF_MT_DEFAULT_STRIDE = 0;
2971 MF_MT_VIDEO_CHROMA_SITING = 0;
2972 MF_MT_FIXED_SIZE_SAMPLES = 0;
2973 MF_MT_VIDEO_NOMINAL_RANGE = 0;
2974 MF_MT_FRAME_RATE_NUMERATOR = 0;
2975 MF_MT_FRAME_RATE_DENOMINATOR = 0;
2976 MF_MT_PIXEL_ASPECT_RATIO = 0;
2977 MF_MT_PIXEL_ASPECT_RATIO_low = 0;
2978 MF_MT_ALL_SAMPLES_INDEPENDENT = 0;
2979 MF_MT_FRAME_RATE_RANGE_MIN = 0;
2980 MF_MT_FRAME_RATE_RANGE_MIN_low = 0;
2981 MF_MT_SAMPLE_SIZE = 0;
2982 MF_MT_VIDEO_PRIMARIES = 0;
2983 MF_MT_INTERLACE_MODE = 0;
2984 MF_MT_FRAME_RATE_RANGE_MAX = 0;
2985 MF_MT_FRAME_RATE_RANGE_MAX_low = 0;
2986 memset(&MF_MT_MAJOR_TYPE, 0, sizeof(GUID));
2987 memset(&MF_MT_AM_FORMAT_TYPE, 0, sizeof(GUID));
2988 memset(&MF_MT_SUBTYPE, 0, sizeof(GUID));
2989 }
2990
2991 videoInput::videoInput(void): accessToDevices(false)
2992 {
2993 DebugPrintOut(L"\n***** VIDEOINPUT LIBRARY - 2013 (Author: Evgeny Pereguda) *****\n\n");
2994 updateListOfDevices();
2995 if(!accessToDevices)
2996 DebugPrintOut(L"INITIALIZATION: There is not any suitable video device\n");
2997 }
2998
2999 void videoInput::updateListOfDevices()
3000 {
3001 Media_Foundation *MF = &Media_Foundation::getInstance();
3002 accessToDevices = MF->buildListOfDevices();
3003 if(!accessToDevices)
3004 DebugPrintOut(L"UPDATING: There is not any suitable video device\n");
3005 }
3006
3007 videoInput::~videoInput(void)
3008 {
3009 DebugPrintOut(L"\n***** CLOSE VIDEOINPUT LIBRARY - 2013 *****\n\n");
3010 }
3011
3012 IMFMediaSource *videoInput::getMediaSource(int deviceID)
3013 {
3014 if(accessToDevices)
3015 {
3016 videoDevice * VD = videoDevices::getInstance().getDevice(deviceID);
3017 if(VD)
3018 {
3019 IMFMediaSource *out = VD->getMediaSource();
3020 if(!out)
3021 DebugPrintOut(L"VideoDevice %i: There is not any suitable IMFMediaSource interface\n", deviceID);
3022 return out;
3023 }
3024 }
3025 else
3026 {
3027 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3028 }
3029 return NULL;
3030 }
3031
3032 bool videoInput::setupDevice(int deviceID, unsigned int id)
3033 {
3034 if (deviceID < 0 )
3035 {
3036 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3037 return false;
3038 }
3039 if(accessToDevices)
3040 {
3041 videoDevices *VDS = &videoDevices::getInstance();
3042 videoDevice * VD = VDS->getDevice(deviceID);
3043 if(VD)
3044 {
3045 bool out = VD->setupDevice(id);
3046 if(!out)
3047 DebugPrintOut(L"VIDEODEVICE %i: This device cannot be started\n", deviceID);
3048 return out;
3049 }
3050 }
3051 else
3052 {
3053 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3054 }
3055 return false;
3056 }
3057
3058 bool videoInput::setupDevice(int deviceID, unsigned int w, unsigned int h, unsigned int idealFramerate)
3059 {
3060 if (deviceID < 0 )
3061 {
3062 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3063 return false;
3064 }
3065 if(accessToDevices)
3066 {
3067 videoDevices *VDS = &videoDevices::getInstance();
3068 videoDevice * VD = VDS->getDevice(deviceID);
3069 if(VD)
3070 {
3071 bool out = VD->setupDevice(w, h, idealFramerate);
3072 if(!out)
3073 DebugPrintOut(L"VIDEODEVICE %i: this device cannot be started\n", deviceID);
3074 return out;
3075 }
3076 }
3077 else
3078 {
3079 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3080 }
3081 return false;
3082 }
3083
3084 MediaType videoInput::getFormat(int deviceID, unsigned int id)
3085 {
3086 if (deviceID < 0)
3087 {
3088 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3089 return MediaType();
3090 }
3091 if(accessToDevices)
3092 {
3093 videoDevices *VDS = &videoDevices::getInstance();
3094 videoDevice * VD = VDS->getDevice(deviceID);
3095 if(VD)
3096 return VD->getFormat(id);
3097 }
3098 else
3099 {
3100 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3101 }
3102 return MediaType();
3103 }
3104
3105 bool videoInput::isDeviceSetup(int deviceID)
3106 {
3107 if (deviceID < 0)
3108 {
3109 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3110 return false;
3111 }
3112 if(accessToDevices)
3113 {
3114 videoDevices *VDS = &videoDevices::getInstance();
3115 videoDevice * VD = VDS->getDevice(deviceID);
3116 if(VD)
3117 return VD->isDeviceSetup();
3118 }
3119 else
3120 {
3121 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3122 }
3123 return false;
3124 }
3125
3126 bool videoInput::isDeviceMediaSource(int deviceID)
3127 {
3128 if (deviceID < 0)
3129 {
3130 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3131 return false;
3132 }
3133 if(accessToDevices)
3134 {
3135 videoDevices *VDS = &videoDevices::getInstance();
3136 videoDevice * VD = VDS->getDevice(deviceID);
3137 if(VD)
3138 return VD->isDeviceMediaSource();
3139 }
3140 else
3141 {
3142 DebugPrintOut(L"Device(s): There is not any suitable video device\n");
3143 }
3144 return false;
3145 }
3146
3147 bool videoInput::isDeviceRawDataSource(int deviceID)
3148 {
3149 if (deviceID < 0)
3150 {
3151 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3152 return false;
3153 }
3154 if(accessToDevices)
3155 {
3156 videoDevices *VDS = &videoDevices::getInstance();
3157 videoDevice * VD = VDS->getDevice(deviceID);
3158 if(VD)
3159 {
3160 bool isRaw = VD->isDeviceRawDataSource();
3161 return isRaw;
3162 }
3163 }
3164 else
3165 {
3166 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3167 }
3168 return false;
3169 }
3170
3171 bool videoInput::isFrameNew(int deviceID)
3172 {
3173 if (deviceID < 0)
3174 {
3175 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3176 return false;
3177 }
3178 if(accessToDevices)
3179 {
3180 if(!isDeviceSetup(deviceID))
3181 {
3182 if(isDeviceMediaSource(deviceID))
3183 return false;
3184 }
3185 videoDevices *VDS = &videoDevices::getInstance();
3186 videoDevice * VD = VDS->getDevice(deviceID);
3187 if(VD)
3188 {
3189 return VD->isFrameNew();
3190 }
3191 }
3192 else
3193 {
3194 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3195 }
3196 return false;
3197 }
3198
3199 #ifdef WINRT
3200 void videoInput::waitForDevice(int deviceID)
3201 {
3202 if (deviceID < 0)
3203 {
3204 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3205 return;
3206 }
3207 if(accessToDevices)
3208 {
3209 if(!isDeviceSetup(deviceID))
3210 {
3211 if(isDeviceMediaSource(deviceID))
3212 return;
3213 }
3214 videoDevices *VDS = &videoDevices::getInstance();
3215 videoDevice * VD = VDS->getDevice(deviceID);
3216 if(VD)
3217 {
3218 VD->waitForDevice();
3219 }
3220 }
3221 else
3222 {
3223 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3224 }
3225 return;
3226 }
3227 #endif
3228
3229 unsigned int videoInput::getCountFormats(int deviceID) const
3230 {
3231 if (deviceID < 0)
3232 {
3233 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3234 return 0;
3235 }
3236 if(accessToDevices)
3237 {
3238 videoDevices *VDS = &videoDevices::getInstance();
3239 videoDevice * VD = VDS->getDevice(deviceID);
3240 if(VD)
3241 return VD->getCountFormats();
3242 }
3243 else
3244 {
3245 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3246 }
3247 return 0;
3248 }
3249
3250 void videoInput::closeAllDevices()
3251 {
3252 videoDevices *VDS = &videoDevices::getInstance();
3253 for(unsigned int i = 0; i < VDS->getCount(); i++)
3254 closeDevice(i);
3255 }
3256
3257 void videoInput::setParametrs(int deviceID, CamParametrs parametrs)
3258 {
3259 if (deviceID < 0)
3260 {
3261 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3262 return;
3263 }
3264 if(accessToDevices)
3265 {
3266 videoDevices *VDS = &videoDevices::getInstance();
3267 videoDevice *VD = VDS->getDevice(deviceID);
3268 if(VD)
3269 VD->setParametrs(parametrs);
3270 }
3271 else
3272 {
3273 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3274 }
3275 }
3276
3277 CamParametrs videoInput::getParametrs(int deviceID)
3278 {
3279 CamParametrs out;
3280 if (deviceID < 0)
3281 {
3282 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3283 return out;
3284 }
3285 if(accessToDevices)
3286 {
3287 videoDevices *VDS = &videoDevices::getInstance();
3288 videoDevice *VD = VDS->getDevice(deviceID);
3289 if(VD)
3290 out = VD->getParametrs();
3291 }
3292 else
3293 {
3294 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3295 }
3296 return out;
3297 }
3298
3299 void videoInput::closeDevice(int deviceID)
3300 {
3301 if (deviceID < 0)
3302 {
3303 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3304 return;
3305 }
3306 if(accessToDevices)
3307 {
3308 videoDevices *VDS = &videoDevices::getInstance();
3309 videoDevice *VD = VDS->getDevice(deviceID);
3310 if(VD)
3311 VD->closeDevice();
3312 }
3313 else
3314 {
3315 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3316 }
3317 }
3318
3319 unsigned int videoInput::getWidth(int deviceID) const
3320 {
3321 if (deviceID < 0)
3322 {
3323 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3324 return 0;
3325 }
3326 if(accessToDevices)
3327 {
3328 videoDevices *VDS = &videoDevices::getInstance();
3329 videoDevice * VD = VDS->getDevice(deviceID);
3330 if(VD)
3331 return VD->getWidth();
3332 }
3333 else
3334 {
3335 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3336 }
3337 return 0;
3338 }
3339
3340 unsigned int videoInput::getHeight(int deviceID) const
3341 {
3342 if (deviceID < 0)
3343 {
3344 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3345 return 0;
3346 }
3347 if(accessToDevices)
3348 {
3349 videoDevices *VDS = &videoDevices::getInstance();
3350 videoDevice * VD = VDS->getDevice(deviceID);
3351 if(VD)
3352 return VD->getHeight();
3353 }
3354 else
3355 {
3356 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3357 }
3358 return 0;
3359 }
3360
3361 unsigned int videoInput::getFrameRate(int deviceID) const
3362 {
3363 if (deviceID < 0)
3364 {
3365 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3366 return 0;
3367 }
3368 if(accessToDevices)
3369 {
3370 videoDevice * VD = videoDevices::getInstance().getDevice(deviceID);
3371 if(VD)
3372 return VD->getFrameRate();
3373 }
3374 else
3375 {
3376 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3377 }
3378 return 0;
3379 }
3380
3381 wchar_t *videoInput::getNameVideoDevice(int deviceID)
3382 {
3383 if (deviceID < 0)
3384 {
3385 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3386 return NULL;
3387 }
3388 if(accessToDevices)
3389 {
3390 videoDevices *VDS = &videoDevices::getInstance();
3391 videoDevice * VD = VDS->getDevice(deviceID);
3392 if(VD)
3393 return VD->getName();
3394 }
3395 else
3396 {
3397 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3398 }
3399 return L"Empty";
3400 }
3401
3402 unsigned int videoInput::listDevices(bool silent)
3403 {
3404 int out = 0;
3405 if(accessToDevices)
3406 {
3407 videoDevices *VDS = &videoDevices::getInstance();
3408 #ifdef WINRT
3409 VDS->waitInit();
3410 #endif
3411 out = VDS->getCount();
3412 if(!silent) DebugPrintOut(L"\nVIDEOINPUT SPY MODE!\n\n");
3413 if(!silent) DebugPrintOut(L"SETUP: Looking For Capture Devices\n");
3414 for(int i = 0; i < out; i++)
3415 {
3416 if(!silent) DebugPrintOut(L"SETUP: %i) %s \n",i, getNameVideoDevice(i));
3417 }
3418 if(!silent) DebugPrintOut(L"SETUP: %i Device(s) found\n\n", out);
3419 }
3420 else
3421 {
3422 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3423 }
3424 return out;
3425 }
3426
3427 videoInput& videoInput::getInstance()
3428 {
3429 static videoInput instance;
3430 return instance;
3431 }
3432
3433 bool videoInput::isDevicesAcceable()
3434 {
3435 return accessToDevices;
3436 }
3437
3438 #ifdef _DEBUG
3439 void videoInput::setVerbose(bool state)
3440 {
3441 DPO *dpo = &DPO::getInstance();
3442 dpo->setVerbose(state);
3443 }
3444 #endif
3445
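// Registers a user callback for the given device; the grabber invokes
// func(deviceID, userData) if capturing stops unexpectedly (see
// videoDevice::setEmergencyStopEvent). A null func is ignored.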
3446 void videoInput::setEmergencyStopEvent(int deviceID, void *userData, void(*func)(int, void *))
3447 {
3448 if (deviceID < 0)
3449 {
3450 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3451 return;
3452 }
3453 if(accessToDevices)
3454 {
3455 if(func)
3456 {
3457 videoDevices *VDS = &videoDevices::getInstance();
3458 videoDevice * VD = VDS->getDevice(deviceID);
3459 if(VD)
3460 VD->setEmergencyStopEvent(userData, func);
3461 }
3462 }
3463 else
3464 {
3465 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3466 }
3467 }
3468
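// Copies the most recent RGB24 frame of a raw-data-source device into the
// caller-provided dstBuffer (which must hold at least width*height*3 bytes),
// optionally swapping the R/B channels and/or flipping the image vertically.
// Returns false if the device is not a raw data source or the buffer sizes
// do not match.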
3469 bool videoInput::getPixels(int deviceID, unsigned char * dstBuffer, bool flipRedAndBlue, bool flipImage)
3470 {
3471 bool success = false;
3472 if (deviceID < 0)
3473 {
3474 DebugPrintOut(L"VIDEODEVICE %i: Invalid device ID\n", deviceID);
3475 return success;
3476 }
3477 if(accessToDevices)
3478 {
3479 bool isRaw = isDeviceRawDataSource(deviceID);
3480 if(isRaw)
3481 {
3482 videoDevice *VD = videoDevices::getInstance().getDevice(deviceID);
3483 RawImage *RIOut = VD->getRawImageOut();
3484 if(RIOut)
3485 {
3486 const unsigned int bytes = 3;
3487 const unsigned int height = VD->getHeight();
3488 const unsigned int width = VD->getWidth();
3489 const unsigned int size = bytes * width * height;
3490 if(size == RIOut->getSize())
3491 {
3492 processPixels(RIOut->getpPixels(), dstBuffer, width, height, bytes, flipRedAndBlue, flipImage);
3493 success = true;
3494 }
3495 else
3496 {
3497 DebugPrintOut(L"ERROR: GetPixels() - bufferSizes do not match!\n");
3498 }
3499 }
3500 else
3501 {
3502 DebugPrintOut(L"ERROR: GetPixels() - Unable to grab frame for device %i\n", deviceID);
3503 }
3504 }
3505 else
3506 {
3507 DebugPrintOut(L"ERROR: GetPixels() - Not raw data source device %i\n", deviceID);
3508 }
3509 }
3510 else
3511 {
3512 DebugPrintOut(L"VIDEODEVICE(s): There is not any suitable video device\n");
3513 }
3514 return success;
3515 }
3516
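// Copies a tightly packed frame from src to dst. With bRGB == false the rows
// are memcpy'd (bottom-up when bFlip is set); with bRGB == true the first and
// third byte of every pixel are swapped, so the RGB<->BGR conversion assumes
// bpp == 3. The caller must size both buffers to width * height * bpp bytes.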
3517 void videoInput::processPixels(unsigned char * src, unsigned char * dst, unsigned int width,
3518 unsigned int height, unsigned int bpp, bool bRGB, bool bFlip)
3519 {
3520 unsigned int widthInBytes = width * bpp;
3521 unsigned int numBytes = widthInBytes * height;
3522 int *dstInt, *srcInt;
3523 if(!bRGB)
3524 {
3525 if(bFlip)
3526 {
3527 for(unsigned int y = 0; y < height; y++)
3528 {
3529 dstInt = (int *)(dst + (y * widthInBytes));
3530 srcInt = (int *)(src + ( (height -y -1) * widthInBytes));
3531 memcpy(dstInt, srcInt, widthInBytes);
3532 }
3533 }
3534 else
3535 {
3536 memcpy(dst, src, numBytes);
3537 }
3538 }
3539 else
3540 {
3541 if(bFlip)
3542 {
3543 unsigned int x = 0;
3544 unsigned int y = (height - 1) * widthInBytes;
3545 src += y;
3546 for(unsigned int i = 0; i < numBytes; i+=3)
3547 {
3548 if(x >= width)
3549 {
3550 x = 0;
3551 src -= widthInBytes*2;
3552 }
3553 *dst = *(src+2);
3554 dst++;
3555 *dst = *(src+1);
3556 dst++;
3557 *dst = *src;
3558 dst++;
3559 src+=3;
3560 x++;
3561 }
3562 }
3563 else
3564 {
3565 for(unsigned int i = 0; i < numBytes; i+=3)
3566 {
3567 *dst = *(src+2);
3568 dst++;
3569 *dst = *(src+1);
3570 dst++;
3571 *dst = *src;
3572 dst++;
3573 src+=3;
3574 }
3575 }
3576 }
3577 }
3578 }
3579
3580 /******* Capturing video from camera via Microsoft Media Foundation **********/
3581 class CvCaptureCAM_MSMF : public CvCapture
3582 {
3583 public:
3584 CvCaptureCAM_MSMF();
3585 virtual ~CvCaptureCAM_MSMF();
3586 virtual bool open( int index );
3587 virtual void close();
3588 virtual double getProperty(int) const;
3589 virtual bool setProperty(int, double);
3590 virtual bool grabFrame();
3591 virtual IplImage* retrieveFrame(int);
3592 virtual int getCaptureDomain() { return CV_CAP_MSMF; } // Return the type of the capture object: CV_CAP_VFW, etc...
3593 protected:
3594 void init();
3595 int index, width, height, fourcc;
3596 IplImage* frame;
3597 videoInput VI;
3598 #ifdef WINRT
3599 #ifdef HAVE_CONCURRENCY
3600 DEFINE_TASK<bool> openTask;
3601 Concurrency::critical_section lock;
3602 #endif
3603 #endif
3604 };
3605
3606 #ifdef _DEBUG
3607 struct SuppressVideoInputMessages
3608 {
3609 SuppressVideoInputMessages() { videoInput::setVerbose(true); }
3610 };
3611
3612 static SuppressVideoInputMessages do_it;
3613 #endif
3614
3615 CvCaptureCAM_MSMF::CvCaptureCAM_MSMF():
3616 index(-1),
3617 width(-1),
3618 height(-1),
3619 fourcc(-1),
3620 frame(NULL),
3621 VI(videoInput::getInstance())
3622 {
3623 CoInitialize(0);
3624 }
3625
3626 CvCaptureCAM_MSMF::~CvCaptureCAM_MSMF()
3627 {
3628 close();
3629 CoUninitialize();
3630 }
3631
3632 void CvCaptureCAM_MSMF::close()
3633 {
3634 if( index >= 0 )
3635 {
3636 VI.closeDevice(index);
3637 index = -1;
3638 cvReleaseImage(&frame);
3639 }
3640 width = height = -1;
3641 }
3642
3643 // Initialize camera input
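// The requested index is clamped to the range of enumerated devices and the
// device is set up with its maximum frame size (width = height = fps = 0).
// On WINRT builds the setup runs asynchronously inside a Concurrency task.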
3644 bool CvCaptureCAM_MSMF::open( int _index )
3645 {
3646 #ifdef WINRT
3647 #ifdef HAVE_CONCURRENCY
3648 SAVE_CURRENT_CONTEXT(context);
3649 auto func = [_index, context, this](DEFINE_RET_VAL(bool)) -> DEFINE_RET_FORMAL(bool) {
3650 #endif
3651 #endif
3652 int try_index = _index;
3653 int devices = 0;
3654 close();
3655 devices = VI.listDevices(true);
3656 if (devices == 0)
3657 return false;
3658 try_index = try_index < 0 ? 0 : (try_index > devices-1 ? devices-1 : try_index);
3659 #ifdef WINRT
3660 HRESULT hr;
3661 #ifdef HAVE_CONCURRENCY
3662 BEGIN_CALL_IN_CONTEXT(hr, context, this, try_index)
3663 #endif
3664 #endif
3665 VI.setupDevice(try_index, 0, 0, 0); // With maximum frame size.
3666 #ifdef WINRT
3667 #ifdef HAVE_CONCURRENCY
3668 END_CALL_IN_CONTEXT_BASE
3669 VI.waitForDevice(try_index);
3670 BEGIN_CALL_IN_CONTEXT(hr, context, this, try_index)
3671 HRESULT hr = S_OK;
3672 #endif
3673 #endif
3674 if( !VI.isFrameNew(try_index) )
3675 #ifdef WINRT
3676 hr = E_FAIL;
3677 #else
3678 return false;
3679 #endif
3680 index = try_index;
3681 #ifdef WINRT
3682 #ifdef HAVE_CONCURRENCY
3683 END_CALL_IN_CONTEXT_BASE
3684 RET_VAL(true)
3685 };
3686 Concurrency::critical_section::scoped_lock _LockHolder(lock);
3687 CREATE_OR_CONTINUE_TASK(openTask, bool, func)
3688 #endif
3689 #endif
3690 return true;
3691 }
3692
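// Polls the backend (Sleep(1) between checks) until a new frame is available
// or the device is torn down; returns whether the device is still set up.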
3693 bool CvCaptureCAM_MSMF::grabFrame()
3694 {
3695 while (VI.isDeviceSetup(index) && !VI.isFrameNew(index))
3696 Sleep(1);
3697 return VI.isDeviceSetup(index);
3698 }
3699
3700 IplImage* CvCaptureCAM_MSMF::retrieveFrame(int)
3701 {
3702 const int w = (int)VI.getWidth(index);
3703 const int h = (int)VI.getHeight(index);
3704 if( !frame || w != frame->width || h != frame->height )
3705 {
3706 if (frame)
3707 cvReleaseImage( &frame );
3708 frame = cvCreateImage( cvSize(w,h), 8, 3 );
3709 }
3710 VI.getPixels( index, (uchar*)frame->imageData, false, true );
3711 return frame;
3712 }
3713
3714 double CvCaptureCAM_MSMF::getProperty( int property_id ) const
3715 {
3716 // image format properties
3717 switch( property_id )
3718 {
3719 case CV_CAP_PROP_FRAME_WIDTH:
3720 return VI.getWidth(index);
3721 case CV_CAP_PROP_FRAME_HEIGHT:
3722 return VI.getHeight(index);
3723 case CV_CAP_PROP_FPS:
3724 return VI.getFrameRate(index);
3725 default:
3726 break;
3727 }
3728 return 0;
3729 }
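// Width/height/FPS requests are cached; once a consistent pair is known and
// differs from the current mode, the device is closed and re-opened with the
// new parameters.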
3730 bool CvCaptureCAM_MSMF::setProperty( int property_id, double value )
3731 {
3732 // image capture properties
3733 unsigned int fps = 0;
3734 bool handled = false;
3735 switch( property_id )
3736 {
3737 case CV_CAP_PROP_FRAME_WIDTH:
3738 width = cvRound(value);
3739 fps = VI.getFrameRate(index);
3740 handled = true;
3741 break;
3742 case CV_CAP_PROP_FRAME_HEIGHT:
3743 height = cvRound(value);
3744 fps = VI.getFrameRate(index);
3745 handled = true;
3746 break;
3747 case CV_CAP_PROP_FPS:
3748 width = (int)VI.getWidth(index); // keep the current frame size while changing FPS
3749 height = (int)VI.getHeight(index);
3750 fps = cvRound(value);
handled = true;
3751 break;
3752 }
3753
3754 if ( handled ) {
3755 if( width > 0 && height > 0 )
3756 {
3757 if( (width != (int)VI.getWidth(index) || height != (int)VI.getHeight(index) || fps != VI.getFrameRate(index))
3758 && VI.isDeviceSetup(index))//|| fourcc != VI.getFourcc(index) )
3759 {
3760 VI.closeDevice(index);
3761 VI.setupDevice(index, width, height, fps);
3762 }
3763 width = height = -1;
3764 return VI.isDeviceSetup(index);
3765 }
3766 return true;
3767 }
3768
3769 return false;
3770 }
3771
3772 class CvCaptureFile_MSMF : public CvCapture
3773 {
3774 public:
3775 CvCaptureFile_MSMF();
3776 virtual ~CvCaptureFile_MSMF();
3777
3778 virtual bool open( const char* filename );
3779 virtual void close();
3780
3781 virtual double getProperty(int) const;
3782 virtual bool setProperty(int, double);
3783 virtual bool grabFrame();
3784 virtual IplImage* retrieveFrame(int);
3785 virtual int getCaptureDomain() { return CV_CAP_MSMF; }
3786 protected:
3787 ImageGrabberThread* grabberThread;
3788 IMFMediaSource* videoFileSource;
3789 std::vector<MediaType> captureFormats;
3790 int captureFormatIndex;
3791 IplImage* frame;
3792 bool isOpened;
3793
3794 HRESULT enumerateCaptureFormats(IMFMediaSource *pSource);
3795 HRESULT getSourceDuration(IMFMediaSource *pSource, MFTIME *pDuration) const;
3796 };
3797
3798 CvCaptureFile_MSMF::CvCaptureFile_MSMF():
3799 grabberThread(NULL),
3800 videoFileSource(NULL),
3801 captureFormatIndex(0),
3802 frame(NULL),
3803 isOpened(false)
3804 {
3805 MFStartup(MF_VERSION);
3806 }
3807
3808 CvCaptureFile_MSMF::~CvCaptureFile_MSMF()
3809 {
3810 close();
3811 MFShutdown();
3812 }
3813
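// Resolves the file name to an IMFMediaSource via IMFSourceResolver, collects
// the RGB24 media types it exposes and, if any are found, starts an
// ImageGrabberThread that decodes frames in the background.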
3814 bool CvCaptureFile_MSMF::open(const char* filename)
3815 {
3816 if (!filename)
3817 return false;
3818
3819 wchar_t* unicodeFileName = new wchar_t[strlen(filename)+1];
3820 MultiByteToWideChar(CP_ACP, 0, filename, -1, unicodeFileName, (int)strlen(filename)+1);
3821
3822 MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;
3823
3824 _ComPtr<IMFSourceResolver> pSourceResolver = NULL;
3825 IUnknown* pUnkSource = NULL;
3826
3827 HRESULT hr = MFCreateSourceResolver(pSourceResolver.GetAddressOf());
3828
3829 if (SUCCEEDED(hr))
3830 {
3831 hr = pSourceResolver->CreateObjectFromURL(
3832 unicodeFileName,
3833 MF_RESOLUTION_MEDIASOURCE,
3834 NULL, // Optional property store.
3835 &ObjectType,
3836 &pUnkSource
3837 );
3838 }
delete[] unicodeFileName; // the wide-char copy of the file name is no longer needed
3839
3840 // Get the IMFMediaSource from the IUnknown pointer.
3841 if (SUCCEEDED(hr))
3842 {
3843 hr = pUnkSource->QueryInterface(IID_PPV_ARGS(&videoFileSource));
3844 }
3845
3846 SafeRelease(&pUnkSource);
3847
3848 if (SUCCEEDED(hr))
3849 {
3850 hr = enumerateCaptureFormats(videoFileSource);
3851 }
3852
3853 if( captureFormats.empty() )
3854 {
3855 isOpened = false;
3856 }
3857 else
3858 {
3859 if (SUCCEEDED(hr))
3860 {
3861 hr = ImageGrabberThread::CreateInstance(&grabberThread, videoFileSource, (unsigned int)-2, true);
3862 }
3863
3864 isOpened = SUCCEEDED(hr);
3865 }
3866
3867 if (isOpened)
3868 {
3869 grabberThread->start();
3870 }
3871
3872 return isOpened;
3873 }
3874
3875 void CvCaptureFile_MSMF::close()
3876 {
3877 if (grabberThread)
3878 {
3879 isOpened = false;
3880 SetEvent(grabberThread->getImageGrabber()->ig_hFinish);
3881 grabberThread->stop();
3882 delete grabberThread;
3883 }
3884
3885 if (videoFileSource)
3886 {
3887 videoFileSource->Shutdown();
3888 }
3889 }
3890
3891 bool CvCaptureFile_MSMF::setProperty(int property_id, double value)
3892 {
3893 // image capture properties
3894 // FIXME: implement method in VideoInput back end
3895 (void) property_id;
3896 (void) value;
3897 return false;
3898 }
3899
3900 double CvCaptureFile_MSMF::getProperty(int property_id) const
3901 {
3902 // image format properties
3903 switch( property_id )
3904 {
3905 case CV_CAP_PROP_FRAME_WIDTH:
3906 return captureFormats[captureFormatIndex].width;
3907 case CV_CAP_PROP_FRAME_HEIGHT:
3908 return captureFormats[captureFormatIndex].height;
3909 case CV_CAP_PROP_FRAME_COUNT:
3910 {
3911 MFTIME duration;
3912 getSourceDuration(this->videoFileSource, &duration);
3913 double fps = ((double)captureFormats[captureFormatIndex].MF_MT_FRAME_RATE_NUMERATOR) /
3914 ((double)captureFormats[captureFormatIndex].MF_MT_FRAME_RATE_DENOMINATOR);
3915 return (double)floor(((double)duration/1e7)*fps+0.5);
3916 }
3917 case CV_CAP_PROP_FOURCC:
3918 return captureFormats[captureFormatIndex].MF_MT_SUBTYPE.Data1;
3919 case CV_CAP_PROP_FPS:
3920 return ((double)captureFormats[captureFormatIndex].MF_MT_FRAME_RATE_NUMERATOR) /
3921 ((double)captureFormats[captureFormatIndex].MF_MT_FRAME_RATE_DENOMINATOR);
3922 }
3923
3924 return -1;
3925 }
3926
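// Lets the grabber proceed (ig_hFrameGrabbed), then blocks until either a new
// frame is ready (ig_hFrameReady) or the grabber finishes (ig_hFinish);
// succeeds only in the former case and only if the raw image is marked new.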
3927 bool CvCaptureFile_MSMF::grabFrame()
3928 {
3929 DWORD waitResult = (DWORD)-1;
3930 if (isOpened)
3931 {
3932 SetEvent(grabberThread->getImageGrabber()->ig_hFrameGrabbed);
3933 HANDLE tmp[] = {grabberThread->getImageGrabber()->ig_hFrameReady, grabberThread->getImageGrabber()->ig_hFinish, 0};
3934 waitResult = WaitForMultipleObjects(2, tmp, FALSE, INFINITE);
3935 }
3936
3937 return isOpened && grabberThread->getImageGrabber()->getRawImage()->isNew() && (waitResult == WAIT_OBJECT_0);
3938 }
3939
3940 IplImage* CvCaptureFile_MSMF::retrieveFrame(int)
3941 {
3942 unsigned int width = captureFormats[captureFormatIndex].width;
3943 unsigned int height = captureFormats[captureFormatIndex].height;
3944 unsigned int bytes = 3;
3945 if( !frame || (int)width != frame->width || (int)height != frame->height )
3946 {
3947 if (frame)
3948 cvReleaseImage( &frame );
3949 frame = cvCreateImage( cvSize(width,height), 8, 3 );
3950 }
3951
3952 RawImage *RIOut = grabberThread->getImageGrabber()->getRawImage();
3953 unsigned int size = bytes * width * height;
3954
3955 bool verticalFlip = captureFormats[captureFormatIndex].MF_MT_DEFAULT_STRIDE < 0;
3956
3957 if(RIOut && size == RIOut->getSize())
3958 {
3959 videoInput::processPixels(RIOut->getpPixels(), (unsigned char*)frame->imageData, width,
3960 height, bytes, false, verticalFlip);
3961 }
3962
3963 return frame;
3964 }
3965
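// Walks the media types of the first stream descriptor and keeps only those
// whose subtype is MFVideoFormat_RGB24, since that is the only format this
// backend converts to IplImage data.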
3966 HRESULT CvCaptureFile_MSMF::enumerateCaptureFormats(IMFMediaSource *pSource)
3967 {
3968 _ComPtr<IMFPresentationDescriptor> pPD = NULL;
3969 _ComPtr<IMFStreamDescriptor> pSD = NULL;
3970 _ComPtr<IMFMediaTypeHandler> pHandler = NULL;
3971 _ComPtr<IMFMediaType> pType = NULL;
3972 HRESULT hr = pSource->CreatePresentationDescriptor(pPD.GetAddressOf());
3973 if (FAILED(hr))
3974 {
3975 goto done;
3976 }
3977
3978 BOOL fSelected;
3979 hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, pSD.GetAddressOf());
3980 if (FAILED(hr))
3981 {
3982 goto done;
3983 }
3984 hr = pSD->GetMediaTypeHandler(pHandler.GetAddressOf());
3985 if (FAILED(hr))
3986 {
3987 goto done;
3988 }
3989 DWORD cTypes = 0;
3990 hr = pHandler->GetMediaTypeCount(&cTypes);
3991 if (FAILED(hr))
3992 {
3993 goto done;
3994 }
3995 for (DWORD i = 0; i < cTypes; i++)
3996 {
3997 hr = pHandler->GetMediaTypeByIndex(i, pType.GetAddressOf());
3998 if (FAILED(hr))
3999 {
4000 goto done;
4001 }
4002 MediaType MT = FormatReader::Read(pType.Get());
4003 // We can capture only RGB video.
4004 if( MT.MF_MT_SUBTYPE == MFVideoFormat_RGB24 )
4005 captureFormats.push_back(MT);
4006 }
4007
4008 done:
4009 return hr;
4010 }
4011
4012 HRESULT CvCaptureFile_MSMF::getSourceDuration(IMFMediaSource *pSource, MFTIME *pDuration) const
4013 {
4014 *pDuration = 0;
4015
4016 IMFPresentationDescriptor *pPD = NULL;
4017
4018 HRESULT hr = pSource->CreatePresentationDescriptor(&pPD);
4019 if (SUCCEEDED(hr))
4020 {
4021 hr = pPD->GetUINT64(MF_PD_DURATION, (UINT64*)pDuration);
4022 pPD->Release();
4023 }
4024 return hr;
4025 }
4026
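// Factory used by the generic capture dispatcher. Illustrative C-API usage
// (the dispatcher reaches this backend when MSMF is selected/available):
//   CvCapture* cap = cvCreateCameraCapture(0);
//   IplImage* img = cvQueryFrame(cap);
//   cvReleaseCapture(&cap);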
4027 CvCapture* cvCreateCameraCapture_MSMF( int index )
4028 {
4029 CvCaptureCAM_MSMF* capture = new CvCaptureCAM_MSMF;
4030 try
4031 {
4032 if( capture->open( index ))
4033 return capture;
4034 }
4035 catch(...)
4036 {
4037 delete capture;
4038 throw;
4039 }
4040 delete capture;
4041 return 0;
4042 }
4043
4044 CvCapture* cvCreateFileCapture_MSMF (const char* filename)
4045 {
4046 CvCaptureFile_MSMF* capture = new CvCaptureFile_MSMF;
4047 try
4048 {
4049 if( capture->open(filename) )
4050 return capture;
4051 else
4052 {
4053 delete capture;
4054 return NULL;
4055 }
4056 }
4057 catch(...)
4058 {
4059 delete capture;
4060 throw;
4061 }
4062 }
4063
4064 //
4065 //
4066 // Media Foundation-based Video Writer
4067 //
4068 //
4069
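// Wraps an IMFSinkWriter: open() maps the FOURCC to a Media Foundation
// subtype and configures an RGB32 input stream, writeFrame() converts each
// BGR IplImage to packed RGB32 and submits it with a running timestamp.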
4070 class CvVideoWriter_MSMF : public CvVideoWriter
4071 {
4072 public:
4073 CvVideoWriter_MSMF();
4074 virtual ~CvVideoWriter_MSMF();
4075 virtual bool open(const char* filename, int fourcc,
4076 double fps, CvSize frameSize, bool isColor);
4077 virtual void close();
4078 virtual bool writeFrame(const IplImage* img);
4079
4080 private:
4081 UINT32 videoWidth;
4082 UINT32 videoHeight;
4083 double fps;
4084 UINT32 bitRate;
4085 UINT32 frameSize;
4086 GUID encodingFormat;
4087 GUID inputFormat;
4088
4089 DWORD streamIndex;
4090 _ComPtr<IMFSinkWriter> sinkWriter;
4091
4092 bool initiated;
4093
4094 LONGLONG rtStart;
4095 UINT64 rtDuration;
4096
4097 HRESULT InitializeSinkWriter(const char* filename);
4098 static const GUID FourCC2GUID(int fourcc);
4099 HRESULT WriteFrame(DWORD *videoFrameBuffer, const LONGLONG& rtStart, const LONGLONG& rtDuration);
4100 };
4101
4102 CvVideoWriter_MSMF::CvVideoWriter_MSMF():
4103 initiated(false)
4104 {
4105 }
4106
4107 CvVideoWriter_MSMF::~CvVideoWriter_MSMF()
4108 {
4109 close();
4110 }
4111
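// Maps an OpenCV FOURCC to the corresponding MFVideoFormat_* GUID; unknown
// codes fall back to MFVideoFormat_H264.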
4112 const GUID CvVideoWriter_MSMF::FourCC2GUID(int fourcc)
4113 {
4114 switch(fourcc)
4115 {
4116 case CV_FOURCC_MACRO('d', 'v', '2', '5'):
4117 return MFVideoFormat_DV25; break;
4118 case CV_FOURCC_MACRO('d', 'v', '5', '0'):
4119 return MFVideoFormat_DV50; break;
4120 case CV_FOURCC_MACRO('d', 'v', 'c', ' '):
4121 return MFVideoFormat_DVC; break;
4122 case CV_FOURCC_MACRO('d', 'v', 'h', '1'):
4123 return MFVideoFormat_DVH1; break;
4124 case CV_FOURCC_MACRO('d', 'v', 'h', 'd'):
4125 return MFVideoFormat_DVHD; break;
4126 case CV_FOURCC_MACRO('d', 'v', 's', 'd'):
4127 return MFVideoFormat_DVSD; break;
4128 case CV_FOURCC_MACRO('d', 'v', 's', 'l'):
4129 return MFVideoFormat_DVSL; break;
4130 #if (WINVER >= 0x0602)
4131 case CV_FOURCC_MACRO('H', '2', '6', '3'): // Available only for Win 8 target.
4132 return MFVideoFormat_H263; break;
4133 #endif
4134 case CV_FOURCC_MACRO('H', '2', '6', '4'):
4135 return MFVideoFormat_H264; break;
4136 case CV_FOURCC_MACRO('M', '4', 'S', '2'):
4137 return MFVideoFormat_M4S2; break;
4138 case CV_FOURCC_MACRO('M', 'J', 'P', 'G'):
4139 return MFVideoFormat_MJPG; break;
4140 case CV_FOURCC_MACRO('M', 'P', '4', '3'):
4141 return MFVideoFormat_MP43; break;
4142 case CV_FOURCC_MACRO('M', 'P', '4', 'S'):
4143 return MFVideoFormat_MP4S; break;
4144 case CV_FOURCC_MACRO('M', 'P', '4', 'V'):
4145 return MFVideoFormat_MP4V; break;
4146 case CV_FOURCC_MACRO('M', 'P', 'G', '1'):
4147 return MFVideoFormat_MPG1; break;
4148 case CV_FOURCC_MACRO('M', 'S', 'S', '1'):
4149 return MFVideoFormat_MSS1; break;
4150 case CV_FOURCC_MACRO('M', 'S', 'S', '2'):
4151 return MFVideoFormat_MSS2; break;
4152 case CV_FOURCC_MACRO('W', 'M', 'V', '1'):
4153 return MFVideoFormat_WMV1; break;
4154 case CV_FOURCC_MACRO('W', 'M', 'V', '2'):
4155 return MFVideoFormat_WMV2; break;
4156 case CV_FOURCC_MACRO('W', 'M', 'V', '3'):
4157 return MFVideoFormat_WMV3; break;
4158 case CV_FOURCC_MACRO('W', 'V', 'C', '1'):
4159 return MFVideoFormat_WVC1; break;
4160 default:
4161 return MFVideoFormat_H264;
4162 }
4163 }
4164
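// The average bit rate is a rough heuristic of fps * width * height (about
// one bit per pixel per frame); the writer always feeds RGB32 frames to the
// encoder selected by FourCC2GUID().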
4165 bool CvVideoWriter_MSMF::open( const char* filename, int fourcc,
4166 double _fps, CvSize frameSize, bool /*isColor*/ )
4167 {
4168 videoWidth = frameSize.width;
4169 videoHeight = frameSize.height;
4170 fps = _fps;
4171 bitRate = (UINT32)fps*videoWidth*videoHeight; // 1-bit per pixel
4172 encodingFormat = FourCC2GUID(fourcc);
4173 inputFormat = MFVideoFormat_RGB32;
4174
4175 HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
4176 if (SUCCEEDED(hr))
4177 {
4178 hr = MFStartup(MF_VERSION);
4179 if (SUCCEEDED(hr))
4180 {
4181 hr = InitializeSinkWriter(filename);
4182 if (SUCCEEDED(hr))
4183 {
4184 initiated = true;
4185 rtStart = 0;
4186 MFFrameRateToAverageTimePerFrame((UINT32)fps, 1, &rtDuration);
4187 }
4188 }
4189 }
4190
4191 return SUCCEEDED(hr);
4192 }
4193
4194 void CvVideoWriter_MSMF::close()
4195 {
4196 if (!initiated)
4197 {
4198 return;
4199 }
4200
4201 initiated = false;
4202 sinkWriter->Finalize();
4203 MFShutdown();
4204 }
4205
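// Packs each BGR pixel of the input image into a 0x00RRGGBB DWORD (top-down
// row order) and hands the buffer to WriteFrame(); on success the running
// timestamp is advanced by one frame duration.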
4206 bool CvVideoWriter_MSMF::writeFrame(const IplImage* img)
4207 {
4208 if (!img)
4209 return false;
4210
4211 int length = img->width * img->height; // one packed RGB32 DWORD per pixel
4212 DWORD* target = new DWORD[length];
4213
4214 for (int rowIdx = 0; rowIdx < img->height; rowIdx++)
4215 {
4216 char* rowStart = img->imageData + rowIdx*img->widthStep;
4217 for (int colIdx = 0; colIdx < img->width; colIdx++)
4218 {
4219 BYTE b = rowStart[colIdx * img->nChannels + 0];
4220 BYTE g = rowStart[colIdx * img->nChannels + 1];
4221 BYTE r = rowStart[colIdx * img->nChannels + 2];
4222
4223 target[rowIdx*img->width+colIdx] = (r << 16) + (g << 8) + b;
4224 }
4225 }
4226
4227 // Send frame to the sink writer.
4228 HRESULT hr = WriteFrame(target, rtStart, rtDuration);
4229 if (FAILED(hr))
4230 {
4231 delete[] target;
4232 return false;
4233 }
4234 rtStart += rtDuration;
4235
4236 delete[] target;
4237
4238 return true;
4239 }
4240
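// Creates the sink writer from the file name (with hardware transforms
// enabled), configures the encoded output type (subtype, bit rate, frame
// size/rate, progressive scan, square pixels) and the RGB32 input type, then
// calls BeginWriting().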
4241 HRESULT CvVideoWriter_MSMF::InitializeSinkWriter(const char* filename)
4242 {
4243 _ComPtr<IMFAttributes> spAttr;
4244 _ComPtr<IMFMediaType> mediaTypeOut;
4245 _ComPtr<IMFMediaType> mediaTypeIn;
4246 _ComPtr<IMFByteStream> spByteStream;
4247
4248 MFCreateAttributes(&spAttr, 10);
4249 spAttr->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, true);
4250
4251 wchar_t* unicodeFileName = new wchar_t[strlen(filename)+1];
4252 MultiByteToWideChar(CP_ACP, 0, filename, -1, unicodeFileName, (int)strlen(filename)+1);
4253
4254 HRESULT hr = MFCreateSinkWriterFromURL(unicodeFileName, NULL, spAttr.Get(), &sinkWriter);
4255
4256 delete[] unicodeFileName;
4257
4258 // Set the output media type.
4259 if (SUCCEEDED(hr))
4260 {
4261 hr = MFCreateMediaType(&mediaTypeOut);
4262 }
4263 if (SUCCEEDED(hr))
4264 {
4265 hr = mediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
4266 }
4267 if (SUCCEEDED(hr))
4268 {
4269 hr = mediaTypeOut->SetGUID(MF_MT_SUBTYPE, encodingFormat);
4270 }
4271 if (SUCCEEDED(hr))
4272 {
4273 hr = mediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, bitRate);
4274 }
4275 if (SUCCEEDED(hr))
4276 {
4277 hr = mediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
4278 }
4279 if (SUCCEEDED(hr))
4280 {
4281 hr = MFSetAttributeSize(mediaTypeOut.Get(), MF_MT_FRAME_SIZE, videoWidth, videoHeight);
4282 }
4283 if (SUCCEEDED(hr))
4284 {
4285 hr = MFSetAttributeRatio(mediaTypeOut.Get(), MF_MT_FRAME_RATE, (UINT32)fps, 1);
4286 }
4287 if (SUCCEEDED(hr))
4288 {
4289 hr = MFSetAttributeRatio(mediaTypeOut.Get(), MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
4290 }
4291
4292 if (SUCCEEDED(hr))
4293 {
4294 hr = sinkWriter->AddStream(mediaTypeOut.Get(), &streamIndex);
4295 }
4296
4297 // Set the input media type.
4298 if (SUCCEEDED(hr))
4299 {
4300 hr = MFCreateMediaType(&mediaTypeIn);
4301 }
4302 if (SUCCEEDED(hr))
4303 {
4304 hr = mediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
4305 }
4306 if (SUCCEEDED(hr))
4307 {
4308 hr = mediaTypeIn->SetGUID(MF_MT_SUBTYPE, inputFormat);
4309 }
4310 if (SUCCEEDED(hr))
4311 {
4312 hr = mediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
4313 }
4314 if (SUCCEEDED(hr))
4315 {
4316 hr = MFSetAttributeSize(mediaTypeIn.Get(), MF_MT_FRAME_SIZE, videoWidth, videoHeight);
4317 }
4318 if (SUCCEEDED(hr))
4319 {
4320 hr = MFSetAttributeRatio(mediaTypeIn.Get(), MF_MT_FRAME_RATE, (UINT32)fps, 1);
4321 }
4322 if (SUCCEEDED(hr))
4323 {
4324 hr = MFSetAttributeRatio(mediaTypeIn.Get(), MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
4325 }
4326
4327 if (SUCCEEDED(hr))
4328 {
4329 hr = sinkWriter->SetInputMediaType(streamIndex, mediaTypeIn.Get(), NULL);
4330 }
4331
4332 // Tell the sink writer to start accepting data.
4333 if (SUCCEEDED(hr))
4334 {
4335 hr = sinkWriter->BeginWriting();
4336 }
4337
4338 return hr;
4339 }
4340
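// Wraps one RGB32 frame in an IMFMediaBuffer/IMFSample with the given
// timestamp and duration and passes it to IMFSinkWriter::WriteSample();
// on ARM the frame is copied with a negative destination stride
// (vertical flip).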
4341 HRESULT CvVideoWriter_MSMF::WriteFrame(DWORD *videoFrameBuffer, const LONGLONG& Start, const LONGLONG& Duration)
4342 {
4343 _ComPtr<IMFSample> sample;
4344 _ComPtr<IMFMediaBuffer> buffer;
4345
4346 const LONG cbWidth = 4 * videoWidth;
4347 const DWORD cbBuffer = cbWidth * videoHeight;
4348
4349 BYTE *pData = NULL;
4350
4351 // Create a new memory buffer.
4352 HRESULT hr = MFCreateMemoryBuffer(cbBuffer, &buffer);
4353
4354 // Lock the buffer and copy the video frame to the buffer.
4355 if (SUCCEEDED(hr))
4356 {
4357 hr = buffer->Lock(&pData, NULL, NULL);
4358 }
4359
4360 if (SUCCEEDED(hr))
4361 {
4362 #if defined(_M_ARM)
4363 hr = MFCopyImage(
4364 pData, // Destination buffer.
4365 -cbWidth, // Destination stride.
4366 (BYTE*)videoFrameBuffer, // First row in source image.
4367 cbWidth, // Source stride.
4368 cbWidth, // Image width in bytes.
4369 videoHeight // Image height in pixels.
4370 );
4371 #else
4372 hr = MFCopyImage(
4373 pData, // Destination buffer.
4374 cbWidth, // Destination stride.
4375 (BYTE*)videoFrameBuffer, // First row in source image.
4376 cbWidth, // Source stride.
4377 cbWidth, // Image width in bytes.
4378 videoHeight // Image height in pixels.
4379 );
4380 #endif
4381 }
4382
4383 if (buffer)
4384 {
4385 buffer->Unlock();
4386 }
4387
4388 // Set the data length of the buffer.
4389 if (SUCCEEDED(hr))
4390 {
4391 hr = buffer->SetCurrentLength(cbBuffer);
4392 }
4393
4394 // Create a media sample and add the buffer to the sample.
4395 if (SUCCEEDED(hr))
4396 {
4397 hr = MFCreateSample(&sample);
4398 }
4399 if (SUCCEEDED(hr))
4400 {
4401 hr = sample->AddBuffer(buffer.Get());
4402 }
4403
4404 // Set the time stamp and the duration.
4405 if (SUCCEEDED(hr))
4406 {
4407 hr = sample->SetSampleTime(Start);
4408 }
4409 if (SUCCEEDED(hr))
4410 {
4411 hr = sample->SetSampleDuration(Duration);
4412 }
4413
4414 // Send the sample to the Sink Writer.
4415 if (SUCCEEDED(hr))
4416 {
4417 hr = sinkWriter->WriteSample(streamIndex, sample.Get());
4418 }
4419
4420 return hr;
4421 }
4422
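// Factory used by the generic writer dispatcher. Illustrative C-API usage:
//   CvVideoWriter* vw = cvCreateVideoWriter("out.wmv",
//                          CV_FOURCC('W','M','V','3'), 30, cvSize(640, 480), 1);
//   cvWriteFrame(vw, img);
//   cvReleaseVideoWriter(&vw);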
4423 CvVideoWriter* cvCreateVideoWriter_MSMF( const char* filename, int fourcc,
4424 double fps, CvSize frameSize, int isColor )
4425 {
4426 CvVideoWriter_MSMF* writer = new CvVideoWriter_MSMF;
4427 if( writer->open( filename, fourcc, fps, frameSize, isColor != 0 ))
4428 return writer;
4429 delete writer;
4430 return NULL;
4431 }
4432
4433 #endif
4434