/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_capture/windows/sink_filter_ds.h"

#include <dvdmedia.h>  // VIDEOINFOHEADER2
#include <initguid.h>

#include <algorithm>
#include <list>

#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/string_utils.h"

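// CLSID for the capture sink filter defined below. CaptureSinkFilter::
// GetClassID() reports this value to the filter graph.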
DEFINE_GUID(CLSID_SINKFILTER,
            0x88cdbbdc,
            0xa73b,
            0x4afa,
            0xac,
            0xbf,
            0x15,
            0xd5,
            0xe2,
            0xce,
            0x12,
            0xc3);

namespace webrtc {
namespace videocapturemodule {
namespace {

// Simple enumeration implementation that enumerates over a single pin :-/
class EnumPins : public IEnumPins {
 public:
  EnumPins(IPin* pin) : pin_(pin) {}

 protected:
  virtual ~EnumPins() {}

 private:
  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override {
    if (riid == IID_IUnknown || riid == IID_IEnumPins) {
      *ppv = static_cast<IEnumPins*>(this);
      AddRef();
      return S_OK;
    }
    return E_NOINTERFACE;
  }

  STDMETHOD(Clone)(IEnumPins** pins) {
    RTC_DCHECK(false);
    return E_NOTIMPL;
  }

  STDMETHOD(Next)(ULONG count, IPin** pins, ULONG* fetched) {
    RTC_DCHECK(count > 0);
    RTC_DCHECK(pins);
    // fetched may be NULL.

    if (pos_ > 0) {
      if (fetched)
        *fetched = 0;
      return S_FALSE;
    }

    ++pos_;
    pins[0] = pin_.get();
    pins[0]->AddRef();
    if (fetched)
      *fetched = 1;

    return count == 1 ? S_OK : S_FALSE;
  }

  STDMETHOD(Skip)(ULONG count) {
    RTC_DCHECK(false);
    return E_NOTIMPL;
  }

  STDMETHOD(Reset)() {
    pos_ = 0;
    return S_OK;
  }

  rtc::scoped_refptr<IPin> pin_;
  int pos_ = 0;
};

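// Returns true if `a` matches `b`, treating GUID_NULL fields in `b` as
// wildcards; when `b` specifies a format block it must match byte for byte.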
bool IsMediaTypePartialMatch(const AM_MEDIA_TYPE& a, const AM_MEDIA_TYPE& b) {
  if (b.majortype != GUID_NULL && a.majortype != b.majortype)
    return false;

  if (b.subtype != GUID_NULL && a.subtype != b.subtype)
    return false;

  if (b.formattype != GUID_NULL) {
    // if the format block is specified then it must match exactly
    if (a.formattype != b.formattype)
      return false;

    if (a.cbFormat != b.cbFormat)
      return false;

    if (a.cbFormat != 0 && memcmp(a.pbFormat, b.pbFormat, a.cbFormat) != 0)
      return false;
  }

  return true;
}

bool IsMediaTypeFullySpecified(const AM_MEDIA_TYPE& type) {
  return type.majortype != GUID_NULL && type.formattype != GUID_NULL;
}

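// Ensures `media_type->pbFormat` points at a CoTaskMemAlloc'd buffer of
// exactly `length` bytes. The existing buffer is reused if it already has the
// right size; otherwise a new one is allocated and the old one freed.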
BYTE* AllocMediaTypeFormatBuffer(AM_MEDIA_TYPE* media_type, ULONG length) {
  RTC_DCHECK(length);
  if (media_type->cbFormat == length)
    return media_type->pbFormat;

  BYTE* buffer = static_cast<BYTE*>(CoTaskMemAlloc(length));
  if (!buffer)
    return nullptr;

  if (media_type->pbFormat) {
    RTC_DCHECK(media_type->cbFormat);
    CoTaskMemFree(media_type->pbFormat);
    media_type->pbFormat = nullptr;
  }

  media_type->cbFormat = length;
  media_type->pbFormat = buffer;
  return buffer;
}

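// Fills `props` with the properties of `sample`. Uses the single
// IMediaSample2::GetProperties() call when that interface is available and
// falls back to the individual IMediaSample accessors otherwise.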
void GetSampleProperties(IMediaSample* sample, AM_SAMPLE2_PROPERTIES* props) {
  rtc::scoped_refptr<IMediaSample2> sample2;
  if (SUCCEEDED(GetComInterface(sample, &sample2))) {
    sample2->GetProperties(sizeof(*props), reinterpret_cast<BYTE*>(props));
    return;
  }

  // Get the properties the hard way.
  props->cbData = sizeof(*props);
  props->dwTypeSpecificFlags = 0;
  props->dwStreamId = AM_STREAM_MEDIA;
  props->dwSampleFlags = 0;

  if (sample->IsDiscontinuity() == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_DATADISCONTINUITY;

  if (sample->IsPreroll() == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_PREROLL;

  if (sample->IsSyncPoint() == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_SPLICEPOINT;

  if (SUCCEEDED(sample->GetTime(&props->tStart, &props->tStop)))
    props->dwSampleFlags |= AM_SAMPLE_TIMEVALID | AM_SAMPLE_STOPVALID;

  if (sample->GetMediaType(&props->pMediaType) == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_TYPECHANGED;

  sample->GetPointer(&props->pbBuffer);
  props->lActual = sample->GetActualDataLength();
  props->cbBuffer = sample->GetSize();
}

// Returns true if the media type is supported, false otherwise.
// For supported types, the |capability| will be populated accordingly.
bool TranslateMediaTypeToVideoCaptureCapability(
    const AM_MEDIA_TYPE* media_type,
    VideoCaptureCapability* capability) {
  RTC_DCHECK(capability);
  if (!media_type || media_type->majortype != MEDIATYPE_Video ||
      !media_type->pbFormat) {
    return false;
  }

  const BITMAPINFOHEADER* bih = nullptr;
  if (media_type->formattype == FORMAT_VideoInfo) {
    bih = &reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat)->bmiHeader;
  } else if (media_type->formattype == FORMAT_VideoInfo2) {
    bih = &reinterpret_cast<VIDEOINFOHEADER2*>(media_type->pbFormat)->bmiHeader;
  } else {
    return false;
  }

  RTC_LOG(LS_INFO) << "TranslateMediaTypeToVideoCaptureCapability width:"
                   << bih->biWidth << " height:" << bih->biHeight
                   << " Compression:0x" << rtc::ToHex(bih->biCompression);

  const GUID& sub_type = media_type->subtype;
  if (sub_type == MEDIASUBTYPE_MJPG &&
      bih->biCompression == MAKEFOURCC('M', 'J', 'P', 'G')) {
    capability->videoType = VideoType::kMJPEG;
  } else if (sub_type == MEDIASUBTYPE_I420 &&
             bih->biCompression == MAKEFOURCC('I', '4', '2', '0')) {
    capability->videoType = VideoType::kI420;
  } else if (sub_type == MEDIASUBTYPE_YUY2 &&
             bih->biCompression == MAKEFOURCC('Y', 'U', 'Y', '2')) {
    capability->videoType = VideoType::kYUY2;
  } else if (sub_type == MEDIASUBTYPE_UYVY &&
             bih->biCompression == MAKEFOURCC('U', 'Y', 'V', 'Y')) {
    capability->videoType = VideoType::kUYVY;
  } else if (sub_type == MEDIASUBTYPE_HDYC) {
    capability->videoType = VideoType::kUYVY;
  } else if (sub_type == MEDIASUBTYPE_RGB24 && bih->biCompression == BI_RGB) {
    capability->videoType = VideoType::kRGB24;
  } else {
    return false;
  }

  // Store the incoming width and height.
  capability->width = bih->biWidth;

  // Store the incoming height. For RGB24 we assume the frame to be upside
  // down (a bottom-up DIB), so a positive height is stored as negative.
  if (sub_type == MEDIASUBTYPE_RGB24 && bih->biHeight > 0) {
    capability->height = -(bih->biHeight);
  } else {
    capability->height = abs(bih->biHeight);
  }

  return true;
}

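// Enumerates the media types this pin offers during connection negotiation:
// one VIDEOINFOHEADER-based AM_MEDIA_TYPE per supported VideoType, using the
// dimensions and frame rate from the requested capability. The requested
// capability's own video type, when supported, is offered first.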
class MediaTypesEnum : public IEnumMediaTypes {
 public:
  MediaTypesEnum(const VideoCaptureCapability& capability)
      : capability_(capability),
        format_preference_order_(
            {// Default preferences, sorted by cost-to-convert-to-i420.
             VideoType::kI420, VideoType::kYUY2, VideoType::kRGB24,
             VideoType::kUYVY, VideoType::kMJPEG}) {
    // Use the preferred video type, if supported.
    auto it = std::find(format_preference_order_.begin(),
                        format_preference_order_.end(), capability_.videoType);
    if (it != format_preference_order_.end()) {
      RTC_LOG(LS_INFO) << "Selected video type: " << *it;
      // Move it to the front of the list, if it isn't already there.
      if (it != format_preference_order_.begin()) {
        format_preference_order_.splice(format_preference_order_.begin(),
                                        format_preference_order_, it,
                                        std::next(it));
      }
    } else {
      // Don't dereference `it` here; it equals end() in this branch.
      RTC_LOG(LS_WARNING) << "Unsupported video type: "
                          << capability_.videoType
                          << ", using default preference list.";
    }
  }

 protected:
  virtual ~MediaTypesEnum() {}

 private:
  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override {
    if (riid == IID_IUnknown || riid == IID_IEnumMediaTypes) {
      *ppv = static_cast<IEnumMediaTypes*>(this);
      AddRef();
      return S_OK;
    }
    return E_NOINTERFACE;
  }

  // IEnumMediaTypes
  STDMETHOD(Clone)(IEnumMediaTypes** pins) {
    RTC_DCHECK(false);
    return E_NOTIMPL;
  }

  STDMETHOD(Next)(ULONG count, AM_MEDIA_TYPE** types, ULONG* fetched) {
    RTC_DCHECK(count > 0);
    RTC_DCHECK(types);
    // fetched may be NULL.
    if (fetched)
      *fetched = 0;

    for (ULONG i = 0;
         i < count && pos_ < static_cast<int>(format_preference_order_.size());
         ++i) {
      AM_MEDIA_TYPE* media_type = reinterpret_cast<AM_MEDIA_TYPE*>(
          CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE)));
      ZeroMemory(media_type, sizeof(*media_type));
      types[i] = media_type;
      VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(
          AllocMediaTypeFormatBuffer(media_type, sizeof(VIDEOINFOHEADER)));
      ZeroMemory(vih, sizeof(*vih));
      vih->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
      vih->bmiHeader.biPlanes = 1;
      vih->bmiHeader.biClrImportant = 0;
      vih->bmiHeader.biClrUsed = 0;
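      // AvgTimePerFrame is expressed in REFERENCE_TIME units (100 ns), so the
      // frame interval is 10,000,000 / fps.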
      if (capability_.maxFPS != 0)
        vih->AvgTimePerFrame = 10000000 / capability_.maxFPS;

      SetRectEmpty(&vih->rcSource);  // we want the whole image area rendered.
      SetRectEmpty(&vih->rcTarget);  // no particular destination rectangle

      media_type->majortype = MEDIATYPE_Video;
      media_type->formattype = FORMAT_VideoInfo;
      media_type->bTemporalCompression = FALSE;

      // Set format information.
      auto format_it = std::next(format_preference_order_.begin(), pos_++);
      SetMediaInfoFromVideoType(*format_it, &vih->bmiHeader, media_type);

      vih->bmiHeader.biWidth = capability_.width;
      vih->bmiHeader.biHeight = capability_.height;
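      // biSizeImage is the frame size in bytes: width * height * biBitCount / 8,
      // written as (biBitCount / 4) * width * height / 2 below.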
      vih->bmiHeader.biSizeImage = ((vih->bmiHeader.biBitCount / 4) *
                                    capability_.height * capability_.width) /
                                   2;

      RTC_DCHECK(vih->bmiHeader.biSizeImage);
      media_type->lSampleSize = vih->bmiHeader.biSizeImage;
      media_type->bFixedSizeSamples = true;
      if (fetched)
        ++(*fetched);
    }
    return pos_ == static_cast<int>(format_preference_order_.size()) ? S_FALSE
                                                                     : S_OK;
  }

  static void SetMediaInfoFromVideoType(VideoType video_type,
                                        BITMAPINFOHEADER* bitmap_header,
                                        AM_MEDIA_TYPE* media_type) {
    switch (video_type) {
      case VideoType::kI420:
        bitmap_header->biCompression = MAKEFOURCC('I', '4', '2', '0');
        bitmap_header->biBitCount = 12;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_I420;
        break;
      case VideoType::kYUY2:
        bitmap_header->biCompression = MAKEFOURCC('Y', 'U', 'Y', '2');
        bitmap_header->biBitCount = 16;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_YUY2;
        break;
      case VideoType::kRGB24:
        bitmap_header->biCompression = BI_RGB;
        bitmap_header->biBitCount = 24;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_RGB24;
        break;
      case VideoType::kUYVY:
        bitmap_header->biCompression = MAKEFOURCC('U', 'Y', 'V', 'Y');
        bitmap_header->biBitCount = 16;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_UYVY;
        break;
      case VideoType::kMJPEG:
        bitmap_header->biCompression = MAKEFOURCC('M', 'J', 'P', 'G');
        bitmap_header->biBitCount = 12;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_MJPG;
        break;
      default:
        RTC_NOTREACHED();
    }
  }

  STDMETHOD(Skip)(ULONG count) {
    RTC_DCHECK(false);
    return E_NOTIMPL;
  }

  STDMETHOD(Reset)() {
    pos_ = 0;
    return S_OK;
  }

  int pos_ = 0;
  const VideoCaptureCapability capability_;
  std::list<VideoType> format_preference_order_;
};

}  // namespace

CaptureInputPin::CaptureInputPin(CaptureSinkFilter* filter) {
  capture_checker_.Detach();
  // No reference held to avoid circular references.
  info_.pFilter = filter;
  info_.dir = PINDIR_INPUT;
}

CaptureInputPin::~CaptureInputPin() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  ResetMediaType(&media_type_);
}

HRESULT CaptureInputPin::SetRequestedCapability(
    const VideoCaptureCapability& capability) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());
  requested_capability_ = capability;
  resulting_capability_ = VideoCaptureCapability();
  return S_OK;
}

void CaptureInputPin::OnFilterActivated() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  runtime_error_ = false;
  flushing_ = false;
  capture_checker_.Detach();
  capture_thread_id_ = 0;
}

void CaptureInputPin::OnFilterDeactivated() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  // Expedite shutdown by raising the flushing flag so no further processing
  // on the capture thread occurs. When the graph is stopped and all filters
  // have been told to stop, the media controller (graph) will wait for the
  // capture thread to stop.
  flushing_ = true;
  if (allocator_)
    allocator_->Decommit();
}

CaptureSinkFilter* CaptureInputPin::Filter() const {
  return static_cast<CaptureSinkFilter*>(info_.pFilter);
}

HRESULT CaptureInputPin::AttemptConnection(IPin* receive_pin,
                                           const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());

  // Check that the connection is valid -- need to do this for every
  // connect attempt since BreakConnect will undo it.
  HRESULT hr = CheckDirection(receive_pin);
  if (FAILED(hr))
    return hr;

  if (!TranslateMediaTypeToVideoCaptureCapability(media_type,
                                                  &resulting_capability_)) {
    ClearAllocator(true);
    return VFW_E_TYPE_NOT_ACCEPTED;
  }

  // See if the other pin will accept this type.
  hr = receive_pin->ReceiveConnection(static_cast<IPin*>(this), media_type);
  if (FAILED(hr)) {
    receive_pin_ = nullptr;  // Should already be null, but just in case.
    return hr;
  }

  // Should have been set as part of the connect process.
  RTC_DCHECK_EQ(receive_pin_, receive_pin);

  ResetMediaType(&media_type_);
  CopyMediaType(&media_type_, media_type);

  return S_OK;
}

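// Collects candidate media types for connecting to `receive_pin`: first the
// types the upstream pin enumerates, then the types from our own
// MediaTypesEnum, keeping those that partially match `media_type`. Ownership
// of the returned types passes to the caller, which releases each entry with
// FreeMediaType().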
std::vector<AM_MEDIA_TYPE*> CaptureInputPin::DetermineCandidateFormats(
    IPin* receive_pin,
    const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(receive_pin);
  RTC_DCHECK(media_type);

  std::vector<AM_MEDIA_TYPE*> ret;

  for (int i = 0; i < 2; i++) {
    IEnumMediaTypes* types = nullptr;
    if (i == 0) {
      // First time around, try types from receive_pin.
      receive_pin->EnumMediaTypes(&types);
    } else {
      // Then try ours.
      EnumMediaTypes(&types);
    }

    if (types) {
      while (true) {
        ULONG fetched = 0;
        AM_MEDIA_TYPE* this_type = nullptr;
        if (types->Next(1, &this_type, &fetched) != S_OK)
          break;

        if (IsMediaTypePartialMatch(*this_type, *media_type)) {
          ret.push_back(this_type);
        } else {
          FreeMediaType(this_type);
        }
      }
      types->Release();
    }
  }

  return ret;
}

void CaptureInputPin::ClearAllocator(bool decommit) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!allocator_)
    return;
  if (decommit)
    allocator_->Decommit();
  allocator_ = nullptr;
}

HRESULT CaptureInputPin::CheckDirection(IPin* pin) const {
  RTC_DCHECK_RUN_ON(&main_checker_);
  PIN_DIRECTION pd;
  pin->QueryDirection(&pd);
  // Fairly basic check, make sure we don't pair input with input etc.
  return pd == info_.dir ? VFW_E_INVALID_DIRECTION : S_OK;
}

STDMETHODIMP CaptureInputPin::QueryInterface(REFIID riid, void** ppv) {
  (*ppv) = nullptr;
  if (riid == IID_IUnknown || riid == IID_IMemInputPin) {
    *ppv = static_cast<IMemInputPin*>(this);
  } else if (riid == IID_IPin) {
    *ppv = static_cast<IPin*>(this);
  }

  if (!(*ppv))
    return E_NOINTERFACE;

  static_cast<IMemInputPin*>(this)->AddRef();
  return S_OK;
}

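// IPin. Called when the graph asks this pin to connect to `receive_pin`. A
// fully specified media type is tried directly; otherwise each candidate
// format is attempted in turn until one is accepted.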
STDMETHODIMP CaptureInputPin::Connect(IPin* receive_pin,
                                      const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!media_type || !receive_pin)
    return E_POINTER;

  if (!Filter()->IsStopped())
    return VFW_E_NOT_STOPPED;

  if (receive_pin_) {
    RTC_DCHECK(false);
    return VFW_E_ALREADY_CONNECTED;
  }

  if (IsMediaTypeFullySpecified(*media_type))
    return AttemptConnection(receive_pin, media_type);

  auto types = DetermineCandidateFormats(receive_pin, media_type);
  bool connected = false;
  for (auto* type : types) {
    // Try each candidate type (not the partially specified `media_type`)
    // until one of them is accepted.
    if (!connected && AttemptConnection(receive_pin, type) == S_OK)
      connected = true;

    FreeMediaType(type);
  }

  return connected ? S_OK : VFW_E_NO_ACCEPTABLE_TYPES;
}

STDMETHODIMP CaptureInputPin::ReceiveConnection(
    IPin* connector,
    const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());

  if (receive_pin_) {
    RTC_DCHECK(false);
    return VFW_E_ALREADY_CONNECTED;
  }

  HRESULT hr = CheckDirection(connector);
  if (FAILED(hr))
    return hr;

  if (!TranslateMediaTypeToVideoCaptureCapability(media_type,
                                                  &resulting_capability_))
    return VFW_E_TYPE_NOT_ACCEPTED;

  // Complete the connection.
  receive_pin_ = connector;
  ResetMediaType(&media_type_);
  CopyMediaType(&media_type_, media_type);

  return S_OK;
}

STDMETHODIMP CaptureInputPin::Disconnect() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!Filter()->IsStopped())
    return VFW_E_NOT_STOPPED;

  if (!receive_pin_)
    return S_FALSE;

  ClearAllocator(true);
  receive_pin_ = nullptr;

  return S_OK;
}

STDMETHODIMP CaptureInputPin::ConnectedTo(IPin** pin) {
  RTC_DCHECK_RUN_ON(&main_checker_);

  if (!receive_pin_)
    return VFW_E_NOT_CONNECTED;

  *pin = receive_pin_.get();
  receive_pin_->AddRef();

  return S_OK;
}

STDMETHODIMP CaptureInputPin::ConnectionMediaType(AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);

  if (!receive_pin_)
    return VFW_E_NOT_CONNECTED;

  CopyMediaType(media_type, &media_type_);

  return S_OK;
}

STDMETHODIMP CaptureInputPin::QueryPinInfo(PIN_INFO* info) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *info = info_;
  if (info_.pFilter)
    info_.pFilter->AddRef();
  return S_OK;
}

STDMETHODIMP CaptureInputPin::QueryDirection(PIN_DIRECTION* pin_dir) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *pin_dir = info_.dir;
  return S_OK;
}

STDMETHODIMP CaptureInputPin::QueryId(LPWSTR* id) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  size_t len = lstrlenW(info_.achName);
  *id = reinterpret_cast<LPWSTR>(CoTaskMemAlloc((len + 1) * sizeof(wchar_t)));
  lstrcpyW(*id, info_.achName);
  return S_OK;
}

STDMETHODIMP CaptureInputPin::QueryAccept(const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());
  VideoCaptureCapability capability(resulting_capability_);
  return TranslateMediaTypeToVideoCaptureCapability(media_type, &capability)
             ? S_FALSE
             : S_OK;
}

STDMETHODIMP CaptureInputPin::EnumMediaTypes(IEnumMediaTypes** types) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *types = new ComRefCount<MediaTypesEnum>(requested_capability_);
  (*types)->AddRef();
  return S_OK;
}

STDMETHODIMP CaptureInputPin::QueryInternalConnections(IPin** pins,
                                                       ULONG* count) {
  return E_NOTIMPL;
}

STDMETHODIMP CaptureInputPin::EndOfStream() {
  return S_OK;
}

STDMETHODIMP CaptureInputPin::BeginFlush() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  flushing_ = true;
  return S_OK;
}

STDMETHODIMP CaptureInputPin::EndFlush() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  flushing_ = false;
  runtime_error_ = false;
  return S_OK;
}

STDMETHODIMP CaptureInputPin::NewSegment(REFERENCE_TIME start,
                                         REFERENCE_TIME stop,
                                         double rate) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return S_OK;
}

STDMETHODIMP CaptureInputPin::GetAllocator(IMemAllocator** allocator) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (allocator_ == nullptr) {
    HRESULT hr = CoCreateInstance(CLSID_MemoryAllocator, 0,
                                  CLSCTX_INPROC_SERVER, IID_IMemAllocator,
                                  reinterpret_cast<void**>(allocator));
    if (FAILED(hr))
      return hr;
    allocator_.swap(allocator);
  }
  *allocator = allocator_;
  allocator_->AddRef();
  return S_OK;
}

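// IMemInputPin. The upstream filter notifies us of the allocator it has
// decided to use. Swap it in, take our own reference on it, and release the
// reference held on any previously assigned allocator.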
STDMETHODIMP CaptureInputPin::NotifyAllocator(IMemAllocator* allocator,
                                              BOOL read_only) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  allocator_.swap(&allocator);
  if (allocator_)
    allocator_->AddRef();
  if (allocator)
    allocator->Release();
  return S_OK;
}

STDMETHODIMP CaptureInputPin::GetAllocatorRequirements(
    ALLOCATOR_PROPERTIES* props) {
  return E_NOTIMPL;
}

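// IMemInputPin. Called on the capture thread for every media sample delivered
// by the upstream capture filter; ReceiveCanBlock() below advertises that
// this call does not block.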
STDMETHODIMP CaptureInputPin::Receive(IMediaSample* media_sample) {
  RTC_DCHECK_RUN_ON(&capture_checker_);

  CaptureSinkFilter* const filter = static_cast<CaptureSinkFilter*>(Filter());

  if (flushing_.load(std::memory_order_relaxed))
    return S_FALSE;

  if (runtime_error_.load(std::memory_order_relaxed))
    return VFW_E_RUNTIME_ERROR;

  if (!capture_thread_id_) {
    // Make sure we set the thread name only once.
    capture_thread_id_ = GetCurrentThreadId();
    rtc::SetCurrentThreadName("webrtc_video_capture");
  }

  AM_SAMPLE2_PROPERTIES sample_props = {};
  GetSampleProperties(media_sample, &sample_props);
  // Has the format changed in this sample?
  if (sample_props.dwSampleFlags & AM_SAMPLE_TYPECHANGED) {
    // Check the derived class accepts the new format.
    // This shouldn't fail as the source must call QueryAccept first.

    // Note: This will modify resulting_capability_.
    // That should be OK as long as resulting_capability_ is only modified
    // on this thread while it is running (filter is not stopped), and only
    // modified on the main thread when the filter is stopped (i.e. this thread
    // is not running).
    if (!TranslateMediaTypeToVideoCaptureCapability(sample_props.pMediaType,
                                                    &resulting_capability_)) {
      // Raise a runtime error if we fail the media type
      runtime_error_ = true;
      EndOfStream();
      Filter()->NotifyEvent(EC_ERRORABORT, VFW_E_TYPE_NOT_ACCEPTED, 0);
      return VFW_E_INVALIDMEDIATYPE;
    }
  }

  filter->ProcessCapturedFrame(sample_props.pbBuffer, sample_props.lActual,
                               resulting_capability_);

  return S_OK;
}

STDMETHODIMP CaptureInputPin::ReceiveMultiple(IMediaSample** samples,
                                              long count,
                                              long* processed) {
  HRESULT hr = S_OK;
  *processed = 0;
  while (count-- > 0) {
    hr = Receive(samples[*processed]);
    if (hr != S_OK)
      break;
    ++(*processed);
  }
  return hr;
}

STDMETHODIMP CaptureInputPin::ReceiveCanBlock() {
  return S_FALSE;
}

// ----------------------------------------------------------------------------

CaptureSinkFilter::CaptureSinkFilter(VideoCaptureImpl* capture_observer)
    : input_pin_(new ComRefCount<CaptureInputPin>(this)),
      capture_observer_(capture_observer) {}

CaptureSinkFilter::~CaptureSinkFilter() {
  RTC_DCHECK_RUN_ON(&main_checker_);
}

HRESULT CaptureSinkFilter::SetRequestedCapability(
    const VideoCaptureCapability& capability) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  // Called on the same thread as capture is started on.
  return input_pin_->SetRequestedCapability(capability);
}

STDMETHODIMP CaptureSinkFilter::GetState(DWORD msecs, FILTER_STATE* state) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *state = state_;
  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::SetSyncSource(IReferenceClock* clock) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::GetSyncSource(IReferenceClock** clock) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return E_NOTIMPL;
}

STDMETHODIMP CaptureSinkFilter::Pause() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  state_ = State_Paused;
  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::Run(REFERENCE_TIME tStart) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (state_ == State_Stopped)
    Pause();

  state_ = State_Running;
  input_pin_->OnFilterActivated();

  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::Stop() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (state_ == State_Stopped)
    return S_OK;

  state_ = State_Stopped;
  input_pin_->OnFilterDeactivated();

  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::EnumPins(IEnumPins** pins) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *pins = new ComRefCount<class EnumPins>(input_pin_.get());
  (*pins)->AddRef();
  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::FindPin(LPCWSTR id, IPin** pin) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  // There's no ID assigned to our input pin, so looking it up based on one
  // is pointless (and in practice, this method isn't being used).
  return VFW_E_NOT_FOUND;
}

STDMETHODIMP CaptureSinkFilter::QueryFilterInfo(FILTER_INFO* info) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *info = info_;
  if (info->pGraph)
    info->pGraph->AddRef();
  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::JoinFilterGraph(IFilterGraph* graph,
                                                LPCWSTR name) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(IsStopped());

  // Note, since a reference to the filter is held by the graph manager,
  // filters must not hold a reference to the graph. If they would, we'd have
  // a circular reference. Instead, a pointer to the graph can be held without
  // reference. See documentation for IBaseFilter::JoinFilterGraph for more.
  info_.pGraph = graph;  // No AddRef().
  sink_ = nullptr;

  if (info_.pGraph) {
    // make sure we don't hold on to the reference we may receive.
    // Note that this assumes the same object identity, but so be it.
    rtc::scoped_refptr<IMediaEventSink> sink;
    GetComInterface(info_.pGraph, &sink);
    sink_ = sink.get();
  }

  info_.achName[0] = L'\0';
  if (name)
    lstrcpynW(info_.achName, name, arraysize(info_.achName));

  return S_OK;
}

STDMETHODIMP CaptureSinkFilter::QueryVendorInfo(LPWSTR* vendor_info) {
  return E_NOTIMPL;
}

void CaptureSinkFilter::ProcessCapturedFrame(
    unsigned char* buffer,
    size_t length,
    const VideoCaptureCapability& frame_info) {
  // Called on the capture thread.
  capture_observer_->IncomingFrame(buffer, length, frame_info);
}

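// Forwards graph events (e.g. EC_ERRORABORT raised from Receive()) to the
// graph's IMediaEventSink, if one was obtained in JoinFilterGraph().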
void CaptureSinkFilter::NotifyEvent(long code,
                                    LONG_PTR param1,
                                    LONG_PTR param2) {
  // Called on the capture thread.
  if (!sink_)
    return;

  if (EC_COMPLETE == code)
    param2 = reinterpret_cast<LONG_PTR>(static_cast<IBaseFilter*>(this));
  sink_->Notify(code, param1, param2);
}

bool CaptureSinkFilter::IsStopped() const {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return state_ == State_Stopped;
}

STDMETHODIMP CaptureSinkFilter::QueryInterface(REFIID riid, void** ppv) {
  if (riid == IID_IUnknown || riid == IID_IPersist || riid == IID_IBaseFilter) {
    *ppv = static_cast<IBaseFilter*>(this);
    AddRef();
    return S_OK;
  }
  return E_NOINTERFACE;
}

STDMETHODIMP CaptureSinkFilter::GetClassID(CLSID* clsid) {
  *clsid = CLSID_SINKFILTER;
  return S_OK;
}

}  // namespace videocapturemodule
}  // namespace webrtc