// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_renderer_factory.h"

#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/rtc_video_renderer.h"
#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
#include "content/renderer/media/webrtc_audio_renderer.h"
#include "content/renderer/media/webrtc_local_audio_renderer.h"
#include "content/renderer/render_thread_impl.h"
#include "media/base/audio_hardware_config.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"

namespace content {

namespace {

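// Convenience accessor for the PeerConnectionDependencyFactory owned by the
// current RenderThreadImpl.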
PeerConnectionDependencyFactory* GetPeerConnectionDependencyFactory() {
  return RenderThreadImpl::current()->GetPeerConnectionDependencyFactory();
}

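// Fills |output_sample_rate| and |output_buffer_size| with the default audio
// output hardware parameters.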
void GetDefaultOutputDeviceParams(
    int* output_sample_rate, int* output_buffer_size) {
  // Fetch the default audio output hardware config.
  media::AudioHardwareConfig* hardware_config =
      RenderThreadImpl::current()->GetAudioHardwareConfig();
  *output_sample_rate = hardware_config->GetOutputSampleRate();
  *output_buffer_size = hardware_config->GetOutputBufferSize();
}


// Returns true and sets |session_id| to the matching session id if a single
// capture device is currently open.  This is used to pass a session id on to
// a webrtc audio renderer (either local or remote), so that audio will be
// rendered to a matching output device, should one exist.
// Note that if more than one capture device is open, the function cannot pick
// an appropriate device and returns false.
bool GetAuthorizedDeviceInfoForAudioRenderer(
    int* session_id,
    int* output_sample_rate,
    int* output_frames_per_buffer) {
  WebRtcAudioDeviceImpl* audio_device =
      GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice();
  if (!audio_device)
    return false;

  return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
      session_id, output_sample_rate, output_frames_per_buffer);
}

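// Creates a WebRtcAudioRenderer for the remote audio tracks of |stream|.
// Returns NULL if the stream has no audio tracks.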
scoped_refptr<WebRtcAudioRenderer> CreateRemoteAudioRenderer(
    webrtc::MediaStreamInterface* stream,
    int routing_id,
    int render_frame_id) {
  if (stream->GetAudioTracks().empty())
    return NULL;

  DVLOG(1) << "MediaStreamRendererFactory::CreateRemoteAudioRenderer label:"
           << stream->label();

  // TODO(tommi): Change the default value of session_id to be
  // StreamDeviceInfo::kNoId.  Also update AudioOutputDevice etc.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  return new WebRtcAudioRenderer(
      stream, routing_id, render_frame_id, session_id,
      sample_rate, buffer_size);
}


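// Creates a WebRtcLocalAudioRenderer that plays out the given locally
// captured |audio_track| (local loopback).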
scoped_refptr<WebRtcLocalAudioRenderer> CreateLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track,
    int routing_id,
    int render_frame_id) {
  DVLOG(1) << "MediaStreamRendererFactory::CreateLocalAudioRenderer";

  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
  // existing WebRtcAudioCapturer so that the renderer can use it as source.
  return new WebRtcLocalAudioRenderer(
      audio_track,
      routing_id,
      render_frame_id,
      session_id,
      buffer_size);
}

}  // namespace


MediaStreamRendererFactory::MediaStreamRendererFactory() {
}

MediaStreamRendererFactory::~MediaStreamRendererFactory() {
}

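// Looks up the WebMediaStream registered under |url| and returns a renderer
// for its first video track, or NULL if the stream has no renderable video
// track.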
scoped_refptr<VideoFrameProvider>
MediaStreamRendererFactory::GetVideoFrameProvider(
    const GURL& url,
    const base::Closure& error_cb,
    const VideoFrameProvider::RepaintCB& repaint_cb) {
  blink::WebMediaStream web_stream =
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
  DCHECK(!web_stream.isNull());

  DVLOG(1) << "MediaStreamRendererFactory::GetVideoFrameProvider stream:"
           << base::UTF16ToUTF8(web_stream.id());

  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  web_stream.videoTracks(video_tracks);
  if (video_tracks.isEmpty() ||
      !MediaStreamVideoTrack::GetTrack(video_tracks[0])) {
    return NULL;
  }

  return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
}

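// Returns an audio renderer for the stream registered under |url|: a local
// loopback renderer for locally generated streams, otherwise a (possibly
// shared) renderer for remote WebRTC streams.  Returns NULL if the stream is
// not valid or has no audio tracks.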
scoped_refptr<MediaStreamAudioRenderer>
MediaStreamRendererFactory::GetAudioRenderer(
    const GURL& url, int render_view_id, int render_frame_id) {
  blink::WebMediaStream web_stream =
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);

  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.

  DVLOG(1) << "MediaStreamRendererFactory::GetAudioRenderer stream:"
           << base::UTF16ToUTF8(web_stream.id());

  MediaStream* native_stream = MediaStream::GetMediaStream(web_stream);

  // TODO(tommi): MediaStreams do not have a 'local or not' concept.
  // Tracks _might_, but even so, we need to fix the data flow so that
  // it works the same way for all track implementations, local, remote or what
  // have you.
  // In this function, we should simply create a renderer object that receives
  // and mixes audio from all the tracks that belong to the media stream.
  // We need to remove the |is_local| property from MediaStreamExtraData since
  // this concept is peerconnection specific (is a previously recorded stream
  // local or remote?).
  if (native_stream->is_local()) {
    // Create the local audio renderer if the stream contains audio tracks.
    blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
    web_stream.audioTracks(audio_tracks);
    if (audio_tracks.isEmpty())
      return NULL;

    // TODO(xians): Add support for the case where the media stream contains
    // multiple audio tracks.
    return CreateLocalAudioRenderer(audio_tracks[0], render_view_id,
                                    render_frame_id);
  }

  webrtc::MediaStreamInterface* stream =
      MediaStream::GetAdapter(web_stream);
  if (stream->GetAudioTracks().empty())
    return NULL;

  // This is a remote WebRTC media stream.
  WebRtcAudioDeviceImpl* audio_device =
      GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice();

  // Share the existing renderer if any, otherwise create a new one.
  scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
  if (!renderer.get()) {
    renderer = CreateRemoteAudioRenderer(stream, render_view_id,
                                         render_frame_id);

    if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
      renderer = NULL;
  }

  return renderer.get() ?
      renderer->CreateSharedAudioRendererProxy(stream) : NULL;
}

}  // namespace content