/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

#include <iostream>
#include <new>

#include "webrtc/modules/video_capture/linux/video_capture_linux.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/ref_count.h"
#include "webrtc/system_wrappers/include/trace.h"

namespace webrtc
{
namespace videocapturemodule
{
VideoCaptureModule* VideoCaptureImpl::Create(const int32_t id,
                                             const char* deviceUniqueId)
{
    RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>* implementation =
        new RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>(id);

    if (!implementation || implementation->Init(deviceUniqueId) != 0)
    {
        delete implementation;
        implementation = NULL;
    }

    return implementation;
}
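
// A minimal usage sketch (not part of this file): the unique id passed to
// Create() is the V4L2 bus_info string of the camera, typically obtained from
// the platform DeviceInfo implementation. The returned module is ref-counted,
// so it is released with Release() rather than deleted. The capability values
// below are illustrative assumptions.
//
//   VideoCaptureModule* module = VideoCaptureImpl::Create(0, uniqueId);
//   if (module) {
//       VideoCaptureCapability capability;
//       capability.width = 640;
//       capability.height = 480;
//       capability.maxFPS = 30;
//       capability.rawType = kVideoI420;
//       module->StartCapture(capability);
//       // ... frames are delivered through the registered data callback ...
//       module->StopCapture();
//       module->Release();
//   }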

VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const int32_t id)
    : VideoCaptureImpl(id),
      _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()),
      _deviceId(-1),
      _deviceFd(-1),
      _buffersAllocatedByDevice(-1),
      _currentWidth(-1),
      _currentHeight(-1),
      _currentFrameRate(-1),
      _captureStarted(false),
      _captureVideoType(kVideoI420),
      _pool(NULL)
{
}

int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
{
    int len = strlen((const char*) deviceUniqueIdUTF8);
    _deviceUniqueId = new (std::nothrow) char[len + 1];
    if (_deviceUniqueId)
    {
        memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
    }

    int fd;
    char device[32];
    bool found = false;

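    // The caller's unique id is matched against the bus_info string reported
    // by VIDIOC_QUERYCAP for each /dev/video node, so the same physical
    // camera is found again even if its /dev/video index changes between
    // sessions.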
    /* detect /dev/video [0-63] entries */
    int n;
    for (n = 0; n < 64; n++)
    {
        sprintf(device, "/dev/video%d", n);
        if ((fd = open(device, O_RDONLY)) != -1)
        {
            // query device capabilities
            struct v4l2_capability cap;
            if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
            {
                if (cap.bus_info[0] != 0)
                {
                    if (strncmp((const char*) cap.bus_info,
                                (const char*) deviceUniqueIdUTF8,
                                strlen((const char*) deviceUniqueIdUTF8)) == 0) // match with device id
                    {
                        close(fd);
                        found = true;
                        break; // fd matches with device unique id supplied
                    }
                }
            }
            close(fd); // close since this is not the matching device
        }
    }
    if (!found)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "no matching device found");
        return -1;
    }
    _deviceId = n; // store the device id
    return 0;
}

VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
{
    StopCapture();
    if (_captureCritSect)
    {
        delete _captureCritSect;
    }
    if (_deviceFd != -1)
        close(_deviceFd);
}

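// StartCapture() walks the standard V4L2 streaming setup: open the device
// node, pick the best-supported pixel format from a preference list, set the
// frame size and (if the driver allows it) the frame rate, memory-map the
// capture buffers, start the capture thread, and finally issue
// VIDIOC_STREAMON so the driver begins filling buffers.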
int32_t VideoCaptureModuleV4L2::StartCapture(
    const VideoCaptureCapability& capability)
{
    if (_captureStarted)
    {
        if (capability.width == _currentWidth &&
            capability.height == _currentHeight &&
            _captureVideoType == capability.rawType)
        {
            return 0;
        }
        else
        {
            StopCapture();
        }
    }

    CriticalSectionScoped cs(_captureCritSect);
    // first open the /dev/video device
    char device[20];
    sprintf(device, "/dev/video%d", (int) _deviceId);

    if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "error in opening %s, errno = %d", device, errno);
        return -1;
    }

    // Supported video formats in preferred order.
    // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
    // I420 otherwise.
    const int nFormats = 5;
    unsigned int fmts[nFormats];
    if (capability.width > 640 || capability.height > 480) {
        fmts[0] = V4L2_PIX_FMT_MJPEG;
        fmts[1] = V4L2_PIX_FMT_YUV420;
        fmts[2] = V4L2_PIX_FMT_YUYV;
        fmts[3] = V4L2_PIX_FMT_UYVY;
        fmts[4] = V4L2_PIX_FMT_JPEG;
    } else {
        fmts[0] = V4L2_PIX_FMT_YUV420;
        fmts[1] = V4L2_PIX_FMT_YUYV;
        fmts[2] = V4L2_PIX_FMT_UYVY;
        fmts[3] = V4L2_PIX_FMT_MJPEG;
        fmts[4] = V4L2_PIX_FMT_JPEG;
    }

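    // VIDIOC_ENUM_FMT succeeds for each pixel format the driver supports and
    // fails (EINVAL) once fmt.index runs past the last one, which terminates
    // the loop below. fmtsIdx ends up as the index of the most preferred
    // format the device actually offers.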
    // Enumerate image formats.
    struct v4l2_fmtdesc fmt;
    int fmtsIdx = nFormats;
    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                 "Video Capture enumerates supported image formats:");
    while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                     "  { pixelformat = %c%c%c%c, description = '%s' }",
                     fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
                     (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
                     fmt.description);
        // Match the preferred order.
        for (int i = 0; i < nFormats; i++) {
            if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
                fmtsIdx = i;
        }
        // Keep enumerating.
        fmt.index++;
    }

    if (fmtsIdx == nFormats)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "no supported video formats found");
        return -1;
    } else {
        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                     "We prefer format %c%c%c%c",
                     fmts[fmtsIdx] & 0xFF, (fmts[fmtsIdx] >> 8) & 0xFF,
                     (fmts[fmtsIdx] >> 16) & 0xFF, (fmts[fmtsIdx] >> 24) & 0xFF);
    }

    struct v4l2_format video_fmt;
    memset(&video_fmt, 0, sizeof(struct v4l2_format));
    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    video_fmt.fmt.pix.sizeimage = 0;
    video_fmt.fmt.pix.width = capability.width;
    video_fmt.fmt.pix.height = capability.height;
    video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];

    if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
        _captureVideoType = kVideoYUY2;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
        _captureVideoType = kVideoI420;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
        _captureVideoType = kVideoUYVY;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
             video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
        _captureVideoType = kVideoMJPEG;

    // set format and frame size now
    if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "error in VIDIOC_S_FMT, errno = %d", errno);
        return -1;
    }

    // initialize current width and height
    _currentWidth = video_fmt.fmt.pix.width;
    _currentHeight = video_fmt.fmt.pix.height;
    _captureDelay = 120;

    // Try to set the frame rate, but first check whether the driver supports it.
    bool driver_framerate_support = true;
    struct v4l2_streamparm streamparms;
    memset(&streamparms, 0, sizeof(streamparms));
    streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "error in VIDIOC_G_PARM errno = %d", errno);
        driver_framerate_support = false;
        // continue
    } else {
        // Check whether the V4L2_CAP_TIMEPERFRAME capability flag is set
        // (the capability field is a bitmask).
        if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            // The driver supports the feature. Set the requested frame rate.
            memset(&streamparms, 0, sizeof(streamparms));
            streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            streamparms.parm.capture.timeperframe.numerator = 1;
            streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
            if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                             "Failed to set the framerate. errno=%d", errno);
                driver_framerate_support = false;
            } else {
                _currentFrameRate = capability.maxFPS;
            }
        }
    }
    // If the driver doesn't support frame rate control, fall back to a
    // hard-coded value based on the frame size.
    if (!driver_framerate_support) {
        if (_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) {
            _currentFrameRate = 15;
        } else {
            _currentFrameRate = 30;
        }
    }

    if (!AllocateVideoBuffers())
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "failed to allocate video capture buffers");
        return -1;
    }

    // start the capture thread
    if (!_captureThread)
    {
        _captureThread.reset(new rtc::PlatformThread(
            VideoCaptureModuleV4L2::CaptureThread, this, "CaptureThread"));
        _captureThread->Start();
        _captureThread->SetPriority(rtc::kHighPriority);
    }

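    // VIDIOC_STREAMON tells the driver to start filling the queued buffers;
    // from this point the capture thread's CaptureProcess() loop dequeues
    // completed frames and re-queues the buffers.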
    // Needed to start UVC camera - from the uvcview application
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "Failed to turn on stream");
        return -1;
    }

    _captureStarted = true;
    return 0;
}

int32_t VideoCaptureModuleV4L2::StopCapture()
{
    if (_captureThread) {
        // Make sure the capture thread stops using the critical section.
        _captureThread->Stop();
        _captureThread.reset();
    }

    CriticalSectionScoped cs(_captureCritSect);
    if (_captureStarted)
    {
        _captureStarted = false;

        DeAllocateVideoBuffers();
        close(_deviceFd);
        _deviceFd = -1;
    }

    return 0;
}

// critical section protected by the caller

bool VideoCaptureModuleV4L2::AllocateVideoBuffers()
{
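    // Standard V4L2 memory-mapped streaming setup: ask the driver for a set
    // of buffers with VIDIOC_REQBUFS, look up each buffer's size and offset
    // with VIDIOC_QUERYBUF, mmap() it into this process, and queue it with
    // VIDIOC_QBUF so the driver can fill it once streaming starts.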
    struct v4l2_requestbuffers rbuffer;
    memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));

    rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rbuffer.memory = V4L2_MEMORY_MMAP;
    rbuffer.count = kNoOfV4L2Bufffers;

    if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "Could not get buffers from device. errno = %d", errno);
        return false;
    }

    if (rbuffer.count > kNoOfV4L2Bufffers)
        rbuffer.count = kNoOfV4L2Bufffers;

    _buffersAllocatedByDevice = rbuffer.count;

    // Map the buffers
    _pool = new Buffer[rbuffer.count];

    for (unsigned int i = 0; i < rbuffer.count; i++)
    {
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(v4l2_buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;

        if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0)
        {
            return false;
        }

        _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                              MAP_SHARED, _deviceFd, buffer.m.offset);

        if (MAP_FAILED == _pool[i].start)
        {
            for (unsigned int j = 0; j < i; j++)
                munmap(_pool[j].start, _pool[j].length);
            return false;
        }

        _pool[i].length = buffer.length;

        if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0)
        {
            return false;
        }
    }
    return true;
}

bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers()
{
    // unmap buffers
    for (int i = 0; i < _buffersAllocatedByDevice; i++)
        munmap(_pool[i].start, _pool[i].length);

    delete[] _pool;

    // turn off stream
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                     "VIDIOC_STREAMOFF error. errno: %d", errno);
    }

    return true;
}

bool VideoCaptureModuleV4L2::CaptureStarted()
{
    return _captureStarted;
}

bool VideoCaptureModuleV4L2::CaptureThread(void* obj)
{
    return static_cast<VideoCaptureModuleV4L2*>(obj)->CaptureProcess();
}

bool VideoCaptureModuleV4L2::CaptureProcess()
{
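    // One iteration of the capture loop, run repeatedly by the platform
    // thread: wait (select, 1 s timeout) for the device to become readable,
    // dequeue a filled buffer with VIDIOC_DQBUF, hand the frame to
    // IncomingFrame() (which converts to I420 if needed), then re-queue the
    // buffer with VIDIOC_QBUF. Returning true keeps the thread looping.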
    int retVal = 0;
    fd_set rSet;
    struct timeval timeout;

    _captureCritSect->Enter();

    FD_ZERO(&rSet);
    FD_SET(_deviceFd, &rSet);
    timeout.tv_sec = 1;
    timeout.tv_usec = 0;

    retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
    if (retVal < 0 && errno != EINTR) // continue if interrupted
    {
        // select failed
        _captureCritSect->Leave();
        return false;
    }
    else if (retVal == 0)
    {
        // select timed out
        _captureCritSect->Leave();
        return true;
    }
    else if (!FD_ISSET(_deviceFd, &rSet))
    {
        // no event on the camera handle
        _captureCritSect->Leave();
        return true;
    }

    if (_captureStarted)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        // dequeue a buffer - repeat until dequeued properly!
        while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0)
        {
            if (errno != EINTR)
            {
                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                             "could not sync on a buffer on device %s", strerror(errno));
                _captureCritSect->Leave();
                return true;
            }
        }
        VideoCaptureCapability frameInfo;
        frameInfo.width = _currentWidth;
        frameInfo.height = _currentHeight;
        frameInfo.rawType = _captureVideoType;

        // convert to I420 if needed
        IncomingFrame((unsigned char*) _pool[buf.index].start,
                      buf.bytesused, frameInfo);
        // enqueue the buffer again
        if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1)
        {
            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
                         "Failed to enqueue capture buffer");
        }
    }
    _captureCritSect->Leave();
    usleep(0);
    return true;
}

int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings)
{
    settings.width = _currentWidth;
    settings.height = _currentHeight;
    settings.maxFPS = _currentFrameRate;
    settings.rawType = _captureVideoType;

    return 0;
}
}  // namespace videocapturemodule
}  // namespace webrtc