#include <linux/videodev2.h>
#include <cstdio>
#include <cerrno>
#include <string.h>
#include <poll.h>
#include <fcntl.h>
#include <unistd.h>
#include <fstream>
#include <sys/ioctl.h>
#include <xf86drm.h>
#include <glob.h>

#include <kms++/kms++.h>
#include <kms++util/kms++util.h>

#define CAMERA_BUF_QUEUE_SIZE	3
#define MAX_CAMERA		9

using namespace std;
using namespace kms;

enum class BufferProvider {
	DRM,
	V4L2,
};

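/*
 * CameraPipeline ties one V4L2 capture device to one DRM plane: it negotiates
 * a capture resolution, allocates and queues buffers (either V4L2 MMAP buffers
 * exported as dmabufs, or DRM dumb buffers imported into V4L2), and on each
 * dequeued frame updates the plane's FB_ID in an atomic request.
 */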
class CameraPipeline
{
public:
	CameraPipeline(int cam_fd, Card& card, Crtc* crtc, Plane* plane, uint32_t x, uint32_t y,
		       uint32_t iw, uint32_t ih, PixelFormat pixfmt,
		       BufferProvider buffer_provider);
	~CameraPipeline();

	CameraPipeline(const CameraPipeline& other) = delete;
	CameraPipeline& operator=(const CameraPipeline& other) = delete;

	void show_next_frame(AtomicReq& req);
	int fd() const { return m_fd; }
	void start_streaming();

private:
	ExtFramebuffer* GetExtFrameBuffer(Card& card, uint32_t i, PixelFormat pixfmt);

	int m_fd;	/* camera file descriptor */
	Crtc* m_crtc;
	Plane* m_plane;
	BufferProvider m_buffer_provider;
	vector<Framebuffer*> m_fb;
	int m_prev_fb_index;

	uint32_t m_in_width, m_in_height;	/* camera capture resolution */
	/* image properties for display */
	uint32_t m_out_width, m_out_height;
	uint32_t m_out_x, m_out_y;
};

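/*
 * Export a V4L2 MMAP buffer as a dmabuf file descriptor (VIDIOC_EXPBUF) so it
 * can be imported on the DRM side as an ExtFramebuffer.
 */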
static int buffer_export(int v4lfd, enum v4l2_buf_type bt, uint32_t index, int* dmafd)
{
	struct v4l2_exportbuffer expbuf;

	memset(&expbuf, 0, sizeof(expbuf));
	expbuf.type = bt;
	expbuf.index = index;

	if (ioctl(v4lfd, VIDIOC_EXPBUF, &expbuf) == -1) {
		perror("VIDIOC_EXPBUF");
		return -1;
	}

	*dmafd = expbuf.fd;

	return 0;
}

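/*
 * Wrap V4L2 buffer 'i' in a kms++ ExtFramebuffer: export it as a dmabuf and
 * describe its single plane (pitch, offset) to DRM.
 */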
ExtFramebuffer* CameraPipeline::GetExtFrameBuffer(Card& card, uint32_t i, PixelFormat pixfmt)
{
	int r, dmafd;

	r = buffer_export(m_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, i, &dmafd);
	ASSERT(r == 0);

	const PixelFormatInfo& format_info = get_pixel_format_info(pixfmt);
	ASSERT(format_info.num_planes == 1);

	vector<int> fds { dmafd };
	vector<uint32_t> pitches { m_in_width * (format_info.planes[0].bitspp / 8) };
	vector<uint32_t> offsets { 0 };

	return new ExtFramebuffer(card, m_in_width, m_in_height, pixfmt,
				  fds, pitches, offsets);
}

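/*
 * Return true if the discrete frame size fits inside the requested iw x ih
 * area and is at least as large as the current best in width or height.
 */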
bool inline better_size(struct v4l2_frmsize_discrete* v4ldisc,
			uint32_t iw, uint32_t ih,
			uint32_t best_w, uint32_t best_h)
{
	if (v4ldisc->width <= iw && v4ldisc->height <= ih &&
	    (v4ldisc->width >= best_w || v4ldisc->height >= best_h))
		return true;

	return false;
}

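/*
 * Pick a capture size that fits the requested area (falling back to 320x240),
 * configure the V4L2 device, allocate and queue the buffers, and do the
 * initial atomic plane setup.
 */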
CameraPipeline::CameraPipeline(int cam_fd, Card& card, Crtc* crtc, Plane* plane, uint32_t x, uint32_t y,
			       uint32_t iw, uint32_t ih, PixelFormat pixfmt,
			       BufferProvider buffer_provider)
	: m_fd(cam_fd), m_crtc(crtc), m_buffer_provider(buffer_provider), m_prev_fb_index(-1)
{
	int r;
	uint32_t best_w = 320;
	uint32_t best_h = 240;

	struct v4l2_frmsizeenum v4lfrms = { };
	v4lfrms.pixel_format = (uint32_t)pixfmt;

	while (ioctl(m_fd, VIDIOC_ENUM_FRAMESIZES, &v4lfrms) == 0) {
		if (v4lfrms.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
			v4lfrms.index++;
			continue;
		}

		if (v4lfrms.discrete.width == iw && v4lfrms.discrete.height == ih) {
			// Exact match
			best_w = v4lfrms.discrete.width;
			best_h = v4lfrms.discrete.height;
			break;
		} else if (better_size(&v4lfrms.discrete, iw, ih, best_w, best_h)) {
			best_w = v4lfrms.discrete.width;
			best_h = v4lfrms.discrete.height;
		}

		v4lfrms.index++;
	}

	m_out_width = m_in_width = best_w;
	m_out_height = m_in_height = best_h;
	/* Move it to the middle of the requested area */
	m_out_x = x + iw / 2 - m_out_width / 2;
	m_out_y = y + ih / 2 - m_out_height / 2;

	printf("Capture: %ux%u\n", best_w, best_h);

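	/*
	 * Set the capture format: start from the driver's current format and
	 * override the pixel format and frame size chosen above.
	 */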
	struct v4l2_format v4lfmt = { };
	v4lfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	r = ioctl(m_fd, VIDIOC_G_FMT, &v4lfmt);
	ASSERT(r == 0);

	v4lfmt.fmt.pix.pixelformat = (uint32_t)pixfmt;
	v4lfmt.fmt.pix.width = m_in_width;
	v4lfmt.fmt.pix.height = m_in_height;

	r = ioctl(m_fd, VIDIOC_S_FMT, &v4lfmt);
	ASSERT(r == 0);

	uint32_t v4l_mem;

	if (m_buffer_provider == BufferProvider::V4L2)
		v4l_mem = V4L2_MEMORY_MMAP;
	else
		v4l_mem = V4L2_MEMORY_DMABUF;

	struct v4l2_requestbuffers v4lreqbuf = { };
	v4lreqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4lreqbuf.memory = v4l_mem;
	v4lreqbuf.count = CAMERA_BUF_QUEUE_SIZE;
	r = ioctl(m_fd, VIDIOC_REQBUFS, &v4lreqbuf);
	ASSERT(r == 0);
	ASSERT(v4lreqbuf.count == CAMERA_BUF_QUEUE_SIZE);

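	/*
	 * Create one framebuffer per V4L2 buffer and queue them all. With the
	 * V4L2 provider the buffers come from the capture device and are
	 * exported to DRM; with the DRM provider dumb buffers are allocated
	 * on the card and their dmabuf fds are handed to V4L2.
	 */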
	struct v4l2_buffer v4lbuf = { };
	v4lbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4lbuf.memory = v4l_mem;

	for (unsigned i = 0; i < CAMERA_BUF_QUEUE_SIZE; i++) {
		Framebuffer* fb;

		if (m_buffer_provider == BufferProvider::V4L2)
			fb = GetExtFrameBuffer(card, i, pixfmt);
		else
			fb = new DumbFramebuffer(card, m_in_width,
						 m_in_height, pixfmt);

		v4lbuf.index = i;
		if (m_buffer_provider == BufferProvider::DRM)
			v4lbuf.m.fd = fb->prime_fd(0);
		r = ioctl(m_fd, VIDIOC_QBUF, &v4lbuf);
		ASSERT(r == 0);

		m_fb.push_back(fb);
	}

	m_plane = plane;

	// Do initial plane setup with the first fb, so that we only need to
	// set the FB when page flipping
	AtomicReq req(card);

	Framebuffer* fb = m_fb[0];

	req.add(m_plane, "CRTC_ID", m_crtc->id());
	req.add(m_plane, "FB_ID", fb->id());

	req.add(m_plane, "CRTC_X", m_out_x);
	req.add(m_plane, "CRTC_Y", m_out_y);
	req.add(m_plane, "CRTC_W", m_out_width);
	req.add(m_plane, "CRTC_H", m_out_height);

	/* SRC_* values are in 16.16 fixed point */
	req.add(m_plane, "SRC_X", 0);
	req.add(m_plane, "SRC_Y", 0);
	req.add(m_plane, "SRC_W", m_in_width << 16);
	req.add(m_plane, "SRC_H", m_in_height << 16);

	r = req.commit_sync();
	FAIL_IF(r, "initial plane setup failed");
}

CameraPipeline::~CameraPipeline()
{
	for (unsigned i = 0; i < m_fb.size(); i++)
		delete m_fb[i];

	::close(m_fd);
}

void CameraPipeline::start_streaming()
{
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	int r = ioctl(m_fd, VIDIOC_STREAMON, &type);
	FAIL_IF(r, "Failed to enable camera stream: %d", r);
}

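/*
 * Dequeue the newest captured buffer, point the plane's FB_ID at it in the
 * atomic request, and requeue the previously displayed buffer back to V4L2.
 * The current buffer stays out of the V4L2 queue while the display scans it out.
 */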
void CameraPipeline::show_next_frame(AtomicReq& req)
{
	int r;
	uint32_t v4l_mem;

	if (m_buffer_provider == BufferProvider::V4L2)
		v4l_mem = V4L2_MEMORY_MMAP;
	else
		v4l_mem = V4L2_MEMORY_DMABUF;

	struct v4l2_buffer v4l2buf = { };
	v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4l2buf.memory = v4l_mem;
	r = ioctl(m_fd, VIDIOC_DQBUF, &v4l2buf);
	if (r != 0) {
		printf("VIDIOC_DQBUF ioctl failed with %d\n", errno);
		return;
	}

	unsigned fb_index = v4l2buf.index;

	Framebuffer* fb = m_fb[fb_index];

	req.add(m_plane, "FB_ID", fb->id());

	if (m_prev_fb_index >= 0) {
		memset(&v4l2buf, 0, sizeof(v4l2buf));
		v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		v4l2buf.memory = v4l_mem;
		v4l2buf.index = m_prev_fb_index;
		if (m_buffer_provider == BufferProvider::DRM)
			v4l2buf.m.fd = m_fb[m_prev_fb_index]->prime_fd(0);
		r = ioctl(m_fd, VIDIOC_QBUF, &v4l2buf);
		ASSERT(r == 0);
	}

	m_prev_fb_index = fb_index;
}

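/* Return true if the V4L2 device advertises the video capture capability. */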
static bool is_capture_dev(int fd)
{
	struct v4l2_capability cap = { };
	int r = ioctl(fd, VIDIOC_QUERYCAP, &cap);
	ASSERT(r == 0);
	return cap.capabilities & V4L2_CAP_VIDEO_CAPTURE;
}

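/* Expand a shell-style glob pattern into a list of matching paths. */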
std::vector<std::string> glob(const std::string& pat)
{
	glob_t glob_result;
	glob(pat.c_str(), 0, NULL, &glob_result);
	vector<string> ret;
	for (unsigned i = 0; i < glob_result.gl_pathc; ++i)
		ret.push_back(string(glob_result.gl_pathv[i]));
	globfree(&glob_result);
	return ret;
}

static const char* usage_str =
	"Usage: kmscapture [OPTIONS]\n\n"
	"Options:\n"
	"  -s, --single                Single camera mode. Open only /dev/video0\n"
	"      --buffer-type=<drm|v4l> Use DRM or V4L provided buffers. Default: DRM\n"
	"  -h, --help                  Print this help\n"
	;

int main(int argc, char** argv)
{
	BufferProvider buffer_provider = BufferProvider::DRM;
	bool single_cam = false;

	OptionSet optionset = {
		Option("s|single", [&]()
		{
			single_cam = true;
		}),
		Option("|buffer-type=", [&](string s)
		{
			if (s == "v4l")
				buffer_provider = BufferProvider::V4L2;
			else if (s == "drm")
				buffer_provider = BufferProvider::DRM;
			else
				FAIL("Invalid buffer provider: %s", s.c_str());
		}),
		Option("h|help", [&]()
		{
			puts(usage_str);
			exit(-1);
		}),
	};

	optionset.parse(argc, argv);

	if (optionset.params().size() > 0) {
		puts(usage_str);
		exit(-1);
	}

	auto pixfmt = PixelFormat::YUYV;

	Card card;

	auto conn = card.get_first_connected_connector();
	auto crtc = conn->get_current_crtc();
	printf("Display: %dx%d\n", crtc->width(), crtc->height());
	printf("Buffer provider: %s\n", buffer_provider == BufferProvider::V4L2 ? "V4L" : "DRM");

	vector<int> camera_fds;

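	/* Open every /dev/video* node that supports video capture */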
	for (string vidpath : glob("/dev/video*")) {
		int fd = ::open(vidpath.c_str(), O_RDWR | O_NONBLOCK);

		if (fd < 0)
			continue;

		if (!is_capture_dev(fd)) {
			close(fd);
			continue;
		}

		camera_fds.push_back(fd);
		printf("Using %s\n", vidpath.c_str());

		if (single_cam)
			break;
	}

	FAIL_IF(camera_fds.size() == 0, "No cameras found");

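	/* Collect overlay planes on the CRTC that support the capture pixel format */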
	vector<Plane*> available_planes;
	for (Plane* p : crtc->get_possible_planes()) {
		if (p->plane_type() != PlaneType::Overlay)
			continue;

		if (!p->supports_format(pixfmt))
			continue;

		available_planes.push_back(p);
	}

	FAIL_IF(available_planes.size() < camera_fds.size(), "Not enough video planes for cameras");

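	/* Split the display into equal-width columns, one camera pipeline per column */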
	uint32_t plane_w = crtc->width() / camera_fds.size();
	vector<CameraPipeline*> cameras;

	for (unsigned i = 0; i < camera_fds.size(); ++i) {
		int cam_fd = camera_fds[i];
		Plane* plane = available_planes[i];

		auto cam = new CameraPipeline(cam_fd, card, crtc, plane, i * plane_w, 0,
					      plane_w, crtc->height(), pixfmt, buffer_provider);
		cameras.push_back(cam);
	}

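	/* Poll the camera fds for new frames, plus stdin so a keypress exits the main loop */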
	unsigned nr_cameras = cameras.size();

	vector<pollfd> fds(nr_cameras + 1);

	for (unsigned i = 0; i < nr_cameras; i++) {
		fds[i].fd = cameras[i]->fd();
		fds[i].events = POLLIN;
	}
	fds[nr_cameras].fd = 0;
	fds[nr_cameras].events = POLLIN;

	for (auto cam : cameras)
		cam->start_streaming();

	while (true) {
		int r = poll(fds.data(), nr_cameras + 1, -1);
		ASSERT(r > 0);

		if (fds[nr_cameras].revents != 0)
			break;

		AtomicReq req(card);

		for (unsigned i = 0; i < nr_cameras; i++) {
			if (!fds[i].revents)
				continue;
			cameras[i]->show_next_frame(req);
			fds[i].revents = 0;
		}

		r = req.test();
		FAIL_IF(r, "Atomic commit failed: %d", r);

		req.commit_sync();
	}

	for (auto cam : cameras)
		delete cam;
}