1 #include <linux/videodev2.h>
2 #include <cstdio>
3 #include <string.h>
4 #include <poll.h>
5 #include <fcntl.h>
6 #include <unistd.h>
7 #include <fstream>
8 #include <sys/ioctl.h>
9 #include <xf86drm.h>
10 #include <glob.h>
12 #include <kms++.h>
13 #include <kms++util.h>
14 #include <opts.h>
/* Number of capture buffers cycled between the camera and the display */
#define CAMERA_BUF_QUEUE_SIZE 3
/* Upper bound on simultaneously handled cameras */
#define MAX_CAMERA 9

using namespace std;
using namespace kms;

/* Selects who allocates the capture buffers: DRM dumb buffers exported
 * to V4L2 as dmabufs, or V4L2 MMAP buffers imported into DRM. */
enum class BufferProvider {
	DRM,
	V4L2,
};
/*
 * CameraPipeline ties one V4L2 capture device to one DRM plane: it picks a
 * capture resolution, allocates/imports a small ring of framebuffers, and on
 * each dequeued frame flips the plane to the corresponding framebuffer.
 */
class CameraPipeline
{
public:
	/* x/y/iw/ih describe the screen area this camera may use; the actual
	 * image is centered inside it at the chosen capture resolution. */
	CameraPipeline(int cam_fd, Card& card, Crtc* crtc, Plane* plane, uint32_t x, uint32_t y,
		       uint32_t iw, uint32_t ih, PixelFormat pixfmt,
		       BufferProvider buffer_provider);
	~CameraPipeline();

	/* Owns m_fd and raw framebuffer pointers: non-copyable */
	CameraPipeline(const CameraPipeline& other) = delete;
	CameraPipeline& operator=(const CameraPipeline& other) = delete;

	/* Dequeue the newest captured frame and add the plane update to req */
	void show_next_frame(AtomicReq &req);
	int fd() const { return m_fd; }
	void start_streaming();
private:
	/* Wrap V4L2 MMAP buffer 'i' as a DRM framebuffer via dmabuf import */
	ExtFramebuffer* GetExtFrameBuffer(Card& card, uint32_t i, PixelFormat pixfmt);
	int m_fd;	/* camera file descriptor */
	Crtc* m_crtc;
	Plane* m_plane;
	BufferProvider m_buffer_provider;
	vector<DumbFramebuffer*> m_fb;		/* framebuffers for DRM buffers */
	vector<ExtFramebuffer*> m_extfb;	/* framebuffers for V4L2 buffers */
	int m_prev_fb_index;	/* buffer currently on screen; -1 before first frame */
	uint32_t m_in_width, m_in_height; /* camera capture resolution */
	/* image properties for display */
	uint32_t m_out_width, m_out_height;
	uint32_t m_out_x, m_out_y;
};
56 static int buffer_export(int v4lfd, enum v4l2_buf_type bt, uint32_t index, int *dmafd)
57 {
58 struct v4l2_exportbuffer expbuf;
60 memset(&expbuf, 0, sizeof(expbuf));
61 expbuf.type = bt;
62 expbuf.index = index;
63 if (ioctl(v4lfd, VIDIOC_EXPBUF, &expbuf) == -1) {
64 perror("VIDIOC_EXPBUF");
65 return -1;
66 }
68 *dmafd = expbuf.fd;
70 return 0;
71 }
73 ExtFramebuffer* CameraPipeline::GetExtFrameBuffer(Card& card, uint32_t i, PixelFormat pixfmt)
74 {
75 int r, dmafd;
77 r = buffer_export(m_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, i, &dmafd);
78 ASSERT(r == 0);
80 uint32_t handle;
81 r = drmPrimeFDToHandle(card.fd(), dmafd, &handle);
82 ASSERT(r == 0);
84 const PixelFormatInfo& format_info = get_pixel_format_info(pixfmt);
85 ASSERT(format_info.num_planes == 1);
87 uint32_t handles[4] { handle };
88 uint32_t pitches[4] { m_in_width * (format_info.planes[0].bitspp / 8) };
89 uint32_t offsets[4] { };
91 return new ExtFramebuffer(card, m_in_width, m_in_height, pixfmt,
92 handles, pitches, offsets);
93 }
/*
 * Return true if the discrete frame size fits inside the requested area
 * (iw x ih) and is at least as large as the current best candidate in at
 * least one dimension.
 */
bool inline better_size(struct v4l2_frmsize_discrete* v4ldisc,
			uint32_t iw, uint32_t ih,
			uint32_t best_w, uint32_t best_h)
{
	const bool fits = v4ldisc->width <= iw && v4ldisc->height <= ih;
	const bool improves = v4ldisc->width >= best_w || v4ldisc->height >= best_h;

	return fits && improves;
}
/*
 * Probe the camera, negotiate a capture size/format, allocate the buffer
 * queue (DRM dumb buffers or V4L2 MMAP buffers), queue all buffers, and do
 * the initial atomic plane setup so later flips only need to change FB_ID.
 */
CameraPipeline::CameraPipeline(int cam_fd, Card& card, Crtc *crtc, Plane* plane, uint32_t x, uint32_t y,
			       uint32_t iw, uint32_t ih, PixelFormat pixfmt,
			       BufferProvider buffer_provider)
	: m_fd(cam_fd), m_crtc(crtc), m_buffer_provider(buffer_provider), m_prev_fb_index(-1)
{
	int r;
	/* Fallback capture size if enumeration finds nothing better */
	uint32_t best_w = 320;
	uint32_t best_h = 240;

	/* Enumerate discrete frame sizes and keep the largest one that still
	 * fits in the requested iw x ih area. Non-discrete (stepwise)
	 * enumeration is not handled; bail out of the loop in that case. */
	struct v4l2_frmsizeenum v4lfrms = { };
	v4lfrms.pixel_format = (uint32_t)pixfmt;
	while (ioctl(m_fd, VIDIOC_ENUM_FRAMESIZES, &v4lfrms) == 0) {
		if (v4lfrms.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
			if (better_size(&v4lfrms.discrete, iw, ih,
					best_w, best_h)) {
				best_w = v4lfrms.discrete.width;
				best_h = v4lfrms.discrete.height;
			}
		} else {
			break;
		}
		v4lfrms.index++;
	};

	m_out_width = m_in_width = best_w;
	m_out_height = m_in_height = best_h;
	/* Move it to the middle of the requested area */
	m_out_x = x + iw / 2 - m_out_width / 2;
	m_out_y = y + ih / 2 - m_out_height / 2;

	/* Read the current format, then override size and pixelformat */
	struct v4l2_format v4lfmt = { };
	v4lfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	r = ioctl(m_fd, VIDIOC_G_FMT, &v4lfmt);
	ASSERT(r == 0);

	v4lfmt.fmt.pix.pixelformat = (uint32_t)pixfmt;
	v4lfmt.fmt.pix.width = m_in_width;
	v4lfmt.fmt.pix.height = m_in_height;

	r = ioctl(m_fd, VIDIOC_S_FMT, &v4lfmt);
	ASSERT(r == 0);

	/* V4L2-provided buffers are MMAP; DRM dumb buffers are handed to the
	 * camera as dmabufs. */
	uint32_t v4l_mem;

	if (m_buffer_provider == BufferProvider::V4L2)
		v4l_mem = V4L2_MEMORY_MMAP;
	else
		v4l_mem = V4L2_MEMORY_DMABUF;

	struct v4l2_requestbuffers v4lreqbuf = { };
	v4lreqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4lreqbuf.memory = v4l_mem;
	v4lreqbuf.count = CAMERA_BUF_QUEUE_SIZE;
	r = ioctl(m_fd, VIDIOC_REQBUFS, &v4lreqbuf);
	ASSERT(r == 0);
	/* The driver may grant fewer buffers than requested; we require all */
	ASSERT(v4lreqbuf.count == CAMERA_BUF_QUEUE_SIZE);

	struct v4l2_buffer v4lbuf = { };
	v4lbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4lbuf.memory = v4l_mem;

	/* Create (or import) one framebuffer per queue slot and queue it */
	for (unsigned i = 0; i < CAMERA_BUF_QUEUE_SIZE; i++) {
		DumbFramebuffer *fb = NULL;
		ExtFramebuffer *extfb = NULL;

		if (m_buffer_provider == BufferProvider::V4L2)
			extfb = GetExtFrameBuffer(card, i, pixfmt);
		else
			fb = new DumbFramebuffer(card, m_in_width,
						 m_in_height, pixfmt);

		v4lbuf.index = i;
		/* DMABUF memory needs the dumb buffer's exported fd */
		if (m_buffer_provider == BufferProvider::DRM)
			v4lbuf.m.fd = fb->prime_fd(0);
		r = ioctl(m_fd, VIDIOC_QBUF, &v4lbuf);
		ASSERT(r == 0);

		if (m_buffer_provider == BufferProvider::V4L2)
			m_extfb.push_back(extfb);
		else
			m_fb.push_back(fb);
	}

	m_plane = plane;

	// Do initial plane setup with first fb, so that we only need to
	// set the FB when page flipping
	AtomicReq req(card);

	Framebuffer *fb;
	if (m_buffer_provider == BufferProvider::V4L2)
		fb = m_extfb[0];
	else
		fb = m_fb[0];

	req.add(m_plane, "CRTC_ID", m_crtc->id());
	req.add(m_plane, "FB_ID", fb->id());

	req.add(m_plane, "CRTC_X", m_out_x);
	req.add(m_plane, "CRTC_Y", m_out_y);
	req.add(m_plane, "CRTC_W", m_out_width);
	req.add(m_plane, "CRTC_H", m_out_height);

	req.add(m_plane, "SRC_X", 0);
	req.add(m_plane, "SRC_Y", 0);
	/* SRC_* are 16.16 fixed point */
	req.add(m_plane, "SRC_W", m_in_width << 16);
	req.add(m_plane, "SRC_H", m_in_height << 16);

	r = req.commit_sync();
	FAIL_IF(r, "initial plane setup failed");
}
219 CameraPipeline::~CameraPipeline()
220 {
221 for (unsigned i = 0; i < m_fb.size(); i++)
222 delete m_fb[i];
224 for (unsigned i = 0; i < m_extfb.size(); i++)
225 delete m_extfb[i];
227 ::close(m_fd);
228 }
230 void CameraPipeline::start_streaming()
231 {
232 enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
234 int r = ioctl(m_fd, VIDIOC_STREAMON, &type);
235 FAIL_IF(r, "Failed to enable camera stream: %d", r);
236 }
238 void CameraPipeline::show_next_frame(AtomicReq& req)
239 {
240 int r;
241 uint32_t v4l_mem;
243 if (m_buffer_provider == BufferProvider::V4L2)
244 v4l_mem = V4L2_MEMORY_MMAP;
245 else
246 v4l_mem = V4L2_MEMORY_DMABUF;
248 struct v4l2_buffer v4l2buf = { };
249 v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
250 v4l2buf.memory = v4l_mem;
251 r = ioctl(m_fd, VIDIOC_DQBUF, &v4l2buf);
252 if (r != 0) {
253 printf("VIDIOC_DQBUF ioctl failed with %d\n", errno);
254 return;
255 }
257 unsigned fb_index = v4l2buf.index;
259 Framebuffer *fb;
260 if (m_buffer_provider == BufferProvider::V4L2)
261 fb = m_extfb[fb_index];
262 else
263 fb = m_fb[fb_index];
265 req.add(m_plane, "FB_ID", fb->id());
267 if (m_prev_fb_index >= 0) {
268 memset(&v4l2buf, 0, sizeof(v4l2buf));
269 v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
270 v4l2buf.memory = v4l_mem;
271 v4l2buf.index = m_prev_fb_index;
272 if (m_buffer_provider == BufferProvider::DRM)
273 v4l2buf.m.fd = m_fb[m_prev_fb_index]->prime_fd(0);
274 r = ioctl(m_fd, VIDIOC_QBUF, &v4l2buf);
275 ASSERT(r == 0);
277 }
279 m_prev_fb_index = fb_index;
280 }
282 static bool is_capture_dev(int fd)
283 {
284 struct v4l2_capability cap = { };
285 int r = ioctl(fd, VIDIOC_QUERYCAP, &cap);
286 ASSERT(r == 0);
287 return cap.capabilities & V4L2_CAP_VIDEO_CAPTURE;
288 }
/*
 * Expand a shell glob pattern into the list of matching paths.
 * Returns an empty vector when nothing matches or expansion fails.
 */
std::vector<std::string> glob(const std::string& pat)
{
	glob_t glob_result;
	std::vector<std::string> ret;

	// The call resolves to the C library glob(3). Check its result: on
	// failure (e.g. GLOB_NOSPACE/GLOB_ABORTED) gl_pathv must not be read.
	int r = glob(pat.c_str(), 0, NULL, &glob_result);
	if (r == 0) {
		for (unsigned i = 0; i < glob_result.gl_pathc; ++i)
			ret.push_back(std::string(glob_result.gl_pathv[i]));
	}

	globfree(&glob_result);
	return ret;
}
/* Help text printed for --help and on invalid invocation */
static const char* usage_str =
	"Usage: kmscapture [OPTIONS]\n\n"
	"Options:\n"
	"  -s, --single                Single camera mode. Open only /dev/video0\n"
	"      --buffer-type=<drm|v4l> Use DRM or V4L provided buffers. Default: DRM\n"
	"  -h, --help                  Print this help\n"
	;
int main(int argc, char** argv)
{
	BufferProvider buffer_provider = BufferProvider::DRM;
	bool single_cam = false;

	/* Command-line options: -s/--single, --buffer-type=, -h/--help */
	OptionSet optionset = {
		Option("s|single", [&]()
		{
			single_cam = true;
		}),
		Option("|buffer-type=", [&](string s)
		{
			if (s == "v4l")
				buffer_provider = BufferProvider::V4L2;
			else if (s == "drm")
				buffer_provider = BufferProvider::DRM;
			else
				FAIL("Invalid buffer provider: %s", s.c_str());
		}),
		Option("h|help", [&]()
		{
			puts(usage_str);
			exit(-1);
		}),
	};

	optionset.parse(argc, argv);

	/* No positional arguments are accepted */
	if (optionset.params().size() > 0) {
		puts(usage_str);
		exit(-1);
	}

	auto pixfmt = PixelFormat::YUYV;

	Card card;

	auto conn = card.get_first_connected_connector();
	auto crtc = conn->get_current_crtc();
	printf("Display: %dx%d\n", crtc->width(), crtc->height());
	printf("Buffer provider: %s\n", buffer_provider == BufferProvider::V4L2? "V4L" : "DRM");

	/* Open every /dev/video* node that is actually a capture device */
	vector<int> camera_fds;

	for (string vidpath : glob("/dev/video*")) {
		int fd = ::open(vidpath.c_str(), O_RDWR | O_NONBLOCK);

		if (fd < 0)
			continue;

		if (!is_capture_dev(fd)) {
			close(fd);
			continue;
		}

		camera_fds.push_back(fd);

		if (single_cam)
			break;
	}

	FAIL_IF(camera_fds.size() == 0, "No cameras found");

	/* Collect overlay planes on the current CRTC that support the format */
	vector<Plane*> available_planes;
	for (Plane* p : crtc->get_possible_planes()) {
		if (p->plane_type() != PlaneType::Overlay)
			continue;

		if (!p->supports_format(pixfmt))
			continue;

		available_planes.push_back(p);
	}

	FAIL_IF(available_planes.size() < camera_fds.size(), "Not enough video planes for cameras");

	/* Tile the cameras side by side across the display width */
	uint32_t plane_w = crtc->width() / camera_fds.size();
	vector<CameraPipeline*> cameras;

	for (unsigned i = 0; i < camera_fds.size(); ++i) {
		int cam_fd = camera_fds[i];
		Plane* plane = available_planes[i];

		auto cam = new CameraPipeline(cam_fd, card, crtc, plane, i * plane_w, 0,
					      plane_w, crtc->height(), pixfmt, buffer_provider);
		cameras.push_back(cam);
	}

	unsigned nr_cameras = cameras.size();

	/* One pollfd per camera plus one for stdin (fd 0) to allow quitting */
	vector<pollfd> fds(nr_cameras + 1);

	for (unsigned i = 0; i < nr_cameras; i++) {
		fds[i].fd = cameras[i]->fd();
		fds[i].events = POLLIN;
	}
	fds[nr_cameras].fd = 0;
	fds[nr_cameras].events = POLLIN;

	for (auto cam : cameras)
		cam->start_streaming();

	/* Main loop: wait for frames, batch all plane updates into one atomic
	 * commit per wakeup; any input on stdin exits the loop. */
	while (true) {
		int r = poll(fds.data(), nr_cameras + 1, -1);
		ASSERT(r > 0);

		if (fds[nr_cameras].revents != 0)
			break;

		AtomicReq req(card);

		for (unsigned i = 0; i < nr_cameras; i++) {
			if (!fds[i].revents)
				continue;
			cameras[i]->show_next_frame(req);
			fds[i].revents = 0;
		}

		/* Validate the request before committing */
		r = req.test();
		FAIL_IF(r, "Atomic commit failed: %d", r);

		req.commit_sync();
	}

	for (auto cam : cameras)
		delete cam;
}