1 #include <linux/videodev2.h>
2 #include <cstdio>
3 #include <string.h>
4 #include <poll.h>
5 #include <fcntl.h>
6 #include <unistd.h>
7 #include <fstream>
8 #include <sys/ioctl.h>
9 #include <xf86drm.h>
10 #include <glob.h>
12 #include <kms++/kms++.h>
13 #include <kms++util/kms++util.h>
15 #define CAMERA_BUF_QUEUE_SIZE 3
16 #define MAX_CAMERA 9
18 using namespace std;
19 using namespace kms;
/*
 * Who allocates the capture buffers:
 * - DRM: DRM dumb buffers, imported into V4L2 as DMABUFs
 * - V4L2: V4L2 MMAP buffers, exported to DRM as DMABUFs
 */
enum class BufferProvider {
	DRM,
	V4L2,
};
/*
 * One camera device wired to one DRM plane. Captures frames from a V4L2
 * capture device and page-flips them onto the plane via atomic commits.
 */
class CameraPipeline
{
public:
	CameraPipeline(int cam_fd, Card& card, Crtc* crtc, Plane* plane, uint32_t x, uint32_t y,
		       uint32_t iw, uint32_t ih, PixelFormat pixfmt,
		       BufferProvider buffer_provider);
	~CameraPipeline();

	CameraPipeline(const CameraPipeline& other) = delete;
	CameraPipeline& operator=(const CameraPipeline& other) = delete;

	/* Dequeue the newest captured frame and add the plane FB update to req */
	void show_next_frame(AtomicReq &req);
	/* Camera file descriptor, exposed so the caller can poll() on it */
	int fd() const { return m_fd; }
	void start_streaming();
private:
	/* Wrap V4L2 MMAP buffer `i` in a DRM framebuffer via dma-buf export */
	ExtFramebuffer* GetExtFrameBuffer(Card& card, uint32_t i, PixelFormat pixfmt);
	int m_fd;	/* camera file descriptor */
	Crtc* m_crtc;
	Plane* m_plane;
	BufferProvider m_buffer_provider;
	vector<DumbFramebuffer*> m_fb;		/* framebuffers for DRM buffers */
	vector<ExtFramebuffer*> m_extfb;	/* framebuffers for V4L2 buffers */
	int m_prev_fb_index;	/* buffer index currently on screen, -1 before first frame */
	uint32_t m_in_width, m_in_height; /* camera capture resolution */
	/* image properties for display */
	uint32_t m_out_width, m_out_height;
	uint32_t m_out_x, m_out_y;
};
/*
 * Export V4L2 buffer `index` of type `bt` on device `v4lfd` as a dma-buf.
 * On success stores the new fd in *dmafd and returns 0; on failure prints
 * the errno description and returns -1 (*dmafd untouched).
 */
static int buffer_export(int v4lfd, enum v4l2_buf_type bt, uint32_t index, int *dmafd)
{
	struct v4l2_exportbuffer expbuf = { };

	expbuf.type = bt;
	expbuf.index = index;

	if (ioctl(v4lfd, VIDIOC_EXPBUF, &expbuf) < 0) {
		perror("VIDIOC_EXPBUF");
		return -1;
	}

	*dmafd = expbuf.fd;
	return 0;
}
72 ExtFramebuffer* CameraPipeline::GetExtFrameBuffer(Card& card, uint32_t i, PixelFormat pixfmt)
73 {
74 int r, dmafd;
76 r = buffer_export(m_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, i, &dmafd);
77 ASSERT(r == 0);
79 uint32_t handle;
80 r = drmPrimeFDToHandle(card.fd(), dmafd, &handle);
81 ASSERT(r == 0);
83 const PixelFormatInfo& format_info = get_pixel_format_info(pixfmt);
84 ASSERT(format_info.num_planes == 1);
86 uint32_t handles[4] { handle };
87 uint32_t pitches[4] { m_in_width * (format_info.planes[0].bitspp / 8) };
88 uint32_t offsets[4] { };
90 return new ExtFramebuffer(card, m_in_width, m_in_height, pixfmt,
91 handles, pitches, offsets);
92 }
/*
 * Decide whether the discrete frame size `v4ldisc` is a better capture
 * candidate than the current best: it must fit inside the requested
 * iw x ih area and be at least as large, in either dimension, as the
 * current best_w x best_h.
 */
bool inline better_size(struct v4l2_frmsize_discrete* v4ldisc,
			uint32_t iw, uint32_t ih,
			uint32_t best_w, uint32_t best_h)
{
	const bool fits = v4ldisc->width <= iw && v4ldisc->height <= ih;
	const bool not_smaller = v4ldisc->width >= best_w || v4ldisc->height >= best_h;

	return fits && not_smaller;
}
/*
 * Set up one camera-to-plane pipeline:
 *  - pick the largest discrete capture size fitting inside iw x ih,
 *  - program the V4L2 capture format,
 *  - create and queue CAMERA_BUF_QUEUE_SIZE buffers (DRM dumb buffers
 *    imported into V4L2, or V4L2 MMAP buffers exported to DRM,
 *    depending on buffer_provider),
 *  - do the initial atomic plane setup with the first buffer.
 * Takes ownership of cam_fd (closed in the destructor).
 */
CameraPipeline::CameraPipeline(int cam_fd, Card& card, Crtc *crtc, Plane* plane, uint32_t x, uint32_t y,
			       uint32_t iw, uint32_t ih, PixelFormat pixfmt,
			       BufferProvider buffer_provider)
	: m_fd(cam_fd), m_crtc(crtc), m_buffer_provider(buffer_provider), m_prev_fb_index(-1)
{
	int r;
	/* Minimum acceptable capture size; enumeration may improve on it */
	uint32_t best_w = 320;
	uint32_t best_h = 240;

	/* Enumerate the discrete frame sizes for this pixel format and keep
	 * the best one that still fits the requested area */
	struct v4l2_frmsizeenum v4lfrms = { };
	v4lfrms.pixel_format = (uint32_t)pixfmt;
	while (ioctl(m_fd, VIDIOC_ENUM_FRAMESIZES, &v4lfrms) == 0) {
		if (v4lfrms.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
			if (better_size(&v4lfrms.discrete, iw, ih,
					best_w, best_h)) {
				best_w = v4lfrms.discrete.width;
				best_h = v4lfrms.discrete.height;
			}
		} else {
			/* Stepwise/continuous size ranges are not handled */
			break;
		}
		v4lfrms.index++;
	};

	m_out_width = m_in_width = best_w;
	m_out_height = m_in_height = best_h;
	/* Move it to the middle of the requested area */
	m_out_x = x + iw / 2 - m_out_width / 2;
	m_out_y = y + ih / 2 - m_out_height / 2;

	/* Fetch the current format and change only pixelformat and size */
	struct v4l2_format v4lfmt = { };
	v4lfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	r = ioctl(m_fd, VIDIOC_G_FMT, &v4lfmt);
	ASSERT(r == 0);

	v4lfmt.fmt.pix.pixelformat = (uint32_t)pixfmt;
	v4lfmt.fmt.pix.width = m_in_width;
	v4lfmt.fmt.pix.height = m_in_height;

	r = ioctl(m_fd, VIDIOC_S_FMT, &v4lfmt);
	ASSERT(r == 0);

	/* V4L2 allocates the buffers itself (MMAP) or imports ours (DMABUF) */
	uint32_t v4l_mem;

	if (m_buffer_provider == BufferProvider::V4L2)
		v4l_mem = V4L2_MEMORY_MMAP;
	else
		v4l_mem = V4L2_MEMORY_DMABUF;

	struct v4l2_requestbuffers v4lreqbuf = { };
	v4lreqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4lreqbuf.memory = v4l_mem;
	v4lreqbuf.count = CAMERA_BUF_QUEUE_SIZE;
	r = ioctl(m_fd, VIDIOC_REQBUFS, &v4lreqbuf);
	ASSERT(r == 0);
	/* The driver is allowed to allocate fewer buffers than requested */
	ASSERT(v4lreqbuf.count == CAMERA_BUF_QUEUE_SIZE);

	struct v4l2_buffer v4lbuf = { };
	v4lbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4lbuf.memory = v4l_mem;

	/* Create the framebuffers and queue every buffer to the camera */
	for (unsigned i = 0; i < CAMERA_BUF_QUEUE_SIZE; i++) {
		DumbFramebuffer *fb = NULL;
		ExtFramebuffer *extfb = NULL;

		if (m_buffer_provider == BufferProvider::V4L2)
			extfb = GetExtFrameBuffer(card, i, pixfmt);
		else
			fb = new DumbFramebuffer(card, m_in_width,
						 m_in_height, pixfmt);

		v4lbuf.index = i;
		if (m_buffer_provider == BufferProvider::DRM)
			/* Hand the dumb buffer's dma-buf fd to V4L2 */
			v4lbuf.m.fd = fb->prime_fd(0);
		r = ioctl(m_fd, VIDIOC_QBUF, &v4lbuf);
		ASSERT(r == 0);

		if (m_buffer_provider == BufferProvider::V4L2)
			m_extfb.push_back(extfb);
		else
			m_fb.push_back(fb);
	}

	m_plane = plane;

	// Do initial plane setup with first fb, so that we only need to
	// set the FB when page flipping
	AtomicReq req(card);

	Framebuffer *fb;
	if (m_buffer_provider == BufferProvider::V4L2)
		fb = m_extfb[0];
	else
		fb = m_fb[0];

	req.add(m_plane, "CRTC_ID", m_crtc->id());
	req.add(m_plane, "FB_ID", fb->id());

	req.add(m_plane, "CRTC_X", m_out_x);
	req.add(m_plane, "CRTC_Y", m_out_y);
	req.add(m_plane, "CRTC_W", m_out_width);
	req.add(m_plane, "CRTC_H", m_out_height);

	req.add(m_plane, "SRC_X", 0);
	req.add(m_plane, "SRC_Y", 0);
	/* SRC_W/SRC_H are in 16.16 fixed point */
	req.add(m_plane, "SRC_W", m_in_width << 16);
	req.add(m_plane, "SRC_H", m_in_height << 16);

	r = req.commit_sync();
	FAIL_IF(r, "initial plane setup failed");
}
218 CameraPipeline::~CameraPipeline()
219 {
220 for (unsigned i = 0; i < m_fb.size(); i++)
221 delete m_fb[i];
223 for (unsigned i = 0; i < m_extfb.size(); i++)
224 delete m_extfb[i];
226 ::close(m_fd);
227 }
229 void CameraPipeline::start_streaming()
230 {
231 enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
233 int r = ioctl(m_fd, VIDIOC_STREAMON, &type);
234 FAIL_IF(r, "Failed to enable camera stream: %d", r);
235 }
/*
 * Dequeue the newest captured frame, add it as the plane's FB to the
 * atomic request, and give the previously displayed buffer back to the
 * camera. Called when poll() reports the camera fd readable; the caller
 * commits `req` afterwards.
 */
void CameraPipeline::show_next_frame(AtomicReq& req)
{
	int r;
	uint32_t v4l_mem;

	if (m_buffer_provider == BufferProvider::V4L2)
		v4l_mem = V4L2_MEMORY_MMAP;
	else
		v4l_mem = V4L2_MEMORY_DMABUF;

	/* Dequeue a filled buffer; bail out gracefully if none is
	 * available after all */
	struct v4l2_buffer v4l2buf = { };
	v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4l2buf.memory = v4l_mem;
	r = ioctl(m_fd, VIDIOC_DQBUF, &v4l2buf);
	if (r != 0) {
		printf("VIDIOC_DQBUF ioctl failed with %d\n", errno);
		return;
	}

	/* Buffer index identifies which framebuffer holds this frame */
	unsigned fb_index = v4l2buf.index;

	Framebuffer *fb;
	if (m_buffer_provider == BufferProvider::V4L2)
		fb = m_extfb[fb_index];
	else
		fb = m_fb[fb_index];

	req.add(m_plane, "FB_ID", fb->id());

	/* Requeue the buffer that was on screen before this one; the very
	 * first frame has no predecessor (m_prev_fb_index == -1) */
	if (m_prev_fb_index >= 0) {
		memset(&v4l2buf, 0, sizeof(v4l2buf));
		v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		v4l2buf.memory = v4l_mem;
		v4l2buf.index = m_prev_fb_index;
		if (m_buffer_provider == BufferProvider::DRM)
			/* DMABUF queueing needs the dma-buf fd again */
			v4l2buf.m.fd = m_fb[m_prev_fb_index]->prime_fd(0);
		r = ioctl(m_fd, VIDIOC_QBUF, &v4l2buf);
		ASSERT(r == 0);

	}

	m_prev_fb_index = fb_index;
}
281 static bool is_capture_dev(int fd)
282 {
283 struct v4l2_capability cap = { };
284 int r = ioctl(fd, VIDIOC_QUERYCAP, &cap);
285 ASSERT(r == 0);
286 return cap.capabilities & V4L2_CAP_VIDEO_CAPTURE;
287 }
/*
 * Expand a shell-style glob pattern into the list of matching paths.
 * Returns an empty vector if nothing matches or if ::glob() fails
 * (the original code read gl_pathc without checking the return value,
 * which is not guaranteed to be meaningful on error).
 */
std::vector<std::string> glob(const std::string& pat)
{
	glob_t glob_result { };
	std::vector<std::string> ret;

	int r = ::glob(pat.c_str(), 0, NULL, &glob_result);
	if (r == 0) {
		ret.reserve(glob_result.gl_pathc);
		for (unsigned i = 0; i < glob_result.gl_pathc; ++i)
			ret.push_back(std::string(glob_result.gl_pathv[i]));
	}

	globfree(&glob_result);
	return ret;
}
/* Help text printed for -h/--help and on bad arguments */
static const char* usage_str =
		"Usage: kmscapture [OPTIONS]\n\n"
		"Options:\n"
		"  -s, --single               Single camera mode. Open only /dev/video0\n"
		"      --buffer-type=<drm|v4l> Use DRM or V4L provided buffers. Default: DRM\n"
		"  -h, --help                 Print this help\n"
		;
/*
 * Open every V4L2 capture device (or just the first in single-camera
 * mode), assign each one an overlay plane on the first connected
 * display, split the screen horizontally between them, and run a
 * poll()-driven loop that atomically flips new frames onto the planes.
 * Any input on stdin quits the loop.
 */
int main(int argc, char** argv)
{
	BufferProvider buffer_provider = BufferProvider::DRM;
	bool single_cam = false;

	OptionSet optionset = {
		Option("s|single", [&]()
		{
			single_cam = true;
		}),
		Option("|buffer-type=", [&](string s)
		{
			if (s == "v4l")
				buffer_provider = BufferProvider::V4L2;
			else if (s == "drm")
				buffer_provider = BufferProvider::DRM;
			else
				FAIL("Invalid buffer provider: %s", s.c_str());
		}),
		Option("h|help", [&]()
		{
			puts(usage_str);
			exit(-1);
		}),
	};

	optionset.parse(argc, argv);

	/* No positional parameters are accepted */
	if (optionset.params().size() > 0) {
		puts(usage_str);
		exit(-1);
	}

	auto pixfmt = PixelFormat::YUYV;

	Card card;

	/* Display on the current CRTC of the first connected connector */
	auto conn = card.get_first_connected_connector();
	auto crtc = conn->get_current_crtc();
	printf("Display: %dx%d\n", crtc->width(), crtc->height());
	printf("Buffer provider: %s\n", buffer_provider == BufferProvider::V4L2? "V4L" : "DRM");

	/* Collect the usable capture devices */
	vector<int> camera_fds;

	for (string vidpath : glob("/dev/video*")) {
		int fd = ::open(vidpath.c_str(), O_RDWR | O_NONBLOCK);

		if (fd < 0)
			continue;

		if (!is_capture_dev(fd)) {
			close(fd);
			continue;
		}

		camera_fds.push_back(fd);

		if (single_cam)
			break;
	}

	FAIL_IF(camera_fds.size() == 0, "No cameras found");

	/* Each camera needs its own overlay plane supporting the format */
	vector<Plane*> available_planes;
	for (Plane* p : crtc->get_possible_planes()) {
		if (p->plane_type() != PlaneType::Overlay)
			continue;

		if (!p->supports_format(pixfmt))
			continue;

		available_planes.push_back(p);
	}

	FAIL_IF(available_planes.size() < camera_fds.size(), "Not enough video planes for cameras");

	/* Split the screen width evenly between the cameras */
	uint32_t plane_w = crtc->width() / camera_fds.size();
	vector<CameraPipeline*> cameras;

	for (unsigned i = 0; i < camera_fds.size(); ++i) {
		int cam_fd = camera_fds[i];
		Plane* plane = available_planes[i];

		auto cam = new CameraPipeline(cam_fd, card, crtc, plane, i * plane_w, 0,
					      plane_w, crtc->height(), pixfmt, buffer_provider);
		cameras.push_back(cam);
	}

	unsigned nr_cameras = cameras.size();

	/* Poll all camera fds plus stdin (slot nr_cameras), which is used
	 * to quit the main loop */
	vector<pollfd> fds(nr_cameras + 1);

	for (unsigned i = 0; i < nr_cameras; i++) {
		fds[i].fd = cameras[i]->fd();
		fds[i].events = POLLIN;
	}
	fds[nr_cameras].fd = 0;
	fds[nr_cameras].events = POLLIN;

	for (auto cam : cameras)
		cam->start_streaming();

	while (true) {
		int r = poll(fds.data(), nr_cameras + 1, -1);
		ASSERT(r > 0);

		/* Any activity on stdin quits */
		if (fds[nr_cameras].revents != 0)
			break;

		/* Collect one plane update per ready camera into a single
		 * atomic request */
		AtomicReq req(card);

		for (unsigned i = 0; i < nr_cameras; i++) {
			if (!fds[i].revents)
				continue;
			cameras[i]->show_next_frame(req);
			fds[i].revents = 0;
		}

		/* Test first so a failing configuration aborts with an error
		 * instead of a partial commit */
		r = req.test();
		FAIL_IF(r, "Atomic commit failed: %d", r);

		req.commit_sync();
	}

	for (auto cam : cameras)
		delete cam;
}