feat: node-to-node MJPEG streaming CLIs and shared V4L2 format header

Add stream_send_cli (V4L2 capture → TCP → VIDEO_FRAME) and
stream_recv_cli (TCP → threaded frame slot → GLFW display) to
exercise end-to-end streaming between two nodes on the same machine
or across the network.

Add include/stream_stats.h (header-only rolling-window fps/Mbps tracker)
and include/v4l2_fmt.h (header-only V4L2 format enumeration shared between
v4l2_view_cli and stream_send_cli). Refactor v4l2_view_cli to use the
shared header.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
Commit date: 2026-03-28 22:31:54 +00:00
parent 611376dbc1
commit 61c81398bb
8 changed files with 889 additions and 165 deletions

View File

@@ -3,157 +3,14 @@
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <time.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev2.h>
#include "v4l2_fmt.h"
#include "xorg.h"
#define N_BUFS 4
#define MAX_OPTS 1024
/* ------------------------------------------------------------------ */
/* Format option — one (pixfmt, size, fps) combination */
/* ------------------------------------------------------------------ */
typedef struct {
    uint32_t pixfmt;
    int w, h;
    int fps_n; /* fps = fps_n / fps_d */
    int fps_d;
} Fmt_Option;

/*
 * Frame-rate comparison without floating point: compare
 * a->fps_n/a->fps_d against b->fps_n/b->fps_d by cross-multiplying.
 * Products are widened to long long so int-range operands cannot
 * overflow.
 */
static int fps_gt(const Fmt_Option *a, const Fmt_Option *b)
{
    long long lhs = (long long)a->fps_n * b->fps_d;
    long long rhs = (long long)b->fps_n * a->fps_d;
    return lhs > rhs;
}

/* Exact rational equality of the two frame rates. */
static int fps_eq(const Fmt_Option *a, const Fmt_Option *b)
{
    long long lhs = (long long)a->fps_n * b->fps_d;
    long long rhs = (long long)b->fps_n * a->fps_d;
    return lhs == rhs;
}
/* ------------------------------------------------------------------ */
/* Format enumeration */
/* ------------------------------------------------------------------ */
/* Fixed-capacity option list backed by caller-provided storage. */
typedef struct {
    Fmt_Option *opts;
    int n;   /* entries currently stored */
    int max; /* capacity of opts[] */
} Opt_List;

/* Append one (pixfmt, size, fps) entry; silently drops it when full. */
static void opt_push(Opt_List *l, uint32_t pixfmt, int w, int h, int fps_n, int fps_d)
{
    if (l->n < l->max) {
        Fmt_Option o = { pixfmt, w, h, fps_n, fps_d };
        l->opts[l->n] = o;
        l->n++;
    }
}
/*
 * ioctl wrapper that retries when the call is interrupted by a signal
 * (EINTR), so callers never see spurious interruption failures.
 * Returns the final ioctl result (-1 with errno set on real errors).
 */
static int xioctl(int fd, unsigned long req, void *arg)
{
    for (;;) {
        int rc = ioctl(fd, req, arg);
        if (rc != -1 || errno != EINTR) {
            return rc;
        }
    }
}
/*
 * Enumerate the frame intervals the device supports for one
 * (pixfmt, width, height) combination and push each onto the list.
 * V4L2 reports *intervals* (seconds per frame), so numerator and
 * denominator are swapped here to express frames per second.
 */
static void collect_intervals(int fd, uint32_t pixfmt, int w, int h, Opt_List *l)
{
    struct v4l2_frmivalenum fie;
    memset(&fie, 0, sizeof fie);
    fie.pixel_format = pixfmt;
    fie.width = (uint32_t)w;
    fie.height = (uint32_t)h;

    for (fie.index = 0; xioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &fie) == 0; fie.index++) {
        if (fie.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
            /* Stepwise/continuous range: keep only the fastest
             * (minimum) interval, then stop enumerating. */
            opt_push(l, pixfmt, w, h,
                     (int)fie.stepwise.min.denominator,
                     (int)fie.stepwise.min.numerator);
            break;
        }
        opt_push(l, pixfmt, w, h,
                 (int)fie.discrete.denominator,
                 (int)fie.discrete.numerator);
    }
}
/*
 * Enumerate the frame sizes the device supports for one pixel format
 * and collect the frame intervals for each. For stepwise/continuous
 * size ranges only the maximum size is probed, then enumeration stops.
 */
static void collect_sizes(int fd, uint32_t pixfmt, Opt_List *l)
{
    struct v4l2_frmsizeenum fse;
    memset(&fse, 0, sizeof fse);
    fse.pixel_format = pixfmt;

    for (fse.index = 0; xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fse) == 0; fse.index++) {
        int discrete = (fse.type == V4L2_FRMSIZE_TYPE_DISCRETE);
        int w = discrete ? (int)fse.discrete.width  : (int)fse.stepwise.max_width;
        int h = discrete ? (int)fse.discrete.height : (int)fse.stepwise.max_height;
        collect_intervals(fd, pixfmt, w, h, l);
        if (!discrete) {
            break;
        }
    }
}
/*
 * Enumerate every (pixfmt, size, fps) combination the device supports,
 * restricted to the pixel formats this tool can decode (MJPEG, YUYV).
 * When fmt_filter is non-zero, only that pixel format is considered.
 * Returns the number of options written into buf (at most buf_max).
 */
static int enumerate_formats(int fd, Fmt_Option *buf, int buf_max,
                             uint32_t fmt_filter)
{
    Opt_List list = { buf, 0, buf_max };
    struct v4l2_fmtdesc desc;
    memset(&desc, 0, sizeof desc);
    desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    for (desc.index = 0; xioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0; desc.index++) {
        uint32_t pf = desc.pixelformat;
        int decodable = (pf == V4L2_PIX_FMT_MJPEG || pf == V4L2_PIX_FMT_YUYV);
        int wanted = (fmt_filter == 0 || pf == fmt_filter);
        if (decodable && wanted) {
            collect_sizes(fd, pf, &list);
        }
    }
    return list.n;
}
/*
 * Pick the preferred capture mode from the enumerated options:
 *   1. highest frame rate,
 *   2. then largest area (w*h) among equal-FPS entries,
 *   3. then MJPEG over YUYV when FPS and area both tie.
 * Returns NULL when the list is empty.
 */
static const Fmt_Option *select_best(const Fmt_Option *opts, int n)
{
    const Fmt_Option *best = NULL;

    for (int i = 0; i < n; i++) {
        const Fmt_Option *cand = &opts[i];
        if (best == NULL || fps_gt(cand, best)) {
            best = cand;
            continue;
        }
        if (!fps_eq(cand, best)) {
            continue;
        }
        int cand_area = cand->w * cand->h;
        int best_area = best->w * best->h;
        if (cand_area > best_area) {
            best = cand;
        } else if (cand_area == best_area &&
                   cand->pixfmt == V4L2_PIX_FMT_MJPEG &&
                   best->pixfmt != V4L2_PIX_FMT_MJPEG) {
            best = cand;
        }
    }
    return best;
}
/* ------------------------------------------------------------------ */
/* YUYV → planar YUV420 conversion */
@@ -269,7 +126,7 @@ int main(int argc, char **argv)
if (fd < 0) { perror(device); return 1; }
struct v4l2_capability cap = {0};
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
if (v4l2_xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
perror("VIDIOC_QUERYCAP"); close(fd); return 1;
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
@@ -301,14 +158,14 @@ int main(int argc, char **argv)
use_mjpeg = 0;
if (fmt_filter != V4L2_PIX_FMT_YUYV) {
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
if (xioctl(fd, VIDIOC_S_FMT, &fmt) == 0 &&
if (v4l2_xioctl(fd, VIDIOC_S_FMT, &fmt) == 0 &&
fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
use_mjpeg = 1;
}
}
if (!use_mjpeg) {
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
if (xioctl(fd, VIDIOC_S_FMT, &fmt) < 0 ||
if (v4l2_xioctl(fd, VIDIOC_S_FMT, &fmt) < 0 ||
fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) {
fprintf(stderr, "%s: could not set %dx%d in MJPEG or YUYV\n",
device, req_width, req_height);
@@ -321,16 +178,16 @@ int main(int argc, char **argv)
sel_fps_n = 0; sel_fps_d = 1; /* unknown until G_PARM below */
} else {
/* Enumerate all supported modes and pick the best. */
Fmt_Option *opts = malloc(MAX_OPTS * sizeof(*opts));
V4l2_Fmt_Option *opts = malloc(V4L2_FMT_MAX_OPTS * sizeof(*opts));
if (!opts) { fprintf(stderr, "out of memory\n"); close(fd); return 1; }
int n = enumerate_formats(fd, opts, MAX_OPTS, fmt_filter);
int n = v4l2_enumerate_formats(fd, opts, V4L2_FMT_MAX_OPTS, fmt_filter);
if (n == 0) {
fprintf(stderr, "%s: no usable formats found (MJPEG/YUYV)\n", device);
free(opts); close(fd); return 1;
}
const Fmt_Option *best = select_best(opts, n);
const V4l2_Fmt_Option *best = v4l2_select_best(opts, n);
/* Apply the selected format. */
struct v4l2_format fmt = {0};
@@ -339,7 +196,7 @@ int main(int argc, char **argv)
fmt.fmt.pix.width = (uint32_t)best->w;
fmt.fmt.pix.height = (uint32_t)best->h;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
if (xioctl(fd, VIDIOC_S_FMT, &fmt) < 0) {
if (v4l2_xioctl(fd, VIDIOC_S_FMT, &fmt) < 0) {
perror("VIDIOC_S_FMT"); free(opts); close(fd); return 1;
}
@@ -358,9 +215,9 @@ int main(int argc, char **argv)
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
parm.parm.capture.timeperframe.numerator = (uint32_t)sel_fps_d;
parm.parm.capture.timeperframe.denominator = (uint32_t)sel_fps_n;
xioctl(fd, VIDIOC_S_PARM, &parm);
v4l2_xioctl(fd, VIDIOC_S_PARM, &parm);
/* Read back what the driver actually set. */
if (xioctl(fd, VIDIOC_G_PARM, &parm) == 0 &&
if (v4l2_xioctl(fd, VIDIOC_G_PARM, &parm) == 0 &&
parm.parm.capture.timeperframe.denominator > 0) {
sel_fps_n = (int)parm.parm.capture.timeperframe.denominator;
sel_fps_d = (int)parm.parm.capture.timeperframe.numerator;
@@ -380,7 +237,7 @@ int main(int argc, char **argv)
req.count = N_BUFS;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_REQBUFS, &req) < 0) {
if (v4l2_xioctl(fd, VIDIOC_REQBUFS, &req) < 0) {
perror("VIDIOC_REQBUFS"); close(fd); return 1;
}
@@ -390,7 +247,7 @@ int main(int argc, char **argv)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) {
if (v4l2_xioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) {
perror("VIDIOC_QUERYBUF"); close(fd); return 1;
}
bufs[i].length = buf.length;
@@ -406,13 +263,13 @@ int main(int argc, char **argv)
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0) {
if (v4l2_xioctl(fd, VIDIOC_QBUF, &buf) < 0) {
perror("VIDIOC_QBUF"); close(fd); return 1;
}
}
enum v4l2_buf_type stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_STREAMON, &stream_type) < 0) {
if (v4l2_xioctl(fd, VIDIOC_STREAMON, &stream_type) < 0) {
perror("VIDIOC_STREAMON"); close(fd); return 1;
}
@@ -423,7 +280,7 @@ int main(int argc, char **argv)
Xorg_Viewer *v = xorg_viewer_open(win_x, win_y, width, height, "v4l2_view_cli");
if (!v) {
fprintf(stderr, "v4l2_view_cli: failed to open viewer window\n");
xioctl(fd, VIDIOC_STREAMOFF, &stream_type);
v4l2_xioctl(fd, VIDIOC_STREAMOFF, &stream_type);
close(fd); return 1;
}
xorg_viewer_set_scale(v, scale);
@@ -439,7 +296,7 @@ int main(int argc, char **argv)
if (!yuv420_buf) {
fprintf(stderr, "v4l2_view_cli: out of memory\n");
xorg_viewer_close(v);
xioctl(fd, VIDIOC_STREAMOFF, &stream_type);
v4l2_xioctl(fd, VIDIOC_STREAMOFF, &stream_type);
close(fd); return 1;
}
}
@@ -480,7 +337,7 @@ int main(int argc, char **argv)
struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
if (v4l2_xioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
if (errno == EAGAIN) { continue; }
perror("VIDIOC_DQBUF"); break;
}
@@ -497,7 +354,7 @@ int main(int argc, char **argv)
xorg_viewer_push_yuv420(v, y_p, cb_p, cr_p, width, height);
}
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0) {
if (v4l2_xioctl(fd, VIDIOC_QBUF, &buf) < 0) {
perror("VIDIOC_QBUF"); break;
}
@@ -525,7 +382,7 @@ int main(int argc, char **argv)
/* ---------------------------------------------------------------- */
xorg_viewer_close(v);
xioctl(fd, VIDIOC_STREAMOFF, &stream_type);
v4l2_xioctl(fd, VIDIOC_STREAMOFF, &stream_type);
for (unsigned i = 0; i < req.count; i++) {
if (bufs[i].start && bufs[i].start != MAP_FAILED) {
munmap(bufs[i].start, bufs[i].length);