#include "precomp.hpp"
#if !defined _WIN32 && (defined HAVE_CAMV4L2 || defined HAVE_VIDEOIO)
#include <stdio.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/mman.h>
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#ifdef HAVE_CAMV4L2
#include <asm/types.h>
#include <linux/videodev2.h>
#endif
#ifdef HAVE_VIDEOIO
#include <sys/videoio.h>
#endif
#define DEFAULT_V4L_WIDTH 640
#define DEFAULT_V4L_HEIGHT 480
#define DEFAULT_V4L_FPS 30
#define CHANNEL_NUMBER 1
#define MAX_CAMERAS 8
#define MAX_V4L_BUFFERS 10
#define DEFAULT_V4L_BUFFERS 4
#define V4L_ABORT_BADJPEG
#define MAX_DEVICE_DRIVER_NAME 80
namespace cv {
// One capture buffer: for slots 0..req.count-1 this is an mmap()ed kernel
// buffer; the extra slot buffers[MAX_V4L_BUFFERS] is a malloc()ed copy of
// the most recently dequeued frame (see read_frame_v4l2).
struct buffer
{
void * start;   // mapped (or malloc'ed, for the spare slot) address
size_t length;  // size of the mapping/allocation in bytes
};
// V4L2 camera capture backend. Owns the device file descriptor, the
// memory-mapped streaming buffers, and the IplImage handed back to callers.
// Instances are created with value-initialization (see the factories at the
// bottom of the file), so members start zeroed.
struct CvCaptureCAM_V4L CV_FINAL : public CvCapture
{
int getCaptureDomain() CV_OVERRIDE { return cv::CAP_V4L; }
// File descriptor of the open /dev/video* node (-1 when closed).
int deviceHandle;
// Index of the buffer holding the current frame; set to MAX_V4L_BUFFERS
// (the spare copy slot) after a successful dequeue.
int bufferIndex;
// Non-zero until the first frame has been grabbed (buffers queued,
// streaming started).
int FirstCapture;
String deviceName;
char *memoryMap;
// Output image header; imageData is either cvAlloc'ed (convert_rgb mode)
// or points directly into a capture buffer (raw mode).
IplImage frame;
// Negotiated V4L2 pixel format (V4L2_PIX_FMT_*).
__u32 palette;
int width, height;
// Staged width/height from setProperty; applied once both are non-zero.
int width_set, height_set;
// Number of streaming buffers to request from the driver.
int bufferSize;
__u32 fps;
// When true, retrieveFrame converts the raw payload to 8-bit BGR.
bool convert_rgb;
bool frame_allocated;
// Cleared when a fatal dequeue error occurred; retrieveFrame then returns 0.
bool returnFrame;
// Slots 0..MAX_V4L_BUFFERS-1: mmap'ed; slot MAX_V4L_BUFFERS: local copy.
buffer buffers[MAX_V4L_BUFFERS + 1];
v4l2_capability cap;
v4l2_input inp;
v4l2_format form;
v4l2_crop crop;
v4l2_cropcap cropcap;
v4l2_requestbuffers req;
v4l2_buf_type type;
v4l2_queryctrl queryctrl;
// Timestamp of the last dequeued frame (from the driver).
timeval timestamp;
// Control ranges discovered by v4l2_scan_controls; used to normalize
// property values to/from [0, 1].
Range focus, brightness, contrast, saturation, hue, gain, exposure;
bool open(int _index);
bool open(const char* deviceName);
virtual double getProperty(int) const CV_OVERRIDE;
virtual bool setProperty(int, double) CV_OVERRIDE;
virtual bool grabFrame() CV_OVERRIDE;
virtual IplImage* retrieveFrame(int) CV_OVERRIDE;
// Returns the raw driver range for a property, or a fixed fallback range
// for controls that were not probed.
Range getRange(int property_id) const {
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
return brightness;
case CV_CAP_PROP_CONTRAST:
return contrast;
case CV_CAP_PROP_SATURATION:
return saturation;
case CV_CAP_PROP_HUE:
return hue;
case CV_CAP_PROP_GAIN:
return gain;
case CV_CAP_PROP_EXPOSURE:
return exposure;
case CV_CAP_PROP_FOCUS:
return focus;
case CV_CAP_PROP_AUTOFOCUS:
return Range(0, 1);
case CV_CAP_PROP_AUTO_EXPOSURE:
return Range(0, 4);
default:
// Assumed 8-bit control range for anything unprobed.
return Range(0, 255);
}
}
virtual ~CvCaptureCAM_V4L();
};
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture );
static bool icvGrabFrameCAM_V4L( CvCaptureCAM_V4L* capture );
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int );
static double icvGetPropertyCAM_V4L( const CvCaptureCAM_V4L* capture, int property_id );
static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, int property_id, double value );
// Releases the device, unmaps buffers and frees the frame via the common
// close path.
CvCaptureCAM_V4L::~CvCaptureCAM_V4L() {
icvCloseCAM_V4L(this);
}
// Asks the driver to switch to the currently requested pixel format and
// frame size. Succeeds only if the driver kept the exact format we asked
// for (drivers may silently substitute another one).
static bool try_palette_v4l2(CvCaptureCAM_V4L* capture)
{
    v4l2_format& fmt = capture->form;
    fmt = v4l2_format();
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.pixelformat = capture->palette;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    fmt.fmt.pix.width = capture->width;
    fmt.fmt.pix.height = capture->height;
    if (ioctl(capture->deviceHandle, VIDIOC_S_FMT, &fmt) == -1)
        return false;
    return capture->palette == fmt.fmt.pix.pixelformat;
}
// Opens the device node and verifies it is a usable V4L2 capture device.
// Returns -1 when the node cannot be opened at all, 0 when it opened but
// does not answer the basic V4L2 queries, 1 on success. On every failure
// path the capture is closed again.
static int try_init_v4l2(CvCaptureCAM_V4L* capture, const char *deviceName)
{
    int deviceIndex;
    capture->deviceHandle = open(deviceName, O_RDWR | O_NONBLOCK, 0);
    if (capture->deviceHandle == -1)
    {
#ifndef NDEBUG
        fprintf(stderr, "(DEBUG) try_init_v4l2 open \"%s\": %s\n", deviceName, strerror(errno));
#endif
        icvCloseCAM_V4L(capture);
        return -1;
    }
    capture->cap = v4l2_capability();
    if (ioctl(capture->deviceHandle, VIDIOC_QUERYCAP, &capture->cap) == -1)
    {
#ifndef NDEBUG
        fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_QUERYCAP \"%s\": %s\n", deviceName, strerror(errno));
#endif
        icvCloseCAM_V4L(capture);
        return 0;
    }
    if (ioctl(capture->deviceHandle, VIDIOC_G_INPUT, &deviceIndex) == -1)
    {
#ifndef NDEBUG
        fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_G_INPUT \"%s\": %s\n", deviceName, strerror(errno));
#endif
        icvCloseCAM_V4L(capture);
        return 0;
    }
    // Fetch the description of the currently selected input.
    capture->inp = v4l2_input();
    capture->inp.index = deviceIndex;
    if (ioctl(capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp) == -1)
    {
#ifndef NDEBUG
        fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_ENUMINPUT \"%s\": %s\n", deviceName, strerror(errno));
#endif
        icvCloseCAM_V4L(capture);
        return 0;
    }
    return 1;
}
// Negotiates a capture pixel format. First honours an explicitly requested
// palette (capture->palette != 0); otherwise walks a preference list from
// cheapest-to-convert to most expensive until the driver accepts one.
// Returns 0 on success, -1 (after closing the device) when nothing fits.
static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture) {
    // Idiom fix: use '&&' instead of the alternative token 'and'.
    if (capture->palette != 0 && try_palette_v4l2(capture)) {
        return 0;
    }
    __u32 try_order[] = {
        V4L2_PIX_FMT_BGR24,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_YUV420,
        V4L2_PIX_FMT_YUV411P,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_UYVY,
        V4L2_PIX_FMT_SBGGR8,
        V4L2_PIX_FMT_SGBRG8,
        V4L2_PIX_FMT_SN9C10X,
#ifdef HAVE_JPEG
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_JPEG,
#endif
        V4L2_PIX_FMT_Y16,
        V4L2_PIX_FMT_GREY
    };
    for (size_t i = 0; i < sizeof(try_order) / sizeof(__u32); i++) {
        capture->palette = try_order[i];
        if (try_palette_v4l2(capture)) {
            return 0;
        }
    }
    fprintf(stderr,
            "VIDEOIO ERROR: V4L2: Pixel format of incoming image is unsupported by OpenCV\n");
    icvCloseCAM_V4L(capture);
    return -1;
}
// Queries the driver for the [min, max] range of one control and stores it
// in the matching member of the capture object; unknown/disabled controls
// are ignored.
static void v4l2_control_range(CvCaptureCAM_V4L* cap, __u32 id)
{
    cap->queryctrl = v4l2_queryctrl();
    cap->queryctrl.id = id;
    if (ioctl(cap->deviceHandle, VIDIOC_QUERYCTRL, &cap->queryctrl) != 0)
    {
        // EINVAL just means "no such control" while probing; stay quiet.
        if (errno != EINVAL)
            perror ("VIDIOC_QUERYCTRL");
        return;
    }
    if (cap->queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
        return;
    // Map the control id to the member that caches its range.
    Range* target = NULL;
    switch (cap->queryctrl.id)
    {
    case V4L2_CID_BRIGHTNESS:        target = &cap->brightness; break;
    case V4L2_CID_CONTRAST:          target = &cap->contrast;   break;
    case V4L2_CID_SATURATION:        target = &cap->saturation; break;
    case V4L2_CID_HUE:               target = &cap->hue;        break;
    case V4L2_CID_GAIN:              target = &cap->gain;       break;
    case V4L2_CID_EXPOSURE_ABSOLUTE: target = &cap->exposure;   break;
    case V4L2_CID_FOCUS_ABSOLUTE:    target = &cap->focus;      break;
    }
    if (target)
        *target = Range(cap->queryctrl.minimum, cap->queryctrl.maximum);
}
// Probes every control the device may expose and caches their ranges.
static void v4l2_scan_controls(CvCaptureCAM_V4L* capture)
{
    // Standard user-class controls.
    for (__u32 ctrl_id = V4L2_CID_BASE; ctrl_id < V4L2_CID_LASTP1; ++ctrl_id)
        v4l2_control_range(capture, ctrl_id);
    // Driver-private controls: probe until the driver reports an error
    // (v4l2_control_range leaves errno set when the ioctl fails).
    for (__u32 ctrl_id = V4L2_CID_PRIVATE_BASE; ; ++ctrl_id)
    {
        errno = 0;
        v4l2_control_range(capture, ctrl_id);
        if (errno != 0)
            break;
    }
    // Focus lives in the camera-class range and is not covered above.
    v4l2_control_range(capture, V4L2_CID_FOCUS_ABSOLUTE);
}
// Requests capture->fps frames per second by setting a frame interval of
// 1/fps. Returns the raw ioctl result (0 on success, -1 on failure).
static int v4l2_set_fps(CvCaptureCAM_V4L* capture) {
    v4l2_streamparm parm = v4l2_streamparm();
    parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    parm.parm.capture.timeperframe.numerator = 1;
    parm.parm.capture.timeperframe.denominator = capture->fps;
    return ioctl(capture->deviceHandle, VIDIOC_S_PARM, &parm);
}
// Number of channels the raw payload is exposed with when conversion to
// BGR is disabled. Returns 0 for formats OpenCV cannot convert at all.
static int v4l2_num_channels(__u32 palette) {
    // Packed 3-byte RGB variants.
    if (palette == V4L2_PIX_FMT_BGR24 || palette == V4L2_PIX_FMT_RGB24)
        return 3;
    // Packed 4:2:2 YUV: two bytes per pixel.
    if (palette == V4L2_PIX_FMT_YUYV || palette == V4L2_PIX_FMT_UYVY)
        return 2;
    // Planar, compressed and grey formats are exposed as one channel.
    if (palette == V4L2_PIX_FMT_YVU420 || palette == V4L2_PIX_FMT_YUV420 ||
        palette == V4L2_PIX_FMT_MJPEG || palette == V4L2_PIX_FMT_JPEG ||
        palette == V4L2_PIX_FMT_Y16 || palette == V4L2_PIX_FMT_GREY)
        return 1;
    return 0;
}
// Shapes capture->frame to match the negotiated format. In convert_rgb
// mode an 8-bit BGR buffer of width x height is allocated. In raw mode the
// header mirrors the native payload (JPEG: one row of bytes, planar 4:2:0:
// height*3/2 rows, Y16: 16-bit depth) and no pixel buffer is allocated —
// retrieveFrame later points imageData at the capture buffer directly.
static void v4l2_create_frame(CvCaptureCAM_V4L *capture) {
    // Explicit casts avoid narrowing __u32 -> int inside a braced init.
    CvSize size = cvSize((int)capture->form.fmt.pix.width,
                         (int)capture->form.fmt.pix.height);
    int channels = 3;
    int depth = IPL_DEPTH_8U;
    if (!capture->convert_rgb) {
        channels = v4l2_num_channels(capture->palette);
        switch (capture->palette) {
        case V4L2_PIX_FMT_MJPEG:
        case V4L2_PIX_FMT_JPEG:
            // Compressed payload: expose it as a single row of bytes.
            size = cvSize(capture->buffers[capture->bufferIndex].length, 1);
            break;
        case V4L2_PIX_FMT_YVU420:
        case V4L2_PIX_FMT_YUV420:
            // Planar 4:2:0: Y plane plus two half-height chroma planes.
            size.height = size.height * 3 / 2;
            break;
        case V4L2_PIX_FMT_Y16:
            // 16-bit grey. (The former nested !convert_rgb re-check was
            // redundant — this branch is only reached in raw mode.)
            depth = IPL_DEPTH_16U;
            break;
        }
    }
    cvInitImageHeader(&capture->frame, size, depth, channels);
    if (capture->convert_rgb) {
        capture->frame.imageData = (char*)cvAlloc(capture->frame.imageSize);
    }
    capture->frame_allocated = capture->convert_rgb;
}
// Full V4L2 initialization: opens the device, probes control ranges,
// negotiates a pixel format, sets the frame rate, requests and mmap()s the
// streaming buffers, and builds the output frame header. Returns 1 on
// success, -1 on failure (the device is closed on every failure path).
static int _capture_V4L2 (CvCaptureCAM_V4L *capture)
{
const char* deviceName = capture->deviceName.c_str();
if (try_init_v4l2(capture, deviceName) != 1) {
return -1;
}
// Cache min/max for every control (brightness, exposure, ...).
v4l2_scan_controls(capture);
if ((capture->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: device %s is unable to capture video memory.\n",deviceName);
icvCloseCAM_V4L(capture);
return -1;
}
// If the current input is not input 0, switch to the configured channel.
if(capture->inp.index > 0) {
capture->inp = v4l2_input();
capture->inp.index = CHANNEL_NUMBER;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp))
{
fprintf (stderr, "VIDEOIO ERROR: V4L2: Aren't able to set channel number\n");
icvCloseCAM_V4L (capture);
return -1;
}
}
// Read the current format first, then negotiate the palette/size on top.
capture->form = v4l2_format();
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: Could not obtain specifics of capture window.\n\n");
icvCloseCAM_V4L(capture);
return -1;
}
if (autosetup_capture_mode_v4l2(capture) == -1)
return -1;
// Best effort: a failing VIDIOC_S_PARM is deliberately ignored.
v4l2_set_fps(capture);
// Buggy-driver workaround from the V4L2 capture example: enforce sane
// minimums. NOTE(review): assumes at least 2 bytes per pixel per line,
// which over-estimates for planar formats — confirm this is intentional.
unsigned int min;
min = capture->form.fmt.pix.width * 2;
if (capture->form.fmt.pix.bytesperline < min)
capture->form.fmt.pix.bytesperline = min;
min = capture->form.fmt.pix.bytesperline * capture->form.fmt.pix.height;
if (capture->form.fmt.pix.sizeimage < min)
capture->form.fmt.pix.sizeimage = min;
capture->req = v4l2_requestbuffers();
unsigned int buffer_number = capture->bufferSize;
// If the driver grants fewer buffers than requested, retry with one less
// until a single buffer also fails.
try_again:
capture->req.count = buffer_number;
capture->req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
capture->req.memory = V4L2_MEMORY_MMAP;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_REQBUFS, &capture->req))
{
if (EINVAL == errno)
{
fprintf (stderr, "%s does not support memory mapping\n", deviceName);
} else {
perror ("VIDIOC_REQBUFS");
}
icvCloseCAM_V4L (capture);
return -1;
}
if (capture->req.count < buffer_number)
{
if (buffer_number == 1)
{
fprintf (stderr, "Insufficient buffer memory on %s\n", deviceName);
icvCloseCAM_V4L (capture);
return -1;
} else {
buffer_number--;
fprintf (stderr, "Insufficient buffer memory on %s -- decreaseing buffers\n", deviceName);
goto try_again;
}
}
// Map every granted buffer into our address space.
for (unsigned int n_buffers = 0; n_buffers < capture->req.count; ++n_buffers)
{
v4l2_buffer buf = v4l2_buffer();
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYBUF, &buf)) {
perror ("VIDIOC_QUERYBUF");
icvCloseCAM_V4L (capture);
return -1;
}
capture->buffers[n_buffers].length = buf.length;
capture->buffers[n_buffers].start =
mmap (NULL ,
buf.length,
PROT_READ | PROT_WRITE ,
MAP_SHARED ,
capture->deviceHandle, buf.m.offset);
if (MAP_FAILED == capture->buffers[n_buffers].start) {
perror ("mmap");
icvCloseCAM_V4L (capture);
return -1;
}
// The spare slot keeps a heap copy of the latest frame so it stays
// valid after the originating buffer is re-queued.
if (n_buffers == 0) {
capture->buffers[MAX_V4L_BUFFERS].start = malloc( buf.length );
capture->buffers[MAX_V4L_BUFFERS].length = buf.length;
}
}
v4l2_create_frame(capture);
// Defer queueing/streaming until the first grabFrame().
capture->FirstCapture = 1;
return 1;
};
// Tears the device down and reopens it with the current settings.
// icvCloseCAM_V4L clears deviceName, so it is saved across the close.
static bool v4l2_reset( CvCaptureCAM_V4L* capture) {
    const String savedName = capture->deviceName;
    icvCloseCAM_V4L(capture);
    capture->deviceName = savedName;
    return _capture_V4L2(capture) == 1;
}
// Opens camera number _index (/dev/video<_index>). A negative index probes
// /dev/video0../dev/video<MAX_CAMERAS-1> and takes the first node that can
// be opened.
bool CvCaptureCAM_V4L::open(int _index)
{
    cv::String name;
    if (_index >= 0)
    {
        name = cv::format("/dev/video%d", _index);
    }
    else
    {
        for (int autoindex = 0; autoindex < MAX_CAMERAS; ++autoindex)
        {
            name = cv::format("/dev/video%d", autoindex);
            int h = ::open(name.c_str(), O_RDONLY);
            if (h != -1)
            {
                ::close(h);
                _index = autoindex;
                break;
            }
        }
        if (_index < 0)
        {
            fprintf(stderr, "VIDEOIO ERROR: V4L: can't find camera device\n");
            name.clear();
            return false;
        }
    }
    const bool res = open(name.c_str());
    if (!res)
    {
        fprintf(stderr, "VIDEOIO ERROR: V4L: can't open camera by index %d\n", _index);
    }
    return res;
}
// Opens the given device node with default settings (640x480 @ 30 fps,
// BGR conversion enabled). Returns true when initialization succeeded.
bool CvCaptureCAM_V4L::open(const char* _deviceName)
{
#ifndef NDEBUG
    fprintf(stderr, "(DEBUG) V4L: opening %s\n", _deviceName);
#endif
    // Reset the configurable state to its defaults before initializing.
    deviceName = _deviceName;
    FirstCapture = 1;
    width = DEFAULT_V4L_WIDTH;
    height = DEFAULT_V4L_HEIGHT;
    width_set = 0;
    height_set = 0;
    bufferSize = DEFAULT_V4L_BUFFERS;
    fps = DEFAULT_V4L_FPS;
    convert_rgb = true;
    returnFrame = true;
    return _capture_V4L2(this) == 1;
}
// Dequeues one filled buffer from the driver. Returns 1 when a frame was
// copied out, 0 when no frame is ready yet (EAGAIN / recoverable EIO),
// -1 on a fatal error (returnFrame is cleared so retrieveFrame fails too).
static int read_frame_v4l2(CvCaptureCAM_V4L* capture) {
v4l2_buffer buf = v4l2_buffer();
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_DQBUF, &buf)) {
switch (errno) {
case EAGAIN:
return 0;
case EIO:
// Transient I/O error: re-queue the buffer if the driver left it
// dangling, then let the caller retry.
if (!(buf.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE)))
{
if (ioctl(capture->deviceHandle, VIDIOC_QBUF, &buf) == -1)
{
return 0;
}
}
return 0;
default:
capture->returnFrame = false;
perror ("VIDIOC_DQBUF");
return -1;
}
}
assert(buf.index < capture->req.count);
// Copy the payload into the spare slot so the data stays valid after the
// originating buffer is handed back to the driver below.
memcpy(capture->buffers[MAX_V4L_BUFFERS].start,
capture->buffers[buf.index].start,
capture->buffers[MAX_V4L_BUFFERS].length );
capture->bufferIndex = MAX_V4L_BUFFERS;
capture->timestamp = buf.timestamp;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf))
perror ("VIDIOC_QBUF");
return 1;
}
// Waits (select, 10 second timeout) until the device signals a frame, then
// dequeues it. Returns 1 on frame, -1 on fatal read error, 0 on timeout.
static int mainloop_v4l2(CvCaptureCAM_V4L* capture) {
for (;;) {
fd_set fds;
struct timeval tv;
int r;
FD_ZERO (&fds);
FD_SET (capture->deviceHandle, &fds);
tv.tv_sec = 10;
tv.tv_usec = 0;
r = select (capture->deviceHandle+1, &fds, NULL, NULL, &tv);
if (-1 == r) {
if (EINTR == errno)
continue;
perror ("select");
}
// NOTE(review): after a non-EINTR select error we fall through and still
// attempt a read instead of bailing out — confirm this is intentional.
if (0 == r) {
fprintf (stderr, "select timeout\n");
break;
}
int returnCode = read_frame_v4l2 (capture);
if(returnCode == -1)
return -1;
// read_frame_v4l2 returns 0 for EAGAIN/EIO: loop and wait again.
if(returnCode == 1)
return 1;
}
return 0;
}
// Grabs one frame. On the very first call it queues all streaming buffers
// and starts the stream; with V4L_ABORT_BADJPEG defined it also captures
// and discards one frame (presumably to skip a corrupt first JPEG frame —
// the macro name suggests so).
static bool icvGrabFrameCAM_V4L(CvCaptureCAM_V4L* capture) {
if (capture->FirstCapture) {
{
// Hand every mmap'ed buffer to the driver.
for (capture->bufferIndex = 0;
capture->bufferIndex < ((int)capture->req.count);
++capture->bufferIndex)
{
v4l2_buffer buf = v4l2_buffer();
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = (unsigned long)capture->bufferIndex;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) {
perror ("VIDIOC_QBUF");
return false;
}
}
// Start streaming.
capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_STREAMON,
&capture->type)) {
perror ("VIDIOC_STREAMON");
return false;
}
}
#if defined(V4L_ABORT_BADJPEG)
// Grab and throw away the first frame.
if(mainloop_v4l2(capture) != 1)
return false;
#endif
capture->FirstCapture = 0;
}
if(mainloop_v4l2(capture) != 1) return false;
return true;
}
// Clamps a 16.16 fixed-point sample into an 8-bit channel: values whose
// integer part would exceed 255 saturate to 255, values of 0xffff or less
// (including negatives) become 0, otherwise the integer part is taken.
static inline unsigned char fixed_to_u8(int x)
{
    if (x > 0xffffff)
        return 0xff;
    if (x <= 0xffff)
        return 0;
    return (unsigned char)(x >> 16);
}
// Converts one YUV 4:1:1 block — four luma samples sharing one (u, v) pair,
// with u/v already centred around 0 — into 4 consecutive BGR pixels.
// The third int parameter is unused (kept for signature compatibility).
static inline void
move_411_block(int yTL, int yTR, int yBL, int yBR, int u, int v,
               int /* unused */, unsigned char * rgb)
{
    // Integer YUV->RGB in 16.16 fixed point.
    const int rvScale = 91881;
    const int guScale = -22553;
    const int gvScale = -46801;
    const int buScale = 116129;
    const int yScale = 65536;
    const int r = rvScale * v;
    const int g = guScale * u + gvScale * v;
    const int b = buScale * u;
    const int y[4] = { yTL * yScale, yTR * yScale, yBL * yScale, yBR * yScale };
    for (int k = 0; k < 4; ++k)
    {
        rgb[3 * k + 0] = fixed_to_u8(b + y[k]);
        rgb[3 * k + 1] = fixed_to_u8(g + y[k]);
        rgb[3 * k + 2] = fixed_to_u8(r + y[k]);
    }
}
static inline void
yuv420p_to_rgb24(int width, int height, uchar* src, uchar* dst, bool isYUV)
{
cvtColor(Mat(height * 3 / 2, width, CV_8U, src), Mat(height, width, CV_8UC3, dst),
isYUV ? COLOR_YUV2BGR_IYUV : COLOR_YUV2BGR_YV12);
}
// Converts planar YUV 4:1:1 into 24-bit BGR. The input is a Y plane of
// width*height bytes followed by U and V planes of width*height/4 bytes
// each; every horizontal run of four Y samples shares one U and one V.
static void
yuv411p_to_rgb24(int width, int height,
                 unsigned char *pIn0, unsigned char *pOut0)
{
    const int numpix = width * height;
    const int bytes = 24 >> 3;  // 3 output bytes per pixel
    int i, j, y00, y01, y10, y11, u, v;
    unsigned char *pY = pIn0;
    unsigned char *pU = pY + numpix;
    unsigned char *pV = pU + numpix / 4;
    unsigned char *pOut = pOut0;
    // BUGFIX: the loop previously ran with 'j <= height', processing one
    // row too many — reading past the Y plane and writing past the output
    // buffer. 'j < height' iterates exactly height rows.
    for (j = 0; j < height; j++) {
        for (i = 0; i <= width - 4; i += 4) {
            y00 = *pY;
            y01 = *(pY + 1);
            y10 = *(pY + 2);
            y11 = *(pY + 3);
            u = (*pU++) - 128;
            v = (*pV++) - 128;
            move_411_block(y00, y01, y10, y11, u, v,
                           width, pOut);
            pY += 4;
            pOut += 4 * bytes;
        }
    }
}
static void
yuyv_to_rgb24(int width, int height, unsigned char* src, unsigned char* dst) {
cvtColor(Mat(height, width, CV_8UC2, src), Mat(height, width, CV_8UC3, dst),
COLOR_YUV2BGR_YUYV);
}
static inline void
uyvy_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
{
cvtColor(Mat(height, width, CV_8UC2, src), Mat(height, width, CV_8UC3, dst),
COLOR_YUV2BGR_UYVY);
}
static inline void
y16_to_rgb24 (int width, int height, unsigned char* src, unsigned char* dst)
{
Mat gray8;
Mat(height, width, CV_16UC1, src).convertTo(gray8, CV_8U, 0.00390625);
cvtColor(gray8,Mat(height, width, CV_8UC3, dst),COLOR_GRAY2BGR);
}
static inline void
y8_to_rgb24 (int width, int height, unsigned char* src, unsigned char* dst)
{
Mat gray8(height, width, CV_8UC1, src);
cvtColor(gray8,Mat(height, width, CV_8UC3, dst),COLOR_GRAY2BGR);
}
#ifdef HAVE_JPEG
// Decodes an (M)JPEG payload of 'length' bytes straight into the
// preallocated destination image. Returns true only when decoding
// succeeded AND the decoded frame has the expected geometry.
static bool
mjpeg_to_rgb24(int width, int height, unsigned char* src, int length, IplImage* dst) {
    Mat decoded = cvarrToMat(dst);
    imdecode(Mat(1, length, CV_8U, src), IMREAD_COLOR, &decoded);
    return decoded.data && decoded.cols == width && decoded.rows == height;
}
#endif
// Demosaics an 8-bit BGGR Bayer pattern into packed 24-bit output, writing
// 3 bytes per input pixel. Interior pixels use 2- or 4-neighbour averages;
// border pixels fall back to whichever neighbours exist.
// NOTE(review): border handling keys off 'i > WIDTH' rather than per-row
// coordinates, so some row-0 / last-column pixels take the fallback path —
// presumably acceptable for a border; confirm if exact edges matter.
static void bayer2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, unsigned char *dst)
{
long int i;
unsigned char *rawpt, *scanpt;
long int size;
rawpt = src;
scanpt = dst;
size = WIDTH*HEIGHT;
for ( i = 0; i < size; i++ ) {
if ( (i/WIDTH) % 2 == 0 ) {
// Even row (B G B G ...).
if ( (i % 2) == 0 ) {
// B site: blue is direct, green/red are interpolated.
if ( (i > WIDTH) && ((i % WIDTH) > 0) ) {
*scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+
*(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4;
*scanpt++ = (*(rawpt-1)+*(rawpt+1)+
*(rawpt+WIDTH)+*(rawpt-WIDTH))/4;
*scanpt++ = *rawpt;
} else {
// Border: take the nearest available neighbours.
*scanpt++ = *(rawpt+WIDTH+1);
*scanpt++ = (*(rawpt+1)+*(rawpt+WIDTH))/2;
*scanpt++ = *rawpt;
}
} else {
// G site on a blue row.
if ( (i > WIDTH) && ((i % WIDTH) < (WIDTH-1)) ) {
*scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2;
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt-1)+*(rawpt+1))/2;
} else {
*scanpt++ = *(rawpt+WIDTH);
*scanpt++ = *rawpt;
*scanpt++ = *(rawpt-1);
}
}
} else {
// Odd row (G R G R ...).
if ( (i % 2) == 0 ) {
// G site on a red row.
if ( (i < (WIDTH*(HEIGHT-1))) && ((i % WIDTH) > 0) ) {
*scanpt++ = (*(rawpt-1)+*(rawpt+1))/2;
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2;
} else {
*scanpt++ = *(rawpt+1);
*scanpt++ = *rawpt;
*scanpt++ = *(rawpt-WIDTH);
}
} else {
// R site: red is direct, green/blue are interpolated.
if ( i < (WIDTH*(HEIGHT-1)) && ((i % WIDTH) < (WIDTH-1)) ) {
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt-1)+*(rawpt+1)+
*(rawpt-WIDTH)+*(rawpt+WIDTH))/4;
*scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+
*(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4;
} else {
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt-1)+*(rawpt-WIDTH))/2;
*scanpt++ = *(rawpt-WIDTH-1);
}
}
}
rawpt++;
}
}
// Demosaics an 8-bit SGBRG Bayer pattern (rows alternate G B / R G) into
// packed 24-bit output, 3 bytes per input pixel. Interior pixels use 2- or
// 4-neighbour averages; border pixels fall back to whichever neighbours
// exist. Index-based rewrite of the original pointer-bumping loop.
static void sgbrg2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, unsigned char *dst)
{
    const long int size = WIDTH * HEIGHT;
    long int o = 0;  // write position in dst
    for (long int i = 0; i < size; i++)
    {
        const bool evenRow = ((i / WIDTH) % 2) == 0;
        const bool evenCol = (i % 2) == 0;
        const long int col = i % WIDTH;
        if (evenRow)
        {
            if (evenCol)
            {
                // G site on a G/B row.
                if (i > WIDTH && col > 0)
                {
                    dst[o++] = (src[i-1] + src[i+1]) / 2;
                    dst[o++] = src[i];
                    dst[o++] = (src[i-WIDTH] + src[i+WIDTH]) / 2;
                }
                else
                {
                    dst[o++] = src[i+1];
                    dst[o++] = src[i];
                    dst[o++] = src[i+WIDTH];
                }
            }
            else
            {
                // B site.
                if (i > WIDTH && col < WIDTH - 1)
                {
                    dst[o++] = src[i];
                    dst[o++] = (src[i-1] + src[i+1] + src[i-WIDTH] + src[i+WIDTH]) / 4;
                    dst[o++] = (src[i-WIDTH-1] + src[i-WIDTH+1] + src[i+WIDTH-1] + src[i+WIDTH+1]) / 4;
                }
                else
                {
                    dst[o++] = src[i];
                    dst[o++] = (src[i-1] + src[i+WIDTH]) / 2;
                    dst[o++] = src[i+WIDTH-1];
                }
            }
        }
        else
        {
            if (evenCol)
            {
                // R site.
                if (i < WIDTH * (HEIGHT - 1) && col > 0)
                {
                    dst[o++] = (src[i-WIDTH-1] + src[i-WIDTH+1] + src[i+WIDTH-1] + src[i+WIDTH+1]) / 4;
                    dst[o++] = (src[i-1] + src[i+1] + src[i-WIDTH] + src[i+WIDTH]) / 4;
                    dst[o++] = src[i];
                }
                else
                {
                    dst[o++] = src[i-WIDTH+1];
                    dst[o++] = (src[i+1] + src[i-WIDTH]) / 2;
                    dst[o++] = src[i];
                }
            }
            else
            {
                // G site on an R/G row.
                if (i < WIDTH * (HEIGHT - 1) && col < WIDTH - 1)
                {
                    dst[o++] = (src[i-WIDTH] + src[i+WIDTH]) / 2;
                    dst[o++] = src[i];
                    dst[o++] = (src[i-1] + src[i+1]) / 2;
                }
                else
                {
                    dst[o++] = src[i-WIDTH];
                    dst[o++] = src[i];
                    dst[o++] = src[i-1];
                }
            }
        }
    }
}
static inline void
rgb24_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
{
cvtColor(Mat(height, width, CV_8UC3, src), Mat(height, width, CV_8UC3, dst), COLOR_RGB2BGR);
}
// Clamp an int into the 0..255 byte range.
#define CLAMP(x) ((x)<0?0:((x)>255)?255:(x))
// One entry of the SN9C10X Huffman-style code table: either an absolute
// 8-bit value (is_abs) or a signed delta, plus the code length in bits.
typedef struct {
    int is_abs;
    int len;
    int val;
} code_table_t;
static code_table_t table[256];
static int init_done = 0;
// Builds the 256-entry decode table indexed by the next 8 input bits.
// The rules are tested in order; the first whose masked bits match wins
// (same precedence as the original if/else chain).
static void sonix_decompress_init(void)
{
    struct Rule { unsigned mask, bits; int val, len; };
    static const Rule delta_rules[] = {
        { 0x80, 0x00,   0, 1 },
        { 0xE0, 0x80,  +4, 3 },
        { 0xE0, 0xA0,  -4, 3 },
        { 0xF0, 0xD0, +11, 4 },
        { 0xF0, 0xF0, -11, 4 },
        { 0xF8, 0xC8, +20, 5 },
        { 0xFC, 0xC0, -20, 6 },
        { 0xFC, 0xC4,   0, 8 },
    };
    for (int i = 0; i < 256; i++) {
        code_table_t entry = { 0, 0, 0 };
        bool matched = false;
        for (size_t r = 0; r < sizeof(delta_rules) / sizeof(delta_rules[0]); ++r) {
            if (((unsigned)i & delta_rules[r].mask) == delta_rules[r].bits) {
                entry.val = delta_rules[r].val;
                entry.len = delta_rules[r].len;
                matched = true;
                break;
            }
        }
        // 1110xxxx encodes an absolute value: xxxx scaled to the high nibble.
        if (!matched && (i & 0xF0) == 0xE0) {
            entry.is_abs = 1;
            entry.val = (i & 0x0F) << 4;
            entry.len = 8;
        }
        table[i] = entry;
    }
    init_done = 1;
}
// Decompresses an SN9C10X bitstream into width*height Bayer bytes. The
// first two pixels of each of the first two rows are stored raw; every
// other pixel is a table-decoded delta from (or absolute replacement of)
// a prediction taken from the left and/or upper same-colour neighbour.
// Returns -1 if sonix_decompress_init() has not been called, else 0.
static int sonix_decompress(int width, int height, unsigned char *inp, unsigned char *outp)
{
int row, col;
int val;
int bitpos;
unsigned char code;
unsigned char *addr;
if (!init_done) {
return -1;
}
bitpos = 0;
for (row = 0; row < height; row++) {
col = 0;
// Rows 0 and 1 start with two raw 8-bit samples (no prediction yet).
if (row < 2) {
addr = inp + (bitpos >> 3);
code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
bitpos += 8;
*outp++ = code;
addr = inp + (bitpos >> 3);
code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
bitpos += 8;
*outp++ = code;
col += 2;
}
while (col < width) {
// Peek the next 8 bits; the table tells how many were consumed.
addr = inp + (bitpos >> 3);
code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
bitpos += table[code].len;
val = table[code].val;
if (!table[code].is_abs) {
// Delta code: add the prediction. Same-colour neighbours are two
// pixels left (outp[-2]) and two rows up (outp[-2*width]).
if (col < 2) {
val += outp[-2*width];
}
else if (row < 2) {
val += outp[-2];
}
else {
val += (outp[-2] + outp[-2*width]) / 2;
}
}
*outp++ = CLAMP(val);
col++;
}
}
return 0;
}
// Converts the most recently grabbed buffer into capture->frame. In raw
// mode the frame header is pointed directly at the capture buffer; in
// convert_rgb mode the payload is converted to 8-bit BGR according to the
// negotiated palette. Returns 0 if a fatal dequeue error occurred or JPEG
// decoding failed.
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) {
    // Recreate the output image when the conversion mode changed or, in raw
    // mode, when the payload size no longer matches the image buffer.
    bool recreate_frame = capture->frame_allocated != capture->convert_rgb;
    if (!capture->convert_rgb) {
        recreate_frame += capture->frame.imageSize != (int)capture->buffers[capture->bufferIndex].length;
    }
    if (recreate_frame) {
        if (capture->frame_allocated)
            cvFree(&capture->frame.imageData);
        v4l2_create_frame(capture);
    }
    if (!capture->convert_rgb) {
        // Raw mode: hand back the captured bytes without copying.
        capture->frame.imageData = (char*)capture->buffers[capture->bufferIndex].start;
        return &capture->frame;
    }
    switch (capture->palette)
    {
    case V4L2_PIX_FMT_BGR24:
        // Already in the output format; straight copy.
        memcpy((char *)capture->frame.imageData,
               (char *)capture->buffers[capture->bufferIndex].start,
               capture->frame.imageSize);
        break;
    case V4L2_PIX_FMT_YVU420:
    case V4L2_PIX_FMT_YUV420:
        yuv420p_to_rgb24(capture->form.fmt.pix.width,
                         capture->form.fmt.pix.height,
                         (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                         (unsigned char*)capture->frame.imageData,
                         capture->palette == V4L2_PIX_FMT_YUV420);
        break;
    case V4L2_PIX_FMT_YUV411P:
        yuv411p_to_rgb24(capture->form.fmt.pix.width,
                         capture->form.fmt.pix.height,
                         (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                         (unsigned char*)capture->frame.imageData);
        break;
#ifdef HAVE_JPEG
    case V4L2_PIX_FMT_MJPEG:
    case V4L2_PIX_FMT_JPEG:
        if (!mjpeg_to_rgb24(capture->form.fmt.pix.width,
                            capture->form.fmt.pix.height,
                            (unsigned char*)(capture->buffers[capture->bufferIndex]
                                             .start),
                            capture->buffers[capture->bufferIndex].length,
                            &capture->frame))
            return 0;
        break;
#endif
    case V4L2_PIX_FMT_YUYV:
        yuyv_to_rgb24(capture->form.fmt.pix.width,
                      capture->form.fmt.pix.height,
                      (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                      (unsigned char*)capture->frame.imageData);
        break;
    case V4L2_PIX_FMT_UYVY:
        uyvy_to_rgb24(capture->form.fmt.pix.width,
                      capture->form.fmt.pix.height,
                      (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                      (unsigned char*)capture->frame.imageData);
        break;
    case V4L2_PIX_FMT_SBGGR8:
        bayer2rgb24(capture->form.fmt.pix.width,
                    capture->form.fmt.pix.height,
                    (unsigned char*)capture->buffers[capture->bufferIndex].start,
                    (unsigned char*)capture->frame.imageData);
        break;
    case V4L2_PIX_FMT_SN9C10X:
        // NOTE(review): the decompressed Bayer data is staged in
        // buffers[(bufferIndex+1) % req.count] — a live streaming buffer
        // used as scratch. Kept as-is to preserve behavior; a dedicated
        // scratch buffer would be cleaner. Verify against the driver.
        sonix_decompress_init();
        sonix_decompress(capture->form.fmt.pix.width,
                         capture->form.fmt.pix.height,
                         (unsigned char*)capture->buffers[capture->bufferIndex].start,
                         (unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start);
        bayer2rgb24(capture->form.fmt.pix.width,
                    capture->form.fmt.pix.height,
                    (unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start,
                    (unsigned char*)capture->frame.imageData);
        break;
    case V4L2_PIX_FMT_SGBRG8:
        // BUGFIX: convert the frame that was just captured. The old code
        // read buffers[(bufferIndex+1) % req.count] — since bufferIndex is
        // MAX_V4L_BUFFERS after a dequeue, that indexed an arbitrary live
        // streaming buffer instead of the stable copy.
        sgbrg2rgb24(capture->form.fmt.pix.width,
                    capture->form.fmt.pix.height,
                    (unsigned char*)capture->buffers[capture->bufferIndex].start,
                    (unsigned char*)capture->frame.imageData);
        break;
    case V4L2_PIX_FMT_RGB24:
        // BUGFIX: same wrong-buffer fix as the SGBRG8 case above.
        rgb24_to_rgb24(capture->form.fmt.pix.width,
                       capture->form.fmt.pix.height,
                       (unsigned char*)capture->buffers[capture->bufferIndex].start,
                       (unsigned char*)capture->frame.imageData);
        break;
    case V4L2_PIX_FMT_Y16:
        if (capture->convert_rgb) {
            y16_to_rgb24(capture->form.fmt.pix.width,
                         capture->form.fmt.pix.height,
                         (unsigned char*)capture->buffers[capture->bufferIndex].start,
                         (unsigned char*)capture->frame.imageData);
        } else {
            memcpy((char *)capture->frame.imageData,
                   (char *)capture->buffers[capture->bufferIndex].start,
                   capture->frame.imageSize);
        }
        break;
    case V4L2_PIX_FMT_GREY:
        if (capture->convert_rgb) {
            y8_to_rgb24(capture->form.fmt.pix.width,
                        capture->form.fmt.pix.height,
                        (unsigned char*)capture->buffers[capture->bufferIndex].start,
                        (unsigned char*)capture->frame.imageData);
        } else {
            memcpy((char *)capture->frame.imageData,
                   (char *)capture->buffers[capture->bufferIndex].start,
                   capture->frame.imageSize);
        }
        break;
    }
    if (capture->returnFrame)
        return(&capture->frame);
    else
        return 0;
}
// Maps an OpenCV capture property id to its V4L2 control id, or __u32(-1)
// when the property has no V4L2 control equivalent.
static inline __u32 capPropertyToV4L2(int prop) {
    static const struct { int cv_id; __u32 cid; } mapping[] = {
        { CV_CAP_PROP_BRIGHTNESS,    V4L2_CID_BRIGHTNESS },
        { CV_CAP_PROP_CONTRAST,      V4L2_CID_CONTRAST },
        { CV_CAP_PROP_SATURATION,    V4L2_CID_SATURATION },
        { CV_CAP_PROP_HUE,           V4L2_CID_HUE },
        { CV_CAP_PROP_GAIN,          V4L2_CID_GAIN },
        { CV_CAP_PROP_AUTO_EXPOSURE, V4L2_CID_EXPOSURE_AUTO },
        { CV_CAP_PROP_EXPOSURE,      V4L2_CID_EXPOSURE_ABSOLUTE },
        { CV_CAP_PROP_AUTOFOCUS,     V4L2_CID_FOCUS_AUTO },
        { CV_CAP_PROP_FOCUS,         V4L2_CID_FOCUS_ABSOLUTE },
    };
    for (size_t i = 0; i < sizeof(mapping) / sizeof(mapping[0]); ++i) {
        if (mapping[i].cv_id == prop)
            return mapping[i].cid;
    }
    return __u32(-1);
}
// Reads a capture property. Geometry/format/timing properties come from the
// driver or cached state; everything else is treated as a V4L2 control and
// returned NORMALIZED to [0, 1] over its driver-reported range. Returns -1
// on any failure.
static double icvGetPropertyCAM_V4L (const CvCaptureCAM_V4L* capture,
int property_id ) {
{
// Always re-query the active format so width/height reflect the driver.
v4l2_format form;
memset(&form, 0, sizeof(v4l2_format));
form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &form)) {
perror ("VIDIOC_G_FMT");
return -1;
}
switch (property_id) {
case CV_CAP_PROP_FRAME_WIDTH:
return form.fmt.pix.width;
case CV_CAP_PROP_FRAME_HEIGHT:
return form.fmt.pix.height;
case CV_CAP_PROP_FOURCC:
case CV_CAP_PROP_MODE:
return capture->palette;
case CV_CAP_PROP_FORMAT:
return CV_MAKETYPE(IPL2CV_DEPTH(capture->frame.depth), capture->frame.nChannels);
case CV_CAP_PROP_CONVERT_RGB:
return capture->convert_rgb;
case CV_CAP_PROP_BUFFERSIZE:
return capture->bufferSize;
}
if(property_id == CV_CAP_PROP_FPS) {
v4l2_streamparm sp = v4l2_streamparm();
sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(capture->deviceHandle, VIDIOC_G_PARM, &sp) < 0){
fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to get camera FPS\n");
return -1;
}
// fps = denominator / numerator of the time-per-frame fraction.
return sp.parm.capture.timeperframe.denominator / (double)sp.parm.capture.timeperframe.numerator;
}
if(property_id == CV_CAP_PROP_POS_MSEC) {
// Driver timestamp of the last frame, in milliseconds.
if (capture->FirstCapture) {
return 0;
} else {
return 1000 * capture->timestamp.tv_sec + ((double) capture->timestamp.tv_usec) / 1000;
}
}
// Everything else maps onto a V4L2 control.
__u32 v4l2id = capPropertyToV4L2(property_id);
if(v4l2id == __u32(-1)) {
fprintf(stderr,
"VIDEOIO ERROR: V4L2: getting property #%d is not supported\n",
property_id);
return -1;
}
v4l2_control control = {v4l2id, 0};
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_CTRL,
&control)) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: ");
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
fprintf (stderr, "Brightness");
break;
case CV_CAP_PROP_CONTRAST:
fprintf (stderr, "Contrast");
break;
case CV_CAP_PROP_SATURATION:
fprintf (stderr, "Saturation");
break;
case CV_CAP_PROP_HUE:
fprintf (stderr, "Hue");
break;
case CV_CAP_PROP_GAIN:
fprintf (stderr, "Gain");
break;
case CV_CAP_PROP_AUTO_EXPOSURE:
fprintf (stderr, "Auto Exposure");
break;
case CV_CAP_PROP_EXPOSURE:
fprintf (stderr, "Exposure");
break;
case CV_CAP_PROP_AUTOFOCUS:
fprintf (stderr, "Autofocus");
break;
case CV_CAP_PROP_FOCUS:
fprintf (stderr, "Focus");
break;
}
fprintf (stderr, " is not supported by your device\n");
return -1;
}
// Normalize the raw control value into [0, 1].
Range range = capture->getRange(property_id);
return ((double)control.value - range.start) / range.size();
}
};
// Sets one V4L2 control. 'value' is normalized: it is clamped to [0, 1]
// and scaled onto the control's driver-reported range before being sent.
static bool icvSetControl (CvCaptureCAM_V4L* capture,
                           int property_id, double value) {
    const double clamped = value < 0.0 ? 0.0 : (value > 1.0 ? 1.0 : value);
    __u32 v4l2id = capPropertyToV4L2(property_id);
    if (v4l2id == __u32(-1)) {
        fprintf(stderr,
                "VIDEOIO ERROR: V4L2: setting property #%d is not supported\n",
                property_id);
        return false;
    }
    const Range range = capture->getRange(property_id);
    v4l2_control control = {v4l2id, int(clamped * range.size() + range.start)};
    // ERANGE is tolerated: the driver clipped the value itself.
    if (ioctl(capture->deviceHandle, VIDIOC_S_CTRL, &control) == -1 && errno != ERANGE) {
        perror ("VIDIOC_S_CTRL");
        return false;
    }
    // Switching to manual exposure can change the valid absolute-exposure
    // range, so re-query it.
    if (control.id == V4L2_CID_EXPOSURE_AUTO && control.value == V4L2_EXPOSURE_MANUAL) {
        v4l2_control_range(capture, V4L2_CID_EXPOSURE_ABSOLUTE);
    }
    return true;
}
// Commits a staged width/height pair: copies the staged dimensions into the
// active configuration, reinitializes the device, then clears the staged
// values. Returns the result of the device reset.
static bool v4l2_commit_pending_size(CvCaptureCAM_V4L* capture)
{
    capture->width = capture->width_set;
    capture->height = capture->height_set;
    bool result = v4l2_reset(capture);
    capture->width_set = capture->height_set = 0;
    return result;
}
// Sets a capture property. Width and height are staged independently and
// only pushed to the driver once BOTH have been set; FPS, FOURCC and
// buffer count reinitialize the device; everything else is forwarded to
// icvSetControl as a normalized [0, 1] control value.
static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture,
                                  int property_id, double value ){
    bool retval = false;
    bool possible;
    switch (property_id) {
    case CV_CAP_PROP_FRAME_WIDTH:
        capture->width_set = cvRound(value);
        retval = capture->width_set != 0;
        if (capture->width_set != 0 && capture->height_set != 0)
            retval = v4l2_commit_pending_size(capture);
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        capture->height_set = cvRound(value);
        retval = capture->height_set != 0;
        if (capture->width_set != 0 && capture->height_set != 0)
            retval = v4l2_commit_pending_size(capture);
        break;
    case CV_CAP_PROP_FPS:
        capture->fps = value;
        retval = v4l2_reset(capture);
        break;
    case CV_CAP_PROP_CONVERT_RGB:
        // Conversion can only be enabled for formats OpenCV can convert.
        possible = v4l2_num_channels(capture->palette);
        capture->convert_rgb = bool(value) && possible;
        retval = possible || !bool(value);
        break;
    case CV_CAP_PROP_FOURCC:
    {
        __u32 old_palette = capture->palette;
        __u32 new_palette = static_cast<__u32>(value);
        capture->palette = new_palette;
        if (v4l2_reset(capture)) {
            retval = true;
        } else {
            // Roll back to the previous working format.
            capture->palette = old_palette;
            v4l2_reset(capture);
            retval = false;
        }
    }
    break;
    case CV_CAP_PROP_BUFFERSIZE:
        if ((int)value > MAX_V4L_BUFFERS || (int)value < 1) {
            fprintf(stderr, "V4L: Bad buffer size %d, buffer size must be from 1 to %d\n", (int)value, MAX_V4L_BUFFERS);
            retval = false;
        } else {
            capture->bufferSize = (int)value;
            if (capture->bufferIndex > capture->bufferSize) {
                capture->bufferIndex = 0;
            }
            retval = v4l2_reset(capture);
        }
        break;
    default:
        retval = icvSetControl(capture, property_id, value);
        break;
    }
    return retval;
}
// Stops streaming, unmaps/frees all buffers, closes the device and clears
// deviceName (which is what marks the capture as closed). Safe to call on
// a capture that never fully opened: members are zero-initialized, and
// buffer slots are only touched when non-null.
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ){
if (!capture->deviceName.empty())
{
if (capture->deviceHandle != -1)
{
// Best-effort stream stop (may fail if streaming never started).
capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl(capture->deviceHandle, VIDIOC_STREAMOFF, &capture->type)) {
perror ("Unable to stop the stream");
}
// Unmap every mapped slot; unmapped slots are null and skipped.
for (unsigned int n_buffers = 0; n_buffers < MAX_V4L_BUFFERS; ++n_buffers)
{
if (capture->buffers[n_buffers].start) {
if (-1 == munmap (capture->buffers[n_buffers].start, capture->buffers[n_buffers].length)) {
perror ("munmap");
} else {
capture->buffers[n_buffers].start = 0;
}
}
}
// The spare slot is heap-allocated, not mmap'ed.
if (capture->buffers[MAX_V4L_BUFFERS].start)
{
free(capture->buffers[MAX_V4L_BUFFERS].start);
capture->buffers[MAX_V4L_BUFFERS].start = 0;
}
}
if (capture->deviceHandle != -1)
close(capture->deviceHandle);
if (capture->frame_allocated && capture->frame.imageData)
cvFree(&capture->frame.imageData);
capture->deviceName.clear();
}
};
// CvCapture interface adapters: thin delegators to the static icv*
// implementations above.
bool CvCaptureCAM_V4L::grabFrame()
{
return icvGrabFrameCAM_V4L( this );
}
IplImage* CvCaptureCAM_V4L::retrieveFrame(int)
{
return icvRetrieveFrameCAM_V4L( this, 0 );
}
double CvCaptureCAM_V4L::getProperty( int propId ) const
{
return icvGetPropertyCAM_V4L( this, propId );
}
bool CvCaptureCAM_V4L::setProperty( int propId, double value )
{
return icvSetPropertyCAM_V4L( this, propId, value );
}
}
// Factory: creates a V4L capture for camera 'index' (negative autodetects).
// Returns NULL when the device cannot be opened.
CvCapture* cvCreateCameraCapture_V4L( int index )
{
    cv::CvCaptureCAM_V4L* capture = new cv::CvCaptureCAM_V4L();
    if (!capture->open(index))
    {
        delete capture;
        capture = NULL;
    }
    return capture;
}
// Factory: creates a V4L capture for an explicit device node path.
// Returns NULL when the device cannot be opened.
CvCapture* cvCreateCameraCapture_V4L( const char * deviceName )
{
    cv::CvCaptureCAM_V4L* capture = new cv::CvCaptureCAM_V4L();
    if (!capture->open(deviceName))
    {
        delete capture;
        capture = NULL;
    }
    return capture;
}
#endif