CoCalc Logo Icon
Store · Features · Docs · Share · Support · News · About · Sign Up · Sign In
hrydgard

CoCalc provides the best real-time collaborative environment for Jupyter Notebooks, LaTeX documents, and SageMath, scalable from individual users to large groups and classes!

GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/HW/Camera.cpp
Views: 1401
1
// Copyright (c) 2020- PPSSPP Project.
2
3
// This program is free software: you can redistribute it and/or modify
4
// it under the terms of the GNU General Public License as published by
5
// the Free Software Foundation, version 2.0 or later versions.
6
7
// This program is distributed in the hope that it will be useful,
8
// but WITHOUT ANY WARRANTY; without even the implied warranty of
9
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10
// GNU General Public License 2.0 for more details.
11
12
// A copy of the GPL 2.0 should have been included with the program.
13
// If not, see http://www.gnu.org/licenses/
14
15
// Official git repository and contact information can be found at
16
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
17
#include "ppsspp_config.h"
18
#include "Camera.h"
19
#include "Core/Config.h"
20
21
#ifdef USE_FFMPEG
// Convert a raw camera frame (inFormat, inw x inh) into a JPEG of outw x outh.
// On success, *outData owns a malloc'd JPEG buffer and *outLen its size in bytes
// (caller frees). On any failure, *outData is nullptr and *outLen is 0.
void convert_frame(int inw, int inh, unsigned char *inData, AVPixelFormat inFormat,
	int outw, int outh, unsigned char **outData, int *outLen) {
	*outData = nullptr;
	*outLen = 0;

	struct SwsContext *sws_context = sws_getContext(
		inw, inh, inFormat,
		outw, outh, AV_PIX_FMT_RGB24,
		SWS_BICUBIC, NULL, NULL, NULL);
	if (!sws_context) {
		return;
	}

	// Resize into a temporary RGB24 buffer (over-allocated at 4 bytes/px, as before).
	uint8_t *src[4] = {0};
	uint8_t *dst[4] = {0};
	int srcStride[4], dstStride[4];

	unsigned char *rgbData = (unsigned char*)malloc(outw * outh * 4);
	if (!rgbData) {
		sws_freeContext(sws_context);
		return;
	}

	av_image_fill_linesizes(srcStride, inFormat, inw);
	av_image_fill_linesizes(dstStride, AV_PIX_FMT_RGB24, outw);

	av_image_fill_pointers(src, inFormat, inh, inData, srcStride);
	av_image_fill_pointers(dst, AV_PIX_FMT_RGB24, outh, rgbData, dstStride);

	sws_scale(sws_context,
		src, srcStride, 0, inh,
		dst, dstStride);
	// BUG FIX: the SwsContext was previously leaked on every converted frame.
	sws_freeContext(sws_context);

	// Compress to JPEG; 2 bytes/px is a generous upper bound at quality 60.
	*outLen = outw * outh * 2;
	*outData = (unsigned char*)malloc(*outLen);
	if (!*outData) {
		*outLen = 0;
		free(rgbData);
		return;
	}

	jpge::params params;
	params.m_quality = 60;
	params.m_subsampling = jpge::H2V2;
	params.m_two_pass_flag = false;
	// jpge updates *outLen (passed by reference) to the actual JPEG size.
	if (!jpge::compress_image_to_jpeg_file_in_memory(
			*outData, *outLen, outw, outh, 3, rgbData, params)) {
		free(*outData);
		*outData = nullptr;
		*outLen = 0;
	}
	free(rgbData);
}
#endif //USE_FFMPEG
59
60
61
// Generate a placeholder gradient image (used when no real camera is available)
// and JPEG-compress it. On success *outData owns a malloc'd JPEG of *outLen
// bytes (caller frees); on failure — or in a non-FFmpeg build — *outData is
// nullptr and *outLen is 0 (previously *outLen was left uninitialized).
void __cameraDummyImage(int width, int height, unsigned char** outData, int* outLen) {
	*outData = nullptr;
	*outLen = 0;
#ifdef USE_FFMPEG
	unsigned char *rgbData = (unsigned char *)malloc(3 * width * height);
	if (!rgbData) {
		return;
	}
	// Simple test pattern: red/green ramp horizontally, blue ramp vertically.
	for (int y = 0; y < height; y++) {
		for (int x = 0; x < width; x++) {
			rgbData[3 * (y * width + x) + 0] = x * 255 / width;
			rgbData[3 * (y * width + x) + 1] = x * 255 / width;
			rgbData[3 * (y * width + x) + 2] = y * 255 / height;
		}
	}

	*outLen = width * height * 2;  // generous upper bound for the JPEG
	*outData = (unsigned char*)malloc(*outLen);
	if (!*outData) {  // BUG FIX: this allocation was previously unchecked
		*outLen = 0;
		free(rgbData);
		return;
	}

	jpge::params params;
	params.m_quality = 60;
	params.m_subsampling = jpge::H2V2;
	params.m_two_pass_flag = false;
	// jpge updates *outLen (passed by reference) to the actual JPEG size.
	jpge::compress_image_to_jpeg_file_in_memory(
		*outData, *outLen, width, height, 3, rgbData, params);
	free(rgbData);
#endif //USE_FFMPEG
}
88
89
90
#if defined(USING_QT_UI)
91
92
std::vector<std::string> __qt_getDeviceList() {
93
std::vector<std::string> deviceList;
94
const QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
95
for (const QCameraInfo &cameraInfo : cameras) {
96
deviceList.push_back(cameraInfo.deviceName().toStdString()
97
+ " (" + cameraInfo.description().toStdString() + ")");
98
}
99
return deviceList;
100
}
101
102
// Advertise the pixel formats this viewfinder accepts from the camera
// pipeline: RGB24 and YUYV (the two formats present() can convert).
QList<QVideoFrame::PixelFormat> MyViewfinder::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const {
	Q_UNUSED(handleType);
	QList<QVideoFrame::PixelFormat> formats;
	formats << QVideoFrame::Format_RGB24;
	formats << QVideoFrame::Format_YUYV;
	return formats;
}
110
111
// Receive one frame from the Qt camera pipeline, convert it to a JPEG at the
// requested ideal size, and push it to the emulated camera.
// Returns true if the frame was consumed, false otherwise.
bool MyViewfinder::present(const QVideoFrame &frame) {
#ifdef USE_FFMPEG
	if (frame.isValid()) {
		QVideoFrame cloneFrame(frame);
		// BUG FIX: map() can fail; calling bits() on an unmapped frame is invalid.
		if (!cloneFrame.map(QAbstractVideoBuffer::ReadOnly)) {
			return false;
		}

		unsigned char *jpegData = nullptr;
		int jpegLen = 0;

		// Only the two formats advertised in supportedPixelFormats() are handled.
		QVideoFrame::PixelFormat frameFormat = cloneFrame.pixelFormat();
		if (frameFormat == QVideoFrame::Format_RGB24) {
			convert_frame(cloneFrame.size().width(), cloneFrame.size().height(),
				(unsigned char*)cloneFrame.bits(), AV_PIX_FMT_RGB24,
				qtc_ideal_width, qtc_ideal_height, &jpegData, &jpegLen);
		} else if (frameFormat == QVideoFrame::Format_YUYV) {
			convert_frame(cloneFrame.size().width(), cloneFrame.size().height(),
				(unsigned char*)cloneFrame.bits(), AV_PIX_FMT_YUYV422,
				qtc_ideal_width, qtc_ideal_height, &jpegData, &jpegLen);
		}

		if (jpegData) {
			Camera::pushCameraImage(jpegLen, jpegData);
			free(jpegData);
			jpegData = nullptr;
		}

		cloneFrame.unmap();
		return true;
	}
#endif //USE_FFMPEG
	return false;
}
144
145
int __qt_startCapture(int width, int height) {
146
if (qt_camera != nullptr) {
147
ERROR_LOG(Log::HLE, "camera already started");
148
return -1;
149
}
150
151
char selectedCamera[80];
152
sscanf(g_Config.sCameraDevice.c_str(), "%80s ", &selectedCamera[0]);
153
154
const QList<QCameraInfo> availableCameras = QCameraInfo::availableCameras();
155
if (availableCameras.size() < 1) {
156
delete qt_camera;
157
qt_camera = nullptr;
158
ERROR_LOG(Log::HLE, "no camera found");
159
return -1;
160
}
161
for (const QCameraInfo &cameraInfo : availableCameras) {
162
if (cameraInfo.deviceName() == selectedCamera) {
163
qt_camera = new QCamera(cameraInfo);
164
}
165
}
166
if (qt_camera == nullptr) {
167
qt_camera = new QCamera();
168
if (qt_camera == nullptr) {
169
ERROR_LOG(Log::HLE, "cannot open camera");
170
return -1;
171
}
172
}
173
174
qtc_ideal_width = width;
175
qtc_ideal_height = height;
176
177
qt_viewfinder = new MyViewfinder;
178
179
QCameraViewfinderSettings viewfinderSettings = qt_camera->viewfinderSettings();
180
viewfinderSettings.setResolution(640, 480);
181
viewfinderSettings.setMinimumFrameRate(15.0);
182
viewfinderSettings.setMaximumFrameRate(15.0);
183
184
qt_camera->setViewfinderSettings(viewfinderSettings);
185
qt_camera->setViewfinder(qt_viewfinder);
186
qt_camera->start();
187
188
return 0;
189
}
190
191
int __qt_stopCapture() {
192
if (qt_camera != nullptr) {
193
qt_camera->stop();
194
qt_camera->unload();
195
delete qt_camera;
196
delete qt_viewfinder;
197
qt_camera = nullptr;
198
}
199
return 0;
200
}
201
202
//endif defined(USING_QT_UI)
203
#elif PPSSPP_PLATFORM(LINUX) && !PPSSPP_PLATFORM(ANDROID)
204
205
// List V4L2 capture devices as "index:card-name" strings, probing
// /dev/video0, /dev/video1, ... and stopping at the first missing node.
std::vector<std::string> __v4l_getDeviceList() {
	std::vector<std::string> deviceList;
#ifdef USE_FFMPEG
	for (int i = 0; i < 64; i++) {
		char path[256];
		snprintf(path, sizeof(path), "/dev/video%d", i);
		if (access(path, F_OK) < 0) {
			break;
		}
		int fd = open(path, O_RDONLY);
		if (fd < 0) {
			ERROR_LOG(Log::HLE, "Cannot open '%s'; errno=%d(%s)", path, errno, strerror(errno));
			continue;
		}
		// Query the card name; skip (but still close) devices that refuse.
		struct v4l2_capability video_cap;
		if (ioctl(fd, VIDIOC_QUERYCAP, &video_cap) < 0) {
			ERROR_LOG(Log::HLE, "VIDIOC_QUERYCAP");
		} else {
			char device[256];
			snprintf(device, sizeof(device), "%d:%s", i, video_cap.card);
			deviceList.push_back(device);
		}
		close(fd);
	}
#endif //USE_FFMPEG
	return deviceList;
}
235
236
// Capture-thread entry point: repeatedly dequeue a V4L2 buffer, convert the
// frame to JPEG, push it to the emulated camera, and requeue the buffer.
// Exits when v4l_fd is closed (set to -1) or on an unrecoverable ioctl error.
void *v4l_loop(void *data) {
#ifdef USE_FFMPEG
	SetCurrentThreadName("v4l_loop");
	while (v4l_fd >= 0) {
		struct v4l2_buffer buf;
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;

		if (ioctl(v4l_fd, VIDIOC_DQBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_DQBUF; errno=%d(%s)", errno, strerror(errno));
			switch (errno) {
			case EAGAIN:
				continue;  // no frame ready yet; retry
			default:
				return nullptr;
			}
		}

		unsigned char *jpegData = nullptr;
		int jpegLen = 0;

		if (v4l_format == V4L2_PIX_FMT_YUYV) {
			convert_frame(v4l_hw_width, v4l_hw_height, (unsigned char*)v4l_buffers[buf.index].start, AV_PIX_FMT_YUYV422,
				v4l_ideal_width, v4l_ideal_height, &jpegData, &jpegLen);
		} else if (v4l_format == V4L2_PIX_FMT_JPEG
				|| v4l_format == V4L2_PIX_FMT_MJPEG) {
			// Decompress the camera's JPEG to RGB, then rescale and re-encode.
			int width, height, req_comps;
			unsigned char *rgbData = jpgd::decompress_jpeg_image_from_memory(
				(unsigned char*)v4l_buffers[buf.index].start, buf.bytesused, &width, &height, &req_comps, 3);
			// BUG FIX: the decoder can return null on a corrupt frame; skip the
			// frame instead of feeding nullptr into convert_frame.
			if (rgbData) {
				convert_frame(v4l_hw_width, v4l_hw_height, (unsigned char*)rgbData, AV_PIX_FMT_RGB24,
					v4l_ideal_width, v4l_ideal_height, &jpegData, &jpegLen);
				free(rgbData);
			}
		}

		if (jpegData) {
			Camera::pushCameraImage(jpegLen, jpegData);
			free(jpegData);
			jpegData = nullptr;
		}

		// Hand the buffer back to the driver for the next frame.
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		if (ioctl(v4l_fd, VIDIOC_QBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_QBUF");
			return nullptr;
		}
	}
#endif //USE_FFMPEG
	// BUG FIX: non-FFmpeg builds previously had no return statement at all —
	// undefined behavior in a value-returning function.
	return nullptr;
}
289
290
// Open the V4L2 device selected in g_Config.sCameraDevice ("N:" prefix), pick
// a convertible pixel format (YUYV or (M)JPEG) and the smallest frame size
// covering the ideal size, mmap the capture buffers, start streaming and spawn
// the capture thread. Returns 0 on success, -1 on failure.
// BUG FIX: every error path now releases the fd and any mapped buffers via the
// unified `fail:` cleanup instead of leaking the open device.
int __v4l_startCapture(int ideal_width, int ideal_height) {
#ifdef USE_FFMPEG
	if (v4l_fd >= 0) {
		__v4l_stopCapture();
	}
	// Forget any stale buffer array so the failure cleanup below only touches
	// buffers mapped by this call.
	v4l_buffers = nullptr;
	v4l_buffer_count = 0;

	v4l_ideal_width = ideal_width;
	v4l_ideal_height = ideal_height;

	int dev_index = 0;
	char dev_name[64];
	enum v4l2_buf_type type;  // declared early so `goto fail` skips no initializer
	sscanf(g_Config.sCameraDevice.c_str(), "%d:", &dev_index);
	snprintf(dev_name, sizeof(dev_name), "/dev/video%d", dev_index);

	if ((v4l_fd = open(dev_name, O_RDWR)) == -1) {
		ERROR_LOG(Log::HLE, "Cannot open '%s'; errno=%d(%s)", dev_name, errno, strerror(errno));
		return -1;
	}

	struct v4l2_capability cap;
	memset(&cap, 0, sizeof(cap));
	if (ioctl(v4l_fd, VIDIOC_QUERYCAP, &cap) == -1) {
		ERROR_LOG(Log::HLE, "VIDIOC_QUERYCAP");
		goto fail;
	}
	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		ERROR_LOG(Log::HLE, "V4L2_CAP_VIDEO_CAPTURE");
		goto fail;
	}
	if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
		ERROR_LOG(Log::HLE, "V4L2_CAP_STREAMING");
		goto fail;
	}

	struct v4l2_format fmt;
	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.pixelformat = 0;

	// Select the first enumerated pixel format we know how to convert.
	struct v4l2_fmtdesc desc;
	memset(&desc, 0, sizeof(desc));
	desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	while (ioctl(v4l_fd, VIDIOC_ENUM_FMT, &desc) == 0) {
		desc.index++;
		INFO_LOG(Log::HLE, "V4L2: pixel format supported: %s", desc.description);
		if (fmt.fmt.pix.pixelformat != 0) {
			continue;
		} else if (desc.pixelformat == V4L2_PIX_FMT_YUYV
				|| desc.pixelformat == V4L2_PIX_FMT_JPEG
				|| desc.pixelformat == V4L2_PIX_FMT_MJPEG) {
			INFO_LOG(Log::HLE, "V4L2: %s selected", desc.description);
			fmt.fmt.pix.pixelformat = desc.pixelformat;
			v4l_format = desc.pixelformat;
		}
	}
	if (fmt.fmt.pix.pixelformat == 0) {
		ERROR_LOG(Log::HLE, "V4L2: No supported format found");
		goto fail;
	}

	// Select the smallest discrete frame size that still covers the ideal size.
	fmt.fmt.pix.width = 0;
	fmt.fmt.pix.height = 0;
	struct v4l2_frmsizeenum frmsize;
	memset(&frmsize, 0, sizeof(frmsize));
	frmsize.pixel_format = fmt.fmt.pix.pixelformat;
	while (ioctl(v4l_fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
		frmsize.index++;
		if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
			INFO_LOG(Log::HLE, "V4L2: frame size supported: %dx%d", frmsize.discrete.width, frmsize.discrete.height);
			bool matchesIdeal = frmsize.discrete.width >= ideal_width && frmsize.discrete.height >= ideal_height;
			bool zeroPix = fmt.fmt.pix.width == 0 && fmt.fmt.pix.height == 0;
			bool pixLarger = frmsize.discrete.width < fmt.fmt.pix.width && frmsize.discrete.height < fmt.fmt.pix.height;
			if (matchesIdeal && (zeroPix || pixLarger)) {
				fmt.fmt.pix.width = frmsize.discrete.width;
				fmt.fmt.pix.height = frmsize.discrete.height;
			}
		}
	}

	if (fmt.fmt.pix.width == 0 && fmt.fmt.pix.height == 0) {
		// No discrete size matched; let the driver round our ideal size.
		fmt.fmt.pix.width = ideal_width;
		fmt.fmt.pix.height = ideal_height;
	}
	INFO_LOG(Log::HLE, "V4L2: asking for %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
	if (ioctl(v4l_fd, VIDIOC_S_FMT, &fmt) == -1) {
		ERROR_LOG(Log::HLE, "VIDIOC_S_FMT");
		goto fail;
	}
	v4l_hw_width = fmt.fmt.pix.width;
	v4l_hw_height = fmt.fmt.pix.height;
	INFO_LOG(Log::HLE, "V4L2: will receive %dx%d", v4l_hw_width, v4l_hw_height);
	v4l_height_fixed_aspect = v4l_hw_width * ideal_height / ideal_width;
	INFO_LOG(Log::HLE, "V4L2: will use %dx%d", v4l_hw_width, v4l_height_fixed_aspect);

	struct v4l2_requestbuffers req;
	memset(&req, 0, sizeof(req));
	req.count = 1;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	if (ioctl(v4l_fd, VIDIOC_REQBUFS, &req) == -1) {
		ERROR_LOG(Log::HLE, "VIDIOC_REQBUFS");
		goto fail;
	}
	v4l_buffer_count = req.count;  // driver may grant more than requested
	INFO_LOG(Log::HLE, "V4L2: buffer count: %d", v4l_buffer_count);
	v4l_buffers = (v4l_buf_t*) calloc(v4l_buffer_count, sizeof(v4l_buf_t));
	if (!v4l_buffers) {  // BUG FIX: calloc was previously unchecked
		ERROR_LOG(Log::HLE, "calloc failed");
		goto fail;
	}

	for (int buf_id = 0; buf_id < v4l_buffer_count; buf_id++) {
		struct v4l2_buffer buf;
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = buf_id;
		if (ioctl(v4l_fd, VIDIOC_QUERYBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_QUERYBUF");
			goto fail;
		}

		v4l_buffers[buf_id].length = buf.length;
		v4l_buffers[buf_id].start = mmap(NULL,
			buf.length,
			PROT_READ | PROT_WRITE,
			MAP_SHARED,
			v4l_fd, buf.m.offset);
		if (v4l_buffers[buf_id].start == MAP_FAILED) {
			ERROR_LOG(Log::HLE, "MAP_FAILED");
			v4l_buffers[buf_id].start = nullptr;  // so cleanup won't munmap MAP_FAILED
			goto fail;
		}

		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = buf_id;
		if (ioctl(v4l_fd, VIDIOC_QBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_QBUF");
			goto fail;
		}
	}

	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(v4l_fd, VIDIOC_STREAMON, &type) == -1) {
		ERROR_LOG(Log::HLE, "VIDIOC_STREAMON");
		goto fail;
	}

	if (pthread_create(&v4l_thread, NULL, v4l_loop, NULL) != 0) {
		// BUG FIX: pthread_create was previously unchecked.
		ERROR_LOG(Log::HLE, "pthread_create failed");
		goto fail;
	}
	return 0;

fail:
	// Unified cleanup: unmap any buffers mapped by this call, free the
	// descriptor array, and close the device so a retry starts from scratch.
	if (v4l_buffers) {
		for (int i = 0; i < v4l_buffer_count; i++) {
			if (v4l_buffers[i].start) {
				munmap(v4l_buffers[i].start, v4l_buffers[i].length);
			}
		}
		free(v4l_buffers);
		v4l_buffers = nullptr;
		v4l_buffer_count = 0;
	}
	close(v4l_fd);
	v4l_fd = -1;
	return -1;
#endif //USE_FFMPEG
	return 0;
}
440
441
int __v4l_stopCapture() {
442
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
443
444
if (v4l_fd < 0) {
445
goto exit;
446
}
447
448
if (ioctl(v4l_fd, VIDIOC_STREAMOFF, &type) == -1) {
449
ERROR_LOG(Log::HLE, "VIDIOC_STREAMOFF");
450
goto exit;
451
}
452
453
for (int buf_id = 0; buf_id < v4l_buffer_count; buf_id++) {
454
if (munmap(v4l_buffers[buf_id].start, v4l_buffers[buf_id].length) == -1) {
455
ERROR_LOG(Log::HLE, "munmap");
456
goto exit;
457
}
458
}
459
460
if (close(v4l_fd) == -1) {
461
ERROR_LOG(Log::HLE, "close");
462
goto exit;
463
}
464
465
v4l_fd = -1;
466
//pthread_join(v4l_thread, NULL);
467
468
exit:
469
v4l_fd = -1;
470
return 0;
471
}
472
473
#endif // PPSSPP_PLATFORM(LINUX) && !PPSSPP_PLATFORM(ANDROID)
474
475