Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
hrydgard
GitHub Repository: hrydgard/ppsspp
Path: blob/master/Core/HW/Camera.cpp
5654 views
1
// Copyright (c) 2020- PPSSPP Project.
2
3
// This program is free software: you can redistribute it and/or modify
4
// it under the terms of the GNU General Public License as published by
5
// the Free Software Foundation, version 2.0 or later versions.
6
7
// This program is distributed in the hope that it will be useful,
8
// but WITHOUT ANY WARRANTY; without even the implied warranty of
9
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10
// GNU General Public License 2.0 for more details.
11
12
// A copy of the GPL 2.0 should have been included with the program.
13
// If not, see http://www.gnu.org/licenses/
14
15
// Official git repository and contact information can be found at
16
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
17
#include "ppsspp_config.h"
18
#include "Camera.h"
19
#include "Core/Config.h"
20
21
#ifdef USE_FFMPEG
// Rescale a raw camera frame (inData, inw x inh, inFormat) to outw x outh
// RGB24 via libswscale, then JPEG-compress it with jpge.
// On success *outData receives a malloc()ed JPEG buffer (caller must free())
// and *outLen its capacity; on allocation/context failure *outData is set to
// nullptr and *outLen to 0 so callers' `if (jpegData)` checks stay valid.
void convert_frame(int inw, int inh, unsigned char *inData, AVPixelFormat inFormat,
		int outw, int outh, unsigned char **outData, int *outLen) {
	struct SwsContext *sws_context = sws_getContext(
		inw, inh, inFormat,
		outw, outh, AV_PIX_FMT_RGB24,
		SWS_BICUBIC, NULL, NULL, NULL);
	if (!sws_context) {
		// Unsupported conversion or out of memory.
		*outData = nullptr;
		*outLen = 0;
		return;
	}

	// resize
	uint8_t *src[4] = {0};
	uint8_t *dst[4] = {0};
	int srcStride[4], dstStride[4];

	// 4 bytes/px of slack even though RGB24 needs 3 — keeps any linesize
	// padding from av_image_fill_linesizes in bounds.
	unsigned char *rgbData = (unsigned char*)malloc(outw * outh * 4);
	if (!rgbData) {
		sws_freeContext(sws_context);
		*outData = nullptr;
		*outLen = 0;
		return;
	}

	av_image_fill_linesizes(srcStride, inFormat, inw);
	av_image_fill_linesizes(dstStride, AV_PIX_FMT_RGB24, outw);

	av_image_fill_pointers(src, inFormat, inh, inData, srcStride);
	av_image_fill_pointers(dst, AV_PIX_FMT_RGB24, outh, rgbData, dstStride);

	sws_scale(sws_context,
		src, srcStride, 0, inh,
		dst, dstStride);
	// sws_getContext allocates per call; this was previously leaked on
	// every converted frame.
	sws_freeContext(sws_context);

	// compress jpeg
	// width*height*2 bytes is far more than a quality-60 JPEG of this size
	// needs; jpge updates *outLen to the actual compressed length.
	*outLen = outw * outh * 2;
	*outData = (unsigned char*)malloc(*outLen);
	if (!*outData) {
		free(rgbData);
		*outLen = 0;
		return;
	}

	jpge::params params;
	params.m_quality = 60;
	params.m_subsampling = jpge::H2V2;
	params.m_two_pass_flag = false;
	jpge::compress_image_to_jpeg_file_in_memory(
		*outData, *outLen, outw, outh, 3, rgbData, params);
	free(rgbData);
}
#endif //USE_FFMPEG
59
60
61
// Produce a synthetic camera frame: a solid red width x height image,
// JPEG-compressed. Used when no real camera is available so games that
// detect color (e.g. Invizimals) still see something.
// *outData receives a malloc()ed JPEG (caller frees), *outLen its capacity;
// both are set to nullptr/0 on allocation failure. No-op without USE_FFMPEG.
void __cameraDummyImage(int width, int height, unsigned char** outData, int* outLen) {
#ifdef USE_FFMPEG
	unsigned char *rgbData = (unsigned char *)malloc(3 * width * height);
	if (!rgbData) {
		*outData = nullptr;
		*outLen = 0;  // previously left uninitialized on this path
		return;
	}
	// Generate a solid RED surface for camera detection (e.g., Invizimals)
	// RGB: (255, 0, 0) = Red
	for (int y = 0; y < height; y++) {
		for (int x = 0; x < width; x++) {
			rgbData[3 * (y * width + x) + 0] = 255; // Red channel: full intensity
			rgbData[3 * (y * width + x) + 1] = 0;   // Green channel: off
			rgbData[3 * (y * width + x) + 2] = 0;   // Blue channel: off
		}
	}

	// Generous upper bound for the compressed size; jpge shrinks *outLen to
	// the actual JPEG length.
	*outLen = width * height * 2;
	*outData = (unsigned char*)malloc(*outLen);
	if (!*outData) {
		// Previously passed a null buffer straight into jpge.
		free(rgbData);
		*outLen = 0;
		return;
	}

	jpge::params params;
	params.m_quality = 60;
	params.m_subsampling = jpge::H2V2;
	params.m_two_pass_flag = false;
	jpge::compress_image_to_jpeg_file_in_memory(
		*outData, *outLen, width, height, 3, rgbData, params);
	free(rgbData);
#endif //USE_FFMPEG
}
90
91
92
#if defined(USING_QT_UI)
93
94
std::vector<std::string> __qt_getDeviceList() {
95
std::vector<std::string> deviceList;
96
const QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
97
for (const QCameraInfo &cameraInfo : cameras) {
98
deviceList.push_back(cameraInfo.deviceName().toStdString()
99
+ " (" + cameraInfo.description().toStdString() + ")");
100
}
101
return deviceList;
102
}
103
104
QList<QVideoFrame::PixelFormat> MyViewfinder::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const {
	Q_UNUSED(handleType);
	// Advertise only the formats present() knows how to convert.
	QList<QVideoFrame::PixelFormat> formats;
	formats.append(QVideoFrame::Format_RGB24);
	formats.append(QVideoFrame::Format_YUYV);
	return formats;
}
112
113
// Qt viewfinder callback: receives one camera frame, converts it (RGB24 or
// YUYV) to a JPEG at the ideal capture size, and pushes it to the emulated
// camera. Returns true if the frame was consumed.
bool MyViewfinder::present(const QVideoFrame &frame) {
#ifdef USE_FFMPEG
	if (frame.isValid()) {
		QVideoFrame cloneFrame(frame);
		// map() can fail (e.g. for handle types with no CPU-readable
		// buffer); bits() would then be invalid. The return value was
		// previously ignored.
		if (!cloneFrame.map(QAbstractVideoBuffer::ReadOnly)) {
			ERROR_LOG(Log::HLE, "camera: failed to map video frame");
			return false;
		}

		unsigned char *jpegData = nullptr;
		int jpegLen = 0;

		QVideoFrame::PixelFormat frameFormat = cloneFrame.pixelFormat();
		if (frameFormat == QVideoFrame::Format_RGB24) {
			convert_frame(cloneFrame.size().width(), cloneFrame.size().height(),
				(unsigned char*)cloneFrame.bits(), AV_PIX_FMT_RGB24,
				qtc_ideal_width, qtc_ideal_height, &jpegData, &jpegLen);
		} else if (frameFormat == QVideoFrame::Format_YUYV) {
			convert_frame(cloneFrame.size().width(), cloneFrame.size().height(),
				(unsigned char*)cloneFrame.bits(), AV_PIX_FMT_YUYV422,
				qtc_ideal_width, qtc_ideal_height, &jpegData, &jpegLen);
		}
		// Any other pixel format is silently dropped (jpegData stays null).

		if (jpegData) {
			Camera::pushCameraImage(jpegLen, jpegData);
			free(jpegData);
			jpegData = nullptr;
		}

		cloneFrame.unmap();
		return true;
	}
#endif //USE_FFMPEG
	return false;
}
146
147
int __qt_startCapture(int width, int height) {
148
if (qt_camera != nullptr) {
149
ERROR_LOG(Log::HLE, "camera already started");
150
return -1;
151
}
152
153
char selectedCamera[80];
154
sscanf(g_Config.sCameraDevice.c_str(), "%80s ", &selectedCamera[0]);
155
156
const QList<QCameraInfo> availableCameras = QCameraInfo::availableCameras();
157
if (availableCameras.size() < 1) {
158
delete qt_camera;
159
qt_camera = nullptr;
160
ERROR_LOG(Log::HLE, "no camera found");
161
return -1;
162
}
163
for (const QCameraInfo &cameraInfo : availableCameras) {
164
if (cameraInfo.deviceName() == selectedCamera) {
165
qt_camera = new QCamera(cameraInfo);
166
}
167
}
168
if (qt_camera == nullptr) {
169
qt_camera = new QCamera();
170
if (qt_camera == nullptr) {
171
ERROR_LOG(Log::HLE, "cannot open camera");
172
return -1;
173
}
174
}
175
176
qtc_ideal_width = width;
177
qtc_ideal_height = height;
178
179
qt_viewfinder = new MyViewfinder;
180
181
QCameraViewfinderSettings viewfinderSettings = qt_camera->viewfinderSettings();
182
viewfinderSettings.setResolution(640, 480);
183
viewfinderSettings.setMinimumFrameRate(15.0);
184
viewfinderSettings.setMaximumFrameRate(15.0);
185
186
qt_camera->setViewfinderSettings(viewfinderSettings);
187
qt_camera->setViewfinder(qt_viewfinder);
188
qt_camera->start();
189
190
return 0;
191
}
192
193
int __qt_stopCapture() {
194
if (qt_camera != nullptr) {
195
qt_camera->stop();
196
qt_camera->unload();
197
delete qt_camera;
198
delete qt_viewfinder;
199
qt_camera = nullptr;
200
}
201
return 0;
202
}
203
204
//endif defined(USING_QT_UI)
205
#elif PPSSPP_PLATFORM(LINUX) && !PPSSPP_PLATFORM(ANDROID)
206
207
// Probe /dev/video0..63 in order and return "index:card-name" entries for
// each device that answers VIDIOC_QUERYCAP. Enumeration stops at the first
// missing node.
std::vector<std::string> __v4l_getDeviceList() {
	std::vector<std::string> deviceList;
#ifdef USE_FFMPEG
	for (int idx = 0; idx < 64; idx++) {
		char devPath[256];
		snprintf(devPath, sizeof(devPath), "/dev/video%d", idx);
		if (access(devPath, F_OK) < 0) {
			break;
		}
		int fd = open(devPath, O_RDONLY);
		if (fd < 0) {
			ERROR_LOG(Log::HLE, "Cannot open '%s'; errno=%d(%s)", devPath, errno, strerror(errno));
			continue;
		}
		struct v4l2_capability video_cap;
		if (ioctl(fd, VIDIOC_QUERYCAP, &video_cap) < 0) {
			ERROR_LOG(Log::HLE, "VIDIOC_QUERYCAP");
		} else {
			char entry[256];
			snprintf(entry, sizeof(entry), "%d:%s", idx, video_cap.card);
			deviceList.push_back(entry);
		}
		close(fd);
	}
#endif //USE_FFMPEG
	return deviceList;
}
237
238
// Capture-thread entry point: dequeue V4L2 buffers, convert each frame to a
// JPEG at the ideal size, push it to the emulated camera, and requeue the
// buffer. Runs until v4l_fd is closed/reset by __v4l_stopCapture() or a
// fatal ioctl error occurs.
void *v4l_loop(void *data) {
#ifdef USE_FFMPEG
	SetCurrentThreadName("v4l_loop");
	while (v4l_fd >= 0) {
		struct v4l2_buffer buf;
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;

		if (ioctl(v4l_fd, VIDIOC_DQBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_DQBUF; errno=%d(%s)", errno, strerror(errno));
			switch (errno) {
			case EAGAIN:
				continue;
			default:
				// EBADF/ENODEV etc. — device gone or fd closed; exit thread.
				return nullptr;
			}
		}

		unsigned char *jpegData = nullptr;
		int jpegLen = 0;

		if (v4l_format == V4L2_PIX_FMT_YUYV) {
			convert_frame(v4l_hw_width, v4l_hw_height, (unsigned char*)v4l_buffers[buf.index].start, AV_PIX_FMT_YUYV422,
				v4l_ideal_width, v4l_ideal_height, &jpegData, &jpegLen);
		} else if (v4l_format == V4L2_PIX_FMT_JPEG
				|| v4l_format == V4L2_PIX_FMT_MJPEG) {
			// decompress jpeg
			int width, height, req_comps;
			unsigned char *rgbData = jpgd::decompress_jpeg_image_from_memory(
				(unsigned char*)v4l_buffers[buf.index].start, buf.bytesused, &width, &height, &req_comps, 3);
			// jpgd returns null on a corrupt/truncated frame; previously the
			// null pointer went straight into convert_frame and crashed the
			// capture thread. Skip the frame instead.
			if (rgbData) {
				convert_frame(v4l_hw_width, v4l_hw_height, (unsigned char*)rgbData, AV_PIX_FMT_RGB24,
					v4l_ideal_width, v4l_ideal_height, &jpegData, &jpegLen);
				free(rgbData);
			}
		}

		if (jpegData) {
			Camera::pushCameraImage(jpegLen, jpegData);
			free(jpegData);
			jpegData = nullptr;
		}

		// Hand the buffer back to the driver for the next frame.
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		if (ioctl(v4l_fd, VIDIOC_QBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_QBUF");
			return nullptr;
		}
	}
#endif //USE_FFMPEG
	return nullptr;
}
291
292
// Open the configured V4L2 device, negotiate a pixel format and frame size,
// mmap the driver's capture buffers, start streaming, and spawn v4l_loop()
// on its own thread. Returns 0 on success, -1 on failure.
// NOTE(review): most error paths below return -1 while leaving v4l_fd open
// and buffers mapped — cleanup is deferred to the __v4l_stopCapture() call
// at the top of the next start attempt; verify that covers every path.
int __v4l_startCapture(int ideal_width, int ideal_height) {
#ifdef USE_FFMPEG
	// Restart cleanly if a previous capture session is still active.
	if (v4l_fd >= 0) {
		__v4l_stopCapture();
	}
	v4l_ideal_width = ideal_width;
	v4l_ideal_height = ideal_height;

	// Config format is "<index>:<card name>" (see __v4l_getDeviceList);
	// only the leading index is used. dev_index stays 0 if parsing fails.
	int dev_index = 0;
	char dev_name[64];
	sscanf(g_Config.sCameraDevice.c_str(), "%d:", &dev_index);
	snprintf(dev_name, sizeof(dev_name), "/dev/video%d", dev_index);

	if ((v4l_fd = open(dev_name, O_RDWR)) == -1) {
		ERROR_LOG(Log::HLE, "Cannot open '%s'; errno=%d(%s)", dev_name, errno, strerror(errno));
		return -1;
	}

	// The device must support both video capture and streaming (mmap) I/O.
	struct v4l2_capability cap;
	memset(&cap, 0, sizeof(cap));
	if (ioctl(v4l_fd, VIDIOC_QUERYCAP, &cap) == -1) {
		ERROR_LOG(Log::HLE, "VIDIOC_QUERYCAP");
		return -1;
	}
	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		ERROR_LOG(Log::HLE, "V4L2_CAP_VIDEO_CAPTURE");
		return -1;
	}
	if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
		ERROR_LOG(Log::HLE, "V4L2_CAP_STREAMING");
		return -1;
	}

	struct v4l2_format fmt;
	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.pixelformat = 0;  // 0 = "none selected yet"

	// select a pixel format
	// Enumerate everything the driver offers; take the FIRST format that
	// v4l_loop() can handle (YUYV, JPEG or MJPEG) and keep looping only to
	// log the remaining formats.
	struct v4l2_fmtdesc desc;
	memset(&desc, 0, sizeof(desc));
	desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	while (ioctl(v4l_fd, VIDIOC_ENUM_FMT, &desc) == 0) {
		desc.index++;
		INFO_LOG(Log::HLE, "V4L2: pixel format supported: %s", desc.description);
		if (fmt.fmt.pix.pixelformat != 0) {
			continue;
		} else if (desc.pixelformat == V4L2_PIX_FMT_YUYV
				|| desc.pixelformat == V4L2_PIX_FMT_JPEG
				|| desc.pixelformat == V4L2_PIX_FMT_MJPEG) {
			INFO_LOG(Log::HLE, "V4L2: %s selected", desc.description);
			fmt.fmt.pix.pixelformat = desc.pixelformat;
			v4l_format = desc.pixelformat;
		}
	}
	if (fmt.fmt.pix.pixelformat == 0) {
		ERROR_LOG(Log::HLE, "V4L2: No supported format found");
		return -1;
	}

	// select a frame size
	// Pick the SMALLEST discrete size that still covers the ideal
	// dimensions (zeroPix accepts the first candidate, pixLarger then only
	// replaces it with strictly smaller ones).
	fmt.fmt.pix.width = 0;
	fmt.fmt.pix.height = 0;
	struct v4l2_frmsizeenum frmsize;
	memset(&frmsize, 0, sizeof(frmsize));
	frmsize.pixel_format = fmt.fmt.pix.pixelformat;
	while (ioctl(v4l_fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
		frmsize.index++;
		if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
			INFO_LOG(Log::HLE, "V4L2: frame size supported: %dx%d", frmsize.discrete.width, frmsize.discrete.height);
			bool matchesIdeal = frmsize.discrete.width >= ideal_width && frmsize.discrete.height >= ideal_height;
			bool zeroPix = fmt.fmt.pix.width == 0 && fmt.fmt.pix.height == 0;
			bool pixLarger = frmsize.discrete.width < fmt.fmt.pix.width && frmsize.discrete.height < fmt.fmt.pix.height;
			if (matchesIdeal && (zeroPix || pixLarger)) {
				fmt.fmt.pix.width = frmsize.discrete.width;
				fmt.fmt.pix.height = frmsize.discrete.height;
			}
		}
	}

	// No discrete size matched (or only stepwise/continuous sizes exist):
	// just ask for the ideal size and let the driver pick the closest.
	if (fmt.fmt.pix.width == 0 && fmt.fmt.pix.height == 0) {
		fmt.fmt.pix.width = ideal_width;
		fmt.fmt.pix.height = ideal_height;
	}
	INFO_LOG(Log::HLE, "V4L2: asking for %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
	if (ioctl(v4l_fd, VIDIOC_S_FMT, &fmt) == -1) {
		ERROR_LOG(Log::HLE, "VIDIOC_S_FMT");
		return -1;
	}
	// The driver may have adjusted the size; record what we'll actually get.
	v4l_hw_width = fmt.fmt.pix.width;
	v4l_hw_height = fmt.fmt.pix.height;
	INFO_LOG(Log::HLE, "V4L2: will receive %dx%d", v4l_hw_width, v4l_hw_height);
	// Height that restores the requested aspect ratio at the hardware width.
	v4l_height_fixed_aspect = v4l_hw_width * ideal_height / ideal_width;
	INFO_LOG(Log::HLE, "V4L2: will use %dx%d", v4l_hw_width, v4l_height_fixed_aspect);

	// Request driver-allocated mmap buffers (asking for 1; the driver may
	// grant more, hence reading back req.count).
	struct v4l2_requestbuffers req;
	memset(&req, 0, sizeof(req));
	req.count = 1;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	if (ioctl(v4l_fd, VIDIOC_REQBUFS, &req) == -1) {
		ERROR_LOG(Log::HLE, "VIDIOC_REQBUFS");
		return -1;
	}
	v4l_buffer_count = req.count;
	INFO_LOG(Log::HLE, "V4L2: buffer count: %d", v4l_buffer_count);
	v4l_buffers = (v4l_buf_t*) calloc(v4l_buffer_count, sizeof(v4l_buf_t));

	// Map each driver buffer into our address space and queue it so the
	// driver can start filling it.
	for (int buf_id = 0; buf_id < v4l_buffer_count; buf_id++) {
		struct v4l2_buffer buf;
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = buf_id;
		if (ioctl(v4l_fd, VIDIOC_QUERYBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_QUERYBUF");
			return -1;
		}

		v4l_buffers[buf_id].length = buf.length;
		v4l_buffers[buf_id].start = mmap(NULL,
			buf.length,
			PROT_READ | PROT_WRITE,
			MAP_SHARED,
			v4l_fd, buf.m.offset);
		if (v4l_buffers[buf_id].start == MAP_FAILED) {
			ERROR_LOG(Log::HLE, "MAP_FAILED");
			return -1;
		}

		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = buf_id;
		if (ioctl(v4l_fd, VIDIOC_QBUF, &buf) == -1) {
			ERROR_LOG(Log::HLE, "VIDIOC_QBUF");
			return -1;
		}
	}

	// Start the stream, then hand frame handling to the capture thread.
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(v4l_fd, VIDIOC_STREAMON, &type) == -1) {
		ERROR_LOG(Log::HLE, "VIDEOC_STREAMON");
		return -1;
	}

	pthread_create(&v4l_thread, NULL, v4l_loop, NULL);
#endif //USE_FFMPEG
	return 0;
}
442
443
int __v4l_stopCapture() {
444
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
445
446
if (v4l_fd < 0) {
447
goto exit;
448
}
449
450
if (ioctl(v4l_fd, VIDIOC_STREAMOFF, &type) == -1) {
451
ERROR_LOG(Log::HLE, "VIDIOC_STREAMOFF");
452
goto exit;
453
}
454
455
for (int buf_id = 0; buf_id < v4l_buffer_count; buf_id++) {
456
if (munmap(v4l_buffers[buf_id].start, v4l_buffers[buf_id].length) == -1) {
457
ERROR_LOG(Log::HLE, "munmap");
458
goto exit;
459
}
460
}
461
462
if (close(v4l_fd) == -1) {
463
ERROR_LOG(Log::HLE, "close");
464
goto exit;
465
}
466
467
v4l_fd = -1;
468
//pthread_join(v4l_thread, NULL);
469
470
exit:
471
v4l_fd = -1;
472
return 0;
473
}
474
475
#endif // PPSSPP_PLATFORM(LINUX) && !PPSSPP_PLATFORM(ANDROID)
476
477