GitHub Repository: hrydgard/ppsspp
Path: blob/master/Windows/CaptureDevice.cpp
// Copyright (c) 2020- PPSSPP Project.

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.

// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/

// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.

#include <shlwapi.h>

#include "Common/Thread/ThreadUtil.h"
#include "CaptureDevice.h"
#include "BufferLock.h"
#include "ext/jpge/jpge.h"
#include "CommonTypes.h"
#include "Core/HLE/sceUsbCam.h"
#include "Core/Config.h"

namespace MFAPI {
	HINSTANCE Mflib;
	HINSTANCE Mfplatlib;
	HINSTANCE Mfreadwritelib;

	typedef HRESULT(WINAPI *MFEnumDeviceSourcesFunc)(IMFAttributes *, IMFActivate ***, UINT32 *);
	typedef HRESULT(WINAPI *MFGetStrideForBitmapInfoHeaderFunc)(DWORD, DWORD, LONG *);
	typedef HRESULT(WINAPI *MFCreateSourceReaderFromMediaSourceFunc)(IMFMediaSource *, IMFAttributes *, IMFSourceReader **);
	typedef HRESULT(WINAPI *MFCopyImageFunc)(BYTE *, LONG, const BYTE *, LONG, DWORD, DWORD);

	MFEnumDeviceSourcesFunc EnumDeviceSources;
	MFGetStrideForBitmapInfoHeaderFunc GetStrideForBitmapInfoHeader;
	MFCreateSourceReaderFromMediaSourceFunc CreateSourceReaderFromMediaSource;
	MFCopyImageFunc CopyImage;
}

using namespace MFAPI;

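// The Media Foundation entry points are resolved at runtime with LoadLibrary/GetProcAddress rather
// than linked directly, so the emulator can still start when these DLLs are missing (see the note below).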
bool RegisterCMPTMFApis() {
	// For compatibility; these functions are not supported on Vista.
	Mflib = LoadLibrary(L"Mf.dll");
	Mfplatlib = LoadLibrary(L"Mfplat.dll");
	Mfreadwritelib = LoadLibrary(L"Mfreadwrite.dll");
	if (!Mflib || !Mfplatlib || !Mfreadwritelib)
		return false;

	EnumDeviceSources = (MFEnumDeviceSourcesFunc)GetProcAddress(Mflib, "MFEnumDeviceSources");
	GetStrideForBitmapInfoHeader = (MFGetStrideForBitmapInfoHeaderFunc)GetProcAddress(Mfplatlib, "MFGetStrideForBitmapInfoHeader");
	MFAPI::CopyImage = (MFCopyImageFunc)GetProcAddress(Mfplatlib, "MFCopyImage");
	CreateSourceReaderFromMediaSource = (MFCreateSourceReaderFromMediaSourceFunc)GetProcAddress(Mfreadwritelib, "MFCreateSourceReaderFromMediaSource");
	if (!EnumDeviceSources || !GetStrideForBitmapInfoHeader || !CreateSourceReaderFromMediaSource || !MFAPI::CopyImage)
		return false;

	return true;
}

bool unRegisterCMPTMFApis() {
	if (Mflib) {
		FreeLibrary(Mflib);
		Mflib = nullptr;
	}

	if (Mfplatlib) {
		FreeLibrary(Mfplatlib);
		Mfplatlib = nullptr;
	}

	if (Mfreadwritelib) {
		FreeLibrary(Mfreadwritelib);
		Mfreadwritelib = nullptr;
	}

	EnumDeviceSources = nullptr;
	GetStrideForBitmapInfoHeader = nullptr;
	CreateSourceReaderFromMediaSource = nullptr;
	MFAPI::CopyImage = nullptr;

	return true;
}

WindowsCaptureDevice *winCamera;
WindowsCaptureDevice *winMic;

// TODO: Add more formats, but need some tests.
VideoFormatTransform g_VideoFormats[] =
{
	{ MFVideoFormat_RGB32, AV_PIX_FMT_RGBA },
	{ MFVideoFormat_RGB24, AV_PIX_FMT_RGB24 },
	{ MFVideoFormat_YUY2, AV_PIX_FMT_YUYV422 },
	{ MFVideoFormat_NV12, AV_PIX_FMT_NV12 }
};

AudioFormatTransform g_AudioFormats[] = {
	{ MFAudioFormat_PCM, 8, AV_SAMPLE_FMT_U8 },
	{ MFAudioFormat_PCM, 16, AV_SAMPLE_FMT_S16 },
	{ MFAudioFormat_PCM, 32, AV_SAMPLE_FMT_S32 },
	{ MFAudioFormat_Float, 32, AV_SAMPLE_FMT_FLT }
};

const int g_cVideoFormats = ARRAYSIZE(g_VideoFormats);
const int g_cAudioFormats = ARRAYSIZE(g_AudioFormats);

MediaParam defaultVideoParam = { { 640, 480, 0, MFVideoFormat_RGB24 } };
MediaParam defaultAudioParam = { { 44100, 2, 16, MFAudioFormat_PCM } };

HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride);

ReaderCallback::ReaderCallback(WindowsCaptureDevice *_device) : device(_device) {}

ReaderCallback::~ReaderCallback() {
#ifdef USE_FFMPEG
	if (img_convert_ctx) {
		sws_freeContext(img_convert_ctx);
	}
	if (resample_ctx) {
		swr_free(&resample_ctx);
	}
#endif
}

HRESULT ReaderCallback::QueryInterface(REFIID riid, void** ppv)
{
	static const QITAB qit[] =
	{
		QITABENT(ReaderCallback, IMFSourceReaderCallback),
		{ 0 },
	};
	return QISearch(this, qit, riid, ppv);
}

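// Called asynchronously by the source reader whenever a sample (or an error) is ready.
// Video frames are converted to RGB24, optionally mirrored, compressed to JPEG and pushed
// to the emulated camera; audio data is resampled if necessary and fed to the microphone.
// In both cases the next ReadSample request is issued here to keep the stream flowing.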
HRESULT ReaderCallback::OnReadSample(
	HRESULT hrStatus,
	DWORD dwStreamIndex,
	DWORD dwStreamFlags,
	LONGLONG llTimestamp,
	IMFSample *pSample) {
	HRESULT hr = S_OK;
	IMFMediaBuffer *pBuffer = nullptr;
	std::lock_guard<std::mutex> lock(device->sdMutex);
	if (device->isShutDown())
		return hr;

	if (FAILED(hrStatus))
		hr = hrStatus;

	if (SUCCEEDED(hr)) {
		if (pSample) {
			hr = pSample->GetBufferByIndex(0, &pBuffer);
		}
	}
	if (SUCCEEDED(hr)) {
		switch (device->type) {
		case CAPTUREDEVIDE_TYPE::VIDEO: {
			BYTE *pbScanline0 = nullptr;
			VideoBufferLock *videoBuffer = nullptr;
			int imgJpegSize = device->imgJpegSize;
			unsigned char *invertedSrcImg = nullptr;
			LONG srcPadding = 0;
			LONG lStride = 0;

			UINT32 srcW = device->deviceParam.width;
			UINT32 srcH = device->deviceParam.height;
			UINT32 dstW = device->targetMediaParam.width;
			UINT32 dstH = device->targetMediaParam.height;
			GUID srcMFVideoFormat = device->deviceParam.videoFormat;

			// pSample can be null; in that case ReadSample should still be called to request the next frame.
			if (pSample) {
				videoBuffer = new VideoBufferLock(pBuffer);
				hr = videoBuffer->LockBuffer(device->deviceParam.default_stride, device->deviceParam.height, &pbScanline0, &lStride);

				if (lStride > 0)
					srcPadding = lStride - device->deviceParam.default_stride;
				else
					srcPadding = device->deviceParam.default_stride - lStride;

#ifdef USE_FFMPEG
				if (SUCCEEDED(hr)) {
					// Convert the image to RGB24.
					if (lStride > 0) {
						imgConvert(
							device->imageRGB, dstW, dstH, device->imgRGBLineSizes,
							pbScanline0, srcW, srcH, srcMFVideoFormat, srcPadding);
					} else {
						// If the stride is negative, the first-row pointer actually points at the last row in memory,
						// so the image has to be inverted first.
						invertedSrcImg = (unsigned char *)av_malloc(av_image_get_buffer_size(getAVVideoFormatbyMFVideoFormat(srcMFVideoFormat), srcW, srcH, 1));
						imgInvert(invertedSrcImg, pbScanline0, srcW, srcH, device->deviceParam.videoFormat, lStride);
						// The inverted image is allocated without padding, so set the padding to zero.
						srcPadding = 0;
						imgConvert(
							device->imageRGB, dstW, dstH, device->imgRGBLineSizes,
							invertedSrcImg, srcW, srcH, srcMFVideoFormat, srcPadding);
						av_free(invertedSrcImg);
					}

					// Mirror the image in-place if needed.
					if (g_Config.bCameraMirrorHorizontal) {
						for (int y = 0; y < dstH; y++) {
							uint8_t *line = device->imageRGB + y * device->imgRGBLineSizes[0];
							for (int x = 0; x < dstW / 2; x++) {
								const int invX = dstW - 1 - x;
								const uint8_t r = line[x * 3 + 0];
								const uint8_t g = line[x * 3 + 1];
								const uint8_t b = line[x * 3 + 2];
								line[x * 3 + 0] = line[invX * 3 + 0];
								line[x * 3 + 1] = line[invX * 3 + 1];
								line[x * 3 + 2] = line[invX * 3 + 2];
								line[invX * 3 + 0] = r;
								line[invX * 3 + 1] = g;
								line[invX * 3 + 2] = b;
							}
						}
					}

					// Compress the RGB24 image to JPEG.
					jpge::compress_image_to_jpeg_file_in_memory(
						device->imageJpeg, imgJpegSize,
						dstW,
						dstH,
						3,
						device->imageRGB);
				}
#endif
				Camera::pushCameraImage(imgJpegSize, device->imageJpeg);
			}
			// Request the next frame.
			if (SUCCEEDED(hr)) {
				hr = device->m_pReader->ReadSample(
					(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
					0,
					nullptr,
					nullptr,
					nullptr,
					nullptr
				);
			}
			delete videoBuffer;
			break;
		}
		case CAPTUREDEVIDE_TYPE::Audio: {
			BYTE *sampleBuf = nullptr;
			DWORD length = 0;
			u32 sizeAfterResample = 0;
			// pSample can be null; in that case ReadSample should still be called to request the next frame.
			if (pSample) {
				pBuffer->Lock(&sampleBuf, nullptr, &length);
				if (device->needResample()) {
					sizeAfterResample = doResample(
						&device->resampleBuf, device->targetMediaParam.sampleRate, device->targetMediaParam.channels, &device->resampleBufSize,
						sampleBuf, device->deviceParam.sampleRate, device->deviceParam.channels, device->deviceParam.audioFormat, length, device->deviceParam.bitsPerSample);
					if (device->resampleBuf)
						Microphone::addAudioData(device->resampleBuf, sizeAfterResample);
				} else {
					Microphone::addAudioData(sampleBuf, length);
				}
				pBuffer->Unlock();
			}
			// Request the next frame.
			if (SUCCEEDED(hr)) {
				hr = device->m_pReader->ReadSample(
					(DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
					0,
					nullptr,
					nullptr,
					nullptr,
					nullptr
				);
			}
			break;
		}
		}
	}

	SafeRelease(&pBuffer);
	return hr;
}

AVPixelFormat ReaderCallback::getAVVideoFormatbyMFVideoFormat(const GUID &MFVideoFormat) {
	for (int i = 0; i < g_cVideoFormats; i++) {
		if (MFVideoFormat == g_VideoFormats[i].MFVideoFormat)
			return g_VideoFormats[i].AVVideoFormat;
	}
	return AV_PIX_FMT_RGB24;
}

AVSampleFormat ReaderCallback::getAVAudioFormatbyMFAudioFormat(const GUID &MFAudioFormat, const u32 &bitsPerSample) {
	for (int i = 0; i < g_cAudioFormats; i++) {
		if (MFAudioFormat == g_AudioFormats[i].MFAudioFormat && bitsPerSample == g_AudioFormats[i].bitsPerSample)
			return g_AudioFormats[i].AVAudioFormat;
	}
	return AV_SAMPLE_FMT_S16;
}

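// Converts a captured frame to RGB24 using libswscale. The conversion context is created
// lazily on the first frame and reused afterwards.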
void ReaderCallback::imgConvert(
	unsigned char *dst, unsigned int &dstW, unsigned int &dstH, int dstLineSizes[4],
	unsigned char *src, const unsigned int &srcW, const unsigned int &srcH, const GUID &srcFormat,
	const int &srcPadding) {
#ifdef USE_FFMPEG
	int srcLineSizes[4] = { 0, 0, 0, 0 };
	unsigned char *pSrc[4];
	unsigned char *pDst[4];

	AVPixelFormat srcAVFormat = getAVVideoFormatbyMFVideoFormat(srcFormat);

	av_image_fill_linesizes(srcLineSizes, srcAVFormat, srcW);

	// Is this correct?
	if (srcPadding != 0) {
		for (int i = 0; i < 4; i++) {
			if (srcLineSizes[i] != 0)
				srcLineSizes[i] += srcPadding;
		}
	}

	av_image_fill_pointers(pSrc, srcAVFormat, srcH, src, srcLineSizes);
	av_image_fill_pointers(pDst, AV_PIX_FMT_RGB24, dstH, dst, dstLineSizes);

	if (img_convert_ctx == nullptr) {
		img_convert_ctx = sws_getContext(
			srcW,
			srcH,
			srcAVFormat,
			dstW,
			dstH,
			AV_PIX_FMT_RGB24,
			SWS_BICUBIC,
			nullptr,
			nullptr,
			nullptr
		);
	}

	if (img_convert_ctx) {
		sws_scale(img_convert_ctx,
			(const uint8_t *const *)pSrc,
			srcLineSizes,
			0,
			srcH,
			(uint8_t *const *)pDst,
			dstLineSizes
		);
	}
#endif
}

void ReaderCallback::imgInvert(unsigned char *dst, unsigned char *src, const int &srcW, const int &srcH, const GUID &srcFormat, const int &srcStride) {
#ifdef USE_FFMPEG
	AVPixelFormat srcAvFormat = getAVVideoFormatbyMFVideoFormat(srcFormat);
	int dstLineSizes[4] = { 0, 0, 0, 0 };

	av_image_fill_linesizes(dstLineSizes, srcAvFormat, srcW);

	if (srcFormat == MFVideoFormat_RGB32)
		imgInvertRGBA(dst, dstLineSizes[0], src, srcStride, srcH);
	else if (srcFormat == MFVideoFormat_RGB24)
		imgInvertRGB(dst, dstLineSizes[0], src, srcStride, srcH);
	else if (srcFormat == MFVideoFormat_YUY2)
		imgInvertYUY2(dst, dstLineSizes[0], src, srcStride, srcH);
	else if (srcFormat == MFVideoFormat_NV12)
		imgInvertNV12(dst, dstLineSizes[0], src, srcStride, srcH);
#endif
}

void ReaderCallback::imgInvertRGBA(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
	MFAPI::CopyImage(dst, dstStride, src, srcStride, dstStride, h);
}

void ReaderCallback::imgInvertRGB(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
	for (int y = 0; y < h; y++) {
		for (int srcx = dstStride - 1, dstx = 0; dstx < dstStride; srcx--, dstx++) {
			dst[dstx] = src[srcx];
		}
		dst += dstStride;
		src += srcStride;
	}
}

void ReaderCallback::imgInvertYUY2(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
	for (int y = 0; y < h; y++) {
		for (int srcx = dstStride - 1, dstx = 0; dstx < dstStride; srcx--, dstx++) {
			dst[dstx] = src[srcx];
		}
		dst += dstStride;
		src += srcStride;
	}
}

void ReaderCallback::imgInvertNV12(unsigned char *dst, int &dstStride, unsigned char *src, const int &srcStride, const int &h) {
	unsigned char *dstY = dst;
	unsigned char *dstU = dst + dstStride * h;
	unsigned char *srcY = src;
	unsigned char *srcV = src + srcStride * h;

	unsigned char *srcY1 = srcY;
	unsigned char *srcY2 = srcY1 + srcStride;
	unsigned char *dstY1 = dstY;
	unsigned char *dstY2 = dstY1 + dstStride;

	bool isodd = h % 2 != 0;

	for (int y = 0; y < (isodd ? h - 1 : h); y += 2) {
		for (int srcx = dstStride - 1, dstx = 0; dstx < dstStride; srcx--, dstx++) {
			dstY1[dstx] = srcY1[srcx];
			dstY2[dstx] = srcY2[srcx];
			dstU[dstx] = srcV[srcx];
		}
		// Advance the destination row pointers along with the source ones.
		dstY1 += dstStride * 2;
		dstY2 += dstStride * 2;
		srcY1 += srcStride * 2;
		srcY2 += srcStride * 2;
		srcV += srcStride;
		dstU += dstStride;
	}
}

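// Resamples captured audio to the target sample rate and channel count as signed 16-bit PCM
// using libswresample, growing *dst as needed. Returns the number of bytes produced, or 0 on failure.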
u32 ReaderCallback::doResample(u8 **dst, u32 &dstSampleRate, u32 &dstChannels, u32 *dstSize, u8 *src, const u32 &srcSampleRate, const u32 &srcChannels, const GUID &srcFormat, const u32 &srcSize, const u32 &srcBitsPerSample) {
#ifdef USE_FFMPEG
	AVSampleFormat srcAVFormat = getAVAudioFormatbyMFAudioFormat(srcFormat, srcBitsPerSample);
	int outSamplesCount = 0;
	if (resample_ctx == nullptr) {
		resample_ctx = swr_alloc_set_opts(nullptr,
			av_get_default_channel_layout(dstChannels),
			AV_SAMPLE_FMT_S16,
			dstSampleRate,
			av_get_default_channel_layout(srcChannels),
			srcAVFormat,
			srcSampleRate,
			0,
			nullptr);
		if (resample_ctx == nullptr || swr_init(resample_ctx) < 0) {
			swr_free(&resample_ctx);
			return 0;
		}
	}
	int srcSamplesCount = srcSize / srcChannels / av_get_bytes_per_sample(srcAVFormat); // Per channel.
	int outCount = srcSamplesCount * dstSampleRate / srcSampleRate + 256;
	unsigned int outSize = av_samples_get_buffer_size(nullptr, dstChannels, outCount, AV_SAMPLE_FMT_S16, 0);

	if (!*dst) {
		*dst = (u8 *)av_malloc(outSize);
		*dstSize = outSize;
	}
	if (!*dst)
		return 0;

	if (*dstSize < outSize)
		av_fast_malloc(dst, dstSize, outSize);

	outSamplesCount = swr_convert(resample_ctx, dst, outCount, (const uint8_t **)&src, srcSamplesCount);
	if (outSamplesCount < 0)
		return 0;
	return av_samples_get_buffer_size(nullptr, dstChannels, outSamplesCount, AV_SAMPLE_FMT_S16, 0);
#else
	return 0;
#endif
}

WindowsCaptureDevice::WindowsCaptureDevice(CAPTUREDEVIDE_TYPE _type) :
	type(_type),
	error(CAPTUREDEVIDE_ERROR_NO_ERROR),
	state(CAPTUREDEVIDE_STATE::UNINITIALIZED) {
	param = { 0 };
	deviceParam = { { 0 } };

	switch (type) {
	case CAPTUREDEVIDE_TYPE::VIDEO:
		targetMediaParam = defaultVideoParam;
		break;
	case CAPTUREDEVIDE_TYPE::Audio:
		targetMediaParam = defaultAudioParam;
		break;
	}

	std::thread t(&WindowsCaptureDevice::messageHandler, this);
	t.detach();
}

WindowsCaptureDevice::~WindowsCaptureDevice() {
#ifdef USE_FFMPEG
	switch (type) {
	case CAPTUREDEVIDE_TYPE::VIDEO:
		av_freep(&imageRGB);
		av_freep(&imageJpeg);
		break;
	case CAPTUREDEVIDE_TYPE::Audio:
		av_freep(&resampleBuf);
		break;
	}
#endif
}

void WindowsCaptureDevice::CheckDevices() {
	isDeviceChanged = true;
}

bool WindowsCaptureDevice::init() {
	HRESULT hr = S_OK;

	if (!RegisterCMPTMFApis()) {
		setError(CAPTUREDEVIDE_ERROR_INIT_FAILED, "Cannot register devices");
		return false;
	}
	std::unique_lock<std::mutex> lock(paramMutex);
	hr = enumDevices();
	lock.unlock();

	if (FAILED(hr)) {
		setError(CAPTUREDEVIDE_ERROR_INIT_FAILED, "Cannot enumerate devices");
		return false;
	}

	updateState(CAPTUREDEVIDE_STATE::STOPPED);
	return true;
}

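// (Re)creates the media source and source reader for the selected device, negotiates a native
// media type, and requests the first sample. Called on the device's message handler thread.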
bool WindowsCaptureDevice::start(void *startParam) {
	HRESULT hr = S_OK;
	IMFAttributes *pAttributes = nullptr;
	IMFMediaType *pType = nullptr;
	UINT32 selection = 0;
	UINT32 count = 0;

	// Release old sources first (if any).
	SafeRelease(&m_pSource);
	SafeRelease(&m_pReader);
	if (m_pCallback) {
		delete m_pCallback;
		m_pCallback = nullptr;
	}
	// Need to re-enumerate the list, because the old sources were released.
	std::vector<std::string> deviceList = getDeviceList(true);

	if (deviceList.size() < 1) {
		setError(CAPTUREDEVIDE_ERROR_START_FAILED, "No device found");
		return false;
	}

	m_pCallback = new ReaderCallback(this);

	std::string selectedDeviceName = type == CAPTUREDEVIDE_TYPE::VIDEO ? g_Config.sCameraDevice : g_Config.sMicDevice;

	switch (state) {
	case CAPTUREDEVIDE_STATE::STOPPED:
		for (auto &name : deviceList) {
			if (name == selectedDeviceName) {
				selection = count;
				break;
			}
			++count;
		}
		setSelction(selection);
		hr = param.ppDevices[param.selection]->ActivateObject(
			__uuidof(IMFMediaSource),
			(void **)&m_pSource);

		if (SUCCEEDED(hr))
			hr = MFCreateAttributes(&pAttributes, 2);

		// Use async mode.
		if (SUCCEEDED(hr))
			hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, m_pCallback);

		if (SUCCEEDED(hr))
			hr = pAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, TRUE);

		if (SUCCEEDED(hr)) {
			hr = CreateSourceReaderFromMediaSource(
				m_pSource,
				pAttributes,
				&m_pReader
			);
		}

		if (!m_pReader)
			hr = E_FAIL;

		if (SUCCEEDED(hr)) {
			switch (type) {
			case CAPTUREDEVIDE_TYPE::VIDEO: {
				if (startParam) {
					std::vector<int> *resolution = static_cast<std::vector<int> *>(startParam);
					targetMediaParam.width = resolution->at(0);
					targetMediaParam.height = resolution->at(1);
					delete resolution;
				}
#ifdef USE_FFMPEG

				av_freep(&imageRGB);
				av_freep(&imageJpeg);
				imageRGB = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_RGB24, targetMediaParam.width, targetMediaParam.height, 1));
				imgJpegSize = av_image_get_buffer_size(AV_PIX_FMT_YUVJ411P, targetMediaParam.width, targetMediaParam.height, 1);
				imageJpeg = (unsigned char *)av_malloc(imgJpegSize);
				av_image_fill_linesizes(imgRGBLineSizes, AV_PIX_FMT_RGB24, targetMediaParam.width);
#endif

				for (DWORD i = 0; ; i++) {
					hr = m_pReader->GetNativeMediaType(
						(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
						i,
						&pType
					);

					if (FAILED(hr)) { break; }

					hr = setDeviceParam(pType);

					if (SUCCEEDED(hr))
						break;
				}
				/*
				hr = m_pReader->GetNativeMediaType(
					(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
					(DWORD)0xFFFFFFFF, // MF_SOURCE_READER_CURRENT_TYPE_INDEX
					&pType
				);
				if (SUCCEEDED(hr))
					hr = setDeviceParam(pType);*/ // Not supported on Win7.

				// Request the first frame; in async mode, OnReadSample will be called when ReadSample completes.
				if (SUCCEEDED(hr)) {
					hr = m_pReader->ReadSample(
						(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
						0,
						nullptr,
						nullptr,
						nullptr,
						nullptr
					);
				}
				break;
			}

			case CAPTUREDEVIDE_TYPE::Audio: {
				if (startParam) {
					std::vector<u32> *micParam = static_cast<std::vector<u32> *>(startParam);
					targetMediaParam.sampleRate = micParam->at(0);
					targetMediaParam.channels = micParam->at(1);
					delete micParam;
				}

				for (DWORD i = 0; ; i++) {
					hr = m_pReader->GetNativeMediaType(
						(DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
						i,
						&pType
					);

					if (FAILED(hr)) { break; }

					hr = setDeviceParam(pType);

					if (SUCCEEDED(hr))
						break;
				}

				if (SUCCEEDED(hr)) {
					hr = m_pReader->ReadSample(
						(DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
						0,
						nullptr,
						nullptr,
						nullptr,
						nullptr
					);
				}
				break;
			}
			}
		}

		if (FAILED(hr)) {
			setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Cannot start");
			if (m_pSource)
				m_pSource->Shutdown();
			SafeRelease(&m_pSource);
			SafeRelease(&pAttributes);
			SafeRelease(&pType);
			SafeRelease(&m_pReader);
			return false;
		}

		SafeRelease(&pAttributes);
		SafeRelease(&pType);
		updateState(CAPTUREDEVIDE_STATE::STARTED);
		break;
	case CAPTUREDEVIDE_STATE::LOST:
		setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Device has been lost");
		return false;
	case CAPTUREDEVIDE_STATE::STARTED:
		setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Device has already started");
		return false;
	case CAPTUREDEVIDE_STATE::UNINITIALIZED:
		setError(CAPTUREDEVIDE_ERROR_START_FAILED, "Device is not initialized");
		return false;
	default:
		break;
	}
	return true;
}

bool WindowsCaptureDevice::stop() {
	if (state == CAPTUREDEVIDE_STATE::STOPPED)
		return true;
	if (m_pSource)
		m_pSource->Stop();

	updateState(CAPTUREDEVIDE_STATE::STOPPED);

	return true;
}

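// Returns the friendly names (UTF-8) of the available capture devices, re-enumerating them
// when the device list has changed or forceEnum is set.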
std::vector<std::string> WindowsCaptureDevice::getDeviceList(bool forceEnum, int *pActuallCount) {
	HRESULT hr = S_OK;
	UINT32 count = 0;
	LPWSTR pwstrName = nullptr;
	char *cstrName = nullptr;
	std::string strName;
	DWORD dwMinSize = 0;
	std::vector<std::string> deviceList;

	if (isDeviceChanged || forceEnum) {
		std::unique_lock<std::mutex> lock(paramMutex);
		for (DWORD i = 0; i < param.count; i++) {
			SafeRelease(&param.ppDevices[i]);
		}
		CoTaskMemFree(param.ppDevices); // Null pointer is okay.

		hr = enumDevices();

		lock.unlock();

		if (SUCCEEDED(hr))
			isDeviceChanged = false;
		else
			return deviceList;
	}

	for (; count < param.count; count++) {
		hr = param.ppDevices[count]->GetAllocatedString(
			MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
			&pwstrName,
			nullptr
		);

		if (SUCCEEDED(hr)) {
			// Get the required size first.
			dwMinSize = WideCharToMultiByte(CP_UTF8, NULL, pwstrName, -1, nullptr, 0, nullptr, FALSE);
			if (dwMinSize == 0)
				hr = E_FAIL;
		}
		if (SUCCEEDED(hr)) {
			cstrName = new char[dwMinSize];
			WideCharToMultiByte(CP_UTF8, NULL, pwstrName, -1, cstrName, dwMinSize, NULL, FALSE);
			strName = cstrName;
			delete[] cstrName;

			deviceList.push_back(strName);
		}

		CoTaskMemFree(pwstrName);

		if (FAILED(hr)) {
			setError(CAPTUREDEVIDE_ERROR_GETNAMES_FAILED, "Error occurred; got " + std::to_string((int)count) + " device names");
			if (pActuallCount)
				*pActuallCount = count;
			return deviceList;
		}
	}
	if (pActuallCount)
		*pActuallCount = count + 1;
	return deviceList;
}

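// Picks a media subtype supported by both the device and this code (falling back to forcing
// one of the known formats on the reader) and caches the device's format parameters.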
HRESULT WindowsCaptureDevice::setDeviceParam(IMFMediaType *pType) {
	HRESULT hr = S_OK;
	GUID subtype = { 0 };
	bool getFormat = false;

	switch (type) {
	case CAPTUREDEVIDE_TYPE::VIDEO:
		hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
		if (FAILED(hr))
			break;

		for (int i = 0; i < g_cVideoFormats; i++) {
			if (subtype == g_VideoFormats[i].MFVideoFormat) {
				deviceParam.videoFormat = subtype;
				getFormat = true;
				break;
			}
		}

		if (!getFormat) {
			for (int i = 0; i < g_cVideoFormats; i++) {
				hr = pType->SetGUID(MF_MT_SUBTYPE, g_VideoFormats[i].MFVideoFormat);
				if (FAILED(hr))
					continue;

				hr = m_pReader->SetCurrentMediaType(
					(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
					NULL,
					pType
				);

				if (SUCCEEDED(hr)) {
					deviceParam.videoFormat = g_VideoFormats[i].MFVideoFormat;
					getFormat = true;
					break;
				}
			}
		}
		if (SUCCEEDED(hr))
			hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &deviceParam.width, &deviceParam.height);

		if (SUCCEEDED(hr))
			hr = GetDefaultStride(pType, &deviceParam.default_stride);

		break;
	case CAPTUREDEVIDE_TYPE::Audio:
		hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
		if (FAILED(hr))
			break;

		for (int i = 0; i < g_cAudioFormats; i++) {
			if (subtype == g_AudioFormats[i].MFAudioFormat) {
				deviceParam.audioFormat = subtype;
				getFormat = true;
				break;
			}
		}

		if (!getFormat) {
			for (int i = 0; i < g_cAudioFormats; i++) {
				hr = pType->SetGUID(MF_MT_SUBTYPE, g_AudioFormats[i].MFAudioFormat);
				if (FAILED(hr))
					continue;

				hr = m_pReader->SetCurrentMediaType(
					(DWORD)MF_SOURCE_READER_FIRST_AUDIO_STREAM,
					NULL,
					pType
				);

				if (SUCCEEDED(hr)) {
					deviceParam.audioFormat = g_AudioFormats[i].MFAudioFormat;
					getFormat = true;
					break;
				}
			}
		}
		if (SUCCEEDED(hr))
			hr = pType->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &deviceParam.sampleRate);

		if (SUCCEEDED(hr))
			hr = pType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &deviceParam.channels);

		if (SUCCEEDED(hr))
			hr = pType->GetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, (UINT32 *)&deviceParam.bitsPerSample);

		break;
	}

	return hr;
}

void WindowsCaptureDevice::sendMessage(CAPTUREDEVIDE_MESSAGE message) {
	// Must be a unique lock.
	std::unique_lock<std::mutex> lock(mutex);
	messageQueue.push(message);
	cond.notify_one();
}

CAPTUREDEVIDE_MESSAGE WindowsCaptureDevice::getMessage() {
	// Must be a unique lock.
	std::unique_lock<std::mutex> lock(mutex);
	CAPTUREDEVIDE_MESSAGE message;
	cond.wait(lock, [this]() { return !messageQueue.empty(); });
	message = messageQueue.front();
	messageQueue.pop();

	return message;
}

void WindowsCaptureDevice::updateState(const CAPTUREDEVIDE_STATE &newState) {
	state = newState;
	if (isShutDown()) {
		std::unique_lock<std::mutex> guard(stateMutex_);
		stateCond_.notify_all();
	}
}

void WindowsCaptureDevice::waitShutDown() {
	sendMessage({ CAPTUREDEVIDE_COMMAND::SHUTDOWN, nullptr });

	std::unique_lock<std::mutex> guard(stateMutex_);
	while (!isShutDown()) {
		stateCond_.wait(guard);
	}
}

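// Worker thread: initializes COM and Media Foundation, then processes queued commands
// (INITIALIZE/START/STOP/UPDATE_STATE) until SHUTDOWN, releasing all resources on exit.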
void WindowsCaptureDevice::messageHandler() {
	CoInitializeEx(NULL, COINIT_MULTITHREADED);
	MFStartup(MF_VERSION);
	CAPTUREDEVIDE_MESSAGE message;

	if (type == CAPTUREDEVIDE_TYPE::VIDEO) {
		SetCurrentThreadName("Camera");
	} else if (type == CAPTUREDEVIDE_TYPE::Audio) {
		SetCurrentThreadName("Microphone");
	}

	while ((message = getMessage()).command != CAPTUREDEVIDE_COMMAND::SHUTDOWN) {
		switch (message.command) {
		case CAPTUREDEVIDE_COMMAND::INITIALIZE:
			init();
			break;
		case CAPTUREDEVIDE_COMMAND::START:
			start(message.opacity);
			break;
		case CAPTUREDEVIDE_COMMAND::STOP:
			stop();
			break;
		case CAPTUREDEVIDE_COMMAND::UPDATE_STATE:
			updateState((*(CAPTUREDEVIDE_STATE *)message.opacity));
			break;
		}
	}

	if (state != CAPTUREDEVIDE_STATE::STOPPED)
		stop();

	std::lock_guard<std::mutex> lock(sdMutex);
	SafeRelease(&m_pSource);
	SafeRelease(&m_pReader);
	delete m_pCallback;
	unRegisterCMPTMFApis();

	std::unique_lock<std::mutex> lock2(paramMutex);
	for (DWORD i = 0; i < param.count; i++) {
		SafeRelease(&param.ppDevices[i]);
	}
	CoTaskMemFree(param.ppDevices); // Null pointer is okay.
	lock2.unlock();

	MFShutdown();
	CoUninitialize();

	updateState(CAPTUREDEVIDE_STATE::SHUTDOWN);
}

HRESULT WindowsCaptureDevice::enumDevices() {
	HRESULT hr = S_OK;
	IMFAttributes *pAttributes = nullptr;

	hr = MFCreateAttributes(&pAttributes, 1);
	if (SUCCEEDED(hr)) {
		switch (type) {
		case CAPTUREDEVIDE_TYPE::VIDEO:
			hr = pAttributes->SetGUID(
				MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
				MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID
			);

			break;
		case CAPTUREDEVIDE_TYPE::Audio:
			hr = pAttributes->SetGUID(
				MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
				MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_AUDCAP_GUID
			);

			break;
		default:
			setError(CAPTUREDEVIDE_ERROR_UNKNOWN_TYPE, "Unknown device type");
			return E_FAIL;
		}
	}
	if (SUCCEEDED(hr)) {
		hr = EnumDeviceSources(pAttributes, &param.ppDevices, &param.count);
	}

	SafeRelease(&pAttributes);
	return hr;
}

bool WindowsCaptureDevice::needResample() {
	return deviceParam.sampleRate != targetMediaParam.sampleRate ||
		deviceParam.channels != targetMediaParam.channels ||
		deviceParam.audioFormat != targetMediaParam.audioFormat ||
		deviceParam.bitsPerSample != targetMediaParam.bitsPerSample;
}

//-----------------------------------------------------------------------------
// GetDefaultStride
//
// Gets the default stride for a video frame, assuming no extra padding bytes.
//
//-----------------------------------------------------------------------------

HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride)
{
	LONG lStride = 0;

	// Try to get the default stride from the media type.
	HRESULT hr = pType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32 *)&lStride);
	if (FAILED(hr))
	{
		// Attribute not set. Try to calculate the default stride.
		GUID subtype = GUID_NULL;

		UINT32 width = 0;
		UINT32 height = 0;

		// Get the subtype and the image size.
		hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
		if (SUCCEEDED(hr))
		{
			hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
		}
		if (SUCCEEDED(hr))
		{
			hr = GetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride);
		}

		// Set the attribute for later reference.
		if (SUCCEEDED(hr))
		{
			(void)pType->SetUINT32(MF_MT_DEFAULT_STRIDE, UINT32(lStride));
		}
	}

	if (SUCCEEDED(hr))
	{
		*plStride = lStride;
	}
	return hr;
}