CoCalc provides the best real-time collaborative environment for Jupyter Notebooks, LaTeX documents, and SageMath, scalable from individual users to large groups and classes!
Path: blob/master/Core/HW/MediaEngine.cpp
Views: 1401
// Copyright (c) 2012- PPSSPP Project.12// This program is free software: you can redistribute it and/or modify3// it under the terms of the GNU General Public License as published by4// the Free Software Foundation, version 2.0 or later versions.56// This program is distributed in the hope that it will be useful,7// but WITHOUT ANY WARRANTY; without even the implied warranty of8// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the9// GNU General Public License 2.0 for more details.1011// A copy of the GPL 2.0 should have been included with the program.12// If not, see http://www.gnu.org/licenses/1314// Official git repository and contact information can be found at15// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.1617#include "Common/Serialize/SerializeFuncs.h"18#include "Common/Math/CrossSIMD.h"19#include "Core/Config.h"20#include "Core/Core.h"21#include "Core/Debugger/MemBlockInfo.h"22#include "Core/HW/MediaEngine.h"23#include "Core/MemMap.h"24#include "Core/MIPS/MIPS.h"25#include "Core/Reporting.h"26#include "GPU/GPUState.h" // Used by TextureDecoder.h when templates get instanced27#include "GPU/Common/TextureDecoder.h"28#include "GPU/GPUInterface.h"29#include "Core/HW/SimpleAudioDec.h"3031#include <algorithm>3233#ifdef _M_SSE34#include <emmintrin.h>35#endif3637#if PPSSPP_ARCH(ARM_NEON)38#if defined(_MSC_VER) && PPSSPP_ARCH(ARM64)39#include <arm64_neon.h>40#else41#include <arm_neon.h>42#endif43#endif4445#ifdef USE_FFMPEG4647extern "C" {4849#include "libavcodec/avcodec.h"50#include "libavformat/avformat.h"51#include "libavutil/imgutils.h"52#include "libswscale/swscale.h"5354}55#endif // USE_FFMPEG5657#ifdef USE_FFMPEG5859#include "Core/FFMPEGCompat.h"6061static AVPixelFormat getSwsFormat(int pspFormat)62{63switch (pspFormat)64{65case GE_CMODE_16BIT_BGR5650:66return AV_PIX_FMT_BGR565LE;67case GE_CMODE_16BIT_ABGR5551:68return AV_PIX_FMT_BGR555LE;69case GE_CMODE_16BIT_ABGR4444:70return AV_PIX_FMT_BGR444LE;71case 
GE_CMODE_32BIT_ABGR8888:72return AV_PIX_FMT_RGBA;73default:74ERROR_LOG(Log::ME, "Unknown pixel format");75return (AVPixelFormat)0;76}77}7879void ffmpeg_logger(void *, int level, const char *format, va_list va_args) {80// We're still called even if the level doesn't match.81if (level > av_log_get_level())82return;8384char tmp[1024];85vsnprintf(tmp, sizeof(tmp), format, va_args);86tmp[sizeof(tmp) - 1] = '\0';8788// Strip off any trailing newline.89size_t len = strlen(tmp);90if (tmp[len - 1] == '\n')91tmp[len - 1] = '\0';9293if (!strcmp(tmp, "GHA Phase shifting")) {94Reporting::ReportMessage("Atrac3+: GHA phase shifting");95}9697// Let's color the log line appropriately.98if (level <= AV_LOG_PANIC) {99ERROR_LOG(Log::ME, "FF: %s", tmp);100} else if (level >= AV_LOG_VERBOSE) {101DEBUG_LOG(Log::ME, "FF: %s", tmp);102} else {103INFO_LOG(Log::ME, "FF: %s", tmp);104}105}106107bool InitFFmpeg() {108#ifdef _DEBUG109av_log_set_level(AV_LOG_VERBOSE);110#else111av_log_set_level(AV_LOG_WARNING);112#endif113av_log_set_callback(&ffmpeg_logger);114115return true;116}117#endif118119static int getPixelFormatBytes(int pspFormat)120{121switch (pspFormat)122{123case GE_CMODE_16BIT_BGR5650:124case GE_CMODE_16BIT_ABGR5551:125case GE_CMODE_16BIT_ABGR4444:126return 2;127case GE_CMODE_32BIT_ABGR8888:128return 4;129130default:131ERROR_LOG(Log::ME, "Unknown pixel format");132return 4;133}134}135136MediaEngine::MediaEngine() {137m_bufSize = 0x2000;138139m_mpegheaderSize = sizeof(m_mpegheader);140m_audioType = PSP_CODEC_AT3PLUS; // in movie, we use only AT3+ audio141}142143MediaEngine::~MediaEngine() {144closeMedia();145}146147void MediaEngine::closeMedia() {148closeContext();149delete m_pdata;150delete m_demux;151m_pdata = nullptr;152m_demux = nullptr;153AudioClose(&m_audioContext);154m_isVideoEnd = false;155}156157void MediaEngine::DoState(PointerWrap &p) {158auto s = p.Section("MediaEngine", 1, 7);159if (!s)160return;161162Do(p, m_videoStream);163Do(p, m_audioStream);164165DoArray(p, 
m_mpegheader, sizeof(m_mpegheader));166if (s >= 4) {167Do(p, m_mpegheaderSize);168} else {169m_mpegheaderSize = sizeof(m_mpegheader);170}171if (s >= 5) {172Do(p, m_mpegheaderReadPos);173} else {174m_mpegheaderReadPos = m_mpegheaderSize;175}176if (s >= 6) {177Do(p, m_expectedVideoStreams);178} else {179m_expectedVideoStreams = 0;180}181182Do(p, m_ringbuffersize);183184u32 hasloadStream = m_pdata != nullptr;185Do(p, hasloadStream);186if (hasloadStream && p.mode == p.MODE_READ)187reloadStream();188#ifdef USE_FFMPEG189u32 hasopencontext = m_pFormatCtx != nullptr;190#else191u32 hasopencontext = false;192#endif193Do(p, hasopencontext);194if (m_pdata)195m_pdata->DoState(p);196if (m_demux)197m_demux->DoState(p);198199Do(p, m_videopts);200if (s >= 7) {201Do(p, m_lastPts);202} else {203m_lastPts = m_videopts;204}205Do(p, m_audiopts);206207if (s >= 2) {208Do(p, m_firstTimeStamp);209Do(p, m_lastTimeStamp);210}211212if (hasopencontext && p.mode == p.MODE_READ) {213openContext(true);214}215216Do(p, m_isVideoEnd);217bool noAudioDataRemoved;218Do(p, noAudioDataRemoved);219if (s >= 3) {220Do(p, m_audioType);221} else {222m_audioType = PSP_CODEC_AT3PLUS;223}224}225226int MediaEngine::MpegReadbuffer(void *opaque, uint8_t *buf, int buf_size) {227MediaEngine *mpeg = (MediaEngine *)opaque;228229int size = buf_size;230if (mpeg->m_mpegheaderReadPos < mpeg->m_mpegheaderSize) {231size = std::min(buf_size, mpeg->m_mpegheaderSize - mpeg->m_mpegheaderReadPos);232memcpy(buf, mpeg->m_mpegheader + mpeg->m_mpegheaderReadPos, size);233mpeg->m_mpegheaderReadPos += size;234} else {235size = mpeg->m_pdata->pop_front(buf, buf_size);236if (size > 0)237mpeg->m_decodingsize = size;238}239return size;240}241242bool MediaEngine::SetupStreams() {243#ifdef USE_FFMPEG244const u32 magic = *(u32_le *)&m_mpegheader[0];245if (magic != PSMF_MAGIC) {246WARN_LOG_REPORT(Log::ME, "Could not setup streams, bad magic: %08x", magic);247return false;248}249int numStreams = *(u16_be *)&m_mpegheader[0x80];250if (numStreams 
<= 0 || numStreams > 8) {251// Looks crazy. Let's bail out and let FFmpeg handle it.252WARN_LOG_REPORT(Log::ME, "Could not setup streams, unexpected stream count: %d", numStreams);253return false;254}255256// Looking good. Let's add those streams.257int videoStreamNum = -1;258for (int i = 0; i < numStreams; i++) {259const u8 *const currentStreamAddr = m_mpegheader + 0x82 + i * 16;260int streamId = currentStreamAddr[0];261262// We only set video streams. We demux the audio stream separately.263if ((streamId & PSMF_VIDEO_STREAM_ID) == PSMF_VIDEO_STREAM_ID) {264++videoStreamNum;265addVideoStream(videoStreamNum, streamId);266}267}268// Add the streams to meet the expectation.269for (int i = videoStreamNum + 1; i < m_expectedVideoStreams; i++) {270addVideoStream(i);271}272#endif273274return true;275}276277bool MediaEngine::openContext(bool keepReadPos) {278#ifdef USE_FFMPEG279InitFFmpeg();280281if (m_pFormatCtx || !m_pdata)282return false;283if (!keepReadPos) {284m_mpegheaderReadPos = 0;285}286m_decodingsize = 0;287288m_bufSize = std::max(m_bufSize, m_mpegheaderSize);289u8 *tempbuf = (u8*)av_malloc(m_bufSize);290291m_pFormatCtx = avformat_alloc_context();292m_pIOContext = avio_alloc_context(tempbuf, m_bufSize, 0, (void*)this, &MpegReadbuffer, nullptr, nullptr);293m_pFormatCtx->pb = m_pIOContext;294295// Open video file296AVDictionary *open_opt = nullptr;297av_dict_set_int(&open_opt, "probesize", m_mpegheaderSize, 0);298if (avformat_open_input((AVFormatContext**)&m_pFormatCtx, nullptr, nullptr, &open_opt) != 0) {299av_dict_free(&open_opt);300return false;301}302av_dict_free(&open_opt);303304bool usedFFMPEGFindStreamInfo = false;305if (!SetupStreams() || PSP_CoreParameter().compat.flags().UseFFMPEGFindStreamInfo) {306// Fallback to old behavior. 
Reads too much and corrupts when game doesn't read fast enough.307// SetupStreams sometimes work for newer FFmpeg 3.1+ now, but sometimes framerate is missing.308WARN_LOG_REPORT_ONCE(setupStreams, Log::ME, "Failed to read valid video stream data from header");309if (avformat_find_stream_info(m_pFormatCtx, nullptr) < 0) {310closeContext();311return false;312}313usedFFMPEGFindStreamInfo = true;314}315316if (m_videoStream >= (int)m_pFormatCtx->nb_streams) {317WARN_LOG_REPORT(Log::ME, "Bad video stream %d", m_videoStream);318m_videoStream = -1;319}320321if (m_videoStream == -1) {322// Find the first video stream323for (int i = 0; i < (int)m_pFormatCtx->nb_streams; i++) {324const AVStream *s = m_pFormatCtx->streams[i];325#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)326AVMediaType type = s->codecpar->codec_type;327#else328AVMediaType type = s->codec->codec_type;329#endif330if (type == AVMEDIA_TYPE_VIDEO) {331m_videoStream = i;332break;333}334}335if (m_videoStream == -1)336return false;337}338339if (!setVideoStream(m_videoStream, true))340return false;341342setVideoDim();343m_audioContext = CreateAudioDecoder((PSPAudioType)m_audioType);344m_isVideoEnd = false;345346if (PSP_CoreParameter().compat.flags().UseFFMPEGFindStreamInfo && usedFFMPEGFindStreamInfo) {347m_mpegheaderReadPos++;348av_seek_frame(m_pFormatCtx, m_videoStream, 0, 0);349}350#endif // USE_FFMPEG351return true;352}353354void MediaEngine::closeContext()355{356#ifdef USE_FFMPEG357if (m_buffer)358av_free(m_buffer);359if (m_pFrameRGB)360av_frame_free(&m_pFrameRGB);361if (m_pFrame)362av_frame_free(&m_pFrame);363if (m_pIOContext && m_pIOContext->buffer)364av_free(m_pIOContext->buffer);365if (m_pIOContext)366av_free(m_pIOContext);367for (auto it : m_pCodecCtxs) {368#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)369avcodec_free_context(&it.second);370#else371avcodec_close(it.second);372#endif373}374m_pCodecCtxs.clear();375// These are streams allocated from avformat_new_stream.376for (auto it 
: m_codecsToClose) {377avcodec_close(it);378}379m_codecsToClose.clear();380if (m_pFormatCtx)381avformat_close_input(&m_pFormatCtx);382sws_freeContext(m_sws_ctx);383m_sws_ctx = nullptr;384m_pIOContext = nullptr;385#endif386m_buffer = nullptr;387}388389bool MediaEngine::loadStream(const u8 *buffer, int readSize, int RingbufferSize)390{391closeMedia();392393m_videopts = 0;394m_lastPts = -1;395m_audiopts = 0;396m_ringbuffersize = RingbufferSize;397m_pdata = new BufferQueue(RingbufferSize + 2048);398m_pdata->push(buffer, readSize);399m_firstTimeStamp = getMpegTimeStamp(buffer + PSMF_FIRST_TIMESTAMP_OFFSET);400m_lastTimeStamp = getMpegTimeStamp(buffer + PSMF_LAST_TIMESTAMP_OFFSET);401int mpegoffset = (int)(*(s32_be*)(buffer + 8));402m_demux = new MpegDemux(RingbufferSize + 2048, mpegoffset);403m_demux->addStreamData(buffer, readSize);404return true;405}406407bool MediaEngine::reloadStream()408{409return loadStream(m_mpegheader, 2048, m_ringbuffersize);410}411412bool MediaEngine::addVideoStream(int streamNum, int streamId) {413#ifdef USE_FFMPEG414if (m_pFormatCtx) {415// no need to add an existing stream.416if ((u32)streamNum < m_pFormatCtx->nb_streams)417return true;418AVCodec *h264_codec = avcodec_find_decoder(AV_CODEC_ID_H264);419if (!h264_codec)420return false;421AVStream *stream = avformat_new_stream(m_pFormatCtx, h264_codec);422if (stream) {423// Reference ISO/IEC 13818-1.424if (streamId == -1)425streamId = PSMF_VIDEO_STREAM_ID | streamNum;426427stream->id = 0x00000100 | streamId;428#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)429stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;430stream->codecpar->codec_id = AV_CODEC_ID_H264;431#else432stream->request_probe = 0;433stream->need_parsing = AVSTREAM_PARSE_FULL;434#endif435// We could set the width here, but we don't need to.436if (streamNum >= m_expectedVideoStreams) {437++m_expectedVideoStreams;438}439440#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(59, 16, 100)441AVCodec *codec = 
avcodec_find_decoder(stream->codecpar->codec_id);442AVCodecContext *codecCtx = avcodec_alloc_context3(codec);443#else444AVCodecContext *codecCtx = stream->codec;445#endif446m_codecsToClose.push_back(codecCtx);447return true;448}449}450#endif451if (streamNum >= m_expectedVideoStreams) {452++m_expectedVideoStreams;453}454return false;455}456457int MediaEngine::addStreamData(const u8 *buffer, int addSize) {458int size = addSize;459if (size > 0 && m_pdata) {460if (!m_pdata->push(buffer, size))461size = 0;462if (m_demux) {463m_demux->addStreamData(buffer, addSize);464}465#ifdef USE_FFMPEG466if (!m_pFormatCtx && m_pdata->getQueueSize() >= 2048) {467m_mpegheaderSize = m_pdata->get_front(m_mpegheader, sizeof(m_mpegheader));468int streamOffset = (int)(*(s32_be *)(m_mpegheader + 8));469if (streamOffset <= m_mpegheaderSize) {470m_mpegheaderSize = streamOffset;471m_pdata->pop_front(0, m_mpegheaderSize);472openContext();473}474}475#endif // USE_FFMPEG476477// We added data, so... not the end anymore?478m_isVideoEnd = false;479}480return size;481}482483bool MediaEngine::seekTo(s64 timestamp, int videoPixelMode) {484if (timestamp <= 0) {485return true;486}487488// Just doing it the not so great way to be sure audio is in sync.489int timeout = 1000;490while (getVideoTimeStamp() < timestamp - 3003) {491if (getAudioTimeStamp() < getVideoTimeStamp() - 4180 * 2) {492getNextAudioFrame(NULL, NULL, NULL);493}494if (!stepVideo(videoPixelMode, true)) {495return false;496}497if (--timeout <= 0) {498return true;499}500}501502while (getAudioTimeStamp() < getVideoTimeStamp() - 4180 * 2) {503if (getNextAudioFrame(NULL, NULL, NULL) == 0) {504return false;505}506if (--timeout <= 0) {507return true;508}509}510511return true;512}513514bool MediaEngine::setVideoStream(int streamNum, bool force) {515if (m_videoStream == streamNum && !force) {516// Yay, nothing to do.517return true;518}519520#ifdef USE_FFMPEG521if (m_pFormatCtx && m_pCodecCtxs.find(streamNum) == m_pCodecCtxs.end()) {522// Get a 
pointer to the codec context for the video stream523if ((u32)streamNum >= m_pFormatCtx->nb_streams) {524return false;525}526527AVStream *stream = m_pFormatCtx->streams[streamNum];528#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)529AVCodec *pCodec = avcodec_find_decoder(stream->codecpar->codec_id);530if (!pCodec) {531WARN_LOG_REPORT(Log::ME, "Could not find decoder for %d", (int)stream->codecpar->codec_id);532return false;533}534AVCodecContext *m_pCodecCtx = avcodec_alloc_context3(pCodec);535int paramResult = avcodec_parameters_to_context(m_pCodecCtx, stream->codecpar);536if (paramResult < 0) {537WARN_LOG_REPORT(Log::ME, "Failed to prepare context parameters: %08x", paramResult);538return false;539}540#else541AVCodecContext *m_pCodecCtx = stream->codec;542// Find the decoder for the video stream543AVCodec *pCodec = avcodec_find_decoder(m_pCodecCtx->codec_id);544if (pCodec == nullptr) {545return false;546}547#endif548549m_pCodecCtx->flags |= AV_CODEC_FLAG_OUTPUT_CORRUPT | AV_CODEC_FLAG_LOW_DELAY;550551AVDictionary *opt = nullptr;552// Allow ffmpeg to use any number of threads it wants. 
Without this, it doesn't use threads.553av_dict_set(&opt, "threads", "0", 0);554int openResult = avcodec_open2(m_pCodecCtx, pCodec, &opt);555av_dict_free(&opt);556if (openResult < 0) {557return false;558}559560m_pCodecCtxs[streamNum] = m_pCodecCtx;561}562#endif563m_videoStream = streamNum;564565return true;566}567568bool MediaEngine::setVideoDim(int width, int height)569{570#ifdef USE_FFMPEG571auto codecIter = m_pCodecCtxs.find(m_videoStream);572if (codecIter == m_pCodecCtxs.end())573return false;574AVCodecContext *m_pCodecCtx = codecIter->second;575576if (width == 0 && height == 0)577{578// use the orignal video size579m_desWidth = m_pCodecCtx->width;580m_desHeight = m_pCodecCtx->height;581}582else583{584m_desWidth = width;585m_desHeight = height;586}587588// Allocate video frame589if (!m_pFrame) {590m_pFrame = av_frame_alloc();591}592593sws_freeContext(m_sws_ctx);594m_sws_ctx = nullptr;595m_sws_fmt = -1;596597if (m_desWidth == 0 || m_desHeight == 0) {598// Can't setup SWS yet, so stop for now.599return false;600}601602updateSwsFormat(GE_CMODE_32BIT_ABGR8888);603604// Allocate video frame for RGB24605m_pFrameRGB = av_frame_alloc();606#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)607int numBytes = av_image_get_buffer_size((AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight, 1);608#else609int numBytes = avpicture_get_size((AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight);610#endif611m_buffer = (u8*)av_malloc(numBytes * sizeof(uint8_t));612613// Assign appropriate parts of buffer to image planes in m_pFrameRGB614#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)615av_image_fill_arrays(m_pFrameRGB->data, m_pFrameRGB->linesize, m_buffer, (AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight, 1);616#else617avpicture_fill((AVPicture *)m_pFrameRGB, m_buffer, (AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight);618#endif619#endif // USE_FFMPEG620return true;621}622623void MediaEngine::updateSwsFormat(int videoPixelMode) {624#ifdef USE_FFMPEG625auto codecIter = 
m_pCodecCtxs.find(m_videoStream);626AVCodecContext *m_pCodecCtx = codecIter == m_pCodecCtxs.end() ? 0 : codecIter->second;627628AVPixelFormat swsDesired = getSwsFormat(videoPixelMode);629if (swsDesired != m_sws_fmt && m_pCodecCtx != 0) {630m_sws_fmt = swsDesired;631m_sws_ctx = sws_getCachedContext632(633m_sws_ctx,634m_pCodecCtx->width,635m_pCodecCtx->height,636m_pCodecCtx->pix_fmt,637m_desWidth,638m_desHeight,639(AVPixelFormat)m_sws_fmt,640SWS_BILINEAR,641NULL,642NULL,643NULL644);645646int *inv_coefficients;647int *coefficients;648int srcRange, dstRange;649int brightness, contrast, saturation;650651if (sws_getColorspaceDetails(m_sws_ctx, &inv_coefficients, &srcRange, &coefficients, &dstRange, &brightness, &contrast, &saturation) != -1) {652srcRange = 0;653dstRange = 0;654sws_setColorspaceDetails(m_sws_ctx, inv_coefficients, srcRange, coefficients, dstRange, brightness, contrast, saturation);655}656}657#endif658}659660bool MediaEngine::stepVideo(int videoPixelMode, bool skipFrame) {661#ifdef USE_FFMPEG662auto codecIter = m_pCodecCtxs.find(m_videoStream);663AVCodecContext *m_pCodecCtx = codecIter == m_pCodecCtxs.end() ? 
0 : codecIter->second;664665if (!m_pFormatCtx)666return false;667if (!m_pCodecCtx)668return false;669if (!m_pFrame)670return false;671672AVPacket packet;673av_init_packet(&packet);674int frameFinished;675bool bGetFrame = false;676while (!bGetFrame) {677bool dataEnd = av_read_frame(m_pFormatCtx, &packet) < 0;678// Even if we've read all frames, some may have been re-ordered frames at the end.679// Still need to decode those, so keep calling avcodec_decode_video2() / avcodec_receive_frame().680if (dataEnd || packet.stream_index == m_videoStream) {681// avcodec_decode_video2() / avcodec_send_packet() gives us the re-ordered frames with a NULL packet.682#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)683if (dataEnd)684av_packet_unref(&packet);685#else686if (dataEnd)687av_free_packet(&packet);688#endif689690#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 48, 101)691if (packet.size != 0)692avcodec_send_packet(m_pCodecCtx, &packet);693int result = avcodec_receive_frame(m_pCodecCtx, m_pFrame);694if (result == 0) {695result = m_pFrame->pkt_size;696frameFinished = 1;697} else if (result == AVERROR(EAGAIN)) {698result = 0;699frameFinished = 0;700} else {701frameFinished = 0;702}703#else704int result = avcodec_decode_video2(m_pCodecCtx, m_pFrame, &frameFinished, &packet);705#endif706if (frameFinished) {707if (!m_pFrameRGB) {708setVideoDim();709}710if (m_pFrameRGB && !skipFrame) {711updateSwsFormat(videoPixelMode);712// TODO: Technically we could set this to frameWidth instead of m_desWidth for better perf.713// Update the linesize for the new format too. 
We started with the largest size, so it should fit.714m_pFrameRGB->linesize[0] = getPixelFormatBytes(videoPixelMode) * m_desWidth;715716sws_scale(m_sws_ctx, m_pFrame->data, m_pFrame->linesize, 0,717m_pCodecCtx->height, m_pFrameRGB->data, m_pFrameRGB->linesize);718}719720#if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(55, 58, 100)721int64_t bestPts = m_pFrame->best_effort_timestamp;722int64_t ptsDuration = m_pFrame->pkt_duration;723#else724int64_t bestPts = av_frame_get_best_effort_timestamp(m_pFrame);725int64_t ptsDuration = av_frame_get_pkt_duration(m_pFrame);726#endif727if (ptsDuration == 0) {728if (m_lastPts == bestPts - m_firstTimeStamp || bestPts == AV_NOPTS_VALUE) {729// TODO: Assuming 29.97 if missing.730m_videopts += 3003;731} else {732m_videopts = bestPts - m_firstTimeStamp;733m_lastPts = m_videopts;734}735} else if (bestPts != AV_NOPTS_VALUE) {736m_videopts = bestPts + ptsDuration - m_firstTimeStamp;737m_lastPts = m_videopts;738} else {739m_videopts += ptsDuration;740m_lastPts = m_videopts;741}742bGetFrame = true;743}744if (result <= 0 && dataEnd) {745// Sometimes, m_readSize is less than m_streamSize at the end, but not by much.746// This is kinda a hack, but the ringbuffer would have to be prematurely empty too.747m_isVideoEnd = !bGetFrame && (m_pdata->getQueueSize() == 0);748if (m_isVideoEnd)749m_decodingsize = 0;750break;751}752}753#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)754av_packet_unref(&packet);755#else756av_free_packet(&packet);757#endif758}759return bGetFrame;760#else761// If video engine is not available, just add to the timestamp at least.762m_videopts += 3003;763return true;764#endif // USE_FFMPEG765}766767// Helpers that null out alpha (which seems to be the case on the PSP.)768// Some games depend on this, for example Sword Art Online (doesn't clear A's from buffer.)769inline void writeVideoLineRGBA(void *destp, const void *srcp, int width) {770// TODO: Use SSE/NEON, investigate why AV_PIX_FMT_RGB0 does not work.771u32_le *dest 
= (u32_le *)destp;772const u32_le *src = (u32_le *)srcp;773774int count = width;775776#if PPSSPP_ARCH(SSE2)777__m128i mask = _mm_set1_epi32(0x00FFFFFF);778while (count >= 8) {779__m128i pixels1 = _mm_and_si128(_mm_loadu_si128((const __m128i *)src), mask);780__m128i pixels2 = _mm_and_si128(_mm_loadu_si128((const __m128i *)src + 1), mask);781_mm_storeu_si128((__m128i *)dest, pixels1);782_mm_storeu_si128((__m128i *)dest + 1, pixels2);783src += 8;784dest += 8;785count -= 8;786}787#elif PPSSPP_ARCH(ARM_NEON)788uint32x4_t mask = vdupq_n_u32(0x00FFFFFF);789while (count >= 8) {790uint32x4_t pixels1 = vandq_u32(vld1q_u32(src), mask);791uint32x4_t pixels2 = vandq_u32(vld1q_u32(src + 4), mask);792vst1q_u32(dest, pixels1);793vst1q_u32(dest + 4, pixels2);794src += 8;795dest += 8;796count -= 8;797}798#endif799const u32 mask32 = 0x00FFFFFF;800DO_NOT_VECTORIZE_LOOP801while (count--) {802*dest++ = *src++ & mask32;803}804}805806inline void writeVideoLineABGR5650(void *destp, const void *srcp, int width) {807memcpy(destp, srcp, width * sizeof(u16));808}809810inline void writeVideoLineABGR5551(void *destp, const void *srcp, int width) {811// TODO: Use SSE/NEON.812u16_le *dest = (u16_le *)destp;813const u16_le *src = (u16_le *)srcp;814815const u16 mask = 0x7FFF;816for (int i = 0; i < width; ++i) {817dest[i] = src[i] & mask;818}819}820821inline void writeVideoLineABGR4444(void *destp, const void *srcp, int width) {822// TODO: Use SSE/NEON.823u16_le *dest = (u16_le *)destp;824const u16_le *src = (u16_le *)srcp;825826const u16 mask = 0x0FFF;827for (int i = 0; i < width; ++i) {828dest[i] = src[i] & mask;829}830}831832int MediaEngine::writeVideoImage(u32 bufferPtr, int frameWidth, int videoPixelMode) {833int videoLineSize = 0;834switch (videoPixelMode) {835case GE_CMODE_32BIT_ABGR8888:836videoLineSize = frameWidth * sizeof(u32);837break;838case GE_CMODE_16BIT_BGR5650:839case GE_CMODE_16BIT_ABGR5551:840case GE_CMODE_16BIT_ABGR4444:841videoLineSize = frameWidth * 
sizeof(u16);842break;843}844845int videoImageSize = videoLineSize * m_desHeight;846847if (!Memory::IsValidRange(bufferPtr, videoImageSize) || frameWidth > 2048) {848// Clearly invalid values. Let's just not.849ERROR_LOG_REPORT(Log::ME, "Ignoring invalid video decode address %08x/%x", bufferPtr, frameWidth);850return 0;851}852853u8 *buffer = Memory::GetPointerWriteUnchecked(bufferPtr);854855#ifdef USE_FFMPEG856if (!m_pFrame || !m_pFrameRGB)857return 0;858859// lock the image size860int height = m_desHeight;861int width = m_desWidth;862u8 *imgbuf = buffer;863const u8 *data = m_pFrameRGB->data[0];864865bool swizzle = Memory::IsVRAMAddress(bufferPtr) && (bufferPtr & 0x00200000) == 0x00200000;866if (swizzle) {867imgbuf = new u8[videoImageSize];868}869870switch (videoPixelMode) {871case GE_CMODE_32BIT_ABGR8888:872for (int y = 0; y < height; y++) {873writeVideoLineRGBA(imgbuf + videoLineSize * y, data, width);874data += width * sizeof(u32);875}876break;877878case GE_CMODE_16BIT_BGR5650:879for (int y = 0; y < height; y++) {880writeVideoLineABGR5650(imgbuf + videoLineSize * y, data, width);881data += width * sizeof(u16);882}883break;884885case GE_CMODE_16BIT_ABGR5551:886for (int y = 0; y < height; y++) {887writeVideoLineABGR5551(imgbuf + videoLineSize * y, data, width);888data += width * sizeof(u16);889}890break;891892case GE_CMODE_16BIT_ABGR4444:893for (int y = 0; y < height; y++) {894writeVideoLineABGR4444(imgbuf + videoLineSize * y, data, width);895data += width * sizeof(u16);896}897break;898899default:900ERROR_LOG_REPORT(Log::ME, "Unsupported video pixel format %d", videoPixelMode);901break;902}903904if (swizzle) {905const int bxc = videoLineSize / 16;906int byc = (height + 7) / 8;907if (byc == 0)908byc = 1;909910DoSwizzleTex16((const u32 *)imgbuf, buffer, bxc, byc, videoLineSize);911delete [] imgbuf;912}913914NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, videoImageSize, "VideoDecode");915916return videoImageSize;917#endif // USE_FFMPEG918return 0;919}920921int 
MediaEngine::writeVideoImageWithRange(u32 bufferPtr, int frameWidth, int videoPixelMode,922int xpos, int ypos, int width, int height) {923int videoLineSize = 0;924switch (videoPixelMode) {925case GE_CMODE_32BIT_ABGR8888:926videoLineSize = frameWidth * sizeof(u32);927break;928case GE_CMODE_16BIT_BGR5650:929case GE_CMODE_16BIT_ABGR5551:930case GE_CMODE_16BIT_ABGR4444:931videoLineSize = frameWidth * sizeof(u16);932break;933}934int videoImageSize = videoLineSize * height;935936if (!Memory::IsValidRange(bufferPtr, videoImageSize) || frameWidth > 2048) {937// Clearly invalid values. Let's just not.938ERROR_LOG_REPORT(Log::ME, "Ignoring invalid video decode address %08x/%x", bufferPtr, frameWidth);939return 0;940}941942u8 *buffer = Memory::GetPointerWriteUnchecked(bufferPtr);943944#ifdef USE_FFMPEG945if (!m_pFrame || !m_pFrameRGB)946return 0;947948// lock the image size949u8 *imgbuf = buffer;950const u8 *data = m_pFrameRGB->data[0];951952bool swizzle = Memory::IsVRAMAddress(bufferPtr) && (bufferPtr & 0x00200000) == 0x00200000;953if (swizzle) {954imgbuf = new u8[videoImageSize];955}956957if (width > m_desWidth - xpos)958width = m_desWidth - xpos;959if (height > m_desHeight - ypos)960height = m_desHeight - ypos;961962switch (videoPixelMode) {963case GE_CMODE_32BIT_ABGR8888:964data += (ypos * m_desWidth + xpos) * sizeof(u32);965for (int y = 0; y < height; y++) {966writeVideoLineRGBA(imgbuf, data, width);967data += m_desWidth * sizeof(u32);968imgbuf += videoLineSize;969}970break;971972case GE_CMODE_16BIT_BGR5650:973data += (ypos * m_desWidth + xpos) * sizeof(u16);974for (int y = 0; y < height; y++) {975writeVideoLineABGR5650(imgbuf, data, width);976data += m_desWidth * sizeof(u16);977imgbuf += videoLineSize;978}979break;980981case GE_CMODE_16BIT_ABGR5551:982data += (ypos * m_desWidth + xpos) * sizeof(u16);983for (int y = 0; y < height; y++) {984writeVideoLineABGR5551(imgbuf, data, width);985data += m_desWidth * sizeof(u16);986imgbuf += videoLineSize;987}988break;989990case 
GE_CMODE_16BIT_ABGR4444:991data += (ypos * m_desWidth + xpos) * sizeof(u16);992for (int y = 0; y < height; y++) {993writeVideoLineABGR4444(imgbuf, data, width);994data += m_desWidth * sizeof(u16);995imgbuf += videoLineSize;996}997break;998999default:1000ERROR_LOG_REPORT(Log::ME, "Unsupported video pixel format %d", videoPixelMode);1001break;1002}10031004if (swizzle) {1005WARN_LOG_REPORT_ONCE(vidswizzle, Log::ME, "Swizzling Video with range");10061007const int bxc = videoLineSize / 16;1008int byc = (height + 7) / 8;1009if (byc == 0)1010byc = 1;10111012DoSwizzleTex16((const u32 *)imgbuf, buffer, bxc, byc, videoLineSize);1013delete [] imgbuf;1014}1015NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, videoImageSize, "VideoDecodeRange");10161017return videoImageSize;1018#endif // USE_FFMPEG1019return 0;1020}10211022u8 *MediaEngine::getFrameImage() {1023#ifdef USE_FFMPEG1024return m_pFrameRGB->data[0];1025#else1026return nullptr;1027#endif1028}10291030int MediaEngine::getRemainSize() {1031if (!m_pdata)1032return 0;1033return std::max(m_pdata->getRemainSize() - m_decodingsize - 2048, 0);1034}10351036int MediaEngine::getAudioRemainSize() {1037if (!m_demux) {1038// No audio, so it can't be full, return video instead.1039return getRemainSize();1040}10411042return m_demux->getRemainSize();1043}10441045int MediaEngine::getNextAudioFrame(u8 **buf, int *headerCode1, int *headerCode2) {1046// When getting a frame, increment pts1047m_audiopts += 4180;10481049// Demux now (rather than on add data) so that we select the right stream.1050m_demux->demux(m_audioStream);10511052s64 pts = 0;1053int result = m_demux->getNextAudioFrame(buf, headerCode1, headerCode2, &pts);1054if (pts != 0) {1055// m_audiopts is supposed to be after the returned frame.1056m_audiopts = pts - m_firstTimeStamp + 4180;1057}1058return result;1059}10601061int MediaEngine::getAudioSamples(u32 bufferPtr) {1062int16_t *buffer = (int16_t *)Memory::GetPointerWriteRange(bufferPtr, 8192);1063if (buffer == nullptr) 
{1064ERROR_LOG_REPORT(Log::ME, "Ignoring bad audio decode address %08x during video playback", bufferPtr);1065}1066if (!m_demux) {1067return 0;1068}10691070u8 *audioFrame = nullptr;1071int headerCode1, headerCode2;1072int frameSize = getNextAudioFrame(&audioFrame, &headerCode1, &headerCode2);1073if (frameSize == 0) {1074return 0;1075}1076int outSamples = 0;10771078if (m_audioContext != nullptr) {1079if (headerCode1 == 0x24) {1080// This means mono audio - tell the decoder to expect it before the first frame.1081// Note that it will always send us back stereo audio.1082m_audioContext->SetChannels(1);1083}10841085int inbytesConsumed = 0;1086if (!m_audioContext->Decode(audioFrame, frameSize, &inbytesConsumed, 2, buffer, &outSamples)) {1087ERROR_LOG(Log::ME, "Audio (%s) decode failed during video playback", GetCodecName(m_audioType));1088}1089int outBytes = outSamples * sizeof(int16_t) * 2;10901091NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, outBytes, "VideoDecodeAudio");1092}10931094return 0x2000;1095}10961097bool MediaEngine::IsNoAudioData() {1098if (!m_demux) {1099return true;1100}11011102// Let's double check. Here should be a safe enough place to demux.1103m_demux->demux(m_audioStream);1104return !m_demux->hasNextAudioFrame(NULL, NULL, NULL, NULL);1105}11061107bool MediaEngine::IsActuallyPlayingAudio() {1108return getAudioTimeStamp() >= 0;1109}11101111s64 MediaEngine::getVideoTimeStamp() {1112return m_videopts;1113}11141115s64 MediaEngine::getAudioTimeStamp() {1116return m_demux ? m_audiopts - 4180 : -1;1117}11181119s64 MediaEngine::getLastTimeStamp() {1120if (!m_pdata)1121return 0;1122return m_lastTimeStamp - m_firstTimeStamp;1123}112411251126