diff --git a/src/ChargeVideo.hpp b/src/ChargeVideo.hpp
index 4c5c518..4065895 100644
--- a/src/ChargeVideo.hpp
+++ b/src/ChargeVideo.hpp
@@ -4,7 +4,7 @@
 #include
 #include
 #include
-#include <queue>
+#include <map>
 #include
 #include
 
@@ -42,23 +42,17 @@ class Time {
 public:
   static void AdvanceTime();
   static float DeltaTime;
-  static float AverageDeltaTime;
-  static uint16_t ADTMaxSample;
 
 private:
   static Timeline time;
-  static float rollingSum;
-  static uint16_t ADTIndex, videoIDCounter;
-  static bool ADTFirstCycle;
+  static uint16_t videoIDCounter;
   static std::unordered_map<uint16_t, std::function<void()>> videoPlayMethods;
-  static std::vector<float> deltaAverage;
   static std::vector<uint16_t> toUnhook;
 
   // Specific for internal controls
   static uint16_t hookVideo(std::function<void()> videoPlay);
   static void unhookVideo(uint16_t ID);
-  friend class Video; // friend allows other classes to use private methods of a
-                      // class without having to make it public for all
+  friend class Video;
 };
 
 class Video {
@@ -100,17 +94,18 @@ private:
   // Time specific
   int8_t videoStreamNum = -1, audioStreamNum = -1;
   uint32_t currentFrameNumber = 0;
-  float timeSink = 0.0f, frameTime = 0.0f;
+  double timeBase = 0, clock = 0;
 
   // Audio
   ChargeAudio::Engine *audioEngine;
   ChargeAudio::SoundContainer bufferedAudio;
 
+  // Channel data
   _ffmpeg::AVChannelLayout outLayout;
   _ffmpeg::AVSampleFormat sampleFormat = _ffmpeg::AV_SAMPLE_FMT_FLT;
 
   // Buffering
-  std::queue<Image2D> frameBuffer;
+  std::map<double, Image2D> frameBuffer;
   uint32_t bufferMaxFrames = 0;
 
   // SAR / Sizing
@@ -118,8 +113,10 @@ private:
 
   // Frame handling
   bool frameSet = false;
+
+  // Methods
   void continueVideo();
-  Containers::Array<char> loadNextFrame();
+  std::pair<double, Containers::Array<char>> loadNextFrame();
   inline void frameDebug(_ffmpeg::AVFrame *frame);
   inline void frameSetScaleSAR(_ffmpeg::AVFrame *frame);
   inline void frameConvert(_ffmpeg::AVFrame *sourceFrame,
diff --git a/src/Time.cpp b/src/Time.cpp
index 9dd4e93..c9f2f84 100644
--- a/src/Time.cpp
+++ b/src/Time.cpp
@@ -5,37 +5,21 @@
 using namespace ChargeVideo;
 
 // ================== Video Timing ==================
-float Time::DeltaTime = 0.0f, Time::AverageDeltaTime = 0.0f,
-    Time::rollingSum = 0.0f;
-uint16_t Time::ADTMaxSample = 90, Time::ADTIndex = 0, Time::videoIDCounter = 0;
-bool Time::ADTFirstCycle = true;
-std::vector<float> Time::deltaAverage;
+float Time::DeltaTime = 0.0f;
+uint16_t Time::videoIDCounter = 0;
+
 std::unordered_map<uint16_t, std::function<void()>> Time::videoPlayMethods;
 std::vector<uint16_t> Time::toUnhook;
 Timeline Time::time{};
 
 void Time::AdvanceTime() {
-  if (deltaAverage.size() != ADTMaxSample) {
-    deltaAverage.resize(ADTMaxSample, 0.0f);
-  }
-
   if (time.currentFrameTime() == 0.0f) {
     time.start();
   }
 
   // We are giving average delta for frame timing stablisation
   DeltaTime = time.currentFrameDuration();
 
-  rollingSum += DeltaTime - deltaAverage[ADTIndex];
-  deltaAverage[ADTIndex] = DeltaTime;
-
-  // First cycle would be ruined if we use MaxSample since not all the slots
-  // would be filled yet
-  if (ADTFirstCycle && ADTIndex == ADTMaxSample - 1) {
-    ADTFirstCycle = false;
-  }
-  AverageDeltaTime = rollingSum / (ADTFirstCycle ? ADTIndex + 1 : ADTMaxSample);
-  ADTIndex = (ADTIndex + 1) % ADTMaxSample;
 
   for (auto processVideo : videoPlayMethods) {
     processVideo.second();
diff --git a/src/Video.cpp b/src/Video.cpp
index 5ac5eef..5b191f0 100644
--- a/src/Video.cpp
+++ b/src/Video.cpp
@@ -3,15 +3,20 @@
 #include "ChargeVideo.hpp"
 
 #include
+#include
 #include
 #include
 #include
+#include
 #include
 #include
 #include
+#include
 
 #include
 #include
+#include
+#include
 
 using namespace ChargeVideo;
 using namespace _ffmpeg;
@@ -61,6 +66,7 @@ Video::Video(std::string path, ChargeAudio::Engine *engine,
   }
 
   // Actual stream
+  // Video Codec
   vCodec = avcodec_find_decoder(videoStream->codecpar->codec_id);
   vCodecCtx = avcodec_alloc_context3(vCodec);
   avcodec_parameters_to_context(vCodecCtx, videoStream->codecpar);
@@ -71,6 +77,7 @@ Video::Video(std::string path, ChargeAudio::Engine *engine,
                 NULL); // open2 is such a stupid name
 
   // Some videos do not have audio streams
+  // Audio Codec
   if (audioStreamNum != -1 && audioEngine) {
     aCodec = avcodec_find_decoder(audioStream->codecpar->codec_id);
     aCodecCtx = avcodec_alloc_context3(aCodec);
@@ -94,9 +101,8 @@ Video::Video(std::string path, ChargeAudio::Engine *engine,
     bufferedAudio = audioEngine->CreateSound(10);
   }
 
-  // Timing stuff
-  frameTime = 1 / av_q2d(videoStream->avg_frame_rate);
-  bufferMaxFrames = (1 / frameTime) * BufferLenghtInSeconds;
+  bufferMaxFrames = av_q2d(videoStream->avg_frame_rate) * BufferLenghtInSeconds;
+  timeBase = av_q2d(videoStream->time_base);
 }
 
 Video::~Video() {
@@ -108,7 +114,7 @@ Video::~Video() {
 }
 
 // ================== Public Video Controls ==================
-void Video::AdvanceToNextFrame() { loadTexture(loadNextFrame()); }
+void Video::AdvanceToNextFrame() { loadTexture(loadNextFrame().second); }
 
 void Video::Play() {
   if (ID != 0) {
@@ -143,6 +149,7 @@ void Video::StartLooping() { isVideoLooping = true; }
 // ================== Private Video Controls ==================
 void Video::continueVideo() {
   // Looping handling
+  /* Shelved for now
   if (currentFrameNumber >= videoStream->nb_frames - 2) {
     if (!isVideoLooping) {
       isVideoOver = true;
@@ -150,33 +157,36 @@ void Video::continueVideo() {
       return; // We remove what we are returning TO
     }
     restartVideo();
-  }
+  }*/
 
   // Timing
-  float variableFrameTime = frameTime - Time::AverageDeltaTime;
-  if (timeSink < variableFrameTime) {
-    timeSink += Time::DeltaTime;
-
-    if (!isVideoOver && frameBuffer.size() < bufferMaxFrames) {
-      frameBuffer.push(loadImage(loadNextFrame()));
-    }
-    return;
-  }
-  // This allows the lag to not accumillate
-  timeSink -= variableFrameTime;
-
-  if (frameBuffer.size() == 0) {
-    frameBuffer.push(loadImage(loadNextFrame()));
+  // Audio Synced
+  if (audioStreamNum != -1) {
+    clock = (double)bufferedAudio->GetPlayedSampleCount() /
+            audioEngine->GetSampleRate();
+  } else {
+    clock += Time::DeltaTime;
   }
-  loadTexture(frameBuffer.front());
 
-  if (bufferedAudio->GetState() == ChargeAudio::Sound::SoundState::Idle)
+  // Load frame
+  auto nextFrame = frameBuffer.begin();
+  if (frameBuffer.size() > 0 && nextFrame->first <= clock) {
+    loadTexture(nextFrame->second);
+    frameBuffer.erase(nextFrame);
+  }
+
+  if (frameBuffer.size() < bufferMaxFrames) {
+    auto frameData = loadNextFrame();
+    frameBuffer.insert_or_assign(frameData.first,
+                                 loadImage(std::move(frameData.second)));
+  }
+
+  if (bufferedAudio->GetState() != ChargeAudio::Sound::SoundState::Playing)
     bufferedAudio->Play();
-  frameBuffer.pop();
 }
 
 // ======================== HELPERS ========================
-Containers::Array<char> Video::loadNextFrame() {
+std::pair<double, Containers::Array<char>> Video::loadNextFrame() {
   AVFrame *frame = av_frame_alloc(), *convertedFrame = av_frame_alloc(),
           *audioFrame = av_frame_alloc(),
           *convertedAudioFrame = av_frame_alloc();
@@ -210,18 +220,15 @@ Containers::Array<char> Video::loadNextFrame() {
       av_packet_unref(packet);
 
       if (frame->format != -1) {
-        // FrameDebug(frame);
         frameSetScaleSAR(frame);
         frameFlip(frame);
-        frameConvert(frame, convertedFrame);
-        // FrameDebug(convertedFrame);
        break;
       }
     }
     av_packet_unref(packet);
   }
 
-  // You cannot use strlen(data) it does not work
+
  size_t dataSize = av_image_get_buffer_size(
      static_cast<AVPixelFormat>(convertedFrame->format), Dimensions.x(),
      Dimensions.y(), 3);
@@ -229,6 +236,8 @@ Containers::Array<char> Video::loadNextFrame() {
   std::memcpy(data.data(), convertedFrame->data[0], dataSize);
   currentFrameNumber++;
 
+  double ptsInSeconds = timeBase * frame->pts;
+
   // Cleanup time cus this is a C library yay (ironic)
   av_frame_free(
       &convertedFrame); // Data[0] from here needs to be owned by someone else
@@ -237,7 +246,7 @@ Containers::Array<char> Video::loadNextFrame() {
   av_frame_free(&audioFrame);
   av_packet_free(&packet);
 
-  return data;
+  return {ptsInSeconds, std::move(data)};
 }
 
 Image2D Video::loadImage(Containers::Array<char> data) {
@@ -347,6 +356,6 @@ void Video::restartVideo() {
 }
 
 void Video::dumpAndRefillBuffer() {
-  std::queue<Image2D>().swap(frameBuffer);
-  loadTexture(loadNextFrame());
+  std::map<double, Image2D>().swap(frameBuffer);
+  loadTexture(loadNextFrame().second);
 }
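
A note on the scheduling model this patch moves to: decoded frames are buffered in a std::map keyed by their presentation timestamp converted to seconds (timeBase * frame->pts), and continueVideo() presents the first buffered frame whose key has fallen behind a clock that is driven by the audio device when an audio stream exists (played samples divided by the sample rate) and by accumulated Time::DeltaTime otherwise. The snippet below is a minimal standalone sketch of that idea only, not part of the patch; FrameScheduler and FramePixels are hypothetical names, not ChargeVideo API.

#include <map>
#include <optional>
#include <utility>

// Hypothetical stand-in for a decoded frame's pixel data.
struct FramePixels {};

// Buffers frames by presentation time (in seconds) and releases the one that
// is due once the supplied clock has passed its timestamp.
class FrameScheduler {
public:
  void buffer(double ptsSeconds, FramePixels pixels) {
    frames_.insert_or_assign(ptsSeconds, std::move(pixels));
  }

  // clockSeconds comes either from the audio device
  // (playedSamples / sampleRate) or from summed frame delta times.
  std::optional<FramePixels> frameDueAt(double clockSeconds) {
    auto next = frames_.begin();
    if (next == frames_.end() || next->first > clockSeconds)
      return std::nullopt; // nothing buffered, or the next frame is not due yet
    FramePixels due = std::move(next->second);
    frames_.erase(next);
    return due;
  }

private:
  std::map<double, FramePixels> frames_; // ordered by presentation time
};

Because std::map keeps its keys ordered, the earliest pending frame is always at begin(), which is why the patch only has to compare frameBuffer.begin()->first against the clock on each tick.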