Added PTS, removed time sink, removed average delta time, cleanup of codebase
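The gist of the change: each decoded frame now carries its presentation timestamp (PTS) converted to seconds via the stream's time base, frames are buffered in a std::map keyed by that timestamp, and presentation is driven by a clock derived from how much audio has actually played (or by accumulated delta time when the file has no audio stream). The old time-sink / rolling-average-delta scheme is gone. The snippet below is only a self-contained illustration of that model, not code from this repository; the time base, sample counts, and helper names are made up.

```cpp
// Illustrative sketch of the timing model this commit introduces: buffer frames
// by PTS-in-seconds, advance a clock from played audio samples, and present
// every buffered frame whose timestamp has been reached. Names and numbers
// here are hypothetical; the real code lives in the Time and Video classes.
#include <cstdint>
#include <initializer_list>
#include <iostream>
#include <map>
#include <string>

// PTS -> seconds, the same conversion as av_q2d(stream->time_base) * frame->pts.
double ptsToSeconds(std::int64_t pts, int timeBaseNum, int timeBaseDen) {
  return pts * (static_cast<double>(timeBaseNum) / timeBaseDen);
}

int main() {
  // A ~29.97 fps stream with an assumed time base of 1/30000.
  const int tbNum = 1, tbDen = 30000;
  std::map<double, std::string> frameBuffer; // sorted by presentation time
  for (std::int64_t pts : {0, 1001, 2002, 3003})
    frameBuffer.emplace(ptsToSeconds(pts, tbNum, tbDen),
                        "frame with pts " + std::to_string(pts));

  // Clock source: played samples / sample rate when an audio stream exists;
  // otherwise the real code falls back to clock += DeltaTime each tick.
  const std::int64_t playedSamples = 1600;
  const int sampleRate = 48000;
  const double clock = static_cast<double>(playedSamples) / sampleRate;

  // Present (and drop) every frame whose timestamp is not ahead of the clock.
  for (auto it = frameBuffer.begin();
       it != frameBuffer.end() && it->first <= clock;) {
    std::cout << "present " << it->second << " at " << it->first << " s\n";
    it = frameBuffer.erase(it);
  }
}
```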
@@ -4,7 +4,7 @@
 #include <cstdint>
 #include <cstdlib>
 #include <functional>
-#include <queue>
+#include <map>
 #include <vector>
 
 #include <Corrade/Containers/Array.h>
@@ -42,23 +42,17 @@ class Time {
 public:
   static void AdvanceTime();
   static float DeltaTime;
-  static float AverageDeltaTime;
-  static uint16_t ADTMaxSample;
 
 private:
   static Timeline time;
-  static float rollingSum;
-  static uint16_t ADTIndex, videoIDCounter;
-  static bool ADTFirstCycle;
+  static uint16_t videoIDCounter;
   static std::unordered_map<uint16_t, std::function<void()>> videoPlayMethods;
-  static std::vector<float> deltaAverage;
   static std::vector<uint16_t> toUnhook;
 
   // Specific for internal controls
   static uint16_t hookVideo(std::function<void()> videoPlay);
   static void unhookVideo(uint16_t ID);
-  friend class Video; // friend allows other classes to use private methods of a
-                      // class without having to make it public for all
+  friend class Video;
 };
 
 class Video {
@@ -100,17 +94,18 @@ private:
   // Time specific
   int8_t videoStreamNum = -1, audioStreamNum = -1;
   uint32_t currentFrameNumber = 0;
-  float timeSink = 0.0f, frameTime = 0.0f;
+  double timeBase = 0, clock = 0;
 
   // Audio
   ChargeAudio::Engine *audioEngine;
   ChargeAudio::SoundContainer bufferedAudio;
 
+  // Channel data
   _ffmpeg::AVChannelLayout outLayout;
   _ffmpeg::AVSampleFormat sampleFormat = _ffmpeg::AV_SAMPLE_FMT_FLT;
 
   // Buffering
-  std::queue<Image2D> frameBuffer;
+  std::map<double, Image2D> frameBuffer;
   uint32_t bufferMaxFrames = 0;
 
   // SAR / Sizing
@@ -118,8 +113,10 @@ private:
 
   // Frame handling
   bool frameSet = false;
+
+  // Methods
   void continueVideo();
-  Containers::Array<char> loadNextFrame();
+  std::pair<double, Containers::Array<char>> loadNextFrame();
   inline void frameDebug(_ffmpeg::AVFrame *frame);
   inline void frameSetScaleSAR(_ffmpeg::AVFrame *frame);
   inline void frameConvert(_ffmpeg::AVFrame *sourceFrame,

src/Time.cpp
@@ -5,37 +5,21 @@
 using namespace ChargeVideo;
 
 // ================== Video Timing ==================
-float Time::DeltaTime = 0.0f, Time::AverageDeltaTime = 0.0f,
-      Time::rollingSum = 0.0f;
-uint16_t Time::ADTMaxSample = 90, Time::ADTIndex = 0, Time::videoIDCounter = 0;
-bool Time::ADTFirstCycle = true;
-std::vector<float> Time::deltaAverage;
+float Time::DeltaTime = 0.0f;
+uint16_t Time::videoIDCounter = 0;
 std::unordered_map<uint16_t, std::function<void()>> Time::videoPlayMethods;
 std::vector<uint16_t> Time::toUnhook;
 
 Timeline Time::time{};
 
 void Time::AdvanceTime() {
-  if (deltaAverage.size() != ADTMaxSample) {
-    deltaAverage.resize(ADTMaxSample, 0.0f);
-  }
-
   if (time.currentFrameTime() == 0.0f) {
     time.start();
   }
 
   // We are giving average delta for frame timing stablisation
   DeltaTime = time.currentFrameDuration();
-  rollingSum += DeltaTime - deltaAverage[ADTIndex];
-  deltaAverage[ADTIndex] = DeltaTime;
-
-  // First cycle would be ruined if we use MaxSample since not all the slots
-  // would be filled yet
-  if (ADTFirstCycle && ADTIndex == ADTMaxSample - 1) {
-    ADTFirstCycle = false;
-  }
-  AverageDeltaTime = rollingSum / (ADTFirstCycle ? ADTIndex + 1 : ADTMaxSample);
-  ADTIndex = (ADTIndex + 1) % ADTMaxSample;
 
   for (auto processVideo : videoPlayMethods) {
     processVideo.second();
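With the rolling-average bookkeeping gone, Time::AdvanceTime reduces to two steps: refresh DeltaTime from the Timeline and invoke every hooked video's play callback. The sketch below only illustrates that hook/dispatch shape and is not code from this repository: AdvanceTime is given the frame duration as a parameter to stand in for Timeline::currentFrameDuration(), the ID scheme is a guess, and the deferred toUnhook pass is omitted.

```cpp
// Hypothetical, trimmed-down version of the Time dispatcher after this commit.
#include <cstdint>
#include <functional>
#include <iostream>
#include <unordered_map>
#include <utility>

struct TimeSketch {
  static float DeltaTime;
  static uint16_t videoIDCounter;
  static std::unordered_map<uint16_t, std::function<void()>> videoPlayMethods;

  // Register a per-frame callback; 0 is kept free so it can mean "not hooked".
  static uint16_t hookVideo(std::function<void()> videoPlay) {
    uint16_t id = ++videoIDCounter;
    videoPlayMethods.emplace(id, std::move(videoPlay));
    return id;
  }
  static void unhookVideo(uint16_t id) { videoPlayMethods.erase(id); }

  // One tick: take the frame duration as-is (no averaging) and drive every
  // hooked video once.
  static void AdvanceTime(float frameDuration) {
    DeltaTime = frameDuration;
    for (auto &entry : videoPlayMethods)
      entry.second();
  }
};

float TimeSketch::DeltaTime = 0.0f;
uint16_t TimeSketch::videoIDCounter = 0;
std::unordered_map<uint16_t, std::function<void()>> TimeSketch::videoPlayMethods;

int main() {
  uint16_t id = TimeSketch::hookVideo([] { std::cout << "advance video\n"; });
  TimeSketch::AdvanceTime(1.0f / 60.0f); // prints once per hooked video
  TimeSketch::unhookVideo(id);
}
```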
@@ -3,15 +3,20 @@
 #include "ChargeVideo.hpp"
 
 #include <Charge/ChargeAudio.hpp>
+#include <Corrade/Containers/Pair.h>
 #include <Corrade/Utility/Debug.h>
 #include <Corrade/Utility/Utility.h>
 
 #include <Magnum/GL/TextureFormat.h>
+#include <Magnum/Image.h>
 #include <Magnum/Math/Functions.h>
 #include <Magnum/PixelFormat.h>
 #include <libavutil/channel_layout.h>
+#include <libavutil/rational.h>
 #include <libavutil/samplefmt.h>
 #include <libswresample/swresample.h>
+#include <map>
+#include <utility>
 
 using namespace ChargeVideo;
 using namespace _ffmpeg;
@@ -61,6 +66,7 @@ Video::Video(std::string path, ChargeAudio::Engine *engine,
   }
 
   // Actual stream
+  // Video Codec
   vCodec = avcodec_find_decoder(videoStream->codecpar->codec_id);
   vCodecCtx = avcodec_alloc_context3(vCodec);
   avcodec_parameters_to_context(vCodecCtx, videoStream->codecpar);
@@ -71,6 +77,7 @@ Video::Video(std::string path, ChargeAudio::Engine *engine,
                  NULL); // open2 is such a stupid name
 
   // Some videos do not have audio streams
+  // Audio Codec
   if (audioStreamNum != -1 && audioEngine) {
     aCodec = avcodec_find_decoder(audioStream->codecpar->codec_id);
     aCodecCtx = avcodec_alloc_context3(aCodec);
@@ -94,9 +101,8 @@ Video::Video(std::string path, ChargeAudio::Engine *engine,
     bufferedAudio = audioEngine->CreateSound(10);
   }
 
-  // Timing stuff
-  frameTime = 1 / av_q2d(videoStream->avg_frame_rate);
-  bufferMaxFrames = (1 / frameTime) * BufferLenghtInSeconds;
+  bufferMaxFrames = av_q2d(videoStream->avg_frame_rate) * BufferLenghtInSeconds;
+  timeBase = av_q2d(videoStream->time_base);
 }
 
 Video::~Video() {
@@ -108,7 +114,7 @@ Video::~Video() {
 }
 
 // ================== Public Video Controls ==================
-void Video::AdvanceToNextFrame() { loadTexture(loadNextFrame()); }
+void Video::AdvanceToNextFrame() { loadTexture(loadNextFrame().second); }
 
 void Video::Play() {
   if (ID != 0) {
@@ -143,6 +149,7 @@ void Video::StartLooping() { isVideoLooping = true; }
 // ================== Private Video Controls ==================
 void Video::continueVideo() {
   // Looping handling
+  /* Shelved for now
   if (currentFrameNumber >= videoStream->nb_frames - 2) {
     if (!isVideoLooping) {
       isVideoOver = true;
@@ -150,33 +157,36 @@ void Video::continueVideo() {
       return; // We remove what we are returning TO
     }
     restartVideo();
-  }
+  }*/
 
   // Timing
-  float variableFrameTime = frameTime - Time::AverageDeltaTime;
-  if (timeSink < variableFrameTime) {
-    timeSink += Time::DeltaTime;
-
-    if (!isVideoOver && frameBuffer.size() < bufferMaxFrames) {
-      frameBuffer.push(loadImage(loadNextFrame()));
-    }
-    return;
-  }
-  // This allows the lag to not accumillate
-  timeSink -= variableFrameTime;
-
-  if (frameBuffer.size() == 0) {
-    frameBuffer.push(loadImage(loadNextFrame()));
+  // Audio Synced
+  if (audioStreamNum != -1) {
+    clock = (double)bufferedAudio->GetPlayedSampleCount() /
+            audioEngine->GetSampleRate();
+  } else {
+    clock += Time::DeltaTime;
   }
 
-  loadTexture(frameBuffer.front());
-  if (bufferedAudio->GetState() == ChargeAudio::Sound::SoundState::Idle)
+  // Load frame
+  auto nextFrame = frameBuffer.begin();
+  if (frameBuffer.size() > 0 && nextFrame->first <= clock) {
+    loadTexture(nextFrame->second);
+    frameBuffer.erase(nextFrame);
+  }
+
+  if (frameBuffer.size() < bufferMaxFrames) {
+    auto frameData = loadNextFrame();
+    frameBuffer.insert_or_assign(frameData.first,
+                                 loadImage(std::move(frameData.second)));
+  }
+
+  if (bufferedAudio->GetState() != ChargeAudio::Sound::SoundState::Playing)
     bufferedAudio->Play();
-  frameBuffer.pop();
 }
 
 // ======================== HELPERS ========================
-Containers::Array<char> Video::loadNextFrame() {
+std::pair<double, Containers::Array<char>> Video::loadNextFrame() {
   AVFrame *frame = av_frame_alloc(), *convertedFrame = av_frame_alloc(),
           *audioFrame = av_frame_alloc(),
           *convertedAudioFrame = av_frame_alloc();
@@ -210,18 +220,15 @@ Containers::Array<char> Video::loadNextFrame() {
       av_packet_unref(packet);
 
       if (frame->format != -1) {
-        // FrameDebug(frame);
        frameSetScaleSAR(frame);
        frameFlip(frame);
 
        frameConvert(frame, convertedFrame);
-        // FrameDebug(convertedFrame);
        break;
      }
    }
    av_packet_unref(packet);
  }
-  // You cannot use strlen(data) it does not work
  size_t dataSize = av_image_get_buffer_size(
      static_cast<AVPixelFormat>(convertedFrame->format), Dimensions.x(),
      Dimensions.y(), 3);
@@ -229,6 +236,8 @@ Containers::Array<char> Video::loadNextFrame() {
   std::memcpy(data.data(), convertedFrame->data[0], dataSize);
   currentFrameNumber++;
 
+  double ptsInSeconds = timeBase * frame->pts;
+
   // Cleanup time cus this is a C library yay (ironic)
   av_frame_free(
       &convertedFrame); // Data[0] from here needs to be owned by someone else
@@ -237,7 +246,7 @@ Containers::Array<char> Video::loadNextFrame() {
   av_frame_free(&audioFrame);
   av_packet_free(&packet);
 
-  return data;
+  return {ptsInSeconds, std::move(data)};
 }
 
 Image2D Video::loadImage(Containers::Array<char> data) {
@@ -347,6 +356,6 @@ void Video::restartVideo() {
 }
 
 void Video::dumpAndRefillBuffer() {
-  std::queue<Image2D>().swap(frameBuffer);
-  loadTexture(loadNextFrame());
+  std::map<double, Image2D>().swap(frameBuffer);
+  loadTexture(loadNextFrame().second);
 }