// WebCoreDecompressionSession.h
#pragma once
#if USE(VIDEOTOOLBOX)
#include <CoreMedia/CMTime.h>
#include <functional>
#include <wtf/Lock.h>
#include <wtf/MediaTime.h>
#include <wtf/OSObjectPtr.h>
#include <wtf/Ref.h>
#include <wtf/RetainPtr.h>
#include <wtf/ThreadSafeRefCounted.h>
// Forward declarations of CoreMedia / CoreVideo / VideoToolbox opaque types so
// this header does not have to include the full framework headers (only
// <CoreMedia/CMTime.h> is pulled in above). These must stay byte-compatible
// with the SDK's own typedefs.
typedef CFTypeRef CMBufferRef;
typedef const struct __CFArray * CFArrayRef;
typedef struct opaqueCMBufferQueue *CMBufferQueueRef;
typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
typedef struct OpaqueCMTimebase* CMTimebaseRef;
typedef signed long CMItemCount;
typedef struct __CVBuffer *CVPixelBufferRef;
typedef struct __CVBuffer *CVImageBufferRef;
typedef UInt32 VTDecodeInfoFlags;
typedef struct OpaqueVTDecompressionSession* VTDecompressionSessionRef;
namespace WebCore {
// Decodes compressed video samples through a VideoToolbox decompression
// session and exposes the decoded frames as CVPixelBuffers. Samples are
// enqueued via enqueueSample() and retrieved by presentation time with
// imageForTime(). Decoding work presumably runs on the private dispatch
// queues declared below — confirm against the .cpp, which is not visible here.
class WebCoreDecompressionSession : public ThreadSafeRefCounted<WebCoreDecompressionSession> {
public:
    // Factory functions selecting the output mode (see Mode below).
    static Ref<WebCoreDecompressionSession> createOpenGL() { return adoptRef(*new WebCoreDecompressionSession(OpenGL)); }
    static Ref<WebCoreDecompressionSession> createRGB() { return adoptRef(*new WebCoreDecompressionSession(RGB)); }

    void invalidate();
    bool isInvalidated() const { return m_invalidated; }

    // Queues a sample for decode. When displaying is false the frame is
    // decoded but not destined for display (flag is threaded through
    // decodeSample/enqueueDecodedSample/handleDecompressionOutput).
    void enqueueSample(CMSampleBufferRef, bool displaying = true);
    bool isReadyForMoreMediaData() const;
    void requestMediaDataWhenReady(std::function<void()>);
    void stopRequestingMediaData();
    void notifyWhenHasAvailableVideoFrame(std::function<void()>);

    // Synchronously decodes one sample and returns the resulting pixel buffer.
    RetainPtr<CVPixelBufferRef> decodeSampleSync(CMSampleBufferRef);

    void setTimebase(CMTimebaseRef);
    CMTimebaseRef timebase() const { return m_timebase.get(); }

    // How imageForTime() matches a frame against the requested media time.
    enum ImageForTimeFlags { ExactTime, AllowEarlier, AllowLater };
    RetainPtr<CVPixelBufferRef> imageForTime(const MediaTime&, ImageForTimeFlags = ExactTime);
    void flush();

    // Frame-statistics accessors; const — they only read the counters below.
    unsigned long totalVideoFrames() const { return m_totalVideoFrames; }
    unsigned long droppedVideoFrames() const { return m_droppedVideoFrames; }
    unsigned long corruptedVideoFrames() const { return m_corruptedVideoFrames; }
    MediaTime totalFrameDelay() const { return m_totalFrameDelay; }

private:
    enum Mode {
        OpenGL,
        RGB,
    };
    WebCoreDecompressionSession(Mode);

    void ensureDecompressionSessionForSample(CMSampleBufferRef);
    void decodeSample(CMSampleBufferRef, bool displaying);
    void enqueueDecodedSample(CMSampleBufferRef, bool displaying);
    void handleDecompressionOutput(bool displaying, OSStatus, VTDecodeInfoFlags, CVImageBufferRef, CMTime presentationTimeStamp, CMTime presentationDuration);
    RetainPtr<CVPixelBufferRef> getFirstVideoFrame();
    void resetAutomaticDequeueTimer();
    void automaticDequeue();
    bool shouldDecodeSample(CMSampleBufferRef, bool displaying);

    // CMBufferQueue callback trampolines; refcon carries the session instance.
    static CMTime getDecodeTime(CMBufferRef, void* refcon);
    static CMTime getPresentationTime(CMBufferRef, void* refcon);
    static CMTime getDuration(CMBufferRef, void* refcon);
    static CFComparisonResult compareBuffers(CMBufferRef buf1, CMBufferRef buf2, void* refcon);
    void maybeBecomeReadyForMoreMediaData();

    // Quality-of-service tier management (backed by m_qosTiers/m_currentQosTier).
    void resetQosTier();
    void increaseQosTier();
    void decreaseQosTier();
    void updateQosWithDecodeTimeStatistics(double ratio);

    // Buffer-queue thresholds, in sample counts (CMItemCount).
    static const CMItemCount kMaximumCapacity = 120;
    static const CMItemCount kHighWaterMark = 60;
    static const CMItemCount kLowWaterMark = 15;

    Mode m_mode;
    RetainPtr<VTDecompressionSessionRef> m_decompressionSession;
    RetainPtr<CMBufferQueueRef> m_producerQueue;
    RetainPtr<CMBufferQueueRef> m_consumerQueue;
    RetainPtr<CMTimebaseRef> m_timebase;
    OSObjectPtr<dispatch_queue_t> m_decompressionQueue;
    OSObjectPtr<dispatch_queue_t> m_enqueingQueue;
    OSObjectPtr<dispatch_semaphore_t> m_hasAvailableImageSemaphore;
    OSObjectPtr<dispatch_source_t> m_timerSource;
    std::function<void()> m_notificationCallback;
    std::function<void()> m_hasAvailableFrameCallback;

    // QoS bookkeeping fed by updateQosWithDecodeTimeStatistics().
    RetainPtr<CFArrayRef> m_qosTiers;
    long m_currentQosTier { 0 };
    unsigned long m_framesSinceLastQosCheck { 0 };
    double m_decodeRatioMovingAverage { 0 };

    bool m_invalidated { false };
    int m_framesBeingDecoded { 0 };
    unsigned long m_totalVideoFrames { 0 };
    unsigned long m_droppedVideoFrames { 0 };
    unsigned long m_corruptedVideoFrames { 0 };
    MediaTime m_totalFrameDelay;
};
}
#endif