#ifndef SourceBufferPrivateAVFObjC_h
#define SourceBufferPrivateAVFObjC_h

#if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)

#include "SourceBufferPrivate.h"
#include <dispatch/semaphore.h>
#include <wtf/Deque.h>
#include <wtf/HashMap.h>
#include <wtf/MediaTime.h>
#include <wtf/OSObjectPtr.h>
#include <wtf/RefPtr.h>
#include <wtf/RetainPtr.h>
#include <wtf/Vector.h>
#include <wtf/WeakPtr.h>
#include <wtf/text/AtomicString.h>

OBJC_CLASS AVAsset;
OBJC_CLASS AVStreamDataParser;
OBJC_CLASS AVSampleBufferAudioRenderer;
OBJC_CLASS AVSampleBufferDisplayLayer;
OBJC_CLASS NSData;
OBJC_CLASS NSError;
OBJC_CLASS NSObject;
OBJC_CLASS WebAVStreamDataParserListener;
OBJC_CLASS WebAVSampleBufferErrorListener;

typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
typedef const struct opaqueCMFormatDescription *CMFormatDescriptionRef;

namespace WebCore {

class CDMSessionMediaSourceAVFObjC;
class MediaSourcePrivateAVFObjC;
class TimeRanges;
class AudioTrackPrivate;
class VideoTrackPrivate;
class AudioTrackPrivateMediaSourceAVFObjC;
class VideoTrackPrivateMediaSourceAVFObjC;
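
// Abstract client interface through which NSError notifications from an
// AVSampleBufferDisplayLayer or AVSampleBufferAudioRenderer are reported;
// a client may set shouldIgnore to suppress handling of a given error.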
class SourceBufferPrivateAVFObjCErrorClient {
public:
    virtual ~SourceBufferPrivateAVFObjCErrorClient() { }
    virtual void layerDidReceiveError(AVSampleBufferDisplayLayer *, NSError *, bool& shouldIgnore) = 0;
    virtual void rendererDidReceiveError(AVSampleBufferAudioRenderer *, NSError *, bool& shouldIgnore) = 0;
};
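
// SourceBufferPrivate implementation backed by the AVFoundation sample buffer
// APIs: appended data is handed to an AVStreamDataParser, and parsed samples
// are enqueued to an AVSampleBufferDisplayLayer (video) and per-track
// AVSampleBufferAudioRenderers (audio) held by this buffer.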
class SourceBufferPrivateAVFObjC final : public SourceBufferPrivate {
public:
    static RefPtr<SourceBufferPrivateAVFObjC> create(MediaSourcePrivateAVFObjC*);
    virtual ~SourceBufferPrivateAVFObjC();

    void clearMediaSource() { m_mediaSource = nullptr; }

    // AVStreamDataParser delegate callbacks, forwarded by WebAVStreamDataParserListener.
    void didParseStreamDataAsAsset(AVAsset*);
    void didFailToParseStreamDataWithError(NSError*);
    void didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef, const String& mediaType, unsigned flags);
    void didReachEndOfTrackWithTrackID(int trackID, const String& mediaType);
    void willProvideContentKeyRequestInitializationDataForTrackID(int trackID);
    void didProvideContentKeyRequestInitializationDataForTrackID(NSData*, int trackID, OSObjectPtr<dispatch_semaphore_t>);

    bool processCodedFrame(int trackID, CMSampleBufferRef, const String& mediaType);

    bool hasVideo() const;
    bool hasAudio() const;

    void trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC*);
    void trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC*);

    void seekToTime(MediaTime);
    MediaTime fastSeekTimeForMediaTime(MediaTime, MediaTime negativeThreshold, MediaTime positiveThreshold);
    FloatSize naturalSize();

    int protectedTrackID() const { return m_protectedTrackID; }
    AVStreamDataParser* parser() const { return m_parser.get(); }
    void setCDMSession(CDMSessionMediaSourceAVFObjC*);

    void flush();

    void registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient*);
    void unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient*);
    void layerDidReceiveError(AVSampleBufferDisplayLayer *, NSError *);
    void rendererDidReceiveError(AVSampleBufferAudioRenderer *, NSError *);

private:
    explicit SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC*);

    // SourceBufferPrivate overrides.
    void setClient(SourceBufferPrivateClient*) override;
    void append(const unsigned char* data, unsigned length) override;
    void abort() override;
    void removedFromMediaSource() override;
    MediaPlayer::ReadyState readyState() const override;
    void setReadyState(MediaPlayer::ReadyState) override;
    void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AtomicString trackID) override;
    void enqueueSample(PassRefPtr<MediaSample>, AtomicString trackID) override;
    bool isReadyForMoreSamples(AtomicString trackID) override;
    void setActive(bool) override;
    void notifyClientWhenReadyForMoreSamples(AtomicString trackID) override;

    void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferAudioRenderer*);
    void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferDisplayLayer*);

    void didBecomeReadyForMoreSamples(int trackID);
    void appendCompleted();
    void destroyParser();
    void destroyRenderers();

    WeakPtr<SourceBufferPrivateAVFObjC> createWeakPtr() { return m_weakFactory.createWeakPtr(); }

    Vector<RefPtr<VideoTrackPrivateMediaSourceAVFObjC>> m_videoTracks;
    Vector<RefPtr<AudioTrackPrivateMediaSourceAVFObjC>> m_audioTracks;
    Vector<SourceBufferPrivateAVFObjCErrorClient*> m_errorClients;

    WeakPtrFactory<SourceBufferPrivateAVFObjC> m_weakFactory;

    RetainPtr<AVStreamDataParser> m_parser;
    RetainPtr<AVAsset> m_asset;
    RetainPtr<AVSampleBufferDisplayLayer> m_displayLayer;
    HashMap<int, RetainPtr<AVSampleBufferAudioRenderer>> m_audioRenderers;
    RetainPtr<WebAVStreamDataParserListener> m_delegate;
    RetainPtr<WebAVSampleBufferErrorListener> m_errorListener;
    RetainPtr<NSError> m_hdcpError;
    OSObjectPtr<dispatch_semaphore_t> m_hasSessionSemaphore;

    MediaSourcePrivateAVFObjC* m_mediaSource;
    SourceBufferPrivateClient* m_client;
    CDMSessionMediaSourceAVFObjC* m_session { nullptr };

    FloatSize m_cachedSize;
    bool m_parsingSucceeded;
    int m_enabledVideoTrackID;
    int m_protectedTrackID;
};

} // namespace WebCore

#endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)

#endif // SourceBufferPrivateAVFObjC_h