// MediaRecorderPrivateWriterCocoa.h
#pragma once
#if ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)
#include "AudioStreamDescription.h"
#include "SharedBuffer.h"
#include <wtf/CompletionHandler.h>
#include <wtf/Deque.h>
#include <wtf/Lock.h>
#include <wtf/RetainPtr.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/WeakPtr.h>
#include <wtf/threads/BinarySemaphore.h>
#include <CoreAudio/CoreAudioTypes.h>
#include <CoreMedia/CMTime.h>
typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
typedef const struct opaqueCMFormatDescription* CMFormatDescriptionRef;
typedef struct opaqueCMBufferQueueTriggerToken *CMBufferQueueTriggerToken;
OBJC_CLASS AVAssetWriter;
OBJC_CLASS AVAssetWriterInput;
OBJC_CLASS WebAVAssetWriterDelegate;
namespace WTF {
class MediaTime;
}
namespace WebCore {
class AudioSampleBufferCompressor;
class AudioStreamDescription;
class MediaSample;
class MediaStreamTrackPrivate;
class PlatformAudioData;
class VideoSampleBufferCompressor;
struct MediaRecorderPrivateOptions;
// Compresses incoming audio/video samples (via AudioSampleBufferCompressor /
// VideoSampleBufferCompressor) and feeds them to an AVAssetWriter; the muxed
// container bytes are accumulated in m_data — presumably delivered back through
// WebAVAssetWriterDelegate; confirm against the .mm — and handed to clients via
// fetchData(). Ref-counted with main-thread destruction; the weak-pointer
// factory is created eagerly so WeakPtrs can be vended off the main thread.
class WEBCORE_EXPORT MediaRecorderPrivateWriter : public ThreadSafeRefCounted<MediaRecorderPrivateWriter, WTF::DestructionThread::Main>, public CanMakeWeakPtr<MediaRecorderPrivateWriter, WeakPtrFactoryInitialization::Eager> {
public:
// Factory; returns RefPtr, so callers must handle a null result (e.g. if
// writer/compressor setup fails — NOTE(review): confirm failure paths in the .mm).
static RefPtr<MediaRecorderPrivateWriter> create(bool hasAudio, bool hasVideo, const MediaRecorderPrivateOptions&);
~MediaRecorderPrivateWriter();
// Queues an uncompressed video sample for compression and muxing.
void appendVideoSampleBuffer(MediaSample&);
// Queues uncompressed audio data. The size_t is presumably the sample/frame
// count for this chunk — TODO confirm against callers.
void appendAudioSampleBuffer(const PlatformAudioData&, const AudioStreamDescription&, const WTF::MediaTime&, size_t);
// Stops accepting new samples; pending compressed samples are flushed before
// the writer is finalized (see m_isStopping / m_shouldStopAfterFlushingSamples).
void stopRecording();
// Delivers the container bytes accumulated so far to the completion handler
// (stored in m_fetchDataCompletionHandler until the data is ready).
void fetchData(CompletionHandler<void(RefPtr<SharedBuffer>&&)>&&);
// Appends muxed output bytes to m_data; presumably invoked by the
// AVAssetWriterDelegate as the writer emits segments — verify in the .mm.
void appendData(const char*, size_t);
void appendData(Ref<SharedBuffer>&&);
// Container MIME type of the recording being produced.
const String& mimeType() const;
private:
MediaRecorderPrivateWriter(bool hasAudio, bool hasVideo);
void clear();
// Sets up the asset writer, inputs and compressors; returns false on failure.
bool initialize();
void setOptions(const MediaRecorderPrivateOptions&);
// CMBufferQueue trigger callbacks fired when a compressor has output ready;
// the void* context is presumably the MediaRecorderPrivateWriter instance —
// confirm against the CMBufferQueueInstallTrigger call site.
static void compressedVideoOutputBufferCallback(void*, CMBufferQueueTriggerToken);
static void compressedAudioOutputBufferCallback(void*, CMBufferQueueTriggerToken);
void startAssetWriter();
// Drains as many queued compressed samples as the writer inputs will accept.
void appendCompressedSampleBuffers();
bool appendCompressedAudioSampleBufferIfPossible();
bool appendCompressedVideoSampleBufferIfPossible();
void appendCompressedVideoSampleBuffer(CMSampleBufferRef);
void processNewCompressedAudioSampleBuffers();
void processNewCompressedVideoSampleBuffers();
void finishAppendingCompressedAudioSampleBuffers(CompletionHandler<void()>&&);
void finishAppendingCompressedVideoSampleBuffers(CompletionHandler<void()>&&);
void flushCompressedSampleBuffers(CompletionHandler<void()>&&);
void finishedFlushingSamples();
bool m_hasAudio { false };
bool m_hasVideo { false };
bool m_hasStartedWriting { false };
bool m_isStopped { false };
// m_isStopping: stop requested but not yet completed (flush may be in flight).
bool m_isStopping { false };
RetainPtr<AVAssetWriter> m_writer;
// Accumulated muxed container bytes awaiting a fetchData() request.
RefPtr<SharedBuffer> m_data;
CompletionHandler<void(RefPtr<SharedBuffer>&&)> m_fetchDataCompletionHandler;
// Audio pipeline: format description, compressor, and its writer input.
RetainPtr<CMFormatDescriptionRef> m_audioFormatDescription;
std::unique_ptr<AudioSampleBufferCompressor> m_audioCompressor;
RetainPtr<AVAssetWriterInput> m_audioAssetWriterInput;
// Video pipeline: format description, compressor, and its writer input.
RetainPtr<CMFormatDescriptionRef> m_videoFormatDescription;
std::unique_ptr<VideoSampleBufferCompressor> m_videoCompressor;
RetainPtr<AVAssetWriterInput> m_videoAssetWriterInput;
// Timestamps of the last appended video sample; invalid until the first append.
CMTime m_lastVideoPresentationTime { kCMTimeInvalid };
CMTime m_lastVideoDecodingTime { kCMTimeInvalid };
bool m_hasEncodedVideoSamples { false };
RetainPtr<WebAVAssetWriterDelegate> m_writerDelegate;
// Compressed samples waiting for their AVAssetWriterInput to be ready.
Deque<RetainPtr<CMSampleBufferRef>> m_pendingVideoSampleQueue;
Deque<RetainPtr<CMSampleBufferRef>> m_pendingAudioSampleQueue;
bool m_isFlushingSamples { false };
// Set when stopRecording() arrives mid-flush: finish flushing, then stop.
bool m_shouldStopAfterFlushingSamples { false };
bool m_firstVideoFrame { false };
// Optional orientation transform applied to video — presumably from the track's
// settings; confirm where it is set in the .mm.
Optional<CGAffineTransform> m_videoTransform;
CMTime m_firstVideoSampleTime { kCMTimeZero };
// Running audio timestamp for the next appended audio chunk.
CMTime m_currentAudioSampleTime { kCMTimeZero };
};
}
#endif // ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)