// RemoteMediaRecorder.h
#pragma once
#if PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)
#include "MediaRecorderIdentifier.h"
#include "MessageReceiver.h"
#include "SharedMemory.h"
#include <WebCore/CAAudioStreamDescription.h>
#include <WebCore/MediaRecorderPrivateWriterCocoa.h>
#include <wtf/MediaTime.h>
// Forward declarations of IPC types used only by reference/pointer below.
namespace IPC {
class Connection;
class DataReference;
class Decoder;
}
// Forward declarations of WebCore types held via pointer or passed by rvalue.
namespace WebCore {
class CARingBuffer;
class ImageTransferSessionVT;
class RemoteVideoSample;
}
namespace WebKit {
class GPUConnectionToWebProcess;
class SharedRingBufferStorage;
// GPU-process-side endpoint for a single MediaRecorder session. Receives audio
// and video sample messages from the web process over IPC and forwards them to
// a WebCore::MediaRecorderPrivateWriter for encoding/muxing.
// NOTE(review): presumably owned per-connection by GPUConnectionToWebProcess —
// confirm against the .cpp / connection code, which is not visible here.
class RemoteMediaRecorder : private IPC::MessageReceiver {
    WTF_MAKE_FAST_ALLOCATED;
public:
    // Returns nullptr on failure (factory is fallible by signature); the
    // recordAudio/recordVideo flags select which sample streams are handled.
    static std::unique_ptr<RemoteMediaRecorder> create(GPUConnectionToWebProcess&, MediaRecorderIdentifier, bool recordAudio, bool recordVideo);
    ~RemoteMediaRecorder();

    // IPC::MessageReceiver entry point; dispatches to the private handlers below.
    void didReceiveMessage(IPC::Connection&, IPC::Decoder&) final;
private:
    RemoteMediaRecorder(GPUConnectionToWebProcess&, MediaRecorderIdentifier, Ref<WebCore::MediaRecorderPrivateWriter>&&, bool recordAudio);

    // -- IPC message handlers --
    // Adopts a new shared-memory ring buffer for incoming audio samples,
    // together with its stream description and capacity in frames.
    void audioSamplesStorageChanged(const SharedMemory::IPCHandle&, const WebCore::CAAudioStreamDescription&, uint64_t numberOfFrames);
    // Signals that frames [startFrame, endFrame) are available in the ring buffer.
    void audioSamplesAvailable(MediaTime, uint64_t numberOfFrames, uint64_t startFrame, uint64_t endFrame);
    // Delivers one video frame from the web process.
    void videoSampleAvailable(WebCore::RemoteVideoSample&&);
    // Asynchronously returns the encoded recording data and its MIME type.
    void fetchData(CompletionHandler<void(IPC::DataReference&&, const String& mimeType)>&&);
    void stopRecording();

    // Accessor for the ring buffer's shared-memory backing store.
    SharedRingBufferStorage& storage();

    GPUConnectionToWebProcess& m_gpuConnectionToWebProcess; // back-reference; assumed to outlive this object — verify ownership in .cpp
    MediaRecorderIdentifier m_identifier;
    Ref<WebCore::MediaRecorderPrivateWriter> m_writer; // sink that encodes/muxes the samples
    WebCore::CAAudioStreamDescription m_description; // format of the current audio stream
    std::unique_ptr<WebCore::CARingBuffer> m_ringBuffer; // shared audio sample storage; reset by audioSamplesStorageChanged
    std::unique_ptr<WebCore::ImageTransferSessionVT> m_imageTransferSession; // lazily-used video frame transfer session, presumably for pixel-buffer conversion — confirm in .cpp
};
}
#endif // PLATFORM(COCOA) && ENABLE(GPU_PROCESS) && ENABLE(MEDIA_STREAM) && HAVE(AVASSETWRITERDELEGATE)