// AVMediaCaptureSource.h
#ifndef AVMediaCaptureSource_h
#define AVMediaCaptureSource_h
#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
#include "MediaStreamSource.h"
#include <wtf/RetainPtr.h>
OBJC_CLASS AVCaptureAudioDataOutput;
OBJC_CLASS AVCaptureConnection;
OBJC_CLASS AVCaptureDevice;
OBJC_CLASS AVCaptureOutput;
OBJC_CLASS AVCaptureSession;
OBJC_CLASS AVCaptureVideoDataOutput;
OBJC_CLASS WebCoreAVMediaCaptureSourceObserver;
typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
namespace WebCore {
// Abstract base class bridging an AVFoundation capture device into WebCore's
// MediaStream machinery. Owns the AVCaptureSession/AVCaptureDevice pair plus an
// Objective-C observer (WebCoreAVMediaCaptureSourceObserver) that forwards
// AVFoundation callbacks back into this C++ object. Audio- and video-specific
// subclasses implement the pure virtual hooks below.
class AVMediaCaptureSource : public MediaStreamSource {
public:
virtual ~AVMediaCaptureSource();
// Delivery point for each media sample produced by an AVCaptureOutput
// (presumably forwarded from m_objcObserver's delegate callback — confirm in
// the implementation file). Subclasses consume the CMSampleBuffer here.
virtual void captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutput*, CMSampleBufferRef, AVCaptureConnection*) = 0;
// Notification that the underlying AVCaptureSession stopped running.
virtual void captureSessionStoppedRunning();
protected:
// Subclass-only constructor: the device to capture from, a source identifier,
// the source type (audio/video), and optional media constraints.
AVMediaCaptureSource(AVCaptureDevice*, const AtomicString&, MediaStreamSource::Type, PassRefPtr<MediaConstraints>);
// MediaStreamSource overrides implemented by this base class.
virtual const MediaStreamSourceStates& states() override;
virtual void startProducingData() override;
virtual void stopProducingData() override;
// Subclass hook: attach the type-specific outputs to session().
virtual void setupCaptureSession() = 0;
// Subclass hook: refresh the state block returned via currentStates().
virtual void updateStates() = 0;
// Non-owning accessors; lifetime is managed by the RetainPtr members below.
AVCaptureSession *session() const { return m_session.get(); }
AVCaptureDevice *device() const { return m_device.get(); }
// Mutable access to the cached states, for subclasses' updateStates().
MediaStreamSourceStates* currentStates() { return &m_currentStates; }
MediaConstraints* constraints() { return m_constraints.get(); }
// Install the internal observer as the sample-buffer delegate on the given
// output (NOTE(review): assumed from the names — delegate wiring lives in the .mm).
void setVideoSampleBufferDelegate(AVCaptureVideoDataOutput*);
void setAudioSampleBufferDelegate(AVCaptureAudioDataOutput*);
private:
// Creates and configures m_session before capture starts.
void setupSession();
RetainPtr<WebCoreAVMediaCaptureSourceObserver> m_objcObserver;
RefPtr<MediaConstraints> m_constraints;
MediaStreamSourceStates m_currentStates;
RetainPtr<AVCaptureSession> m_session;
RetainPtr<AVCaptureDevice> m_device;
// NOTE(review): no in-class initializer — assumed to be set in the
// constructor defined in the implementation file; confirm there.
bool m_isRunning;
};
}
#endif // ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
#endif // AVMediaCaptureSource_h