// AVVideoCaptureSource.h
#ifndef AVVideoCaptureSource_h
#define AVVideoCaptureSource_h
#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
#include "AVMediaCaptureSource.h"
OBJC_CLASS CALayer;
typedef struct CGImage *CGImageRef;
typedef const struct opaqueCMFormatDescription *CMFormatDescriptionRef;
typedef struct opaqueCMSampleBuffer *CMSampleBufferRef;
namespace WebCore {
class FloatRect;
class GraphicsContext;
// Captures live video frames from an AVFoundation capture device and exposes
// them to WebCore's MediaStream machinery. Extends AVMediaCaptureSource with
// video-specific behavior: frame-rate/resolution constraint handling, current
// frame extraction as a CGImage/Image, and painting into a GraphicsContext.
class AVVideoCaptureSource : public AVMediaCaptureSource {
public:
    // Factory: returns nullptr-capable RefPtr; creation can fail if the
    // supplied MediaConstraints cannot be satisfied by the device.
    static RefPtr<AVMediaCaptureSource> create(AVCaptureDevice*, const AtomicString&, PassRefPtr<MediaConstraints>);
    // Dimensions of the most recently processed frame, in pixels.
    // Zero until the first sample buffer has been received.
    int32_t width() const { return m_width; }
    int32_t height() const { return m_height; }
private:
    AVVideoCaptureSource(AVCaptureDevice*, const AtomicString&, PassRefPtr<MediaConstraints>);
    virtual ~AVVideoCaptureSource();
    // AVMediaCaptureSource overrides: session lifecycle and settings/
    // capabilities reporting for this video track.
    void setupCaptureSession() override;
    void shutdownCaptureSession() override;
    void updateSettings(RealtimeMediaSourceSettings&) override;
    void initializeCapabilities(RealtimeMediaSourceCapabilities&) override;
    void initializeSupportedConstraints(RealtimeMediaSourceSupportedConstraints&) override;
    // Applies the given constraints to the capture device; returns false if
    // they cannot be satisfied.
    bool applyConstraints(MediaConstraints*);
    bool setFrameRateConstraint(float minFrameRate, float maxFrameRate);
    // Updates m_frameRate from the sample buffer's timestamps; returns true
    // when the computed rate changed.
    bool updateFramerate(CMSampleBufferRef);
    // Per-frame delegate callback from the AVFoundation capture pipeline.
    void captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutput*, CMSampleBufferRef, AVCaptureConnection*) override;
    void processNewFrame(RetainPtr<CMSampleBufferRef>);
    void paintCurrentFrameInContext(GraphicsContext&, const FloatRect&) override;
    // Lazily-created preview layer backing the rendering path.
    PlatformLayer* platformLayer() const override;
    // Snapshot accessors for the most recent frame.
    RetainPtr<CGImageRef> currentFrameCGImage();
    RefPtr<Image> currentFrameImage() override;
    RetainPtr<CMSampleBufferRef> m_buffer; // Most recent sample buffer.
    RetainPtr<CGImageRef> m_lastImage; // Cached image of the last frame.
    Vector<Float64> m_videoFrameTimeStamps; // Recent frame times for rate estimation.
    // mutable: created on demand inside the const platformLayer() accessor.
    mutable RetainPtr<PlatformLayer> m_videoPreviewLayer;
    Float64 m_frameRate { 0 }; // Observed frames per second; 0 until measured.
    int32_t m_width { 0 }; // Last frame width in pixels.
    int32_t m_height { 0 }; // Last frame height in pixels.
};
}
#endif // ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
#endif // AVVideoCaptureSource_h