// AVAudioCaptureSource.h
#ifndef AVAudioCaptureSource_h
#define AVAudioCaptureSource_h
#if ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
#include "AVMediaCaptureSource.h"
#include <wtf/Lock.h>
typedef struct AudioStreamBasicDescription AudioStreamBasicDescription;
typedef const struct opaqueCMFormatDescription *CMFormatDescriptionRef;
namespace WebCore {
class WebAudioSourceProviderAVFObjC;
// Audio capture source backed by an AVFoundation capture device.
// Specializes AVMediaCaptureSource for audio: receives CMSampleBuffers
// from the capture session and fans them out to registered Observers,
// and can expose the stream through a WebAudioSourceProviderAVFObjC.
class AVAudioCaptureSource : public AVMediaCaptureSource {
public:
// Callback interface for consumers of captured audio data.
// NOTE(review): based on the method names, prepare() appears to be
// called with the stream format before process() calls begin, and
// unprepare() when capture stops — confirm against the .mm file.
class Observer {
public:
virtual ~Observer() { }
// Receives the format of the incoming audio stream.
virtual void prepare(const AudioStreamBasicDescription *) = 0;
// Signals that capture has been torn down.
virtual void unprepare() = 0;
// Receives each captured sample buffer with its format description.
virtual void process(CMFormatDescriptionRef, CMSampleBufferRef) = 0;
};
// Factory used by the capture machinery; returns the source as its
// AVMediaCaptureSource base type.
static RefPtr<AVMediaCaptureSource> create(AVCaptureDevice*, const AtomicString&, PassRefPtr<MediaConstraints>);
// Observers are stored as raw (non-owning) pointers; callers must call
// removeObserver() before the observer is destroyed.
void addObserver(Observer*);
void removeObserver(Observer*);
private:
AVAudioCaptureSource(AVCaptureDevice*, const AtomicString&, PassRefPtr<MediaConstraints>);
virtual ~AVAudioCaptureSource();
// AVMediaCaptureSource overrides (implemented in the .mm file).
void initializeCapabilities(RealtimeMediaSourceCapabilities&) override;
void initializeSupportedConstraints(RealtimeMediaSourceSupportedConstraints&) override;
// Delivery point for sample buffers from the AVCapture session.
void captureOutputDidOutputSampleBufferFromConnection(AVCaptureOutput*, CMSampleBufferRef, AVCaptureConnection*) override;
void setupCaptureSession() override;
void shutdownCaptureSession() override;
void updateSettings(RealtimeMediaSourceSettings&) override;
AudioSourceProvider* audioSourceProvider() override;
// Connection from the session's audio output, retained for teardown.
RetainPtr<AVCaptureConnection> m_audioConnection;
// Lazily-created WebAudio bridge; see audioSourceProvider().
RefPtr<WebAudioSourceProviderAVFObjC> m_audioSourceProvider;
// Format of the incoming stream, captured when known.
std::unique_ptr<AudioStreamBasicDescription> m_inputDescription;
Vector<Observer*> m_observers;
// NOTE(review): presumably guards m_observers against concurrent
// mutation from the capture callback thread — confirm in the .mm file.
Lock m_lock;
};
}
#endif // ENABLE(MEDIA_STREAM) && USE(AVFOUNDATION)
#endif // AVAudioCaptureSource_h