// GStreamerAudioCaptureSource.h
#pragma once
#if ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)
#include "CaptureDevice.h"
#include "GStreamerAudioCapturer.h"
#include "GStreamerCaptureDevice.h"
#include "RealtimeMediaSource.h"
namespace WebCore {
// Microphone capture source backed by GStreamer. Owns a GStreamerAudioCapturer
// (which holds the capture pipeline) and plugs into WebCore's
// RealtimeMediaSource machinery for MediaStream/WebRTC audio capture.
class GStreamerAudioCaptureSource : public RealtimeMediaSource {
public:
// Creates a source for the given device ID, applying the optional constraints;
// returns either the source or an error (CaptureSourceOrError). The hash salt
// is used for device-identifier hashing.
static CaptureSourceOrError create(String&& deviceID, String&& hashSalt, const MediaConstraints*);
// Process-wide factory through which WebCore creates audio capture sources.
WEBCORE_EXPORT static AudioCaptureFactory& factory();
// RealtimeMediaSource queries; backed by the mutable caches declared below.
const RealtimeMediaSourceCapabilities& capabilities() override;
const RealtimeMediaSourceSettings& settings() override;
// Non-owning accessors to the underlying GStreamer objects.
GstElement* pipeline() { return m_capturer->pipeline(); }
GStreamerCapturer* capturer() { return m_capturer.get(); }
protected:
GStreamerAudioCaptureSource(GStreamerCaptureDevice, String&& hashSalt);
GStreamerAudioCaptureSource(String&& deviceID, String&& name, String&& hashSalt);
virtual ~GStreamerAudioCaptureSource();
// RealtimeMediaSource overrides that start/stop the capture pipeline.
void startProducingData() override;
void stopProducingData() override;
CaptureDevice::DeviceType deviceType() const override { return CaptureDevice::DeviceType::Microphone; }
// Cached capabilities/settings for the query methods above; mutable so they
// can be (re)computed lazily. NOTE(review): presumably invalidated in
// settingsDidChange() — confirm against the .cpp.
mutable Optional<RealtimeMediaSourceCapabilities> m_capabilities;
mutable Optional<RealtimeMediaSourceSettings> m_currentSettings;
private:
bool isCaptureSource() const final { return true; }
void settingsDidChange(OptionSet<RealtimeMediaSourceSettings::Flag>) final;
// Sole owner of the capturer and thus of the GStreamer pipeline.
std::unique_ptr<GStreamerAudioCapturer> m_capturer;
// C-style callback invoked by GStreamer when a new sample is available
// (signature matches an appsink "new-sample" handler — confirm in the .cpp);
// the source pointer is passed as user data. Forwards into
// triggerSampleAvailable() on the instance.
static GstFlowReturn newSampleCallback(GstElement*, GStreamerAudioCaptureSource*);
void triggerSampleAvailable(GstSample*);
};
}
#endif // ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER)