// RealtimeOutgoingAudioSource.h
#pragma once
#if USE(LIBWEBRTC)
#include "LibWebRTCMacros.h"
#include "MediaStreamTrackPrivate.h"
#include "Timer.h"
#include <webrtc/api/mediastreaminterface.h>
#include <wtf/ThreadSafeRefCounted.h>
namespace webrtc {
class AudioTrackInterface;
class AudioTrackSinkInterface;
}
namespace WebCore {
// Bridges a WebCore MediaStreamTrackPrivate audio track to libwebrtc's
// AudioSourceInterface so captured audio can be fed to outgoing peer connections.
// Lifetime is managed by WTF's ThreadSafeRefCounted; the libwebrtc AddRef/Release
// hooks forward to ref()/deref(), with deref() bounced to the main thread.
class RealtimeOutgoingAudioSource : public ThreadSafeRefCounted<RealtimeOutgoingAudioSource>, public webrtc::AudioSourceInterface, private MediaStreamTrackPrivate::Observer {
public:
    static Ref<RealtimeOutgoingAudioSource> create(Ref<MediaStreamTrackPrivate>&& audioSource);

    // Virtual: subclasses override pullAudioData()/audioSamplesAvailable() and
    // instances are destroyed through this base type when the last ref drops,
    // so a non-virtual destructor would be undefined behavior.
    virtual ~RealtimeOutgoingAudioSource() { stop(); }

    void stop();

    bool setSource(Ref<MediaStreamTrackPrivate>&&);
    MediaStreamTrackPrivate& source() const { return m_audioSource.get(); }

protected:
    explicit RealtimeOutgoingAudioSource(Ref<MediaStreamTrackPrivate>&&);

    // Hook for subclasses to push buffered audio to the registered sinks.
    virtual void pullAudioData() { }

    // Silence should be emitted while the source is muted or the track disabled.
    bool isSilenced() const { return m_muted || !m_enabled; }

    Vector<webrtc::AudioTrackSinkInterface*> m_sinks;

private:
    // webrtc::AudioSourceInterface sink registration (marked final for
    // consistency with the other libwebrtc overrides below).
    void AddSink(webrtc::AudioTrackSinkInterface* sink) final { m_sinks.append(sink); }
    void RemoveSink(webrtc::AudioTrackSinkInterface* sink) final { m_sinks.removeFirst(sink); }

    void AddRef() const final { ref(); }
    rtc::RefCountReleaseStatus Release() const final
    {
        // deref() may destroy |this| and must run on the main thread; since
        // destruction is deferred, report that other refs remained.
        callOnMainThread([this] {
            deref();
        });
        return rtc::RefCountReleaseStatus::kOtherRefsRemained;
    }

    SourceState state() const final { return kLive; }
    bool remote() const final { return false; }
    void RegisterObserver(webrtc::ObserverInterface*) final { }
    void UnregisterObserver(webrtc::ObserverInterface*) final { }

    void sourceMutedChanged();
    void sourceEnabledChanged();

    // Subclass hooks for buffering incoming audio; defaults are no-ops.
    virtual void audioSamplesAvailable(const MediaTime&, const PlatformAudioData&, const AudioStreamDescription&, size_t) { }
    virtual bool isReachingBufferedAudioDataHighLimit() { return false; }
    virtual bool isReachingBufferedAudioDataLowLimit() { return false; }
    virtual bool hasBufferedEnoughData() { return false; }

    // MediaStreamTrackPrivate::Observer overrides.
    void trackMutedChanged(MediaStreamTrackPrivate&) final { sourceMutedChanged(); }
    void trackEnabledChanged(MediaStreamTrackPrivate&) final { sourceEnabledChanged(); }
    void audioSamplesAvailable(MediaStreamTrackPrivate&, const MediaTime& mediaTime, const PlatformAudioData& data, const AudioStreamDescription& description, size_t sampleCount) final { audioSamplesAvailable(mediaTime, data, description, sampleCount); }
    void trackEnded(MediaStreamTrackPrivate&) final { }
    void trackSettingsChanged(MediaStreamTrackPrivate&) final { }

    void initializeConverter();

    Ref<MediaStreamTrackPrivate> m_audioSource;
    bool m_muted { false };
    bool m_enabled { true };
};
}
#endif // USE(LIBWEBRTC)