// RealtimeOutgoingVideoSource.h
#pragma once
#if USE(LIBWEBRTC)
#include "LibWebRTCMacros.h"
#include "MediaStreamTrackPrivate.h"
#include <Timer.h>
ALLOW_UNUSED_PARAMETERS_BEGIN
#include <webrtc/api/media_stream_interface.h>
#include <webrtc/common_video/include/i420_buffer_pool.h>
ALLOW_UNUSED_PARAMETERS_END
#include <wtf/LoggerHelper.h>
#include <wtf/Optional.h>
#include <wtf/ThreadSafeRefCounted.h>
namespace WebCore {
// Bridges a WebCore MediaStreamTrackPrivate video track to libwebrtc's
// webrtc::VideoTrackSourceInterface so the track's frames can feed an
// outgoing WebRTC video track. Subclasses provide the platform-specific
// frame conversion (see createBlackFrame()).
//
// Lifetime: ref-counted via WTF::ThreadSafeRefCounted and destroyed on the
// main thread; libwebrtc's AddRef()/Release() are forwarded to ref()/deref().
class RealtimeOutgoingVideoSource
    : public ThreadSafeRefCounted<RealtimeOutgoingVideoSource, WTF::DestructionThread::Main>
    , public webrtc::VideoTrackSourceInterface
    , private MediaStreamTrackPrivate::Observer
#if !RELEASE_LOG_DISABLED
    , private LoggerHelper
#endif
{
public:
    static Ref<RealtimeOutgoingVideoSource> create(Ref<MediaStreamTrackPrivate>&& videoSource);
    ~RealtimeOutgoingVideoSource();

    // Begins/stops observing the underlying track for frames and state changes.
    void start() { observeSource(); }
    void stop();
    void setSource(Ref<MediaStreamTrackPrivate>&&);
    MediaStreamTrackPrivate& source() const { return m_videoSource.get(); }

    // rtc::RefCountInterface: forward libwebrtc ref-counting to WTF's.
    void AddRef() const final { ref(); }
    rtc::RefCountReleaseStatus Release() const final
    {
        // ThreadSafeRefCounted does not expose whether this deref destroyed
        // the object, so conservatively report that other refs remain.
        deref();
        return rtc::RefCountReleaseStatus::kOtherRefsRemained;
    }

    void setApplyRotation(bool shouldApplyRotation) { m_shouldApplyRotation = shouldApplyRotation; }

protected:
    explicit RealtimeOutgoingVideoSource(Ref<MediaStreamTrackPrivate>&&);

    // Delivers a frame buffer to the registered sinks.
    void sendFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer>&&);

    // A silenced source (muted or disabled) produces black frames instead of
    // the track's content (see sendBlackFramesIfNeeded()).
    bool isSilenced() const { return m_muted || !m_enabled; }

    // Subclasses supply a platform-specific all-black frame buffer of the
    // given dimensions, used while the source is silenced.
    virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> createBlackFrame(size_t width, size_t height) = 0;

    bool m_shouldApplyRotation { false };
    webrtc::VideoRotation m_currentRotation { webrtc::kVideoRotation_0 };

#if !RELEASE_LOG_DISABLED
    // LoggerHelper.
    const Logger& logger() const final { return m_logger.get(); }
    const void* logIdentifier() const final { return m_logIdentifier; }
    const char* logClassName() const final { return "RealtimeOutgoingVideoSource"; }
    WTFLogChannel& logChannel() const final;
#endif

private:
    void sendBlackFramesIfNeeded();
    void sendOneBlackFrame();
    void initializeFromSource();
    void updateBlackFramesSending();

    void observeSource();
    void unobserveSource();

    // webrtc::NotifierInterface: state-change notifications are unused here.
    void RegisterObserver(webrtc::ObserverInterface*) final { }
    void UnregisterObserver(webrtc::ObserverInterface*) final { }

    // webrtc::VideoTrackSourceInterface.
    bool is_screencast() const final { return false; }
    absl::optional<bool> needs_denoising() const final { return absl::optional<bool>(); }
    bool GetStats(Stats*) final { return false; }
    SourceState state() const final { return SourceState(); }
    bool remote() const final { return true; }
    void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>*, const rtc::VideoSinkWants&) final;
    void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>*) final;

    void sourceMutedChanged();
    void sourceEnabledChanged();

    // MediaStreamTrackPrivate::Observer.
    void trackMutedChanged(MediaStreamTrackPrivate&) final { sourceMutedChanged(); }
    void trackEnabledChanged(MediaStreamTrackPrivate&) final { sourceEnabledChanged(); }
    void trackSettingsChanged(MediaStreamTrackPrivate&) final { initializeFromSource(); }
    void sampleBufferUpdated(MediaStreamTrackPrivate&, MediaSample&) override { }
    void trackEnded(MediaStreamTrackPrivate&) final { }

    Ref<MediaStreamTrackPrivate> m_videoSource;
    Timer m_blackFrameTimer;
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> m_blackFrame;

    mutable RecursiveLock m_sinksLock; // Guards m_sinks.
    HashSet<rtc::VideoSinkInterface<webrtc::VideoFrame>*> m_sinks;

    bool m_enabled { true };
    bool m_muted { false };
    uint32_t m_width { 0 };
    uint32_t m_height { 0 };

#if !RELEASE_LOG_DISABLED
    Ref<const Logger> m_logger;
    const void* m_logIdentifier;
    MonotonicTime m_lastFrameLogTime;
    unsigned m_frameCount { 0 };
#endif
};
}
#endif // USE(LIBWEBRTC)