#include "config.h"
#include "MediaSource.h"
#if ENABLE(MEDIA_SOURCE)
#include "AudioTrackList.h"
#include "ContentType.h"
#include "Event.h"
#include "EventNames.h"
#include "HTMLMediaElement.h"
#include "Logging.h"
#include "MediaSourcePrivate.h"
#include "MediaSourceRegistry.h"
#include "Quirks.h"
#include "Settings.h"
#include "SourceBuffer.h"
#include "SourceBufferList.h"
#include "SourceBufferPrivate.h"
#include "TextTrackList.h"
#include "TimeRanges.h"
#include "VideoTrackList.h"
#include <wtf/IsoMallocInlines.h>
namespace WebCore {
WTF_MAKE_ISO_ALLOCATED_IMPL(MediaSource);
// Returns a static human-readable name for a MediaSourcePrivate::AddStatus
// value; used by the logging macros below.
String convertEnumerationToString(MediaSourcePrivate::AddStatus enumerationValue)
{
    // Table is indexed by the enum's underlying value; the static_asserts
    // below keep it in sync with the enum definition.
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("Ok"),
        MAKE_STATIC_STRING_IMPL("NotSupported"),
        MAKE_STATIC_STRING_IMPL("ReachedIdLimit"),
    };
    static_assert(static_cast<size_t>(MediaSourcePrivate::AddStatus::Ok) == 0, "MediaSourcePrivate::AddStatus::Ok is not 0 as expected");
    static_assert(static_cast<size_t>(MediaSourcePrivate::AddStatus::NotSupported) == 1, "MediaSourcePrivate::AddStatus::NotSupported is not 1 as expected");
    static_assert(static_cast<size_t>(MediaSourcePrivate::AddStatus::ReachedIdLimit) == 2, "MediaSourcePrivate::AddStatus::ReachedIdLimit is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
// Returns a static human-readable name for a MediaSourcePrivate::EndOfStreamStatus
// value; used by the logging macros below.
String convertEnumerationToString(MediaSourcePrivate::EndOfStreamStatus enumerationValue)
{
    // Indexed by underlying value; static_asserts keep the table in sync with
    // the enum definition.
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("EosNoError"),
        MAKE_STATIC_STRING_IMPL("EosNetworkError"),
        MAKE_STATIC_STRING_IMPL("EosDecodeError"),
    };
    static_assert(static_cast<size_t>(MediaSourcePrivate::EndOfStreamStatus::EosNoError) == 0, "MediaSourcePrivate::EndOfStreamStatus::EosNoError is not 0 as expected");
    static_assert(static_cast<size_t>(MediaSourcePrivate::EndOfStreamStatus::EosNetworkError) == 1, "MediaSourcePrivate::EndOfStreamStatus::EosNetworkError is not 1 as expected");
    static_assert(static_cast<size_t>(MediaSourcePrivate::EndOfStreamStatus::EosDecodeError) == 2, "MediaSourcePrivate::EndOfStreamStatus::EosDecodeError is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
// Process-wide registry used to resolve object URLs minted for MediaSources.
URLRegistry* MediaSource::s_registry;

// Installs the registry; the assert enforces that this happens at most once.
void MediaSource::setRegistry(URLRegistry* registry)
{
    ASSERT(!s_registry);
    s_registry = registry;
}
// Factory for MediaSource. Per the ActiveDOMObject contract,
// suspendIfNeeded() must be called once construction has completed.
Ref<MediaSource> MediaSource::create(ScriptExecutionContext& context)
{
    Ref<MediaSource> source = adoptRef(*new MediaSource(context));
    source->suspendIfNeeded();
    return source;
}
MediaSource::MediaSource(ScriptExecutionContext& context)
    : ActiveDOMObject(&context)
    , m_duration(MediaTime::invalidTime()) // Duration is unset (NaN) until media arrives.
    , m_pendingSeekTime(MediaTime::invalidTime()) // No seek in flight initially.
    , m_asyncEventQueue(MainThreadGenericEventQueue::create(*this))
#if !RELEASE_LOG_DISABLED
    , m_logger(downcast<Document>(context).logger())
#endif
{
    // sourceBuffers and activeSourceBuffers start out as empty lists.
    m_sourceBuffers = SourceBufferList::create(scriptExecutionContext());
    m_activeSourceBuffers = SourceBufferList::create(scriptExecutionContext());
}
MediaSource::~MediaSource()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    // A MediaSource must have been closed (detached/stopped) before destruction.
    ASSERT(isClosed());
}
// Called during attachment once the platform MediaSourcePrivate exists;
// transitions this MediaSource from "closed" to "open" (fires "sourceopen").
void MediaSource::setPrivateAndOpen(Ref<MediaSourcePrivate>&& mediaSourcePrivate)
{
    DEBUG_LOG(LOGIDENTIFIER);
    ASSERT(!m_private);
    ASSERT(m_mediaElement);
    m_private = WTFMove(mediaSourcePrivate);

    // NOTE(review): the closed state is expected here; a non-closed state at
    // this point is treated as a fatal load failure — confirm which sequence
    // produces it (appears to guard against unexpected re-entrancy).
    if (!isClosed()) {
        m_mediaElement->mediaLoadingFailedFatally(MediaPlayer::NetworkState::NetworkError);
        return;
    }

    // MSE attachment delays the element's load event until the source opens.
    m_mediaElement->setShouldDelayLoadEvent(false);
    setReadyState(ReadyState::Open);
}
// An object URL was registered for this MediaSource: pin the object (and its
// wrapper) alive until the URL is revoked.
void MediaSource::addedToRegistry()
{
    DEBUG_LOG(LOGIDENTIFIER);
    setPendingActivity(*this);
}

// The registered URL was revoked: release the self-pin taken in addedToRegistry().
void MediaSource::removedFromRegistry()
{
    DEBUG_LOG(LOGIDENTIFIER);
    unsetPendingActivity(*this);
}
// Current media duration; invalid (NaN) while the source is closed or unset.
MediaTime MediaSource::duration() const
{
    return m_duration;
}
// Current playback position of the attached media element, or zero when no
// element is attached.
MediaTime MediaSource::currentTime() const
{
    if (!m_mediaElement)
        return MediaTime::zeroTime();
    return m_mediaElement->currentMediaTime();
}
// Computes HTMLMediaElement.buffered for this source: the intersection of all
// active SourceBuffers' buffered ranges (with each buffer's last range
// extended to the highest end time once the stream has ended). The result is
// cached in m_buffered and reused until an active buffer reports itself dirty.
std::unique_ptr<PlatformTimeRanges> MediaSource::buffered() const
{
    // Fast path: reuse the cache while no active buffer has changed.
    if (m_buffered && m_activeSourceBuffers->length() && std::all_of(m_activeSourceBuffers->begin(), m_activeSourceBuffers->end(), [](auto& buffer) { return !buffer->isBufferedDirty(); }))
        return makeUnique<PlatformTimeRanges>(*m_buffered);

    // Recompute: reset the cache and clear every buffer's dirty flag.
    m_buffered = makeUnique<PlatformTimeRanges>();
    for (auto& sourceBuffer : *m_activeSourceBuffers)
        sourceBuffer->setBufferedDirty(false);

    // No active ranges -> empty result.
    Vector<PlatformTimeRanges> activeRanges = this->activeRanges();
    if (activeRanges.isEmpty())
        return makeUnique<PlatformTimeRanges>(*m_buffered);

    // Find the highest buffered end time across all active buffers.
    MediaTime highestEndTime = MediaTime::zeroTime();
    for (auto& ranges : activeRanges) {
        unsigned length = ranges.length();
        if (length)
            highestEndTime = std::max(highestEndTime, ranges.end(length - 1));
    }

    // Nothing buffered anywhere -> empty result.
    if (!highestEndTime)
        return makeUnique<PlatformTimeRanges>(*m_buffered);

    // Start from [0, highestEndTime] and intersect with each buffer's ranges.
    m_buffered->add(MediaTime::zeroTime(), highestEndTime);

    bool ended = readyState() == ReadyState::Ended;
    for (auto& sourceRanges : activeRanges) {
        // After endOfStream(), each buffer's final range is treated as
        // reaching highestEndTime so trailing gaps don't truncate the result.
        if (ended && sourceRanges.length())
            sourceRanges.add(sourceRanges.start(sourceRanges.length() - 1), highestEndTime);
        m_buffered->intersectWith(sourceRanges);
    }

    return makeUnique<PlatformTimeRanges>(*m_buffered);
}
// Begins servicing a seek to |time|. If the target is not buffered yet, the
// seek stalls until monitorSourceBuffers() sees the data arrive; otherwise it
// completes immediately.
void MediaSource::seekToTime(const MediaTime& time)
{
    if (isClosed())
        return;

    ALWAYS_LOG(LOGIDENTIFIER, time);

    // Remember the target; completeSeek() consumes and clears it.
    m_pendingSeekTime = time;

    // Target not buffered: drop the platform back to HaveMetadata and tell it
    // to wait; completeSeek() will run later from monitorSourceBuffers().
    if (!hasBufferedTime(time)) {
        m_private->setReadyState(MediaPlayer::ReadyState::HaveMetadata);
        m_private->waitForSeekCompleted();
        return;
    }

#if !USE(GSTREAMER)
    // Non-GStreamer ports pair waitForSeekCompleted()/seekCompleted() even for
    // an immediately-satisfiable seek.
    m_private->waitForSeekCompleted();
#endif
    completeSeek();
}
// Finishes a seek started by seekToTime(): forwards the target time to every
// active SourceBuffer, signals the platform, and re-evaluates ready state.
void MediaSource::completeSeek()
{
    if (isClosed())
        return;

    ASSERT(m_pendingSeekTime.isValid());
    ALWAYS_LOG(LOGIDENTIFIER, m_pendingSeekTime);

    // NOTE(review): m_pendingSeekTime is cleared before the calls below,
    // presumably so the trailing monitorSourceBuffers() (which checks it)
    // doesn't re-run completeSeek() — confirm.
    MediaTime pendingSeekTime = m_pendingSeekTime;
    m_pendingSeekTime = MediaTime::invalidTime();
    for (auto& sourceBuffer : *m_activeSourceBuffers)
        sourceBuffer->seekToTime(pendingSeekTime);
    m_private->seekCompleted();
    monitorSourceBuffers();
}
// Implements the MSE seekable-range algorithm backing HTMLMediaElement.seekable:
// a duration-less source is unseekable; an infinite-duration (live) source
// derives its range from buffered data plus any script-set live seekable
// range; a finite source is seekable in [0, duration].
Ref<TimeRanges> MediaSource::seekable()
{
    // Duration NaN -> empty TimeRanges.
    if (m_duration.isInvalid())
        return TimeRanges::create();

    // Live stream (duration == +Infinity).
    if (m_duration.isPositiveInfinite()) {
        auto buffered = this->buffered();
        // Live seekable range set by script: union with buffered data and
        // return a single span from the earliest start to the latest end.
        if (m_liveSeekable && m_liveSeekable->length()) {
            buffered->unionWith(*m_liveSeekable);
            buffered->add(buffered->start(0), buffered->maximumBufferedTime());
            return TimeRanges::create(*buffered);
        }
        // No live range and nothing buffered -> empty TimeRanges.
        if (!buffered->length())
            return TimeRanges::create();
        return TimeRanges::create({MediaTime::zeroTime(), buffered->maximumBufferedTime()});
    }

    return TimeRanges::create({MediaTime::zeroTime(), m_duration});
}
// MediaSource.setLiveSeekableRange(start, end): records the script-provided
// seekable window consulted by seekable() for live (infinite-duration) streams.
// Throws InvalidStateError unless open; TypeError for a negative or inverted range.
ExceptionOr<void> MediaSource::setLiveSeekableRange(double start, double end)
{
    ALWAYS_LOG(LOGIDENTIFIER, "start = ", start, ", end = ", end);

    if (!isOpen())
        return Exception { InvalidStateError };

    if (start < 0)
        return Exception { TypeError };
    if (start > end)
        return Exception { TypeError };

    m_liveSeekable = makeUnique<PlatformTimeRanges>(MediaTime::createWithDouble(start), MediaTime::createWithDouble(end));
    return { };
}
// MediaSource.clearLiveSeekableRange(): drops the script-provided window so
// seekable() falls back to its buffered-based computation.
ExceptionOr<void> MediaSource::clearLiveSeekableRange()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!isOpen())
        return Exception { InvalidStateError };

    m_liveSeekable.reset();
    return { };
}
// Tolerance used when deciding whether a time is "close enough" to buffered
// data: 2002/24000 seconds (~83ms, two frame durations at 23.976fps), which
// papers over tiny gaps between appended segments.
const MediaTime& MediaSource::currentTimeFudgeFactor()
{
    static NeverDestroyed<MediaTime> fudgeFactor(2002, 24000);
    return fudgeFactor;
}
// Raw AAC and MP3 byte streams carry no container timestamps, so SourceBuffers
// for these types must generate timestamps themselves (and default to
// "sequence" append mode — see addSourceBuffer()).
bool MediaSource::contentTypeShouldGenerateTimestamps(const ContentType& contentType)
{
    auto containerType = contentType.containerType();
    return containerType == "audio/aac" || containerType == "audio/mpeg";
}
// Returns true when |time| does not exceed the duration and lies within (or
// within the fudge factor of) some buffered range.
bool MediaSource::hasBufferedTime(const MediaTime& time)
{
    if (time > duration())
        return false;

    auto ranges = buffered();
    if (!ranges->length())
        return false;

    // Times within one fudge factor of a buffered range count as buffered,
    // tolerating small gaps between appended segments.
    return abs(ranges->nearest(time) - time) <= currentTimeFudgeFactor();
}
// True when media is buffered at the element's current playback position.
bool MediaSource::hasCurrentTime()
{
    return hasBufferedTime(currentTime());
}
// Returns true when playback can make progress from the current position:
// either we are at (or past) the end of the media, or the buffered range
// containing the current time extends beyond it by more than the fudge factor.
bool MediaSource::hasFutureTime()
{
    MediaTime currentTime = this->currentTime();
    MediaTime duration = this->duration();

    // At or past the end: nothing further is required.
    if (currentTime >= duration)
        return true;

    auto ranges = buffered();
    MediaTime nearest = ranges->nearest(currentTime);
    // Current position not (nearly) inside any buffered range.
    if (abs(nearest - currentTime) > currentTimeFudgeFactor())
        return false;

    size_t found = ranges->find(nearest);
    if (found == notFound)
        return false;

    // The containing range reaches the duration, i.e. buffered to the end.
    MediaTime localEnd = ranges->end(found);
    if (localEnd == duration)
        return true;

    return localEnd - currentTime > currentTimeFudgeFactor();
}
// MSE buffered-range monitoring: translates the buffered state of the active
// SourceBuffers into the platform's HAVE_* ready state, and completes a
// pending seek once data at the seek target becomes available.
void MediaSource::monitorSourceBuffers()
{
    if (isClosed())
        return;

    // No active source buffers: nothing has been loaded.
    if (!m_activeSourceBuffers) {
        m_private->setReadyState(MediaPlayer::ReadyState::HaveNothing);
        return;
    }

    // While the element is still HAVE_NOTHING, leave its state alone; per the
    // MSE spec the first transition is driven by init-segment processing.
    if (mediaElement()->readyState() == HTMLMediaElement::HAVE_NOTHING) {
        return;
    }

    // Nothing buffered at the current playback position.
    if (!hasCurrentTime()) {
        m_private->setReadyState(MediaPlayer::ReadyState::HaveMetadata);
        return;
    }

    // Every active buffer can play through its buffered range: enough data.
    auto ranges = buffered();
    if (std::all_of(m_activeSourceBuffers->begin(), m_activeSourceBuffers->end(), [&](auto& sourceBuffer) {
        return sourceBuffer->canPlayThroughRange(*ranges);
    })) {
        m_private->setReadyState(MediaPlayer::ReadyState::HaveEnoughData);
        if (m_pendingSeekTime.isValid())
            completeSeek();
        return;
    }

    // Data exists beyond the current position, but not enough to play through.
    if (hasFutureTime()) {
        m_private->setReadyState(MediaPlayer::ReadyState::HaveFutureData);
        if (m_pendingSeekTime.isValid())
            completeSeek();
        return;
    }

    // Only the current position is buffered.
    m_private->setReadyState(MediaPlayer::ReadyState::HaveCurrentData);
    if (m_pendingSeekTime.isValid())
        completeSeek();
}
// MediaSource.duration setter: validates the value and state, then runs the
// duration-change algorithm (setDurationInternal). Throws TypeError for
// negative/NaN values and InvalidStateError when not open or while any
// SourceBuffer has an append in flight.
ExceptionOr<void> MediaSource::setDuration(double duration)
{
    ALWAYS_LOG(LOGIDENTIFIER, duration);

    if (std::isnan(duration) || duration < 0.0)
        return Exception { TypeError };

    if (!isOpen())
        return Exception { InvalidStateError };

    if (std::any_of(m_sourceBuffers->begin(), m_sourceBuffers->end(), [](auto& sourceBuffer) { return sourceBuffer->updating(); }))
        return Exception { InvalidStateError };

    return setDurationInternal(MediaTime::createWithDouble(duration));
}
// MSE duration-change algorithm: rejects a duration below the highest buffered
// presentation timestamp, silently extends a duration that falls inside a
// buffered range to that range's end, then notifies the platform.
ExceptionOr<void> MediaSource::setDurationInternal(const MediaTime& duration)
{
    MediaTime newDuration = duration;

    // Unchanged duration: nothing to do.
    if (newDuration == m_duration)
        return { };

    // Highest coded-frame timestamp and highest buffered end time across all
    // source buffers.
    MediaTime highestPresentationTimestamp;
    MediaTime highestEndTime;
    for (auto& sourceBuffer : *m_sourceBuffers) {
        highestPresentationTimestamp = std::max(highestPresentationTimestamp, sourceBuffer->highestPresentationTimestamp());
        highestEndTime = std::max(highestEndTime, sourceBuffer->bufferedInternal().ranges().maximumBufferedTime());
    }

    // Shrinking below already-buffered coded frames is an error...
    if (highestPresentationTimestamp.isValid() && newDuration < highestPresentationTimestamp)
        return Exception { InvalidStateError };

    // ...but a duration inside a buffered range is extended to the range end.
    if (highestEndTime.isValid() && newDuration < highestEndTime)
        newDuration = highestEndTime;

    m_duration = newDuration;
    ALWAYS_LOG(LOGIDENTIFIER, duration);
    m_private->durationChanged();
    return { };
}
// Transitions readyState, notifying source buffers and firing the matching
// DOM event via onReadyStateChange(). No-op when the state is unchanged.
void MediaSource::setReadyState(ReadyState state)
{
    auto oldState = readyState();
    if (oldState == state)
        return;

    m_readyState = state;

    onReadyStateChange(oldState, state);
}
// MediaSource.endOfStream(error): validates state — must be open and no
// SourceBuffer updating — then runs the end-of-stream algorithm.
ExceptionOr<void> MediaSource::endOfStream(Optional<EndOfStreamError> error)
{
    ALWAYS_LOG(LOGIDENTIFIER);

    if (!isOpen())
        return Exception { InvalidStateError };

    for (auto& sourceBuffer : *m_sourceBuffers) {
        if (sourceBuffer->updating())
            return Exception { InvalidStateError };
    }

    streamEndedWithError(error);
    return { };
}
// MSE end-of-stream algorithm: moves to "ended" and then either finalizes the
// duration (no error) or reports a network/decode failure to the element.
void MediaSource::streamEndedWithError(Optional<EndOfStreamError> error)
{
#if !RELEASE_LOG_DISABLED
    if (error)
        ALWAYS_LOG(LOGIDENTIFIER, error.value());
    else
        ALWAYS_LOG(LOGIDENTIFIER);
#endif

    if (isClosed())
        return;

    // Change the ready state to "ended" (fires "sourceended").
    setReadyState(ReadyState::Ended);

    if (!error) {
        // No error: set the duration to the highest buffered end time across
        // all buffers, let buffers flush their final samples, then mark end of
        // stream on the platform.
        MediaTime maxEndTime;
        for (auto& sourceBuffer : *m_sourceBuffers) {
            if (auto length = sourceBuffer->bufferedInternal().length())
                maxEndTime = std::max(sourceBuffer->bufferedInternal().ranges().end(length - 1), maxEndTime);
        }
        setDurationInternal(maxEndTime);

        for (auto& sourceBuffer : *m_sourceBuffers)
            sourceBuffer->trySignalAllSamplesEnqueued();

        m_private->markEndOfStream(MediaSourcePrivate::EosNoError);
    } else if (error == EndOfStreamError::Network) {
        ASSERT(m_mediaElement);
        if (m_mediaElement->readyState() == HTMLMediaElement::HAVE_NOTHING) {
            // No data received yet: report like a resource-fetch network error.
            m_mediaElement->mediaLoadingFailed(MediaPlayer::NetworkState::NetworkError);
        } else {
            // Mid-stream failure: fatal network error.
            m_mediaElement->mediaLoadingFailedFatally(MediaPlayer::NetworkState::NetworkError);
        }
    } else {
        ASSERT(error == EndOfStreamError::Decode);
        ASSERT(m_mediaElement);
        if (m_mediaElement->readyState() == HTMLMediaElement::HAVE_NOTHING) {
            // No data received yet: treat as a badly-formatted resource.
            m_mediaElement->mediaLoadingFailed(MediaPlayer::NetworkState::FormatError);
        } else {
            m_mediaElement->mediaLoadingFailedFatally(MediaPlayer::NetworkState::DecodeError);
        }
    }
}
// Some platform decoders only accept fully-specified "vp09" codec strings.
// When a codec string has exactly 8 fields (7 periods) — i.e. everything up to
// and including matrixCoefficients — append the videoFullRangeFlag field
// ".00" (studio/legal range) to complete it.
static ContentType addVP9FullRangeVideoFlagToContentType(const ContentType& type)
{
    // Count the '.' separators in a codec string.
    // BUG FIX: the previous implementation only tested find() != notFound and
    // advanced the search position by one each iteration, so it returned
    // (index of last period + 1) instead of the number of periods — the
    // countPeriods(codec) != 7 check below could therefore never match and
    // the quirk was never applied.
    auto countPeriods = [] (const String& codec) {
        unsigned count = 0;
        size_t position = codec.find('.');
        while (position != notFound) {
            ++count;
            position = codec.find('.', position + 1);
        }
        return count;
    };

    for (auto codec : type.codecs()) {
        // Only rewrite vp09 strings that are fully specified except for the
        // trailing videoFullRangeFlag field.
        if (!codec.startsWith("vp09") || countPeriods(codec) != 7)
            continue;

        auto rawType = type.raw();
        auto position = rawType.find(codec);
        ASSERT(position != notFound);
        if (position == notFound)
            continue;

        // Insert ".00" immediately after the codec string inside the raw type.
        rawType.insert(".00", position + codec.length());
        return ContentType(rawType);
    }
    return type;
}
// MediaSource.addSourceBuffer(type): validates the MIME type, creates the
// platform SourceBufferPrivate, wraps it in a SourceBuffer, and appends it to
// sourceBuffers. Throws TypeError (empty type), NotSupportedError (unsupported
// type), or InvalidStateError (source not open).
ExceptionOr<Ref<SourceBuffer>> MediaSource::addSourceBuffer(const String& type)
{
    DEBUG_LOG(LOGIDENTIFIER, type);

    // Empty type string -> TypeError.
    if (type.isEmpty())
        return Exception { TypeError };

    Vector<ContentType> mediaContentTypesRequiringHardwareSupport;
    if (m_mediaElement)
        mediaContentTypesRequiringHardwareSupport.appendVector(m_mediaElement->document().settings().mediaContentTypesRequiringHardwareSupport());

    auto context = scriptExecutionContext();
    if (!context)
        return Exception { NotAllowedError };

    // Unsupported MIME type/codecs -> NotSupportedError.
    if (!isTypeSupported(*context, type, WTFMove(mediaContentTypesRequiringHardwareSupport)))
        return Exception { NotSupportedError };

    // Buffers can only be added while the source is open.
    if (!isOpen())
        return Exception { InvalidStateError };

    ContentType contentType(type);
    if (context->isDocument() && downcast<Document>(context)->quirks().needsVP9FullRangeFlagQuirk())
        contentType = addVP9FullRangeVideoFlagToContentType(contentType);

    // Create the platform buffer; propagates NotSupported/QuotaExceeded.
    auto sourceBufferPrivate = createSourceBufferPrivate(contentType);
    if (sourceBufferPrivate.hasException()) {
        return sourceBufferPrivate.releaseException();
    }

    auto buffer = SourceBuffer::create(sourceBufferPrivate.releaseReturnValue(), this);
    DEBUG_LOG(LOGIDENTIFIER, "created SourceBuffer");

    // AAC/MP3 streams carry no in-band timestamps: generate them and default
    // to "sequence" append mode; all other types default to "segments".
    bool shouldGenerateTimestamps = contentTypeShouldGenerateTimestamps(contentType);
    buffer->setShouldGenerateTimestamps(shouldGenerateTimestamps);
    buffer->setMode(shouldGenerateTimestamps ? SourceBuffer::AppendMode::Sequence : SourceBuffer::AppendMode::Segments);

    // Append to sourceBuffers and refresh the active-buffer list.
    m_sourceBuffers->add(buffer.copyRef());
    regenerateActiveSourceBuffers();
    return buffer;
}
// MediaSource.removeSourceBuffer(buffer): detaches |buffer| from this source,
// removing its audio/video/text tracks from the media element and scheduling
// "change" events on the element's track lists when an enabled/selected track
// disappears. Throws NotFoundError when |buffer| is not in sourceBuffers.
ExceptionOr<void> MediaSource::removeSourceBuffer(SourceBuffer& buffer)
{
    DEBUG_LOG(LOGIDENTIFIER);

    // Keep the buffer alive while we tear it down.
    Ref<SourceBuffer> protect(buffer);

    if (!m_sourceBuffers->length() || !m_sourceBuffers->contains(buffer))
        return Exception { NotFoundError };

    // Abort any append in flight before removal.
    buffer.abortIfUpdating();

    ASSERT(scriptExecutionContext());
    if (!scriptExecutionContext()->activeDOMObjectsAreStopped()) {
        // Strip the buffer's audio tracks from the element; remember whether
        // an enabled track went away so a "change" event can be scheduled.
        auto* audioTracks = buffer.audioTracksIfExists();
        if (audioTracks && audioTracks->length()) {
            bool removedEnabledAudioTrack = false;
            while (audioTracks->length()) {
                auto& track = *audioTracks->lastItem();
                track.setSourceBuffer(nullptr);
                if (track.enabled())
                    removedEnabledAudioTrack = true;
                if (mediaElement())
                    mediaElement()->removeAudioTrack(track);
                audioTracks->remove(track);
            }
            // BUG FIX: guard against a null media element, matching the
            // per-track checks above; previously this dereferenced
            // mediaElement() unconditionally.
            if (removedEnabledAudioTrack && mediaElement())
                mediaElement()->ensureAudioTracks().scheduleChangeEvent();
        }

        // Same procedure for video tracks ("selected" instead of "enabled").
        auto* videoTracks = buffer.videoTracksIfExists();
        if (videoTracks && videoTracks->length()) {
            bool removedSelectedVideoTrack = false;
            while (videoTracks->length()) {
                auto& track = *videoTracks->lastItem();
                track.setSourceBuffer(nullptr);
                if (track.selected())
                    removedSelectedVideoTrack = true;
                if (mediaElement())
                    mediaElement()->removeVideoTrack(track);
                videoTracks->remove(track);
            }
            // BUG FIX: null-check mediaElement() as above.
            if (removedSelectedVideoTrack && mediaElement())
                mediaElement()->ensureVideoTracks().scheduleChangeEvent();
        }

        // Same procedure for text tracks ("showing"/"hidden" count as enabled).
        auto* textTracks = buffer.textTracksIfExists();
        if (textTracks && textTracks->length()) {
            bool removedEnabledTextTrack = false;
            while (textTracks->length()) {
                auto& track = *textTracks->lastItem();
                track.setSourceBuffer(nullptr);
                if (track.mode() == TextTrack::Mode::Showing || track.mode() == TextTrack::Mode::Hidden)
                    removedEnabledTextTrack = true;
                if (mediaElement())
                    mediaElement()->removeTextTrack(track);
                textTracks->remove(track);
            }
            // BUG FIX: null-check mediaElement() as above.
            if (removedEnabledTextTrack && mediaElement())
                mediaElement()->ensureTextTracks().scheduleChangeEvent();
        }
    }

    // Remove from both lists and release the buffer's platform resources.
    m_activeSourceBuffers->remove(buffer);
    m_sourceBuffers->remove(buffer);
    buffer.removedFromMediaSource();

    return { };
}
// Convenience overload: gathers the document's hardware-decode-required
// content types, then defers to the full isTypeSupported() overload.
bool MediaSource::isTypeSupported(ScriptExecutionContext& context, const String& type)
{
    Vector<ContentType> typesRequiringHardware;
    if (context.isDocument())
        typesRequiringHardware.appendVector(downcast<Document>(context).settings().mediaContentTypesRequiringHardwareSupport());
    return isTypeSupported(context, type, WTFMove(typesRequiringHardware));
}
// Core MediaSource.isTypeSupported() check: the type must name a container and
// the platform must support it as a media-source type. Without a codecs
// parameter a "maybe" answer is accepted; with codecs, full support is required.
bool MediaSource::isTypeSupported(ScriptExecutionContext& context, const String& type, Vector<ContentType>&& contentTypesRequiringHardwareSupport)
{
    // Empty type -> not supported.
    if (type.isNull() || type.isEmpty())
        return false;

    ContentType contentType(type);
    if (context.isDocument() && downcast<Document>(context).quirks().needsVP9FullRangeFlagQuirk())
        contentType = addVP9FullRangeVideoFlagToContentType(contentType);

    String codecs = contentType.parameter("codecs");

    // A type without a container MIME type is invalid.
    if (contentType.containerType().isEmpty())
        return false;

    MediaEngineSupportParameters parameters;
    parameters.type = contentType;
    parameters.isMediaSource = true;
    parameters.contentTypesRequiringHardwareSupport = WTFMove(contentTypesRequiringHardwareSupport);
    MediaPlayer::SupportsType supported = MediaPlayer::supportsType(parameters);

    // No codecs listed: "IsSupported" or "MayBeSupported" both pass.
    if (codecs.isEmpty())
        return supported != MediaPlayer::SupportsType::IsNotSupported;

    // Codecs listed: only a definite "IsSupported" passes.
    return supported == MediaPlayer::SupportsType::IsSupported;
}
// readyState convenience predicates.
bool MediaSource::isOpen() const
{
    return readyState() == ReadyState::Open;
}

bool MediaSource::isClosed() const
{
    return readyState() == ReadyState::Closed;
}

bool MediaSource::isEnded() const
{
    return readyState() == ReadyState::Ended;
}
// Tears down the association with |element|: closes the source (fires
// "sourceclose"), resets the duration, removes every SourceBuffer, and
// releases the platform object.
void MediaSource::detachFromElement(HTMLMediaElement& element)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    ASSERT_UNUSED(element, m_mediaElement == &element);

    // Per MSE detach steps: readyState -> "closed", duration -> NaN.
    setReadyState(ReadyState::Closed);
    m_duration = MediaTime::invalidTime();

    // Remove active buffers first, then the remainder; removeSourceBuffer()
    // also strips the buffers' tracks from the element.
    while (m_activeSourceBuffers->length())
        removeSourceBuffer(*m_activeSourceBuffers->item(0));
    while (m_sourceBuffers->length())
        removeSourceBuffer(*m_sourceBuffers->item(0));

    m_private = nullptr;
    m_mediaElement = nullptr;
}
// A SourceBuffer's "active" flag flipped: rebuild the activeSourceBuffers list.
void MediaSource::sourceBufferDidChangeActiveState(SourceBuffer&, bool)
{
    regenerateActiveSourceBuffers();
}
// Associates this source with |element|. Returns false when already attached;
// a source may only be (re)attached while closed.
bool MediaSource::attachToElement(HTMLMediaElement& element)
{
    if (m_mediaElement)
        return false;

    ASSERT(isClosed());

    // Weak reference: the element owns the attachment lifecycle.
    m_mediaElement = makeWeakPtr(&element);
    return true;
}
// MSE: appending to or removing from an "ended" source reopens it — moves back
// to "open" (fires "sourceopen") and clears the platform end-of-stream marker.
void MediaSource::openIfInEndedState()
{
    if (m_readyState != ReadyState::Ended)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);
    setReadyState(ReadyState::Open);
    m_private->unmarkEndOfStream();
}
// Keeps this object (and its JS wrapper) alive while it is backed by a
// platform media source or still has queued events to deliver.
bool MediaSource::virtualHasPendingActivity() const
{
    return m_private || m_asyncEventQueue->hasPendingActivity();
}
// ActiveDOMObject override, called at context teardown: detach from the
// element and close. Sets m_readyState directly — bypassing setReadyState() —
// so no "sourceclose" event is scheduled during destruction.
void MediaSource::stop()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaElement)
        m_mediaElement->detachMediaSource();
    m_readyState = ReadyState::Closed;
    m_private = nullptr;
}
// ActiveDOMObject diagnostics name.
const char* MediaSource::activeDOMObjectName() const
{
    return "MediaSource";
}
// Reacts to a readyState transition: lets every SourceBuffer observe it, then
// fires the matching DOM event — "sourceopen", "sourceended" (open -> ended),
// or "sourceclose".
void MediaSource::onReadyStateChange(ReadyState oldState, ReadyState newState)
{
    ALWAYS_LOG(LOGIDENTIFIER, "old state = ", oldState, ", new state = ", newState);

    for (auto& sourceBuffer : *m_sourceBuffers)
        sourceBuffer->readyStateChanged();

    auto& eventName = [&]() -> const AtomString& {
        if (isOpen())
            return eventNames().sourceopenEvent;
        if (oldState == ReadyState::Open && newState == ReadyState::Ended)
            return eventNames().sourceendedEvent;
        ASSERT(isClosed());
        return eventNames().sourcecloseEvent;
    }();
    scheduleEvent(eventName);
}
// Collects the buffered ranges of every active SourceBuffer, in list order.
Vector<PlatformTimeRanges> MediaSource::activeRanges() const
{
    Vector<PlatformTimeRanges> ranges;
    ranges.reserveInitialCapacity(m_activeSourceBuffers->length());
    for (auto& sourceBuffer : *m_activeSourceBuffers)
        ranges.uncheckedAppend(sourceBuffer->bufferedInternal().ranges());
    return ranges;
}
// Asks the platform MediaSourcePrivate for a SourceBufferPrivate matching
// |incomingType|, mapping the platform status codes to DOM exceptions.
ExceptionOr<Ref<SourceBufferPrivate>> MediaSource::createSourceBufferPrivate(const ContentType& incomingType)
{
    ContentType type { incomingType };

    auto context = scriptExecutionContext();
    if (context && context->isDocument() && downcast<Document>(context)->quirks().needsVP9FullRangeFlagQuirk())
        type = addVP9FullRangeVideoFlagToContentType(incomingType);

    RefPtr<SourceBufferPrivate> sourceBufferPrivate;
    switch (m_private->addSourceBuffer(type, sourceBufferPrivate)) {
    case MediaSourcePrivate::Ok:
        return sourceBufferPrivate.releaseNonNull();
    case MediaSourcePrivate::NotSupported:
        // Unsupported type -> NotSupportedError.
        return Exception { NotSupportedError };
    case MediaSourcePrivate::ReachedIdLimit:
        // Platform cannot handle another SourceBuffer -> QuotaExceededError.
        return Exception { QuotaExceededError };
    }

    ASSERT_NOT_REACHED();
    return Exception { QuotaExceededError };
}
// Queues a simple (non-bubbling, non-cancelable) event named |eventName| for
// asynchronous delivery at this MediaSource.
void MediaSource::scheduleEvent(const AtomString& eventName)
{
    DEBUG_LOG(LOGIDENTIFIER, "scheduling '", eventName, "'");

    auto event = Event::create(eventName, Event::CanBubble::No, Event::IsCancelable::No);
    event->setTarget(this);

    m_asyncEventQueue->enqueueEvent(WTFMove(event));
}
// ActiveDOMObject / EventTarget plumbing.
ScriptExecutionContext* MediaSource::scriptExecutionContext() const
{
    return ActiveDOMObject::scriptExecutionContext();
}

EventTargetInterface MediaSource::eventTargetInterface() const
{
    return MediaSourceEventTargetInterfaceType;
}

// Registry that resolves object URLs minted for MediaSource objects.
URLRegistry& MediaSource::registry() const
{
    return MediaSourceRegistry::registry();
}
void MediaSource::regenerateActiveSourceBuffers()
{
Vector<RefPtr<SourceBuffer>> newList;
for (auto& sourceBuffer : *m_sourceBuffers) {
if (sourceBuffer->active())
newList.append(sourceBuffer);
}
m_activeSourceBuffers->swap(newList);
for (auto& sourceBuffer : *m_activeSourceBuffers)
sourceBuffer->setBufferedDirty(true);
}
#if !RELEASE_LOG_DISABLED
// Adopts an externally supplied log identifier so this object's messages can
// be correlated with its owner's.
void MediaSource::setLogIdentifier(const void* identifier)
{
    m_logIdentifier = identifier;
    ALWAYS_LOG(LOGIDENTIFIER);
}

// Log channel used by the logging macros in this file.
WTFLogChannel& MediaSource::logChannel() const
{
    return LogMediaSource;
}
#endif
// Surfaces a platform renderer-creation failure to the page's JS console;
// there is no DOM event for this condition.
void MediaSource::failedToCreateRenderer(RendererType type)
{
    auto context = scriptExecutionContext();
    if (!context)
        return;
    const char* rendererName = type == RendererType::Video ? "video" : "audio";
    context->addConsoleMessage(MessageSource::JS, MessageLevel::Error, makeString("MediaSource ", rendererName, " renderer creation failed."));
}
}
#endif