// SourceBufferPrivate.cpp
#include "config.h"
#include "SourceBufferPrivate.h"
#if ENABLE(MEDIA_SOURCE)
#include "Logging.h"
#include "MediaDescription.h"
#include "MediaSample.h"
#include "PlatformTimeRanges.h"
#include "SampleMap.h"
#include "SourceBufferPrivateClient.h"
#include "TimeRanges.h"
#include <wtf/MediaTime.h>
#include <wtf/StringPrintStream.h>
namespace WebCore {
// Maximum tolerated decode-time gap (1 second) between consecutively enqueued
// samples; enqueueing pauses when the next sample would start beyond this
// boundary (see provideMediaData).
static const MediaTime discontinuityTolerance = MediaTime(1, 1);
// All timing fields start out invalid ("no sample seen yet") and are filled in
// as samples are appended and enqueued. The discontinuity boundary starts at
// the shared 1-second tolerance so the very first enqueued sample is not
// treated as following an unbuffered gap.
SourceBufferPrivate::TrackBuffer::TrackBuffer()
    : lastDecodeTimestamp(MediaTime::invalidTime())
    , greatestDecodeDuration(MediaTime::invalidTime())
    , lastFrameDuration(MediaTime::invalidTime())
    , highestPresentationTimestamp(MediaTime::invalidTime())
    , highestEnqueuedPresentationTime(MediaTime::invalidTime())
    , lastEnqueuedDecodeKey({MediaTime::invalidTime(), MediaTime::invalidTime()})
    , enqueueDiscontinuityBoundary(discontinuityTolerance)
{
}
// Create the shared buffered-ranges object eagerly so m_buffered is never null.
SourceBufferPrivate::SourceBufferPrivate()
    : m_buffered(TimeRanges::create())
{
}
// Defaulted out-of-line — presumably so member types can stay forward-declared
// in the header; confirm against SourceBufferPrivate.h.
SourceBufferPrivate::~SourceBufferPrivate() = default;
void SourceBufferPrivate::resetTimestampOffsetInTrackBuffers()
{
for (auto& trackBuffer : m_trackBufferMap.values()) {
trackBuffer.lastFrameTimescale = 0;
trackBuffer.roundedTimestampOffset = MediaTime::invalidTime();
}
}
// Forwards the buffered-dirty notification to the client, if one is attached.
void SourceBufferPrivate::setBufferedDirty(bool flag)
{
    if (!m_client)
        return;
    m_client->sourceBufferPrivateBufferedDirtyChanged(flag);
}
void SourceBufferPrivate::resetTrackBuffers()
{
for (auto& trackBufferPair : m_trackBufferMap.values()) {
trackBufferPair.lastDecodeTimestamp = MediaTime::invalidTime();
trackBufferPair.greatestDecodeDuration = MediaTime::invalidTime();
trackBufferPair.lastFrameDuration = MediaTime::invalidTime();
trackBufferPair.highestPresentationTimestamp = MediaTime::invalidTime();
trackBufferPair.needRandomAccessFlag = true;
}
}
void SourceBufferPrivate::updateHighestPresentationTimestamp()
{
MediaTime highestTime;
for (auto& trackBuffer : m_trackBufferMap.values()) {
auto lastSampleIter = trackBuffer.samples.presentationOrder().rbegin();
if (lastSampleIter == trackBuffer.samples.presentationOrder().rend())
continue;
highestTime = std::max(highestTime, lastSampleIter->first);
}
if (m_highestPresentationTimestamp == highestTime)
return;
m_highestPresentationTimestamp = highestTime;
if (m_client)
m_client->sourceBufferPrivateHighestPresentationTimestampChanged(m_highestPresentationTimestamp);
}
// Replaces the cached buffered ranges and tells the client about the change.
void SourceBufferPrivate::setBufferedRanges(const PlatformTimeRanges& timeRanges)
{
    m_buffered->ranges() = timeRanges;
    if (!m_client)
        return;
    m_client->sourceBufferPrivateBufferedRangesChanged(m_buffered->ranges());
}
// Recomputes the buffered ranges reported to the client from the per-track
// buffered ranges: the result is the intersection of every non-empty track's
// ranges, clipped to [0, highest end time]. When the source is ended, each
// track's coverage is logically extended to the highest end time so a
// slightly shorter track does not truncate the intersection.
void SourceBufferPrivate::updateBufferedFromTrackBuffers(bool sourceIsEnded)
{
    // Find the greatest buffered end time over all tracks that have data.
    MediaTime highestEndTime = MediaTime::negativeInfiniteTime();
    for (auto& trackBuffer : m_trackBufferMap.values()) {
        if (!trackBuffer.buffered.length())
            continue;
        highestEndTime = std::max(highestEndTime, trackBuffer.buffered.maximumBufferedTime());
    }

    // No track has buffered anything: report an empty range set.
    if (highestEndTime.isNegativeInfinite()) {
        setBufferedRanges(PlatformTimeRanges());
        return;
    }

    PlatformTimeRanges intersectionRanges { MediaTime::zeroTime(), highestEndTime };
    for (auto& trackBuffer : m_trackBufferMap.values()) {
        PlatformTimeRanges trackRanges = trackBuffer.buffered;
        if (!trackRanges.length())
            continue;
        if (sourceIsEnded)
            trackRanges.add(trackRanges.maximumBufferedTime(), highestEndTime);
        intersectionRanges.intersectWith(trackRanges);
    }

    setBufferedRanges(intersectionRanges);
    setBufferedDirty(true);
}
// Called when an append finishes: refreshes the buffered ranges, then reports
// the append result and current memory cost to the client.
void SourceBufferPrivate::appendCompleted(bool parsingSucceeded, bool isEnded)
{
    updateBufferedFromTrackBuffers(isEnded);

    if (!m_client)
        return;

    auto result = parsingSucceeded ? SourceBufferPrivateClient::AppendResult::AppendSucceeded : SourceBufferPrivateClient::AppendResult::ParsingFailed;
    m_client->sourceBufferPrivateAppendComplete(result);
    m_client->sourceBufferPrivateReportExtraMemoryCost(totalTrackBufferSizeInBytes());
}
// Marks the given track for reenqueueing and restarts enqueueing from the
// current playback position.
void SourceBufferPrivate::reenqueSamples(const AtomString& trackID)
{
    if (!m_isAttached)
        return;

    auto iterator = m_trackBufferMap.find(trackID);
    if (iterator == m_trackBufferMap.end())
        return;

    iterator->value.needsReenqueueing = true;
    reenqueueMediaForTime(iterator->value, trackID, currentMediaTime());
}
void SourceBufferPrivate::seekToTime(const MediaTime& time)
{
for (auto& trackBufferPair : m_trackBufferMap) {
TrackBuffer& trackBuffer = trackBufferPair.value;
const AtomString& trackID = trackBufferPair.key;
trackBuffer.needsReenqueueing = true;
reenqueueMediaForTime(trackBuffer, trackID, time);
}
}
void SourceBufferPrivate::clearTrackBuffers()
{
for (auto& trackBufferPair : m_trackBufferMap.values()) {
trackBufferPair.samples.clear();
trackBufferPair.decodeQueue.clear();
}
}
// Completes with a textual description of every buffered sample for trackId,
// in decode order (used by testing/diagnostics).
void SourceBufferPrivate::bufferedSamplesForTrackId(const AtomString& trackId, CompletionHandler<void(Vector<String>&&)>&& completionHandler)
{
    auto it = m_trackBufferMap.find(trackId);
    if (it == m_trackBufferMap.end()) {
        // Unknown track: complete with an empty list and bail out. Without
        // this early return we would dereference the end() iterator below and
        // invoke the completion handler a second time.
        completionHandler({ });
        return;
    }

    auto& trackBuffer = it->value;
    Vector<String> sampleDescriptions;
    for (auto& pair : trackBuffer.samples.decodeOrder())
        sampleDescriptions.append(toString(*pair.second));

    completionHandler(WTFMove(sampleDescriptions));
}
// Returns the best time near targetTime for a "fast" seek: the nearest sync
// sample within [targetTime - negativeThreshold, targetTime + positiveThreshold],
// considered across all tracks, so decoding can start at a keyframe.
MediaTime SourceBufferPrivate::fastSeekTimeForMediaTime(const MediaTime& targetTime, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    if (!m_client)
        return targetTime;

    MediaTime seekTime = targetTime;

    for (auto& trackBuffer : m_trackBufferMap.values()) {
        // Nearest sync samples after and before the target, within threshold.
        auto futureSyncSampleIterator = trackBuffer.samples.decodeOrder().findSyncSampleAfterPresentationTime(targetTime, positiveThreshold);
        auto pastSyncSampleIterator = trackBuffer.samples.decodeOrder().findSyncSamplePriorToPresentationTime(targetTime, negativeThreshold);
        auto upperBound = trackBuffer.samples.decodeOrder().end();
        auto lowerBound = trackBuffer.samples.decodeOrder().rend();

        // This track has no candidate sync sample in either direction.
        if (futureSyncSampleIterator == upperBound && pastSyncSampleIterator == lowerBound)
            continue;

        MediaTime futureSeekTime = MediaTime::positiveInfiniteTime();
        if (futureSyncSampleIterator != upperBound) {
            RefPtr<MediaSample>& sample = futureSyncSampleIterator->second;
            futureSeekTime = sample->presentationTime();
        }

        MediaTime pastSeekTime = MediaTime::negativeInfiniteTime();
        if (pastSyncSampleIterator != lowerBound) {
            RefPtr<MediaSample>& sample = pastSyncSampleIterator->second;
            pastSeekTime = sample->presentationTime();
        }

        // Pick the closer direction for this track, then keep the farthest
        // per-track choice overall so every track can land on a sync sample.
        MediaTime trackSeekTime = abs(targetTime - futureSeekTime) < abs(targetTime - pastSeekTime) ? futureSeekTime : pastSeekTime;
        if (abs(targetTime - trackSeekTime) > abs(targetTime - seekTime))
            seekTime = trackSeekTime;
    }

    return seekTime;
}
// Recomputes the smallest presentation timestamp still pending in the decode
// queue for trackID and pushes it to the platform layer; clears the value
// when the queue is empty or the platform does not support it.
void SourceBufferPrivate::updateMinimumUpcomingPresentationTime(TrackBuffer& trackBuffer, const AtomString& trackID)
{
    if (!canSetMinimumUpcomingPresentationTime(trackID))
        return;

    if (trackBuffer.decodeQueue.empty()) {
        trackBuffer.minimumEnqueuedPresentationTime = MediaTime::invalidTime();
        clearMinimumUpcomingPresentationTime(trackID);
        return;
    }

    // The decode queue is keyed by decode time, not presentation time, so a
    // linear scan is required to find the minimum presentation timestamp.
    auto minPts = std::min_element(trackBuffer.decodeQueue.begin(), trackBuffer.decodeQueue.end(), [](auto& left, auto& right) -> bool {
        return left.second->presentationTime() < right.second->presentationTime();
    });

    if (minPts == trackBuffer.decodeQueue.end()) {
        trackBuffer.minimumEnqueuedPresentationTime = MediaTime::invalidTime();
        clearMinimumUpcomingPresentationTime(trackID);
        return;
    }

    trackBuffer.minimumEnqueuedPresentationTime = minPts->second->presentationTime();
    setMinimumUpcomingPresentationTime(trackID, trackBuffer.minimumEnqueuedPresentationTime);
}
void SourceBufferPrivate::setMediaSourceEnded(bool isEnded)
{
if (m_isMediaSourceEnded == isEnded)
return;
m_isMediaSourceEnded = isEnded;
if (m_isMediaSourceEnded) {
for (auto& trackBufferPair : m_trackBufferMap) {
TrackBuffer& trackBuffer = trackBufferPair.value;
const AtomString& trackID = trackBufferPair.key;
trySignalAllSamplesInTrackEnqueued(trackBuffer, trackID);
}
}
}
// Signals that a track has no more samples to deliver, but only once the
// media source has ended and the track's decode queue has fully drained.
void SourceBufferPrivate::trySignalAllSamplesInTrackEnqueued(TrackBuffer& trackBuffer, const AtomString& trackID)
{
    if (!m_isMediaSourceEnded || !trackBuffer.decodeQueue.empty())
        return;

    DEBUG_LOG(LOGIDENTIFIER, "All samples in track \"", trackID, "\" enqueued.");
    allSamplesInTrackEnqueued(trackID);
}
// Convenience overload: looks up the track buffer by ID and forwards to the
// TrackBuffer& overload; unknown IDs are ignored.
void SourceBufferPrivate::provideMediaData(const AtomString& trackID)
{
    auto iterator = m_trackBufferMap.find(trackID);
    if (iterator != m_trackBufferMap.end())
        provideMediaData(iterator->value, trackID);
}
// Drains trackBuffer's decode queue into the platform player, stopping early
// when the player cannot accept more samples or when the next sample would
// cross an unbuffered gap (start more than discontinuityTolerance past the
// end of the previously enqueued sample).
void SourceBufferPrivate::provideMediaData(TrackBuffer& trackBuffer, const AtomString& trackID)
{
    if (!m_isAttached || isSeeking())
        return;

#if !RELEASE_LOG_DISABLED
    unsigned enqueuedSamples = 0;
#endif

    if (trackBuffer.needsMinimumUpcomingPresentationTimeUpdating && canSetMinimumUpcomingPresentationTime(trackID)) {
        trackBuffer.minimumEnqueuedPresentationTime = MediaTime::invalidTime();
        clearMinimumUpcomingPresentationTime(trackID);
    }

    while (!trackBuffer.decodeQueue.empty()) {
        if (!isReadyForMoreSamples(trackID)) {
            DEBUG_LOG(LOGIDENTIFIER, "bailing early, track id ", trackID, " is not ready for more data");
            notifyClientWhenReadyForMoreSamples(trackID);
            break;
        }

        // Copy the RefPtr (rather than keep a reference) so erasing the queue
        // entry below does not invalidate it.
        auto sample = trackBuffer.decodeQueue.begin()->second;

        if (sample->decodeTime() > trackBuffer.enqueueDiscontinuityBoundary) {
            // Message corrected to " > " to match the comparison above (it
            // previously claimed ">=").
            DEBUG_LOG(LOGIDENTIFIER, "bailing early because of unbuffered gap, new sample: ", sample->decodeTime(), " > the current discontinuity boundary: ", trackBuffer.enqueueDiscontinuityBoundary);
            break;
        }

        trackBuffer.decodeQueue.erase(trackBuffer.decodeQueue.begin());

        MediaTime samplePresentationEnd = sample->presentationTime() + sample->duration();
        if (trackBuffer.highestEnqueuedPresentationTime.isInvalid() || samplePresentationEnd > trackBuffer.highestEnqueuedPresentationTime)
            trackBuffer.highestEnqueuedPresentationTime = samplePresentationEnd;

        trackBuffer.lastEnqueuedDecodeKey = {sample->decodeTime(), sample->presentationTime()};
        // The next sample may start up to discontinuityTolerance past the end
        // of this one before it counts as a gap.
        trackBuffer.enqueueDiscontinuityBoundary = sample->decodeTime() + sample->duration() + discontinuityTolerance;

        enqueueSample(sample.releaseNonNull(), trackID);
#if !RELEASE_LOG_DISABLED
        ++enqueuedSamples;
#endif
    }

    updateMinimumUpcomingPresentationTime(trackBuffer, trackID);

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "enqueued ", enqueuedSamples, " samples, ", static_cast<uint64_t>(trackBuffer.decodeQueue.size()), " remaining");
#endif

    trySignalAllSamplesInTrackEnqueued(trackBuffer, trackID);
}
// Flushes the platform player for trackID and rebuilds the decode queue so
// playback can resume at `time`: non-displaying copies of the samples from the
// preceding sync sample up to the current sample, followed by every sample
// from the current sample onward.
void SourceBufferPrivate::reenqueueMediaForTime(TrackBuffer& trackBuffer, const AtomString& trackID, const MediaTime& time)
{
    flush(trackID);
    trackBuffer.decodeQueue.clear();

    // Reset enqueue bookkeeping; the discontinuity boundary restarts at `time`.
    trackBuffer.highestEnqueuedPresentationTime = MediaTime::invalidTime();
    trackBuffer.lastEnqueuedDecodeKey = {MediaTime::invalidTime(), MediaTime::invalidTime()};
    trackBuffer.enqueueDiscontinuityBoundary = time + discontinuityTolerance;

    // Locate the sample containing `time`, or failing that, the first sample after it.
    auto currentSamplePTSIterator = trackBuffer.samples.presentationOrder().findSampleContainingPresentationTime(time);
    if (currentSamplePTSIterator == trackBuffer.samples.presentationOrder().end())
        currentSamplePTSIterator = trackBuffer.samples.presentationOrder().findSampleStartingOnOrAfterPresentationTime(time);

    // Nothing to enqueue if no sample exists at/after `time`, or the next one
    // starts more than timeFudgeFactor() later (an unbuffered gap).
    if (currentSamplePTSIterator == trackBuffer.samples.presentationOrder().end()
        || (currentSamplePTSIterator->first - time) > timeFudgeFactor())
        return;

    // Walk back in decode order to the sync sample this sample depends on.
    DecodeOrderSampleMap::KeyType decodeKey(currentSamplePTSIterator->second->decodeTime(), currentSamplePTSIterator->second->presentationTime());
    auto currentSampleDTSIterator = trackBuffer.samples.decodeOrder().findSampleWithDecodeKey(decodeKey);
    ASSERT(currentSampleDTSIterator != trackBuffer.samples.decodeOrder().end());

    auto reverseCurrentSampleIter = --DecodeOrderSampleMap::reverse_iterator(currentSampleDTSIterator);
    auto reverseLastSyncSampleIter = trackBuffer.samples.decodeOrder().findSyncSamplePriorToDecodeIterator(reverseCurrentSampleIter);
    if (reverseLastSyncSampleIter == trackBuffer.samples.decodeOrder().rend())
        return;

    // Enqueue the dependency run (sync sample up to, but not including, the
    // current sample) as non-displaying copies: decoded but never shown.
    for (auto iter = reverseLastSyncSampleIter; iter != reverseCurrentSampleIter; --iter) {
        auto copy = iter->second->createNonDisplayingCopy();
        DecodeOrderSampleMap::KeyType decodeKey(copy->decodeTime(), copy->presentationTime());
        trackBuffer.decodeQueue.insert(DecodeOrderSampleMap::MapType::value_type(decodeKey, WTFMove(copy)));
    }

    // Then enqueue everything from the current sample to the end, for display.
    for (auto iter = currentSampleDTSIterator; iter != trackBuffer.samples.decodeOrder().end(); ++iter)
        trackBuffer.decodeQueue.insert(*iter);

    provideMediaData(trackBuffer, trackID);

    trackBuffer.needsReenqueueing = false;
}
void SourceBufferPrivate::reenqueueMediaIfNeeded(const MediaTime& currentTime, uint64_t pendingAppendDataCapacity, uint64_t maximumBufferSize)
{
for (auto& trackBufferPair : m_trackBufferMap) {
TrackBuffer& trackBuffer = trackBufferPair.value;
const AtomString& trackID = trackBufferPair.key;
if (trackBuffer.needsReenqueueing) {
DEBUG_LOG(LOGIDENTIFIER, "reenqueuing at time ", currentTime);
reenqueueMediaForTime(trackBuffer, trackID, currentTime);
} else
provideMediaData(trackBuffer, trackID);
}
if (totalTrackBufferSizeInBytes() + pendingAppendDataCapacity > maximumBufferSize)
m_bufferFull = true;
}
// Orders two presentation-order map entries by their samples' decode timestamps.
static WARN_UNUSED_RETURN bool decodeTimeComparator(const PresentationOrderSampleMap::MapType::value_type& a, const PresentationOrderSampleMap::MapType::value_type& b)
{
    auto& firstSample = *a.second;
    auto& secondSample = *b.second;
    return firstSample.decodeTime() < secondSample.decodeTime();
}
// Removes every sample in `samples` from trackBuffer (both the stored sample
// map and the pending decode queue) and returns the presentation-time ranges
// that are no longer buffered. The returned ranges are widened outward to the
// nearest remaining samples so no sliver between removed and kept samples is
// left marked as buffered. The extra parameters in the logging build only
// feed DEBUG_LOG output.
#if !RELEASE_LOG_DISABLED
static PlatformTimeRanges removeSamplesFromTrackBuffer(const DecodeOrderSampleMap::MapType& samples, SourceBufferPrivate::TrackBuffer& trackBuffer, const SourceBufferPrivate* sourceBufferPrivate, const char* logPrefix)
#else
static PlatformTimeRanges removeSamplesFromTrackBuffer(const DecodeOrderSampleMap::MapType& samples, SourceBufferPrivate::TrackBuffer& trackBuffer)
#endif
{
#if !RELEASE_LOG_DISABLED
    MediaTime earliestSample = MediaTime::positiveInfiniteTime();
    MediaTime latestSample = MediaTime::zeroTime();
    uint64_t bytesRemoved = 0;
    auto logIdentifier = WTF::Logger::LogSiteIdentifier(sourceBufferPrivate->logClassName(), logPrefix, sourceBufferPrivate->logIdentifier());
    auto& logger = sourceBufferPrivate->logger();
    auto willLog = logger.willLog(sourceBufferPrivate->logChannel(), WTFLogLevel::Debug);
#endif

    PlatformTimeRanges erasedRanges;
    for (const auto& sampleIt : samples) {
        const DecodeOrderSampleMap::KeyType& decodeKey = sampleIt.first;
#if !RELEASE_LOG_DISABLED
        // The size delta around removeSample() gives this sample's byte cost.
        uint64_t startBufferSize = trackBuffer.samples.sizeInBytes();
#endif

        const RefPtr<MediaSample>& sample = sampleIt.second;

#if !RELEASE_LOG_DISABLED
        if (willLog)
            logger.debug(sourceBufferPrivate->logChannel(), logIdentifier, "removing sample ", *sampleIt.second);
#endif

        // Remove from both the stored samples and any pending decode-queue entry.
        trackBuffer.samples.removeSample(sample.get());
        trackBuffer.decodeQueue.erase(decodeKey);

        auto startTime = sample->presentationTime();
        auto endTime = startTime + sample->duration();
        erasedRanges.add(startTime, endTime);

#if !RELEASE_LOG_DISABLED
        bytesRemoved += startBufferSize - trackBuffer.samples.sizeInBytes();
        if (startTime < earliestSample)
            earliestSample = startTime;
        if (endTime > latestSample)
            latestSample = endTime;
#endif
    }

    // Extend each erased range outward to the nearest remaining sample (or to
    // zero / infinity when none remains on that side).
    PlatformTimeRanges additionalErasedRanges;
    for (unsigned i = 0; i < erasedRanges.length(); ++i) {
        auto erasedStart = erasedRanges.start(i);
        auto erasedEnd = erasedRanges.end(i);
        auto startIterator = trackBuffer.samples.presentationOrder().reverseFindSampleBeforePresentationTime(erasedStart);
        if (startIterator == trackBuffer.samples.presentationOrder().rend())
            additionalErasedRanges.add(MediaTime::zeroTime(), erasedStart);
        else {
            auto& previousSample = *startIterator->second;
            if (previousSample.presentationTime() + previousSample.duration() < erasedStart)
                additionalErasedRanges.add(previousSample.presentationTime() + previousSample.duration(), erasedStart);
        }

        auto endIterator = trackBuffer.samples.presentationOrder().findSampleStartingOnOrAfterPresentationTime(erasedEnd);
        if (endIterator == trackBuffer.samples.presentationOrder().end())
            additionalErasedRanges.add(erasedEnd, MediaTime::positiveInfiniteTime());
        else {
            auto& nextSample = *endIterator->second;
            if (nextSample.presentationTime() > erasedEnd)
                additionalErasedRanges.add(erasedEnd, nextSample.presentationTime());
        }
    }

    if (additionalErasedRanges.length())
        erasedRanges.unionWith(additionalErasedRanges);

#if !RELEASE_LOG_DISABLED
    if (bytesRemoved && willLog)
        logger.debug(sourceBufferPrivate->logChannel(), logIdentifier, "removed ", bytesRemoved, ", start = ", earliestSample, ", end = ", latestSample);
#endif

    return erasedRanges;
}
// Removes the coded frames in [start, end) from every track: splits any sample
// straddling a boundary, removes the in-range samples plus their dependent
// frames (up to the next sync sample), updates the buffered ranges, and
// triggers reenqueueing when already-enqueued media was removed.
void SourceBufferPrivate::removeCodedFrames(const MediaTime& start, const MediaTime& end, const MediaTime& currentTime, bool isEnded, CompletionHandler<void()>&& completionHandler)
{
    ASSERT(start < end);
    if (start >= end) {
        completionHandler();
        return;
    }

    for (auto& trackBufferKeyValue : m_trackBufferMap) {
        TrackBuffer& trackBuffer = trackBufferKeyValue.value;
        AtomString trackID = trackBufferKeyValue.key;

        // If a sample spans a removal boundary, split it at the boundary so
        // only the in-range portion is removed.
        auto divideSampleIfPossibleAtPresentationTime = [&] (const MediaTime& time) {
            auto sampleIterator = trackBuffer.samples.presentationOrder().findSampleContainingPresentationTime(time);
            if (sampleIterator == trackBuffer.samples.presentationOrder().end())
                return;
            RefPtr<MediaSample> sample = sampleIterator->second;
            if (!sample->isDivisable())
                return;
            std::pair<RefPtr<MediaSample>, RefPtr<MediaSample>> replacementSamples = sample->divide(time);
            if (!replacementSamples.first || !replacementSamples.second)
                return;
            DEBUG_LOG(LOGIDENTIFIER, "splitting sample ", *sample, " into ", *replacementSamples.first, " and ", *replacementSamples.second);
            trackBuffer.samples.removeSample(sample.get());
            trackBuffer.samples.addSample(*replacementSamples.first);
            trackBuffer.samples.addSample(*replacementSamples.second);
        };
        divideSampleIfPossibleAtPresentationTime(start);
        divideSampleIfPossibleAtPresentationTime(end);

        auto removePresentationStart = trackBuffer.samples.presentationOrder().findSampleContainingOrAfterPresentationTime(start);
        auto removePresentationEnd = trackBuffer.samples.presentationOrder().findSampleStartingOnOrAfterPresentationTime(end);
        if (removePresentationStart == removePresentationEnd)
            continue;

        // Convert the presentation-time range into a decode-order range, then
        // extend it to the next sync sample so no undecodable dependent
        // frames are left behind.
        auto minmaxDecodeTimeIterPair = std::minmax_element(removePresentationStart, removePresentationEnd, decodeTimeComparator);
        auto& firstSample = *minmaxDecodeTimeIterPair.first->second;
        auto& lastSample = *minmaxDecodeTimeIterPair.second->second;
        auto removeDecodeStart = trackBuffer.samples.decodeOrder().findSampleWithDecodeKey({firstSample.decodeTime(), firstSample.presentationTime()});
        auto removeDecodeLast = trackBuffer.samples.decodeOrder().findSampleWithDecodeKey({lastSample.decodeTime(), lastSample.presentationTime()});
        auto removeDecodeEnd = trackBuffer.samples.decodeOrder().findSyncSampleAfterDecodeIterator(removeDecodeLast);

        DecodeOrderSampleMap::MapType erasedSamples(removeDecodeStart, removeDecodeEnd);

#if !RELEASE_LOG_DISABLED
        PlatformTimeRanges erasedRanges = removeSamplesFromTrackBuffer(erasedSamples, trackBuffer, this, "removeCodedFrames");
#else
        PlatformTimeRanges erasedRanges = removeSamplesFromTrackBuffer(erasedSamples, trackBuffer);
#endif

        // If the removed range overlaps media that may already be in the
        // decoder, the track must be flushed and re-enqueued.
        if (trackBuffer.highestEnqueuedPresentationTime.isValid() && currentTime < trackBuffer.highestEnqueuedPresentationTime) {
            PlatformTimeRanges possiblyEnqueuedRanges(currentTime, trackBuffer.highestEnqueuedPresentationTime);
            possiblyEnqueuedRanges.intersectWith(erasedRanges);
            if (possiblyEnqueuedRanges.length()) {
                trackBuffer.needsReenqueueing = true;
                DEBUG_LOG(LOGIDENTIFIER, "the range in removeCodedFrames() includes already enqueued samples, reenqueueing from ", currentTime);
                reenqueueMediaForTime(trackBuffer, trackID, currentTime);
            }
        }

        // Subtract the erased ranges from this track's buffered ranges.
        erasedRanges.invert();
        trackBuffer.buffered.intersectWith(erasedRanges);
        setBufferedDirty(true);

        // Removing media around the playhead drops readiness back to HaveMetadata.
        if (isActive() && currentTime >= start && currentTime < end && readyState() > MediaPlayer::ReadyState::HaveMetadata)
            setReadyState(MediaPlayer::ReadyState::HaveMetadata);
    }

    updateBufferedFromTrackBuffers(isEnded);
    updateHighestPresentationTimestamp();

    LOG(Media, "SourceBuffer::removeCodedFrames(%p) - buffered = %s", this, toString(m_buffered->ranges()).utf8().data());

    completionHandler();
}
// Frees space when the buffer is full: first evicts 30-second chunks from the
// start of the buffer forward, up to 30 seconds before the current time;
// if still full, evicts chunks from the end of the buffer backwards, down to
// 30 seconds after the current time. Clears m_bufferFull as soon as enough
// space is freed.
void SourceBufferPrivate::evictCodedFrames(uint64_t newDataSize, uint64_t pendingAppendDataCapacity, uint64_t maximumBufferSize, const MediaTime& currentTime, const MediaTime& duration, bool isEnded)
{
    if (!m_isAttached)
        return;

    if (!m_bufferFull)
        return;

    // Evict in 30-second chunks, never closer than 30 seconds to the playhead.
    MediaTime thirtySeconds = MediaTime(30, 1);
    MediaTime maximumRangeEnd = currentTime - thirtySeconds;

#if !RELEASE_LOG_DISABLED
    uint64_t initialBufferedSize = totalTrackBufferSizeInBytes();
    DEBUG_LOG(LOGIDENTIFIER, "currentTime = ", currentTime, ", require ", initialBufferedSize + newDataSize, " bytes, maximum buffer size is ", maximumBufferSize);
#endif

    // Phase 1: evict from the beginning of the buffer forward.
    MediaTime rangeStart = MediaTime::zeroTime();
    MediaTime rangeEnd = rangeStart + thirtySeconds;
    while (rangeStart < maximumRangeEnd) {
        removeCodedFrames(rangeStart, std::min(rangeEnd, maximumRangeEnd), currentTime, isEnded);
        if (totalTrackBufferSizeInBytes() + pendingAppendDataCapacity + newDataSize < maximumBufferSize) {
            m_bufferFull = false;
            break;
        }
        rangeStart += thirtySeconds;
        rangeEnd += thirtySeconds;
    }

    if (!m_bufferFull) {
#if !RELEASE_LOG_DISABLED
        DEBUG_LOG(LOGIDENTIFIER, "evicted ", initialBufferedSize - totalTrackBufferSizeInBytes());
#endif
        return;
    }

    // Phase 2: evict from the end of the buffer backwards. If the playhead is
    // inside the last buffered range there is nothing after it to evict.
    // NOTE(review): buffered.find() may return notFound here; notFound is only
    // screened out in the comparisons inside the loop below, not in this
    // length() - 1 check — confirm a playhead outside any range is intended
    // to proceed.
    auto buffered = m_buffered->ranges();
    uint64_t currentTimeRange = buffered.find(currentTime);
    if (currentTimeRange == buffered.length() - 1) {
#if !RELEASE_LOG_DISABLED
        ERROR_LOG(LOGIDENTIFIER, "FAILED to free enough after evicting ", initialBufferedSize - totalTrackBufferSizeInBytes());
#endif
        return;
    }

    MediaTime minimumRangeStart = currentTime + thirtySeconds;

    rangeEnd = duration;
    if (!rangeEnd.isFinite()) {
        rangeEnd = buffered.maximumBufferedTime();
#if !RELEASE_LOG_DISABLED
        DEBUG_LOG(LOGIDENTIFIER, "MediaSource duration is not a finite value, using maximum buffered time: ", rangeEnd);
#endif
    }

    rangeStart = rangeEnd - thirtySeconds;
    while (rangeStart > minimumRangeStart) {
        // Never evict the range the playhead is in; clip the chunk to end
        // where that range begins.
        uint64_t startTimeRange = buffered.find(rangeStart);
        if (currentTimeRange != notFound && startTimeRange == currentTimeRange) {
            uint64_t endTimeRange = buffered.find(rangeEnd);
            if (currentTimeRange != notFound && endTimeRange == currentTimeRange)
                break;
            rangeEnd = buffered.start(endTimeRange);
        }
        removeCodedFrames(std::max(minimumRangeStart, rangeStart), rangeEnd, currentTime, isEnded);
        if (totalTrackBufferSizeInBytes() + pendingAppendDataCapacity + newDataSize < maximumBufferSize) {
            m_bufferFull = false;
            break;
        }
        rangeStart -= thirtySeconds;
        rangeEnd -= thirtySeconds;
    }

#if !RELEASE_LOG_DISABLED
    if (m_bufferFull)
        ERROR_LOG(LOGIDENTIFIER, "FAILED to free enough after evicting ", initialBufferedSize - totalTrackBufferSizeInBytes());
    else
        DEBUG_LOG(LOGIDENTIFIER, "evicted ", initialBufferedSize - totalTrackBufferSizeInBytes());
#endif
}
// Sums the byte cost of the stored samples across every track buffer.
uint64_t SourceBufferPrivate::totalTrackBufferSizeInBytes() const
{
    uint64_t totalSize = 0;
    for (auto& entry : m_trackBufferMap)
        totalSize += entry.value.samples.sizeInBytes();
    return totalSize;
}
void SourceBufferPrivate::addTrackBuffer(const AtomString& trackId, RefPtr<MediaDescription>&& description)
{
ASSERT(!m_trackBufferMap.contains(trackId));
auto& trackBuffer = m_trackBufferMap.add(trackId, TrackBuffer()).iterator->value;
trackBuffer.description = description;
m_hasAudio = m_hasAudio || trackBuffer.description->isAudio();
m_hasVideo = m_hasVideo || trackBuffer.description->isVideo();
}
// Rekeys track buffers after a track ID change: each (old, new) pair moves the
// buffer stored under the old ID to the new ID.
void SourceBufferPrivate::updateTrackIds(Vector<std::pair<AtomString, AtomString>>&& trackIdPairs)
{
    for (auto& [oldId, newId] : trackIdPairs) {
        ASSERT(oldId != newId);
        m_trackBufferMap.add(newId, m_trackBufferMap.take(oldId));
    }
}
void SourceBufferPrivate::setAllTrackBuffersNeedRandomAccess()
{
for (auto& trackBuffer : m_trackBufferMap.values())
trackBuffer.needRandomAccessFlag = true;
}
// Forwards a parsed initialization segment to the client. After the first
// segment has been accepted, later segments must reference only tracks this
// buffer already knows about (validateInitializationSegment); mismatches are
// reported as append errors.
void SourceBufferPrivate::didReceiveInitializationSegment(SourceBufferPrivateClient::InitializationSegment&& segment, CompletionHandler<void()>&& completionHandler)
{
    if (!m_client) {
        completionHandler();
        return;
    }

    if (m_receivedFirstInitializationSegment && !validateInitializationSegment(segment)) {
        // NOTE(review): completionHandler is never invoked on this error path —
        // confirm callers tolerate the handler being dropped.
        m_client->sourceBufferPrivateAppendError(true);
        return;
    }

    m_client->sourceBufferPrivateDidReceiveInitializationSegment(WTFMove(segment), WTFMove(completionHandler));

    m_receivedFirstInitializationSegment = true;
    m_pendingInitializationSegmentForChangeType = false;
}
// Checks that a subsequent initialization segment references only tracks that
// already have a track buffer. Single-track segments are skipped — with one
// track there is no ID ambiguity to validate.
bool SourceBufferPrivate::validateInitializationSegment(const SourceBufferPrivateClient::InitializationSegment& segment)
{
    if (segment.audioTracks.size() >= 2) {
        for (auto& audioTrackInfo : segment.audioTracks) {
            if (!m_trackBufferMap.contains(audioTrackInfo.track->id()))
                return false;
        }
    }

    if (segment.videoTracks.size() >= 2) {
        for (auto& videoTrackInfo : segment.videoTracks) {
            if (!m_trackBufferMap.contains(videoTrackInfo.track->id()))
                return false;
        }
    }

    if (segment.textTracks.size() >= 2) {
        // Bug fix: this loop previously iterated segment.videoTracks, so text
        // track IDs were never actually validated.
        for (auto& textTrackInfo : segment.textTracks) {
            if (!m_trackBufferMap.contains(textTrackInfo.track->id()))
                return false;
        }
    }

    return true;
}
void SourceBufferPrivate::didReceiveSample(MediaSample& sample)
{
if (!m_isAttached)
return;
if ((!m_receivedFirstInitializationSegment || m_pendingInitializationSegmentForChangeType) && m_client) {
m_client->sourceBufferPrivateAppendError(true);
return;
}
do {
MediaTime presentationTimestamp;
MediaTime decodeTimestamp;
MediaTime frameDuration = sample.duration();
if (m_shouldGenerateTimestamps) {
presentationTimestamp = { 0, frameDuration.timeScale() };
decodeTimestamp = { 0, frameDuration.timeScale() };
} else {
presentationTimestamp = sample.presentationTime();
decodeTimestamp = sample.decodeTime();
}
if (m_appendMode == SourceBufferAppendMode::Sequence && m_groupStartTimestamp.isValid()) {
m_timestampOffset = m_groupStartTimestamp;
for (auto& trackBuffer : m_trackBufferMap.values()) {
trackBuffer.lastFrameTimescale = 0;
trackBuffer.roundedTimestampOffset = MediaTime::invalidTime();
}
m_groupEndTimestamp = m_groupStartTimestamp;
for (auto& trackBuffer : m_trackBufferMap.values())
trackBuffer.needRandomAccessFlag = true;
m_groupStartTimestamp = MediaTime::invalidTime();
}
AtomString trackID = sample.trackID();
auto it = m_trackBufferMap.find(trackID);
if (it == m_trackBufferMap.end()) {
m_client->sourceBufferPrivateDidDropSample();
return;
}
TrackBuffer& trackBuffer = it->value;
MediaTime microsecond(1, 1000000);
auto roundTowardsTimeScaleWithRoundingMargin = [] (const MediaTime& time, uint32_t timeScale, const MediaTime& roundingMargin) {
while (true) {
MediaTime roundedTime = time.toTimeScale(timeScale);
if (abs(roundedTime - time) < roundingMargin || timeScale >= MediaTime::MaximumTimeScale)
return roundedTime;
if (!WTF::safeMultiply(timeScale, 2, timeScale) || timeScale > MediaTime::MaximumTimeScale)
timeScale = MediaTime::MaximumTimeScale;
}
};
if (m_timestampOffset) {
if (!trackBuffer.roundedTimestampOffset.isValid() || presentationTimestamp.timeScale() != trackBuffer.lastFrameTimescale) {
trackBuffer.lastFrameTimescale = presentationTimestamp.timeScale();
trackBuffer.roundedTimestampOffset = roundTowardsTimeScaleWithRoundingMargin(m_timestampOffset, trackBuffer.lastFrameTimescale, microsecond);
}
presentationTimestamp += trackBuffer.roundedTimestampOffset;
decodeTimestamp += trackBuffer.roundedTimestampOffset;
}
MediaTime decodeDurationToCheck = trackBuffer.greatestDecodeDuration;
if (decodeDurationToCheck.isValid() && trackBuffer.lastFrameDuration.isValid()
&& (trackBuffer.lastFrameDuration > decodeDurationToCheck))
decodeDurationToCheck = trackBuffer.lastFrameDuration;
if (trackBuffer.lastDecodeTimestamp.isValid() && (decodeTimestamp < trackBuffer.lastDecodeTimestamp
|| (decodeDurationToCheck.isValid() && abs(decodeTimestamp - trackBuffer.lastDecodeTimestamp) > (decodeDurationToCheck * 2)))) {
if (m_appendMode == SourceBufferAppendMode::Segments) {
m_groupEndTimestamp = presentationTimestamp;
} else {
m_groupStartTimestamp = m_groupEndTimestamp;
}
for (auto& trackBuffer : m_trackBufferMap.values()) {
trackBuffer.lastDecodeTimestamp = MediaTime::invalidTime();
trackBuffer.greatestDecodeDuration = MediaTime::invalidTime();
trackBuffer.lastFrameDuration = MediaTime::invalidTime();
trackBuffer.highestPresentationTimestamp = MediaTime::invalidTime();
trackBuffer.needRandomAccessFlag = true;
}
continue;
}
if (m_appendMode == SourceBufferAppendMode::Sequence) {
sample.setTimestamps(presentationTimestamp, decodeTimestamp);
} else if (trackBuffer.roundedTimestampOffset) {
sample.offsetTimestampsBy(trackBuffer.roundedTimestampOffset);
}
DEBUG_LOG(LOGIDENTIFIER, sample);
MediaTime frameEndTimestamp = presentationTimestamp + frameDuration;
if (presentationTimestamp < m_appendWindowStart || frameEndTimestamp > m_appendWindowEnd) {
trackBuffer.needRandomAccessFlag = true;
m_client->sourceBufferPrivateDidDropSample();
return;
}
MediaTime presentationStartTime = MediaTime::zeroTime();
if (presentationTimestamp < presentationStartTime) {
ERROR_LOG(LOGIDENTIFIER, "failing because presentationTimestamp (", presentationTimestamp, ") < presentationStartTime (", presentationStartTime, ")");
m_client->sourceBufferPrivateStreamEndedWithDecodeError();
return;
}
if (trackBuffer.needRandomAccessFlag) {
if (!sample.isSync()) {
m_client->sourceBufferPrivateDidDropSample();
return;
}
trackBuffer.needRandomAccessFlag = false;
}
SampleMap erasedSamples;
if (trackBuffer.lastDecodeTimestamp.isInvalid()) {
auto iter = trackBuffer.samples.presentationOrder().findSampleContainingPresentationTime(presentationTimestamp);
if (iter != trackBuffer.samples.presentationOrder().end()) {
RefPtr<MediaSample> overlappedFrame = iter->second;
if (trackBuffer.description && trackBuffer.description->isVideo()) {
MediaTime overlappedFramePresentationTimestamp = overlappedFrame->presentationTime();
MediaTime removeWindowTimestamp = overlappedFramePresentationTimestamp + microsecond;
if (presentationTimestamp < removeWindowTimestamp)
erasedSamples.addSample(*iter->second);
}
}
}
if (trackBuffer.highestPresentationTimestamp.isInvalid()) {
auto iterPair = trackBuffer.samples.presentationOrder().findSamplesBetweenPresentationTimes(presentationTimestamp, frameEndTimestamp);
// NOTE(review): this span is the interior of a larger sample-processing member
// function whose head (and the declarations of `sample`, `trackBuffer`,
// `presentationTimestamp`, `decodeTimestamp`, `frameDuration`,
// `frameEndTimestamp`, `erasedSamples`, `iterPair`, and the enclosing
// `do { ... } while (true)` loop) lies above this chunk. It appears to implement
// the MSE "coded frame processing" steps — TODO confirm against the full function.

// Tail of an earlier overlap check: queue the overlapped presentation-order
// range for removal.
if (iterPair.first != trackBuffer.samples.presentationOrder().end())
erasedSamples.addRange(iterPair.first, iterPair.second);
}
// If the incoming sample is a sync frame, any non-sync frames that follow it in
// decode order (up to the next sync frame) were encoded against a different
// predecessor and can no longer be decoded correctly once this sync frame is
// inserted — queue them for removal too. The do/while(false) exists only so the
// guard checks can exit early via `break`.
do {
if (!sample.isSync())
break;
DecodeOrderSampleMap::KeyType decodeKey(sample.decodeTime(), sample.presentationTime());
auto nextSampleInDecodeOrder = trackBuffer.samples.decodeOrder().findSampleAfterDecodeKey(decodeKey);
if (nextSampleInDecodeOrder == trackBuffer.samples.decodeOrder().end())
break;
// The immediately following frame is itself a sync frame: nothing depends on
// the frame being replaced, so nothing extra to erase.
if (nextSampleInDecodeOrder->second->isSync())
break;
auto nextSyncSample = trackBuffer.samples.decodeOrder().findSyncSampleAfterDecodeIterator(nextSampleInDecodeOrder);
INFO_LOG(LOGIDENTIFIER, "Discovered out-of-order frames, from: ", *nextSampleInDecodeOrder->second, " to: ", (nextSyncSample == trackBuffer.samples.decodeOrder().end() ? "[end]"_s : toString(*nextSyncSample->second)));
erasedSamples.addRange(nextSampleInDecodeOrder, nextSyncSample);
} while (false);
// Frames within 1 ms of one another are treated as contiguous; this tolerance
// absorbs rounding error from timescale conversion.
const MediaTime contiguousFrameTolerance = MediaTime(1, 1000);
// When the new frame lands at or beyond the track's highest presentation
// timestamp, erase previously buffered frames overlapping
// [highestPresentationTimestamp - tolerance, frameEndTimestamp - tolerance).
if (trackBuffer.highestPresentationTimestamp.isValid() && trackBuffer.highestPresentationTimestamp - contiguousFrameTolerance <= presentationTimestamp) {
do {
// Nothing buffered yet — nothing to erase.
unsigned bufferedLength = trackBuffer.buffered.length();
if (!bufferedLength)
break;
MediaTime highestBufferedTime = trackBuffer.buffered.maximumBufferedTime();
MediaTime eraseBeginTime = trackBuffer.highestPresentationTimestamp - contiguousFrameTolerance;
MediaTime eraseEndTime = frameEndTimestamp - contiguousFrameTolerance;
PresentationOrderSampleMap::iterator_range range;
// If the append point is within one frame of the end of the buffered range,
// search from the end of the presentation-order map; otherwise search from
// the front. Presumably an optimization for the common append-at-end case —
// confirm against SampleMap.
if (highestBufferedTime - trackBuffer.highestPresentationTimestamp < trackBuffer.lastFrameDuration) {
range = trackBuffer.samples.presentationOrder().findSamplesBetweenPresentationTimesFromEnd(eraseBeginTime, eraseEndTime);
} else {
range = trackBuffer.samples.presentationOrder().findSamplesBetweenPresentationTimes(eraseBeginTime, eraseEndTime);
}
if (range.first != trackBuffer.samples.presentationOrder().end())
erasedSamples.addRange(range.first, range.second);
} while (false);
}
// Extend the erased set to every frame that depends on an erased frame: in
// decode order, everything from the first erased frame up to (but not
// including) the next sync frame after the last erased one.
DecodeOrderSampleMap::MapType dependentSamples;
if (!erasedSamples.empty()) {
auto firstDecodeIter = trackBuffer.samples.decodeOrder().findSampleWithDecodeKey(erasedSamples.decodeOrder().begin()->first);
auto lastDecodeIter = trackBuffer.samples.decodeOrder().findSampleWithDecodeKey(erasedSamples.decodeOrder().rbegin()->first);
auto nextSyncIter = trackBuffer.samples.decodeOrder().findSyncSampleAfterDecodeIterator(lastDecodeIter);
dependentSamples.insert(firstDecodeIter, nextSyncIter);
// Frames whose decode keys fall between the new sample's key and the first
// erased frame also lose their reference frames, so remove them as well.
DecodeOrderSampleMap::KeyType decodeKey(sample.decodeTime(), sample.presentationTime());
auto samplesWithHigherDecodeTimes = trackBuffer.samples.decodeOrder().findSamplesBetweenDecodeKeys(decodeKey, erasedSamples.decodeOrder().begin()->first);
if (samplesWithHigherDecodeTimes.first != samplesWithHigherDecodeTimes.second)
dependentSamples.insert(samplesWithHigherDecodeTimes.first, samplesWithHigherDecodeTimes.second);
#if !RELEASE_LOG_DISABLED
PlatformTimeRanges erasedRanges = removeSamplesFromTrackBuffer(dependentSamples, trackBuffer, this, "didReceiveSample");
#else
PlatformTimeRanges erasedRanges = removeSamplesFromTrackBuffer(dependentSamples, trackBuffer);
#endif
// If any erased range lies between the current playback position and the
// highest presentation time already handed to the decoder, frames the decoder
// holds may now be stale — flag the track for re-enqueueing.
MediaTime currentTime = currentMediaTime();
if (trackBuffer.highestEnqueuedPresentationTime.isValid() && currentTime < trackBuffer.highestEnqueuedPresentationTime) {
PlatformTimeRanges possiblyEnqueuedRanges(currentTime, trackBuffer.highestEnqueuedPresentationTime);
possiblyEnqueuedRanges.intersectWith(erasedRanges);
if (possiblyEnqueuedRanges.length())
trackBuffer.needsReenqueueing = true;
}
// Subtract the erased ranges from this track's buffered ranges
// (invert + intersect == set difference).
erasedRanges.invert();
trackBuffer.buffered.intersectWith(erasedRanges);
setBufferedDirty(true);
}
// Insert the new sample into the track buffer, and into the decode queue if it
// sorts after the last frame already enqueued for decoding.
trackBuffer.samples.addSample(sample);
DecodeOrderSampleMap::KeyType decodeKey(sample.decodeTime(), sample.presentationTime());
if (trackBuffer.lastEnqueuedDecodeKey.first.isInvalid() || decodeKey > trackBuffer.lastEnqueuedDecodeKey) {
trackBuffer.decodeQueue.insert(DecodeOrderSampleMap::MapType::value_type(decodeKey, &sample));
// The new frame precedes the cached minimum upcoming presentation time, so
// that cached value must be recomputed.
if (trackBuffer.minimumEnqueuedPresentationTime.isValid() && sample.presentationTime() < trackBuffer.minimumEnqueuedPresentationTime)
trackBuffer.needsMinimumUpcomingPresentationTimeUpdating = true;
}
// Track the largest decode-timestamp delta observed between consecutive frames.
if (trackBuffer.lastDecodeTimestamp.isValid()) {
MediaTime lastDecodeDuration = decodeTimestamp - trackBuffer.lastDecodeTimestamp;
if (!trackBuffer.greatestDecodeDuration.isValid() || lastDecodeDuration > trackBuffer.greatestDecodeDuration)
trackBuffer.greatestDecodeDuration = lastDecodeDuration;
}
// Per-track bookkeeping for the next iteration / next append.
trackBuffer.lastDecodeTimestamp = decodeTimestamp;
trackBuffer.lastFrameDuration = frameDuration;
if (trackBuffer.highestPresentationTimestamp.isInvalid() || frameEndTimestamp > trackBuffer.highestPresentationTimestamp)
trackBuffer.highestPresentationTimestamp = frameEndTimestamp;
// The group end timestamp only ever grows, tracking the furthest frame end seen.
if (m_groupEndTimestamp.isInvalid() || frameEndTimestamp > m_groupEndTimestamp)
m_groupEndTimestamp = frameEndTimestamp;
// When timestamps are generated (presumably MSE "sequence" append mode — TODO
// confirm), the timestamp offset follows the end of the last appended frame;
// invalidate each track's rounded offset so it is recomputed from the new one.
if (m_shouldGenerateTimestamps) {
m_timestampOffset = frameEndTimestamp;
for (auto& trackBuffer : m_trackBufferMap.values()) {
trackBuffer.lastFrameTimescale = 0;
trackBuffer.roundedTimestampOffset = MediaTime::invalidTime();
}
}
// Extend this track's buffered ranges by [presentationTimestamp,
// presentationEndTime], snapping each endpoint to an existing buffered edge when
// it lies within the fudge factor, so nearly-adjacent ranges coalesce instead of
// leaving micro-gaps.
auto presentationEndTime = presentationTimestamp + frameDuration;
auto nearestToPresentationStartTime = trackBuffer.buffered.nearest(presentationTimestamp);
if (nearestToPresentationStartTime.isValid() && (presentationTimestamp - nearestToPresentationStartTime).isBetween(MediaTime::zeroTime(), timeFudgeFactor()))
presentationTimestamp = nearestToPresentationStartTime;
auto nearestToPresentationEndTime = trackBuffer.buffered.nearest(presentationEndTime);
if (nearestToPresentationEndTime.isValid() && (nearestToPresentationEndTime - presentationEndTime).isBetween(MediaTime::zeroTime(), timeFudgeFactor()))
presentationEndTime = nearestToPresentationEndTime;
trackBuffer.buffered.add(presentationTimestamp, presentationEndTime);
m_client->sourceBufferPrivateDidParseSample(frameDuration.toDouble());
setBufferedDirty(true);
// Sample fully processed — leave the enclosing retry loop.
break;
} while (true);
// If this append pushed the group end past the media duration, ask the client to
// grow the duration to match.
if (m_groupEndTimestamp > duration())
m_client->sourceBufferPrivateDurationChanged(m_groupEndTimestamp);
updateHighestPresentationTimestamp();
}
}
#endif