#include "config.h"
#include "BitmapImage.h"
#include "FloatRect.h"
#include "GraphicsContext.h"
#include "ImageBuffer.h"
#include "ImageObserver.h"
#include "IntRect.h"
#include "Logging.h"
#include "Settings.h"
#include "TextStream.h"
#include "Timer.h"
#include <wtf/CurrentTime.h>
#include <wtf/Vector.h>
#include <wtf/text/WTFString.h>
namespace WebCore {
// Creates an empty BitmapImage whose encoded bytes arrive later through
// dataChanged(). The source is wired back to this image so it can report
// frame availability.
BitmapImage::BitmapImage(ImageObserver* observer)
    : Image(observer)
    , m_source(this)
{
}
// Creates a BitmapImage that wraps an already-decoded native image; the
// source takes ownership of the moved-in frame.
BitmapImage::BitmapImage(NativeImagePtr&& image, ImageObserver* observer)
    : Image(observer)
    , m_source(WTFMove(image))
{
}
BitmapImage::~BitmapImage()
{
    // Drop platform caches and the animation timer first, then detach the
    // source and stop any async decoding so nothing can call back into a
    // half-destroyed object.
    invalidatePlatformData();
    clearTimer();
    m_source.clearImage();
    m_source.stopAsyncDecodingQueue();
}
// Snapshots the image-related flags from the given Settings object. The
// three assignments are independent of each other.
void BitmapImage::updateFromSettings(const Settings& settings)
{
    m_showDebugBackground = settings.showDebugBorders();
    m_allowAnimatedImageAsyncDecoding = settings.animatedImageAsyncDecodingEnabled();
    m_allowSubsampling = settings.imageSubsamplingEnabled();
}
// Releases decoded frame data to relieve memory pressure.
// destroyAll == false: purge only the frames before the current one.
// destroyAll == true: purge everything, except that the current frame is
// kept when canDestroyDecodedData() says destroying it is unsafe.
void BitmapImage::destroyDecodedData(bool destroyAll)
{
    LOG(Images, "BitmapImage::%s - %p - url: %s", __FUNCTION__, this, sourceURL().string().utf8().data());
    if (!destroyAll)
        m_source.destroyDecodedDataBeforeFrame(m_currentFrame);
    else if (!canDestroyDecodedData())
        m_source.destroyAllDecodedDataExcludeFrame(m_currentFrame);
    else {
        m_source.destroyAllDecodedData();
        // Everything including the current frame is gone, so its recorded
        // decoding status no longer describes any cached data.
        m_currentFrameDecodingStatus = DecodingStatus::Invalid;
    }
    // Throw the decoder away only when all frames were destroyed and no
    // async decoding is in flight; otherwise just trim the frame cache.
    if (!destroyAll || m_source.hasAsyncDecodingQueue())
        m_source.clearFrameBufferCache(m_currentFrame);
    else
        m_source.resetData(data());
    invalidatePlatformData();
}
// Purges decoded frames only when it is both safe and worthwhile: the
// encoded bytes must still be available to re-decode from (or there must be
// no frames at all), and the decoded footprint must have reached the
// large-animation threshold.
void BitmapImage::destroyDecodedDataIfNecessary(bool destroyAll)
{
    // Without encoded data a decoded frame cannot be reconstructed, so it
    // must be kept.
    bool framesAreReconstructible = data() || !frameCount();
    if (framesAreReconstructible && m_source.decodedSize() >= LargeAnimationCutoff)
        destroyDecodedData(destroyAll);
}
// Called as encoded bytes arrive. Returns the source's status after it has
// seen the new data.
EncodedDataStatus BitmapImage::dataChanged(bool allDataReceived)
{
    // Partially decoded frames may be stale once more bytes arrive; discard
    // them unless large-image async decoding manages them itself.
    if (m_source.decodedSize() && !canUseAsyncDecodingForLargeImages())
        m_source.destroyIncompleteDecodedData();
    m_currentFrameDecodingStatus = DecodingStatus::Invalid;
    return m_source.dataChanged(data(), allDataReceived);
}
// Records the decoding status of the current frame, but only while the frame
// is still marked as being decoded; settled states are left untouched.
void BitmapImage::setCurrentFrameDecodingStatusIfNecessary(DecodingStatus decodingStatus)
{
    if (m_currentFrameDecodingStatus == DecodingStatus::Decoding)
        m_currentFrameDecodingStatus = decodingStatus;
}
// Returns the native image for the frame at |index|, decoding and caching it
// if needed. A frame that was cached at a different subsampling level is not
// reusable, so the platform caches are invalidated before re-decoding.
NativeImagePtr BitmapImage::frameImageAtIndexCacheIfNeeded(size_t index, SubsamplingLevel subsamplingLevel, const GraphicsContext* targetContext)
{
    if (!frameHasFullSizeNativeImageAtIndex(index, subsamplingLevel)) {
        LOG(Images, "BitmapImage::%s - %p - url: %s [subsamplingLevel was %d, resampling]", __FUNCTION__, this, sourceURL().string().utf8().data(), static_cast<int>(frameSubsamplingLevelAtIndex(index)));
        invalidatePlatformData();
    }
    return m_source.frameImageAtIndexCacheIfNeeded(index, subsamplingLevel, targetContext);
}
// Returns the first frame at the default subsampling level, decoding it on
// demand.
NativeImagePtr BitmapImage::nativeImage(const GraphicsContext* targetContext)
{
    return frameImageAtIndexCacheIfNeeded(0, SubsamplingLevel::Default, targetContext);
}
// Returns the animation's current frame at the default subsampling level,
// decoding it on demand.
NativeImagePtr BitmapImage::nativeImageForCurrentFrame(const GraphicsContext* targetContext)
{
    return frameImageAtIndexCacheIfNeeded(m_currentFrame, SubsamplingLevel::Default, targetContext);
}
#if USE(CG)
// Returns the first frame whose decoded native size matches |size|; if no
// frame matches, falls back to frame 0.
NativeImagePtr BitmapImage::nativeImageOfSize(const IntSize& size, const GraphicsContext* targetContext)
{
    size_t frames = frameCount();
    for (size_t frame = 0; frame < frames; ++frame) {
        auto nativeImage = frameImageAtIndexCacheIfNeeded(frame, SubsamplingLevel::Default, targetContext);
        if (!nativeImage)
            continue;
        if (nativeImageSize(nativeImage) == size)
            return nativeImage;
    }
    // No frame of the requested size exists; return the primary frame.
    return frameImageAtIndexCacheIfNeeded(0, SubsamplingLevel::Default, targetContext);
}
// Decodes (if needed) and collects the native image of every frame, skipping
// frames whose decode fails.
Vector<NativeImagePtr> BitmapImage::framesNativeImages()
{
    Vector<NativeImagePtr> frames;
    for (size_t frame = 0, total = frameCount(); frame < total; ++frame) {
        auto nativeImage = frameImageAtIndexCacheIfNeeded(frame);
        if (!nativeImage)
            continue;
        frames.append(nativeImage);
    }
    return frames;
}
#endif
#if !ASSERT_DISABLED
// Debug-only helper: an image can only be treated as a solid color when it
// is a single 1x1 frame.
bool BitmapImage::notSolidColor()
{
    bool isSinglePixel = size().width() == 1 && size().height() == 1;
    return !isSinglePixel || frameCount() > 1;
}
#endif
// Draws the current frame into |context|, choosing between the asynchronous
// and synchronous decoding paths. May return DidRequestDecoding when the
// frame is not ready yet; the observer is notified once it becomes available.
ImageDrawResult BitmapImage::draw(GraphicsContext& context, const FloatRect& destRect, const FloatRect& srcRect, CompositeOperator op, BlendMode mode, DecodingMode decodingMode, ImageOrientationDescription description)
{
    if (destRect.isEmpty() || srcRect.isEmpty())
        return ImageDrawResult::DidNothing;

    // Derive the subsampling level from the effective drawing scale so large
    // images can be decoded at a reduced size.
    FloatSize scaleFactorForDrawing = context.scaleFactorForDrawing(destRect, srcRect);
    IntSize sizeForDrawing = expandedIntSize(size() * scaleFactorForDrawing);
    ImageDrawResult result = ImageDrawResult::DidDraw;

    m_currentSubsamplingLevel = m_allowSubsampling ? m_source.subsamplingLevelForScaleFactor(context, scaleFactorForDrawing) : SubsamplingLevel::Default;
    LOG(Images, "BitmapImage::%s - %p - url: %s [subsamplingLevel = %d scaleFactorForDrawing = (%.4f, %.4f)]", __FUNCTION__, this, sourceURL().string().utf8().data(), static_cast<int>(m_currentSubsamplingLevel), scaleFactorForDrawing.width(), scaleFactorForDrawing.height());

    NativeImagePtr image;
    if (decodingMode == DecodingMode::Asynchronous) {
        // Async decoding is only used for still content (or an animation
        // that already finished on frame 0).
        ASSERT(!canAnimate());
        ASSERT(!m_currentFrame || m_animationFinished);

        bool frameIsCompatible = frameHasDecodedNativeImageCompatibleWithOptionsAtIndex(m_currentFrame, m_currentSubsamplingLevel, DecodingOptions(sizeForDrawing));
        bool frameIsBeingDecoded = frameIsBeingDecodedAndIsCompatibleWithOptionsAtIndex(m_currentFrame, DecodingOptions(sizeForDrawing));

        // Kick off a new async decode when no compatible frame exists or the
        // cached one was invalidated by new data.
        if ((!frameIsCompatible && !frameIsBeingDecoded) || m_currentFrameDecodingStatus == DecodingStatus::Invalid) {
            LOG(Images, "BitmapImage::%s - %p - url: %s [requesting large async decoding]", __FUNCTION__, this, sourceURL().string().utf8().data());
            m_source.requestFrameAsyncDecodingAtIndex(m_currentFrame, m_currentSubsamplingLevel, sizeForDrawing);
            m_currentFrameDecodingStatus = DecodingStatus::Decoding;
        }

        if (m_currentFrameDecodingStatus == DecodingStatus::Decoding)
            result = ImageDrawResult::DidRequestDecoding;

        // Nothing usable yet: optionally paint the debug placeholder and bail.
        if (!frameHasDecodedNativeImageCompatibleWithOptionsAtIndex(m_currentFrame, m_currentSubsamplingLevel, DecodingMode::Asynchronous)) {
            if (m_showDebugBackground)
                fillWithSolidColor(context, destRect, Color(Color::yellow).colorWithAlpha(0.5), op);
            return result;
        }

        image = frameImageAtIndex(m_currentFrame);
        LOG(Images, "BitmapImage::%s - %p - url: %s [a decoded frame will be used for asynchronous drawing]", __FUNCTION__, this, sourceURL().string().utf8().data());
    } else {
        // Synchronous path: advance the animation first if this image animates.
        StartAnimationStatus status = internalStartAnimation();
        ASSERT_IMPLIES(status == StartAnimationStatus::DecodingActive, (!m_currentFrame && !m_repetitionsComplete) || frameHasFullSizeNativeImageAtIndex(m_currentFrame, m_currentSubsamplingLevel));

        if (status == StartAnimationStatus::DecodingActive && m_showDebugBackground) {
            fillWithSolidColor(context, destRect, Color(Color::yellow).colorWithAlpha(0.5), op);
            return result;
        }

        if (m_currentFrameDecodingStatus == DecodingStatus::Invalid)
            m_source.destroyIncompleteDecodedData();

        bool frameIsCompatible = frameHasDecodedNativeImageCompatibleWithOptionsAtIndex(m_currentFrame, m_currentSubsamplingLevel, DecodingOptions(sizeForDrawing));
        bool frameIsBeingDecoded = frameIsBeingDecodedAndIsCompatibleWithOptionsAtIndex(m_currentFrame, DecodingMode::Asynchronous);

        if (frameIsCompatible) {
            // Reuse the frame an earlier async decode produced.
            image = frameImageAtIndex(m_currentFrame);
            LOG(Images, "BitmapImage::%s - %p - url: %s [a decoded frame will reused for synchronous drawing]", __FUNCTION__, this, sourceURL().string().utf8().data());
        } else if (frameIsBeingDecoded) {
            // An async decode is in flight; don't decode the same frame twice.
            if (m_showDebugBackground) {
                fillWithSolidColor(context, destRect, Color(Color::yellow).colorWithAlpha(0.5), op);
                LOG(Images, "BitmapImage::%s - %p - url: %s [waiting for async decoding to finish]", __FUNCTION__, this, sourceURL().string().utf8().data());
            }
            return ImageDrawResult::DidRequestDecoding;
        } else {
            image = frameImageAtIndexCacheIfNeeded(m_currentFrame, m_currentSubsamplingLevel, &context);
            LOG(Images, "BitmapImage::%s - %p - url: %s [an image frame will be decoded synchronously]", __FUNCTION__, this, sourceURL().string().utf8().data());
        }

        // If it's too early in the load there may be no decodable frame yet.
        if (!image) return ImageDrawResult::DidNothing;

        if (m_currentFrameDecodingStatus != DecodingStatus::Complete)
            ++m_decodeCountForTesting;
    }

    ASSERT(image);

    // Single-pixel images are drawn as a fill instead of a blit.
    Color color = singlePixelSolidColor();
    if (color.isValid()) {
        fillWithSolidColor(context, destRect, color, op);
        return result;
    }

    ImageOrientation orientation(description.imageOrientation());
    if (description.respectImageOrientation() == RespectImageOrientation)
        orientation = frameOrientationAtIndex(m_currentFrame);

    drawNativeImage(image, context, destRect, srcRect, IntSize(size()), op, mode, orientation);
    m_currentFrameDecodingStatus = frameDecodingStatusAtIndex(m_currentFrame);

    if (imageObserver())
        imageObserver()->didDraw(*this);
    return result;
}
// Tiles the image. The common case delegates to Image::drawPattern; when the
// context wants a luminance mask, the tile is rendered once into an offscreen
// buffer, converted to a luminance mask, and the cached result is tiled.
void BitmapImage::drawPattern(GraphicsContext& ctxt, const FloatRect& destRect, const FloatRect& tileRect, const AffineTransform& transform, const FloatPoint& phase, const FloatSize& spacing, CompositeOperator op, BlendMode blendMode)
{
    if (tileRect.isEmpty())
        return;

    if (!ctxt.drawLuminanceMask()) {
        Image::drawPattern(ctxt, destRect, tileRect, transform, phase, spacing, op, blendMode);
        return;
    }

    // Build the luminance-mask tile lazily and cache it in m_cachedImage.
    if (!m_cachedImage) {
        auto buffer = ImageBuffer::createCompatibleBuffer(expandedIntSize(tileRect.size()), ColorSpaceSRGB, ctxt);
        if (!buffer)
            return;

        // Temporarily detach the observer so this internal draw doesn't
        // trigger changedInRect()/didDraw() notifications.
        ImageObserver* observer = imageObserver();
        setImageObserver(nullptr);
        draw(buffer->context(), tileRect, tileRect, op, blendMode, DecodingMode::Synchronous, ImageOrientationDescription());
        setImageObserver(observer);

        buffer->convertToLuminanceMask();
        m_cachedImage = buffer->copyImage(DontCopyBackingStore, Unscaled);
        if (!m_cachedImage)
            return;
    }

    // The mask has been baked into m_cachedImage, so the nested drawPattern
    // must not apply luminance masking again.
    ctxt.setDrawLuminanceMask(false);
    m_cachedImage->drawPattern(ctxt, destRect, tileRect, transform, phase, spacing, op, blendMode);
}
// Animation requires a nonzero repetition count, an animation that has not
// finished, and an observer to notify about frame changes.
bool BitmapImage::shouldAnimate() const
{
    if (!repetitionCount())
        return false;
    if (m_animationFinished)
        return false;
    return imageObserver();
}
// An image can actually animate only when it should animate and has more
// than one frame to cycle through.
bool BitmapImage::canAnimate() const
{
    if (!shouldAnimate())
        return false;
    return frameCount() > 1;
}
// Large-image async decoding applies only to non-animating images whose
// source supports async decoding.
bool BitmapImage::canUseAsyncDecodingForLargeImages() const
{
    if (canAnimate())
        return false;
    return m_source.canUseAsyncDecoding();
}
// Animated images are decoded asynchronously when the image animates, the
// setting allows it, and either the testing hook or the source permits it.
bool BitmapImage::shouldUseAsyncDecodingForAnimatedImages() const
{
    if (!canAnimate() || !m_allowAnimatedImageAsyncDecoding)
        return false;
    return shouldUseAsyncDecodingForAnimatedImagesForTesting() || m_source.canUseAsyncDecoding();
}
// Destroys any pending frame-advance timer; dropping the owning pointer
// stops the timer.
void BitmapImage::clearTimer()
{
    m_frameTimer.reset();
}
// Arms a one-shot timer that fires advanceAnimation() after |delay|. A timer
// must not already be pending.
void BitmapImage::startTimer(Seconds delay)
{
    ASSERT(!m_frameTimer);
    m_frameTimer = std::make_unique<Timer>(*this, &BitmapImage::advanceAnimation);
    m_frameTimer->startOneShot(delay);
}
// Decoded data must be preserved while an async decoding queue exists. For
// images using large-image async decoding, the observer gets a veto;
// otherwise destruction is always allowed.
bool BitmapImage::canDestroyDecodedData()
{
    if (m_source.hasAsyncDecodingQueue())
        return false;
    if (canUseAsyncDecodingForLargeImages())
        return !imageObserver() || imageObserver()->canDestroyDecodedData(*this);
    return true;
}
// Schedules the advance to the next animation frame, requesting an async
// decode of that frame when appropriate. Returns a status describing why the
// animation did or did not start.
BitmapImage::StartAnimationStatus BitmapImage::internalStartAnimation()
{
    if (!canAnimate())
        return StartAnimationStatus::CannotStart;

    // A pending timer means the next advance is already scheduled.
    if (m_frameTimer)
        return StartAnimationStatus::TimerActive;

    // Don't schedule a new advance while the next frame is still being
    // decoded asynchronously.
    size_t nextFrame = (m_currentFrame + 1) % frameCount();
    if (frameIsBeingDecodedAndIsCompatibleWithOptionsAtIndex(nextFrame, DecodingMode::Asynchronous)) {
        LOG(Images, "BitmapImage::%s - %p - url: %s [nextFrame = %ld is being decoded]", __FUNCTION__, this, sourceURL().string().utf8().data(), nextFrame);
        return StartAnimationStatus::DecodingActive;
    }

    if (m_currentFrame >= frameCount() - 1) {
        // The repetition count (e.g. in a GIF) can arrive after the frame
        // data, so don't wrap past the last frame while it may still change.
        if (!m_source.isAllDataReceived() && repetitionCount() == RepetitionCountOnce)
            return StartAnimationStatus::IncompleteData;

        ++m_repetitionsComplete;

        // End of animation: stop for good once the repetition budget is spent.
        if (repetitionCount() != RepetitionCountInfinite && m_repetitionsComplete >= repetitionCount()) {
            m_animationFinished = true;
            destroyDecodedDataIfNecessary(false);
            return StartAnimationStatus::CannotStart;
        }

        // Wrapping around to frame 0: earlier frames can all be purged.
        destroyDecodedDataIfNecessary(true);
    }

    // Don't advance onto a frame whose data hasn't fully arrived.
    if (!m_source.isAllDataReceived() && !frameIsCompleteAtIndex(nextFrame))
        return StartAnimationStatus::IncompleteData;

    MonotonicTime time = MonotonicTime::now();

    // First advance: anchor the schedule at "now".
    if (!m_desiredFrameStartTime)
        m_desiredFrameStartTime = time;

    // Clamping to "now" means we're running late; otherwise keep the cadence
    // dictated by the current frame's duration.
    m_desiredFrameStartTime = std::max(time, m_desiredFrameStartTime + Seconds { frameDurationAtIndex(m_currentFrame) });

    if (shouldUseAsyncDecodingForAnimatedImages()) {
        if (frameHasDecodedNativeImageCompatibleWithOptionsAtIndex(nextFrame, m_currentSubsamplingLevel, { }))
            LOG(Images, "BitmapImage::%s - %p - url: %s [cachedFrameCount = %ld nextFrame = %ld]", __FUNCTION__, this, sourceURL().string().utf8().data(), ++m_cachedFrameCount, nextFrame);
        else {
            // Decode the next frame off the main thread ahead of its display time.
            m_source.requestFrameAsyncDecodingAtIndex(nextFrame, m_currentSubsamplingLevel);
            m_currentFrameDecodingStatus = DecodingStatus::Decoding;
            LOG(Images, "BitmapImage::%s - %p - url: %s [requesting async decoding for nextFrame = %ld]", __FUNCTION__, this, sourceURL().string().utf8().data(), nextFrame);
        }

        // Testing hooks: simulate a slow decoder and/or a decoder reset.
        m_desiredFrameDecodeTimeForTesting = time + std::max(m_frameDecodingDurationForTesting, 0_s);
        if (m_clearDecoderAfterAsyncFrameRequestForTesting)
            m_source.resetData(data());
    }

    ASSERT(!m_frameTimer);
    startTimer(m_desiredFrameStartTime - time);
    return StartAnimationStatus::Started;
}
// Timer callback: moves the animation forward one frame, unless the next
// frame is still being decoded asynchronously (in which case the advance
// happens from imageFrameAvailableAtIndex() when the decode finishes).
void BitmapImage::advanceAnimation()
{
    clearTimer();

    // Testing hook: delay the advance until the simulated decode deadline.
    if (shouldUseAsyncDecodingForAnimatedImagesForTesting()) {
        MonotonicTime time = MonotonicTime::now();
        if (m_desiredFrameDecodeTimeForTesting > std::max(time, m_desiredFrameStartTime)) {
            startTimer(m_desiredFrameDecodeTimeForTesting - time);
            return;
        }
    }

    size_t nextFrame = (m_currentFrame + 1) % frameCount();
    if (!frameIsBeingDecodedAndIsCompatibleWithOptionsAtIndex(nextFrame, DecodingMode::Asynchronous))
        internalAdvanceAnimation();
    else {
        // The decode is late; optionally flash the debug background so late
        // frames are visible while debugging.
        if (m_showDebugBackground)
            imageObserver()->changedInRect(*this);
        LOG(Images, "BitmapImage::%s - %p - url: %s [lateFrameCount = %ld nextFrame = %ld]", __FUNCTION__, this, sourceURL().string().utf8().data(), ++m_lateFrameCount, nextFrame);
    }
}
// Commits the advance to the next frame and notifies the observer that a new
// frame is available for drawing.
void BitmapImage::internalAdvanceAnimation()
{
    m_currentFrame = (m_currentFrame + 1) % frameCount();
    // The new current frame must not still be in the async decoding queue.
    ASSERT(!frameIsBeingDecodedAndIsCompatibleWithOptionsAtIndex(m_currentFrame, DecodingMode::Asynchronous));

    // Trim frames behind the new current frame if memory pressure warrants it.
    destroyDecodedDataIfNecessary(false);

    DecodingStatus decodingStatus = frameDecodingStatusAtIndex(m_currentFrame);
    setCurrentFrameDecodingStatusIfNecessary(decodingStatus);
    if (imageObserver())
        imageObserver()->imageFrameAvailable(*this, ImageAnimatingState::Yes, nullptr, decodingStatus);

    LOG(Images, "BitmapImage::%s - %p - url: %s [m_currentFrame = %ld]", __FUNCTION__, this, sourceURL().string().utf8().data(), m_currentFrame);
}
// The animation is in flight exactly when a frame-advance timer exists.
bool BitmapImage::isAnimating() const
{
    return m_frameTimer != nullptr;
}
// Halts the animation by killing the frame timer, and cancels any async
// frame decodes that are only needed for animation.
void BitmapImage::stopAnimation()
{
    clearTimer();
    if (canAnimate())
        m_source.stopAsyncDecodingQueue();
}
// Rewinds the animation to its initial state (frame 0, no completed
// repetitions) and purges decoded frames that are no longer needed.
void BitmapImage::resetAnimation()
{
    stopAnimation();
    m_currentFrame = 0;
    m_repetitionsComplete = RepetitionCountNone;
    m_desiredFrameStartTime = { };
    m_animationFinished = false;

    // Drop all frames except the (new) current one if the cache is large.
    destroyDecodedDataIfNecessary(true);
}
// Called by the decoding machinery when an asynchronously requested frame
// finishes. Either advances the animation to that frame or, for a still
// image, notifies the observer that the (single) frame can now be drawn.
void BitmapImage::imageFrameAvailableAtIndex(size_t index)
{
    LOG(Images, "BitmapImage::%s - %p - url: %s [requested frame %ld is now available]", __FUNCTION__, this, sourceURL().string().utf8().data(), index);

    if (canAnimate()) {
        if (index == (m_currentFrame + 1) % frameCount()) {
            // If the timer already fired (decode arrived late), advance now;
            // otherwise the pending timer will pick the frame up on schedule.
            if (!m_frameTimer)
                internalAdvanceAnimation();
            else
                LOG(Images, "BitmapImage::%s - %p - url: %s [earlyFrameCount = %ld nextFrame = %ld]", __FUNCTION__, this, sourceURL().string().utf8().data(), ++m_earlyFrameCount, index);
            return;
        }

        // A frame other than the next one arrived: incremental data turned a
        // still image into an animation while frame 0 was being decoded.
        ASSERT(!m_repetitionsComplete);
        LOG(Images, "BitmapImage::%s - %p - url: %s [More data makes frameCount() > 1]", __FUNCTION__, this, sourceURL().string().utf8().data());
    }

    // Still-image path: only frame 0 can have been requested.
    ASSERT(index == m_currentFrame && !m_currentFrame);
    if (m_source.isAsyncDecodingQueueIdle())
        m_source.stopAsyncDecodingQueue();

    DecodingStatus decodingStatus = frameDecodingStatusAtIndex(m_currentFrame);
    setCurrentFrameDecodingStatusIfNecessary(decodingStatus);
    if (m_currentFrameDecodingStatus == DecodingStatus::Complete)
        ++m_decodeCountForTesting;

    if (imageObserver())
        imageObserver()->imageFrameAvailable(*this, ImageAnimatingState::No, nullptr, decodingStatus);
}
// Testing accessor: how many frame decodes this image has performed.
unsigned BitmapImage::decodeCountForTesting() const
{
    return m_decodeCountForTesting;
}
// Writes a debug description of this image (base properties, current frame
// for animations, and the source's state) to |ts|.
void BitmapImage::dump(TextStream& ts) const
{
    Image::dump(ts);

    if (isAnimated())
        ts.dumpProperty("current-frame", m_currentFrame);

    m_source.dump(ts);
}
}