#include "config.h"
#include "ImageBuffer.h"
#include "Base64.h"
#include "BitmapImage.h"
#include "GraphicsContext.h"
#include "GraphicsContextCG.h"
#include "ImageData.h"
#include "MIMETypeRegistry.h"
#include <ApplicationServices/ApplicationServices.h>
#include <math.h>
#include <wtf/Assertions.h>
#include <wtf/CheckedArithmetic.h>
#include <wtf/MainThread.h>
#include <wtf/OwnArrayPtr.h>
#include <wtf/RetainPtr.h>
#include <wtf/UnusedParam.h>
#include <wtf/text/WTFString.h>
#if PLATFORM(MAC) || PLATFORM(CHROMIUM)
#include "WebCoreSystemInterface.h"
#endif
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
#include <IOSurface/IOSurface.h>
#endif
#if defined(BUILDING_ON_LION)
#include <wtf/CurrentTime.h>
#endif
using namespace std;
namespace WebCore {
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
// IOSurfaces beyond this dimension (or below minIOSurfaceArea pixels) are not
// worth backing with the GPU; callers fall back to a plain bitmap context.
static const int maxIOSurfaceDimension = 4096;
static const int minIOSurfaceArea = 50 * 100;

static RetainPtr<IOSurfaceRef> createIOSurface(const IntSize& size)
{
    // 32-bit 'BGRA' pixels, 4 bytes per element.
    unsigned pixelFormat = 'BGRA';
    unsigned bytesPerElement = 4;
    int width = size.width();
    int height = size.height();

    // Let IOSurface round the row stride and total allocation up to its
    // alignment requirements; a zero result means the request is invalid.
    unsigned long bytesPerRow = IOSurfaceAlignProperty(kIOSurfaceBytesPerRow, size.width() * bytesPerElement);
    if (!bytesPerRow)
        return 0;
    unsigned long allocSize = IOSurfaceAlignProperty(kIOSurfaceAllocSize, size.height() * bytesPerRow);
    if (!allocSize)
        return 0;

    const void* keys[6] = {
        kIOSurfaceWidth,
        kIOSurfaceHeight,
        kIOSurfacePixelFormat,
        kIOSurfaceBytesPerElement,
        kIOSurfaceBytesPerRow,
        kIOSurfaceAllocSize
    };
    const void* values[6] = {
        CFNumberCreate(0, kCFNumberIntType, &width),
        CFNumberCreate(0, kCFNumberIntType, &height),
        CFNumberCreate(0, kCFNumberIntType, &pixelFormat),
        CFNumberCreate(0, kCFNumberIntType, &bytesPerElement),
        CFNumberCreate(0, kCFNumberLongType, &bytesPerRow),
        CFNumberCreate(0, kCFNumberLongType, &allocSize)
    };

    RetainPtr<CFDictionaryRef> dict(AdoptCF, CFDictionaryCreate(0, keys, values, 6, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
    // The dictionary retained the numbers; drop our creation references.
    for (unsigned i = 0; i < 6; ++i)
        CFRelease(values[i]);

    return RetainPtr<IOSurfaceRef>(AdoptCF, IOSurfaceCreate(dict.get()));
}
#endif
// CGDataProviderReleaseDataCallback for the bitmap backing store: the data
// provider owns the fastCalloc'd pixel buffer (see CGDataProviderCreateWithData
// in the ImageBuffer constructor) and frees it here when it is destroyed.
static void releaseImageData(void*, const void* data, size_t)
{
fastFree(const_cast<void*>(data));
}
// Constructs an ImageBuffer of |size| logical pixels, scaled by
// |resolutionScale| to device pixels. On success, m_context is a CG-backed
// GraphicsContext (IOSurface-accelerated when requested and feasible,
// otherwise a plain bitmap context) and |success| is set to true.
ImageBuffer::ImageBuffer(const IntSize& size, float resolutionScale, ColorSpace imageColorSpace, RenderingMode renderingMode, DeferralMode, bool& success)
    : m_data(size)
    , m_logicalSize(size)
    , m_resolutionScale(resolutionScale)
{
    // Initialize the out-parameter before any early return: previously this
    // was assigned only after the isExpressibleAsIntSize() check below, so a
    // failed size check returned with the caller's bool uninitialized.
    success = false;

    // Compute the backing-store size in device pixels, rounding up.
    float scaledWidth = ceilf(resolutionScale * size.width());
    float scaledHeight = ceilf(resolutionScale * size.height());
    if (!FloatSize(scaledWidth, scaledHeight).isExpressibleAsIntSize())
        return;
    m_size = IntSize(scaledWidth, scaledHeight);

    bool accelerateRendering = renderingMode == Accelerated;
    if (m_size.width() <= 0 || m_size.height() <= 0)
        return;

    // Guard the byte-size computation against integer overflow.
    Checked<int, RecordOverflow> width = m_size.width();
    Checked<int, RecordOverflow> height = m_size.height();
    m_data.m_bytesPerRow = 4 * width;
    Checked<size_t, RecordOverflow> numBytes = height * m_data.m_bytesPerRow;
    if (numBytes.hasOverflowed())
        return;

#if USE(IOSURFACE_CANVAS_BACKING_STORE)
    // IOSurfaces have a maximum dimension, and small surfaces aren't worth
    // the acceleration overhead.
    if (width.unsafeGet() >= maxIOSurfaceDimension || height.unsafeGet() >= maxIOSurfaceDimension || (width * height).unsafeGet() < minIOSurfaceArea)
        accelerateRendering = false;
#else
    ASSERT(renderingMode == Unaccelerated);
#endif

    switch (imageColorSpace) {
    case ColorSpaceDeviceRGB:
        m_data.m_colorSpace = deviceRGBColorSpaceRef();
        break;
    case ColorSpaceSRGB:
        m_data.m_colorSpace = sRGBColorSpaceRef();
        break;
    case ColorSpaceLinearRGB:
        m_data.m_colorSpace = linearRGBColorSpaceRef();
        break;
    }

    RetainPtr<CGContextRef> cgContext;
    if (accelerateRendering) {
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
        m_data.m_surface = createIOSurface(m_size);
        cgContext.adoptCF(wkIOSurfaceContextCreate(m_data.m_surface.get(), width.unsafeGet(), height.unsafeGet(), m_data.m_colorSpace));
#endif
        // Fall back to a bitmap context if the IOSurface-backed context
        // could not be created.
        if (!cgContext)
            accelerateRendering = false;
    }

    if (!accelerateRendering) {
        if (!tryFastCalloc(height.unsafeGet(), m_data.m_bytesPerRow.unsafeGet()).getValue(m_data.m_data))
            return;
        ASSERT(!(reinterpret_cast<size_t>(m_data.m_data) & 2));

        m_data.m_bitmapInfo = kCGImageAlphaPremultipliedLast;
        cgContext.adoptCF(CGBitmapContextCreate(m_data.m_data, width.unsafeGet(), height.unsafeGet(), 8, m_data.m_bytesPerRow.unsafeGet(), m_data.m_colorSpace, m_data.m_bitmapInfo));
        // The data provider takes ownership of m_data.m_data and frees it
        // via releaseImageData when destroyed.
        m_data.m_dataProvider.adoptCF(CGDataProviderCreateWithData(0, m_data.m_data, numBytes.unsafeGet(), releaseImageData));
    }

    if (!cgContext)
        return;

    m_context = adoptPtr(new GraphicsContext(cgContext.get()));
    m_context->applyDeviceScaleFactor(m_resolutionScale);
    // Flip the CG coordinate system so the origin is at the top left.
    m_context->scale(FloatSize(1, -1));
    m_context->translate(0, -size.height());
    m_context->setIsAcceleratedContext(accelerateRendering);
#if defined(BUILDING_ON_LION)
    m_data.m_lastFlushTime = currentTimeMS();
#endif
    success = true;
}
// All resources (CGContext, data provider, IOSurface, pixel buffer) are
// released by the smart-pointer members; nothing to do explicitly.
ImageBuffer::~ImageBuffer()
{
}
// Returns the buffer's drawing context.
// On Lion, accelerated (IOSurface-backed) contexts are flushed here at most
// once per maxFlushInterval milliseconds so that queued GPU work does not
// accumulate unboundedly between explicit flush points.
GraphicsContext* ImageBuffer::context() const
{
#if defined(BUILDING_ON_LION)
if (m_context->isAcceleratedContext()) {
// Throttled flush: only flush if enough time has elapsed since the last one.
double elapsedTime = currentTimeMS() - m_data.m_lastFlushTime;
double maxFlushInterval = 20; // milliseconds
if (elapsedTime > maxFlushInterval) {
CGContextRef context = m_context->platformContext();
CGContextFlush(context);
m_data.m_lastFlushTime = currentTimeMS();
}
}
#endif
return m_context.get();
}
// Returns a BitmapImage of the buffer contents, downscaled to logical size
// unless scaleBehavior is Unscaled or no scaling is in effect.
PassRefPtr<Image> ImageBuffer::copyImage(BackingStoreCopy copyBehavior, ScaleBehavior scaleBehavior) const
{
    RetainPtr<CGImageRef> image;
    if (m_resolutionScale == 1 || scaleBehavior == Unscaled) {
        // copyNativeImage() follows the CF "Copy rule" and returns a +1
        // reference; adopt it rather than plain-assigning (which would retain
        // again and leak the image).
        image.adoptCF(copyNativeImage(copyBehavior));
    } else {
        image.adoptCF(copyNativeImage(DontCopyBackingStore));
        // Draw the device-pixel backing store into a logical-size bitmap
        // context to produce the downscaled result.
        RetainPtr<CGContextRef> context(AdoptCF, CGBitmapContextCreate(0, logicalSize().width(), logicalSize().height(), 8, 4 * logicalSize().width(), deviceRGBColorSpaceRef(), kCGImageAlphaPremultipliedLast));
        CGContextSetBlendMode(context.get(), kCGBlendModeCopy);
        CGContextDrawImage(context.get(), CGRectMake(0, 0, logicalSize().width(), logicalSize().height()), image.get());
        // CGBitmapContextCreateImage also returns +1; adopt to avoid a leak.
        image.adoptCF(CGBitmapContextCreateImage(context.get()));
    }

    if (!image)
        return 0;

    return BitmapImage::create(image.get());
}
// Returns a CGImage of the buffer contents; the caller owns the returned
// (+1) reference.
// - DontCopyBackingStore: wraps the live pixel buffer via the data provider,
//   so later drawing into the buffer can affect the returned image.
// - CopyBackingStore: snapshots the bitmap context's current pixels.
// Accelerated (IOSurface-backed) buffers always create an image from the
// surface context, regardless of copyBehavior.
NativeImagePtr ImageBuffer::copyNativeImage(BackingStoreCopy copyBehavior) const
{
CGImageRef image = 0;
if (!m_context->isAcceleratedContext()) {
switch (copyBehavior) {
case DontCopyBackingStore:
// Wrap the existing backing store without copying any pixels.
image = CGImageCreate(internalSize().width(), internalSize().height(), 8, 32, m_data.m_bytesPerRow.unsafeGet(), m_data.m_colorSpace, m_data.m_bitmapInfo, m_data.m_dataProvider.get(), 0, true, kCGRenderingIntentDefault);
break;
case CopyBackingStore:
image = CGBitmapContextCreateImage(context()->platformContext());
break;
default:
ASSERT_NOT_REACHED();
break;
}
}
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
else {
// wkIOSurfaceContextCreateImage flushes the surface context; record the
// flush time so context() doesn't flush again immediately (Lion only).
image = wkIOSurfaceContextCreateImage(context()->platformContext());
#if defined(BUILDING_ON_LION)
m_data.m_lastFlushTime = currentTimeMS();
#endif
}
#endif
return image;
}
// Draws srcRect (in logical coordinates) of this buffer into destRect of
// destContext.
void ImageBuffer::draw(GraphicsContext* destContext, ColorSpace styleColorSpace, const FloatRect& destRect, const FloatRect& srcRect, CompositeOperator op, bool useLowQualityScale)
{
    UNUSED_PARAM(useLowQualityScale);

    bool drawingIntoSelf = destContext == m_context;
    // Drawing into ourselves ignores the style color space and uses device RGB.
    ColorSpace resolvedColorSpace = drawingIntoSelf ? ColorSpaceDeviceRGB : styleColorSpace;

    // The backing store must be copied when the source and destination are the
    // same buffer, or when the destination is accelerated.
    BackingStoreCopy copyMode = (drawingIntoSelf || destContext->isAcceleratedContext()) ? CopyBackingStore : DontCopyBackingStore;
    RetainPtr<CGImageRef> image;
    image.adoptCF(copyNativeImage(copyMode));

    // Map the source rect from logical to backing-store (device) coordinates.
    FloatRect scaledSrcRect = srcRect;
    scaledSrcRect.scale(m_resolutionScale, m_resolutionScale);

    destContext->drawNativeImage(image.get(), internalSize(), resolvedColorSpace, destRect, scaledSrcRect, op);
}
// Tiles srcRect (in logical coordinates) of this buffer as a pattern into
// destRect of destContext.
void ImageBuffer::drawPattern(GraphicsContext* destContext, const FloatRect& srcRect, const AffineTransform& patternTransform, const FloatPoint& phase, ColorSpace styleColorSpace, CompositeOperator op, const FloatRect& destRect)
{
    // Map the source rect from logical to backing-store (device) coordinates.
    FloatRect scaledSrcRect = srcRect;
    scaledSrcRect.scale(m_resolutionScale, m_resolutionScale);

    // A copy of the backing store is required when this buffer is accelerated,
    // when tiling into ourselves, or when the destination is accelerated;
    // otherwise the pattern may reference our pixels directly.
    bool mustCopy = m_context->isAcceleratedContext() || destContext == m_context || destContext->isAcceleratedContext();
    RefPtr<Image> patternImage = copyImage(mustCopy ? CopyBackingStore : DontCopyBackingStore);
    patternImage->drawPattern(destContext, scaledSrcRect, patternTransform, phase, styleColorSpace, op, destRect);
}
// Clips contextToClip to this buffer's contents, treated as an alpha mask
// positioned at rect.
void ImageBuffer::clip(GraphicsContext* contextToClip, const FloatRect& rect) const
{
    CGContextRef cgContext = contextToClip->platformContext();

    RetainPtr<CGImageRef> maskImage(AdoptCF, copyNativeImage(DontCopyBackingStore));
    // CGContextClipToMask expects a bottom-left origin: flip the CTM around
    // the rect, apply the clip, then undo the flip to restore the transform.
    CGContextTranslateCTM(cgContext, rect.x(), rect.y() + rect.height());
    CGContextScaleCTM(cgContext, 1, -1);
    CGContextClipToMask(cgContext, FloatRect(FloatPoint(), rect.size()), maskImage.get());
    CGContextScaleCTM(cgContext, 1, -1);
    CGContextTranslateCTM(cgContext, -rect.x(), -rect.y() - rect.height());
}
// Reads back rect's pixels as unmultiplied RGBA data.
PassRefPtr<Uint8ClampedArray> ImageBuffer::getUnmultipliedImageData(const IntRect& rect, CoordinateSystem coordinateSystem) const
{
    bool accelerated = m_context->isAcceleratedContext();
    if (accelerated) {
        // Make pending GPU work visible to the CPU before reading pixels.
        CGContextFlush(context()->platformContext());
#if defined(BUILDING_ON_LION)
        m_data.m_lastFlushTime = currentTimeMS();
#endif
    }

    float scale = coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1;
    return m_data.getData(rect, internalSize(), accelerated, true /* unmultiplied */, scale);
}
// Reads back rect's pixels as premultiplied RGBA data.
PassRefPtr<Uint8ClampedArray> ImageBuffer::getPremultipliedImageData(const IntRect& rect, CoordinateSystem coordinateSystem) const
{
    bool accelerated = m_context->isAcceleratedContext();
    if (accelerated) {
        // Make pending GPU work visible to the CPU before reading pixels.
        CGContextFlush(context()->platformContext());
#if defined(BUILDING_ON_LION)
        m_data.m_lastFlushTime = currentTimeMS();
#endif
    }

    float scale = coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1;
    return m_data.getData(rect, internalSize(), accelerated, false /* premultiplied */, scale);
}
// Writes sourceRect of the given pixel data into this buffer at destPoint.
// Unaccelerated buffers write directly into the backing store; accelerated
// (IOSurface) buffers stage the pixels in a temporary unaccelerated buffer
// and blit it into the surface with a copy blend.
void ImageBuffer::putByteArray(Multiply multiplied, Uint8ClampedArray* source, const IntSize& sourceSize, const IntRect& sourceRect, const IntPoint& destPoint, CoordinateSystem coordinateSystem)
{
if (!m_context->isAcceleratedContext()) {
m_data.putData(source, sourceSize, sourceRect, destPoint, internalSize(), m_context->isAcceleratedContext(), multiplied == Unmultiplied, coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1);
return;
}
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
// Stage the source pixels in an unaccelerated scratch buffer.
IntSize sourceCopySize(sourceRect.width(), sourceRect.height());
OwnPtr<ImageBuffer> sourceCopy = ImageBuffer::create(sourceCopySize, 1, ColorSpaceDeviceRGB, Unaccelerated);
if (!sourceCopy)
return;
sourceCopy->m_data.putData(source, sourceSize, sourceRect, IntPoint(-sourceRect.x(), -sourceRect.y()), sourceCopy->internalSize(), sourceCopy->context()->isAcceleratedContext(), multiplied == Unmultiplied, 1);
// Blit the staged pixels into the surface with all drawing state neutralized:
// identity-equivalent CTM, no clip, no interpolation, full alpha, copy blend,
// no shadow — so the bytes land exactly as provided.
CGContextRef destContext = context()->platformContext();
CGContextSaveGState(destContext);
if (coordinateSystem == LogicalCoordinateSystem)
CGContextConcatCTM(destContext, AffineTransform(wkGetUserToBaseCTM(destContext)).inverse());
else
CGContextConcatCTM(destContext, AffineTransform(CGContextGetCTM(destContext)).inverse());
wkCGContextResetClip(destContext);
CGContextSetInterpolationQuality(destContext, kCGInterpolationNone);
CGContextSetAlpha(destContext, 1.0);
CGContextSetBlendMode(destContext, kCGBlendModeCopy);
CGContextSetShadowWithColor(destContext, CGSizeZero, 0, 0);
// Convert destPoint from top-left-origin coordinates to CG's bottom-left origin.
IntPoint destPointInCGCoords(destPoint.x() + sourceRect.x(), (coordinateSystem == LogicalCoordinateSystem ? logicalSize() : internalSize()).height() - (destPoint.y() + sourceRect.y()) - sourceRect.height());
IntRect destRectInCGCoords(destPointInCGCoords, sourceCopySize);
RetainPtr<CGImageRef> sourceCopyImage(AdoptCF, sourceCopy->copyNativeImage());
CGContextDrawImage(destContext, destRectInCGCoords, sourceCopyImage.get());
CGContextRestoreGState(destContext);
#endif
}
// Returns the uniform type identifier for JPEG. On Windows the system does
// not provide kUTTypeJPEG, so a local equivalent constant is defined.
static inline CFStringRef jpegUTI()
{
#if PLATFORM(WIN)
static const CFStringRef kUTTypeJPEG = CFSTR("public.jpeg");
#endif
return kUTTypeJPEG;
}
// Maps an image MIME type to its uniform type identifier.
// On Mac this asks Launch Services; elsewhere only the three encodable types
// (PNG, JPEG, GIF) are supported via hard-coded constants, falling back to
// PNG for anything unexpected (which is asserted against).
static RetainPtr<CFStringRef> utiFromMIMEType(const String& mimeType)
{
#if PLATFORM(MAC)
RetainPtr<CFStringRef> mimeTypeCFString(AdoptCF, mimeType.createCFString());
return RetainPtr<CFStringRef>(AdoptCF, UTTypeCreatePreferredIdentifierForTag(kUTTagClassMIMEType, mimeTypeCFString.get(), 0));
#else
// The CFSTR statics below are not thread-safe to initialize.
ASSERT(isMainThread());
static const CFStringRef kUTTypePNG = CFSTR("public.png");
static const CFStringRef kUTTypeGIF = CFSTR("com.compuserve.gif");
if (equalIgnoringCase(mimeType, "image/png"))
return kUTTypePNG;
if (equalIgnoringCase(mimeType, "image/jpeg"))
return jpegUTI();
if (equalIgnoringCase(mimeType, "image/gif"))
return kUTTypeGIF;
ASSERT_NOT_REACHED();
return kUTTypePNG;
#endif
}
// Encodes a CGImage as a base64 data: URL of the given MIME type. A null
// image or any encoding failure yields the empty data URL "data:,".
// |quality| (0..1) is honored for JPEG only.
static String CGImageToDataURL(CGImageRef image, const String& mimeType, const double* quality)
{
    if (!image)
        return "data:,";

    RetainPtr<CFMutableDataRef> encodedData(AdoptCF, CFDataCreateMutable(kCFAllocatorDefault, 0));
    if (!encodedData)
        return "data:,";

    RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
    ASSERT(uti);

    RetainPtr<CGImageDestinationRef> destination(AdoptCF, CGImageDestinationCreateWithData(encodedData.get(), uti.get(), 1, 0));
    if (!destination)
        return "data:,";

    // JPEG is the only format with a compression-quality knob; pass it through
    // as the destination's lossy compression quality when in range.
    RetainPtr<CFDictionaryRef> imageProperties = 0;
    if (CFEqual(uti.get(), jpegUTI()) && quality && *quality >= 0.0 && *quality <= 1.0) {
        RetainPtr<CFNumberRef> compressionQuality(AdoptCF, CFNumberCreate(kCFAllocatorDefault, kCFNumberDoubleType, quality));
        const void* key = kCGImageDestinationLossyCompressionQuality;
        const void* value = compressionQuality.get();
        imageProperties.adoptCF(CFDictionaryCreate(0, &key, &value, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
    }

    CGImageDestinationAddImage(destination.get(), image, imageProperties.get());
    CGImageDestinationFinalize(destination.get());

    Vector<char> base64Data;
    base64Encode(reinterpret_cast<const char*>(CFDataGetBytePtr(encodedData.get())), CFDataGetLength(encodedData.get()), base64Data);

    return "data:" + mimeType + ";base64," + base64Data;
}
// Serializes the buffer as a base64 data: URL of the given MIME type.
// JPEG takes a special path: the pixels are read back premultiplied and the
// image is created with kCGImageAlphaNoneSkipLast so the alpha channel is
// dropped (JPEG cannot represent transparency). Other types snapshot the
// native image, downscaling to logical size when a resolution scale applies.
String ImageBuffer::toDataURL(const String& mimeType, const double* quality, CoordinateSystem) const
{
ASSERT(MIMETypeRegistry::isSupportedImageMIMETypeForEncoding(mimeType));
RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
ASSERT(uti);
RefPtr<Uint8ClampedArray> premultipliedData;
RetainPtr<CGImageRef> image;
if (CFEqual(uti.get(), jpegUTI())) {
// Keep premultipliedData alive until encoding completes: the data
// provider references its bytes without copying.
premultipliedData = getPremultipliedImageData(IntRect(IntPoint(0, 0), logicalSize()));
if (!premultipliedData)
return "data:,";
RetainPtr<CGDataProviderRef> dataProvider;
dataProvider.adoptCF(CGDataProviderCreateWithData(0, premultipliedData->data(), 4 * logicalSize().width() * logicalSize().height(), 0));
if (!dataProvider)
return "data:,";
image.adoptCF(CGImageCreate(logicalSize().width(), logicalSize().height(), 8, 32, 4 * logicalSize().width(),
deviceRGBColorSpaceRef(), kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast,
dataProvider.get(), 0, false, kCGRenderingIntentDefault));
} else if (m_resolutionScale == 1)
image.adoptCF(copyNativeImage(CopyBackingStore));
else {
// Downscale the device-pixel backing store to logical size before encoding.
image.adoptCF(copyNativeImage(DontCopyBackingStore));
RetainPtr<CGContextRef> context(AdoptCF, CGBitmapContextCreate(0, logicalSize().width(), logicalSize().height(), 8, 4 * logicalSize().width(), deviceRGBColorSpaceRef(), kCGImageAlphaPremultipliedLast));
CGContextSetBlendMode(context.get(), kCGBlendModeCopy);
CGContextDrawImage(context.get(), CGRectMake(0, 0, logicalSize().width(), logicalSize().height()), image.get());
image.adoptCF(CGBitmapContextCreateImage(context.get()));
}
return CGImageToDataURL(image.get(), mimeType, quality);
}
// Serializes raw ImageData (unmultiplied RGBA) as a base64 data: URL.
// For JPEG, which cannot represent transparency, the pixels are first
// premultiplied by alpha and forced opaque.
String ImageDataToDataURL(const ImageData& source, const String& mimeType, const double* quality)
{
    ASSERT(MIMETypeRegistry::isSupportedImageMIMETypeForEncoding(mimeType));

    RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
    ASSERT(uti);

    unsigned char* pixels = source.data()->data();
    // Must stay alive until CGImageToDataURL finishes: the data provider
    // references these bytes without copying them.
    Vector<uint8_t> premultipliedData;

    if (CFEqual(uti.get(), jpegUTI())) {
        int pixelCount = source.width() * source.height();
        premultipliedData.resize(4 * pixelCount);
        unsigned char* out = premultipliedData.data();
        for (int i = 0; i < pixelCount; ++i) {
            const unsigned char* src = pixels + 4 * i;
            unsigned char* dst = out + 4 * i;
            int alpha = src[3];
            if (alpha == 255) {
                // Fully opaque: channels are unchanged by premultiplication.
                dst[0] = src[0];
                dst[1] = src[1];
                dst[2] = src[2];
            } else {
                dst[0] = src[0] * alpha / 255;
                dst[1] = src[1] * alpha / 255;
                dst[2] = src[2] * alpha / 255;
            }
            dst[3] = 255; // Force opaque; JPEG drops alpha anyway.
        }
        pixels = out;
    }

    RetainPtr<CGDataProviderRef> dataProvider;
    dataProvider.adoptCF(CGDataProviderCreateWithData(0, pixels, 4 * source.width() * source.height(), 0));
    if (!dataProvider)
        return "data:,";

    RetainPtr<CGImageRef> image;
    image.adoptCF(CGImageCreate(source.width(), source.height(), 8, 32, 4 * source.width(),
        deviceRGBColorSpaceRef(), kCGBitmapByteOrderDefault | kCGImageAlphaLast,
        dataProvider.get(), 0, false, kCGRenderingIntentDefault));

    return CGImageToDataURL(image.get(), mimeType, quality);
}
}