1/*
2 * Copyright (C) 2010 Google Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 *
8 * 1.  Redistributions of source code must retain the above copyright
9 *     notice, this list of conditions and the following disclaimer.
10 * 2.  Redistributions in binary form must reproduce the above copyright
11 *     notice, this list of conditions and the following disclaimer in the
12 *     documentation and/or other materials provided with the distribution.
13 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
14 *     its contributors may be used to endorse or promote products derived
15 *     from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
29#include "config.h"
30#include "platform/image-decoders/webp/WEBPImageDecoder.h"
31
32#include "platform/PlatformInstrumentation.h"
33#include "platform/RuntimeEnabledFeatures.h"
34
35#if USE(QCMSLIB)
36#include "qcms.h"
37#endif
38
39#if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN)
40#error Blink assumes a little-endian target.
41#endif
42
43#if SK_B32_SHIFT // Output little-endian RGBA pixels (Android).
44inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
45#else // Output little-endian BGRA pixels.
46inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; }
47#endif
48
49inline uint8_t blendChannel(uint8_t src, uint8_t srcA, uint8_t dst, uint8_t dstA, unsigned scale)
50{
51    unsigned blendUnscaled = src * srcA + dst * dstA;
52    ASSERT(blendUnscaled < (1ULL << 32) / scale);
53    return (blendUnscaled * scale) >> 24;
54}
55
56inline uint32_t blendSrcOverDstNonPremultiplied(uint32_t src, uint32_t dst)
57{
58    uint8_t srcA = SkGetPackedA32(src);
59    if (srcA == 0)
60        return dst;
61
62    uint8_t dstA = SkGetPackedA32(dst);
63    uint8_t dstFactorA = (dstA * SkAlpha255To256(255 - srcA)) >> 8;
64    ASSERT(srcA + dstFactorA < (1U << 8));
65    uint8_t blendA = srcA + dstFactorA;
66    unsigned scale = (1UL << 24) / blendA;
67
68    uint8_t blendR = blendChannel(SkGetPackedR32(src), srcA, SkGetPackedR32(dst), dstFactorA, scale);
69    uint8_t blendG = blendChannel(SkGetPackedG32(src), srcA, SkGetPackedG32(dst), dstFactorA, scale);
70    uint8_t blendB = blendChannel(SkGetPackedB32(src), srcA, SkGetPackedB32(dst), dstFactorA, scale);
71
72    return SkPackARGB32NoCheck(blendA, blendR, blendG, blendB);
73}
74
75// Returns two point ranges (<left, width> pairs) at row 'canvasY', that belong to 'src' but not 'dst'.
76// A point range is empty if the corresponding width is 0.
77inline void findBlendRangeAtRow(const blink::IntRect& src, const blink::IntRect& dst, int canvasY, int& left1, int& width1, int& left2, int& width2)
78{
79    ASSERT_WITH_SECURITY_IMPLICATION(canvasY >= src.y() && canvasY < src.maxY());
80    left1 = -1;
81    width1 = 0;
82    left2 = -1;
83    width2 = 0;
84
85    if (canvasY < dst.y() || canvasY >= dst.maxY() || src.x() >= dst.maxX() || src.maxX() <= dst.x()) {
86        left1 = src.x();
87        width1 = src.width();
88        return;
89    }
90
91    if (src.x() < dst.x()) {
92        left1 = src.x();
93        width1 = dst.x() - src.x();
94    }
95
96    if (src.maxX() > dst.maxX()) {
97        left2 = dst.maxX();
98        width2 = src.maxX() - dst.maxX();
99    }
100}
101
102void alphaBlendPremultiplied(blink::ImageFrame& src, blink::ImageFrame& dst, int canvasY, int left, int width)
103{
104    for (int x = 0; x < width; ++x) {
105        int canvasX = left + x;
106        blink::ImageFrame::PixelData& pixel = *src.getAddr(canvasX, canvasY);
107        if (SkGetPackedA32(pixel) != 0xff) {
108            blink::ImageFrame::PixelData prevPixel = *dst.getAddr(canvasX, canvasY);
109            pixel = SkPMSrcOver(pixel, prevPixel);
110        }
111    }
112}
113
114void alphaBlendNonPremultiplied(blink::ImageFrame& src, blink::ImageFrame& dst, int canvasY, int left, int width)
115{
116    for (int x = 0; x < width; ++x) {
117        int canvasX = left + x;
118        blink::ImageFrame::PixelData& pixel = *src.getAddr(canvasX, canvasY);
119        if (SkGetPackedA32(pixel) != 0xff) {
120            blink::ImageFrame::PixelData prevPixel = *dst.getAddr(canvasX, canvasY);
121            pixel = blendSrcOverDstNonPremultiplied(pixel, prevPixel);
122        }
123    }
124}
125
126namespace blink {
127
// Constructs a WebP decoder. No parsing happens here; demuxing and decoding
// are deferred until data arrives via setData() and frames are requested.
WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
    ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption,
    size_t maxDecodedBytes)
    : ImageDecoder(alphaOption, gammaAndColorProfileOption, maxDecodedBytes)
    , m_decoder(0)
    , m_formatFlags(0)
    , m_frameBackgroundHasAlpha(false)
    , m_hasColorProfile(false)
#if USE(QCMSLIB)
    , m_haveReadProfile(false)
    , m_transform(0)
#endif
    , m_demux(0)
    , m_demuxState(WEBP_DEMUX_PARSING_HEADER)
    , m_haveAlreadyParsedThisData(false)
    , m_haveReadAnimationParameters(false)
    , m_repetitionCount(cAnimationLoopOnce)
    , m_decodedHeight(0)
{
    // Select the row-blend routine once, based on the requested alpha mode.
    m_blendFunction = (alphaOption == ImageSource::AlphaPremultiplied) ? alphaBlendPremultiplied : alphaBlendNonPremultiplied;
}
149
WEBPImageDecoder::~WEBPImageDecoder()
{
    // Releases the demuxer, the incremental decoder and any color transform.
    clear();
}
154
// Releases all libwebp/qcms state: color transform (if any), demuxer and
// incremental decoder.
void WEBPImageDecoder::clear()
{
#if USE(QCMSLIB)
    clearColorTransform();
#endif
    WebPDemuxDelete(m_demux);
    m_demux = 0;
    clearDecoder();
}
164
// Releases only the incremental decoder state (keeping the demuxer), so a
// frame can later be re-decoded from its first byte.
void WEBPImageDecoder::clearDecoder()
{
    WebPIDelete(m_decoder);
    m_decoder = 0;
    m_decodedHeight = 0;
    m_frameBackgroundHasAlpha = false;
}
172
173bool WEBPImageDecoder::isSizeAvailable()
174{
175    if (!ImageDecoder::isSizeAvailable())
176        updateDemuxer();
177
178    return ImageDecoder::isSizeAvailable();
179}
180
181size_t WEBPImageDecoder::frameCount()
182{
183    if (!updateDemuxer())
184        return 0;
185
186    return m_frameBufferCache.size();
187}
188
// Returns the decoded frame at |index|, decoding it on demand together with
// any incomplete frames it depends on. Returns 0 on failure or if |index| is
// out of range.
ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
{
    if (index >= frameCount())
        return 0;

    ImageFrame& frame = m_frameBufferCache[index];
    if (frame.status() == ImageFrame::FrameComplete)
        return &frame;

    // Walk the dependency chain backwards, collecting every frame that must
    // be decoded before |index|, stopping at the first complete frame (or at
    // a frame with no dependency).
    Vector<size_t> framesToDecode;
    size_t frameToDecode = index;
    do {
        framesToDecode.append(frameToDecode);
        frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
    } while (frameToDecode != kNotFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);

    ASSERT(m_demux);
    // Decode oldest-first so each frame's required predecessor is complete
    // before initFrameBuffer() copies from it.
    for (size_t i = framesToDecode.size(); i > 0; --i) {
        size_t frameIndex = framesToDecode[i - 1];
        if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(frameIndex))
            return 0;
        WebPIterator webpFrame;
        if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame)) // Demux frame numbers are 1-based.
            return 0;
        PlatformInstrumentation::willDecodeImage("WEBP");
        decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
        PlatformInstrumentation::didDecodeImage();
        WebPDemuxReleaseIterator(&webpFrame);

        if (failed())
            return 0;

        // We need more data to continue decoding.
        if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
            break;
    }

    // It is also a fatal error if all data is received and we have decoded all
    // frames available but the file is truncated.
    if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
        setFailed();

    frame.notifyBitmapIfPixelsChanged();
    return &frame;
}
234
// Receives (possibly partial) encoded data. Clears the parsed flag so that
// updateDemuxer() re-examines the buffer on the next query.
void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived)
{
    if (failed())
        return;
    ImageDecoder::setData(data, allDataReceived);
    m_haveAlreadyParsedThisData = false;
}
242
243int WEBPImageDecoder::repetitionCount() const
244{
245    return failed() ? cAnimationLoopOnce : m_repetitionCount;
246}
247
248bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const
249{
250    if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
251        return false;
252    if (!(m_formatFlags & ANIMATION_FLAG))
253        return ImageDecoder::frameIsCompleteAtIndex(index);
254    bool frameIsLoadedAtIndex = index < m_frameBufferCache.size();
255    return frameIsLoadedAtIndex;
256}
257
258float WEBPImageDecoder::frameDurationAtIndex(size_t index) const
259{
260    return index < m_frameBufferCache.size() ? m_frameBufferCache[index].duration() : 0;
261}
262
// (Re)parses the demuxer over the full accumulated data. Returns true once
// the canvas size is known and frame metadata is up to date; false when more
// data is needed; sets failure on fatal errors.
bool WEBPImageDecoder::updateDemuxer()
{
    if (failed())
        return false;

    // Skip reparsing when no new data has arrived since the last call.
    if (m_haveAlreadyParsedThisData)
        return true;

    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 20;
    if (m_data->size() < webpHeaderSize)
        return false; // Wait for headers so that WebPDemuxPartial doesn't return null.

    // Rebuild the demuxer from scratch over the entire buffer.
    WebPDemuxDelete(m_demux);
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE))
        return setFailed();

    if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
        return false; // Not enough data for parsing canvas width/height yet.

    bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
    if (!ImageDecoder::isSizeAvailable()) {
        // Header just became parseable: record format flags and canvas size.
        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
        hasAnimation = (m_formatFlags & ANIMATION_FLAG);
        if (!hasAnimation)
            m_repetitionCount = cAnimationNone;
        else
            m_formatFlags &= ~ICCP_FLAG; // FIXME: Implement ICC profile support for animated images.
#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            m_hasColorProfile = true;
#endif
        if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
    if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
        // As we have parsed at least one frame (even if partially),
        // we must already have parsed the animation properties.
        // This is because ANIM chunk always precedes ANMF chunks.
        m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
        ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
        // |m_repetitionCount| is the total number of animation cycles to show,
        // with 0 meaning "infinite". But ImageSource::repetitionCount()
        // returns -1 for "infinite", and 0 and up for "show the animation one
        // cycle more than this value". By subtracting one here, we convert
        // both finite and infinite cases correctly.
        --m_repetitionCount;
        m_haveReadAnimationParameters = true;
    }

    // Populate metadata for any newly-discovered frames.
    const size_t oldFrameCount = m_frameBufferCache.size();
    if (newFrameCount > oldFrameCount) {
        m_frameBufferCache.resize(newFrameCount);
        for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
            m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
            if (!hasAnimation) {
                // Still images have a single frame with no dependency.
                ASSERT(!i);
                m_frameBufferCache[i].setRequiredPreviousFrameIndex(kNotFound);
                continue;
            }
            WebPIterator animatedFrame;
            WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame); // Frame numbers are 1-based.
            ASSERT(animatedFrame.complete == 1);
            m_frameBufferCache[i].setDuration(animatedFrame.duration);
            m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
            m_frameBufferCache[i].setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor);
            IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height);
            // Make sure the frameRect doesn't extend outside the buffer.
            if (frameRect.maxX() > size().width())
                frameRect.setWidth(size().width() - animatedFrame.x_offset);
            if (frameRect.maxY() > size().height())
                frameRect.setHeight(size().height() - animatedFrame.y_offset);
            m_frameBufferCache[i].setOriginalFrameRect(frameRect);
            m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, !animatedFrame.has_alpha));
            WebPDemuxReleaseIterator(&animatedFrame);
        }
    }

    return true;
}
349
// Prepares the pixel buffer for animated frame |frameIndex|, either as a
// blank canvas or seeded from its required previous frame. Returns false
// (and sets failure) if buffer allocation fails.
bool WEBPImageDecoder::initFrameBuffer(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    if (buffer.status() != ImageFrame::FrameEmpty) // Already initialized.
        return true;

    const size_t requiredPreviousFrameIndex = buffer.requiredPreviousFrameIndex();
    if (requiredPreviousFrameIndex == kNotFound) {
        // This frame doesn't rely on any previous data.
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        // If the frame rect doesn't cover the whole canvas, the uncovered
        // area remains transparent.
        m_frameBackgroundHasAlpha = !buffer.originalFrameRect().contains(IntRect(IntPoint(), size()));
    } else {
        const ImageFrame& prevBuffer = m_frameBufferCache[requiredPreviousFrameIndex];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);

        // Preserve the last frame as the starting state for this frame.
        if (!buffer.copyBitmapData(prevBuffer))
            return setFailed();

        if (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor) {
            // We want to clear the previous frame to transparent, without
            // affecting pixels in the image outside of the frame.
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            ASSERT(!prevRect.contains(IntRect(IntPoint(), size())));
            buffer.zeroFillFrameRect(prevRect);
        }

        m_frameBackgroundHasAlpha = prevBuffer.hasAlpha() || (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor);
    }

    buffer.setStatus(ImageFrame::FramePartial);
    // The buffer is transparent outside the decoded area while the image is loading.
    // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
    buffer.setHasAlpha(true);
    return true;
}
387
388size_t WEBPImageDecoder::clearCacheExceptFrame(size_t clearExceptFrame)
389{
390    // If |clearExceptFrame| has status FrameComplete, we preserve that frame.
391    // Otherwise, we preserve a previous frame with status FrameComplete whose data is required
392    // to decode |clearExceptFrame|, either in initFrameBuffer() or ApplyPostProcessing().
393    // All other frames can be cleared.
394    while ((clearExceptFrame < m_frameBufferCache.size()) && (m_frameBufferCache[clearExceptFrame].status() != ImageFrame::FrameComplete))
395        clearExceptFrame = m_frameBufferCache[clearExceptFrame].requiredPreviousFrameIndex();
396
397    return ImageDecoder::clearCacheExceptFrame(clearExceptFrame);
398}
399
// Clears the pixel data of frame |frameIndex|. If that frame was only
// partially decoded, also resets the incremental decoder so the frame can be
// re-decoded from its first byte when requested again.
void WEBPImageDecoder::clearFrameBuffer(size_t frameIndex)
{
    if (m_demux && m_demuxState >= WEBP_DEMUX_PARSED_HEADER && m_frameBufferCache[frameIndex].status() == ImageFrame::FramePartial) {
        // Clear the decoder state so that this partial frame can be decoded again when requested.
        clearDecoder();
    }
    ImageDecoder::clearFrameBuffer(frameIndex);
}
408
409#if USE(QCMSLIB)
410
411void WEBPImageDecoder::clearColorTransform()
412{
413    if (m_transform)
414        qcms_transform_release(m_transform);
415    m_transform = 0;
416}
417
418bool WEBPImageDecoder::createColorTransform(const char* data, size_t size)
419{
420    clearColorTransform();
421
422    qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile();
423    if (!deviceProfile)
424        return false;
425    qcms_profile* inputProfile = qcms_profile_from_memory(data, size);
426    if (!inputProfile)
427        return false;
428
429    // We currently only support color profiles for RGB profiled images.
430    ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile));
431    // The input image pixels are RGBA format.
432    qcms_data_type format = QCMS_DATA_RGBA_8;
433    // FIXME: Don't force perceptual intent if the image profile contains an intent.
434    m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCMS_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL);
435
436    qcms_profile_release(inputProfile);
437    return !!m_transform;
438}
439
440void WEBPImageDecoder::readColorProfile()
441{
442    WebPChunkIterator chunkIterator;
443    if (!WebPDemuxGetChunk(m_demux, "ICCP", 1, &chunkIterator)) {
444        WebPDemuxReleaseChunkIterator(&chunkIterator);
445        return;
446    }
447
448    const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.bytes);
449    size_t profileSize = chunkIterator.chunk.size;
450
451    // Only accept RGB color profiles from input class devices.
452    bool ignoreProfile = false;
453    if (profileSize < ImageDecoder::iccColorProfileHeaderLength)
454        ignoreProfile = true;
455    else if (!ImageDecoder::rgbColorProfile(profileData, profileSize))
456        ignoreProfile = true;
457    else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize))
458        ignoreProfile = true;
459
460    if (!ignoreProfile)
461        createColorTransform(profileData, profileSize);
462
463    WebPDemuxReleaseChunkIterator(&chunkIterator);
464}
465
466#endif // USE(QCMSLIB)
467
// Post-processes the rows newly decoded since the last call (the window
// [m_decodedHeight, decodedHeight)): optional color-profile conversion, then
// alpha-blending against the previous canvas for animated frames.
void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    int width;
    int decodedHeight;
    if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
        return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
    if (decodedHeight <= 0)
        return;

    const IntRect& frameRect = buffer.originalFrameRect();
    ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width());
    ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height());
    const int left = frameRect.x();
    const int top = frameRect.y();

#if USE(QCMSLIB)
    if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) {
        if (!m_haveReadProfile) {
            readColorProfile();
            m_haveReadProfile = true;
        }
        // Color-correct each new row in place, then repack its pixels via
        // setRGBA() so the configured alpha handling is applied.
        for (int y = m_decodedHeight; y < decodedHeight; ++y) {
            const int canvasY = top + y;
            uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY));
            if (qcms_transform* transform = colorTransform())
                qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
            uint8_t* pixel = row;
            for (int x = 0; x < width; ++x, pixel += 4) {
                const int canvasX = left + x;
                buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]);
            }
        }
    }
#endif // USE(QCMSLIB)

    // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255).
    // However, the value of each of these pixels should have been determined by blending it against the value
    // of that pixel in the previous frame if alpha blend source was 'BlendAtopPreviousFrame'. So, we correct these
    // pixels based on disposal method of the previous frame and the previous frame buffer.
    // FIXME: This could be avoided if libwebp decoder had an API that used the previous required frame
    // to do the alpha-blending by itself.
    if ((m_formatFlags & ANIMATION_FLAG) && frameIndex && buffer.alphaBlendSource() == ImageFrame::BlendAtopPreviousFrame && buffer.requiredPreviousFrameIndex() != kNotFound) {
        ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
        ImageFrame::DisposalMethod prevDisposalMethod = prevBuffer.disposalMethod();
        if (prevDisposalMethod == ImageFrame::DisposeKeep) { // Blend transparent pixels with pixels in previous canvas.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                m_blendFunction(buffer, prevBuffer, top + y, left, width);
            }
        } else if (prevDisposalMethod == ImageFrame::DisposeOverwriteBgcolor) {
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            // We need to blend a transparent pixel with its value just after initFrame() call. That is:
            //   * Blend with fully transparent pixel if it belongs to prevRect <-- This is a no-op.
            //   * Blend with the pixel in the previous canvas otherwise <-- Needs alpha-blending.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                int canvasY = top + y;
                int left1, width1, left2, width2;
                findBlendRangeAtRow(frameRect, prevRect, canvasY, left1, width1, left2, width2);
                if (width1 > 0)
                    m_blendFunction(buffer, prevBuffer, canvasY, left1, width1);
                if (width2 > 0)
                    m_blendFunction(buffer, prevBuffer, canvasY, left2, width2);
            }
        }
    }

    // Remember how far we have post-processed, and flag the pixels dirty.
    m_decodedHeight = decodedHeight;
    buffer.setPixelsChanged(true);
}
538
// Incrementally decodes |dataSize| bytes of |dataBytes| into frame
// |frameIndex|. When |onlySize| is true, stops after the bitstream header has
// yielded the image dimensions. Returns true when the frame is fully decoded
// (or the size was obtained); false when more data is needed or on failure.
bool WEBPImageDecoder::decode(const uint8_t* dataBytes, size_t dataSize, bool onlySize, size_t frameIndex)
{
    if (failed())
        return false;

    if (!ImageDecoder::isSizeAvailable()) {
        // Minimum bytes needed before WebPGetFeatures can report dimensions.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_formatFlags = features.has_alpha ? ALPHA_FLAG : 0;
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(m_frameBufferCache.size() > frameIndex);
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        // Non-animated path (initFrameBuffer() was not called): size the
        // buffer to the full canvas here.
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        // The buffer is transparent outside the decoded area while the image is loading.
        // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
        buffer.setHasAlpha(true);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    const IntRect& frameRect = buffer.originalFrameRect();
    if (!m_decoder) {
        WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
#endif
        // Decode directly into the frame's pixel buffer (external memory),
        // using the canvas width as the row stride.
        WebPInitDecBuffer(&m_decoderBuffer);
        m_decoderBuffer.colorspace = mode;
        m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
        m_decoderBuffer.is_external_memory = 1;
        m_decoder = WebPINewDecoder(&m_decoderBuffer);
        if (!m_decoder)
            return setFailed();
    }

    // Re-point the output at the frame's top-left pixel each call, in case
    // the backing store moved.
    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        applyPostProcessing(frameIndex);
        buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
        buffer.setStatus(ImageFrame::FrameComplete);
        clearDecoder();
        return true;
    case VP8_STATUS_SUSPENDED:
        // The decoder wants more data: post-process what we have and wait,
        // unless no more data can ever arrive (then fall through to failure).
        if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) {
            applyPostProcessing(frameIndex);
            return false;
        }
        // FALLTHROUGH
    default:
        clear();
        return setFailed();
    }
}
615}
616
617} // namespace blink
618