/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <string.h>
#include "JNIHelpers.h"
#include "utils/log.h"
#include "utils/math.h"
#include "webp/format_constants.h"

#include "FrameSequence_webp.h"

#define WEBP_DEBUG 0

////////////////////////////////////////////////////////////////////////////////
// Frame sequence
////////////////////////////////////////////////////////////////////////////////

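// Reads a 32-bit little-endian value. MKFOURCC(a, b, c, d) packs its arguments as
// a | b << 8 | c << 16 | d << 24, so e.g. the bytes { 0x10, 0x27, 0x00, 0x00 } decode to
// 0x2710 (10000). Used below to read the size field of the RIFF header.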
static uint32_t GetLE32(const uint8_t* const data) {
    return MKFOURCC(data[0], data[1], data[2], data[3]);
}

// Returns true if the frame covers full canvas.
static bool isFullFrame(const WebPIterator& frame, int canvasWidth, int canvasHeight) {
    return (frame.width == canvasWidth && frame.height == canvasHeight);
}

// Returns true if the rectangle defined by 'frame' contains pixel (x, y).
static bool FrameContainsPixel(const WebPIterator& frame, int x, int y) {
    const int left = frame.x_offset;
    const int right = left + frame.width;
    const int top = frame.y_offset;
    const int bottom = top + frame.height;
    return x >= left && x < right && y >= top && y < bottom;
}

// Construct mIsKeyFrame array.
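// A frame can be decoded without any earlier canvas content ("key frame") when either it is a
// full-canvas frame that has no alpha or does not blend, or the previous frame disposes to
// background and that previous frame either covers the full canvas or is itself a key frame.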
void FrameSequence_webp::constructDependencyChain() {
    const size_t frameCount = getFrameCount();
    mIsKeyFrame = new bool[frameCount];
    const int canvasWidth = getWidth();
    const int canvasHeight = getHeight();

    WebPIterator prev;
    WebPIterator curr;

    // Note: WebPDemuxGetFrame() uses base-1 counting.
    int ok = WebPDemuxGetFrame(mDemux, 1, &curr);
    ALOG_ASSERT(ok, "Could not retrieve frame# 0");
    mIsKeyFrame[0] = true;  // 0th frame is always a key frame.
    for (size_t i = 1; i < frameCount; i++) {
        prev = curr;
        ok = WebPDemuxGetFrame(mDemux, i + 1, &curr);  // Get ith frame.
        ALOG_ASSERT(ok, "Could not retrieve frame# %zu", i);

        if ((!curr.has_alpha || curr.blend_method == WEBP_MUX_NO_BLEND) &&
                isFullFrame(curr, canvasWidth, canvasHeight)) {
            mIsKeyFrame[i] = true;
        } else {
            mIsKeyFrame[i] = (prev.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
                    (isFullFrame(prev, canvasWidth, canvasHeight) || mIsKeyFrame[i - 1]);
        }
    }
    WebPDemuxReleaseIterator(&prev);
    WebPDemuxReleaseIterator(&curr);

#if WEBP_DEBUG
    ALOGD("Dependency chain:");
    for (size_t i = 0; i < frameCount; i++) {
        ALOGD("Frame# %zu: %s", i, mIsKeyFrame[i] ? "Key frame" : "NOT a key frame");
    }
#endif
}

FrameSequence_webp::FrameSequence_webp(Stream* stream) {
    if (stream->getRawBuffer() != NULL) {
        mData.size = stream->getRawBufferSize();
        mData.bytes = stream->getRawBufferAddr();
        mRawByteBuffer = stream->getRawBuffer();
    } else {
        // Read RIFF header to get file size.
        uint8_t riff_header[RIFF_HEADER_SIZE];
        if (stream->read(riff_header, RIFF_HEADER_SIZE) != RIFF_HEADER_SIZE) {
            ALOGE("WebP header load failed");
            return;
        }
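        // RIFF layout: bytes 0-3 are "RIFF", bytes 4-7 hold the little-endian size of everything
        // that follows them, and bytes 8-11 are "WEBP"; so the full file size is the 8-byte chunk
        // header plus the value of that size field.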
        mData.size = CHUNK_HEADER_SIZE + GetLE32(riff_header + TAG_SIZE);
        mData.bytes = new uint8_t[mData.size];
        memcpy((void*)mData.bytes, riff_header, RIFF_HEADER_SIZE);

        // Read rest of the bytes.
        void* remaining_bytes = (void*)(mData.bytes + RIFF_HEADER_SIZE);
        size_t remaining_size = mData.size - RIFF_HEADER_SIZE;
        if (stream->read(remaining_bytes, remaining_size) != remaining_size) {
            ALOGE("WebP full load failed");
            return;
        }
    }

    // Construct demux.
    mDemux = WebPDemux(&mData);
    if (!mDemux) {
        ALOGE("Parsing of WebP container file failed");
        return;
    }
    mLoopCount = WebPDemuxGetI(mDemux, WEBP_FF_LOOP_COUNT);
    mFormatFlags = WebPDemuxGetI(mDemux, WEBP_FF_FORMAT_FLAGS);
#if WEBP_DEBUG
    ALOGD("FrameSequence_webp created with size = %d x %d, number of frames = %d, flags = 0x%X",
          getWidth(), getHeight(), getFrameCount(), mFormatFlags);
#endif
    constructDependencyChain();
}

FrameSequence_webp::~FrameSequence_webp() {
    WebPDemuxDelete(mDemux);
    delete[] mIsKeyFrame;
    if (mRawByteBuffer == NULL) {
        delete[] mData.bytes;
    }
}

FrameSequenceState* FrameSequence_webp::createState() const {
    return new FrameSequenceState_webp(*this);
}

////////////////////////////////////////////////////////////////////////////////
// draw helpers
////////////////////////////////////////////////////////////////////////////////

static bool willBeCleared(const WebPIterator& iter) {
    return iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND;
}

// Returns true if the area of 'target' completely covers the area of 'covered'.
static bool checkIfCover(const WebPIterator& target, const WebPIterator& covered) {
    const int covered_x_max = covered.x_offset + covered.width;
    const int target_x_max = target.x_offset + target.width;
    const int covered_y_max = covered.y_offset + covered.height;
    const int target_y_max = target.y_offset + target.height;
    return target.x_offset <= covered.x_offset
           && covered_x_max <= target_x_max
           && target.y_offset <= covered.y_offset
           && covered_y_max <= target_y_max;
}

// Clear all pixels in a line to transparent.
static void clearLine(Color8888* dst, int width) {
    memset(dst, 0, width * sizeof(*dst));  // Note: Assumes TRANSPARENT == 0x0.
}

// Copy all pixels from 'src' to 'dst'.
static void copyFrame(const Color8888* src, int srcStride, Color8888* dst, int dstStride,
        int width, int height) {
    for (int y = 0; y < height; y++) {
        memcpy(dst, src, width * sizeof(*dst));
        src += srcStride;
        dst += dstStride;
    }
}

////////////////////////////////////////////////////////////////////////////////
// Frame sequence state
////////////////////////////////////////////////////////////////////////////////

FrameSequenceState_webp::FrameSequenceState_webp(const FrameSequence_webp& frameSequence) :
        mFrameSequence(frameSequence) {
    WebPInitDecoderConfig(&mDecoderConfig);
    mDecoderConfig.output.is_external_memory = 1;
    mDecoderConfig.output.colorspace = MODE_rgbA;  // Pre-multiplied alpha mode.
    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();
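    // Full-canvas scratch buffer; drawFrame() uses it as the second buffer when intermediate
    // frames have to be composited off-screen before the requested frame.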
    mPreservedBuffer = new Color8888[canvasWidth * canvasHeight];
}

FrameSequenceState_webp::~FrameSequenceState_webp() {
    delete[] mPreservedBuffer;
}

void FrameSequenceState_webp::initializeFrame(const WebPIterator& currIter, Color8888* currBuffer,
        int currStride, const WebPIterator& prevIter, const Color8888* prevBuffer, int prevStride) {
    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();
    const bool currFrameIsKeyFrame = mFrameSequence.isKeyFrame(currIter.frame_num - 1);

    if (currFrameIsKeyFrame) {  // Clear canvas.
        for (int y = 0; y < canvasHeight; y++) {
            Color8888* dst = currBuffer + y * currStride;
            clearLine(dst, canvasWidth);
        }
    } else {
        // Preserve previous frame as starting state of current frame.
        copyFrame(prevBuffer, prevStride, currBuffer, currStride, canvasWidth, canvasHeight);

        // Dispose previous frame rectangle to Background if needed.
        bool prevFrameCompletelyCovered =
                (!currIter.has_alpha || currIter.blend_method == WEBP_MUX_NO_BLEND) &&
                checkIfCover(currIter, prevIter);
        if ((prevIter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
                !prevFrameCompletelyCovered) {
            Color8888* dst = currBuffer + prevIter.x_offset + prevIter.y_offset * currStride;
            for (int j = 0; j < prevIter.height; j++) {
                clearLine(dst, prevIter.width);
                dst += currStride;
            }
        }
    }
}

bool FrameSequenceState_webp::decodeFrame(const WebPIterator& currIter, Color8888* currBuffer,
        int currStride, const WebPIterator& prevIter, const Color8888* prevBuffer, int prevStride) {
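    // Decode directly into the frame's sub-rectangle of the canvas: point the decoder's external
    // RGBA output at the frame's top-left pixel and give it the canvas row stride in bytes.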
    Color8888* dst = currBuffer + currIter.x_offset + currIter.y_offset * currStride;
    mDecoderConfig.output.u.RGBA.rgba = (uint8_t*)dst;
    mDecoderConfig.output.u.RGBA.stride = currStride * 4;
    mDecoderConfig.output.u.RGBA.size = mDecoderConfig.output.u.RGBA.stride * currIter.height;

    const WebPData& currFrame = currIter.fragment;
    if (WebPDecode(currFrame.bytes, currFrame.size, &mDecoderConfig) != VP8_STATUS_OK) {
        return false;
    }

    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();
    const bool currFrameIsKeyFrame = mFrameSequence.isKeyFrame(currIter.frame_num - 1);
    // During the decoding of current frame, we may have set some pixels to be transparent
    // (i.e. alpha < 255). However, the value of each of these pixels should have been determined
    // by blending it against the value of that pixel in the previous frame if WEBP_MUX_BLEND was
    // specified. So, we correct these pixels based on the disposal method of the previous frame
    // and the previous frame buffer.
    if (currIter.blend_method == WEBP_MUX_BLEND && !currFrameIsKeyFrame) {
        if (prevIter.dispose_method == WEBP_MUX_DISPOSE_NONE) {
            for (int y = 0; y < currIter.height; y++) {
                const int canvasY = currIter.y_offset + y;
                for (int x = 0; x < currIter.width; x++) {
                    const int canvasX = currIter.x_offset + x;
                    Color8888& currPixel = currBuffer[canvasY * currStride + canvasX];
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!(currPixel & COLOR_8888_ALPHA_MASK)) {
                        const Color8888 prevPixel = prevBuffer[canvasY * prevStride + canvasX];
                        currPixel = prevPixel;
                    }
                }
            }
        } else {  // prevIter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND
            // Transparent pixels need to be restored to their state just after frame
            // initialization. That is:
            //   * Transparent if the pixel lies in the previous frame's rectangle <-- a no-op.
            //   * The pixel from the previous canvas otherwise <-- needs to be restored.
            for (int y = 0; y < currIter.height; y++) {
                const int canvasY = currIter.y_offset + y;
                for (int x = 0; x < currIter.width; x++) {
                    const int canvasX = currIter.x_offset + x;
                    Color8888& currPixel = currBuffer[canvasY * currStride + canvasX];
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!(currPixel & COLOR_8888_ALPHA_MASK)
                            && !FrameContainsPixel(prevIter, canvasX, canvasY)) {
                        const Color8888 prevPixel = prevBuffer[canvasY * prevStride + canvasX];
                        currPixel = prevPixel;
                    }
                }
            }
        }
    }
    return true;
}

long FrameSequenceState_webp::drawFrame(int frameNr,
        Color8888* outputPtr, int outputPixelStride, int previousFrameNr) {
    WebPDemuxer* demux = mFrameSequence.getDemuxer();
    ALOG_ASSERT(demux, "Cannot drawFrame, mDemux is NULL");

#if WEBP_DEBUG
    ALOGD("  drawFrame called for frame# %d, previous frame# %d", frameNr, previousFrameNr);
#endif

    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();

    // Find the first frame to be decoded.
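    // Decoding can resume right after the previously rendered frame; if a key frame exists
    // between that point and frameNr, start at the most recent such key frame instead.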
    int start = max(previousFrameNr + 1, 0);
    int earliestRequired = frameNr;
    while (earliestRequired > start) {
        if (mFrameSequence.isKeyFrame(earliestRequired)) {
            start = earliestRequired;
            break;
        }
        earliestRequired--;
    }

    WebPIterator currIter;
    WebPIterator prevIter;
    int ok = WebPDemuxGetFrame(demux, start, &currIter);  // Get frame number 'start - 1'.
    ALOG_ASSERT(ok, "Could not retrieve frame# %d", start - 1);

    // Use preserve buffer only if needed.
    Color8888* prevBuffer = (frameNr == 0) ? outputPtr : mPreservedBuffer;
    int prevStride = (frameNr == 0) ? outputPixelStride : canvasWidth;
    Color8888* currBuffer = outputPtr;
    int currStride = outputPixelStride;
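    // From here on, prevBuffer/currBuffer ping-pong between the output buffer and the preserved
    // scratch buffer: the swap at the top of each loop iteration makes the buffer written in the
    // previous iteration the 'previous' canvas for the current one.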

    for (int i = start; i <= frameNr; i++) {
        prevIter = currIter;
        ok = WebPDemuxGetFrame(demux, i + 1, &currIter);  // Get ith frame.
        ALOG_ASSERT(ok, "Could not retrieve frame# %d", i);
#if WEBP_DEBUG
        ALOGD("      producing frame %d (has_alpha = %d, dispose = %s, blend = %s, duration = %d)",
              i, currIter.has_alpha,
              (currIter.dispose_method == WEBP_MUX_DISPOSE_NONE) ? "none" : "background",
              (currIter.blend_method == WEBP_MUX_BLEND) ? "yes" : "no", currIter.duration);
#endif
        // We swap the prev/curr buffers as we go.
        Color8888* tmpBuffer = prevBuffer;
        prevBuffer = currBuffer;
        currBuffer = tmpBuffer;

        int tmpStride = prevStride;
        prevStride = currStride;
        currStride = tmpStride;

#if WEBP_DEBUG
        ALOGD("            prev = %p, curr = %p, out = %p, tmp = %p",
              prevBuffer, currBuffer, outputPtr, mPreservedBuffer);
#endif
        // Process this frame.
        initializeFrame(currIter, currBuffer, currStride, prevIter, prevBuffer, prevStride);

        if (i == frameNr || !willBeCleared(currIter)) {
            if (!decodeFrame(currIter, currBuffer, currStride, prevIter, prevBuffer, prevStride)) {
                ALOGE("Error decoding frame# %d", i);
                return -1;
            }
        }
    }

    if (outputPtr != currBuffer) {
        copyFrame(currBuffer, currStride, outputPtr, outputPixelStride, canvasWidth, canvasHeight);
    }

    // Return last frame's delay.
    const int frameCount = mFrameSequence.getFrameCount();
    const int lastFrame = (frameNr + frameCount - 1) % frameCount;
    ok = WebPDemuxGetFrame(demux, lastFrame, &currIter);
    ALOG_ASSERT(ok, "Could not retrieve frame# %d", lastFrame - 1);
    const int lastFrameDelay = currIter.duration;

    WebPDemuxReleaseIterator(&currIter);
    WebPDemuxReleaseIterator(&prevIter);

    return lastFrameDelay;
}

////////////////////////////////////////////////////////////////////////////////
// Registry
////////////////////////////////////////////////////////////////////////////////

#include "Registry.h"

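// Sniffs the 12-byte RIFF header: a WebP file starts with "RIFF", then a 4-byte little-endian
// size, then the "WEBP" FourCC.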
static bool isWebP(void* header, int header_size) {
    const uint8_t* const header_str = (const uint8_t*)header;
    return (header_size >= RIFF_HEADER_SIZE) &&
            !memcmp("RIFF", header_str, 4) &&
            !memcmp("WEBP", header_str + 8, 4);
}

static bool acceptsWebPBuffer() {
    return true;
}

static FrameSequence* createFramesequence(Stream* stream) {
    return new FrameSequence_webp(stream);
}

static RegistryEntry gEntry = {
        RIFF_HEADER_SIZE,
        isWebP,
        createFramesequence,
        NULL,
        acceptsWebPBuffer,
};
static Registry gRegister(gEntry);
