/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
    Mutex::Autolock l(mInputMutex);
    if (!mCaptureAvailable) {
        mCaptureAvailable = true;
        mCaptureAvailableSignal.signal();
    }
}

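// (Re)configures the JPEG capture path: lazily creates the CPU consumer and its
// Surface, sizes the ashmem capture heap for the maximum JPEG, and recreates
// the BLOB output stream if the picture dimensions have changed.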
status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
    if (maxJpegSize <= 0) {
        ALOGE("%s: Camera %d: Jpeg buffer size (%zd) is invalid",
                __FUNCTION__, mId, maxJpegSize);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mCaptureConsumer = new CpuConsumer(consumer, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
        mCaptureWindow = new Surface(producer);
    }

    // Since ashmem heaps are rounded up to page size, don't reallocate if
    // the capture heap is already large enough for the JPEG and no more than
    // HEAP_SLACK_FACTOR times larger than required
    const size_t HEAP_SLACK_FACTOR = 2;
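    // For example, with HEAP_SLACK_FACTOR == 2 and a hypothetical 1 MB
    // maxJpegSize, the existing heap is reused as long as its size falls
    // between 1 MB and 2 MB; outside that window it is torn down and
    // reallocated below.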
    if (mCaptureHeap == 0 ||
            (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
            (mCaptureHeap->getSize() >
                    static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
        // Create memory for API consumption
        mCaptureHeap.clear();
        mCaptureHeap =
                new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }
    ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
            __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_JFIF,
                CAMERA3_STREAM_ROTATION_0, &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }

    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

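// Waits (up to kWaitDuration at a time) for onFrameAvailable to signal that a
// capture is ready, then drains every available buffer by calling
// processNewCapture until it stops returning OK.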
bool JpegProcessor::threadLoop() {
    status_t res;

    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        mCaptureAvailable = false;
    }

    do {
        res = processNewCapture();
    } while (res == OK);

    return true;
}

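// Copies the JPEG payload out of the locked BLOB buffer into the ashmem
// capture heap and hands the resulting MemoryBase to the CaptureSequencer.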
status_t JpegProcessor::processNewCapture() {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;
    sp<MemoryBase> captureBuffer;

    CpuConsumer::LockedBuffer imgBuffer;

    {
        Mutex::Autolock l(mInputMutex);
        if (mCaptureStreamId == NO_STREAM) {
            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
        if (res != OK) {
            if (res != BAD_VALUE) {
                ALOGE("%s: Camera %d: Error receiving still image buffer: "
                        "%s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
            }
            return res;
        }

        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
                mId);

        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
            ALOGE("%s: Camera %d: Unexpected format for still image: "
                    "%x, expected %x", __FUNCTION__, mId,
                    imgBuffer.format,
                    HAL_PIXEL_FORMAT_BLOB);
            mCaptureConsumer->unlockBuffer(imgBuffer);
            return OK;
        }

        // Find size of JPEG image
        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
        if (jpegSize == 0) { // failed to find size, default to whole buffer
            jpegSize = imgBuffer.width;
        }
        size_t heapSize = mCaptureHeap->getSize();
        if (jpegSize > heapSize) {
            ALOGW("%s: JPEG image is larger than expected, truncating "
                    "(got %zu, expected at most %zu bytes)",
                    __FUNCTION__, jpegSize, heapSize);
            jpegSize = heapSize;
        }

        // TODO: Optimize this to avoid memcopy
        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
        void* captureMemory = mCaptureHeap->getBase();
        memcpy(captureMemory, imgBuffer.data, jpegSize);

        mCaptureConsumer->unlockBuffer(imgBuffer);
    }

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we ignore the data and just use the length to skip to the
 * next segment.  This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG-compressed image stream begins.
 * This can be considered an opaque format with one requirement: all 0xFF bytes
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a segment.  The only exception is the
 * End of Image (EOI) marker at the end of the image stream, which is 0xFF
 * followed by a non-zero (0xD9) byte.
 */
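/*
 * Illustrative example (not taken from a real capture): a buffer beginning
 *
 *   0xFF 0xD8  0xFF 0xE0  0x00 0x10  <14 bytes of APP0 data>  0xFF 0xDB ...
 *
 * starts with the SOI marker (0xFF 0xD8), followed by an APP0 segment whose
 * marker is 0xFF 0xE0 and whose big-endian length field is 0x0010 = 16 bytes
 * (the length counts itself plus the segment data, but not the marker).  The
 * scan in findJpegSize below therefore advances by 16 + MARKER_LENGTH = 18
 * bytes from the segment marker to reach the next marker (0xFF 0xDB, a
 * quantization table).
 */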

const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

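// The segment header below is overlaid directly onto the raw byte stream, so
// it is byte-packed to guarantee that its in-memory layout matches the stream
// (no padding, sizeof(segment_t) == 4) regardless of the ABI's default alignment.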
#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android