1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <array>
23 #include <memory>
24 #include <vector>
25 #include <cmath>
26 
27 #include <android-base/properties.h>
28 #include <utils/Log.h>
29 #include <utils/Errors.h>
30 #include <utils/StrongPointer.h>
31 #include <utils/RefBase.h>
32 #include <utils/Vector.h>
33 #include <utils/String8.h>
34 #include <system/camera_metadata.h>
35 #include <camera/CameraMetadata.h>
36 #include <img_utils/DngUtils.h>
37 #include <img_utils/TagDefinitions.h>
38 #include <img_utils/TiffIfd.h>
39 #include <img_utils/TiffWriter.h>
40 #include <img_utils/Output.h>
41 #include <img_utils/Input.h>
42 #include <img_utils/StripSource.h>
43 
44 #include "core_jni_helpers.h"
45 
46 #include "android_runtime/AndroidRuntime.h"
47 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
48 
49 #include <jni.h>
50 #include <nativehelper/JNIHelp.h>
51 #include <nativehelper/ScopedUtfChars.h>
52 
53 using namespace android;
54 using namespace img_utils;
55 using android::base::GetProperty;
56 
// Throws IllegalArgumentException and returns false from the enclosing
// function if the status expression does not evaluate to OK.
#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }


// As above, but returns nullptr (for functions returning pointers or sp<>).
#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


// As above, but returns -1 (for functions returning a numeric status/index).
#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return -1; \
    }

// Throws IllegalArgumentException and returns nullptr if the metadata entry
// has no elements (i.e. the tag was missing from the metadata).
#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }

// As above, but returns false.
#define BAIL_IF_EMPTY_RET_BOOL(entry, jnienv, tagId, writer)               \
    if ((entry).count == 0) {                                              \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                             "Missing metadata fields for tag %s (%x)",    \
                             (writer)->getTagName(tagId), (tagId));        \
        return false;                                                      \
    }

// As above, but returns BAD_VALUE (for functions returning status_t).
#define BAIL_IF_EMPTY_RET_STATUS(entry, jnienv, tagId, writer)             \
    if ((entry).count == 0) {                                              \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                             "Missing metadata fields for tag %s (%x)",    \
                             (writer)->getTagName(tagId), (tagId));        \
        return BAD_VALUE;                                                  \
    }

// Throws IllegalArgumentException and returns nullptr if the arbitrary
// boolean expression is true.
#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if (expr) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }
110 
// Name of the Java field on DngCreator that caches the native context pointer.
#define ANDROID_DNGCREATOR_CTX_JNI_ID     "mNativeContext"

// Cached JNI field/method IDs, resolved once during class registration and
// reused across calls (see the JNI setup code elsewhere in this file).
static struct {
    jfieldID mNativeContext;   // DngCreator.mNativeContext
} gDngCreatorClassInfo;

static struct {
    jmethodID mWriteMethod;    // write method on the wrapped Java OutputStream
} gOutputStreamClassInfo;

static struct {
    jmethodID mReadMethod;     // read method on the wrapped Java InputStream
    jmethodID mSkipMethod;     // skip method on the wrapped Java InputStream
} gInputStreamClassInfo;

static struct {
    jmethodID mGetMethod;      // bulk get method on the wrapped Java ByteBuffer
} gInputByteBufferClassInfo;

// Pixel-layout constants for RAW16 and RGB888 data, plus the fixed IFD
// indices used when assembling the DNG file.
enum {
    BITS_PER_SAMPLE = 16,
    BYTES_PER_SAMPLE = 2,
    BYTES_PER_RGB_PIXEL = 3,
    BITS_PER_RGB_SAMPLE = 8,
    BYTES_PER_RGB_SAMPLE = 1,
    SAMPLES_PER_RGB_PIXEL = 3,
    SAMPLES_PER_RAW_PIXEL = 1,
    TIFF_IFD_0 = 0,        // main (RAW) image IFD
    TIFF_IFD_SUB1 = 1,     // sub-IFD (thumbnail/preview)
    TIFF_IFD_GPSINFO = 2,  // GPS info IFD
};
142 
143 
144 /**
145  * POD container class for GPS tag data.
146  */
class GpsData {
public:
    enum {
        // 6 uint32s per value — presumably three numerator/denominator
        // rational pairs (EXIF GPS convention); verify against the code that
        // fills these in.
        GPS_VALUE_LENGTH = 6,
        // Reference strings, e.g. "N"/"S"/"E"/"W" plus NUL terminator
        // (assumed — confirm against the setter).
        GPS_REF_LENGTH = 2,
        // Date string buffer; 11 bytes fits "YYYY:MM:DD" + NUL (assumed
        // format — confirm against the setter).
        GPS_DATE_LENGTH = 11,
    };

    uint32_t mLatitude[GPS_VALUE_LENGTH];
    uint32_t mLongitude[GPS_VALUE_LENGTH];
    uint32_t mTimestamp[GPS_VALUE_LENGTH];
    uint8_t mLatitudeRef[GPS_REF_LENGTH];
    uint8_t mLongitudeRef[GPS_REF_LENGTH];
    uint8_t mDate[GPS_DATE_LENGTH];
};
162 
163 // ----------------------------------------------------------------------------
164 
165 /**
166  * Container class for the persistent native context.
167  */
168 
class NativeContext : public LightRefBase<NativeContext> {
public:
    enum {
        // Buffer length for a formatted date/time string.
        DATETIME_COUNT = 20,
    };

    // Copies of the metadata are stored; callers may free their copies.
    NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
    virtual ~NativeContext();

    // Returns the TIFF writer owned by this context (never null).
    TiffWriter* getWriter();

    // Static camera characteristics / capture result captured at construction.
    std::shared_ptr<const CameraMetadata> getCharacteristics() const;
    std::shared_ptr<const CameraMetadata> getResult() const;

    // Thumbnail accessors; getThumbnail() contents are only meaningful when
    // hasThumbnail() returns true.
    uint32_t getThumbnailWidth() const;
    uint32_t getThumbnailHeight() const;
    const uint8_t* getThumbnail() const;
    bool hasThumbnail() const;

    // Stores a deep copy of the given thumbnail pixels (BYTES_PER_RGB_PIXEL
    // bytes per pixel). Returns false if the internal buffer cannot be
    // allocated.
    bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);

    // TIFF orientation value to write, see TAG_ORIENTATION.
    void setOrientation(uint16_t orientation);
    uint16_t getOrientation() const;

    // Optional image description string; has* tells whether it was set.
    void setDescription(const String8& desc);
    String8 getDescription() const;
    bool hasDescription() const;

    // Optional GPS data; has* tells whether it was set.
    void setGpsData(const GpsData& data);
    GpsData getGpsData() const;
    bool hasGpsData() const;

    // Optional pre-formatted capture time string; has* tells whether set.
    void setCaptureTime(const String8& formattedCaptureTime);
    String8 getCaptureTime() const;
    bool hasCaptureTime() const;

private:
    Vector<uint8_t> mCurrentThumbnail;     // owned copy of thumbnail pixels
    TiffWriter mWriter;
    std::shared_ptr<CameraMetadata> mCharacteristics;
    std::shared_ptr<CameraMetadata> mResult;
    uint32_t mThumbnailWidth;
    uint32_t mThumbnailHeight;
    uint16_t mOrientation;
    bool mThumbnailSet;
    bool mGpsSet;
    bool mDescriptionSet;
    bool mCaptureTimeSet;
    String8 mDescription;
    GpsData mGpsData;
    String8 mFormattedCaptureTime;
};
221 
NativeContext(const CameraMetadata & characteristics,const CameraMetadata & result)222 NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
223         mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
224         mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
225         mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
226         mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}
227 
~NativeContext()228 NativeContext::~NativeContext() {}
229 
getWriter()230 TiffWriter* NativeContext::getWriter() {
231     return &mWriter;
232 }
233 
// Static camera characteristics captured at construction (never null).
std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
    return mCharacteristics;
}

// Capture-result metadata captured at construction (never null).
std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
    return mResult;
}

uint32_t NativeContext::getThumbnailWidth() const {
    return mThumbnailWidth;
}

uint32_t NativeContext::getThumbnailHeight() const {
    return mThumbnailHeight;
}

// Pointer to the internally-owned thumbnail pixels; contents only
// meaningful when hasThumbnail() is true.
const uint8_t* NativeContext::getThumbnail() const {
    return mCurrentThumbnail.array();
}

bool NativeContext::hasThumbnail() const {
    return mThumbnailSet;
}
257 
setThumbnail(const uint8_t * buffer,uint32_t width,uint32_t height)258 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
259     mThumbnailWidth = width;
260     mThumbnailHeight = height;
261 
262     size_t size = BYTES_PER_RGB_PIXEL * width * height;
263     if (mCurrentThumbnail.resize(size) < 0) {
264         ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
265         return false;
266     }
267 
268     uint8_t* thumb = mCurrentThumbnail.editArray();
269     memcpy(thumb, buffer, size);
270     mThumbnailSet = true;
271     return true;
272 }
273 
// TIFF orientation value to be written (see TAG_ORIENTATION).
void NativeContext::setOrientation(uint16_t orientation) {
    mOrientation = orientation;
}

uint16_t NativeContext::getOrientation() const {
    return mOrientation;
}

// Stores a copy of the description and marks it as set.
void NativeContext::setDescription(const String8& desc) {
    mDescription = desc;
    mDescriptionSet = true;
}

String8 NativeContext::getDescription() const {
    return mDescription;
}

bool NativeContext::hasDescription() const {
    return mDescriptionSet;
}

// Stores a copy of the GPS data and marks it as set.
void NativeContext::setGpsData(const GpsData& data) {
    mGpsData = data;
    mGpsSet = true;
}

GpsData NativeContext::getGpsData() const {
    return mGpsData;
}

bool NativeContext::hasGpsData() const {
    return mGpsSet;
}

// Stores the pre-formatted capture time string and marks it as set.
void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
    mFormattedCaptureTime = formattedCaptureTime;
    mCaptureTimeSet = true;
}

String8 NativeContext::getCaptureTime() const {
    return mFormattedCaptureTime;
}

bool NativeContext::hasCaptureTime() const {
    return mCaptureTimeSet;
}
320 
321 // End of NativeContext
322 // ----------------------------------------------------------------------------
323 
324 /**
325  * Wrapper class for a Java OutputStream.
326  *
327  * This class is not intended to be used across JNI calls.
328  */
class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
public:
    // Does not take ownership of outStream; the reference must outlive this
    // object. May leave an OutOfMemoryError pending if the scratch array
    // cannot be allocated.
    JniOutputStream(JNIEnv* env, jobject outStream);

    virtual ~JniOutputStream();

    // No-op; the Java stream is assumed already open.
    status_t open();

    // Writes count bytes from buf + offset to the Java stream.
    status_t write(const uint8_t* buf, size_t offset, size_t count);

    // No-op; closing the Java stream is the caller's responsibility.
    status_t close();
private:
    enum {
        // Size of the scratch Java byte array used for chunked copies.
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mOutputStream;   // not owned
    JNIEnv* mEnv;            // only valid on the constructing thread
    jbyteArray mByteArray;   // local ref, released in the destructor
};
348 
JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
        mEnv(env) {
    // Scratch array used to marshal native bytes into the Java OutputStream.
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniOutputStream::~JniOutputStream() {
    // Release the local reference to the scratch array.
    mEnv->DeleteLocalRef(mByteArray);
}

status_t JniOutputStream::open() {
    // Do nothing
    return OK;
}
365 
write(const uint8_t * buf,size_t offset,size_t count)366 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
367     while(count > 0) {
368         size_t len = BYTE_ARRAY_LENGTH;
369         len = (count > len) ? len : count;
370         mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
371 
372         if (mEnv->ExceptionCheck()) {
373             return BAD_VALUE;
374         }
375 
376         mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
377                 0, len);
378 
379         if (mEnv->ExceptionCheck()) {
380             return BAD_VALUE;
381         }
382 
383         count -= len;
384         offset += len;
385     }
386     return OK;
387 }
388 
close()389 status_t JniOutputStream::close() {
390     // Do nothing
391     return OK;
392 }
393 
394 // End of JniOutputStream
395 // ----------------------------------------------------------------------------
396 
397 /**
398  * Wrapper class for a Java InputStream.
399  *
400  * This class is not intended to be used across JNI calls.
401  */
class JniInputStream : public Input, public LightRefBase<JniInputStream> {
public:
    // Does not take ownership of inStream; the reference must outlive this
    // object. May leave an OutOfMemoryError pending if the scratch array
    // cannot be allocated.
    JniInputStream(JNIEnv* env, jobject inStream);

    // No-op; the Java stream is assumed already open.
    status_t open();

    // No-op; closing the Java stream is the caller's responsibility.
    status_t close();

    // Reads up to count bytes into buf + offset. Returns the number of
    // bytes read, NOT_ENOUGH_DATA on EOF, or BAD_VALUE if a Java
    // exception is pending.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    // Skips up to count bytes. Same return conventions as read().
    ssize_t skip(size_t count);

    virtual ~JniInputStream();
private:
    enum {
        // Size of the scratch Java byte array used for chunked reads.
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mInStream;       // not owned
    JNIEnv* mEnv;            // only valid on the constructing thread
    jbyteArray mByteArray;   // local ref, released in the destructor

};
424 
JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
    // Scratch array used to marshal bytes out of the Java InputStream.
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputStream::~JniInputStream() {
    // Release the local reference to the scratch array.
    mEnv->DeleteLocalRef(mByteArray);
}
435 
read(uint8_t * buf,size_t offset,size_t count)436 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
437 
438     jint realCount = BYTE_ARRAY_LENGTH;
439     if (count < BYTE_ARRAY_LENGTH) {
440         realCount = count;
441     }
442     jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
443             realCount);
444 
445     if (actual < 0) {
446         return NOT_ENOUGH_DATA;
447     }
448 
449     if (mEnv->ExceptionCheck()) {
450         return BAD_VALUE;
451     }
452 
453     mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
454     if (mEnv->ExceptionCheck()) {
455         return BAD_VALUE;
456     }
457     return actual;
458 }
459 
skip(size_t count)460 ssize_t JniInputStream::skip(size_t count) {
461     jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
462             static_cast<jlong>(count));
463 
464     if (mEnv->ExceptionCheck()) {
465         return BAD_VALUE;
466     }
467     if (actual < 0) {
468         return NOT_ENOUGH_DATA;
469     }
470     return actual;
471 }
472 
open()473 status_t JniInputStream::open() {
474     // Do nothing
475     return OK;
476 }
477 
close()478 status_t JniInputStream::close() {
479     // Do nothing
480     return OK;
481 }
482 
483 // End of JniInputStream
484 // ----------------------------------------------------------------------------
485 
486 /**
487  * Wrapper class for a non-direct Java ByteBuffer.
488  *
489  * This class is not intended to be used across JNI calls.
490  */
class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
public:
    // Does not take ownership of inBuf; the reference must outlive this
    // object. May leave an OutOfMemoryError pending if the scratch array
    // cannot be allocated.
    JniInputByteBuffer(JNIEnv* env, jobject inBuf);

    // No-op; the buffer needs no opening.
    status_t open();

    // No-op; the buffer needs no closing.
    status_t close();

    // Reads up to count bytes into buf + offset via ByteBuffer's bulk get.
    // Returns the number of bytes read, or BAD_VALUE if a Java exception
    // (e.g. BufferUnderflowException) is pending.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    virtual ~JniInputByteBuffer();
private:
    enum {
        // Size of the scratch Java byte array used for chunked reads.
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mInBuf;          // not owned
    JNIEnv* mEnv;            // only valid on the constructing thread
    jbyteArray mByteArray;   // local ref, released in the destructor
};
510 
JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
    // Scratch array used to marshal bytes out of the Java ByteBuffer.
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputByteBuffer::~JniInputByteBuffer() {
    // Release the local reference to the scratch array.
    mEnv->DeleteLocalRef(mByteArray);
}
521 
read(uint8_t * buf,size_t offset,size_t count)522 ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
523     jint realCount = BYTE_ARRAY_LENGTH;
524     if (count < BYTE_ARRAY_LENGTH) {
525         realCount = count;
526     }
527 
528     jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
529             mByteArray, 0, realCount);
530     mEnv->DeleteLocalRef(chainingBuf);
531 
532     if (mEnv->ExceptionCheck()) {
533         ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
534         return BAD_VALUE;
535     }
536 
537     mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
538     if (mEnv->ExceptionCheck()) {
539         ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
540         return BAD_VALUE;
541     }
542     return realCount;
543 }
544 
open()545 status_t JniInputByteBuffer::open() {
546     // Do nothing
547     return OK;
548 }
549 
close()550 status_t JniInputByteBuffer::close() {
551     // Do nothing
552     return OK;
553 }
554 
555 // End of JniInputByteBuffer
556 // ----------------------------------------------------------------------------
557 
558 /**
559  * StripSource subclass for Input types.
560  *
561  * This class is not intended to be used across JNI calls.
562  */
563 
class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
public:
    // Does not take ownership of input; it must outlive this object.
    // offset is the number of bytes to skip in the input before pixel data.
    InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
            uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
            uint32_t samplesPerPixel);

    virtual ~InputStripSource();

    // Streams exactly count bytes of pixel data to the output; count must
    // equal width * height * bytesPerSample * samplesPerPixel.
    virtual status_t writeToStream(Output& stream, uint32_t count);

    // IFD this strip belongs to.
    virtual uint32_t getIfd() const;
protected:
    uint32_t mIfd;
    Input* mInput;           // not owned
    uint32_t mWidth;
    uint32_t mHeight;
    uint32_t mPixStride;     // bytes between consecutive pixels in a row
    uint32_t mRowStride;     // bytes between consecutive rows
    uint64_t mOffset;        // bytes to skip before the first pixel
    JNIEnv* mEnv;            // only valid on the constructing thread
    uint32_t mBytesPerSample;
    uint32_t mSamplesPerPixel;
};
587 
InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
        uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
        uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
        mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
        mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

InputStripSource::~InputStripSource() {}
596 
/**
 * Stream mHeight rows of pixel data from the wrapped Input to the given
 * output stream, after skipping mOffset bytes of leading input.
 *
 * count must equal the total image size in bytes
 * (width * height * bytesPerSample * samplesPerPixel); otherwise an
 * IllegalStateException is thrown and BAD_VALUE is returned.
 *
 * On short reads/skips (EOF) an IOException is thrown and NOT_ENOUGH_DATA
 * returned; other I/O failures also throw IOException (unless a JNI
 * exception is already pending) and return a negative status.
 *
 * Only contiguous pixels (pixStride == bytesPerSample * samplesPerPixel)
 * are supported.
 */
status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
    jlong offset = mOffset;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }

    // Skip offset
    while (offset > 0) {
        ssize_t skipped = mInput->skip(offset);
        if (skipped <= 0) {
            if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
                // EOF before reaching the pixel data.
                jniThrowExceptionFmt(mEnv, "java/io/IOException",
                        "Early EOF encountered in skip, not enough pixel data for image of size %u",
                        fullSize);
                skipped = NOT_ENOUGH_DATA;
            } else {
                // Some other failure; throw IOException unless the JNI call
                // already left an exception pending.
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException",
                            "Error encountered while skip bytes in input stream.");
                }
            }

            return skipped;
        }
        offset -= skipped;
    }

    // Scratch buffer holding one full row (mRowStride bytes).
    Vector<uint8_t> row;
    if (row.resize(mRowStride) < 0) {
        jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
        return BAD_VALUE;
    }

    uint8_t* rowBytes = row.editArray();

    for (uint32_t i = 0; i < mHeight; ++i) {
        size_t rowFillAmt = 0;
        size_t rowSize = mRowStride;

        // Fill the row buffer; read() may legitimately return short counts.
        while (rowFillAmt < mRowStride) {
            ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
            if (bytesRead <= 0) {
                if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
                    // EOF mid-image.
                    ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
                            __FUNCTION__, i, bytesRead);
                    jniThrowExceptionFmt(mEnv, "java/io/IOException",
                            "Early EOF encountered, not enough pixel data for image of size %"
                            PRIu32, fullSize);
                    bytesRead = NOT_ENOUGH_DATA;
                } else {
                    if (!mEnv->ExceptionCheck()) {
                        jniThrowException(mEnv, "java/io/IOException",
                                "Error encountered while reading");
                    }
                }
                return bytesRead;
            }
            rowFillAmt += bytesRead;
            rowSize -= bytesRead;
        }

        if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
            ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);

            // Write only the pixel bytes of the row, not any row padding.
            if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        } else {
            ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
            jniThrowException(mEnv, "java/lang/IllegalStateException",
                    "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
            return BAD_VALUE;

            // TODO: Add support for non-contiguous pixels if needed.
        }
    }
    return OK;
}

// IFD this strip belongs to.
uint32_t InputStripSource::getIfd() const {
    return mIfd;
}
688 
689 // End of InputStripSource
690 // ----------------------------------------------------------------------------
691 
692 /**
693  * StripSource subclass for direct buffer types.
694  *
695  * This class is not intended to be used across JNI calls.
696  */
697 
698 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
699 public:
700     DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
701             uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
702             uint32_t bytesPerSample, uint32_t samplesPerPixel);
703 
704     virtual ~DirectStripSource();
705 
706     virtual status_t writeToStream(Output& stream, uint32_t count);
707 
708     virtual uint32_t getIfd() const;
709 protected:
710     uint32_t mIfd;
711     const uint8_t* mPixelBytes;
712     uint32_t mWidth;
713     uint32_t mHeight;
714     uint32_t mPixStride;
715     uint32_t mRowStride;
716     uint16_t mOffset;
717     JNIEnv* mEnv;
718     uint32_t mBytesPerSample;
719     uint32_t mSamplesPerPixel;
720 };
721 
DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
            uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
            uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
            mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
            mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
            mSamplesPerPixel(samplesPerPixel) {}

DirectStripSource::~DirectStripSource() {}
730 
/**
 * Stream the strip's pixel data from the in-memory buffer to the given
 * output stream.
 *
 * count must equal the total image size in bytes
 * (width * height * bytesPerSample * samplesPerPixel); otherwise an
 * IllegalStateException is thrown and BAD_VALUE is returned.
 *
 * Fully-packed data is written in a single pass; row-padded data is
 * written row by row. Per-pixel strides are unsupported and throw
 * IllegalStateException.
 */
status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }


    if (mPixStride == mBytesPerSample * mSamplesPerPixel
            && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
        // Pixels and rows are both contiguous: write everything at once.
        ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);

        if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
            if (!mEnv->ExceptionCheck()) {
                jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
            }
            return BAD_VALUE;
        }
    } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
        // Pixels contiguous but rows padded: write each row's pixel bytes,
        // stepping by mRowStride through the buffer.
        ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);

        for (size_t i = 0; i < mHeight; ++i) {
            if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
                        mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        }
    } else {
        ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);

        jniThrowException(mEnv, "java/lang/IllegalStateException",
                "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
        return BAD_VALUE;

        // TODO: Add support for non-contiguous pixels if needed.
    }
    return OK;

}

// IFD this strip belongs to.
uint32_t DirectStripSource::getIfd() const {
    return mIfd;
}
780 
781 // End of DirectStripSource
782 // ----------------------------------------------------------------------------
783 
784 // Get the appropriate tag corresponding to default / maximum resolution mode.
getAppropriateModeTag(int32_t tag,bool maximumResolution)785 static int32_t getAppropriateModeTag(int32_t tag, bool maximumResolution) {
786     if (!maximumResolution) {
787         return tag;
788     }
789     switch (tag) {
790         case ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE:
791             return ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION;
792         case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
793             return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
794         case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
795             return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
796         default:
797             ALOGE("%s: Tag %d doesn't have sensor info related maximum resolution counterpart",
798                   __FUNCTION__, tag);
799             return -1;
800     }
801 }
802 
isMaximumResolutionModeImage(const CameraMetadata & characteristics,uint32_t imageWidth,uint32_t imageHeight,const sp<TiffWriter> writer,JNIEnv * env)803 static bool isMaximumResolutionModeImage(const CameraMetadata& characteristics, uint32_t imageWidth,
804                                          uint32_t imageHeight, const sp<TiffWriter> writer,
805                                          JNIEnv* env) {
806     // If this isn't an ultra-high resolution sensor, return false;
807     camera_metadata_ro_entry capabilitiesEntry =
808             characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
809     size_t capsCount = capabilitiesEntry.count;
810     const uint8_t* caps = capabilitiesEntry.data.u8;
811     if (std::find(caps, caps + capsCount,
812                   ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) ==
813         caps + capsCount) {
814         // not an ultra-high resolution sensor, cannot have a maximum resolution
815         // mode image.
816         return false;
817     }
818 
819     // If the image width and height are either the maximum resolution
820     // pre-correction active array size or the maximum resolution pixel array
821     // size, this image is a maximum resolution RAW_SENSOR image.
822 
823     // Check dimensions
824     camera_metadata_ro_entry entry = characteristics.find(
825             ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION);
826 
827     BAIL_IF_EMPTY_RET_BOOL(entry, env, TAG_IMAGEWIDTH, writer);
828 
829     uint32_t preWidth = static_cast<uint32_t>(entry.data.i32[2]);
830     uint32_t preHeight = static_cast<uint32_t>(entry.data.i32[3]);
831 
832     camera_metadata_ro_entry pixelArrayEntry =
833             characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION);
834 
835     BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
836 
837     uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
838     uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
839 
840     return (imageWidth == preWidth && imageHeight == preHeight) ||
841             (imageWidth == pixWidth && imageHeight == pixHeight);
842 }
843 
844 /**
845  * Calculate the default crop relative to the "active area" of the image sensor (this active area
846  * will always be the pre-correction active area rectangle), and set this.
847  */
calculateAndSetCrop(JNIEnv * env,const CameraMetadata & characteristics,sp<TiffWriter> writer,bool maximumResolutionMode)848 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
849                                     sp<TiffWriter> writer, bool maximumResolutionMode) {
850     camera_metadata_ro_entry entry = characteristics.find(
851             getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
852                                   maximumResolutionMode));
853     BAIL_IF_EMPTY_RET_STATUS(entry, env, TAG_IMAGEWIDTH, writer);
854     uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
855     uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
856 
857     const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
858 
859     if (width < margin * 2 || height < margin * 2) {
860         ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
861                 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
862         jniThrowException(env, "java/lang/IllegalStateException",
863                 "Pre-correction active area is too small.");
864         return BAD_VALUE;
865     }
866 
867     uint32_t defaultCropOrigin[] = {margin, margin};
868     uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
869                                   height - defaultCropOrigin[1] - margin};
870 
871     BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
872             TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
873     BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
874             TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
875 
876     return OK;
877 }
878 
validateDngHeader(JNIEnv * env,sp<TiffWriter> writer,const CameraMetadata & characteristics,jint width,jint height)879 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
880         const CameraMetadata& characteristics, jint width, jint height) {
881     if (width <= 0) {
882         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
883                         "Image width %d is invalid", width);
884         return false;
885     }
886 
887     if (height <= 0) {
888         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
889                         "Image height %d is invalid", height);
890         return false;
891     }
892     bool isMaximumResolutionMode =
893             isMaximumResolutionModeImage(characteristics, static_cast<uint32_t>(width),
894                                          static_cast<uint32_t>(height), writer, env);
895 
896     camera_metadata_ro_entry preCorrectionEntry = characteristics.find(
897             getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
898                                   isMaximumResolutionMode));
899     BAIL_IF_EMPTY_RET_BOOL(preCorrectionEntry, env, TAG_IMAGEWIDTH, writer);
900 
901     camera_metadata_ro_entry pixelArrayEntry = characteristics.find(
902             getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, isMaximumResolutionMode));
903     BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
904 
905     int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
906     int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
907     int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
908     int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
909 
910     bool matchesPixelArray = (pWidth == width && pHeight == height);
911     bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
912 
913     if (!(matchesPixelArray || matchesPreCorrectionArray)) {
914         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
915                         "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
916                         "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
917                         width, height, pWidth, pHeight, cWidth, cHeight);
918         return false;
919     }
920 
921     return true;
922 }
923 
924 /**
925  * Write CFA pattern for given CFA enum into cfaOut.  cfaOut must have length >= 4.
926  * Returns OK on success, or a negative error code if the CFA enum was invalid.
927  */
convertCFA(uint8_t cfaEnum,uint8_t * cfaOut)928 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
929     camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
930             static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
931             cfaEnum);
932     switch(cfa) {
933         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
934             cfaOut[0] = 0;
935             cfaOut[1] = 1;
936             cfaOut[2] = 1;
937             cfaOut[3] = 2;
938             break;
939         }
940         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
941             cfaOut[0] = 1;
942             cfaOut[1] = 0;
943             cfaOut[2] = 2;
944             cfaOut[3] = 1;
945             break;
946         }
947         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
948             cfaOut[0] = 1;
949             cfaOut[1] = 2;
950             cfaOut[2] = 0;
951             cfaOut[3] = 1;
952             break;
953         }
954         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
955             cfaOut[0] = 2;
956             cfaOut[1] = 1;
957             cfaOut[2] = 1;
958             cfaOut[3] = 0;
959             break;
960         }
961         // MONO and NIR are degenerate case of RGGB pattern: only Red channel
962         // will be used.
963         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
964         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
965             cfaOut[0] = 0;
966             break;
967         }
968         default: {
969             return BAD_VALUE;
970         }
971     }
972     return OK;
973 }
974 
975 /**
976  * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
977  * RGGB for an unknown enum.
978  */
convertCFAEnumToOpcodeLayout(uint8_t cfaEnum)979 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
980     camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
981             static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
982             cfaEnum);
983     switch(cfa) {
984         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
985             return OpcodeListBuilder::CFA_RGGB;
986         }
987         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
988             return OpcodeListBuilder::CFA_GRBG;
989         }
990         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
991             return OpcodeListBuilder::CFA_GBRG;
992         }
993         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
994             return OpcodeListBuilder::CFA_BGGR;
995         }
996         default: {
997             return OpcodeListBuilder::CFA_RGGB;
998         }
999     }
1000 }
1001 
1002 /**
1003  * For each color plane, find the corresponding noise profile coefficients given in the
1004  * per-channel noise profile.  If multiple channels in the CFA correspond to a color in the color
1005  * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
1006  *
1007  * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
1008  * cfa - numChannels color channels corresponding to each of the per-channel noise profile
1009  *       coefficients.
1010  * numChannels - the number of noise profile coefficient pairs and color channels given in
1011  *       the perChannelNoiseProfile and cfa arguments, respectively.
1012  * planeColors - the color planes in the noise profile output.
1013  * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
1014  * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
1015  *
1016  * returns OK, or a negative error code on failure.
1017  */
generateNoiseProfile(const double * perChannelNoiseProfile,uint8_t * cfa,size_t numChannels,const uint8_t * planeColors,size_t numPlanes,double * noiseProfile)1018 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
1019         size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
1020         /*out*/double* noiseProfile) {
1021 
1022     for (size_t p = 0; p < numPlanes; ++p) {
1023         size_t S = p * 2;
1024         size_t O = p * 2 + 1;
1025 
1026         noiseProfile[S] = 0;
1027         noiseProfile[O] = 0;
1028         bool uninitialized = true;
1029         for (size_t c = 0; c < numChannels; ++c) {
1030             if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
1031                 noiseProfile[S] = perChannelNoiseProfile[c * 2];
1032                 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
1033                 uninitialized = false;
1034             }
1035         }
1036         if (uninitialized) {
1037             ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
1038                   __FUNCTION__, p);
1039             return BAD_VALUE;
1040         }
1041     }
1042     return OK;
1043 }
1044 
/**
 * Map the point (x, y) through the 6-coefficient lens distortion polynomial in
 * 'distortion' (4 radial terms k0..k3 and 2 tangential terms p1, p2), writing the
 * result back in place.  (cx, cy) is the distortion center and f the normalization
 * factor used to convert to and from normalized coordinates.
 */
static void undistort(/*inout*/double& x, /*inout*/double& y,
        const std::array<float, 6>& distortion,
        const float cx, const float cy, const float f) {
    // Normalize relative to the distortion center.
    const double nx = (x - cx) / f;
    const double ny = (y - cy) / f;

    const double nx2 = nx * nx;
    const double ny2 = ny * ny;
    const double r2 = nx2 + ny2;
    const double cross = 2.0 * nx * ny;

    const float k0 = distortion[0];
    const float k1 = distortion[1];
    const float k2 = distortion[2];
    const float k3 = distortion[3];
    const float p1 = distortion[4];
    const float p2 = distortion[5];

    // Horner evaluation of k0 + k1*r^2 + k2*r^4 + k3*r^6.
    const double radial = k0 + ((k3 * r2 + k2) * r2 + k1) * r2;
    const double outX = nx * radial + p1 * cross + p2 * (r2 + 2.0 * nx2);
    const double outY = ny * radial + p1 * (r2 + 2.0 * ny2) + p2 * cross;

    // Denormalize back to sensor coordinates.
    x = outX * f + cx;
    y = outY * f + cy;
}
1071 
unDistortWithinPreCorrArray(double x,double y,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1072 static inline bool unDistortWithinPreCorrArray(
1073         double x, double y,
1074         const std::array<float, 6>& distortion,
1075         const float cx, const float cy, const float f,
1076         const int preCorrW, const int preCorrH, const int xMin, const int yMin) {
1077     undistort(x, y, distortion, cx, cy, f);
1078     // xMin and yMin are inclusive, and xMax and yMax are exclusive.
1079     int xMax = xMin + preCorrW;
1080     int yMax = yMin + preCorrH;
1081     if (x < xMin || y < yMin || x >= xMax || y >= yMax) {
1082         return false;
1083     }
1084     return true;
1085 }
1086 
boxWithinPrecorrectionArray(int left,int top,int right,int bottom,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1087 static inline bool boxWithinPrecorrectionArray(
1088         int left, int top, int right, int bottom,
1089         const std::array<float, 6>& distortion,
1090         const float cx, const float cy, const float f,
1091         const int preCorrW, const int preCorrH, const int xMin, const int yMin){
1092     // Top row
1093     if (!unDistortWithinPreCorrArray(left, top,
1094             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1095         return false;
1096     }
1097 
1098     if (!unDistortWithinPreCorrArray(cx, top,
1099             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1100         return false;
1101     }
1102 
1103     if (!unDistortWithinPreCorrArray(right, top,
1104             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1105         return false;
1106     }
1107 
1108     // Middle row
1109     if (!unDistortWithinPreCorrArray(left, cy,
1110             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1111         return false;
1112     }
1113 
1114     if (!unDistortWithinPreCorrArray(right, cy,
1115             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1116         return false;
1117     }
1118 
1119     // Bottom row
1120     if (!unDistortWithinPreCorrArray(left, bottom,
1121             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1122         return false;
1123     }
1124 
1125     if (!unDistortWithinPreCorrArray(cx, bottom,
1126             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1127         return false;
1128     }
1129 
1130     if (!unDistortWithinPreCorrArray(right, bottom,
1131             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1132         return false;
1133     }
1134     return true;
1135 }
1136 
scaledBoxWithinPrecorrectionArray(double scale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1137 static inline bool scaledBoxWithinPrecorrectionArray(
1138         double scale/*must be <= 1.0*/,
1139         const std::array<float, 6>& distortion,
1140         const float cx, const float cy, const float f,
1141         const int preCorrW, const int preCorrH,
1142         const int xMin, const int yMin){
1143 
1144     double left = cx * (1.0 - scale);
1145     double right = (preCorrW - 1) * scale + cx * (1.0 - scale);
1146     double top = cy * (1.0 - scale);
1147     double bottom = (preCorrH - 1) * scale + cy * (1.0 - scale);
1148 
1149     return boxWithinPrecorrectionArray(left, top, right, bottom,
1150             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin);
1151 }
1152 
findPostCorrectionScale(double stepSize,double minScale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin,double * outScale)1153 static status_t findPostCorrectionScale(
1154         double stepSize, double minScale,
1155         const std::array<float, 6>& distortion,
1156         const float cx, const float cy, const float f,
1157         const int preCorrW, const int preCorrH, const int xMin, const int yMin,
1158         /*out*/ double* outScale) {
1159     if (outScale == nullptr) {
1160         ALOGE("%s: outScale must not be null", __FUNCTION__);
1161         return BAD_VALUE;
1162     }
1163 
1164     for (double scale = 1.0; scale > minScale; scale -= stepSize) {
1165         if (scaledBoxWithinPrecorrectionArray(
1166                 scale, distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1167             *outScale = scale;
1168             return OK;
1169         }
1170     }
1171     ALOGE("%s: cannot find cropping scale for lens distortion: stepSize %f, minScale %f",
1172             __FUNCTION__, stepSize, minScale);
1173     return BAD_VALUE;
1174 }
1175 
1176 // Apply a scale factor to distortion coefficients so that the image is zoomed out and all pixels
1177 // are sampled within the precorrection array
normalizeLensDistortion(std::array<float,6> & distortion,float cx,float cy,float f,int preCorrW,int preCorrH,int xMin=0,int yMin=0)1178 static void normalizeLensDistortion(
1179         /*inout*/std::array<float, 6>& distortion,
1180         float cx, float cy, float f, int preCorrW, int preCorrH, int xMin = 0, int yMin = 0) {
1181     ALOGV("%s: distortion [%f, %f, %f, %f, %f, %f], (cx,cy) (%f, %f), f %f, (W,H) (%d, %d)"
1182             ", (xmin, ymin, xmax, ymax) (%d, %d, %d, %d)",
1183             __FUNCTION__, distortion[0], distortion[1], distortion[2],
1184             distortion[3], distortion[4], distortion[5],
1185             cx, cy, f, preCorrW, preCorrH,
1186             xMin, yMin, xMin + preCorrW - 1, yMin + preCorrH - 1);
1187 
1188     // Only update distortion coeffients if we can find a good bounding box
1189     double scale = 1.0;
1190     if (OK == findPostCorrectionScale(0.002, 0.5,
1191             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin,
1192             /*out*/&scale)) {
1193         ALOGV("%s: scaling distortion coefficients by %f", __FUNCTION__, scale);
1194         // The formula:
1195         // xc = xi * (k0 + k1*r^2 + k2*r^4 + k3*r^6) + k4 * (2*xi*yi) + k5 * (r^2 + 2*xi^2)
1196         // To create effective zoom we want to replace xi by xi *m, yi by yi*m and r^2 by r^2*m^2
1197         // Factor the extra m power terms into k0~k6
1198         std::array<float, 6> scalePowers = {1, 3, 5, 7, 2, 2};
1199         for (size_t i = 0; i < 6; i++) {
1200             distortion[i] *= pow(scale, scalePowers[i]);
1201         }
1202     }
1203     return;
1204 }
1205 
1206 // ----------------------------------------------------------------------------
1207 extern "C" {
1208 
DngCreator_getNativeContext(JNIEnv * env,jobject thiz)1209 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
1210     ALOGV("%s:", __FUNCTION__);
1211     return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
1212             gDngCreatorClassInfo.mNativeContext));
1213 }
1214 
// Store the given context in the Java object's mNativeContext field, releasing any
// previously stored context.  Passing a null sp clears the field.
static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
    ALOGV("%s:", __FUNCTION__);
    NativeContext* current = DngCreator_getNativeContext(env, thiz);

    // Take a strong reference on the incoming context BEFORE dropping the old one,
    // so that re-setting the same context cannot transiently drop its refcount to
    // zero and destroy it.
    if (context != nullptr) {
        context->incStrong((void*) DngCreator_setNativeContext);
    }

    if (current) {
        current->decStrong((void*) DngCreator_setNativeContext);
    }

    // Store the raw pointer in the Java field; ownership is tracked by the manual
    // incStrong/decStrong calls above, not by this sp local.
    env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
            reinterpret_cast<jlong>(context.get()));
}
1230 
DngCreator_nativeClassInit(JNIEnv * env,jclass clazz)1231 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
1232     ALOGV("%s:", __FUNCTION__);
1233 
1234     gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
1235             clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
1236 
1237     jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
1238     gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
1239             outputStreamClazz, "write", "([BII)V");
1240 
1241     jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
1242     gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
1243     gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1244 
1245     jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1246     gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1247             inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1248 }
1249 
DngCreator_init(JNIEnv * env,jobject thiz,jobject characteristicsPtr,jobject resultsPtr,jstring formattedCaptureTime)1250 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1251         jobject resultsPtr, jstring formattedCaptureTime) {
1252     ALOGV("%s:", __FUNCTION__);
1253     CameraMetadata characteristics;
1254     CameraMetadata results;
1255     if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1256          jniThrowException(env, "java/lang/AssertionError",
1257                 "No native metadata defined for camera characteristics.");
1258          return;
1259     }
1260     if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1261         jniThrowException(env, "java/lang/AssertionError",
1262                 "No native metadata defined for capture results.");
1263         return;
1264     }
1265 
1266     sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1267 
1268     ScopedUtfChars captureTime(env, formattedCaptureTime);
1269     if (captureTime.size() + 1 != NativeContext::DATETIME_COUNT) {
1270         jniThrowException(env, "java/lang/IllegalArgumentException",
1271                 "Formatted capture time string length is not required 20 characters");
1272         return;
1273     }
1274 
1275     nativeContext->setCaptureTime(String8(captureTime.c_str()));
1276 
1277     DngCreator_setNativeContext(env, thiz, nativeContext);
1278 }
1279 
DngCreator_setup(JNIEnv * env,jobject thiz,uint32_t imageWidth,uint32_t imageHeight)1280 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1281         uint32_t imageHeight) {
1282 
1283     NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1284 
1285     if (nativeContext == nullptr) {
1286         jniThrowException(env, "java/lang/AssertionError",
1287                 "No native context, must call init before other operations.");
1288         return nullptr;
1289     }
1290 
1291     CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1292     CameraMetadata results = *(nativeContext->getResult());
1293 
1294     sp<TiffWriter> writer = new TiffWriter();
1295 
1296     uint32_t preXMin = 0;
1297     uint32_t preYMin = 0;
1298     uint32_t preWidth = 0;
1299     uint32_t preHeight = 0;
1300     uint8_t colorFilter = 0;
1301     bool isBayer = true;
1302     bool isMaximumResolutionMode =
1303             isMaximumResolutionModeImage(characteristics, imageWidth, imageHeight, writer, env);
1304     {
1305         // Check dimensions
1306         camera_metadata_entry entry = characteristics.find(
1307                 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1308                                       isMaximumResolutionMode));
1309         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1310         preXMin = static_cast<uint32_t>(entry.data.i32[0]);
1311         preYMin = static_cast<uint32_t>(entry.data.i32[1]);
1312         preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1313         preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1314 
1315         camera_metadata_entry pixelArrayEntry =
1316                 characteristics.find(getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1317                                                            isMaximumResolutionMode));
1318 
1319         BAIL_IF_EMPTY_RET_NULL_SP(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
1320         uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1321         uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1322 
1323         if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1324                 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1325             jniThrowException(env, "java/lang/AssertionError",
1326                               "Height and width of image buffer did not match height and width of"
1327                               " either the preCorrectionActiveArraySize or the pixelArraySize.");
1328             return nullptr;
1329         }
1330 
1331         camera_metadata_entry colorFilterEntry =
1332                 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1333         colorFilter = colorFilterEntry.data.u8[0];
1334         camera_metadata_entry capabilitiesEntry =
1335                 characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1336         size_t capsCount = capabilitiesEntry.count;
1337         uint8_t* caps = capabilitiesEntry.data.u8;
1338         if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
1339                 != caps+capsCount) {
1340             isBayer = false;
1341         } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
1342                 colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
1343             jniThrowException(env, "java/lang/AssertionError",
1344                     "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
1345             return nullptr;
1346         }
1347     }
1348 
1349     writer->addIfd(TIFF_IFD_0);
1350 
1351     status_t err = OK;
1352 
1353     const uint32_t samplesPerPixel = 1;
1354     const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1355 
1356     OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
1357     uint8_t cfaPlaneColor[3] = {0, 1, 2};
1358     camera_metadata_entry cfaEntry =
1359             characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1360     BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
1361     uint8_t cfaEnum = cfaEntry.data.u8[0];
1362 
1363     // TODO: Greensplit.
1364     // TODO: Add remaining non-essential tags
1365 
1366     // Setup main image tags
1367 
1368     {
1369         // Set orientation
1370         uint16_t orientation = TAG_ORIENTATION_NORMAL;
1371         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1372                 env, TAG_ORIENTATION, writer);
1373     }
1374 
1375     {
1376         // Set subfiletype
1377         uint32_t subfileType = 0; // Main image
1378         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1379                 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1380     }
1381 
1382     {
1383         // Set bits per sample
1384         uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1385         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1386                 TAG_BITSPERSAMPLE, writer);
1387     }
1388 
1389     {
1390         // Set compression
1391         uint16_t compression = 1; // None
1392         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1393                 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1394     }
1395 
1396     {
1397         // Set dimensions
1398         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1399                 env, TAG_IMAGEWIDTH, writer);
1400         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1401                 env, TAG_IMAGELENGTH, writer);
1402     }
1403 
1404     {
1405         // Set photometric interpretation
1406         uint16_t interpretation = isBayer ? 32803 /* CFA */ :
1407                 34892; /* Linear Raw */;
1408         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1409                 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1410     }
1411 
1412     {
1413         uint16_t repeatDim[2] = {2, 2};
1414         if (!isBayer) {
1415             repeatDim[0] = repeatDim[1] = 1;
1416         }
1417         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1418                 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1419 
1420         // Set blacklevel tags, using dynamic black level if available
1421         camera_metadata_entry entry =
1422                 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
1423         uint32_t blackLevelRational[8] = {0};
1424         if (entry.count != 0) {
1425             BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1426             for (size_t i = 0; i < entry.count; i++) {
1427                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
1428                 blackLevelRational[i * 2 + 1] = 100;
1429             }
1430         } else {
1431             // Fall back to static black level which is guaranteed
1432             entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1433             BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1434             for (size_t i = 0; i < entry.count; i++) {
1435                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
1436                 blackLevelRational[i * 2 + 1] = 1;
1437             }
1438         }
1439         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
1440                 blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1441     }
1442 
1443     {
1444         // Set samples per pixel
1445         uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1446         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1447                 env, TAG_SAMPLESPERPIXEL, writer);
1448     }
1449 
1450     {
1451         // Set planar configuration
1452         uint16_t config = 1; // Chunky
1453         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1454                 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1455     }
1456 
1457     // All CFA pattern tags are not necessary for monochrome cameras.
1458     if (isBayer) {
1459         // Set CFA pattern dimensions
1460         uint16_t repeatDim[2] = {2, 2};
1461         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1462                 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1463 
1464         // Set CFA pattern
1465         const int cfaLength = 4;
1466         uint8_t cfa[cfaLength];
1467         if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1468             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1469                         "Invalid metadata for tag %d", TAG_CFAPATTERN);
1470         }
1471 
1472         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1473                 env, TAG_CFAPATTERN, writer);
1474 
1475         opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1476 
1477         // Set CFA plane color
1478         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1479                 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1480 
1481         // Set CFA layout
1482         uint16_t cfaLayout = 1;
1483         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1484                 env, TAG_CFALAYOUT, writer);
1485     }
1486 
1487     {
1488         // image description
1489         uint8_t imageDescription = '\0'; // empty
1490         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1491                 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1492     }
1493 
1494     {
1495         // make
1496         // Use "" to represent unknown make as suggested in TIFF/EP spec.
1497         std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1498         uint32_t count = static_cast<uint32_t>(manufacturer.size()) + 1;
1499 
1500         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1501                 reinterpret_cast<const uint8_t*>(manufacturer.c_str()), TIFF_IFD_0), env, TAG_MAKE,
1502                 writer);
1503     }
1504 
1505     {
1506         // model
1507         // Use "" to represent unknown model as suggested in TIFF/EP spec.
1508         std::string model = GetProperty("ro.product.model", "");
1509         uint32_t count = static_cast<uint32_t>(model.size()) + 1;
1510 
1511         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1512                 reinterpret_cast<const uint8_t*>(model.c_str()), TIFF_IFD_0), env, TAG_MODEL,
1513                 writer);
1514     }
1515 
1516     {
1517         // x resolution
1518         uint32_t xres[] = { 72, 1 }; // default 72 ppi
1519         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1520                 env, TAG_XRESOLUTION, writer);
1521 
1522         // y resolution
1523         uint32_t yres[] = { 72, 1 }; // default 72 ppi
1524         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1525                 env, TAG_YRESOLUTION, writer);
1526 
1527         uint16_t unit = 2; // inches
1528         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1529                 env, TAG_RESOLUTIONUNIT, writer);
1530     }
1531 
1532     {
1533         // software
1534         std::string software = GetProperty("ro.build.fingerprint", "");
1535         uint32_t count = static_cast<uint32_t>(software.size()) + 1;
1536         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1537                 reinterpret_cast<const uint8_t*>(software.c_str()), TIFF_IFD_0), env, TAG_SOFTWARE,
1538                 writer);
1539     }
1540 
    if (nativeContext->hasCaptureTime()) {
        // datetime
        String8 captureTime = nativeContext->getCaptureTime();

        // Written without the BAIL macro so the thrown message matches this
        // path; behavior on failure is otherwise the same (throw + nullptr).
        if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
                             reinterpret_cast<const uint8_t*>(captureTime.c_str()),
                             TIFF_IFD_0) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
                    "Invalid metadata for tag %x", TAG_DATETIME);
            return nullptr;
        }

        // datetime original; the same capture timestamp is used for both tags.
        if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
                             reinterpret_cast<const uint8_t*>(captureTime.c_str()),
                             TIFF_IFD_0) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
                    "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
            return nullptr;
        }
    }

    {
        // TIFF/EP standard id
        // Declares conformance with version 1.0.0.0 of the TIFF/EP standard.
        uint8_t standardId[] = { 1, 0, 0, 0 };
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
                TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
    }

    {
        // copyright
        uint8_t copyright = '\0'; // empty ASCII string (NUL only)
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
                TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
    }
1576 
1577     {
1578         // exposure time
1579         camera_metadata_entry entry =
1580             results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1581         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1582 
1583         int64_t exposureTime = *(entry.data.i64);
1584 
1585         if (exposureTime < 0) {
1586             // Should be unreachable
1587             jniThrowException(env, "java/lang/IllegalArgumentException",
1588                     "Negative exposure time in metadata");
1589             return nullptr;
1590         }
1591 
1592         // Ensure exposure time doesn't overflow (for exposures > 4s)
1593         uint32_t denominator = 1000000000;
1594         while (exposureTime > UINT32_MAX) {
1595             exposureTime >>= 1;
1596             denominator >>= 1;
1597             if (denominator == 0) {
1598                 // Should be unreachable
1599                 jniThrowException(env, "java/lang/IllegalArgumentException",
1600                         "Exposure time too long");
1601                 return nullptr;
1602             }
1603         }
1604 
1605         uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1606         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1607                 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1608 
1609     }
1610 
1611     {
1612         // ISO speed ratings
1613         camera_metadata_entry entry =
1614             results.find(ANDROID_SENSOR_SENSITIVITY);
1615         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1616 
1617         int32_t tempIso = *(entry.data.i32);
1618         if (tempIso < 0) {
1619             jniThrowException(env, "java/lang/IllegalArgumentException",
1620                                     "Negative ISO value");
1621             return nullptr;
1622         }
1623 
1624         if (tempIso > UINT16_MAX) {
1625             ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1626             tempIso = UINT16_MAX;
1627         }
1628 
1629         uint16_t iso = static_cast<uint16_t>(tempIso);
1630         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1631                 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1632     }
1633 
1634     {
1635         // Baseline exposure
1636         camera_metadata_entry entry =
1637                 results.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
1638         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
1639 
1640         // post RAW gain should be boostValue / 100
1641         double postRAWGain = static_cast<double> (entry.data.i32[0]) / 100.f;
1642         // Baseline exposure should be in EV units so log2(gain) =
1643         // log10(gain)/log10(2)
1644         double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
1645         int32_t baseExposureSRat[] = { static_cast<int32_t> (baselineExposure * 100),
1646                 100 };
1647         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
1648                 baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
1649     }
1650 
1651     {
1652         // focal length
1653         camera_metadata_entry entry =
1654             results.find(ANDROID_LENS_FOCAL_LENGTH);
1655         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1656 
1657         uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1658         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1659                 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1660     }
1661 
1662     {
1663         // f number
1664         camera_metadata_entry entry =
1665             results.find(ANDROID_LENS_APERTURE);
1666         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1667 
1668         uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1669         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1670                 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1671     }
1672 
1673     {
1674         // Set DNG version information
1675         uint8_t version[4] = {1, 4, 0, 0};
1676         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1677                 env, TAG_DNGVERSION, writer);
1678 
1679         uint8_t backwardVersion[4] = {1, 1, 0, 0};
1680         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1681                 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1682     }
1683 
1684     {
1685         // Set whitelevel
1686         camera_metadata_entry entry =
1687                 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1688         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1689         uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1690         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1691                 env, TAG_WHITELEVEL, writer);
1692     }
1693 
1694     {
1695         // Set default scale
1696         uint32_t defaultScale[4] = {1, 1, 1, 1};
1697         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1698                 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1699     }
1700 
    bool singleIlluminant = false;
    if (isBayer) {
        // Set calibration illuminants
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
        camera_metadata_entry entry2 =
            characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
        // The second illuminant (and the matching *2 matrices below) is optional.
        if (entry2.count == 0) {
            singleIlluminant = true;
        }
        uint16_t ref1 = entry1.data.u8[0];

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
                TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);

        if (!singleIlluminant) {
            uint16_t ref2 = entry2.data.u8[0];
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
                    TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
        }
    }

    if (isBayer) {
        // Set color transforms
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);

        // Flatten the rational matrix into interleaved {numerator, denominator}
        // pairs for the SRATIONAL tag. (Runtime-sized array is a VLA — relies on
        // a compiler extension; entry counts here are small.)
        int32_t colorTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            colorTransform1[ctr++] = entry1.data.r[i].numerator;
            colorTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
                colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
            int32_t colorTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                colorTransform2[ctr++] = entry2.data.r[i].numerator;
                colorTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
                    colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
        }
    }
1756 
    if (isBayer) {
        // Set calibration transforms
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);

        // Interleave {numerator, denominator} pairs for the SRATIONAL tag.
        int32_t calibrationTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
            calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
                calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 =
                characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
            int32_t calibrationTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
                calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
                    calibrationTransform2, TIFF_IFD_0),  env, TAG_CAMERACALIBRATION2, writer);
        }
    }

    if (isBayer) {
        // Set forward transforms
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);

        // Same interleaved SRATIONAL layout as the matrices above.
        int32_t forwardTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            forwardTransform1[ctr++] = entry1.data.r[i].numerator;
            forwardTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
                forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 =
                characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
            int32_t forwardTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                forwardTransform2[ctr++] = entry2.data.r[i].numerator;
                forwardTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
                    forwardTransform2, TIFF_IFD_0),  env, TAG_FORWARDMATRIX2, writer);
        }
    }

    if (isBayer) {
        // Set camera neutral
        // AsShotNeutral comes from the per-capture result, unlike the static
        // matrices above which come from the camera characteristics.
        camera_metadata_entry entry =
            results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
        uint32_t cameraNeutral[entry.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry.count; ++i) {
            cameraNeutral[ctr++] =
                    static_cast<uint32_t>(entry.data.r[i].numerator);
            cameraNeutral[ctr++] =
                    static_cast<uint32_t>(entry.data.r[i].denominator);
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
                TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
    }
1843 
1844 
1845     {
1846         // Set dimensions
1847         if (calculateAndSetCrop(env, characteristics, writer, isMaximumResolutionMode) != OK) {
1848             return nullptr;
1849         }
1850         camera_metadata_entry entry = characteristics.find(
1851                 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1852                                       isMaximumResolutionMode));
1853         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1854         uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1855         uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1856         uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1857         uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1858 
1859         // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1860         // relative to the pixel array.
1861         if (imageWidth == width && imageHeight == height) {
1862             xmin = 0;
1863             ymin = 0;
1864         }
1865 
1866         uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1867         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1868                 env, TAG_ACTIVEAREA, writer);
1869     }
1870 
1871     {
1872         // Setup unique camera model tag
1873         std::string model = GetProperty("ro.product.model", "");
1874         std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1875         std::string brand = GetProperty("ro.product.brand", "");
1876 
1877         String8 cameraModel(model.c_str());
1878         cameraModel += "-";
1879         cameraModel += manufacturer.c_str();
1880         cameraModel += "-";
1881         cameraModel += brand.c_str();
1882 
1883         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1884                                                      reinterpret_cast<const uint8_t*>(
1885                                                              cameraModel.c_str()),
1886                                                      TIFF_IFD_0),
1887                                     env, TAG_UNIQUECAMERAMODEL, writer);
1888     }
1889 
1890     {
1891         // Setup sensor noise model
1892         camera_metadata_entry entry =
1893             results.find(ANDROID_SENSOR_NOISE_PROFILE);
1894 
1895         const unsigned long numPlaneColors = isBayer ? 3 : 1;
1896         const unsigned long numCfaChannels = isBayer ? 4 : 1;
1897 
1898         uint8_t cfaOut[numCfaChannels];
1899         if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1900             jniThrowException(env, "java/lang/IllegalArgumentException",
1901                     "Invalid CFA from camera characteristics");
1902             return nullptr;
1903         }
1904 
1905         double noiseProfile[numPlaneColors * 2];
1906 
1907         if (entry.count > 0) {
1908             if (entry.count != numCfaChannels * 2) {
1909                 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1910                       "in characteristics, no noise profile tag written...",
1911                       __FUNCTION__, entry.count);
1912             } else {
1913                 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1914                         cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1915 
1916                     BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1917                             numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1918                             writer);
1919                 } else {
1920                     ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1921                             " tag written...", __FUNCTION__);
1922                 }
1923             }
1924         } else {
1925             ALOGW("%s: No noise profile found in result metadata.  Image quality may be reduced.",
1926                     __FUNCTION__);
1927         }
1928     }
1929 
1930     {
1931         // Set up opcode List 2
1932         OpcodeListBuilder builder;
1933         status_t err = OK;
1934 
1935         // Set up lens shading map
1936         camera_metadata_entry entry1 =
1937                 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1938 
1939         uint32_t lsmWidth = 0;
1940         uint32_t lsmHeight = 0;
1941 
1942         if (entry1.count != 0) {
1943             lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1944             lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1945         }
1946 
1947         camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1948 
1949         camera_metadata_entry entry = characteristics.find(
1950                 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1951                                       isMaximumResolutionMode));
1952         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1953         uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1954         uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1955         uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1956         uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1957         if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1958             // GainMap rectangle is relative to the active area origin.
1959             err = builder.addGainMapsForMetadata(lsmWidth,
1960                                                  lsmHeight,
1961                                                  0,
1962                                                  0,
1963                                                  height,
1964                                                  width,
1965                                                  opcodeCfaLayout,
1966                                                  entry2.data.f);
1967             if (err != OK) {
1968                 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1969                 jniThrowRuntimeException(env, "failed to add lens shading map.");
1970                 return nullptr;
1971             }
1972         }
1973 
1974         // Hot pixel map is specific to bayer camera per DNG spec.
1975         if (isBayer) {
1976             // Set up bad pixel correction list
1977             // We first check the capture result. If the hot pixel map is not
1978             // available, as a fallback, try the static characteristics.
1979             camera_metadata_entry entry3 = results.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1980             if (entry3.count == 0) {
1981                 entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1982             }
1983 
1984             if ((entry3.count % 2) != 0) {
1985                 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
1986                         __FUNCTION__);
1987                 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1988                 return nullptr;
1989             }
1990 
1991             // Adjust the bad pixel coordinates to be relative to the origin of the active area
1992             // DNG tag
1993             std::vector<uint32_t> v;
1994             for (size_t i = 0; i < entry3.count; i += 2) {
1995                 int32_t x = entry3.data.i32[i];
1996                 int32_t y = entry3.data.i32[i + 1];
1997                 x -= static_cast<int32_t>(xmin);
1998                 y -= static_cast<int32_t>(ymin);
1999                 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
2000                         static_cast<uint32_t>(y) >= height) {
2001                     continue;
2002                 }
2003                 v.push_back(x);
2004                 v.push_back(y);
2005             }
2006             const uint32_t* badPixels = &v[0];
2007             uint32_t badPixelCount = v.size();
2008 
2009             if (badPixelCount > 0) {
2010                 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
2011 
2012                 if (err != OK) {
2013                     ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
2014                     jniThrowRuntimeException(env, "failed to add hotpixel map.");
2015                     return nullptr;
2016                 }
2017             }
2018         }
2019 
2020         if (builder.getCount() > 0) {
2021             size_t listSize = builder.getSize();
2022             uint8_t opcodeListBuf[listSize];
2023             err = builder.buildOpList(opcodeListBuf);
2024             if (err == OK) {
2025                 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize,
2026                         opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
2027             } else {
2028                 ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel "
2029                         "correction.", __FUNCTION__);
2030                 jniThrowRuntimeException(env, "failed to construct opcode list for lens shading "
2031                         "map and bad pixel correction");
2032                 return nullptr;
2033             }
2034         }
2035     }
2036 
2037     {
2038         // Set up opcode List 3
2039         OpcodeListBuilder builder;
2040         status_t err = OK;
2041 
2042         // Set up rectilinear distortion correction
2043         std::array<float, 6> distortion = {1.f, 0.f, 0.f, 0.f, 0.f, 0.f};
2044         bool gotDistortion = false;
2045 
2046         // The capture result would have the correct intrinsic calibration
2047         // regardless of the sensor pixel mode.
2048         camera_metadata_entry entry4 =
2049                 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
2050 
2051         if (entry4.count == 5) {
2052             float cx = entry4.data.f[/*c_x*/2];
2053             float cy = entry4.data.f[/*c_y*/3];
2054             // Assuming f_x = f_y, or at least close enough.
2055             // Also assuming s = 0, or at least close enough.
2056             float f = entry4.data.f[/*f_x*/0];
2057 
2058             camera_metadata_entry entry3 =
2059                     results.find(ANDROID_LENS_DISTORTION);
2060             if (entry3.count == 5) {
2061                 gotDistortion = true;
2062 
2063                 // Scale the distortion coefficients to create a zoom in warpped image so that all
2064                 // pixels are drawn within input image.
2065                 for (size_t i = 0; i < entry3.count; i++) {
2066                     distortion[i+1] = entry3.data.f[i];
2067                 }
2068 
2069                 if (preWidth == imageWidth && preHeight == imageHeight) {
2070                     normalizeLensDistortion(distortion, cx, cy, f, preWidth, preHeight);
2071                 } else {
2072                     // image size == pixel array size (contains optical black pixels)
2073                     // cx/cy is defined in preCorrArray so adding the offset
2074                     // Also changes default xmin/ymin so that pixels are only
2075                     // sampled within preCorrection array
2076                     normalizeLensDistortion(
2077                             distortion, cx + preXMin, cy + preYMin, f, preWidth, preHeight,
2078                             preXMin, preYMin);
2079                 }
2080 
2081                 float m_x = std::fmaxf(preWidth - cx, cx);
2082                 float m_y = std::fmaxf(preHeight - cy, cy);
2083                 float m_sq = m_x*m_x + m_y*m_y;
2084                 float m = sqrtf(m_sq); // distance to farthest corner from optical center
2085                 float f_sq = f * f;
2086                 // Conversion factors from Camera2 K factors for new LENS_DISTORTION field
2087                 // to DNG spec.
2088                 //
2089                 //       Camera2 / OpenCV assume distortion is applied in a space where focal length
2090                 //       is factored out, while DNG assumes a normalized space where the distance
2091                 //       from optical center to the farthest corner is 1.
2092                 //       Scale from camera2 to DNG spec accordingly.
2093                 //       distortion[0] is always 1 with the new LENS_DISTORTION field.
2094                 const double convCoeff[5] = {
2095                     m_sq / f_sq,
2096                     pow(m_sq, 2) / pow(f_sq, 2),
2097                     pow(m_sq, 3) / pow(f_sq, 3),
2098                     m / f,
2099                     m / f
2100                 };
2101                 for (size_t i = 0; i < entry3.count; i++) {
2102                     distortion[i+1] *= convCoeff[i];
2103                 }
2104             } else {
2105                 entry3 = results.find(ANDROID_LENS_RADIAL_DISTORTION);
2106                 if (entry3.count == 6) {
2107                     gotDistortion = true;
2108                     // Conversion factors from Camera2 K factors to DNG spec. K factors:
2109                     //
2110                     //      Note: these are necessary because our unit system assumes a
2111                     //      normalized max radius of sqrt(2), whereas the DNG spec's
2112                     //      WarpRectilinear opcode assumes a normalized max radius of 1.
2113                     //      Thus, each K coefficient must include the domain scaling
2114                     //      factor (the DNG domain is scaled by sqrt(2) to emulate the
2115                     //      domain used by the Camera2 specification).
2116                     const double convCoeff[6] = {
2117                         sqrt(2),
2118                         2 * sqrt(2),
2119                         4 * sqrt(2),
2120                         8 * sqrt(2),
2121                         2,
2122                         2
2123                     };
2124                     for (size_t i = 0; i < entry3.count; i++) {
2125                         distortion[i] = entry3.data.f[i] * convCoeff[i];
2126                     }
2127                 }
2128             }
2129             if (gotDistortion) {
2130                 err = builder.addWarpRectilinearForMetadata(
2131                         distortion.data(), preWidth, preHeight, cx, cy);
2132                 if (err != OK) {
2133                     ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
2134                     jniThrowRuntimeException(env, "failed to add distortion correction.");
2135                     return nullptr;
2136                 }
2137             }
2138         }
2139 
2140         if (builder.getCount() > 0) {
2141             size_t listSize = builder.getSize();
2142             uint8_t opcodeListBuf[listSize];
2143             err = builder.buildOpList(opcodeListBuf);
2144             if (err == OK) {
2145                 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize,
2146                         opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
2147             } else {
2148                 ALOGE("%s: Could not build list of opcodes for distortion correction.",
2149                         __FUNCTION__);
2150                 jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
2151                         " correction");
2152                 return nullptr;
2153             }
2154         }
2155     }
2156 
2157     {
2158         // Set up orientation tags.
2159         // Note: There's only one orientation field for the whole file, in IFD0
2160         // The main image and any thumbnails therefore have the same orientation.
2161         uint16_t orientation = nativeContext->getOrientation();
2162         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
2163                 env, TAG_ORIENTATION, writer);
2164 
2165     }
2166 
2167     if (nativeContext->hasDescription()){
2168         // Set Description
2169         String8 description = nativeContext->getDescription();
2170         size_t len = description.bytes() + 1;
2171         if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
2172                              reinterpret_cast<const uint8_t*>(description.c_str()),
2173                              TIFF_IFD_0) != OK) {
2174             jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
2175                     "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
2176         }
2177     }
2178 
2179     if (nativeContext->hasGpsData()) {
2180         // Set GPS tags
2181         GpsData gpsData = nativeContext->getGpsData();
2182         if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
2183             if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
2184                 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
2185                         TIFF_IFD_0);
2186                 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
2187                 return nullptr;
2188             }
2189         }
2190 
2191         {
2192             uint8_t version[] = {2, 3, 0, 0};
2193             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
2194                     TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
2195         }
2196 
2197         {
2198             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
2199                     GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
2200                     TAG_GPSLATITUDEREF, writer);
2201         }
2202 
2203         {
2204             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
2205                     GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
2206                     TAG_GPSLONGITUDEREF, writer);
2207         }
2208 
2209         {
2210             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
2211                     TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
2212         }
2213 
2214         {
2215             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
2216                     TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
2217         }
2218 
2219         {
2220             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
2221                     TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
2222         }
2223 
2224         {
2225             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
2226                     GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
2227                     TAG_GPSDATESTAMP, writer);
2228         }
2229     }
2230 
2231 
2232     if (nativeContext->hasThumbnail()) {
2233         if (!writer->hasIfd(TIFF_IFD_SUB1)) {
2234             if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
2235                 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
2236                         TIFF_IFD_0);
2237                 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
2238                 return nullptr;
2239             }
2240         }
2241 
2242         // Setup thumbnail tags
2243 
2244         {
2245             // Set photometric interpretation
2246             uint16_t interpretation = 2; // RGB
2247             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
2248                     &interpretation, TIFF_IFD_SUB1), env, TAG_PHOTOMETRICINTERPRETATION, writer);
2249         }
2250 
2251         {
2252             // Set planar configuration
2253             uint16_t config = 1; // Chunky
2254             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
2255                     TIFF_IFD_SUB1), env, TAG_PLANARCONFIGURATION, writer);
2256         }
2257 
2258         {
2259             // Set samples per pixel
2260             uint16_t samples = SAMPLES_PER_RGB_PIXEL;
2261             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
2262                     TIFF_IFD_SUB1), env, TAG_SAMPLESPERPIXEL, writer);
2263         }
2264 
2265         {
2266             // Set bits per sample
2267             uint16_t bits[SAMPLES_PER_RGB_PIXEL];
2268             for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
2269             BAIL_IF_INVALID_RET_NULL_SP(
2270                     writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_SUB1),
2271                     env, TAG_BITSPERSAMPLE, writer);
2272         }
2273 
2274         {
2275             // Set subfiletype
2276             uint32_t subfileType = 1; // Thumbnail image
2277             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
2278                     TIFF_IFD_SUB1), env, TAG_NEWSUBFILETYPE, writer);
2279         }
2280 
2281         {
2282             // Set compression
2283             uint16_t compression = 1; // None
2284             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
2285                     TIFF_IFD_SUB1), env, TAG_COMPRESSION, writer);
2286         }
2287 
2288         {
2289             // Set dimensions
2290             uint32_t uWidth = nativeContext->getThumbnailWidth();
2291             uint32_t uHeight = nativeContext->getThumbnailHeight();
2292             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_SUB1),
2293                     env, TAG_IMAGEWIDTH, writer);
2294             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight,
2295                     TIFF_IFD_SUB1), env, TAG_IMAGELENGTH, writer);
2296         }
2297 
2298         {
2299             // x resolution
2300             uint32_t xres[] = { 72, 1 }; // default 72 ppi
2301             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_SUB1),
2302                     env, TAG_XRESOLUTION, writer);
2303 
2304             // y resolution
2305             uint32_t yres[] = { 72, 1 }; // default 72 ppi
2306             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_SUB1),
2307                     env, TAG_YRESOLUTION, writer);
2308 
2309             uint16_t unit = 2; // inches
2310             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit,
2311                     TIFF_IFD_SUB1), env, TAG_RESOLUTIONUNIT, writer);
2312         }
2313     }
2314 
2315     if (writer->addStrip(TIFF_IFD_0) != OK) {
2316         ALOGE("%s: Could not setup main image strip tags.", __FUNCTION__);
2317         jniThrowException(env, "java/lang/IllegalStateException",
2318                 "Failed to setup main image strip tags.");
2319         return nullptr;
2320     }
2321 
2322     if (writer->hasIfd(TIFF_IFD_SUB1)) {
2323         if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
2324             ALOGE("%s: Could not thumbnail image strip tags.", __FUNCTION__);
2325             jniThrowException(env, "java/lang/IllegalStateException",
2326                     "Failed to setup thumbnail image strip tags.");
2327             return nullptr;
2328         }
2329     }
2330     return writer;
2331 }
2332 
// Tears down the native side of a Java DngCreator instance.
static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
    ALOGV("%s:", __FUNCTION__);
    // Replacing the stored context with nullptr detaches it from the Java object;
    // NOTE(review): presumably DngCreator_setNativeContext releases the previous
    // context's reference — confirm in its definition earlier in this file.
    DngCreator_setNativeContext(env, thiz, nullptr);
}
2337 
DngCreator_nativeSetOrientation(JNIEnv * env,jobject thiz,jint orient)2338 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
2339     ALOGV("%s:", __FUNCTION__);
2340 
2341     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2342     if (context == nullptr) {
2343         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2344         jniThrowException(env, "java/lang/AssertionError",
2345                 "setOrientation called with uninitialized DngCreator");
2346         return;
2347     }
2348 
2349     uint16_t orientation = static_cast<uint16_t>(orient);
2350     context->setOrientation(orientation);
2351 }
2352 
DngCreator_nativeSetDescription(JNIEnv * env,jobject thiz,jstring description)2353 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2354     ALOGV("%s:", __FUNCTION__);
2355 
2356     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2357     if (context == nullptr) {
2358         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2359         jniThrowException(env, "java/lang/AssertionError",
2360                 "setDescription called with uninitialized DngCreator");
2361         return;
2362     }
2363 
2364     const char* desc = env->GetStringUTFChars(description, nullptr);
2365     context->setDescription(String8(desc));
2366     env->ReleaseStringUTFChars(description, desc);
2367 }
2368 
// Validates and stores the GPS EXIF fields (lat/long rationals, refs, date, time)
// into the native context; they are later emitted into TIFF_IFD_GPSINFO by
// DngCreator_setup. Each of latTag/longTag/timeTag must contain exactly
// GpsData::GPS_VALUE_LENGTH ints (three EXIF rationals as numerator/denominator
// pairs is the usual layout — assumption, confirm against GpsData's declaration).
static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
        jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setGpsTags called with uninitialized DngCreator");
        return;
    }

    GpsData data;

    // Reject any array whose length differs from the fixed GPS value length
    // before copying, so the region copies below cannot over-read.
    jsize latLen = env->GetArrayLength(latTag);
    jsize longLen = env->GetArrayLength(longTag);
    jsize timeLen = env->GetArrayLength(timeTag);
    if (latLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid latitude tag length");
        return;
    } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid longitude tag length");
        return;
    } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid time tag length");
        return;
    }

    // Bulk-copy the jint arrays straight into the GpsData members.
    // NOTE(review): the reinterpret_casts assume mLatitude/mLongitude/mTimestamp
    // are arrays of GPS_VALUE_LENGTH 32-bit values with jint-compatible layout —
    // confirm against the GpsData declaration earlier in this file.
    env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLatitude));
    env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLongitude));
    env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mTimestamp));


    // The ref strings are single characters ('N'/'S', 'E'/'W'); copy one char and
    // force NUL termination in the last slot of the GPS_REF_LENGTH buffer.
    // GetStringUTFRegion does not write a terminator itself.
    env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
    data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
    data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    // Date string: copy GPS_DATE_LENGTH - 1 chars and NUL-terminate; callers must
    // pass a string at least that long or GetStringUTFRegion raises
    // StringIndexOutOfBoundsException.
    env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
            reinterpret_cast<char*>(&data.mDate));
    data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';

    context->setGpsData(data);
}
2418 
DngCreator_nativeSetThumbnail(JNIEnv * env,jobject thiz,jobject buffer,jint width,jint height)2419 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2420         jint height) {
2421     ALOGV("%s:", __FUNCTION__);
2422 
2423     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2424     if (context == nullptr) {
2425         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2426         jniThrowException(env, "java/lang/AssertionError",
2427                 "setThumbnail called with uninitialized DngCreator");
2428         return;
2429     }
2430 
2431     size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2432     jlong capacity = env->GetDirectBufferCapacity(buffer);
2433     if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2434         jniThrowExceptionFmt(env, "java/lang/AssertionError",
2435                 "Invalid size %d for thumbnail, expected size was %d",
2436                 capacity, fullSize);
2437         return;
2438     }
2439 
2440     uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2441     if (pixelBytes == nullptr) {
2442         ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2443         jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2444         return;
2445     }
2446 
2447     if (!context->setThumbnail(pixelBytes, width, height)) {
2448         jniThrowException(env, "java/lang/IllegalStateException",
2449                 "Failed to set thumbnail.");
2450         return;
2451     }
2452 }
2453 
2454 // TODO: Refactor out common preamble for the two nativeWrite methods.
DngCreator_nativeWriteImage(JNIEnv * env,jobject thiz,jobject outStream,jint width,jint height,jobject inBuffer,jint rowStride,jint pixStride,jlong offset,jboolean isDirect)2455 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2456         jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2457         jboolean isDirect) {
2458     ALOGV("%s:", __FUNCTION__);
2459     ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2460           "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2461           height, rowStride, pixStride, offset);
2462     uint32_t rStride = static_cast<uint32_t>(rowStride);
2463     uint32_t pStride = static_cast<uint32_t>(pixStride);
2464     uint32_t uWidth = static_cast<uint32_t>(width);
2465     uint32_t uHeight = static_cast<uint32_t>(height);
2466     uint64_t uOffset = static_cast<uint64_t>(offset);
2467 
2468     sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2469     if(env->ExceptionCheck()) {
2470         ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2471         return;
2472     }
2473 
2474     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2475     if (context == nullptr) {
2476         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2477         jniThrowException(env, "java/lang/AssertionError",
2478                 "Write called with uninitialized DngCreator");
2479         return;
2480     }
2481     sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2482 
2483     if (writer.get() == nullptr) {
2484         return;
2485     }
2486 
2487     // Validate DNG size
2488     if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2489         return;
2490     }
2491 
2492     sp<JniInputByteBuffer> inBuf;
2493     Vector<StripSource*> sources;
2494     sp<DirectStripSource> thumbnailSource;
2495     uint32_t targetIfd = TIFF_IFD_0;
2496     bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2497     if (hasThumbnail) {
2498         ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2499         uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2500         uint32_t thumbWidth = context->getThumbnailWidth();
2501         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_SUB1,
2502                 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2503                 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2504                 SAMPLES_PER_RGB_PIXEL);
2505     }
2506 
2507     if (isDirect) {
2508         size_t fullSize = rStride * uHeight;
2509         jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2510         if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2511             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2512                     "Invalid size %d for Image, size given in metadata is %d at current stride",
2513                     capacity, fullSize);
2514             return;
2515         }
2516 
2517         uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2518         if (pixelBytes == nullptr) {
2519             ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2520             jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2521             return;
2522         }
2523 
2524         ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2525         DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2526                 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2527         sources.add(&stripSource);
2528         if (thumbnailSource.get() != nullptr) {
2529             sources.add(thumbnailSource.get());
2530         }
2531 
2532         status_t ret = OK;
2533         if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2534             ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2535             if (!env->ExceptionCheck()) {
2536                 jniThrowExceptionFmt(env, "java/io/IOException",
2537                         "Encountered error %d while writing file.", ret);
2538             }
2539             return;
2540         }
2541     } else {
2542         inBuf = new JniInputByteBuffer(env, inBuffer);
2543 
2544         ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2545         InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2546                  rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2547         sources.add(&stripSource);
2548         if (thumbnailSource.get() != nullptr) {
2549             sources.add(thumbnailSource.get());
2550         }
2551 
2552         status_t ret = OK;
2553         if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2554             ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2555             if (!env->ExceptionCheck()) {
2556                 jniThrowExceptionFmt(env, "java/io/IOException",
2557                         "Encountered error %d while writing file.", ret);
2558             }
2559             return;
2560         }
2561     }
2562 
2563 }
2564 
DngCreator_nativeWriteInputStream(JNIEnv * env,jobject thiz,jobject outStream,jobject inStream,jint width,jint height,jlong offset)2565 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2566         jobject inStream, jint width, jint height, jlong offset) {
2567     ALOGV("%s:", __FUNCTION__);
2568 
2569     uint32_t rowStride = width * BYTES_PER_SAMPLE;
2570     uint32_t pixStride = BYTES_PER_SAMPLE;
2571     uint32_t uWidth = static_cast<uint32_t>(width);
2572     uint32_t uHeight = static_cast<uint32_t>(height);
2573     uint64_t uOffset = static_cast<uint32_t>(offset);
2574 
2575     ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2576           "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2577           height, rowStride, pixStride, offset);
2578 
2579     sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2580     if (env->ExceptionCheck()) {
2581         ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2582         return;
2583     }
2584 
2585     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2586     if (context == nullptr) {
2587         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2588         jniThrowException(env, "java/lang/AssertionError",
2589                 "Write called with uninitialized DngCreator");
2590         return;
2591     }
2592     sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2593 
2594     if (writer.get() == nullptr) {
2595         return;
2596     }
2597 
2598     // Validate DNG size
2599     if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2600         return;
2601     }
2602 
2603     sp<DirectStripSource> thumbnailSource;
2604     uint32_t targetIfd = TIFF_IFD_0;
2605     Vector<StripSource*> sources;
2606 
2607 
2608     sp<JniInputStream> in = new JniInputStream(env, inStream);
2609 
2610     ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2611     InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2612              rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2613     sources.add(&stripSource);
2614 
2615     bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2616     if (hasThumbnail) {
2617         ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2618         uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2619         uint32_t width = context->getThumbnailWidth();
2620         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_SUB1,
2621                 width, context->getThumbnailHeight(), bytesPerPixel,
2622                 bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2623                 SAMPLES_PER_RGB_PIXEL);
2624         sources.add(thumbnailSource.get());
2625     }
2626 
2627     status_t ret = OK;
2628     if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2629         ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2630         if (!env->ExceptionCheck()) {
2631             jniThrowExceptionFmt(env, "java/io/IOException",
2632                     "Encountered error %d while writing file.", ret);
2633         }
2634         return;
2635     }
2636 }
2637 
2638 } /*extern "C" */
2639 
// JNI method table mapping android.hardware.camera2.DngCreator native methods
// to the C++ implementations above. The descriptor strings must match the
// Java-side native method declarations exactly.
static const JNINativeMethod gDngCreatorMethods[] = {
    {"nativeClassInit",        "()V", (void*) DngCreator_nativeClassInit},
    {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
            "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
            (void*) DngCreator_init},
    {"nativeDestroy",           "()V",      (void*) DngCreator_destroy},
    {"nativeSetOrientation",    "(I)V",     (void*) DngCreator_nativeSetOrientation},
    {"nativeSetDescription",    "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
    {"nativeSetGpsTags",    "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
            (void*) DngCreator_nativeSetGpsTags},
    {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
    {"nativeWriteImage",        "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
            (void*) DngCreator_nativeWriteImage},
    {"nativeWriteInputStream",    "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
            (void*) DngCreator_nativeWriteInputStream},
};
2656 
register_android_hardware_camera2_DngCreator(JNIEnv * env)2657 int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
2658     return RegisterMethodsOrDie(env,
2659             "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
2660 }
2661