1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <memory>
23 #include <vector>
24 #include <cmath>
25 
26 #include <android-base/properties.h>
27 #include <utils/Log.h>
28 #include <utils/Errors.h>
29 #include <utils/StrongPointer.h>
30 #include <utils/RefBase.h>
31 #include <utils/Vector.h>
32 #include <utils/String8.h>
33 #include <system/camera_metadata.h>
34 #include <camera/CameraMetadata.h>
35 #include <img_utils/DngUtils.h>
36 #include <img_utils/TagDefinitions.h>
37 #include <img_utils/TiffIfd.h>
38 #include <img_utils/TiffWriter.h>
39 #include <img_utils/Output.h>
40 #include <img_utils/Input.h>
41 #include <img_utils/StripSource.h>
42 
43 #include "core_jni_helpers.h"
44 
45 #include "android_runtime/AndroidRuntime.h"
46 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
47 
48 #include <jni.h>
49 #include <nativehelper/JNIHelp.h>
50 
51 using namespace android;
52 using namespace img_utils;
53 using android::base::GetProperty;
54 
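// Helper macros: on failure these throw a Java IllegalArgumentException naming the offending
// TIFF tag, then bail out of the enclosing function with the appropriate error value.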
55 #define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
56     if ((expr) != OK) { \
57         jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
58                 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
59         return false; \
60     }
61 
62 
63 #define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
64     if ((expr) != OK) { \
65         jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
66                 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
67         return nullptr; \
68     }
69 
70 
71 #define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
72     if ((expr) != OK) { \
73         jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
74                 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
75         return -1; \
76     }
77 
78 #define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
79     if ((entry).count == 0) { \
80         jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
81                 "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
82         return nullptr; \
83     }
84 
85 #define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
86     if (expr) { \
87         jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
88                 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
89         return nullptr; \
90     }
91 
92 
93 #define ANDROID_DNGCREATOR_CTX_JNI_ID     "mNativeContext"
94 
95 static struct {
96     jfieldID mNativeContext;
97 } gDngCreatorClassInfo;
98 
99 static struct {
100     jmethodID mWriteMethod;
101 } gOutputStreamClassInfo;
102 
103 static struct {
104     jmethodID mReadMethod;
105     jmethodID mSkipMethod;
106 } gInputStreamClassInfo;
107 
108 static struct {
109     jmethodID mGetMethod;
110 } gInputByteBufferClassInfo;
111 
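// Layout constants for the RAW16 main image and RGB888 thumbnail, plus the TIFF IFD indices
// used by the writer.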
112 enum {
113     BITS_PER_SAMPLE = 16,
114     BYTES_PER_SAMPLE = 2,
115     BYTES_PER_RGB_PIXEL = 3,
116     BITS_PER_RGB_SAMPLE = 8,
117     BYTES_PER_RGB_SAMPLE = 1,
118     SAMPLES_PER_RGB_PIXEL = 3,
119     SAMPLES_PER_RAW_PIXEL = 1,
120     TIFF_IFD_0 = 0,
121     TIFF_IFD_SUB1 = 1,
122     TIFF_IFD_GPSINFO = 2,
123 };
124 
125 
126 /**
127  * POD container class for GPS tag data.
128  */
129 class GpsData {
130 public:
131     enum {
132         GPS_VALUE_LENGTH = 6,
133         GPS_REF_LENGTH = 2,
134         GPS_DATE_LENGTH = 11,
135     };
136 
137     uint32_t mLatitude[GPS_VALUE_LENGTH];
138     uint32_t mLongitude[GPS_VALUE_LENGTH];
139     uint32_t mTimestamp[GPS_VALUE_LENGTH];
140     uint8_t mLatitudeRef[GPS_REF_LENGTH];
141     uint8_t mLongitudeRef[GPS_REF_LENGTH];
142     uint8_t mDate[GPS_DATE_LENGTH];
143 };
144 
145 // ----------------------------------------------------------------------------
146 
147 /**
148  * Container class for the persistent native context.
149  */
150 
151 class NativeContext : public LightRefBase<NativeContext> {
152 public:
153     enum {
154         DATETIME_COUNT = 20,
155     };
156 
157     NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
158     virtual ~NativeContext();
159 
160     TiffWriter* getWriter();
161 
162     std::shared_ptr<const CameraMetadata> getCharacteristics() const;
163     std::shared_ptr<const CameraMetadata> getResult() const;
164 
165     uint32_t getThumbnailWidth() const;
166     uint32_t getThumbnailHeight() const;
167     const uint8_t* getThumbnail() const;
168     bool hasThumbnail() const;
169 
170     bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);
171 
172     void setOrientation(uint16_t orientation);
173     uint16_t getOrientation() const;
174 
175     void setDescription(const String8& desc);
176     String8 getDescription() const;
177     bool hasDescription() const;
178 
179     void setGpsData(const GpsData& data);
180     GpsData getGpsData() const;
181     bool hasGpsData() const;
182 
183     void setCaptureTime(const String8& formattedCaptureTime);
184     String8 getCaptureTime() const;
185     bool hasCaptureTime() const;
186 
187 private:
188     Vector<uint8_t> mCurrentThumbnail;
189     TiffWriter mWriter;
190     std::shared_ptr<CameraMetadata> mCharacteristics;
191     std::shared_ptr<CameraMetadata> mResult;
192     uint32_t mThumbnailWidth;
193     uint32_t mThumbnailHeight;
194     uint16_t mOrientation;
195     bool mThumbnailSet;
196     bool mGpsSet;
197     bool mDescriptionSet;
198     bool mCaptureTimeSet;
199     String8 mDescription;
200     GpsData mGpsData;
201     String8 mFormattedCaptureTime;
202 };
203 
204 NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
205         mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
206         mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
207         mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
208         mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}
209 
210 NativeContext::~NativeContext() {}
211 
212 TiffWriter* NativeContext::getWriter() {
213     return &mWriter;
214 }
215 
216 std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
217     return mCharacteristics;
218 }
219 
220 std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
221     return mResult;
222 }
223 
224 uint32_t NativeContext::getThumbnailWidth() const {
225     return mThumbnailWidth;
226 }
227 
228 uint32_t NativeContext::getThumbnailHeight() const {
229     return mThumbnailHeight;
230 }
231 
232 const uint8_t* NativeContext::getThumbnail() const {
233     return mCurrentThumbnail.array();
234 }
235 
236 bool NativeContext::hasThumbnail() const {
237     return mThumbnailSet;
238 }
239 
240 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
241     mThumbnailWidth = width;
242     mThumbnailHeight = height;
243 
244     size_t size = BYTES_PER_RGB_PIXEL * width * height;
245     if (mCurrentThumbnail.resize(size) < 0) {
246         ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
247         return false;
248     }
249 
250     uint8_t* thumb = mCurrentThumbnail.editArray();
251     memcpy(thumb, buffer, size);
252     mThumbnailSet = true;
253     return true;
254 }
255 
256 void NativeContext::setOrientation(uint16_t orientation) {
257     mOrientation = orientation;
258 }
259 
260 uint16_t NativeContext::getOrientation() const {
261     return mOrientation;
262 }
263 
264 void NativeContext::setDescription(const String8& desc) {
265     mDescription = desc;
266     mDescriptionSet = true;
267 }
268 
269 String8 NativeContext::getDescription() const {
270     return mDescription;
271 }
272 
273 bool NativeContext::hasDescription() const {
274     return mDescriptionSet;
275 }
276 
277 void NativeContext::setGpsData(const GpsData& data) {
278     mGpsData = data;
279     mGpsSet = true;
280 }
281 
282 GpsData NativeContext::getGpsData() const {
283     return mGpsData;
284 }
285 
286 bool NativeContext::hasGpsData() const {
287     return mGpsSet;
288 }
289 
290 void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
291     mFormattedCaptureTime = formattedCaptureTime;
292     mCaptureTimeSet = true;
293 }
294 
295 String8 NativeContext::getCaptureTime() const {
296     return mFormattedCaptureTime;
297 }
298 
299 bool NativeContext::hasCaptureTime() const {
300     return mCaptureTimeSet;
301 }
302 
303 // End of NativeContext
304 // ----------------------------------------------------------------------------
305 
306 /**
307  * Wrapper class for a Java OutputStream.
308  *
309  * This class is not intended to be used across JNI calls.
310  */
311 class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
312 public:
313     JniOutputStream(JNIEnv* env, jobject outStream);
314 
315     virtual ~JniOutputStream();
316 
317     status_t open();
318 
319     status_t write(const uint8_t* buf, size_t offset, size_t count);
320 
321     status_t close();
322 private:
323     enum {
324         BYTE_ARRAY_LENGTH = 4096
325     };
326     jobject mOutputStream;
327     JNIEnv* mEnv;
328     jbyteArray mByteArray;
329 };
330 
331 JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
332         mEnv(env) {
333     mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
334     if (mByteArray == nullptr) {
335         jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
336     }
337 }
338 
339 JniOutputStream::~JniOutputStream() {
340     mEnv->DeleteLocalRef(mByteArray);
341 }
342 
343 status_t JniOutputStream::open() {
344     // Do nothing
345     return OK;
346 }
347 
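// Writes are chunked through the reusable Java byte[] (BYTE_ARRAY_LENGTH bytes at a time) and
// forwarded to OutputStream.write(byte[], int, int); a pending Java exception aborts the write.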
348 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
349     while(count > 0) {
350         size_t len = BYTE_ARRAY_LENGTH;
351         len = (count > len) ? len : count;
352         mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
353 
354         if (mEnv->ExceptionCheck()) {
355             return BAD_VALUE;
356         }
357 
358         mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
359                 0, len);
360 
361         if (mEnv->ExceptionCheck()) {
362             return BAD_VALUE;
363         }
364 
365         count -= len;
366         offset += len;
367     }
368     return OK;
369 }
370 
371 status_t JniOutputStream::close() {
372     // Do nothing
373     return OK;
374 }
375 
376 // End of JniOutputStream
377 // ----------------------------------------------------------------------------
378 
379 /**
380  * Wrapper class for a Java InputStream.
381  *
382  * This class is not intended to be used across JNI calls.
383  */
384 class JniInputStream : public Input, public LightRefBase<JniInputStream> {
385 public:
386     JniInputStream(JNIEnv* env, jobject inStream);
387 
388     status_t open();
389 
390     status_t close();
391 
392     ssize_t read(uint8_t* buf, size_t offset, size_t count);
393 
394     ssize_t skip(size_t count);
395 
396     virtual ~JniInputStream();
397 private:
398     enum {
399         BYTE_ARRAY_LENGTH = 4096
400     };
401     jobject mInStream;
402     JNIEnv* mEnv;
403     jbyteArray mByteArray;
404 
405 };
406 
407 JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
408     mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
409     if (mByteArray == nullptr) {
410         jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
411     }
412 }
413 
414 JniInputStream::~JniInputStream() {
415     mEnv->DeleteLocalRef(mByteArray);
416 }
417 
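// Reads at most BYTE_ARRAY_LENGTH bytes per call via InputStream.read(byte[], int, int) and
// copies them into the caller's buffer; a -1 return from Java is reported as NOT_ENOUGH_DATA.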
418 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
419 
420     jint realCount = BYTE_ARRAY_LENGTH;
421     if (count < BYTE_ARRAY_LENGTH) {
422         realCount = count;
423     }
424     jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
425             realCount);
426 
427     if (actual < 0) {
428         return NOT_ENOUGH_DATA;
429     }
430 
431     if (mEnv->ExceptionCheck()) {
432         return BAD_VALUE;
433     }
434 
435     mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
436     if (mEnv->ExceptionCheck()) {
437         return BAD_VALUE;
438     }
439     return actual;
440 }
441 
442 ssize_t JniInputStream::skip(size_t count) {
443     jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
444             static_cast<jlong>(count));
445 
446     if (mEnv->ExceptionCheck()) {
447         return BAD_VALUE;
448     }
449     if (actual < 0) {
450         return NOT_ENOUGH_DATA;
451     }
452     return actual;
453 }
454 
455 status_t JniInputStream::open() {
456     // Do nothing
457     return OK;
458 }
459 
460 status_t JniInputStream::close() {
461     // Do nothing
462     return OK;
463 }
464 
465 // End of JniInputStream
466 // ----------------------------------------------------------------------------
467 
468 /**
469  * Wrapper class for a non-direct Java ByteBuffer.
470  *
471  * This class is not intended to be used across JNI calls.
472  */
473 class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
474 public:
475     JniInputByteBuffer(JNIEnv* env, jobject inBuf);
476 
477     status_t open();
478 
479     status_t close();
480 
481     ssize_t read(uint8_t* buf, size_t offset, size_t count);
482 
483     virtual ~JniInputByteBuffer();
484 private:
485     enum {
486         BYTE_ARRAY_LENGTH = 4096
487     };
488     jobject mInBuf;
489     JNIEnv* mEnv;
490     jbyteArray mByteArray;
491 };
492 
493 JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
494     mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
495     if (mByteArray == nullptr) {
496         jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
497     }
498 }
499 
500 JniInputByteBuffer::~JniInputByteBuffer() {
501     mEnv->DeleteLocalRef(mByteArray);
502 }
503 
504 ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
505     jint realCount = BYTE_ARRAY_LENGTH;
506     if (count < BYTE_ARRAY_LENGTH) {
507         realCount = count;
508     }
509 
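    // ByteBuffer.get(byte[], int, int) returns the buffer itself for call chaining; drop that
    // local reference immediately so repeated reads don't exhaust the local reference table.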
510     jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
511             mByteArray, 0, realCount);
512     mEnv->DeleteLocalRef(chainingBuf);
513 
514     if (mEnv->ExceptionCheck()) {
515         ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
516         return BAD_VALUE;
517     }
518 
519     mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
520     if (mEnv->ExceptionCheck()) {
521         ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
522         return BAD_VALUE;
523     }
524     return realCount;
525 }
526 
527 status_t JniInputByteBuffer::open() {
528     // Do nothing
529     return OK;
530 }
531 
532 status_t JniInputByteBuffer::close() {
533     // Do nothing
534     return OK;
535 }
536 
537 // End of JniInputByteBuffer
538 // ----------------------------------------------------------------------------
539 
540 /**
541  * StripSource subclass for Input types.
542  *
543  * This class is not intended to be used across JNI calls.
544  */
545 
546 class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
547 public:
548     InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
549             uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
550             uint32_t samplesPerPixel);
551 
552     virtual ~InputStripSource();
553 
554     virtual status_t writeToStream(Output& stream, uint32_t count);
555 
556     virtual uint32_t getIfd() const;
557 protected:
558     uint32_t mIfd;
559     Input* mInput;
560     uint32_t mWidth;
561     uint32_t mHeight;
562     uint32_t mPixStride;
563     uint32_t mRowStride;
564     uint64_t mOffset;
565     JNIEnv* mEnv;
566     uint32_t mBytesPerSample;
567     uint32_t mSamplesPerPixel;
568 };
569 
570 InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
571         uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
572         uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
573         mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
574         mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
575         mSamplesPerPixel(samplesPerPixel) {}
576 
577 InputStripSource::~InputStripSource() {}
578 
579 status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
580     uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
581     jlong offset = mOffset;
582 
583     if (fullSize != count) {
584         ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
585                 fullSize);
586         jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
587         return BAD_VALUE;
588     }
589 
590     // Skip offset
591     while (offset > 0) {
592         ssize_t skipped = mInput->skip(offset);
593         if (skipped <= 0) {
594             if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
595                 jniThrowExceptionFmt(mEnv, "java/io/IOException",
596                         "Early EOF encountered in skip, not enough pixel data for image of size %u",
597                         fullSize);
598                 skipped = NOT_ENOUGH_DATA;
599             } else {
600                 if (!mEnv->ExceptionCheck()) {
601                     jniThrowException(mEnv, "java/io/IOException",
602                             "Error encountered while skipping bytes in input stream.");
603                 }
604             }
605 
606             return skipped;
607         }
608         offset -= skipped;
609     }
610 
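    // Stream the strip one row at a time through a temporary buffer of mRowStride bytes.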
611     Vector<uint8_t> row;
612     if (row.resize(mRowStride) < 0) {
613         jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
614         return BAD_VALUE;
615     }
616 
617     uint8_t* rowBytes = row.editArray();
618 
619     for (uint32_t i = 0; i < mHeight; ++i) {
620         size_t rowFillAmt = 0;
621         size_t rowSize = mRowStride;
622 
623         while (rowFillAmt < mRowStride) {
624             ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
625             if (bytesRead <= 0) {
626                 if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
627                     ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
628                             __FUNCTION__, i, bytesRead);
629                     jniThrowExceptionFmt(mEnv, "java/io/IOException",
630                             "Early EOF encountered, not enough pixel data for image of size %"
631                             PRIu32, fullSize);
632                     bytesRead = NOT_ENOUGH_DATA;
633                 } else {
634                     if (!mEnv->ExceptionCheck()) {
635                         jniThrowException(mEnv, "java/io/IOException",
636                                 "Error encountered while reading");
637                     }
638                 }
639                 return bytesRead;
640             }
641             rowFillAmt += bytesRead;
642             rowSize -= bytesRead;
643         }
644 
645         if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
646             ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);
647 
648             if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
649                     mEnv->ExceptionCheck()) {
650                 if (!mEnv->ExceptionCheck()) {
651                     jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
652                 }
653                 return BAD_VALUE;
654             }
655         } else {
656             ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
657             jniThrowException(mEnv, "java/lang/IllegalStateException",
658                     "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
659             return BAD_VALUE;
660 
661             // TODO: Add support for non-contiguous pixels if needed.
662         }
663     }
664     return OK;
665 }
666 
667 uint32_t InputStripSource::getIfd() const {
668     return mIfd;
669 }
670 
671 // End of InputStripSource
672 // ----------------------------------------------------------------------------
673 
674 /**
675  * StripSource subclass for direct buffer types.
676  *
677  * This class is not intended to be used across JNI calls.
678  */
679 
680 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
681 public:
682     DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
683             uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
684             uint32_t bytesPerSample, uint32_t samplesPerPixel);
685 
686     virtual ~DirectStripSource();
687 
688     virtual status_t writeToStream(Output& stream, uint32_t count);
689 
690     virtual uint32_t getIfd() const;
691 protected:
692     uint32_t mIfd;
693     const uint8_t* mPixelBytes;
694     uint32_t mWidth;
695     uint32_t mHeight;
696     uint32_t mPixStride;
697     uint32_t mRowStride;
698     uint64_t mOffset;
699     JNIEnv* mEnv;
700     uint32_t mBytesPerSample;
701     uint32_t mSamplesPerPixel;
702 };
703 
704 DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
705             uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
706             uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
707             mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
708             mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
709             mSamplesPerPixel(samplesPerPixel) {}
710 
711 DirectStripSource::~DirectStripSource() {}
712 
713 status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
714     uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
715 
716     if (fullSize != count) {
717         ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
718                 fullSize);
719         jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
720         return BAD_VALUE;
721     }
722 
723 
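    // Pick the cheapest write path the pixel layout allows: one contiguous write when the buffer
    // is fully packed, otherwise one write per row when only row padding is present.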
724     if (mPixStride == mBytesPerSample * mSamplesPerPixel
725             && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
726         ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);
727 
728         if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
729             if (!mEnv->ExceptionCheck()) {
730                 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
731             }
732             return BAD_VALUE;
733         }
734     } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
735         ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);
736 
737         for (size_t i = 0; i < mHeight; ++i) {
738             if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
739                         mEnv->ExceptionCheck()) {
740                 if (!mEnv->ExceptionCheck()) {
741                     jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
742                 }
743                 return BAD_VALUE;
744             }
745         }
746     } else {
747         ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);
748 
749         jniThrowException(mEnv, "java/lang/IllegalStateException",
750                 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
751         return BAD_VALUE;
752 
753         // TODO: Add support for non-contiguous pixels if needed.
754     }
755     return OK;
756 
757 }
758 
759 uint32_t DirectStripSource::getIfd() const {
760     return mIfd;
761 }
762 
763 // End of DirectStripSource
764 // ----------------------------------------------------------------------------
765 
766 /**
767  * Calculate the default crop relative to the "active area" of the image sensor (this active area
768  * will always be the pre-correction active area rectangle), and set the default crop tags.
769  */
770 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
771         sp<TiffWriter> writer) {
772 
773     camera_metadata_ro_entry entry =
774             characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
775     uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
776     uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
777 
778     const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
779 
780     if (width < margin * 2 || height < margin * 2) {
781         ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too "
782                 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
783         jniThrowException(env, "java/lang/IllegalStateException",
784                 "Pre-correction active area is too small.");
785         return BAD_VALUE;
786     }
787 
788     uint32_t defaultCropOrigin[] = {margin, margin};
789     uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
790                                   height - defaultCropOrigin[1] - margin};
791 
792     BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
793             TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
794     BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
795             TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
796 
797     return OK;
798 }
799 
800 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
801         const CameraMetadata& characteristics, jint width, jint height) {
802     if (width <= 0) {
803         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
804                         "Image width %d is invalid", width);
805         return false;
806     }
807 
808     if (height <= 0) {
809         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
810                         "Image height %d is invalid", height);
811         return false;
812     }
813 
814     camera_metadata_ro_entry preCorrectionEntry =
815             characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
816     camera_metadata_ro_entry pixelArrayEntry =
817             characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
818 
819     int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
820     int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
821     int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
822     int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
823 
824     bool matchesPixelArray = (pWidth == width && pHeight == height);
825     bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
826 
827     if (!(matchesPixelArray || matchesPreCorrectionArray)) {
828         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
829                         "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
830                         "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
831                         width, height, pWidth, pHeight, cWidth, cHeight);
832         return false;
833     }
834 
835     return true;
836 }
837 
838 static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo,
839         const Vector<uint16_t>& entries) {
840     for (size_t i = 0; i < entries.size(); ++i) {
841         uint16_t tagId = entries[i];
842         sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom);
843         if (entry.get() == nullptr) {
844             ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId,
845                     ifdFrom);
846             return BAD_VALUE;
847         }
848         if (writer->addEntry(entry, ifdTo) != OK) {
849             ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId,
850                     ifdTo);
851             return BAD_VALUE;
852         }
853         writer->removeEntry(tagId, ifdFrom);
854     }
855     return OK;
856 }
857 
858 /**
859  * Write CFA pattern for given CFA enum into cfaOut.  cfaOut must have length >= 4.
860  * Returns OK on success, or a negative error code if the CFA enum was invalid.
861  */
862 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
863     camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
864             static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
865             cfaEnum);
866     switch(cfa) {
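        // CFAPattern values index into CFAPlaneColor {0, 1, 2} = {R, G, B} (TIFF/EP, DNG).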
867         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
868             cfaOut[0] = 0;
869             cfaOut[1] = 1;
870             cfaOut[2] = 1;
871             cfaOut[3] = 2;
872             break;
873         }
874         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
875             cfaOut[0] = 1;
876             cfaOut[1] = 0;
877             cfaOut[2] = 2;
878             cfaOut[3] = 1;
879             break;
880         }
881         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
882             cfaOut[0] = 1;
883             cfaOut[1] = 2;
884             cfaOut[2] = 0;
885             cfaOut[3] = 1;
886             break;
887         }
888         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
889             cfaOut[0] = 2;
890             cfaOut[1] = 1;
891             cfaOut[2] = 1;
892             cfaOut[3] = 0;
893             break;
894         }
895         default: {
896             return BAD_VALUE;
897         }
898     }
899     return OK;
900 }
901 
902 /**
903  * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaulting to
904  * RGGB for an unknown enum.
905  */
906 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
907     camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
908             static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
909             cfaEnum);
910     switch(cfa) {
911         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
912             return OpcodeListBuilder::CFA_RGGB;
913         }
914         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
915             return OpcodeListBuilder::CFA_GRBG;
916         }
917         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
918             return OpcodeListBuilder::CFA_GBRG;
919         }
920         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
921             return OpcodeListBuilder::CFA_BGGR;
922         }
923         default: {
924             return OpcodeListBuilder::CFA_RGGB;
925         }
926     }
927 }
928 
929 /**
930  * For each color plane, find the corresponding noise profile coefficients given in the
931  * per-channel noise profile.  If multiple channels in the CFA correspond to a color in the color
932  * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
933  *
934  * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
935  * cfa - numChannels color channels corresponding to each of the per-channel noise profile
936  *       coefficients.
937  * numChannels - the number of noise profile coefficient pairs and color channels given in
938  *       the perChannelNoiseProfile and cfa arguments, respectively.
939  * planeColors - the color planes in the noise profile output.
940  * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
941  * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
942  *
943  * returns OK, or a negative error code on failure.
944  */
945 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
946         size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
947         /*out*/double* noiseProfile) {
948 
949     for (size_t p = 0; p < numPlanes; ++p) {
950         size_t S = p * 2;
951         size_t O = p * 2 + 1;
952 
953         noiseProfile[S] = 0;
954         noiseProfile[O] = 0;
955         bool uninitialized = true;
956         for (size_t c = 0; c < numChannels; ++c) {
957             if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
958                 noiseProfile[S] = perChannelNoiseProfile[c * 2];
959                 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
960                 uninitialized = false;
961             }
962         }
963         if (uninitialized) {
964             ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
965                   __FUNCTION__, p);
966             return BAD_VALUE;
967         }
968     }
969     return OK;
970 }
971 
972 // ----------------------------------------------------------------------------
973 extern "C" {
974 
975 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
976     ALOGV("%s:", __FUNCTION__);
977     return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
978             gDngCreatorClassInfo.mNativeContext));
979 }
980 
981 static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
982     ALOGV("%s:", __FUNCTION__);
983     NativeContext* current = DngCreator_getNativeContext(env, thiz);
984 
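    // The Java object owns a strong reference to the native context: reference the new context
    // (if any) before releasing whatever was stored previously.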
985     if (context != nullptr) {
986         context->incStrong((void*) DngCreator_setNativeContext);
987     }
988 
989     if (current) {
990         current->decStrong((void*) DngCreator_setNativeContext);
991     }
992 
993     env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
994             reinterpret_cast<jlong>(context.get()));
995 }
996 
997 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
998     ALOGV("%s:", __FUNCTION__);
999 
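    // Cache field and method IDs once; they stay valid for the lifetime of the class.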
1000     gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
1001             clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
1002 
1003     jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
1004     gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
1005             outputStreamClazz, "write", "([BII)V");
1006 
1007     jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
1008     gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
1009     gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1010 
1011     jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1012     gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1013             inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1014 }
1015 
1016 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1017         jobject resultsPtr, jstring formattedCaptureTime) {
1018     ALOGV("%s:", __FUNCTION__);
1019     CameraMetadata characteristics;
1020     CameraMetadata results;
1021     if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1022          jniThrowException(env, "java/lang/AssertionError",
1023                 "No native metadata defined for camera characteristics.");
1024          return;
1025     }
1026     if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1027         jniThrowException(env, "java/lang/AssertionError",
1028                 "No native metadata defined for capture results.");
1029         return;
1030     }
1031 
1032     sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1033 
1034     const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr);
1035 
1036     size_t len = strlen(captureTime) + 1;
1037     if (len != NativeContext::DATETIME_COUNT) {
        env->ReleaseStringUTFChars(formattedCaptureTime, captureTime);
1038         jniThrowException(env, "java/lang/IllegalArgumentException",
1039                 "Formatted capture time string length is not the required 20 characters");
1040         return;
1041     }
1042 
1043     nativeContext->setCaptureTime(String8(captureTime));
    env->ReleaseStringUTFChars(formattedCaptureTime, captureTime);
1044 
1045     DngCreator_setNativeContext(env, thiz, nativeContext);
1046 }
1047 
1048 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1049         uint32_t imageHeight) {
1050 
1051     NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1052 
1053     if (nativeContext == nullptr) {
1054         jniThrowException(env, "java/lang/AssertionError",
1055                 "No native context, must call init before other operations.");
1056         return nullptr;
1057     }
1058 
1059     CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1060     CameraMetadata results = *(nativeContext->getResult());
1061 
1062     sp<TiffWriter> writer = new TiffWriter();
1063 
1064     uint32_t preWidth = 0;
1065     uint32_t preHeight = 0;
1066     {
1067         // Check dimensions
1068         camera_metadata_entry entry =
1069                 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1070         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1071         preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1072         preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1073 
1074         camera_metadata_entry pixelArrayEntry =
1075                 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
1076         uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1077         uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1078 
1079         if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1080             (imageWidth == pixWidth && imageHeight == pixHeight))) {
1081             jniThrowException(env, "java/lang/AssertionError",
1082                     "Height and width of image buffer did not match height and width of "
1083                     "either the preCorrectionActiveArraySize or the pixelArraySize.");
1084             return nullptr;
1085         }
1086     }
1087 
1088 
1089 
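    // IFD 0 initially holds the tags for the main RAW image.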
1090     writer->addIfd(TIFF_IFD_0);
1091 
1092     status_t err = OK;
1093 
1094     const uint32_t samplesPerPixel = 1;
1095     const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1096 
1097     OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB;
1098     uint8_t cfaPlaneColor[3] = {0, 1, 2};
1099     uint8_t cfaEnum = -1;
1100 
1101     // TODO: Greensplit.
1102     // TODO: Add remaining non-essential tags
1103 
1104     // Setup main image tags
1105 
1106     {
1107         // Set orientation
1108         uint16_t orientation = TAG_ORIENTATION_NORMAL;
1109         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1110                 env, TAG_ORIENTATION, writer);
1111     }
1112 
1113     {
1114         // Set subfiletype
1115         uint32_t subfileType = 0; // Main image
1116         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1117                 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1118     }
1119 
1120     {
1121         // Set bits per sample
1122         uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1123         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1124                 TAG_BITSPERSAMPLE, writer);
1125     }
1126 
1127     {
1128         // Set compression
1129         uint16_t compression = 1; // None
1130         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1131                 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1132     }
1133 
1134     {
1135         // Set dimensions
1136         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1137                 env, TAG_IMAGEWIDTH, writer);
1138         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1139                 env, TAG_IMAGELENGTH, writer);
1140     }
1141 
1142     {
1143         // Set photometric interpretation
1144         uint16_t interpretation = 32803; // CFA
1145         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1146                 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1147     }
1148 
1149     {
1150         // Set blacklevel tags, using dynamic black level if available
1151         camera_metadata_entry entry =
1152                 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
1153         uint32_t blackLevelRational[8] = {0};
1154         if (entry.count != 0) {
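            // The dynamic black level is reported as per-channel floats; encode each value as a
            // RATIONAL with denominator 100 to keep two decimal places of precision.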
1155             BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1156             for (size_t i = 0; i < entry.count; i++) {
1157                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
1158                 blackLevelRational[i * 2 + 1] = 100;
1159             }
1160         } else {
1161             // Fall back to static black level which is guaranteed
1162             entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1163             BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1164             for (size_t i = 0; i < entry.count; i++) {
1165                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
1166                 blackLevelRational[i * 2 + 1] = 1;
1167             }
1168 
1169         }
1170         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, 4, blackLevelRational,
1171                 TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1172 
1173         uint16_t repeatDim[2] = {2, 2};
1174         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1175                 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1176     }
1177 
1178     {
1179         // Set samples per pixel
1180         uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1181         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1182                 env, TAG_SAMPLESPERPIXEL, writer);
1183     }
1184 
1185     {
1186         // Set planar configuration
1187         uint16_t config = 1; // Chunky
1188         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1189                 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1190     }
1191 
1192     {
1193         // Set CFA pattern dimensions
1194         uint16_t repeatDim[2] = {2, 2};
1195         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1196                 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1197     }
1198 
1199     {
1200         // Set CFA pattern
1201         camera_metadata_entry entry =
1202                         characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1203         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer);
1204 
1205         const int cfaLength = 4;
1206         cfaEnum = entry.data.u8[0];
1207         uint8_t cfa[cfaLength];
1208         if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1209             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1210                         "Invalid metadata for tag %x", TAG_CFAPATTERN);
            return nullptr;
1211         }
1212 
1213         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1214                 env, TAG_CFAPATTERN, writer);
1215 
1216         opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1217     }
1218 
1219     {
1220         // Set CFA plane color
1221         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1222                 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1223     }
1224 
1225     {
1226         // Set CFA layout
1227         uint16_t cfaLayout = 1;
1228         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1229                 env, TAG_CFALAYOUT, writer);
1230     }
1231 
1232     {
1233         // image description
1234         uint8_t imageDescription = '\0'; // empty
1235         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1236                 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1237     }
1238 
1239     {
1240         // make
1241         // Use "" to represent unknown make as suggested in TIFF/EP spec.
1242         std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1243         uint32_t count = static_cast<uint32_t>(manufacturer.size()) + 1;
1244 
1245         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1246                 reinterpret_cast<const uint8_t*>(manufacturer.c_str()), TIFF_IFD_0), env, TAG_MAKE,
1247                 writer);
1248     }
1249 
1250     {
1251         // model
1252         // Use "" to represent unknown model as suggested in TIFF/EP spec.
1253         std::string model = GetProperty("ro.product.model", "");
1254         uint32_t count = static_cast<uint32_t>(model.size()) + 1;
1255 
1256         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1257                 reinterpret_cast<const uint8_t*>(model.c_str()), TIFF_IFD_0), env, TAG_MODEL,
1258                 writer);
1259     }
1260 
1261     {
1262         // x resolution
1263         uint32_t xres[] = { 72, 1 }; // default 72 ppi
1264         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1265                 env, TAG_XRESOLUTION, writer);
1266 
1267         // y resolution
1268         uint32_t yres[] = { 72, 1 }; // default 72 ppi
1269         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1270                 env, TAG_YRESOLUTION, writer);
1271 
1272         uint16_t unit = 2; // inches
1273         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1274                 env, TAG_RESOLUTIONUNIT, writer);
1275     }
1276 
1277     {
1278         // software
1279         std::string software = GetProperty("ro.build.fingerprint", "");
1280         uint32_t count = static_cast<uint32_t>(software.size()) + 1;
1281         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1282                 reinterpret_cast<const uint8_t*>(software.c_str()), TIFF_IFD_0), env, TAG_SOFTWARE,
1283                 writer);
1284     }
1285 
1286     if (nativeContext->hasCaptureTime()) {
1287         // datetime
1288         String8 captureTime = nativeContext->getCaptureTime();
1289 
1290         if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
1291                 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1292             jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1293                     "Invalid metadata for tag %x", TAG_DATETIME);
1294             return nullptr;
1295         }
1296 
1297         // datetime original
1298         if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
1299                 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1300             jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1301                     "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
1302             return nullptr;
1303         }
1304     }
1305 
1306     {
1307         // TIFF/EP standard id
1308         uint8_t standardId[] = { 1, 0, 0, 0 };
1309         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
1310                 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
1311     }
1312 
1313     {
1314         // copyright
1315         uint8_t copyright = '\0'; // empty
1316         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
1317                 TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
1318     }
1319 
1320     {
1321         // exposure time
1322         camera_metadata_entry entry =
1323             results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1324         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1325 
1326         int64_t exposureTime = *(entry.data.i64);
1327 
1328         if (exposureTime < 0) {
1329             // Should be unreachable
1330             jniThrowException(env, "java/lang/IllegalArgumentException",
1331                     "Negative exposure time in metadata");
1332             return nullptr;
1333         }
1334 
1335         // Ensure exposure time doesn't overflow (for exposures > 4s)
1336         uint32_t denominator = 1000000000;
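        // Halve the numerator and denominator together so the represented value (in seconds)
        // stays approximately the same while fitting both into 32 bits.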
1337         while (exposureTime > UINT32_MAX) {
1338             exposureTime >>= 1;
1339             denominator >>= 1;
1340             if (denominator == 0) {
1341                 // Should be unreachable
1342                 jniThrowException(env, "java/lang/IllegalArgumentException",
1343                         "Exposure time too long");
1344                 return nullptr;
1345             }
1346         }
1347 
1348         uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1349         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1350                 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1351 
1352     }
1353 
1354     {
1355         // ISO speed ratings
1356         camera_metadata_entry entry =
1357             results.find(ANDROID_SENSOR_SENSITIVITY);
1358         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1359 
1360         int32_t tempIso = *(entry.data.i32);
1361         if (tempIso < 0) {
1362             jniThrowException(env, "java/lang/IllegalArgumentException",
1363                                     "Negative ISO value");
1364             return nullptr;
1365         }
1366 
1367         if (tempIso > UINT16_MAX) {
1368             ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1369             tempIso = UINT16_MAX;
1370         }
1371 
1372         uint16_t iso = static_cast<uint16_t>(tempIso);
1373         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1374                 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1375     }
1376 
1377     {
1378         // Baseline exposure
1379         camera_metadata_entry entry =
1380                 results.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
1381         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
1382 
1383         // post RAW gain should be boostValue / 100
1384         double postRAWGain = static_cast<double> (entry.data.i32[0]) / 100.f;
1385         // Baseline exposure should be in EV units so log2(gain) =
1386         // log10(gain)/log10(2)
1387         double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
1388         int32_t baseExposureSRat[] = { static_cast<int32_t> (baselineExposure * 100),
1389                 100 };
1390         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
1391                 baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
1392     }
1393 
1394     {
1395         // focal length
1396         camera_metadata_entry entry =
1397             results.find(ANDROID_LENS_FOCAL_LENGTH);
1398         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1399 
1400         uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1401         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1402                 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1403     }
1404 
1405     {
1406         // f number
1407         camera_metadata_entry entry =
1408             results.find(ANDROID_LENS_APERTURE);
1409         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1410 
1411         uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1412         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1413                 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1414     }
1415 
1416     {
1417         // Set DNG version information
1418         uint8_t version[4] = {1, 4, 0, 0};
1419         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1420                 env, TAG_DNGVERSION, writer);
1421 
1422         uint8_t backwardVersion[4] = {1, 1, 0, 0};
1423         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1424                 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1425     }
1426 
1427     {
1428         // Set whitelevel
1429         camera_metadata_entry entry =
1430                 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1431         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1432         uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1433         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1434                 env, TAG_WHITELEVEL, writer);
1435     }
1436 
1437     {
1438         // Set default scale
1439         uint32_t defaultScale[4] = {1, 1, 1, 1};
1440         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1441                 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1442     }
1443 
1444     bool singleIlluminant = false;
1445     {
1446         // Set calibration illuminants
1447         camera_metadata_entry entry1 =
1448             characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1449         BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
1450         camera_metadata_entry entry2 =
1451             characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1452         if (entry2.count == 0) {
1453             singleIlluminant = true;
1454         }
1455         uint16_t ref1 = entry1.data.u8[0];
1456 
1457         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
1458                 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
1459 
1460         if (!singleIlluminant) {
1461             uint16_t ref2 = entry2.data.u8[0];
1462             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
1463                     TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
1464         }
1465     }
1466 
1467     {
1468         // Set color transforms
1469         camera_metadata_entry entry1 =
1470             characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
1471         BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
1472 
1473         int32_t colorTransform1[entry1.count * 2];
1474 
1475         size_t ctr = 0;
1476         for(size_t i = 0; i < entry1.count; ++i) {
1477             colorTransform1[ctr++] = entry1.data.r[i].numerator;
1478             colorTransform1[ctr++] = entry1.data.r[i].denominator;
1479         }
1480 
1481         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
1482                 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
1483 
1484         if (!singleIlluminant) {
1485             camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
1486             BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
1487             int32_t colorTransform2[entry2.count * 2];
1488 
1489             ctr = 0;
1490             for(size_t i = 0; i < entry2.count; ++i) {
1491                 colorTransform2[ctr++] = entry2.data.r[i].numerator;
1492                 colorTransform2[ctr++] = entry2.data.r[i].denominator;
1493             }
1494 
1495             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
1496                     colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
1497         }
1498     }
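    // Clarifying note (assumes the usual 3x3 color transform): entryN.count is the number
    // of SRATIONAL values (9 for a 3x3 matrix), and each value is staged as a
    // (numerator, denominator) int32 pair, which is why the buffers hold 2 * count entries.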
1499 
1500     {
1501         // Set calibration transforms
1502         camera_metadata_entry entry1 =
1503             characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1504         BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
1505 
1506         int32_t calibrationTransform1[entry1.count * 2];
1507 
1508         size_t ctr = 0;
1509         for(size_t i = 0; i < entry1.count; ++i) {
1510             calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
1511             calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
1512         }
1513 
1514         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
1515                 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
1516 
1517         if (!singleIlluminant) {
1518             camera_metadata_entry entry2 =
1519                 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1520             BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
1521             int32_t calibrationTransform2[entry2.count * 2];
1522 
1523             ctr = 0;
1524             for(size_t i = 0; i < entry2.count; ++i) {
1525                 calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
1526                 calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
1527             }
1528 
1529             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
1530                     calibrationTransform2, TIFF_IFD_0),  env, TAG_CAMERACALIBRATION2, writer);
1531         }
1532     }
1533 
1534     {
1535         // Set forward transforms
1536         camera_metadata_entry entry1 =
1537             characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
1538         BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
1539 
1540         int32_t forwardTransform1[entry1.count * 2];
1541 
1542         size_t ctr = 0;
1543         for(size_t i = 0; i < entry1.count; ++i) {
1544             forwardTransform1[ctr++] = entry1.data.r[i].numerator;
1545             forwardTransform1[ctr++] = entry1.data.r[i].denominator;
1546         }
1547 
1548         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
1549                 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
1550 
1551         if (!singleIlluminant) {
1552             camera_metadata_entry entry2 =
1553                 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
1554             BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
1555             int32_t forwardTransform2[entry2.count * 2];
1556 
1557             ctr = 0;
1558             for(size_t i = 0; i < entry2.count; ++i) {
1559                 forwardTransform2[ctr++] = entry2.data.r[i].numerator;
1560                 forwardTransform2[ctr++] = entry2.data.r[i].denominator;
1561             }
1562 
1563             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
1564                     forwardTransform2, TIFF_IFD_0),  env, TAG_FORWARDMATRIX2, writer);
1565         }
1566     }
1567 
1568     {
1569         // Set camera neutral
1570         camera_metadata_entry entry =
1571             results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1572         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
1573         uint32_t cameraNeutral[entry.count * 2];
1574 
1575         size_t ctr = 0;
1576         for(size_t i = 0; i < entry.count; ++i) {
1577             cameraNeutral[ctr++] =
1578                     static_cast<uint32_t>(entry.data.r[i].numerator);
1579             cameraNeutral[ctr++] =
1580                     static_cast<uint32_t>(entry.data.r[i].denominator);
1581         }
1582 
1583         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
1584                 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
1585     }
1586 
1587 
1588     {
1589         // Set dimensions
1590         if (calculateAndSetCrop(env, characteristics, writer) != OK) {
1591             return nullptr;
1592         }
1593         camera_metadata_entry entry =
1594                 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1595         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1596         uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1597         uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1598         uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1599         uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1600 
1601         // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1602         // relative to the pixel array.
1603         if (imageWidth == width && imageHeight == height) {
1604             xmin = 0;
1605             ymin = 0;
1606         }
1607 
1608         uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1609         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1610                 env, TAG_ACTIVEAREA, writer);
1611     }
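    // Illustrative example of the ActiveArea layout above: for a pre-correction rectangle
    // of 4032x3024 at offset (xmin, ymin) = (8, 8), the tag is written as
    // {top, left, bottom, right} = {8, 8, 3032, 4040}; if the buffer is exactly the
    // pre-correction size, the offsets collapse to zero first (example numbers only).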
1612 
1613     {
1614         // Setup unique camera model tag
1615         std::string model = GetProperty("ro.product.model", "");
1616         std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1617         std::string brand = GetProperty("ro.product.brand", "");
1618 
1619         String8 cameraModel(model.c_str());
1620         cameraModel += "-";
1621         cameraModel += manufacturer.c_str();
1622         cameraModel += "-";
1623         cameraModel += brand.c_str();
1624 
1625         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1626                 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
1627                 TAG_UNIQUECAMERAMODEL, writer);
1628     }
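    // For example (hypothetical property values), model "Pixel", manufacturer "Google",
    // brand "google" would yield the UniqueCameraModel string "Pixel-Google-google".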
1629 
1630     {
1631         // Setup sensor noise model
1632         camera_metadata_entry entry =
1633             results.find(ANDROID_SENSOR_NOISE_PROFILE);
1634 
1635         const size_t numPlaneColors = 3;
1636         const size_t numCfaChannels = 4;
1637 
1638         uint8_t cfaOut[numCfaChannels];
1639         if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1640             jniThrowException(env, "java/lang/IllegalArgumentException",
1641                     "Invalid CFA from camera characteristics");
1642             return nullptr;
1643         }
1644 
1645         double noiseProfile[numPlaneColors * 2];
1646 
1647         if (entry.count > 0) {
1648             if (entry.count != numCfaChannels * 2) {
1649                 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1650                       "in characteristics, no noise profile tag written...",
1651                       __FUNCTION__, entry.count);
1652             } else {
1653                 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1654                         cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1655 
1656                     BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1657                             numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1658                             writer);
1659                 } else {
1660                     ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1661                             " tag written...", __FUNCTION__);
1662                 }
1663             }
1664         } else {
1665             ALOGW("%s: No noise profile found in result metadata.  Image quality may be reduced.",
1666                     __FUNCTION__);
1667         }
1668     }
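    // Clarifying note: the result metadata reports (S, O) noise-model pairs per CFA
    // channel (numCfaChannels * 2 doubles), and generateNoiseProfile remaps them onto the
    // CFA plane colors so that exactly numPlaneColors * 2 doubles are written to
    // TAG_NOISEPROFILE above.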
1669 
1670     {
1671         // Set up opcode List 2
1672         OpcodeListBuilder builder;
1673         status_t err = OK;
1674 
1675         // Set up lens shading map
1676         camera_metadata_entry entry1 =
1677                 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1678 
1679         uint32_t lsmWidth = 0;
1680         uint32_t lsmHeight = 0;
1681 
1682         if (entry1.count != 0) {
1683             lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1684             lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1685         }
1686 
1687         camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1688 
1689         camera_metadata_entry entry =
1690                 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1691         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1692         uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1693         uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1694         uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1695         uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1696         if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1697             // GainMap rectangle is relative to the active area origin.
1698             err = builder.addGainMapsForMetadata(lsmWidth,
1699                                                  lsmHeight,
1700                                                  0,
1701                                                  0,
1702                                                  height,
1703                                                  width,
1704                                                  opcodeCfaLayout,
1705                                                  entry2.data.f);
1706             if (err != OK) {
1707                 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1708                 jniThrowRuntimeException(env, "failed to add lens shading map.");
1709                 return nullptr;
1710             }
1711         }
1712 
1713 
1714         // Set up bad pixel correction list
1715         camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1716 
1717         if ((entry3.count % 2) != 0) {
1718             ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
1719                     __FUNCTION__);
1720             jniThrowRuntimeException(env, "failed to add hotpixel map.");
1721             return nullptr;
1722         }
1723 
1724         // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
1725         std::vector<uint32_t> v;
1726         for (size_t i = 0; i < entry3.count; i += 2) {
1727             int32_t x = entry3.data.i32[i];
1728             int32_t y = entry3.data.i32[i + 1];
1729             x -= static_cast<int32_t>(xmin);
1730             y -= static_cast<int32_t>(ymin);
1731             if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
1732                     static_cast<uint32_t>(y) >= height) {
1733                 continue;
1734             }
1735             v.push_back(x);
1736             v.push_back(y);
1737         }
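        // Example of the adjustment above (illustrative): a hot pixel reported at
        // pixel-array coordinates (xmin + 10, ymin + 20) becomes (10, 20) relative to the
        // active area; entries outside the pre-correction rectangle are skipped entirely.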
1738         const uint32_t* badPixels = v.data();
1739         uint32_t badPixelCount = static_cast<uint32_t>(v.size());
1740 
1741         if (badPixelCount > 0) {
1742             err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
1743 
1744             if (err != OK) {
1745                 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
1746                 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1747                 return nullptr;
1748             }
1749         }
1750 
1751 
1752         size_t listSize = builder.getSize();
1753         uint8_t opcodeListBuf[listSize];
1754         err = builder.buildOpList(opcodeListBuf);
1755         if (err == OK) {
1756             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize, opcodeListBuf,
1757                     TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
1758         } else {
1759             ALOGE("%s: Could not build list of opcodes for distortion correction and lens shading"
1760                     " map.", __FUNCTION__);
1761             jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
1762                     " correction and lens shading map");
1763             return nullptr;
1764         }
1765     }
1766 
1767     {
1768         // Set up opcode List 3
1769         OpcodeListBuilder builder;
1770         status_t err = OK;
1771 
1772         // Set up rectilinear distortion correction
1773         float distortion[6] {1.f, 0.f, 0.f, 0.f, 0.f, 0.f};
1774         bool gotDistortion = false;
1775 
1776         camera_metadata_entry entry4 =
1777                 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
1778 
1779         if (entry4.count == 5) {
1780             float cx = entry4.data.f[/*c_x*/2];
1781             float cy = entry4.data.f[/*c_y*/3];
1782             // Assuming f_x = f_y, or at least close enough.
1783             // Also assuming s = 0, or at least close enough.
1784             float f = entry4.data.f[/*f_x*/0];
1785 
1786             camera_metadata_entry entry3 =
1787                     results.find(ANDROID_LENS_DISTORTION);
1788             if (entry3.count == 5) {
1789                 gotDistortion = true;
1790                 float m_x = std::fmaxf(preWidth-1 - cx, cx);
1791                 float m_y = std::fmaxf(preHeight-1 - cy, cy);
1792                 float m_sq = m_x*m_x + m_y*m_y;
1793                 float m = sqrtf(m_sq); // distance to farthest corner from optical center
1794                 float f_sq = f * f;
1795                 // Conversion factors from Camera2 K factors for new LENS_DISTORTION field
1796                 // to DNG spec.
1797                 //
1798                 //       Camera2 / OpenCV assume distortion is applied in a space where focal length
1799                 //       is factored out, while DNG assumes a normalized space where the distance
1800                 //       from optical center to the farthest corner is 1.
1801                 //       Scale from camera2 to DNG spec accordingly.
1802                 //       distortion[0] is always 1 with the new LENS_DISTORTION field.
1803                 const double convCoeff[5] = {
1804                     m_sq / f_sq,
1805                     pow(m_sq, 2) / pow(f_sq, 2),
1806                     pow(m_sq, 3) / pow(f_sq, 3),
1807                     m / f,
1808                     m / f
1809                 };
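                // Sketch of the scaling rationale (added comment): with
                // r_camera2 = (m / f) * r_dng, each radial term k_i * r^(2i) picks up a
                // factor of (m/f)^2, (m/f)^4, (m/f)^6 respectively, and the two tangential
                // terms a factor of m/f, which is what convCoeff encodes.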
1810                 for (size_t i = 0; i < entry3.count; i++) {
1811                     distortion[i+1] = convCoeff[i] * entry3.data.f[i];
1812                 }
1813             } else {
1814                 entry3 = results.find(ANDROID_LENS_RADIAL_DISTORTION);
1815                 if (entry3.count == 6) {
1816                     gotDistortion = true;
1817                     // Conversion factors from Camera2 K factors to DNG spec. K factors:
1818                     //
1819                     //      Note: these are necessary because our unit system assumes a
1820                     //      normalized max radius of sqrt(2), whereas the DNG spec's
1821                     //      WarpRectilinear opcode assumes a normalized max radius of 1.
1822                     //      Thus, each K coefficient must include the domain scaling
1823                     //      factor (the DNG domain is scaled by sqrt(2) to emulate the
1824                     //      domain used by the Camera2 specification).
1825                     const double convCoeff[6] = {
1826                         sqrt(2),
1827                         2 * sqrt(2),
1828                         4 * sqrt(2),
1829                         8 * sqrt(2),
1830                         2,
1831                         2
1832                     };
1833                     for (size_t i = 0; i < entry3.count; i++) {
1834                         distortion[i] = entry3.data.f[i] * convCoeff[i];
1835                     }
1836                 }
1837             }
1838             if (gotDistortion) {
1839                 err = builder.addWarpRectilinearForMetadata(distortion, preWidth, preHeight, cx,
1840                         cy);
1841                 if (err != OK) {
1842                     ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
1843                     jniThrowRuntimeException(env, "failed to add distortion correction.");
1844                     return nullptr;
1845                 }
1846             }
1847         }
1848 
1849         size_t listSize = builder.getSize();
1850         uint8_t opcodeListBuf[listSize];
1851         err = builder.buildOpList(opcodeListBuf);
1852         if (err == OK) {
1853             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize, opcodeListBuf,
1854                     TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
1855         } else {
1856             ALOGE("%s: Could not build list of opcodes for distortion correction and lens shading"
1857                     " map.", __FUNCTION__);
1858             jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
1859                     " correction and lens shading map");
1860             return nullptr;
1861         }
1862     }
1863 
1864     {
1865         // Set up orientation tags.
1866         // Note: There's only one orientation field for the whole file, in IFD0
1867         // The main image and any thumbnails therefore have the same orientation.
1868         uint16_t orientation = nativeContext->getOrientation();
1869         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1870                 env, TAG_ORIENTATION, writer);
1871 
1872     }
1873 
1874     if (nativeContext->hasDescription()) {
1875         // Set Description
1876         String8 description = nativeContext->getDescription();
1877         size_t len = description.bytes() + 1;
1878         if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
1879                 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
1880             jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1881                     "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
            return nullptr;
1882         }
1883     }
1884 
1885     if (nativeContext->hasGpsData()) {
1886         // Set GPS tags
1887         GpsData gpsData = nativeContext->getGpsData();
1888         if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
1889             if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
1890                 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
1891                         TIFF_IFD_0);
1892                 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
1893                 return nullptr;
1894             }
1895         }
1896 
1897         {
1898             uint8_t version[] = {2, 3, 0, 0};
1899             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
1900                     TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
1901         }
1902 
1903         {
1904             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
1905                     GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
1906                     TAG_GPSLATITUDEREF, writer);
1907         }
1908 
1909         {
1910             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
1911                     GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
1912                     TAG_GPSLONGITUDEREF, writer);
1913         }
1914 
1915         {
1916             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
1917                     TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
1918         }
1919 
1920         {
1921             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
1922                     TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
1923         }
1924 
1925         {
1926             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
1927                     TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
1928         }
1929 
1930         {
1931             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
1932                     GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
1933                     TAG_GPSDATESTAMP, writer);
1934         }
1935     }
1936 
1937 
1938     if (nativeContext->hasThumbnail()) {
1939         if (!writer->hasIfd(TIFF_IFD_SUB1)) {
1940             if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
1941                 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
1942                         TIFF_IFD_0);
1943                 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
1944                 return nullptr;
1945             }
1946         }
1947 
1948         Vector<uint16_t> tagsToMove;
1949         tagsToMove.add(TAG_NEWSUBFILETYPE);
1950         tagsToMove.add(TAG_ACTIVEAREA);
1951         tagsToMove.add(TAG_BITSPERSAMPLE);
1952         tagsToMove.add(TAG_COMPRESSION);
1953         tagsToMove.add(TAG_IMAGEWIDTH);
1954         tagsToMove.add(TAG_IMAGELENGTH);
1955         tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION);
1956         tagsToMove.add(TAG_BLACKLEVEL);
1957         tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
1958         tagsToMove.add(TAG_SAMPLESPERPIXEL);
1959         tagsToMove.add(TAG_PLANARCONFIGURATION);
1960         tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
1961         tagsToMove.add(TAG_CFAPATTERN);
1962         tagsToMove.add(TAG_CFAPLANECOLOR);
1963         tagsToMove.add(TAG_CFALAYOUT);
1964         tagsToMove.add(TAG_XRESOLUTION);
1965         tagsToMove.add(TAG_YRESOLUTION);
1966         tagsToMove.add(TAG_RESOLUTIONUNIT);
1967         tagsToMove.add(TAG_WHITELEVEL);
1968         tagsToMove.add(TAG_DEFAULTSCALE);
1969         tagsToMove.add(TAG_DEFAULTCROPORIGIN);
1970         tagsToMove.add(TAG_DEFAULTCROPSIZE);
1971         tagsToMove.add(TAG_OPCODELIST2);
1972         tagsToMove.add(TAG_OPCODELIST3);
1973 
1974         if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
1975             jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
1976             return nullptr;
1977         }
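        // Clarifying note: after moveEntries succeeds, the RAW-image tags listed above now
        // live in TIFF_IFD_SUB1, so the thumbnail-specific values written below into
        // TIFF_IFD_0 describe the RGB preview that becomes the first IFD of the file.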
1978 
1979         // Setup thumbnail tags
1980 
1981         {
1982             // Set photometric interpretation
1983             uint16_t interpretation = 2; // RGB
1984             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1985                     &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1986         }
1987 
1988         {
1989             // Set planar configuration
1990             uint16_t config = 1; // Chunky
1991             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1992                     TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1993         }
1994 
1995         {
1996             // Set samples per pixel
1997             uint16_t samples = SAMPLES_PER_RGB_PIXEL;
1998             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
1999                     TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
2000         }
2001 
2002         {
2003             // Set bits per sample
2004             uint16_t bits[SAMPLES_PER_RGB_PIXEL];
2005             for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
2006             BAIL_IF_INVALID_RET_NULL_SP(
2007                     writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_0),
2008                     env, TAG_BITSPERSAMPLE, writer);
2009         }
2010 
2011         {
2012             // Set subfiletype
2013             uint32_t subfileType = 1; // Thumbnail image
2014             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
2015                     TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
2016         }
2017 
2018         {
2019             // Set compression
2020             uint16_t compression = 1; // None
2021             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
2022                     TIFF_IFD_0), env, TAG_COMPRESSION, writer);
2023         }
2024 
2025         {
2026             // Set dimensions
2027             uint32_t uWidth = nativeContext->getThumbnailWidth();
2028             uint32_t uHeight = nativeContext->getThumbnailHeight();
2029             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
2030                     env, TAG_IMAGEWIDTH, writer);
2031             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
2032                     env, TAG_IMAGELENGTH, writer);
2033         }
2034 
2035         {
2036             // x resolution
2037             uint32_t xres[] = { 72, 1 }; // default 72 ppi
2038             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
2039                     env, TAG_XRESOLUTION, writer);
2040 
2041             // y resolution
2042             uint32_t yres[] = { 72, 1 }; // default 72 ppi
2043             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
2044                     env, TAG_YRESOLUTION, writer);
2045 
2046             uint16_t unit = 2; // inches
2047             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
2048                     env, TAG_RESOLUTIONUNIT, writer);
2049         }
2050     }
2051 
2052     if (writer->addStrip(TIFF_IFD_0) != OK) {
2053         ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__);
2054         jniThrowException(env, "java/lang/IllegalStateException",
2055                 "Failed to setup thumbnail strip tags.");
2056         return nullptr;
2057     }
2058 
2059     if (writer->hasIfd(TIFF_IFD_SUB1)) {
2060         if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
2061             ALOGE("%s: Could not setup main image strip tags.", __FUNCTION__);
2062             jniThrowException(env, "java/lang/IllegalStateException",
2063                     "Failed to setup main image strip tags.");
2064             return nullptr;
2065         }
2066     }
2067     return writer;
2068 }
2069 
2070 static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
2071     ALOGV("%s:", __FUNCTION__);
2072     DngCreator_setNativeContext(env, thiz, nullptr);
2073 }
2074 
2075 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
2076     ALOGV("%s:", __FUNCTION__);
2077 
2078     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2079     if (context == nullptr) {
2080         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2081         jniThrowException(env, "java/lang/AssertionError",
2082                 "setOrientation called with uninitialized DngCreator");
2083         return;
2084     }
2085 
2086     uint16_t orientation = static_cast<uint16_t>(orient);
2087     context->setOrientation(orientation);
2088 }
2089 
2090 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2091     ALOGV("%s:", __FUNCTION__);
2092 
2093     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2094     if (context == nullptr) {
2095         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2096         jniThrowException(env, "java/lang/AssertionError",
2097                 "setDescription called with uninitialized DngCreator");
2098         return;
2099     }
2100 
2101     const char* desc = env->GetStringUTFChars(description, nullptr);
2102     context->setDescription(String8(desc));
2103     env->ReleaseStringUTFChars(description, desc);
2104 }
2105 
2106 static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
2107         jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
2108     ALOGV("%s:", __FUNCTION__);
2109 
2110     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2111     if (context == nullptr) {
2112         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2113         jniThrowException(env, "java/lang/AssertionError",
2114                 "setGpsTags called with uninitialized DngCreator");
2115         return;
2116     }
2117 
2118     GpsData data;
2119 
2120     jsize latLen = env->GetArrayLength(latTag);
2121     jsize longLen = env->GetArrayLength(longTag);
2122     jsize timeLen = env->GetArrayLength(timeTag);
2123     if (latLen != GpsData::GPS_VALUE_LENGTH) {
2124         jniThrowException(env, "java/lang/IllegalArgumentException",
2125                 "invalid latitude tag length");
2126         return;
2127     } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
2128         jniThrowException(env, "java/lang/IllegalArgumentException",
2129                 "invalid longitude tag length");
2130         return;
2131     } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
2132         jniThrowException(env, "java/lang/IllegalArgumentException",
2133                 "invalid time tag length");
2134         return;
2135     }
2136 
2137     env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2138             reinterpret_cast<jint*>(&data.mLatitude));
2139     env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2140             reinterpret_cast<jint*>(&data.mLongitude));
2141     env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2142             reinterpret_cast<jint*>(&data.mTimestamp));
2143 
2144 
2145     env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
2146     data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2147     env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
2148     data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2149     env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
2150             reinterpret_cast<char*>(&data.mDate));
2151     data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';
2152 
2153     context->setGpsData(data);
2154 }
2155 
2156 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2157         jint height) {
2158     ALOGV("%s:", __FUNCTION__);
2159 
2160     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2161     if (context == nullptr) {
2162         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2163         jniThrowException(env, "java/lang/AssertionError",
2164                 "setThumbnail called with uninitialized DngCreator");
2165         return;
2166     }
2167 
2168     size_t fullSize = static_cast<size_t>(width) * height * BYTES_PER_RGB_PIXEL;
2169     jlong capacity = env->GetDirectBufferCapacity(buffer);
2170     if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2171         jniThrowExceptionFmt(env, "java/lang/AssertionError",
2172                 "Invalid size %" PRId64 " for thumbnail, expected size was %zu",
2173                 capacity, fullSize);
2174         return;
2175     }
2176 
2177     uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2178     if (pixelBytes == nullptr) {
2179         ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2180         jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2181         return;
2182     }
2183 
2184     if (!context->setThumbnail(pixelBytes, width, height)) {
2185         jniThrowException(env, "java/lang/IllegalStateException",
2186                 "Failed to set thumbnail.");
2187         return;
2188     }
2189 }
2190 
2191 // TODO: Refactor out common preamble for the two nativeWrite methods.
2192 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2193         jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2194         jboolean isDirect) {
2195     ALOGV("%s:", __FUNCTION__);
2196     ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2197           "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2198           height, rowStride, pixStride, offset);
2199     uint32_t rStride = static_cast<uint32_t>(rowStride);
2200     uint32_t pStride = static_cast<uint32_t>(pixStride);
2201     uint32_t uWidth = static_cast<uint32_t>(width);
2202     uint32_t uHeight = static_cast<uint32_t>(height);
2203     uint64_t uOffset = static_cast<uint64_t>(offset);
2204 
2205     sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2206     if(env->ExceptionCheck()) {
2207         ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2208         return;
2209     }
2210 
2211     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2212     if (context == nullptr) {
2213         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2214         jniThrowException(env, "java/lang/AssertionError",
2215                 "Write called with uninitialized DngCreator");
2216         return;
2217     }
2218     sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2219 
2220     if (writer.get() == nullptr) {
2221         return;
2222     }
2223 
2224     // Validate DNG size
2225     if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2226         return;
2227     }
2228 
2229     sp<JniInputByteBuffer> inBuf;
2230     Vector<StripSource*> sources;
2231     sp<DirectStripSource> thumbnailSource;
2232     uint32_t targetIfd = TIFF_IFD_0;
2233 
2234     bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2235 
2236     if (hasThumbnail) {
2237         ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2238         uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2239         uint32_t thumbWidth = context->getThumbnailWidth();
2240         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2241                 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2242                 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2243                 SAMPLES_PER_RGB_PIXEL);
2244         sources.add(thumbnailSource.get());
2245         targetIfd = TIFF_IFD_SUB1;
2246     }
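    // Note (added for clarity): when a thumbnail is present, IFD0 is written from the
    // stored RGB thumbnail buffer and the RAW strips target TIFF_IFD_SUB1; otherwise the
    // RAW data is written directly into IFD0.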
2247 
2248     if (isDirect) {
2249         size_t fullSize = rStride * uHeight;
2250         jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2251         if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2252             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2253                     "Invalid size %" PRId64 " for Image, size given in metadata is %zu at current stride",
2254                     capacity, fullSize);
2255             return;
2256         }
2257 
2258         uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2259         if (pixelBytes == nullptr) {
2260             ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2261             jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2262             return;
2263         }
2264 
2265         ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2266         DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2267                 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2268         sources.add(&stripSource);
2269 
2270         status_t ret = OK;
2271         if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2272             ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2273             if (!env->ExceptionCheck()) {
2274                 jniThrowExceptionFmt(env, "java/io/IOException",
2275                         "Encountered error %d while writing file.", ret);
2276             }
2277             return;
2278         }
2279     } else {
2280         inBuf = new JniInputByteBuffer(env, inBuffer);
2281 
2282         ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2283         InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2284                  rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2285         sources.add(&stripSource);
2286 
2287         status_t ret = OK;
2288         if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2289             ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2290             if (!env->ExceptionCheck()) {
2291                 jniThrowExceptionFmt(env, "java/io/IOException",
2292                         "Encountered error %d while writing file.", ret);
2293             }
2294             return;
2295         }
2296     }
2297 }
2298 
2299 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2300         jobject inStream, jint width, jint height, jlong offset) {
2301     ALOGV("%s:", __FUNCTION__);
2302 
2303     uint32_t rowStride = width * BYTES_PER_SAMPLE;
2304     uint32_t pixStride = BYTES_PER_SAMPLE;
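    // Assumption noted for clarity: the InputStream write path treats the RAW data as
    // tightly packed, i.e. the pixel stride is one sample and the row stride is
    // width * BYTES_PER_SAMPLE with no padding between rows.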
2305     uint32_t uWidth = static_cast<uint32_t>(width);
2306     uint32_t uHeight = static_cast<uint32_t>(height);
2307     uint64_t uOffset = static_cast<uint64_t>(offset);
2308 
2309     ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2310           "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2311           height, rowStride, pixStride, offset);
2312 
2313     sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2314     if (env->ExceptionCheck()) {
2315         ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2316         return;
2317     }
2318 
2319     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2320     if (context == nullptr) {
2321         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2322         jniThrowException(env, "java/lang/AssertionError",
2323                 "Write called with uninitialized DngCreator");
2324         return;
2325     }
2326     sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2327 
2328     if (writer.get() == nullptr) {
2329         return;
2330     }
2331 
2332     // Validate DNG size
2333     if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2334         return;
2335     }
2336 
2337     sp<DirectStripSource> thumbnailSource;
2338     uint32_t targetIfd = TIFF_IFD_0;
2339     bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2340     Vector<StripSource*> sources;
2341 
2342     if (hasThumbnail) {
2343         ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2344         uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2345         uint32_t thumbWidth = context->getThumbnailWidth();
2346         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2347                 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2348                 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2349                 SAMPLES_PER_RGB_PIXEL);
2350         sources.add(thumbnailSource.get());
2351         targetIfd = TIFF_IFD_SUB1;
2352     }
2353 
2354     sp<JniInputStream> in = new JniInputStream(env, inStream);
2355 
2356     ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2357     InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2358              rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2359     sources.add(&stripSource);
2360 
2361     status_t ret = OK;
2362     if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2363         ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2364         if (!env->ExceptionCheck()) {
2365             jniThrowExceptionFmt(env, "java/io/IOException",
2366                     "Encountered error %d while writing file.", ret);
2367         }
2368         return;
2369     }
2370 }
2371 
2372 } /*extern "C" */
2373 
2374 static const JNINativeMethod gDngCreatorMethods[] = {
2375     {"nativeClassInit",        "()V", (void*) DngCreator_nativeClassInit},
2376     {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
2377             "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
2378             (void*) DngCreator_init},
2379     {"nativeDestroy",           "()V",      (void*) DngCreator_destroy},
2380     {"nativeSetOrientation",    "(I)V",     (void*) DngCreator_nativeSetOrientation},
2381     {"nativeSetDescription",    "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
2382     {"nativeSetGpsTags",    "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
2383             (void*) DngCreator_nativeSetGpsTags},
2384     {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
2385     {"nativeWriteImage",        "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
2386             (void*) DngCreator_nativeWriteImage},
2387     {"nativeWriteInputStream",    "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
2388             (void*) DngCreator_nativeWriteInputStream},
2389 };
2390 
2391 int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
2392     return RegisterMethodsOrDie(env,
2393             "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
2394 }
2395