1 /*
2 * Copyright 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <array>
23 #include <memory>
24 #include <vector>
25 #include <cmath>
26
27 #include <android-base/properties.h>
28 #include <utils/Log.h>
29 #include <utils/Errors.h>
30 #include <utils/StrongPointer.h>
31 #include <utils/RefBase.h>
32 #include <utils/Vector.h>
33 #include <utils/String8.h>
34 #include <system/camera_metadata.h>
35 #include <camera/CameraMetadata.h>
36 #include <img_utils/DngUtils.h>
37 #include <img_utils/TagDefinitions.h>
38 #include <img_utils/TiffIfd.h>
39 #include <img_utils/TiffWriter.h>
40 #include <img_utils/Output.h>
41 #include <img_utils/Input.h>
42 #include <img_utils/StripSource.h>
43
44 #include "core_jni_helpers.h"
45
46 #include "android_runtime/AndroidRuntime.h"
47 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
48
49 #include <jni.h>
50 #include <nativehelper/JNIHelp.h>
51
52 using namespace android;
53 using namespace img_utils;
54 using android::base::GetProperty;
55
// Error-bailout helpers. Each throws an IllegalArgumentException through JNI
// and returns from the enclosing function with the failure value named in the
// macro. Wrapped in do { ... } while (0) so every use expands to exactly one
// statement -- safe inside unbraced if/else bodies -- and requires a ';'.

// Bail with `false` if `expr` does not evaluate to OK.
#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
    do { \
        if ((expr) != OK) { \
            jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                    "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
            return false; \
        } \
    } while (0)


// Bail with nullptr if `expr` does not evaluate to OK.
#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
    do { \
        if ((expr) != OK) { \
            jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                    "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
            return nullptr; \
        } \
    } while (0)


// Bail with -1 if `expr` does not evaluate to OK.
#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
    do { \
        if ((expr) != OK) { \
            jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                    "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
            return -1; \
        } \
    } while (0)

// Bail with nullptr if the metadata entry has no values.
#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
    do { \
        if ((entry).count == 0) { \
            jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                    "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
            return nullptr; \
        } \
    } while (0)

// Bail with nullptr if the arbitrary condition `expr` is true.
#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
    do { \
        if (expr) { \
            jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                    "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
            return nullptr; \
        } \
    } while (0)
92
93
// Name of the Java field that caches the native context pointer.
#define ANDROID_DNGCREATOR_CTX_JNI_ID       "mNativeContext"

// Cached JNI field/method IDs, resolved once at class-registration time
// (registration code is outside this chunk).
static struct {
    jfieldID mNativeContext;   // DngCreator.mNativeContext
} gDngCreatorClassInfo;

static struct {
    jmethodID mWriteMethod;    // called as write(byte[], int, int) -- see JniOutputStream::write
} gOutputStreamClassInfo;

static struct {
    jmethodID mReadMethod;     // called as int read(byte[], int, int) -- see JniInputStream::read
    jmethodID mSkipMethod;     // called as long skip(long) -- see JniInputStream::skip
} gInputStreamClassInfo;

static struct {
    jmethodID mGetMethod;      // called as get(byte[], int, int) -- see JniInputByteBuffer::read
} gInputByteBufferClassInfo;

// Pixel-format constants and the fixed TIFF IFD indices used throughout this file.
enum {
    BITS_PER_SAMPLE = 16,
    BYTES_PER_SAMPLE = 2,
    BYTES_PER_RGB_PIXEL = 3,
    BITS_PER_RGB_SAMPLE = 8,
    BYTES_PER_RGB_SAMPLE = 1,
    SAMPLES_PER_RGB_PIXEL = 3,
    SAMPLES_PER_RAW_PIXEL = 1,
    TIFF_IFD_0 = 0,        // main RAW image IFD
    TIFF_IFD_SUB1 = 1,     // sub-IFD
    TIFF_IFD_GPSINFO = 2,  // GPS info IFD
};
125
126
127 /**
128 * POD container class for GPS tag data.
129 */
class GpsData {
public:
    enum {
        GPS_VALUE_LENGTH = 6,   // 6 uint32s per coordinate -- presumably 3 numerator/denominator
                                // rational pairs (EXIF GPS convention); confirm at fill sites
        GPS_REF_LENGTH = 2,     // single reference char (e.g. N/S, E/W) + NUL
        GPS_DATE_LENGTH = 11,   // date string buffer incl. NUL
    };

    uint32_t mLatitude[GPS_VALUE_LENGTH];
    uint32_t mLongitude[GPS_VALUE_LENGTH];
    uint32_t mTimestamp[GPS_VALUE_LENGTH];
    uint8_t mLatitudeRef[GPS_REF_LENGTH];
    uint8_t mLongitudeRef[GPS_REF_LENGTH];
    uint8_t mDate[GPS_DATE_LENGTH];
};
145
146 // ----------------------------------------------------------------------------
147
148 /**
149 * Container class for the persistent native context.
150 */
151
class NativeContext : public LightRefBase<NativeContext> {
public:
    enum {
        // Buffer length for the formatted capture-time string -- presumably an
        // EXIF-style "YYYY:MM:DD HH:MM:SS" plus NUL; confirm at the call sites.
        DATETIME_COUNT = 20,
    };

    // Deep-copies both metadata packs; the context does not keep references to
    // the caller's objects.
    NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
    virtual ~NativeContext();

    // TIFF writer owned by this context; valid while the context is alive.
    TiffWriter* getWriter();

    std::shared_ptr<const CameraMetadata> getCharacteristics() const;
    std::shared_ptr<const CameraMetadata> getResult() const;

    // Thumbnail accessors; only meaningful once hasThumbnail() is true.
    uint32_t getThumbnailWidth() const;
    uint32_t getThumbnailHeight() const;
    const uint8_t* getThumbnail() const;
    bool hasThumbnail() const;

    // Copies BYTES_PER_RGB_PIXEL * width * height bytes out of `buffer`;
    // returns false if the internal buffer could not be allocated.
    bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);

    // TIFF orientation value; defaults to TAG_ORIENTATION_UNKNOWN.
    void setOrientation(uint16_t orientation);
    uint16_t getOrientation() const;

    // Optional fields below: each has*() reports whether the matching set*()
    // was called.
    void setDescription(const String8& desc);
    String8 getDescription() const;
    bool hasDescription() const;

    void setGpsData(const GpsData& data);
    GpsData getGpsData() const;
    bool hasGpsData() const;

    void setCaptureTime(const String8& formattedCaptureTime);
    String8 getCaptureTime() const;
    bool hasCaptureTime() const;

private:
    Vector<uint8_t> mCurrentThumbnail;            // packed RGB thumbnail bytes
    TiffWriter mWriter;
    std::shared_ptr<CameraMetadata> mCharacteristics;
    std::shared_ptr<CameraMetadata> mResult;
    uint32_t mThumbnailWidth;
    uint32_t mThumbnailHeight;
    uint16_t mOrientation;
    bool mThumbnailSet;
    bool mGpsSet;
    bool mDescriptionSet;
    bool mCaptureTimeSet;
    String8 mDescription;
    GpsData mGpsData;
    String8 mFormattedCaptureTime;
};
204
// Deep-copy both metadata packs so the context owns its own copies.
NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
        mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
        mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
        mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
        mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}

NativeContext::~NativeContext() {}

// Never null; pointer is valid for the lifetime of this context.
TiffWriter* NativeContext::getWriter() {
    return &mWriter;
}

std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
    return mCharacteristics;
}

std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
    return mResult;
}

uint32_t NativeContext::getThumbnailWidth() const {
    return mThumbnailWidth;
}

uint32_t NativeContext::getThumbnailHeight() const {
    return mThumbnailHeight;
}

// Pointer into mCurrentThumbnail; invalidated by the next setThumbnail() call.
const uint8_t* NativeContext::getThumbnail() const {
    return mCurrentThumbnail.array();
}

bool NativeContext::hasThumbnail() const {
    return mThumbnailSet;
}
240
setThumbnail(const uint8_t * buffer,uint32_t width,uint32_t height)241 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
242 mThumbnailWidth = width;
243 mThumbnailHeight = height;
244
245 size_t size = BYTES_PER_RGB_PIXEL * width * height;
246 if (mCurrentThumbnail.resize(size) < 0) {
247 ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
248 return false;
249 }
250
251 uint8_t* thumb = mCurrentThumbnail.editArray();
252 memcpy(thumb, buffer, size);
253 mThumbnailSet = true;
254 return true;
255 }
256
void NativeContext::setOrientation(uint16_t orientation) {
    mOrientation = orientation;
}

uint16_t NativeContext::getOrientation() const {
    return mOrientation;
}

// Optional description; hasDescription() reports whether this was called.
void NativeContext::setDescription(const String8& desc) {
    mDescription = desc;
    mDescriptionSet = true;
}

String8 NativeContext::getDescription() const {
    return mDescription;
}

bool NativeContext::hasDescription() const {
    return mDescriptionSet;
}

// Optional GPS tag data; hasGpsData() reports whether this was called.
void NativeContext::setGpsData(const GpsData& data) {
    mGpsData = data;
    mGpsSet = true;
}

GpsData NativeContext::getGpsData() const {
    return mGpsData;
}

bool NativeContext::hasGpsData() const {
    return mGpsSet;
}

// Optional pre-formatted capture timestamp; hasCaptureTime() reports whether
// this was called.
void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
    mFormattedCaptureTime = formattedCaptureTime;
    mCaptureTimeSet = true;
}

String8 NativeContext::getCaptureTime() const {
    return mFormattedCaptureTime;
}

bool NativeContext::hasCaptureTime() const {
    return mCaptureTimeSet;
}
303
304 // End of NativeContext
305 // ----------------------------------------------------------------------------
306
307 /**
308 * Wrapper class for a Java OutputStream.
309 *
310 * This class is not intended to be used across JNI calls.
311 */
class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
public:
    // `env` and `outStream` must outlive this object; neither is owned.
    JniOutputStream(JNIEnv* env, jobject outStream);

    virtual ~JniOutputStream();

    // No-op; the Java stream is assumed to already be open.
    status_t open();

    // Writes `count` bytes from buf + offset to the Java stream, chunked
    // through mByteArray. Returns BAD_VALUE with a Java exception pending on
    // failure.
    status_t write(const uint8_t* buf, size_t offset, size_t count);

    // No-op; closing the Java stream is left to the Java side.
    status_t close();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096   // size of the native<->Java copy chunk
    };
    jobject mOutputStream;   // borrowed local/global ref, not owned
    JNIEnv* mEnv;            // only valid on the creating thread/JNI call
    jbyteArray mByteArray;   // scratch array; may be null if allocation failed
};
331
JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
        mEnv(env) {
    // Allocate the scratch chunk once up front. If this fails, mByteArray is
    // null and an OutOfMemoryError is left pending for the caller to observe.
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniOutputStream::~JniOutputStream() {
    // DeleteLocalRef on a null reference is a no-op, so this is safe even when
    // the constructor's allocation failed.
    mEnv->DeleteLocalRef(mByteArray);
}

status_t JniOutputStream::open() {
    // Do nothing
    return OK;
}
348
write(const uint8_t * buf,size_t offset,size_t count)349 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
350 while(count > 0) {
351 size_t len = BYTE_ARRAY_LENGTH;
352 len = (count > len) ? len : count;
353 mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
354
355 if (mEnv->ExceptionCheck()) {
356 return BAD_VALUE;
357 }
358
359 mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
360 0, len);
361
362 if (mEnv->ExceptionCheck()) {
363 return BAD_VALUE;
364 }
365
366 count -= len;
367 offset += len;
368 }
369 return OK;
370 }
371
// No-op: the Java OutputStream's lifecycle is managed on the Java side.
status_t JniOutputStream::close() {
    // Do nothing
    return OK;
}
376
377 // End of JniOutputStream
378 // ----------------------------------------------------------------------------
379
380 /**
381 * Wrapper class for a Java InputStream.
382 *
383 * This class is not intended to be used across JNI calls.
384 */
class JniInputStream : public Input, public LightRefBase<JniInputStream> {
public:
    // `env` and `inStream` must outlive this object; neither is owned.
    JniInputStream(JNIEnv* env, jobject inStream);

    // No-op; the Java stream is assumed to already be open.
    status_t open();

    // No-op; closing the Java stream is left to the Java side.
    status_t close();

    // Reads up to min(count, BYTE_ARRAY_LENGTH) bytes into buf + offset.
    // Returns bytes read, NOT_ENOUGH_DATA on EOF, or BAD_VALUE on a Java
    // exception.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    // Skips up to `count` bytes; returns bytes skipped, NOT_ENOUGH_DATA, or
    // BAD_VALUE on a Java exception.
    ssize_t skip(size_t count);

    virtual ~JniInputStream();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096   // size of the Java<->native copy chunk
    };
    jobject mInStream;       // borrowed ref, not owned
    JNIEnv* mEnv;            // only valid on the creating thread/JNI call
    jbyteArray mByteArray;   // scratch array; may be null if allocation failed

};
407
JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
    // Allocate the scratch chunk once; on failure mByteArray stays null and an
    // OutOfMemoryError is left pending for the caller.
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputStream::~JniInputStream() {
    // Safe even if mByteArray is null (DeleteLocalRef(null) is a no-op).
    mEnv->DeleteLocalRef(mByteArray);
}
418
read(uint8_t * buf,size_t offset,size_t count)419 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
420
421 jint realCount = BYTE_ARRAY_LENGTH;
422 if (count < BYTE_ARRAY_LENGTH) {
423 realCount = count;
424 }
425 jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
426 realCount);
427
428 if (actual < 0) {
429 return NOT_ENOUGH_DATA;
430 }
431
432 if (mEnv->ExceptionCheck()) {
433 return BAD_VALUE;
434 }
435
436 mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
437 if (mEnv->ExceptionCheck()) {
438 return BAD_VALUE;
439 }
440 return actual;
441 }
442
// Skip up to `count` bytes of the wrapped Java InputStream via
// InputStream.skip(long). Exception state is checked before the return value
// is interpreted, since a thrown skip() leaves `actual` meaningless.
ssize_t JniInputStream::skip(size_t count) {
    jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
            static_cast<jlong>(count));

    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }
    if (actual < 0) {
        return NOT_ENOUGH_DATA;
    }
    return actual;
}
455
// No-op: the Java InputStream's lifecycle is managed on the Java side.
status_t JniInputStream::open() {
    // Do nothing
    return OK;
}

status_t JniInputStream::close() {
    // Do nothing
    return OK;
}
465
466 // End of JniInputStream
467 // ----------------------------------------------------------------------------
468
469 /**
470 * Wrapper class for a non-direct Java ByteBuffer.
471 *
472 * This class is not intended to be used across JNI calls.
473 */
class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
public:
    // `env` and `inBuf` must outlive this object; neither is owned.
    JniInputByteBuffer(JNIEnv* env, jobject inBuf);

    // No-op.
    status_t open();

    // No-op.
    status_t close();

    // Reads exactly min(count, BYTE_ARRAY_LENGTH) bytes into buf + offset, or
    // returns BAD_VALUE with a Java exception pending (e.g. buffer underflow).
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    virtual ~JniInputByteBuffer();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096   // size of the Java<->native copy chunk
    };
    jobject mInBuf;          // borrowed ref to the Java ByteBuffer, not owned
    JNIEnv* mEnv;            // only valid on the creating thread/JNI call
    jbyteArray mByteArray;   // scratch array; may be null if allocation failed
};
493
JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
    // Allocate the scratch chunk once; on failure mByteArray stays null and an
    // OutOfMemoryError is left pending for the caller.
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputByteBuffer::~JniInputByteBuffer() {
    // Safe even if mByteArray is null (DeleteLocalRef(null) is a no-op).
    mEnv->DeleteLocalRef(mByteArray);
}
504
ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
    jint realCount = BYTE_ARRAY_LENGTH;
    if (count < BYTE_ARRAY_LENGTH) {
        realCount = count;
    }

    // ByteBuffer#get(byte[], int, int) returns the buffer itself for call
    // chaining; release that local ref immediately so repeated reads inside a
    // loop cannot exhaust the local-reference table.
    jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
            mByteArray, 0, realCount);
    mEnv->DeleteLocalRef(chainingBuf);

    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }

    // Unlike InputStream, this path never produces a short read: get() either
    // fills realCount bytes or throws (caught above).
    mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }
    return realCount;
}
527
// No-op: the Java ByteBuffer needs no open/close handling.
status_t JniInputByteBuffer::open() {
    // Do nothing
    return OK;
}

status_t JniInputByteBuffer::close() {
    // Do nothing
    return OK;
}
537
538 // End of JniInputByteBuffer
539 // ----------------------------------------------------------------------------
540
541 /**
542 * StripSource subclass for Input types.
543 *
544 * This class is not intended to be used across JNI calls.
545 */
546
class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
public:
    // `env` and `input` must outlive this object; neither is owned. `offset`
    // is the number of bytes of `input` to skip before the pixel data.
    InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
            uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
            uint32_t samplesPerPixel);

    virtual ~InputStripSource();

    // Streams width*height*bytesPerSample*samplesPerPixel bytes from the input
    // to `stream`; `count` must equal that product.
    virtual status_t writeToStream(Output& stream, uint32_t count);

    virtual uint32_t getIfd() const;
protected:
    uint32_t mIfd;           // TIFF IFD this strip belongs to
    Input* mInput;           // non-owning
    uint32_t mWidth;
    uint32_t mHeight;
    uint32_t mPixStride;     // bytes between pixel starts
    uint32_t mRowStride;     // bytes between row starts (may include padding)
    uint64_t mOffset;        // bytes to skip before pixel data
    JNIEnv* mEnv;
    uint32_t mBytesPerSample;
    uint32_t mSamplesPerPixel;
};
570
// Stores borrowed pointers/values only; no allocation happens here.
InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
        uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
        uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
        mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
        mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

InputStripSource::~InputStripSource() {}
579
// Stream the strip's pixel data from mInput to `stream`, row by row.
// Throws a Java exception and returns a negative status on any size mismatch,
// EOF, allocation failure, or I/O error; returns OK on success.
status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
    // NOTE(review): 32-bit product could wrap for very large images --
    // presumably bounded by callers' validation; confirm.
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
    jlong offset = mOffset;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }

    // Skip offset bytes before the pixel data; skip() may do partial skips, so
    // loop until done.
    while (offset > 0) {
        ssize_t skipped = mInput->skip(offset);
        if (skipped <= 0) {
            if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
                jniThrowExceptionFmt(mEnv, "java/io/IOException",
                        "Early EOF encountered in skip, not enough pixel data for image of size %u",
                        fullSize);
                skipped = NOT_ENOUGH_DATA;
            } else {
                // Unknown error: only throw if the callee has not already left
                // a Java exception pending.
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException",
                            "Error encountered while skip bytes in input stream.");
                }
            }

            return skipped;
        }
        offset -= skipped;
    }

    // One reusable row buffer (rowStride covers any trailing padding).
    Vector<uint8_t> row;
    if (row.resize(mRowStride) < 0) {
        jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
        return BAD_VALUE;
    }

    uint8_t* rowBytes = row.editArray();

    for (uint32_t i = 0; i < mHeight; ++i) {
        size_t rowFillAmt = 0;
        size_t rowSize = mRowStride;

        // read() may return short counts; accumulate until the row is full.
        while (rowFillAmt < mRowStride) {
            ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
            if (bytesRead <= 0) {
                if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
                    ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
                            __FUNCTION__, i, bytesRead);
                    jniThrowExceptionFmt(mEnv, "java/io/IOException",
                            "Early EOF encountered, not enough pixel data for image of size %"
                            PRIu32, fullSize);
                    bytesRead = NOT_ENOUGH_DATA;
                } else {
                    if (!mEnv->ExceptionCheck()) {
                        jniThrowException(mEnv, "java/io/IOException",
                                "Error encountered while reading");
                    }
                }
                return bytesRead;
            }
            rowFillAmt += bytesRead;
            rowSize -= bytesRead;
        }

        if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
            // Contiguous pixels: emit the row payload (without padding) in one
            // write call.
            ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);

            if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        } else {
            // Non-contiguous pixels are rejected outright.
            ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
            jniThrowException(mEnv, "java/lang/IllegalStateException",
                    "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
            return BAD_VALUE;

            // TODO: Add support for non-contiguous pixels if needed.
        }
    }
    return OK;
}
667
// IFD index this strip source was constructed for.
uint32_t InputStripSource::getIfd() const {
    return mIfd;
}
671
672 // End of InputStripSource
673 // ----------------------------------------------------------------------------
674
675 /**
676 * StripSource subclass for direct buffer types.
677 *
678 * This class is not intended to be used across JNI calls.
679 */
680
681 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
682 public:
683 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
684 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
685 uint32_t bytesPerSample, uint32_t samplesPerPixel);
686
687 virtual ~DirectStripSource();
688
689 virtual status_t writeToStream(Output& stream, uint32_t count);
690
691 virtual uint32_t getIfd() const;
692 protected:
693 uint32_t mIfd;
694 const uint8_t* mPixelBytes;
695 uint32_t mWidth;
696 uint32_t mHeight;
697 uint32_t mPixStride;
698 uint32_t mRowStride;
699 uint16_t mOffset;
700 JNIEnv* mEnv;
701 uint32_t mBytesPerSample;
702 uint32_t mSamplesPerPixel;
703 };
704
// Stores borrowed pointers/values only; no allocation happens here.
DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
        uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
        uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
        mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
        mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

DirectStripSource::~DirectStripSource() {}
713
// Write the strip directly from the native pixel buffer to `stream`, choosing
// a single-pass write when rows are packed back-to-back, a per-row write when
// only pixels are contiguous, and rejecting per-pixel strides entirely.
status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
    // NOTE(review): 32-bit product could wrap for very large images --
    // presumably bounded by callers' validation; confirm.
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }


    if (mPixStride == mBytesPerSample * mSamplesPerPixel
            && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
        // Fully contiguous buffer: one write covers the whole strip.
        ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);

        if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
            if (!mEnv->ExceptionCheck()) {
                jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
            }
            return BAD_VALUE;
        }
    } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
        // Packed pixels but padded rows: write each row's payload separately,
        // stepping by mRowStride through the source buffer.
        ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);

        for (size_t i = 0; i < mHeight; ++i) {
            if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        }
    } else {
        // Non-contiguous pixels are rejected outright.
        ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);

        jniThrowException(mEnv, "java/lang/IllegalStateException",
                "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
        return BAD_VALUE;

        // TODO: Add support for non-contiguous pixels if needed.
    }
    return OK;

}
759
// IFD index this strip source was constructed for.
uint32_t DirectStripSource::getIfd() const {
    return mIfd;
}
763
764 // End of DirectStripSource
765 // ----------------------------------------------------------------------------
766
767 /**
768 * Calculate the default crop relative to the "active area" of the image sensor (this active area
769 * will always be the pre-correction active area rectangle), and set this.
770 */
calculateAndSetCrop(JNIEnv * env,const CameraMetadata & characteristics,sp<TiffWriter> writer)771 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
772 sp<TiffWriter> writer) {
773
774 camera_metadata_ro_entry entry =
775 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
776 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
777 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
778
779 const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
780
781 if (width < margin * 2 || height < margin * 2) {
782 ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
783 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
784 jniThrowException(env, "java/lang/IllegalStateException",
785 "Pre-correction active area is too small.");
786 return BAD_VALUE;
787 }
788
789 uint32_t defaultCropOrigin[] = {margin, margin};
790 uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
791 height - defaultCropOrigin[1] - margin};
792
793 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
794 TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
795 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
796 TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
797
798 return OK;
799 }
800
validateDngHeader(JNIEnv * env,sp<TiffWriter> writer,const CameraMetadata & characteristics,jint width,jint height)801 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
802 const CameraMetadata& characteristics, jint width, jint height) {
803 if (width <= 0) {
804 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
805 "Image width %d is invalid", width);
806 return false;
807 }
808
809 if (height <= 0) {
810 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
811 "Image height %d is invalid", height);
812 return false;
813 }
814
815 camera_metadata_ro_entry preCorrectionEntry =
816 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
817 camera_metadata_ro_entry pixelArrayEntry =
818 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
819
820 int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
821 int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
822 int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
823 int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
824
825 bool matchesPixelArray = (pWidth == width && pHeight == height);
826 bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
827
828 if (!(matchesPixelArray || matchesPreCorrectionArray)) {
829 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
830 "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
831 "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
832 width, height, pWidth, pHeight, cWidth, cHeight);
833 return false;
834 }
835
836 return true;
837 }
838
moveEntries(sp<TiffWriter> writer,uint32_t ifdFrom,uint32_t ifdTo,const Vector<uint16_t> & entries)839 static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo,
840 const Vector<uint16_t>& entries) {
841 for (size_t i = 0; i < entries.size(); ++i) {
842 uint16_t tagId = entries[i];
843 sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom);
844 if (entry.get() == nullptr) {
845 ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId,
846 ifdFrom);
847 return BAD_VALUE;
848 }
849 if (writer->addEntry(entry, ifdTo) != OK) {
850 ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId,
851 ifdFrom);
852 return BAD_VALUE;
853 }
854 writer->removeEntry(tagId, ifdFrom);
855 }
856 return OK;
857 }
858
859 /**
860 * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4.
861 * Returns OK on success, or a negative error code if the CFA enum was invalid.
862 */
convertCFA(uint8_t cfaEnum,uint8_t * cfaOut)863 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
864 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
865 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
866 cfaEnum);
867 switch(cfa) {
868 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
869 cfaOut[0] = 0;
870 cfaOut[1] = 1;
871 cfaOut[2] = 1;
872 cfaOut[3] = 2;
873 break;
874 }
875 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
876 cfaOut[0] = 1;
877 cfaOut[1] = 0;
878 cfaOut[2] = 2;
879 cfaOut[3] = 1;
880 break;
881 }
882 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
883 cfaOut[0] = 1;
884 cfaOut[1] = 2;
885 cfaOut[2] = 0;
886 cfaOut[3] = 1;
887 break;
888 }
889 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
890 cfaOut[0] = 2;
891 cfaOut[1] = 1;
892 cfaOut[2] = 1;
893 cfaOut[3] = 0;
894 break;
895 }
896 // MONO and NIR are degenerate case of RGGB pattern: only Red channel
897 // will be used.
898 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
899 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
900 cfaOut[0] = 0;
901 break;
902 }
903 default: {
904 return BAD_VALUE;
905 }
906 }
907 return OK;
908 }
909
910 /**
911 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
912 * RGGB for an unknown enum.
913 */
convertCFAEnumToOpcodeLayout(uint8_t cfaEnum)914 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
915 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
916 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
917 cfaEnum);
918 switch(cfa) {
919 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
920 return OpcodeListBuilder::CFA_RGGB;
921 }
922 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
923 return OpcodeListBuilder::CFA_GRBG;
924 }
925 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
926 return OpcodeListBuilder::CFA_GBRG;
927 }
928 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
929 return OpcodeListBuilder::CFA_BGGR;
930 }
931 default: {
932 return OpcodeListBuilder::CFA_RGGB;
933 }
934 }
935 }
936
937 /**
938 * For each color plane, find the corresponding noise profile coefficients given in the
939 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color
940 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
941 *
942 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
943 * cfa - numChannels color channels corresponding to each of the per-channel noise profile
944 * coefficients.
945 * numChannels - the number of noise profile coefficient pairs and color channels given in
946 * the perChannelNoiseProfile and cfa arguments, respectively.
947 * planeColors - the color planes in the noise profile output.
948 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
949 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
950 *
951 * returns OK, or a negative error code on failure.
952 */
generateNoiseProfile(const double * perChannelNoiseProfile,uint8_t * cfa,size_t numChannels,const uint8_t * planeColors,size_t numPlanes,double * noiseProfile)953 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
954 size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
955 /*out*/double* noiseProfile) {
956
957 for (size_t p = 0; p < numPlanes; ++p) {
958 size_t S = p * 2;
959 size_t O = p * 2 + 1;
960
961 noiseProfile[S] = 0;
962 noiseProfile[O] = 0;
963 bool uninitialized = true;
964 for (size_t c = 0; c < numChannels; ++c) {
965 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
966 noiseProfile[S] = perChannelNoiseProfile[c * 2];
967 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
968 uninitialized = false;
969 }
970 }
971 if (uninitialized) {
972 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
973 __FUNCTION__, p);
974 return BAD_VALUE;
975 }
976 }
977 return OK;
978 }
979
/**
 * Apply the lens distortion model to the point (x, y), in place.
 *
 * distortion - six coefficients: radial terms k0..k3 at indices 0-3, tangential
 *              terms p1, p2 at indices 4-5.
 * (cx, cy)   - optical center, in pixels.
 * f          - focal length used to normalize the coordinates.
 */
static void undistort(/*inout*/double& x, /*inout*/double& y,
        const std::array<float, 6>& distortion,
        const float cx, const float cy, const float f) {
    // Normalize to optical-center-relative coordinates in focal-length units.
    const double nx = (x - cx) / f;
    const double ny = (y - cy) / f;

    const double nx2 = nx * nx;
    const double ny2 = ny * ny;
    const double r2 = nx2 + ny2;
    const double twoXY = 2.0 * nx * ny;

    // Radial factor k0 + k1*r^2 + k2*r^4 + k3*r^6, evaluated Horner-style.
    const double radial =
            distortion[0] + ((distortion[3] * r2 + distortion[2]) * r2 + distortion[1]) * r2;

    // Tangential terms use p1 = distortion[4] and p2 = distortion[5].
    const double dx = nx * radial + distortion[4] * twoXY + distortion[5] * (r2 + 2.0 * nx2);
    const double dy = ny * radial + distortion[4] * (r2 + 2.0 * ny2) + distortion[5] * twoXY;

    // Map back to pixel coordinates.
    x = dx * f + cx;
    y = dy * f + cy;
}
1006
unDistortWithinPreCorrArray(double x,double y,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1007 static inline bool unDistortWithinPreCorrArray(
1008 double x, double y,
1009 const std::array<float, 6>& distortion,
1010 const float cx, const float cy, const float f,
1011 const int preCorrW, const int preCorrH, const int xMin, const int yMin) {
1012 undistort(x, y, distortion, cx, cy, f);
1013 // xMin and yMin are inclusive, and xMax and yMax are exclusive.
1014 int xMax = xMin + preCorrW;
1015 int yMax = yMin + preCorrH;
1016 if (x < xMin || y < yMin || x >= xMax || y >= yMax) {
1017 return false;
1018 }
1019 return true;
1020 }
1021
boxWithinPrecorrectionArray(int left,int top,int right,int bottom,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1022 static inline bool boxWithinPrecorrectionArray(
1023 int left, int top, int right, int bottom,
1024 const std::array<float, 6>& distortion,
1025 const float cx, const float cy, const float f,
1026 const int preCorrW, const int preCorrH, const int xMin, const int yMin){
1027 // Top row
1028 if (!unDistortWithinPreCorrArray(left, top,
1029 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1030 return false;
1031 }
1032
1033 if (!unDistortWithinPreCorrArray(cx, top,
1034 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1035 return false;
1036 }
1037
1038 if (!unDistortWithinPreCorrArray(right, top,
1039 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1040 return false;
1041 }
1042
1043 // Middle row
1044 if (!unDistortWithinPreCorrArray(left, cy,
1045 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1046 return false;
1047 }
1048
1049 if (!unDistortWithinPreCorrArray(right, cy,
1050 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1051 return false;
1052 }
1053
1054 // Bottom row
1055 if (!unDistortWithinPreCorrArray(left, bottom,
1056 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1057 return false;
1058 }
1059
1060 if (!unDistortWithinPreCorrArray(cx, bottom,
1061 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1062 return false;
1063 }
1064
1065 if (!unDistortWithinPreCorrArray(right, bottom,
1066 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1067 return false;
1068 }
1069 return true;
1070 }
1071
scaledBoxWithinPrecorrectionArray(double scale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1072 static inline bool scaledBoxWithinPrecorrectionArray(
1073 double scale/*must be <= 1.0*/,
1074 const std::array<float, 6>& distortion,
1075 const float cx, const float cy, const float f,
1076 const int preCorrW, const int preCorrH,
1077 const int xMin, const int yMin){
1078
1079 double left = cx * (1.0 - scale);
1080 double right = (preCorrW - 1) * scale + cx * (1.0 - scale);
1081 double top = cy * (1.0 - scale);
1082 double bottom = (preCorrH - 1) * scale + cy * (1.0 - scale);
1083
1084 return boxWithinPrecorrectionArray(left, top, right, bottom,
1085 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin);
1086 }
1087
findPostCorrectionScale(double stepSize,double minScale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin,double * outScale)1088 static status_t findPostCorrectionScale(
1089 double stepSize, double minScale,
1090 const std::array<float, 6>& distortion,
1091 const float cx, const float cy, const float f,
1092 const int preCorrW, const int preCorrH, const int xMin, const int yMin,
1093 /*out*/ double* outScale) {
1094 if (outScale == nullptr) {
1095 ALOGE("%s: outScale must not be null", __FUNCTION__);
1096 return BAD_VALUE;
1097 }
1098
1099 for (double scale = 1.0; scale > minScale; scale -= stepSize) {
1100 if (scaledBoxWithinPrecorrectionArray(
1101 scale, distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1102 *outScale = scale;
1103 return OK;
1104 }
1105 }
1106 ALOGE("%s: cannot find cropping scale for lens distortion: stepSize %f, minScale %f",
1107 __FUNCTION__, stepSize, minScale);
1108 return BAD_VALUE;
1109 }
1110
1111 // Apply a scale factor to distortion coefficients so that the image is zoomed out and all pixels
1112 // are sampled within the precorrection array
normalizeLensDistortion(std::array<float,6> & distortion,float cx,float cy,float f,int preCorrW,int preCorrH,int xMin=0,int yMin=0)1113 static void normalizeLensDistortion(
1114 /*inout*/std::array<float, 6>& distortion,
1115 float cx, float cy, float f, int preCorrW, int preCorrH, int xMin = 0, int yMin = 0) {
1116 ALOGV("%s: distortion [%f, %f, %f, %f, %f, %f], (cx,cy) (%f, %f), f %f, (W,H) (%d, %d)"
1117 ", (xmin, ymin, xmax, ymax) (%d, %d, %d, %d)",
1118 __FUNCTION__, distortion[0], distortion[1], distortion[2],
1119 distortion[3], distortion[4], distortion[5],
1120 cx, cy, f, preCorrW, preCorrH,
1121 xMin, yMin, xMin + preCorrW - 1, yMin + preCorrH - 1);
1122
1123 // Only update distortion coeffients if we can find a good bounding box
1124 double scale = 1.0;
1125 if (OK == findPostCorrectionScale(0.002, 0.5,
1126 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin,
1127 /*out*/&scale)) {
1128 ALOGV("%s: scaling distortion coefficients by %f", __FUNCTION__, scale);
1129 // The formula:
1130 // xc = xi * (k0 + k1*r^2 + k2*r^4 + k3*r^6) + k4 * (2*xi*yi) + k5 * (r^2 + 2*xi^2)
1131 // To create effective zoom we want to replace xi by xi *m, yi by yi*m and r^2 by r^2*m^2
1132 // Factor the extra m power terms into k0~k6
1133 std::array<float, 6> scalePowers = {1, 3, 5, 7, 2, 2};
1134 for (size_t i = 0; i < 6; i++) {
1135 distortion[i] *= pow(scale, scalePowers[i]);
1136 }
1137 }
1138 return;
1139 }
1140
1141 // ----------------------------------------------------------------------------
1142 extern "C" {
1143
DngCreator_getNativeContext(JNIEnv * env,jobject thiz)1144 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
1145 ALOGV("%s:", __FUNCTION__);
1146 return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
1147 gDngCreatorClassInfo.mNativeContext));
1148 }
1149
/**
 * Store a strong reference to |context| in the Java object's mNativeContext field,
 * releasing the strong reference to any previously stored context. Passing a null
 * context clears the field and drops the old reference.
 */
static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
    ALOGV("%s:", __FUNCTION__);
    NativeContext* current = DngCreator_getNativeContext(env, thiz);

    // Take the new reference BEFORE dropping the old one, so that setting the
    // same context twice cannot transiently drop its refcount to zero.
    if (context != nullptr) {
        context->incStrong((void*) DngCreator_setNativeContext);
    }

    if (current) {
        current->decStrong((void*) DngCreator_setNativeContext);
    }

    // The raw pointer is stashed in a Java long; the incStrong above keeps it alive.
    env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
            reinterpret_cast<jlong>(context.get()));
}
1165
DngCreator_nativeClassInit(JNIEnv * env,jclass clazz)1166 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
1167 ALOGV("%s:", __FUNCTION__);
1168
1169 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
1170 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
1171
1172 jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
1173 gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
1174 outputStreamClazz, "write", "([BII)V");
1175
1176 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
1177 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
1178 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1179
1180 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1181 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1182 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1183 }
1184
DngCreator_init(JNIEnv * env,jobject thiz,jobject characteristicsPtr,jobject resultsPtr,jstring formattedCaptureTime)1185 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1186 jobject resultsPtr, jstring formattedCaptureTime) {
1187 ALOGV("%s:", __FUNCTION__);
1188 CameraMetadata characteristics;
1189 CameraMetadata results;
1190 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1191 jniThrowException(env, "java/lang/AssertionError",
1192 "No native metadata defined for camera characteristics.");
1193 return;
1194 }
1195 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1196 jniThrowException(env, "java/lang/AssertionError",
1197 "No native metadata defined for capture results.");
1198 return;
1199 }
1200
1201 sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1202
1203 const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr);
1204
1205 size_t len = strlen(captureTime) + 1;
1206 if (len != NativeContext::DATETIME_COUNT) {
1207 jniThrowException(env, "java/lang/IllegalArgumentException",
1208 "Formatted capture time string length is not required 20 characters");
1209 return;
1210 }
1211
1212 nativeContext->setCaptureTime(String8(captureTime));
1213
1214 DngCreator_setNativeContext(env, thiz, nativeContext);
1215 }
1216
DngCreator_setup(JNIEnv * env,jobject thiz,uint32_t imageWidth,uint32_t imageHeight)1217 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1218 uint32_t imageHeight) {
1219
1220 NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1221
1222 if (nativeContext == nullptr) {
1223 jniThrowException(env, "java/lang/AssertionError",
1224 "No native context, must call init before other operations.");
1225 return nullptr;
1226 }
1227
1228 CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1229 CameraMetadata results = *(nativeContext->getResult());
1230
1231 sp<TiffWriter> writer = new TiffWriter();
1232
1233 uint32_t preXMin = 0;
1234 uint32_t preYMin = 0;
1235 uint32_t preWidth = 0;
1236 uint32_t preHeight = 0;
1237 uint8_t colorFilter = 0;
1238 bool isBayer = true;
1239 {
1240 // Check dimensions
1241 camera_metadata_entry entry =
1242 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1243 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1244 preXMin = static_cast<uint32_t>(entry.data.i32[0]);
1245 preYMin = static_cast<uint32_t>(entry.data.i32[1]);
1246 preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1247 preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1248
1249 camera_metadata_entry pixelArrayEntry =
1250 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
1251 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1252 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1253
1254 if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1255 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1256 jniThrowException(env, "java/lang/AssertionError",
1257 "Height and width of image buffer did not match height and width of"
1258 "either the preCorrectionActiveArraySize or the pixelArraySize.");
1259 return nullptr;
1260 }
1261
1262 camera_metadata_entry colorFilterEntry =
1263 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1264 colorFilter = colorFilterEntry.data.u8[0];
1265 camera_metadata_entry capabilitiesEntry =
1266 characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1267 size_t capsCount = capabilitiesEntry.count;
1268 uint8_t* caps = capabilitiesEntry.data.u8;
1269 if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
1270 != caps+capsCount) {
1271 isBayer = false;
1272 } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
1273 colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
1274 jniThrowException(env, "java/lang/AssertionError",
1275 "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
1276 return nullptr;
1277 }
1278 }
1279
1280 writer->addIfd(TIFF_IFD_0);
1281
1282 status_t err = OK;
1283
1284 const uint32_t samplesPerPixel = 1;
1285 const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1286
1287 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
1288 uint8_t cfaPlaneColor[3] = {0, 1, 2};
1289 camera_metadata_entry cfaEntry =
1290 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1291 BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
1292 uint8_t cfaEnum = cfaEntry.data.u8[0];
1293
1294 // TODO: Greensplit.
1295 // TODO: Add remaining non-essential tags
1296
1297 // Setup main image tags
1298
1299 {
1300 // Set orientation
1301 uint16_t orientation = TAG_ORIENTATION_NORMAL;
1302 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1303 env, TAG_ORIENTATION, writer);
1304 }
1305
1306 {
1307 // Set subfiletype
1308 uint32_t subfileType = 0; // Main image
1309 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1310 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1311 }
1312
1313 {
1314 // Set bits per sample
1315 uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1316 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1317 TAG_BITSPERSAMPLE, writer);
1318 }
1319
1320 {
1321 // Set compression
1322 uint16_t compression = 1; // None
1323 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1324 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1325 }
1326
1327 {
1328 // Set dimensions
1329 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1330 env, TAG_IMAGEWIDTH, writer);
1331 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1332 env, TAG_IMAGELENGTH, writer);
1333 }
1334
1335 {
1336 // Set photometric interpretation
1337 uint16_t interpretation = isBayer ? 32803 /* CFA */ :
1338 34892; /* Linear Raw */;
1339 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1340 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1341 }
1342
1343 {
1344 uint16_t repeatDim[2] = {2, 2};
1345 if (!isBayer) {
1346 repeatDim[0] = repeatDim[1] = 1;
1347 }
1348 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1349 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1350
1351 // Set blacklevel tags, using dynamic black level if available
1352 camera_metadata_entry entry =
1353 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
1354 uint32_t blackLevelRational[8] = {0};
1355 if (entry.count != 0) {
1356 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1357 for (size_t i = 0; i < entry.count; i++) {
1358 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
1359 blackLevelRational[i * 2 + 1] = 100;
1360 }
1361 } else {
1362 // Fall back to static black level which is guaranteed
1363 entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1364 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1365 for (size_t i = 0; i < entry.count; i++) {
1366 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
1367 blackLevelRational[i * 2 + 1] = 1;
1368 }
1369 }
1370 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
1371 blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1372 }
1373
1374 {
1375 // Set samples per pixel
1376 uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1377 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1378 env, TAG_SAMPLESPERPIXEL, writer);
1379 }
1380
1381 {
1382 // Set planar configuration
1383 uint16_t config = 1; // Chunky
1384 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1385 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1386 }
1387
1388 // All CFA pattern tags are not necessary for monochrome cameras.
1389 if (isBayer) {
1390 // Set CFA pattern dimensions
1391 uint16_t repeatDim[2] = {2, 2};
1392 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1393 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1394
1395 // Set CFA pattern
1396 const int cfaLength = 4;
1397 uint8_t cfa[cfaLength];
1398 if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1399 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1400 "Invalid metadata for tag %d", TAG_CFAPATTERN);
1401 }
1402
1403 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1404 env, TAG_CFAPATTERN, writer);
1405
1406 opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1407
1408 // Set CFA plane color
1409 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1410 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1411
1412 // Set CFA layout
1413 uint16_t cfaLayout = 1;
1414 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1415 env, TAG_CFALAYOUT, writer);
1416 }
1417
1418 {
1419 // image description
1420 uint8_t imageDescription = '\0'; // empty
1421 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1422 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1423 }
1424
1425 {
1426 // make
1427 // Use "" to represent unknown make as suggested in TIFF/EP spec.
1428 std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1429 uint32_t count = static_cast<uint32_t>(manufacturer.size()) + 1;
1430
1431 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1432 reinterpret_cast<const uint8_t*>(manufacturer.c_str()), TIFF_IFD_0), env, TAG_MAKE,
1433 writer);
1434 }
1435
1436 {
1437 // model
1438 // Use "" to represent unknown model as suggested in TIFF/EP spec.
1439 std::string model = GetProperty("ro.product.model", "");
1440 uint32_t count = static_cast<uint32_t>(model.size()) + 1;
1441
1442 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1443 reinterpret_cast<const uint8_t*>(model.c_str()), TIFF_IFD_0), env, TAG_MODEL,
1444 writer);
1445 }
1446
1447 {
1448 // x resolution
1449 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1450 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1451 env, TAG_XRESOLUTION, writer);
1452
1453 // y resolution
1454 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1455 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1456 env, TAG_YRESOLUTION, writer);
1457
1458 uint16_t unit = 2; // inches
1459 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1460 env, TAG_RESOLUTIONUNIT, writer);
1461 }
1462
1463 {
1464 // software
1465 std::string software = GetProperty("ro.build.fingerprint", "");
1466 uint32_t count = static_cast<uint32_t>(software.size()) + 1;
1467 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1468 reinterpret_cast<const uint8_t*>(software.c_str()), TIFF_IFD_0), env, TAG_SOFTWARE,
1469 writer);
1470 }
1471
1472 if (nativeContext->hasCaptureTime()) {
1473 // datetime
1474 String8 captureTime = nativeContext->getCaptureTime();
1475
1476 if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
1477 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1478 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1479 "Invalid metadata for tag %x", TAG_DATETIME);
1480 return nullptr;
1481 }
1482
1483 // datetime original
1484 if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
1485 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1486 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1487 "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
1488 return nullptr;
1489 }
1490 }
1491
1492 {
1493 // TIFF/EP standard id
1494 uint8_t standardId[] = { 1, 0, 0, 0 };
1495 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
1496 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
1497 }
1498
1499 {
1500 // copyright
1501 uint8_t copyright = '\0'; // empty
1502 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, ©right,
1503 TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
1504 }
1505
1506 {
1507 // exposure time
1508 camera_metadata_entry entry =
1509 results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1510 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1511
1512 int64_t exposureTime = *(entry.data.i64);
1513
1514 if (exposureTime < 0) {
1515 // Should be unreachable
1516 jniThrowException(env, "java/lang/IllegalArgumentException",
1517 "Negative exposure time in metadata");
1518 return nullptr;
1519 }
1520
1521 // Ensure exposure time doesn't overflow (for exposures > 4s)
1522 uint32_t denominator = 1000000000;
1523 while (exposureTime > UINT32_MAX) {
1524 exposureTime >>= 1;
1525 denominator >>= 1;
1526 if (denominator == 0) {
1527 // Should be unreachable
1528 jniThrowException(env, "java/lang/IllegalArgumentException",
1529 "Exposure time too long");
1530 return nullptr;
1531 }
1532 }
1533
1534 uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1535 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1536 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1537
1538 }
1539
1540 {
1541 // ISO speed ratings
1542 camera_metadata_entry entry =
1543 results.find(ANDROID_SENSOR_SENSITIVITY);
1544 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1545
1546 int32_t tempIso = *(entry.data.i32);
1547 if (tempIso < 0) {
1548 jniThrowException(env, "java/lang/IllegalArgumentException",
1549 "Negative ISO value");
1550 return nullptr;
1551 }
1552
1553 if (tempIso > UINT16_MAX) {
1554 ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1555 tempIso = UINT16_MAX;
1556 }
1557
1558 uint16_t iso = static_cast<uint16_t>(tempIso);
1559 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1560 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1561 }
1562
1563 {
1564 // Baseline exposure
1565 camera_metadata_entry entry =
1566 results.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
1567 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
1568
1569 // post RAW gain should be boostValue / 100
1570 double postRAWGain = static_cast<double> (entry.data.i32[0]) / 100.f;
1571 // Baseline exposure should be in EV units so log2(gain) =
1572 // log10(gain)/log10(2)
1573 double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
1574 int32_t baseExposureSRat[] = { static_cast<int32_t> (baselineExposure * 100),
1575 100 };
1576 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
1577 baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
1578 }
1579
1580 {
1581 // focal length
1582 camera_metadata_entry entry =
1583 results.find(ANDROID_LENS_FOCAL_LENGTH);
1584 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1585
1586 uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1587 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1588 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1589 }
1590
1591 {
1592 // f number
1593 camera_metadata_entry entry =
1594 results.find(ANDROID_LENS_APERTURE);
1595 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1596
1597 uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1598 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1599 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1600 }
1601
1602 {
1603 // Set DNG version information
1604 uint8_t version[4] = {1, 4, 0, 0};
1605 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1606 env, TAG_DNGVERSION, writer);
1607
1608 uint8_t backwardVersion[4] = {1, 1, 0, 0};
1609 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1610 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1611 }
1612
1613 {
1614 // Set whitelevel
1615 camera_metadata_entry entry =
1616 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1617 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1618 uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1619 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1620 env, TAG_WHITELEVEL, writer);
1621 }
1622
1623 {
1624 // Set default scale
1625 uint32_t defaultScale[4] = {1, 1, 1, 1};
1626 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1627 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1628 }
1629
1630 bool singleIlluminant = false;
1631 if (isBayer) {
1632 // Set calibration illuminants
1633 camera_metadata_entry entry1 =
1634 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1635 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
1636 camera_metadata_entry entry2 =
1637 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1638 if (entry2.count == 0) {
1639 singleIlluminant = true;
1640 }
1641 uint16_t ref1 = entry1.data.u8[0];
1642
1643 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
1644 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
1645
1646 if (!singleIlluminant) {
1647 uint16_t ref2 = entry2.data.u8[0];
1648 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
1649 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
1650 }
1651 }
1652
1653 if (isBayer) {
1654 // Set color transforms
1655 camera_metadata_entry entry1 =
1656 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
1657 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
1658
1659 int32_t colorTransform1[entry1.count * 2];
1660
1661 size_t ctr = 0;
1662 for(size_t i = 0; i < entry1.count; ++i) {
1663 colorTransform1[ctr++] = entry1.data.r[i].numerator;
1664 colorTransform1[ctr++] = entry1.data.r[i].denominator;
1665 }
1666
1667 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
1668 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
1669
1670 if (!singleIlluminant) {
1671 camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
1672 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
1673 int32_t colorTransform2[entry2.count * 2];
1674
1675 ctr = 0;
1676 for(size_t i = 0; i < entry2.count; ++i) {
1677 colorTransform2[ctr++] = entry2.data.r[i].numerator;
1678 colorTransform2[ctr++] = entry2.data.r[i].denominator;
1679 }
1680
1681 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
1682 colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
1683 }
1684 }
1685
1686 if (isBayer) {
1687 // Set calibration transforms
1688 camera_metadata_entry entry1 =
1689 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1690 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
1691
1692 int32_t calibrationTransform1[entry1.count * 2];
1693
1694 size_t ctr = 0;
1695 for(size_t i = 0; i < entry1.count; ++i) {
1696 calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
1697 calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
1698 }
1699
1700 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
1701 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
1702
1703 if (!singleIlluminant) {
1704 camera_metadata_entry entry2 =
1705 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1706 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
1707 int32_t calibrationTransform2[entry2.count * 2];
1708
1709 ctr = 0;
1710 for(size_t i = 0; i < entry2.count; ++i) {
1711 calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
1712 calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
1713 }
1714
1715 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
1716 calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
1717 }
1718 }
1719
1720 if (isBayer) {
1721 // Set forward transforms
1722 camera_metadata_entry entry1 =
1723 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
1724 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
1725
1726 int32_t forwardTransform1[entry1.count * 2];
1727
1728 size_t ctr = 0;
1729 for(size_t i = 0; i < entry1.count; ++i) {
1730 forwardTransform1[ctr++] = entry1.data.r[i].numerator;
1731 forwardTransform1[ctr++] = entry1.data.r[i].denominator;
1732 }
1733
1734 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
1735 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
1736
1737 if (!singleIlluminant) {
1738 camera_metadata_entry entry2 =
1739 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
1740 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
1741 int32_t forwardTransform2[entry2.count * 2];
1742
1743 ctr = 0;
1744 for(size_t i = 0; i < entry2.count; ++i) {
1745 forwardTransform2[ctr++] = entry2.data.r[i].numerator;
1746 forwardTransform2[ctr++] = entry2.data.r[i].denominator;
1747 }
1748
1749 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
1750 forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
1751 }
1752 }
1753
1754 if (isBayer) {
1755 // Set camera neutral
1756 camera_metadata_entry entry =
1757 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1758 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
1759 uint32_t cameraNeutral[entry.count * 2];
1760
1761 size_t ctr = 0;
1762 for(size_t i = 0; i < entry.count; ++i) {
1763 cameraNeutral[ctr++] =
1764 static_cast<uint32_t>(entry.data.r[i].numerator);
1765 cameraNeutral[ctr++] =
1766 static_cast<uint32_t>(entry.data.r[i].denominator);
1767 }
1768
1769 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
1770 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
1771 }
1772
1773
1774 {
1775 // Set dimensions
1776 if (calculateAndSetCrop(env, characteristics, writer) != OK) {
1777 return nullptr;
1778 }
1779 camera_metadata_entry entry =
1780 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1781 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1782 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1783 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1784 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1785 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1786
1787 // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1788 // relative to the pixel array.
1789 if (imageWidth == width && imageHeight == height) {
1790 xmin = 0;
1791 ymin = 0;
1792 }
1793
1794 uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1795 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1796 env, TAG_ACTIVEAREA, writer);
1797 }
1798
1799 {
1800 // Setup unique camera model tag
1801 std::string model = GetProperty("ro.product.model", "");
1802 std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1803 std::string brand = GetProperty("ro.product.brand", "");
1804
1805 String8 cameraModel(model.c_str());
1806 cameraModel += "-";
1807 cameraModel += manufacturer.c_str();
1808 cameraModel += "-";
1809 cameraModel += brand.c_str();
1810
1811 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1812 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
1813 TAG_UNIQUECAMERAMODEL, writer);
1814 }
1815
1816 {
1817 // Setup sensor noise model
1818 camera_metadata_entry entry =
1819 results.find(ANDROID_SENSOR_NOISE_PROFILE);
1820
1821 const status_t numPlaneColors = isBayer ? 3 : 1;
1822 const status_t numCfaChannels = isBayer ? 4 : 1;
1823
1824 uint8_t cfaOut[numCfaChannels];
1825 if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1826 jniThrowException(env, "java/lang/IllegalArgumentException",
1827 "Invalid CFA from camera characteristics");
1828 return nullptr;
1829 }
1830
1831 double noiseProfile[numPlaneColors * 2];
1832
1833 if (entry.count > 0) {
1834 if (entry.count != numCfaChannels * 2) {
1835 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1836 "in characteristics, no noise profile tag written...",
1837 __FUNCTION__, entry.count);
1838 } else {
1839 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1840 cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1841
1842 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1843 numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1844 writer);
1845 } else {
1846 ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1847 " tag written...", __FUNCTION__);
1848 }
1849 }
1850 } else {
1851 ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
1852 __FUNCTION__);
1853 }
1854 }
1855
1856 {
1857 // Set up opcode List 2
1858 OpcodeListBuilder builder;
1859 status_t err = OK;
1860
1861 // Set up lens shading map
1862 camera_metadata_entry entry1 =
1863 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1864
1865 uint32_t lsmWidth = 0;
1866 uint32_t lsmHeight = 0;
1867
1868 if (entry1.count != 0) {
1869 lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1870 lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1871 }
1872
1873 camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1874
1875 camera_metadata_entry entry =
1876 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1877 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1878 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1879 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1880 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1881 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1882 if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1883 // GainMap rectangle is relative to the active area origin.
1884 err = builder.addGainMapsForMetadata(lsmWidth,
1885 lsmHeight,
1886 0,
1887 0,
1888 height,
1889 width,
1890 opcodeCfaLayout,
1891 entry2.data.f);
1892 if (err != OK) {
1893 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1894 jniThrowRuntimeException(env, "failed to add lens shading map.");
1895 return nullptr;
1896 }
1897 }
1898
1899 // Hot pixel map is specific to bayer camera per DNG spec.
1900 if (isBayer) {
1901 // Set up bad pixel correction list
1902 camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1903
1904 if ((entry3.count % 2) != 0) {
1905 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
1906 __FUNCTION__);
1907 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1908 return nullptr;
1909 }
1910
1911 // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
1912 std::vector<uint32_t> v;
1913 for (size_t i = 0; i < entry3.count; i += 2) {
1914 int32_t x = entry3.data.i32[i];
1915 int32_t y = entry3.data.i32[i + 1];
1916 x -= static_cast<int32_t>(xmin);
1917 y -= static_cast<int32_t>(ymin);
1918 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
1919 static_cast<uint32_t>(y) >= height) {
1920 continue;
1921 }
1922 v.push_back(x);
1923 v.push_back(y);
1924 }
1925 const uint32_t* badPixels = &v[0];
1926 uint32_t badPixelCount = v.size();
1927
1928 if (badPixelCount > 0) {
1929 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
1930
1931 if (err != OK) {
1932 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
1933 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1934 return nullptr;
1935 }
1936 }
1937 }
1938
1939 if (builder.getCount() > 0) {
1940 size_t listSize = builder.getSize();
1941 uint8_t opcodeListBuf[listSize];
1942 err = builder.buildOpList(opcodeListBuf);
1943 if (err == OK) {
1944 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize,
1945 opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
1946 } else {
1947 ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel "
1948 "correction.", __FUNCTION__);
1949 jniThrowRuntimeException(env, "failed to construct opcode list for lens shading "
1950 "map and bad pixel correction");
1951 return nullptr;
1952 }
1953 }
1954 }
1955
1956 {
1957 // Set up opcode List 3
1958 OpcodeListBuilder builder;
1959 status_t err = OK;
1960
1961 // Set up rectilinear distortion correction
1962 std::array<float, 6> distortion = {1.f, 0.f, 0.f, 0.f, 0.f, 0.f};
1963 bool gotDistortion = false;
1964
1965 camera_metadata_entry entry4 =
1966 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
1967
1968 if (entry4.count == 5) {
1969 float cx = entry4.data.f[/*c_x*/2];
1970 float cy = entry4.data.f[/*c_y*/3];
1971 // Assuming f_x = f_y, or at least close enough.
1972 // Also assuming s = 0, or at least close enough.
1973 float f = entry4.data.f[/*f_x*/0];
1974
1975 camera_metadata_entry entry3 =
1976 results.find(ANDROID_LENS_DISTORTION);
1977 if (entry3.count == 5) {
1978 gotDistortion = true;
1979
1980 // Scale the distortion coefficients to create a zoom in warpped image so that all
1981 // pixels are drawn within input image.
1982 for (size_t i = 0; i < entry3.count; i++) {
1983 distortion[i+1] = entry3.data.f[i];
1984 }
1985
1986 if (preWidth == imageWidth && preHeight == imageHeight) {
1987 normalizeLensDistortion(distortion, cx, cy, f, preWidth, preHeight);
1988 } else {
1989 // image size == pixel array size (contains optical black pixels)
1990 // cx/cy is defined in preCorrArray so adding the offset
1991 // Also changes default xmin/ymin so that pixels are only
1992 // sampled within preCorrection array
1993 normalizeLensDistortion(
1994 distortion, cx + preXMin, cy + preYMin, f, preWidth, preHeight,
1995 preXMin, preYMin);
1996 }
1997
1998 float m_x = std::fmaxf(preWidth - cx, cx);
1999 float m_y = std::fmaxf(preHeight - cy, cy);
2000 float m_sq = m_x*m_x + m_y*m_y;
2001 float m = sqrtf(m_sq); // distance to farthest corner from optical center
2002 float f_sq = f * f;
2003 // Conversion factors from Camera2 K factors for new LENS_DISTORTION field
2004 // to DNG spec.
2005 //
2006 // Camera2 / OpenCV assume distortion is applied in a space where focal length
2007 // is factored out, while DNG assumes a normalized space where the distance
2008 // from optical center to the farthest corner is 1.
2009 // Scale from camera2 to DNG spec accordingly.
2010 // distortion[0] is always 1 with the new LENS_DISTORTION field.
2011 const double convCoeff[5] = {
2012 m_sq / f_sq,
2013 pow(m_sq, 2) / pow(f_sq, 2),
2014 pow(m_sq, 3) / pow(f_sq, 3),
2015 m / f,
2016 m / f
2017 };
2018 for (size_t i = 0; i < entry3.count; i++) {
2019 distortion[i+1] *= convCoeff[i];
2020 }
2021 } else {
2022 entry3 = results.find(ANDROID_LENS_RADIAL_DISTORTION);
2023 if (entry3.count == 6) {
2024 gotDistortion = true;
2025 // Conversion factors from Camera2 K factors to DNG spec. K factors:
2026 //
2027 // Note: these are necessary because our unit system assumes a
2028 // normalized max radius of sqrt(2), whereas the DNG spec's
2029 // WarpRectilinear opcode assumes a normalized max radius of 1.
2030 // Thus, each K coefficient must include the domain scaling
2031 // factor (the DNG domain is scaled by sqrt(2) to emulate the
2032 // domain used by the Camera2 specification).
2033 const double convCoeff[6] = {
2034 sqrt(2),
2035 2 * sqrt(2),
2036 4 * sqrt(2),
2037 8 * sqrt(2),
2038 2,
2039 2
2040 };
2041 for (size_t i = 0; i < entry3.count; i++) {
2042 distortion[i] = entry3.data.f[i] * convCoeff[i];
2043 }
2044 }
2045 }
2046 if (gotDistortion) {
2047 err = builder.addWarpRectilinearForMetadata(
2048 distortion.data(), preWidth, preHeight, cx, cy);
2049 if (err != OK) {
2050 ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
2051 jniThrowRuntimeException(env, "failed to add distortion correction.");
2052 return nullptr;
2053 }
2054 }
2055 }
2056
2057 if (builder.getCount() > 0) {
2058 size_t listSize = builder.getSize();
2059 uint8_t opcodeListBuf[listSize];
2060 err = builder.buildOpList(opcodeListBuf);
2061 if (err == OK) {
2062 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize,
2063 opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
2064 } else {
2065 ALOGE("%s: Could not build list of opcodes for distortion correction.",
2066 __FUNCTION__);
2067 jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
2068 " correction");
2069 return nullptr;
2070 }
2071 }
2072 }
2073
2074 {
2075 // Set up orientation tags.
2076 // Note: There's only one orientation field for the whole file, in IFD0
2077 // The main image and any thumbnails therefore have the same orientation.
2078 uint16_t orientation = nativeContext->getOrientation();
2079 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
2080 env, TAG_ORIENTATION, writer);
2081
2082 }
2083
2084 if (nativeContext->hasDescription()){
2085 // Set Description
2086 String8 description = nativeContext->getDescription();
2087 size_t len = description.bytes() + 1;
2088 if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
2089 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
2090 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
2091 "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
2092 }
2093 }
2094
2095 if (nativeContext->hasGpsData()) {
2096 // Set GPS tags
2097 GpsData gpsData = nativeContext->getGpsData();
2098 if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
2099 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
2100 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
2101 TIFF_IFD_0);
2102 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
2103 return nullptr;
2104 }
2105 }
2106
2107 {
2108 uint8_t version[] = {2, 3, 0, 0};
2109 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
2110 TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
2111 }
2112
2113 {
2114 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
2115 GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
2116 TAG_GPSLATITUDEREF, writer);
2117 }
2118
2119 {
2120 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
2121 GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
2122 TAG_GPSLONGITUDEREF, writer);
2123 }
2124
2125 {
2126 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
2127 TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
2128 }
2129
2130 {
2131 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
2132 TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
2133 }
2134
2135 {
2136 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
2137 TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
2138 }
2139
2140 {
2141 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
2142 GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
2143 TAG_GPSDATESTAMP, writer);
2144 }
2145 }
2146
2147
2148 if (nativeContext->hasThumbnail()) {
2149 if (!writer->hasIfd(TIFF_IFD_SUB1)) {
2150 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
2151 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
2152 TIFF_IFD_0);
2153 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
2154 return nullptr;
2155 }
2156 }
2157
2158 Vector<uint16_t> tagsToMove;
2159 tagsToMove.add(TAG_NEWSUBFILETYPE);
2160 tagsToMove.add(TAG_ACTIVEAREA);
2161 tagsToMove.add(TAG_BITSPERSAMPLE);
2162 tagsToMove.add(TAG_COMPRESSION);
2163 tagsToMove.add(TAG_IMAGEWIDTH);
2164 tagsToMove.add(TAG_IMAGELENGTH);
2165 tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION);
2166 tagsToMove.add(TAG_BLACKLEVEL);
2167 tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
2168 tagsToMove.add(TAG_SAMPLESPERPIXEL);
2169 tagsToMove.add(TAG_PLANARCONFIGURATION);
2170 if (isBayer) {
2171 tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
2172 tagsToMove.add(TAG_CFAPATTERN);
2173 tagsToMove.add(TAG_CFAPLANECOLOR);
2174 tagsToMove.add(TAG_CFALAYOUT);
2175 }
2176 tagsToMove.add(TAG_XRESOLUTION);
2177 tagsToMove.add(TAG_YRESOLUTION);
2178 tagsToMove.add(TAG_RESOLUTIONUNIT);
2179 tagsToMove.add(TAG_WHITELEVEL);
2180 tagsToMove.add(TAG_DEFAULTSCALE);
2181 tagsToMove.add(TAG_DEFAULTCROPORIGIN);
2182 tagsToMove.add(TAG_DEFAULTCROPSIZE);
2183
2184 if (nullptr != writer->getEntry(TAG_OPCODELIST2, TIFF_IFD_0).get()) {
2185 tagsToMove.add(TAG_OPCODELIST2);
2186 }
2187
2188 if (nullptr != writer->getEntry(TAG_OPCODELIST3, TIFF_IFD_0).get()) {
2189 tagsToMove.add(TAG_OPCODELIST3);
2190 }
2191
2192 if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
2193 jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
2194 return nullptr;
2195 }
2196
2197 // Setup thumbnail tags
2198
2199 {
2200 // Set photometric interpretation
2201 uint16_t interpretation = 2; // RGB
2202 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
2203 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
2204 }
2205
2206 {
2207 // Set planar configuration
2208 uint16_t config = 1; // Chunky
2209 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
2210 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
2211 }
2212
2213 {
2214 // Set samples per pixel
2215 uint16_t samples = SAMPLES_PER_RGB_PIXEL;
2216 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
2217 TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
2218 }
2219
2220 {
2221 // Set bits per sample
2222 uint16_t bits[SAMPLES_PER_RGB_PIXEL];
2223 for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
2224 BAIL_IF_INVALID_RET_NULL_SP(
2225 writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_0),
2226 env, TAG_BITSPERSAMPLE, writer);
2227 }
2228
2229 {
2230 // Set subfiletype
2231 uint32_t subfileType = 1; // Thumbnail image
2232 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
2233 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
2234 }
2235
2236 {
2237 // Set compression
2238 uint16_t compression = 1; // None
2239 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
2240 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
2241 }
2242
2243 {
2244 // Set dimensions
2245 uint32_t uWidth = nativeContext->getThumbnailWidth();
2246 uint32_t uHeight = nativeContext->getThumbnailHeight();
2247 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
2248 env, TAG_IMAGEWIDTH, writer);
2249 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
2250 env, TAG_IMAGELENGTH, writer);
2251 }
2252
2253 {
2254 // x resolution
2255 uint32_t xres[] = { 72, 1 }; // default 72 ppi
2256 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
2257 env, TAG_XRESOLUTION, writer);
2258
2259 // y resolution
2260 uint32_t yres[] = { 72, 1 }; // default 72 ppi
2261 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
2262 env, TAG_YRESOLUTION, writer);
2263
2264 uint16_t unit = 2; // inches
2265 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
2266 env, TAG_RESOLUTIONUNIT, writer);
2267 }
2268 }
2269
2270 if (writer->addStrip(TIFF_IFD_0) != OK) {
2271 ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__);
2272 jniThrowException(env, "java/lang/IllegalStateException",
2273 "Failed to setup thumbnail strip tags.");
2274 return nullptr;
2275 }
2276
2277 if (writer->hasIfd(TIFF_IFD_SUB1)) {
2278 if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
2279 ALOGE("%s: Could not main image strip tags.", __FUNCTION__);
2280 jniThrowException(env, "java/lang/IllegalStateException",
2281 "Failed to setup main image strip tags.");
2282 return nullptr;
2283 }
2284 }
2285 return writer;
2286 }
2287
// Releases the native context attached to this DngCreator Java object.
// Setting the context to nullptr drops the strong reference held in the
// Java-side field; the refcounted NativeContext is freed when the last
// reference goes away.
static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
    ALOGV("%s:", __FUNCTION__);
    DngCreator_setNativeContext(env, thiz, nullptr);
}
2292
DngCreator_nativeSetOrientation(JNIEnv * env,jobject thiz,jint orient)2293 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
2294 ALOGV("%s:", __FUNCTION__);
2295
2296 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2297 if (context == nullptr) {
2298 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2299 jniThrowException(env, "java/lang/AssertionError",
2300 "setOrientation called with uninitialized DngCreator");
2301 return;
2302 }
2303
2304 uint16_t orientation = static_cast<uint16_t>(orient);
2305 context->setOrientation(orientation);
2306 }
2307
DngCreator_nativeSetDescription(JNIEnv * env,jobject thiz,jstring description)2308 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2309 ALOGV("%s:", __FUNCTION__);
2310
2311 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2312 if (context == nullptr) {
2313 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2314 jniThrowException(env, "java/lang/AssertionError",
2315 "setDescription called with uninitialized DngCreator");
2316 return;
2317 }
2318
2319 const char* desc = env->GetStringUTFChars(description, nullptr);
2320 context->setDescription(String8(desc));
2321 env->ReleaseStringUTFChars(description, desc);
2322 }
2323
// Copies GPS location/timestamp data from the Java side into a GpsData struct
// and stores it on the native context so the GPSInfo IFD can be populated when
// the DNG is written.
//
// latTag/longTag/timeTag must each contain exactly GpsData::GPS_VALUE_LENGTH
// int entries (stored as rational numerator/denominator pairs elsewhere in
// this file); latRef/longRef are reference strings of which only the first
// character is copied; dateTag supplies GPS_DATE_LENGTH - 1 characters.
// Throws IllegalArgumentException on bad array lengths, AssertionError if the
// native context is missing.
static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
        jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setGpsTags called with uninitialized DngCreator");
        return;
    }

    GpsData data;

    // Validate all three array lengths up front before copying anything.
    jsize latLen = env->GetArrayLength(latTag);
    jsize longLen = env->GetArrayLength(longTag);
    jsize timeLen = env->GetArrayLength(timeTag);
    if (latLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid latitude tag length");
        return;
    } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid longitude tag length");
        return;
    } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid time tag length");
        return;
    }

    // Bulk-copy the int arrays directly into the GpsData fields.
    // NOTE(review): the reinterpret_casts assume the m* members are arrays of
    // a 32-bit type laid out compatibly with jint — confirm in GpsData.
    env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLatitude));
    env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLongitude));
    env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mTimestamp));


    // Only the first character of each reference string is used; the buffers
    // are explicitly NUL-terminated afterwards since GetStringUTFRegion does
    // not append a terminator.
    env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
    data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
    data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
            reinterpret_cast<char*>(&data.mDate));
    data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';

    context->setGpsData(data);
}
2373
DngCreator_nativeSetThumbnail(JNIEnv * env,jobject thiz,jobject buffer,jint width,jint height)2374 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2375 jint height) {
2376 ALOGV("%s:", __FUNCTION__);
2377
2378 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2379 if (context == nullptr) {
2380 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2381 jniThrowException(env, "java/lang/AssertionError",
2382 "setThumbnail called with uninitialized DngCreator");
2383 return;
2384 }
2385
2386 size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2387 jlong capacity = env->GetDirectBufferCapacity(buffer);
2388 if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2389 jniThrowExceptionFmt(env, "java/lang/AssertionError",
2390 "Invalid size %d for thumbnail, expected size was %d",
2391 capacity, fullSize);
2392 return;
2393 }
2394
2395 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2396 if (pixelBytes == nullptr) {
2397 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2398 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2399 return;
2400 }
2401
2402 if (!context->setThumbnail(pixelBytes, width, height)) {
2403 jniThrowException(env, "java/lang/IllegalStateException",
2404 "Failed to set thumbnail.");
2405 return;
2406 }
2407 }
2408
2409 // TODO: Refactor out common preamble for the two nativeWrite methods.
DngCreator_nativeWriteImage(JNIEnv * env,jobject thiz,jobject outStream,jint width,jint height,jobject inBuffer,jint rowStride,jint pixStride,jlong offset,jboolean isDirect)2410 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2411 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2412 jboolean isDirect) {
2413 ALOGV("%s:", __FUNCTION__);
2414 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2415 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2416 height, rowStride, pixStride, offset);
2417 uint32_t rStride = static_cast<uint32_t>(rowStride);
2418 uint32_t pStride = static_cast<uint32_t>(pixStride);
2419 uint32_t uWidth = static_cast<uint32_t>(width);
2420 uint32_t uHeight = static_cast<uint32_t>(height);
2421 uint64_t uOffset = static_cast<uint64_t>(offset);
2422
2423 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2424 if(env->ExceptionCheck()) {
2425 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2426 return;
2427 }
2428
2429 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2430 if (context == nullptr) {
2431 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2432 jniThrowException(env, "java/lang/AssertionError",
2433 "Write called with uninitialized DngCreator");
2434 return;
2435 }
2436 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2437
2438 if (writer.get() == nullptr) {
2439 return;
2440 }
2441
2442 // Validate DNG size
2443 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2444 return;
2445 }
2446
2447 sp<JniInputByteBuffer> inBuf;
2448 Vector<StripSource*> sources;
2449 sp<DirectStripSource> thumbnailSource;
2450 uint32_t targetIfd = TIFF_IFD_0;
2451
2452 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2453
2454 if (hasThumbnail) {
2455 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2456 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2457 uint32_t thumbWidth = context->getThumbnailWidth();
2458 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2459 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2460 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2461 SAMPLES_PER_RGB_PIXEL);
2462 sources.add(thumbnailSource.get());
2463 targetIfd = TIFF_IFD_SUB1;
2464 }
2465
2466 if (isDirect) {
2467 size_t fullSize = rStride * uHeight;
2468 jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2469 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2470 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2471 "Invalid size %d for Image, size given in metadata is %d at current stride",
2472 capacity, fullSize);
2473 return;
2474 }
2475
2476 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2477 if (pixelBytes == nullptr) {
2478 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2479 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2480 return;
2481 }
2482
2483 ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2484 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2485 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2486 sources.add(&stripSource);
2487
2488 status_t ret = OK;
2489 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2490 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2491 if (!env->ExceptionCheck()) {
2492 jniThrowExceptionFmt(env, "java/io/IOException",
2493 "Encountered error %d while writing file.", ret);
2494 }
2495 return;
2496 }
2497 } else {
2498 inBuf = new JniInputByteBuffer(env, inBuffer);
2499
2500 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2501 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2502 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2503 sources.add(&stripSource);
2504
2505 status_t ret = OK;
2506 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2507 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2508 if (!env->ExceptionCheck()) {
2509 jniThrowExceptionFmt(env, "java/io/IOException",
2510 "Encountered error %d while writing file.", ret);
2511 }
2512 return;
2513 }
2514 }
2515 }
2516
DngCreator_nativeWriteInputStream(JNIEnv * env,jobject thiz,jobject outStream,jobject inStream,jint width,jint height,jlong offset)2517 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2518 jobject inStream, jint width, jint height, jlong offset) {
2519 ALOGV("%s:", __FUNCTION__);
2520
2521 uint32_t rowStride = width * BYTES_PER_SAMPLE;
2522 uint32_t pixStride = BYTES_PER_SAMPLE;
2523 uint32_t uWidth = static_cast<uint32_t>(width);
2524 uint32_t uHeight = static_cast<uint32_t>(height);
2525 uint64_t uOffset = static_cast<uint32_t>(offset);
2526
2527 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2528 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2529 height, rowStride, pixStride, offset);
2530
2531 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2532 if (env->ExceptionCheck()) {
2533 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2534 return;
2535 }
2536
2537 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2538 if (context == nullptr) {
2539 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2540 jniThrowException(env, "java/lang/AssertionError",
2541 "Write called with uninitialized DngCreator");
2542 return;
2543 }
2544 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2545
2546 if (writer.get() == nullptr) {
2547 return;
2548 }
2549
2550 // Validate DNG size
2551 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2552 return;
2553 }
2554
2555 sp<DirectStripSource> thumbnailSource;
2556 uint32_t targetIfd = TIFF_IFD_0;
2557 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2558 Vector<StripSource*> sources;
2559
2560 if (hasThumbnail) {
2561 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2562 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2563 uint32_t width = context->getThumbnailWidth();
2564 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2565 width, context->getThumbnailHeight(), bytesPerPixel,
2566 bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2567 SAMPLES_PER_RGB_PIXEL);
2568 sources.add(thumbnailSource.get());
2569 targetIfd = TIFF_IFD_SUB1;
2570 }
2571
2572 sp<JniInputStream> in = new JniInputStream(env, inStream);
2573
2574 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2575 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2576 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2577 sources.add(&stripSource);
2578
2579 status_t ret = OK;
2580 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2581 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2582 if (!env->ExceptionCheck()) {
2583 jniThrowExceptionFmt(env, "java/io/IOException",
2584 "Encountered error %d while writing file.", ret);
2585 }
2586 return;
2587 }
2588 }
2589
2590 } /*extern "C" */
2591
// JNI registration table mapping android.hardware.camera2.DngCreator native
// method signatures to the implementations above. Signature strings must stay
// in exact sync with the Java declarations.
static const JNINativeMethod gDngCreatorMethods[] = {
    {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit},
    {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
            "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
            (void*) DngCreator_init},
    {"nativeDestroy", "()V", (void*) DngCreator_destroy},
    {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation},
    {"nativeSetDescription", "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
    {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
            (void*) DngCreator_nativeSetGpsTags},
    {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
    {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
            (void*) DngCreator_nativeWriteImage},
    {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
            (void*) DngCreator_nativeWriteInputStream},
};
2608
// Registers the native methods above with the DngCreator Java class; aborts
// the runtime (via RegisterMethodsOrDie) if registration fails.
int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
    return RegisterMethodsOrDie(env,
            "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
}
2613