1 /*
2 * Copyright 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <memory>
23 #include <vector>
24 #include <cmath>
25
26 #include <utils/Log.h>
27 #include <utils/Errors.h>
28 #include <utils/StrongPointer.h>
29 #include <utils/RefBase.h>
30 #include <utils/Vector.h>
31 #include <utils/String8.h>
32 #include <cutils/properties.h>
33 #include <system/camera_metadata.h>
34 #include <camera/CameraMetadata.h>
35 #include <img_utils/DngUtils.h>
36 #include <img_utils/TagDefinitions.h>
37 #include <img_utils/TiffIfd.h>
38 #include <img_utils/TiffWriter.h>
39 #include <img_utils/Output.h>
40 #include <img_utils/Input.h>
41 #include <img_utils/StripSource.h>
42
43 #include "core_jni_helpers.h"
44
45 #include "android_runtime/AndroidRuntime.h"
46 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
47
48 #include <jni.h>
49 #include <JNIHelp.h>
50
51 using namespace android;
52 using namespace img_utils;
53
54 #define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
55 if ((expr) != OK) { \
56 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
57 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
58 return false; \
59 }
60
61
62 #define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
63 if ((expr) != OK) { \
64 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
65 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
66 return nullptr; \
67 }
68
69
70 #define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
71 if ((expr) != OK) { \
72 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
73 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
74 return -1; \
75 }
76
77 #define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
78 if ((entry).count == 0) { \
79 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
80 "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
81 return nullptr; \
82 }
83
84 #define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
85 if (expr) { \
86 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
87 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
88 return nullptr; \
89 }
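// Typical usage (as seen throughout DngCreator_setup below):
//   BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
//           env, TAG_ORIENTATION, writer);
// throws IllegalArgumentException and bails out of the calling function if addEntry fails.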
90
91
92 #define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"
93
94 static struct {
95 jfieldID mNativeContext;
96 } gDngCreatorClassInfo;
97
98 static struct {
99 jmethodID mWriteMethod;
100 } gOutputStreamClassInfo;
101
102 static struct {
103 jmethodID mReadMethod;
104 jmethodID mSkipMethod;
105 } gInputStreamClassInfo;
106
107 static struct {
108 jmethodID mGetMethod;
109 } gInputByteBufferClassInfo;
110
111 enum {
112 BITS_PER_SAMPLE = 16,
113 BYTES_PER_SAMPLE = 2,
114 BYTES_PER_RGB_PIXEL = 3,
115 BITS_PER_RGB_SAMPLE = 8,
116 BYTES_PER_RGB_SAMPLE = 1,
117 SAMPLES_PER_RGB_PIXEL = 3,
118 SAMPLES_PER_RAW_PIXEL = 1,
119 TIFF_IFD_0 = 0,
120 TIFF_IFD_SUB1 = 1,
121 TIFF_IFD_GPSINFO = 2,
122 };
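// RAW sensor pixels are written as single 16-bit samples (2 bytes per pixel), while the
// optional RGB thumbnail uses 3 one-byte samples per pixel. The TIFF_IFD_* values are the
// indices used below to address the writer's image file directories.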
123
124
125 /**
126 * POD container class for GPS tag data.
127 */
128 class GpsData {
129 public:
130 enum {
131 GPS_VALUE_LENGTH = 6,
132 GPS_REF_LENGTH = 2,
133 GPS_DATE_LENGTH = 11,
134 };
135
136 uint32_t mLatitude[GPS_VALUE_LENGTH];
137 uint32_t mLongitude[GPS_VALUE_LENGTH];
138 uint32_t mTimestamp[GPS_VALUE_LENGTH];
139 uint8_t mLatitudeRef[GPS_REF_LENGTH];
140 uint8_t mLongitudeRef[GPS_REF_LENGTH];
141 uint8_t mDate[GPS_DATE_LENGTH];
142 };
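// The layout follows the EXIF GPS IFD conventions: latitude, longitude, and timestamp are
// each three unsigned rationals (numerator/denominator pairs, hence 6 uint32_t values),
// the refs are null-terminated single-character strings such as "N"/"S" or "E"/"W", and
// the date is a null-terminated "YYYY:MM:DD" string (11 bytes).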
143
144 // ----------------------------------------------------------------------------
145
146 /**
147 * Container class for the persistent native context.
148 */
149
150 class NativeContext : public LightRefBase<NativeContext> {
151 public:
152 enum {
153 DATETIME_COUNT = 20,
154 };
155
156 NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
157 virtual ~NativeContext();
158
159 TiffWriter* getWriter();
160
161 std::shared_ptr<const CameraMetadata> getCharacteristics() const;
162 std::shared_ptr<const CameraMetadata> getResult() const;
163
164 uint32_t getThumbnailWidth() const;
165 uint32_t getThumbnailHeight() const;
166 const uint8_t* getThumbnail() const;
167 bool hasThumbnail() const;
168
169 bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);
170
171 void setOrientation(uint16_t orientation);
172 uint16_t getOrientation() const;
173
174 void setDescription(const String8& desc);
175 String8 getDescription() const;
176 bool hasDescription() const;
177
178 void setGpsData(const GpsData& data);
179 GpsData getGpsData() const;
180 bool hasGpsData() const;
181
182 void setCaptureTime(const String8& formattedCaptureTime);
183 String8 getCaptureTime() const;
184 bool hasCaptureTime() const;
185
186 private:
187 Vector<uint8_t> mCurrentThumbnail;
188 TiffWriter mWriter;
189 std::shared_ptr<CameraMetadata> mCharacteristics;
190 std::shared_ptr<CameraMetadata> mResult;
191 uint32_t mThumbnailWidth;
192 uint32_t mThumbnailHeight;
193 uint16_t mOrientation;
194 bool mThumbnailSet;
195 bool mGpsSet;
196 bool mDescriptionSet;
197 bool mCaptureTimeSet;
198 String8 mDescription;
199 GpsData mGpsData;
200 String8 mFormattedCaptureTime;
201 };
202
203 NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
204 mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
205 mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
206 mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
207 mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}
208
209 NativeContext::~NativeContext() {}
210
211 TiffWriter* NativeContext::getWriter() {
212 return &mWriter;
213 }
214
215 std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
216 return mCharacteristics;
217 }
218
219 std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
220 return mResult;
221 }
222
223 uint32_t NativeContext::getThumbnailWidth() const {
224 return mThumbnailWidth;
225 }
226
227 uint32_t NativeContext::getThumbnailHeight() const {
228 return mThumbnailHeight;
229 }
230
231 const uint8_t* NativeContext::getThumbnail() const {
232 return mCurrentThumbnail.array();
233 }
234
235 bool NativeContext::hasThumbnail() const {
236 return mThumbnailSet;
237 }
238
239 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
240 mThumbnailWidth = width;
241 mThumbnailHeight = height;
242
243 size_t size = BYTES_PER_RGB_PIXEL * width * height;
244 if (mCurrentThumbnail.resize(size) < 0) {
245 ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
246 return false;
247 }
248
249 uint8_t* thumb = mCurrentThumbnail.editArray();
250 memcpy(thumb, buffer, size);
251 mThumbnailSet = true;
252 return true;
253 }
254
255 void NativeContext::setOrientation(uint16_t orientation) {
256 mOrientation = orientation;
257 }
258
259 uint16_t NativeContext::getOrientation() const {
260 return mOrientation;
261 }
262
263 void NativeContext::setDescription(const String8& desc) {
264 mDescription = desc;
265 mDescriptionSet = true;
266 }
267
268 String8 NativeContext::getDescription() const {
269 return mDescription;
270 }
271
272 bool NativeContext::hasDescription() const {
273 return mDescriptionSet;
274 }
275
276 void NativeContext::setGpsData(const GpsData& data) {
277 mGpsData = data;
278 mGpsSet = true;
279 }
280
281 GpsData NativeContext::getGpsData() const {
282 return mGpsData;
283 }
284
285 bool NativeContext::hasGpsData() const {
286 return mGpsSet;
287 }
288
289 void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
290 mFormattedCaptureTime = formattedCaptureTime;
291 mCaptureTimeSet = true;
292 }
293
294 String8 NativeContext::getCaptureTime() const {
295 return mFormattedCaptureTime;
296 }
297
298 bool NativeContext::hasCaptureTime() const {
299 return mCaptureTimeSet;
300 }
301
302 // End of NativeContext
303 // ----------------------------------------------------------------------------
304
305 /**
306 * Wrapper class for a Java OutputStream.
307 *
308 * This class is not intended to be used across JNI calls.
309 */
310 class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
311 public:
312 JniOutputStream(JNIEnv* env, jobject outStream);
313
314 virtual ~JniOutputStream();
315
316 status_t open();
317
318 status_t write(const uint8_t* buf, size_t offset, size_t count);
319
320 status_t close();
321 private:
322 enum {
323 BYTE_ARRAY_LENGTH = 4096
324 };
325 jobject mOutputStream;
326 JNIEnv* mEnv;
327 jbyteArray mByteArray;
328 };
329
330 JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
331 mEnv(env) {
332 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
333 if (mByteArray == nullptr) {
334 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
335 }
336 }
337
338 JniOutputStream::~JniOutputStream() {
339 mEnv->DeleteLocalRef(mByteArray);
340 }
341
342 status_t JniOutputStream::open() {
343 // Do nothing
344 return OK;
345 }
346
347 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
348 while(count > 0) {
349 size_t len = BYTE_ARRAY_LENGTH;
350 len = (count > len) ? len : count;
351 mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
352
353 if (mEnv->ExceptionCheck()) {
354 return BAD_VALUE;
355 }
356
357 mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
358 0, len);
359
360 if (mEnv->ExceptionCheck()) {
361 return BAD_VALUE;
362 }
363
364 count -= len;
365 offset += len;
366 }
367 return OK;
368 }
369
370 status_t JniOutputStream::close() {
371 // Do nothing
372 return OK;
373 }
374
375 // End of JniOutputStream
376 // ----------------------------------------------------------------------------
377
378 /**
379 * Wrapper class for a Java InputStream.
380 *
381 * This class is not intended to be used across JNI calls.
382 */
383 class JniInputStream : public Input, public LightRefBase<JniInputStream> {
384 public:
385 JniInputStream(JNIEnv* env, jobject inStream);
386
387 status_t open();
388
389 status_t close();
390
391 ssize_t read(uint8_t* buf, size_t offset, size_t count);
392
393 ssize_t skip(size_t count);
394
395 virtual ~JniInputStream();
396 private:
397 enum {
398 BYTE_ARRAY_LENGTH = 4096
399 };
400 jobject mInStream;
401 JNIEnv* mEnv;
402 jbyteArray mByteArray;
403
404 };
405
406 JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
407 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
408 if (mByteArray == nullptr) {
409 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
410 }
411 }
412
413 JniInputStream::~JniInputStream() {
414 mEnv->DeleteLocalRef(mByteArray);
415 }
416
417 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
418
419 jint realCount = BYTE_ARRAY_LENGTH;
420 if (count < BYTE_ARRAY_LENGTH) {
421 realCount = count;
422 }
423 jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
424 realCount);
425
426 if (actual < 0) {
427 return NOT_ENOUGH_DATA;
428 }
429
430 if (mEnv->ExceptionCheck()) {
431 return BAD_VALUE;
432 }
433
434 mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
435 if (mEnv->ExceptionCheck()) {
436 return BAD_VALUE;
437 }
438 return actual;
439 }
440
441 ssize_t JniInputStream::skip(size_t count) {
442 jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
443 static_cast<jlong>(count));
444
445 if (mEnv->ExceptionCheck()) {
446 return BAD_VALUE;
447 }
448 if (actual < 0) {
449 return NOT_ENOUGH_DATA;
450 }
451 return actual;
452 }
453
454 status_t JniInputStream::open() {
455 // Do nothing
456 return OK;
457 }
458
459 status_t JniInputStream::close() {
460 // Do nothing
461 return OK;
462 }
463
464 // End of JniInputStream
465 // ----------------------------------------------------------------------------
466
467 /**
468 * Wrapper class for a non-direct Java ByteBuffer.
469 *
470 * This class is not intended to be used across JNI calls.
471 */
472 class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
473 public:
474 JniInputByteBuffer(JNIEnv* env, jobject inBuf);
475
476 status_t open();
477
478 status_t close();
479
480 ssize_t read(uint8_t* buf, size_t offset, size_t count);
481
482 virtual ~JniInputByteBuffer();
483 private:
484 enum {
485 BYTE_ARRAY_LENGTH = 4096
486 };
487 jobject mInBuf;
488 JNIEnv* mEnv;
489 jbyteArray mByteArray;
490 };
491
492 JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
493 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
494 if (mByteArray == nullptr) {
495 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
496 }
497 }
498
499 JniInputByteBuffer::~JniInputByteBuffer() {
500 mEnv->DeleteLocalRef(mByteArray);
501 }
502
503 ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
504 jint realCount = BYTE_ARRAY_LENGTH;
505 if (count < BYTE_ARRAY_LENGTH) {
506 realCount = count;
507 }
508
509 jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
510 mByteArray, 0, realCount);
511 mEnv->DeleteLocalRef(chainingBuf);
512
513 if (mEnv->ExceptionCheck()) {
514 ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
515 return BAD_VALUE;
516 }
517
518 mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
519 if (mEnv->ExceptionCheck()) {
520 ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
521 return BAD_VALUE;
522 }
523 return realCount;
524 }
525
526 status_t JniInputByteBuffer::open() {
527 // Do nothing
528 return OK;
529 }
530
531 status_t JniInputByteBuffer::close() {
532 // Do nothing
533 return OK;
534 }
535
536 // End of JniInputByteBuffer
537 // ----------------------------------------------------------------------------
538
539 /**
540 * StripSource subclass for Input types.
541 *
542 * This class is not intended to be used across JNI calls.
543 */
544
545 class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
546 public:
547 InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
548 uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
549 uint32_t samplesPerPixel);
550
551 virtual ~InputStripSource();
552
553 virtual status_t writeToStream(Output& stream, uint32_t count);
554
555 virtual uint32_t getIfd() const;
556 protected:
557 uint32_t mIfd;
558 Input* mInput;
559 uint32_t mWidth;
560 uint32_t mHeight;
561 uint32_t mPixStride;
562 uint32_t mRowStride;
563 uint64_t mOffset;
564 JNIEnv* mEnv;
565 uint32_t mBytesPerSample;
566 uint32_t mSamplesPerPixel;
567 };
568
569 InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
570 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
571 uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
572 mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
573 mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
574 mSamplesPerPixel(samplesPerPixel) {}
575
576 InputStripSource::~InputStripSource() {}
577
578 status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
579 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
580 jlong offset = mOffset;
581
582 if (fullSize != count) {
583 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
584 fullSize);
585 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
586 return BAD_VALUE;
587 }
588
589 // Skip offset
590 while (offset > 0) {
591 ssize_t skipped = mInput->skip(offset);
592 if (skipped <= 0) {
593 if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
594 jniThrowExceptionFmt(mEnv, "java/io/IOException",
595 "Early EOF encountered in skip, not enough pixel data for image of size %u",
596 fullSize);
597 skipped = NOT_ENOUGH_DATA;
598 } else {
599 if (!mEnv->ExceptionCheck()) {
600 jniThrowException(mEnv, "java/io/IOException",
601 "Error encountered while skip bytes in input stream.");
602 }
603 }
604
605 return skipped;
606 }
607 offset -= skipped;
608 }
609
610 Vector<uint8_t> row;
611 if (row.resize(mRowStride) < 0) {
612 jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
613 return BAD_VALUE;
614 }
615
616 uint8_t* rowBytes = row.editArray();
617
618 for (uint32_t i = 0; i < mHeight; ++i) {
619 size_t rowFillAmt = 0;
620 size_t rowSize = mRowStride;
621
622 while (rowFillAmt < mRowStride) {
623 ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
624 if (bytesRead <= 0) {
625 if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
626 ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
627 __FUNCTION__, i, bytesRead);
628 jniThrowExceptionFmt(mEnv, "java/io/IOException",
629 "Early EOF encountered, not enough pixel data for image of size %"
630 PRIu32, fullSize);
631 bytesRead = NOT_ENOUGH_DATA;
632 } else {
633 if (!mEnv->ExceptionCheck()) {
634 jniThrowException(mEnv, "java/io/IOException",
635 "Error encountered while reading");
636 }
637 }
638 return bytesRead;
639 }
640 rowFillAmt += bytesRead;
641 rowSize -= bytesRead;
642 }
643
644 if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
645 ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);
646
647 if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
648 mEnv->ExceptionCheck()) {
649 if (!mEnv->ExceptionCheck()) {
650 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
651 }
652 return BAD_VALUE;
653 }
654 } else {
655 ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
656 jniThrowException(mEnv, "java/lang/IllegalStateException",
657 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
658 return BAD_VALUE;
659
660 // TODO: Add support for non-contiguous pixels if needed.
661 }
662 }
663 return OK;
664 }
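// Note: writeToStream above streams the image one row at a time through the intermediate
// row buffer; only tightly packed pixels (pixel stride equal to bytes per pixel) are
// supported for the RAW16 strip.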
665
666 uint32_t InputStripSource::getIfd() const {
667 return mIfd;
668 }
669
670 // End of InputStripSource
671 // ----------------------------------------------------------------------------
672
673 /**
674 * StripSource subclass for direct buffer types.
675 *
676 * This class is not intended to be used across JNI calls.
677 */
678
679 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
680 public:
681 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
682 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
683 uint32_t bytesPerSample, uint32_t samplesPerPixel);
684
685 virtual ~DirectStripSource();
686
687 virtual status_t writeToStream(Output& stream, uint32_t count);
688
689 virtual uint32_t getIfd() const;
690 protected:
691 uint32_t mIfd;
692 const uint8_t* mPixelBytes;
693 uint32_t mWidth;
694 uint32_t mHeight;
695 uint32_t mPixStride;
696 uint32_t mRowStride;
697 uint64_t mOffset;
698 JNIEnv* mEnv;
699 uint32_t mBytesPerSample;
700 uint32_t mSamplesPerPixel;
701 };
702
703 DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
704 uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
705 uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
706 mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
707 mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
708 mSamplesPerPixel(samplesPerPixel) {}
709
710 DirectStripSource::~DirectStripSource() {}
711
712 status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
713 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
714
715 if (fullSize != count) {
716 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
717 fullSize);
718 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
719 return BAD_VALUE;
720 }
721
722
723 if (mPixStride == mBytesPerSample * mSamplesPerPixel
724 && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
725 ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);
726
727 if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
728 if (!mEnv->ExceptionCheck()) {
729 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
730 }
731 return BAD_VALUE;
732 }
733 } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
734 ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);
735
736 for (size_t i = 0; i < mHeight; ++i) {
737 if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
738 mEnv->ExceptionCheck()) {
739 if (!mEnv->ExceptionCheck()) {
740 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
741 }
742 return BAD_VALUE;
743 }
744 }
745 } else {
746 ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);
747
748 jniThrowException(mEnv, "java/lang/IllegalStateException",
749 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
750 return BAD_VALUE;
751
752 // TODO: Add support for non-contiguous pixels if needed.
753 }
754 return OK;
755
756 }
757
758 uint32_t DirectStripSource::getIfd() const {
759 return mIfd;
760 }
761
762 // End of DirectStripSource
763 // ----------------------------------------------------------------------------
764
765 /**
766 * Calculate the default crop relative to the "active area" of the image sensor (this active area
767 * will always be the pre-correction active area rectangle), and set this.
768 */
769 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
770 sp<TiffWriter> writer) {
771
772 camera_metadata_ro_entry entry =
773 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
774 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
775 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
776
777 const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
778
779 if (width < margin * 2 || height < margin * 2) {
780 ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
781 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
782 jniThrowException(env, "java/lang/IllegalStateException",
783 "Pre-correction active area is too small.");
784 return BAD_VALUE;
785 }
786
787 uint32_t defaultCropOrigin[] = {margin, margin};
788 uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
789 height - defaultCropOrigin[1] - margin};
790
791 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
792 TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
793 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
794 TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
795
796 return OK;
797 }
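// For example, with a 4032x3024 pre-correction active array this writes
// DefaultCropOrigin = {8, 8} and DefaultCropSize = {4016, 3008}, i.e. the full area
// minus the 8-pixel interpolation margin on every side.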
798
799 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
800 const CameraMetadata& characteristics, jint width, jint height) {
801 if (width <= 0) {
802 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
803 "Image width %d is invalid", width);
804 return false;
805 }
806
807 if (height <= 0) {
808 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
809 "Image height %d is invalid", height);
810 return false;
811 }
812
813 camera_metadata_ro_entry preCorrectionEntry =
814 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
815 camera_metadata_ro_entry pixelArrayEntry =
816 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
817
818 int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
819 int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
820 int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
821 int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
822
823 bool matchesPixelArray = (pWidth == width && pHeight == height);
824 bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
825
826 if (!(matchesPixelArray || matchesPreCorrectionArray)) {
827 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
828 "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
829 "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
830 width, height, pWidth, pHeight, cWidth, cHeight);
831 return false;
832 }
833
834 return true;
835 }
836
837 static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo,
838 const Vector<uint16_t>& entries) {
839 for (size_t i = 0; i < entries.size(); ++i) {
840 uint16_t tagId = entries[i];
841 sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom);
842 if (entry.get() == nullptr) {
843 ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId,
844 ifdFrom);
845 return BAD_VALUE;
846 }
847 if (writer->addEntry(entry, ifdTo) != OK) {
848 ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId,
849 ifdFrom);
850 return BAD_VALUE;
851 }
852 writer->removeEntry(tagId, ifdFrom);
853 }
854 return OK;
855 }
856
857 /**
858 * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4.
859 * Returns OK on success, or a negative error code if the CFA enum was invalid.
860 */
861 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
862 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
863 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
864 cfaEnum);
865 switch(cfa) {
866 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
867 cfaOut[0] = 0;
868 cfaOut[1] = 1;
869 cfaOut[2] = 1;
870 cfaOut[3] = 2;
871 break;
872 }
873 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
874 cfaOut[0] = 1;
875 cfaOut[1] = 0;
876 cfaOut[2] = 2;
877 cfaOut[3] = 1;
878 break;
879 }
880 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
881 cfaOut[0] = 1;
882 cfaOut[1] = 2;
883 cfaOut[2] = 0;
884 cfaOut[3] = 1;
885 break;
886 }
887 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
888 cfaOut[0] = 2;
889 cfaOut[1] = 1;
890 cfaOut[2] = 1;
891 cfaOut[3] = 0;
892 break;
893 }
894 default: {
895 return BAD_VALUE;
896 }
897 }
898 return OK;
899 }
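// The values written to cfaOut index into the CFAPlaneColor ordering {R, G, B} used
// below, so e.g. the RGGB arrangement becomes {0, 1, 1, 2} and BGGR becomes {2, 1, 1, 0}.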
900
901 /**
902 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
903 * RGGB for an unknown enum.
904 */
905 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
906 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
907 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
908 cfaEnum);
909 switch(cfa) {
910 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
911 return OpcodeListBuilder::CFA_RGGB;
912 }
913 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
914 return OpcodeListBuilder::CFA_GRBG;
915 }
916 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
917 return OpcodeListBuilder::CFA_GBRG;
918 }
919 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
920 return OpcodeListBuilder::CFA_BGGR;
921 }
922 default: {
923 return OpcodeListBuilder::CFA_RGGB;
924 }
925 }
926 }
927
928 /**
929 * For each color plane, find the corresponding noise profile coefficients given in the
930 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color
931 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
932 *
933 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
934 * cfa - numChannels color channels corresponding to each of the per-channel noise profile
935 * coefficients.
936 * numChannels - the number of noise profile coefficient pairs and color channels given in
937 * the perChannelNoiseProfile and cfa arguments, respectively.
938 * planeColors - the color planes in the noise profile output.
939 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
940 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
941 *
942 * returns OK, or a negative error code on failure.
943 */
944 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
945 size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
946 /*out*/double* noiseProfile) {
947
948 for (size_t p = 0; p < numPlanes; ++p) {
949 size_t S = p * 2;
950 size_t O = p * 2 + 1;
951
952 noiseProfile[S] = 0;
953 noiseProfile[O] = 0;
954 bool uninitialized = true;
955 for (size_t c = 0; c < numChannels; ++c) {
956 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
957 noiseProfile[S] = perChannelNoiseProfile[c * 2];
958 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
959 uninitialized = false;
960 }
961 }
962 if (uninitialized) {
963 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
964 __FUNCTION__, p);
965 return BAD_VALUE;
966 }
967 }
968 return OK;
969 }
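// Example: for a GRBG sensor cfa is {1, 0, 2, 1} and planeColors is {0, 1, 2}; the green
// output plane (color 1) takes whichever of CFA channels 0 and 3 reports the larger S
// coefficient, along with that channel's O coefficient.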
970
971 // ----------------------------------------------------------------------------
972 extern "C" {
973
974 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
975 ALOGV("%s:", __FUNCTION__);
976 return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
977 gDngCreatorClassInfo.mNativeContext));
978 }
979
980 static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
981 ALOGV("%s:", __FUNCTION__);
982 NativeContext* current = DngCreator_getNativeContext(env, thiz);
983
984 if (context != nullptr) {
985 context->incStrong((void*) DngCreator_setNativeContext);
986 }
987
988 if (current) {
989 current->decStrong((void*) DngCreator_setNativeContext);
990 }
991
992 env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
993 reinterpret_cast<jlong>(context.get()));
994 }
995
996 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
997 ALOGV("%s:", __FUNCTION__);
998
999 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
1000 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
1001
1002 jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
1003 gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
1004 outputStreamClazz, "write", "([BII)V");
1005
1006 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
1007 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
1008 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1009
1010 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1011 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1012 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1013 }
1014
1015 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1016 jobject resultsPtr, jstring formattedCaptureTime) {
1017 ALOGV("%s:", __FUNCTION__);
1018 CameraMetadata characteristics;
1019 CameraMetadata results;
1020 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1021 jniThrowException(env, "java/lang/AssertionError",
1022 "No native metadata defined for camera characteristics.");
1023 return;
1024 }
1025 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1026 jniThrowException(env, "java/lang/AssertionError",
1027 "No native metadata defined for capture results.");
1028 return;
1029 }
1030
1031 sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1032
1033 const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr);
1034
1035 size_t len = strlen(captureTime) + 1;
1036 if (len != NativeContext::DATETIME_COUNT) {
1037 jniThrowException(env, "java/lang/IllegalArgumentException",
1038 "Formatted capture time string length is not required 20 characters");
1039 return;
1040 }
1041
1042 nativeContext->setCaptureTime(String8(captureTime));
1043
1044 DngCreator_setNativeContext(env, thiz, nativeContext);
1045 }
1046
1047 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1048 uint32_t imageHeight) {
1049
1050 NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1051
1052 if (nativeContext == nullptr) {
1053 jniThrowException(env, "java/lang/AssertionError",
1054 "No native context, must call init before other operations.");
1055 return nullptr;
1056 }
1057
1058 CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1059 CameraMetadata results = *(nativeContext->getResult());
1060
1061 sp<TiffWriter> writer = new TiffWriter();
1062
1063 uint32_t preWidth = 0;
1064 uint32_t preHeight = 0;
1065 {
1066 // Check dimensions
1067 camera_metadata_entry entry =
1068 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1069 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1070 preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1071 preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1072
1073 camera_metadata_entry pixelArrayEntry =
1074 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
1075 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1076 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1077
1078 if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1079 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1080 jniThrowException(env, "java/lang/AssertionError",
1081 "Height and width of imate buffer did not match height and width of"
1082 "either the preCorrectionActiveArraySize or the pixelArraySize.");
1083 return nullptr;
1084 }
1085 }
1086
1087
1088
1089 writer->addIfd(TIFF_IFD_0);
1090
1091 status_t err = OK;
1092
1093 const uint32_t samplesPerPixel = 1;
1094 const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1095
1096 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB;
1097 uint8_t cfaPlaneColor[3] = {0, 1, 2};
1098 uint8_t cfaEnum = -1;
1099
1100 // TODO: Greensplit.
1101 // TODO: Add remaining non-essential tags
1102
1103 // Setup main image tags
1104
1105 {
1106 // Set orientation
1107 uint16_t orientation = TAG_ORIENTATION_NORMAL;
1108 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1109 env, TAG_ORIENTATION, writer);
1110 }
1111
1112 {
1113 // Set subfiletype
1114 uint32_t subfileType = 0; // Main image
1115 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1116 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1117 }
1118
1119 {
1120 // Set bits per sample
1121 uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1122 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1123 TAG_BITSPERSAMPLE, writer);
1124 }
1125
1126 {
1127 // Set compression
1128 uint16_t compression = 1; // None
1129 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1130 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1131 }
1132
1133 {
1134 // Set dimensions
1135 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1136 env, TAG_IMAGEWIDTH, writer);
1137 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1138 env, TAG_IMAGELENGTH, writer);
1139 }
1140
1141 {
1142 // Set photometric interpretation
1143 uint16_t interpretation = 32803; // CFA
1144 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1145 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1146 }
1147
1148 {
1149 // Set blacklevel tags, using dynamic black level if available
1150 camera_metadata_entry entry =
1151 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
1152 uint32_t blackLevelRational[8] = {0};
1153 if (entry.count != 0) {
1154 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1155 for (size_t i = 0; i < entry.count; i++) {
1156 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
1157 blackLevelRational[i * 2 + 1] = 100;
1158 }
1159 } else {
1160 // Fall back to static black level which is guaranteed
1161 entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1162 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1163 for (size_t i = 0; i < entry.count; i++) {
1164 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
1165 blackLevelRational[i * 2 + 1] = 1;
1166 }
1167
1168 }
1169 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, 4, blackLevelRational,
1170 TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1171
1172 uint16_t repeatDim[2] = {2, 2};
1173 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1174 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1175 }
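    // Black levels are written as RATIONALs: a dynamic black level of e.g. 64.25 becomes
    // 6425/100, while the static per-channel pattern values are written as integer/1.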
1176
1177 {
1178 // Set samples per pixel
1179 uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1180 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1181 env, TAG_SAMPLESPERPIXEL, writer);
1182 }
1183
1184 {
1185 // Set planar configuration
1186 uint16_t config = 1; // Chunky
1187 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1188 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1189 }
1190
1191 {
1192 // Set CFA pattern dimensions
1193 uint16_t repeatDim[2] = {2, 2};
1194 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1195 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1196 }
1197
1198 {
1199 // Set CFA pattern
1200 camera_metadata_entry entry =
1201 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1202 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer);
1203
1204 const int cfaLength = 4;
1205 cfaEnum = entry.data.u8[0];
1206 uint8_t cfa[cfaLength];
1207 if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1208 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1209 "Invalid metadata for tag %d", TAG_CFAPATTERN);
1210 }
1211
1212 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1213 env, TAG_CFAPATTERN, writer);
1214
1215 opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1216 }
1217
1218 {
1219 // Set CFA plane color
1220 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1221 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1222 }
1223
1224 {
1225 // Set CFA layout
1226 uint16_t cfaLayout = 1;
1227 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1228 env, TAG_CFALAYOUT, writer);
1229 }
1230
1231 {
1232 // image description
1233 uint8_t imageDescription = '\0'; // empty
1234 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1235 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1236 }
1237
1238 {
1239 // make
1240 char manufacturer[PROPERTY_VALUE_MAX];
1241
1242 // Use "" to represent unknown make as suggested in TIFF/EP spec.
1243 property_get("ro.product.manufacturer", manufacturer, "");
1244 uint32_t count = static_cast<uint32_t>(strlen(manufacturer)) + 1;
1245
1246 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1247 reinterpret_cast<uint8_t*>(manufacturer), TIFF_IFD_0), env, TAG_MAKE, writer);
1248 }
1249
1250 {
1251 // model
1252 char model[PROPERTY_VALUE_MAX];
1253
1254 // Use "" to represent unknown model as suggested in TIFF/EP spec.
1255 property_get("ro.product.model", model, "");
1256 uint32_t count = static_cast<uint32_t>(strlen(model)) + 1;
1257
1258 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1259 reinterpret_cast<uint8_t*>(model), TIFF_IFD_0), env, TAG_MODEL, writer);
1260 }
1261
1262 {
1263 // x resolution
1264 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1265 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1266 env, TAG_XRESOLUTION, writer);
1267
1268 // y resolution
1269 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1270 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1271 env, TAG_YRESOLUTION, writer);
1272
1273 uint16_t unit = 2; // inches
1274 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1275 env, TAG_RESOLUTIONUNIT, writer);
1276 }
1277
1278 {
1279 // software
1280 char software[PROPERTY_VALUE_MAX];
1281 property_get("ro.build.fingerprint", software, "");
1282 uint32_t count = static_cast<uint32_t>(strlen(software)) + 1;
1283 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1284 reinterpret_cast<uint8_t*>(software), TIFF_IFD_0), env, TAG_SOFTWARE, writer);
1285 }
1286
1287 if (nativeContext->hasCaptureTime()) {
1288 // datetime
1289 String8 captureTime = nativeContext->getCaptureTime();
1290
1291 if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
1292 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1293 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1294 "Invalid metadata for tag %x", TAG_DATETIME);
1295 return nullptr;
1296 }
1297
1298 // datetime original
1299 if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
1300 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1301 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1302 "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
1303 return nullptr;
1304 }
1305 }
1306
1307 {
1308 // TIFF/EP standard id
1309 uint8_t standardId[] = { 1, 0, 0, 0 };
1310 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
1311 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
1312 }
1313
1314 {
1315 // copyright
1316 uint8_t copyright = '\0'; // empty
1317 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
1318 TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
1319 }
1320
1321 {
1322 // exposure time
1323 camera_metadata_entry entry =
1324 results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1325 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1326
1327 int64_t exposureTime = *(entry.data.i64);
1328
1329 if (exposureTime < 0) {
1330 // Should be unreachable
1331 jniThrowException(env, "java/lang/IllegalArgumentException",
1332 "Negative exposure time in metadata");
1333 return nullptr;
1334 }
1335
1336 // Ensure exposure time doesn't overflow (for exposures > 4s)
1337 uint32_t denominator = 1000000000;
1338 while (exposureTime > UINT32_MAX) {
1339 exposureTime >>= 1;
1340 denominator >>= 1;
1341 if (denominator == 0) {
1342 // Should be unreachable
1343 jniThrowException(env, "java/lang/IllegalArgumentException",
1344 "Exposure time too long");
1345 return nullptr;
1346 }
1347 }
1348
1349 uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1350 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1351 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1352
1353 }
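    // The exposure is stored as a RATIONAL in seconds, e.g. a 10 ms exposure (10000000 ns)
    // is written as 10000000/1000000000; exposures longer than ~4.3 s are scaled down along
    // with their denominator so the numerator still fits in 32 bits.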
1354
1355 {
1356 // ISO speed ratings
1357 camera_metadata_entry entry =
1358 results.find(ANDROID_SENSOR_SENSITIVITY);
1359 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1360
1361 int32_t tempIso = *(entry.data.i32);
1362 if (tempIso < 0) {
1363 jniThrowException(env, "java/lang/IllegalArgumentException",
1364 "Negative ISO value");
1365 return nullptr;
1366 }
1367
1368 if (tempIso > UINT16_MAX) {
1369 ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1370 tempIso = UINT16_MAX;
1371 }
1372
1373 uint16_t iso = static_cast<uint16_t>(tempIso);
1374 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1375 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1376 }
1377
1378 {
1379 // Baseline exposure
1380 camera_metadata_entry entry =
1381 results.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
1382 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
1383
1384 // post RAW gain should be boostValue / 100
1385 double postRAWGain = static_cast<double> (entry.data.i32[0]) / 100.f;
1386 // Baseline exposure should be in EV units so log2(gain) =
1387 // log10(gain)/log10(2)
1388 double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
1389 int32_t baseExposureSRat[] = { static_cast<int32_t> (baselineExposure * 100),
1390 100 };
1391 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
1392 baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
1393 }
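    // e.g. a post-RAW sensitivity boost of 200 corresponds to a gain of 2.0 and is written
    // as a BaselineExposure of 1 EV (the SRATIONAL 100/100).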
1394
1395 {
1396 // focal length
1397 camera_metadata_entry entry =
1398 results.find(ANDROID_LENS_FOCAL_LENGTH);
1399 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1400
1401 uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1402 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1403 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1404 }
1405
1406 {
1407 // f number
1408 camera_metadata_entry entry =
1409 results.find(ANDROID_LENS_APERTURE);
1410 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1411
1412 uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1413 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1414 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1415 }
1416
1417 {
1418 // Set DNG version information
1419 uint8_t version[4] = {1, 4, 0, 0};
1420 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1421 env, TAG_DNGVERSION, writer);
1422
1423 uint8_t backwardVersion[4] = {1, 1, 0, 0};
1424 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1425 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1426 }
1427
1428 {
1429 // Set whitelevel
1430 camera_metadata_entry entry =
1431 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1432 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1433 uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1434 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1435 env, TAG_WHITELEVEL, writer);
1436 }
1437
1438 {
1439 // Set default scale
1440 uint32_t defaultScale[4] = {1, 1, 1, 1};
1441 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1442 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1443 }
1444
1445 bool singleIlluminant = false;
1446 {
1447 // Set calibration illuminants
1448 camera_metadata_entry entry1 =
1449 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1450 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
1451 camera_metadata_entry entry2 =
1452 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1453 if (entry2.count == 0) {
1454 singleIlluminant = true;
1455 }
1456 uint16_t ref1 = entry1.data.u8[0];
1457
1458 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
1459 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
1460
1461 if (!singleIlluminant) {
1462 uint16_t ref2 = entry2.data.u8[0];
1463 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
1464 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
1465 }
1466 }
1467
1468 {
1469 // Set color transforms
1470 camera_metadata_entry entry1 =
1471 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
1472 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
1473
1474 int32_t colorTransform1[entry1.count * 2];
1475
1476 size_t ctr = 0;
1477 for(size_t i = 0; i < entry1.count; ++i) {
1478 colorTransform1[ctr++] = entry1.data.r[i].numerator;
1479 colorTransform1[ctr++] = entry1.data.r[i].denominator;
1480 }
1481
1482 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
1483 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
1484
1485 if (!singleIlluminant) {
1486 camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
1487 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
1488 int32_t colorTransform2[entry2.count * 2];
1489
1490 ctr = 0;
1491 for(size_t i = 0; i < entry2.count; ++i) {
1492 colorTransform2[ctr++] = entry2.data.r[i].numerator;
1493 colorTransform2[ctr++] = entry2.data.r[i].denominator;
1494 }
1495
1496 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
1497 colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
1498 }
1499 }
1500
1501 {
1502 // Set calibration transforms
1503 camera_metadata_entry entry1 =
1504 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1505 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
1506
1507 int32_t calibrationTransform1[entry1.count * 2];
1508
1509 size_t ctr = 0;
1510 for(size_t i = 0; i < entry1.count; ++i) {
1511 calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
1512 calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
1513 }
1514
1515 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
1516 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
1517
1518 if (!singleIlluminant) {
1519 camera_metadata_entry entry2 =
1520 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1521 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
1522 int32_t calibrationTransform2[entry2.count * 2];
1523
1524 ctr = 0;
1525 for(size_t i = 0; i < entry2.count; ++i) {
1526 calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
1527 calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
1528 }
1529
1530 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
1531 calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
1532 }
1533 }
1534
1535 {
1536 // Set forward transforms
1537 camera_metadata_entry entry1 =
1538 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
1539 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
1540
1541 int32_t forwardTransform1[entry1.count * 2];
1542
1543 size_t ctr = 0;
1544 for(size_t i = 0; i < entry1.count; ++i) {
1545 forwardTransform1[ctr++] = entry1.data.r[i].numerator;
1546 forwardTransform1[ctr++] = entry1.data.r[i].denominator;
1547 }
1548
1549 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
1550 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
1551
1552 if (!singleIlluminant) {
1553 camera_metadata_entry entry2 =
1554 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
1555 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
1556 int32_t forwardTransform2[entry2.count * 2];
1557
1558 ctr = 0;
1559 for(size_t i = 0; i < entry2.count; ++i) {
1560 forwardTransform2[ctr++] = entry2.data.r[i].numerator;
1561 forwardTransform2[ctr++] = entry2.data.r[i].denominator;
1562 }
1563
1564 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
1565 forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
1566 }
1567 }
1568
1569 {
1570 // Set camera neutral
1571 camera_metadata_entry entry =
1572 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1573 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
1574 uint32_t cameraNeutral[entry.count * 2];
1575
1576 size_t ctr = 0;
1577 for(size_t i = 0; i < entry.count; ++i) {
1578 cameraNeutral[ctr++] =
1579 static_cast<uint32_t>(entry.data.r[i].numerator);
1580 cameraNeutral[ctr++] =
1581 static_cast<uint32_t>(entry.data.r[i].denominator);
1582 }
1583
1584 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
1585 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
1586 }
1587
1588
1589 {
1590 // Set dimensions
1591 if (calculateAndSetCrop(env, characteristics, writer) != OK) {
1592 return nullptr;
1593 }
1594 camera_metadata_entry entry =
1595 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1596 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1597 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1598 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1599 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1600 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1601
1602 // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1603 // relative to the pixel array.
1604 if (imageWidth == width && imageHeight == height) {
1605 xmin = 0;
1606 ymin = 0;
1607 }
1608
1609 uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1610 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1611 env, TAG_ACTIVEAREA, writer);
1612 }
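    // ActiveArea is written as {top, left, bottom, right} in the coordinates of the full
    // pixel array (or with a zero origin when the buffer only covers the pre-correction
    // rectangle, as handled above).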
1613
1614 {
1615 // Setup unique camera model tag
1616 char model[PROPERTY_VALUE_MAX];
1617 property_get("ro.product.model", model, "");
1618
1619 char manufacturer[PROPERTY_VALUE_MAX];
1620 property_get("ro.product.manufacturer", manufacturer, "");
1621
1622 char brand[PROPERTY_VALUE_MAX];
1623 property_get("ro.product.brand", brand, "");
1624
1625 String8 cameraModel(model);
1626 cameraModel += "-";
1627 cameraModel += manufacturer;
1628 cameraModel += "-";
1629 cameraModel += brand;
1630
1631 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1632 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
1633 TAG_UNIQUECAMERAMODEL, writer);
1634 }
1635
1636 {
1637 // Setup sensor noise model
1638 camera_metadata_entry entry =
1639 results.find(ANDROID_SENSOR_NOISE_PROFILE);
1640
1641         const size_t numPlaneColors = 3;
1642         const size_t numCfaChannels = 4;
1643
1644 uint8_t cfaOut[numCfaChannels];
1645 if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1646 jniThrowException(env, "java/lang/IllegalArgumentException",
1647 "Invalid CFA from camera characteristics");
1648 return nullptr;
1649 }
1650
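        // The NoiseProfile tag carries two model coefficients per output color plane;
        // generateNoiseProfile reorders the per-CFA-channel values from the HAL into the
        // plane color order given by cfaPlaneColor.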
1651 double noiseProfile[numPlaneColors * 2];
1652
1653 if (entry.count > 0) {
1654 if (entry.count != numCfaChannels * 2) {
1655 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1656 "in characteristics, no noise profile tag written...",
1657 __FUNCTION__, entry.count);
1658 } else {
1659 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1660 cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1661
1662 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1663 numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1664 writer);
1665 } else {
1666 ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1667 " tag written...", __FUNCTION__);
1668 }
1669 }
1670 } else {
1671 ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
1672 __FUNCTION__);
1673 }
1674 }
1675
1676 {
1677 // Set up opcode List 2
1678 OpcodeListBuilder builder;
1679 status_t err = OK;
1680
1681 // Set up lens shading map
1682 camera_metadata_entry entry1 =
1683 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1684
1685 uint32_t lsmWidth = 0;
1686 uint32_t lsmHeight = 0;
1687
1688 if (entry1.count != 0) {
1689 lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1690 lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1691 }
1692
1693 camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1694
1695 camera_metadata_entry entry =
1696 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1697 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1698 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1699 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1700 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1701 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
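        // The pre-correction active array establishes the coordinate frame for the gain
        // map and the hot pixel list handled below.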
1702 if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1703 // GainMap rectangle is relative to the active area origin.
1704 err = builder.addGainMapsForMetadata(lsmWidth,
1705 lsmHeight,
1706 0,
1707 0,
1708 height,
1709 width,
1710 opcodeCfaLayout,
1711 entry2.data.f);
1712 if (err != OK) {
1713 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1714 jniThrowRuntimeException(env, "failed to add lens shading map.");
1715 return nullptr;
1716 }
1717 }
1718
1719
1720 // Set up bad pixel correction list
1721 camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1722
1723 if ((entry3.count % 2) != 0) {
1724 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
1725 __FUNCTION__);
1726 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1727 return nullptr;
1728 }
1729
1730 // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
1731 std::vector<uint32_t> v;
1732 for (size_t i = 0; i < entry3.count; i+=2) {
1733 int32_t x = entry3.data.i32[i];
1734 int32_t y = entry3.data.i32[i + 1];
1735 x -= static_cast<int32_t>(xmin);
1736 y -= static_cast<int32_t>(ymin);
1737             if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
1738                     static_cast<uint32_t>(y) >= height) {
1739 continue;
1740 }
1741 v.push_back(x);
1742 v.push_back(y);
1743 }
1744         const uint32_t* badPixels = v.data();
1745 uint32_t badPixelCount = v.size();
1746
1747 if (badPixelCount > 0) {
1748 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
1749
1750 if (err != OK) {
1751 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
1752 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1753 return nullptr;
1754 }
1755 }
1756
1757
1758 size_t listSize = builder.getSize();
1759 uint8_t opcodeListBuf[listSize];
1760 err = builder.buildOpList(opcodeListBuf);
1761 if (err == OK) {
1762 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize, opcodeListBuf,
1763 TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
1764 } else {
1765             ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel "
1766                     "correction.", __FUNCTION__);
1767             jniThrowRuntimeException(env, "failed to construct opcode list for lens shading "
1768                     "map and bad pixel correction");
1769 return nullptr;
1770 }
1771 }
1772
1773 {
1774 // Set up opcode List 3
1775 OpcodeListBuilder builder;
1776 status_t err = OK;
1777
1778 // Set up rectilinear distortion correction
1779 camera_metadata_entry entry3 =
1780 results.find(ANDROID_LENS_RADIAL_DISTORTION);
1781 camera_metadata_entry entry4 =
1782 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
1783
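        // The WarpRectilinear opcode is built from the radial distortion coefficients and
        // the optical center (c_x, c_y) taken from the intrinsic calibration entry.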
1784 if (entry3.count == 6 && entry4.count == 5) {
1785 float cx = entry4.data.f[/*c_x*/2];
1786 float cy = entry4.data.f[/*c_y*/3];
1787 err = builder.addWarpRectilinearForMetadata(entry3.data.f, preWidth, preHeight, cx,
1788 cy);
1789 if (err != OK) {
1790 ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
1791 jniThrowRuntimeException(env, "failed to add distortion correction.");
1792 return nullptr;
1793 }
1794 }
1795
1796 size_t listSize = builder.getSize();
1797 uint8_t opcodeListBuf[listSize];
1798 err = builder.buildOpList(opcodeListBuf);
1799 if (err == OK) {
1800 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize, opcodeListBuf,
1801 TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
1802 } else {
1803             ALOGE("%s: Could not build list of opcodes for rectilinear distortion correction.",
1804                     __FUNCTION__);
1805             jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
1806                     " correction");
1807 return nullptr;
1808 }
1809 }
1810
1811 {
1812 // Set up orientation tags.
1813 // Note: There's only one orientation field for the whole file, in IFD0
1814 // The main image and any thumbnails therefore have the same orientation.
1815 uint16_t orientation = nativeContext->getOrientation();
1816 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1817 env, TAG_ORIENTATION, writer);
1818
1819 }
1820
1821 if (nativeContext->hasDescription()){
1822 // Set Description
1823 String8 description = nativeContext->getDescription();
1824 size_t len = description.bytes() + 1;
1825 if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
1826 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
1827 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1828                     "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
            return nullptr;
1829         }
1830 }
1831
1832 if (nativeContext->hasGpsData()) {
1833 // Set GPS tags
1834 GpsData gpsData = nativeContext->getGpsData();
1835 if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
1836 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
1837 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
1838 TIFF_IFD_0);
1839 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
1840 return nullptr;
1841 }
1842 }
1843
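        // The GPS entries below follow the EXIF GPS IFD layout; the version is 2.3.0.0.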
1844 {
1845 uint8_t version[] = {2, 3, 0, 0};
1846 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
1847 TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
1848 }
1849
1850 {
1851 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
1852 GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
1853 TAG_GPSLATITUDEREF, writer);
1854 }
1855
1856 {
1857 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
1858 GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
1859 TAG_GPSLONGITUDEREF, writer);
1860 }
1861
1862 {
1863 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
1864 TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
1865 }
1866
1867 {
1868 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
1869 TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
1870 }
1871
1872 {
1873 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
1874 TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
1875 }
1876
1877 {
1878 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
1879 GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
1880 TAG_GPSDATESTAMP, writer);
1881 }
1882 }
1883
1884
1885 if (nativeContext->hasThumbnail()) {
1886 if (!writer->hasIfd(TIFF_IFD_SUB1)) {
1887 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
1888 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
1889 TIFF_IFD_0);
1890 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
1891 return nullptr;
1892 }
1893 }
1894
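        // With a thumbnail present, IFD0 is rewritten to describe the RGB thumbnail and
        // the full-resolution RAW image lives in SubIFD1, so move the RAW-specific tags
        // out of IFD0 before the thumbnail tags below overwrite them.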
1895 Vector<uint16_t> tagsToMove;
1896 tagsToMove.add(TAG_NEWSUBFILETYPE);
1897 tagsToMove.add(TAG_ACTIVEAREA);
1898 tagsToMove.add(TAG_BITSPERSAMPLE);
1899 tagsToMove.add(TAG_COMPRESSION);
1900 tagsToMove.add(TAG_IMAGEWIDTH);
1901 tagsToMove.add(TAG_IMAGELENGTH);
1902 tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION);
1903 tagsToMove.add(TAG_BLACKLEVEL);
1904 tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
1905 tagsToMove.add(TAG_SAMPLESPERPIXEL);
1906 tagsToMove.add(TAG_PLANARCONFIGURATION);
1907 tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
1908 tagsToMove.add(TAG_CFAPATTERN);
1909 tagsToMove.add(TAG_CFAPLANECOLOR);
1910 tagsToMove.add(TAG_CFALAYOUT);
1911 tagsToMove.add(TAG_XRESOLUTION);
1912 tagsToMove.add(TAG_YRESOLUTION);
1913 tagsToMove.add(TAG_RESOLUTIONUNIT);
1914 tagsToMove.add(TAG_WHITELEVEL);
1915 tagsToMove.add(TAG_DEFAULTSCALE);
1916 tagsToMove.add(TAG_DEFAULTCROPORIGIN);
1917 tagsToMove.add(TAG_DEFAULTCROPSIZE);
1918 tagsToMove.add(TAG_OPCODELIST2);
1919 tagsToMove.add(TAG_OPCODELIST3);
1920
1921 if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
1922 jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
1923 return nullptr;
1924 }
1925
1926 // Setup thumbnail tags
1927
1928 {
1929 // Set photometric interpretation
1930 uint16_t interpretation = 2; // RGB
1931 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1932 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1933 }
1934
1935 {
1936 // Set planar configuration
1937 uint16_t config = 1; // Chunky
1938 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1939 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1940 }
1941
1942 {
1943 // Set samples per pixel
1944 uint16_t samples = SAMPLES_PER_RGB_PIXEL;
1945 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
1946 TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
1947 }
1948
1949 {
1950 // Set bits per sample
1951 uint16_t bits[SAMPLES_PER_RGB_PIXEL];
1952 for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
1953 BAIL_IF_INVALID_RET_NULL_SP(
1954 writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_0),
1955 env, TAG_BITSPERSAMPLE, writer);
1956 }
1957
1958 {
1959 // Set subfiletype
1960 uint32_t subfileType = 1; // Thumbnail image
1961 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1962 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1963 }
1964
1965 {
1966 // Set compression
1967 uint16_t compression = 1; // None
1968 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1969 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1970 }
1971
1972 {
1973 // Set dimensions
1974 uint32_t uWidth = nativeContext->getThumbnailWidth();
1975 uint32_t uHeight = nativeContext->getThumbnailHeight();
1976 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
1977 env, TAG_IMAGEWIDTH, writer);
1978 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
1979 env, TAG_IMAGELENGTH, writer);
1980 }
1981
1982 {
1983 // x resolution
1984 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1985 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1986 env, TAG_XRESOLUTION, writer);
1987
1988 // y resolution
1989 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1990 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1991 env, TAG_YRESOLUTION, writer);
1992
1993 uint16_t unit = 2; // inches
1994 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1995 env, TAG_RESOLUTIONUNIT, writer);
1996 }
1997 }
1998
1999 if (writer->addStrip(TIFF_IFD_0) != OK) {
2000         ALOGE("%s: Could not setup IFD0 strip tags.", __FUNCTION__);
2001         jniThrowException(env, "java/lang/IllegalStateException",
2002                 "Failed to setup IFD0 strip tags.");
2003 return nullptr;
2004 }
2005
2006 if (writer->hasIfd(TIFF_IFD_SUB1)) {
2007 if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
2008             ALOGE("%s: Could not setup main image strip tags.", __FUNCTION__);
2009 jniThrowException(env, "java/lang/IllegalStateException",
2010 "Failed to setup main image strip tags.");
2011 return nullptr;
2012 }
2013 }
2014 return writer;
2015 }
2016
2017 static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
2018 ALOGV("%s:", __FUNCTION__);
2019 DngCreator_setNativeContext(env, thiz, nullptr);
2020 }
2021
2022 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
2023 ALOGV("%s:", __FUNCTION__);
2024
2025 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2026 if (context == nullptr) {
2027 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2028 jniThrowException(env, "java/lang/AssertionError",
2029 "setOrientation called with uninitialized DngCreator");
2030 return;
2031 }
2032
2033 uint16_t orientation = static_cast<uint16_t>(orient);
2034 context->setOrientation(orientation);
2035 }
2036
2037 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2038 ALOGV("%s:", __FUNCTION__);
2039
2040 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2041 if (context == nullptr) {
2042 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2043 jniThrowException(env, "java/lang/AssertionError",
2044 "setDescription called with uninitialized DngCreator");
2045 return;
2046 }
2047
2048 const char* desc = env->GetStringUTFChars(description, nullptr);
2049 context->setDescription(String8(desc));
2050 env->ReleaseStringUTFChars(description, desc);
2051 }
2052
2053 static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
2054 jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
2055 ALOGV("%s:", __FUNCTION__);
2056
2057 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2058 if (context == nullptr) {
2059 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2060 jniThrowException(env, "java/lang/AssertionError",
2061 "setGpsTags called with uninitialized DngCreator");
2062 return;
2063 }
2064
2065 GpsData data;
2066
2067 jsize latLen = env->GetArrayLength(latTag);
2068 jsize longLen = env->GetArrayLength(longTag);
2069 jsize timeLen = env->GetArrayLength(timeTag);
2070 if (latLen != GpsData::GPS_VALUE_LENGTH) {
2071 jniThrowException(env, "java/lang/IllegalArgumentException",
2072 "invalid latitude tag length");
2073 return;
2074 } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
2075 jniThrowException(env, "java/lang/IllegalArgumentException",
2076 "invalid longitude tag length");
2077 return;
2078 } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
2079 jniThrowException(env, "java/lang/IllegalArgumentException",
2080 "invalid time tag length");
2081 return;
2082 }
2083
2084 env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2085 reinterpret_cast<jint*>(&data.mLatitude));
2086 env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2087 reinterpret_cast<jint*>(&data.mLongitude));
2088 env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2089 reinterpret_cast<jint*>(&data.mTimestamp));
2090
2091
2092 env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
2093 data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2094 env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
2095 data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2096 env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
2097 reinterpret_cast<char*>(&data.mDate));
2098 data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';
2099
2100 context->setGpsData(data);
2101 }
2102
2103 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2104 jint height) {
2105 ALOGV("%s:", __FUNCTION__);
2106
2107 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2108 if (context == nullptr) {
2109 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2110 jniThrowException(env, "java/lang/AssertionError",
2111 "setThumbnail called with uninitialized DngCreator");
2112 return;
2113 }
2114
2115 size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2116 jlong capacity = env->GetDirectBufferCapacity(buffer);
2117 if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2118 jniThrowExceptionFmt(env, "java/lang/AssertionError",
2119                 "Invalid size %" PRId64 " for thumbnail, expected size was %zu",
2120                 capacity, fullSize);
2121 return;
2122 }
2123
2124 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2125 if (pixelBytes == nullptr) {
2126 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2127 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2128 return;
2129 }
2130
2131 if (!context->setThumbnail(pixelBytes, width, height)) {
2132 jniThrowException(env, "java/lang/IllegalStateException",
2133 "Failed to set thumbnail.");
2134 return;
2135 }
2136 }
2137
2138 // TODO: Refactor out common preamble for the two nativeWrite methods.
2139 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2140 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2141 jboolean isDirect) {
2142 ALOGV("%s:", __FUNCTION__);
2143 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2144 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2145 height, rowStride, pixStride, offset);
2146 uint32_t rStride = static_cast<uint32_t>(rowStride);
2147 uint32_t pStride = static_cast<uint32_t>(pixStride);
2148 uint32_t uWidth = static_cast<uint32_t>(width);
2149 uint32_t uHeight = static_cast<uint32_t>(height);
2150 uint64_t uOffset = static_cast<uint64_t>(offset);
2151
2152 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2153     if (env->ExceptionCheck()) {
2154 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2155 return;
2156 }
2157
2158 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2159 if (context == nullptr) {
2160 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2161 jniThrowException(env, "java/lang/AssertionError",
2162 "Write called with uninitialized DngCreator");
2163 return;
2164 }
2165 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2166
2167 if (writer.get() == nullptr) {
2168 return;
2169 }
2170
2171 // Validate DNG size
2172 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2173 return;
2174 }
2175
2176 sp<JniInputByteBuffer> inBuf;
2177 Vector<StripSource*> sources;
2178 sp<DirectStripSource> thumbnailSource;
2179 uint32_t targetIfd = TIFF_IFD_0;
2180
2181 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2182
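    // When a thumbnail IFD exists, the thumbnail strips are written into IFD0 and the
    // main RAW strips target SubIFD1 instead.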
2183 if (hasThumbnail) {
2184 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2185 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2186 uint32_t thumbWidth = context->getThumbnailWidth();
2187 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2188 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2189 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2190 SAMPLES_PER_RGB_PIXEL);
2191 sources.add(thumbnailSource.get());
2192 targetIfd = TIFF_IFD_SUB1;
2193 }
2194
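    // Direct ByteBuffers are read in place through a DirectStripSource; non-direct
    // buffers are pulled through a JniInputByteBuffer-backed InputStripSource.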
2195 if (isDirect) {
2196 size_t fullSize = rStride * uHeight;
2197 jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2198 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2199 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2200                     "Invalid size %" PRId64 " for Image, size given in metadata is %zu at "
2201                     "current stride", capacity, fullSize);
2202 return;
2203 }
2204
2205 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2206 if (pixelBytes == nullptr) {
2207 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2208 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2209 return;
2210 }
2211
2212 ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2213 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2214 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2215 sources.add(&stripSource);
2216
2217 status_t ret = OK;
2218 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2219 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2220 if (!env->ExceptionCheck()) {
2221 jniThrowExceptionFmt(env, "java/io/IOException",
2222 "Encountered error %d while writing file.", ret);
2223 }
2224 return;
2225 }
2226 } else {
2227 inBuf = new JniInputByteBuffer(env, inBuffer);
2228
2229 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2230 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2231 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2232 sources.add(&stripSource);
2233
2234 status_t ret = OK;
2235 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2236 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2237 if (!env->ExceptionCheck()) {
2238 jniThrowExceptionFmt(env, "java/io/IOException",
2239 "Encountered error %d while writing file.", ret);
2240 }
2241 return;
2242 }
2243 }
2244 }
2245
2246 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2247 jobject inStream, jint width, jint height, jlong offset) {
2248 ALOGV("%s:", __FUNCTION__);
2249
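    // The InputStream path assumes tightly packed 16-bit samples: one sample per pixel
    // and rows of exactly width * BYTES_PER_SAMPLE bytes.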
2250 uint32_t rowStride = width * BYTES_PER_SAMPLE;
2251 uint32_t pixStride = BYTES_PER_SAMPLE;
2252 uint32_t uWidth = static_cast<uint32_t>(width);
2253 uint32_t uHeight = static_cast<uint32_t>(height);
2254     uint64_t uOffset = static_cast<uint64_t>(offset);
2255
2256 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2257 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2258 height, rowStride, pixStride, offset);
2259
2260 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2261 if (env->ExceptionCheck()) {
2262 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2263 return;
2264 }
2265
2266 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2267 if (context == nullptr) {
2268 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2269 jniThrowException(env, "java/lang/AssertionError",
2270 "Write called with uninitialized DngCreator");
2271 return;
2272 }
2273 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2274
2275 if (writer.get() == nullptr) {
2276 return;
2277 }
2278
2279 // Validate DNG size
2280 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2281 return;
2282 }
2283
2284 sp<DirectStripSource> thumbnailSource;
2285 uint32_t targetIfd = TIFF_IFD_0;
2286 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2287 Vector<StripSource*> sources;
2288
2289 if (hasThumbnail) {
2290 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2291 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2292         uint32_t thumbWidth = context->getThumbnailWidth();
2293         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2294                 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2295                 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2296 SAMPLES_PER_RGB_PIXEL);
2297 sources.add(thumbnailSource.get());
2298 targetIfd = TIFF_IFD_SUB1;
2299 }
2300
2301 sp<JniInputStream> in = new JniInputStream(env, inStream);
2302
2303 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2304 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2305 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2306 sources.add(&stripSource);
2307
2308 status_t ret = OK;
2309 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2310 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2311 if (!env->ExceptionCheck()) {
2312 jniThrowExceptionFmt(env, "java/io/IOException",
2313 "Encountered error %d while writing file.", ret);
2314 }
2315 return;
2316 }
2317 }
2318
2319 } /*extern "C" */
2320
2321 static const JNINativeMethod gDngCreatorMethods[] = {
2322 {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit},
2323 {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
2324 "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
2325 (void*) DngCreator_init},
2326 {"nativeDestroy", "()V", (void*) DngCreator_destroy},
2327 {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation},
2328 {"nativeSetDescription", "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
2329 {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
2330 (void*) DngCreator_nativeSetGpsTags},
2331 {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
2332 {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
2333 (void*) DngCreator_nativeWriteImage},
2334 {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
2335 (void*) DngCreator_nativeWriteInputStream},
2336 };
2337
2338 int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
2339 return RegisterMethodsOrDie(env,
2340 "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
2341 }
2342