1 /*
2 * Copyright 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <memory>
23
24 #include <utils/Log.h>
25 #include <utils/Errors.h>
26 #include <utils/StrongPointer.h>
27 #include <utils/RefBase.h>
28 #include <utils/Vector.h>
29 #include <utils/String8.h>
30 #include <cutils/properties.h>
31 #include <system/camera_metadata.h>
32 #include <camera/CameraMetadata.h>
33 #include <img_utils/DngUtils.h>
34 #include <img_utils/TagDefinitions.h>
35 #include <img_utils/TiffIfd.h>
36 #include <img_utils/TiffWriter.h>
37 #include <img_utils/Output.h>
38 #include <img_utils/Input.h>
39 #include <img_utils/StripSource.h>
40
41 #include "core_jni_helpers.h"
42
43 #include "android_runtime/AndroidRuntime.h"
44 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
45
46 #include <jni.h>
47 #include <JNIHelp.h>
48
49 using namespace android;
50 using namespace img_utils;
51
52 #define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
53 if ((expr) != OK) { \
54 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
55 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
56 return false; \
57 }
58
59
60 #define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
61 if ((expr) != OK) { \
62 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
63 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
64 return nullptr; \
65 }
66
67
68 #define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
69 if ((expr) != OK) { \
70 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
71 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
72 return -1; \
73 }
74
75 #define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
76 if (entry.count == 0) { \
77 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
78 "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
79 return nullptr; \
80 }
81
82
83 #define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"
84
85 static struct {
86 jfieldID mNativeContext;
87 } gDngCreatorClassInfo;
88
89 static struct {
90 jmethodID mWriteMethod;
91 } gOutputStreamClassInfo;
92
93 static struct {
94 jmethodID mReadMethod;
95 jmethodID mSkipMethod;
96 } gInputStreamClassInfo;
97
98 static struct {
99 jmethodID mGetMethod;
100 } gInputByteBufferClassInfo;
101
102 enum {
103 BITS_PER_SAMPLE = 16,
104 BYTES_PER_SAMPLE = 2,
105 BYTES_PER_RGB_PIXEL = 3,
106 BITS_PER_RGB_SAMPLE = 8,
107 BYTES_PER_RGB_SAMPLE = 1,
108 SAMPLES_PER_RGB_PIXEL = 3,
109 SAMPLES_PER_RAW_PIXEL = 1,
110 TIFF_IFD_0 = 0,
111 TIFF_IFD_SUB1 = 1,
112 TIFF_IFD_GPSINFO = 2,
113 };
114
115
116 /**
117 * POD container class for GPS tag data.
118 */
119 class GpsData {
120 public:
121 enum {
122 GPS_VALUE_LENGTH = 6,
123 GPS_REF_LENGTH = 2,
124 GPS_DATE_LENGTH = 11,
125 };
126
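    // Latitude, longitude, and timestamp are each stored as three EXIF rationals
    // (numerator/denominator pairs), giving 6 uint32_t values; the reference strings are a
    // single character plus a null terminator; the date is "YYYY:MM:DD" plus a null.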
127 uint32_t mLatitude[GPS_VALUE_LENGTH];
128 uint32_t mLongitude[GPS_VALUE_LENGTH];
129 uint32_t mTimestamp[GPS_VALUE_LENGTH];
130 uint8_t mLatitudeRef[GPS_REF_LENGTH];
131 uint8_t mLongitudeRef[GPS_REF_LENGTH];
132 uint8_t mDate[GPS_DATE_LENGTH];
133 };
134
135 // ----------------------------------------------------------------------------
136
137 /**
138 * Container class for the persistent native context.
139 */
140
141 class NativeContext : public LightRefBase<NativeContext> {
142 public:
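    // Length of the EXIF-style "YYYY:MM:DD HH:MM:SS" capture time string, including the
    // terminating null (19 characters + 1).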
143 enum {
144 DATETIME_COUNT = 20,
145 };
146
147 NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
148 virtual ~NativeContext();
149
150 TiffWriter* getWriter();
151
152 std::shared_ptr<const CameraMetadata> getCharacteristics() const;
153 std::shared_ptr<const CameraMetadata> getResult() const;
154
155 uint32_t getThumbnailWidth() const;
156 uint32_t getThumbnailHeight() const;
157 const uint8_t* getThumbnail() const;
158 bool hasThumbnail() const;
159
160 bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);
161
162 void setOrientation(uint16_t orientation);
163 uint16_t getOrientation() const;
164
165 void setDescription(const String8& desc);
166 String8 getDescription() const;
167 bool hasDescription() const;
168
169 void setGpsData(const GpsData& data);
170 GpsData getGpsData() const;
171 bool hasGpsData() const;
172
173 void setCaptureTime(const String8& formattedCaptureTime);
174 String8 getCaptureTime() const;
175 bool hasCaptureTime() const;
176
177 private:
178 Vector<uint8_t> mCurrentThumbnail;
179 TiffWriter mWriter;
180 std::shared_ptr<CameraMetadata> mCharacteristics;
181 std::shared_ptr<CameraMetadata> mResult;
182 uint32_t mThumbnailWidth;
183 uint32_t mThumbnailHeight;
184 uint16_t mOrientation;
185 bool mThumbnailSet;
186 bool mGpsSet;
187 bool mDescriptionSet;
188 bool mCaptureTimeSet;
189 String8 mDescription;
190 GpsData mGpsData;
191 String8 mFormattedCaptureTime;
192 };
193
194 NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
195 mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
196 mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
197 mThumbnailHeight(0), mOrientation(0), mThumbnailSet(false), mGpsSet(false),
198 mDescriptionSet(false), mCaptureTimeSet(false) {}
199
200 NativeContext::~NativeContext() {}
201
202 TiffWriter* NativeContext::getWriter() {
203 return &mWriter;
204 }
205
206 std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
207 return mCharacteristics;
208 }
209
210 std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
211 return mResult;
212 }
213
214 uint32_t NativeContext::getThumbnailWidth() const {
215 return mThumbnailWidth;
216 }
217
218 uint32_t NativeContext::getThumbnailHeight() const {
219 return mThumbnailHeight;
220 }
221
222 const uint8_t* NativeContext::getThumbnail() const {
223 return mCurrentThumbnail.array();
224 }
225
226 bool NativeContext::hasThumbnail() const {
227 return mThumbnailSet;
228 }
229
230 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
231 mThumbnailWidth = width;
232 mThumbnailHeight = height;
233
234 size_t size = BYTES_PER_RGB_PIXEL * width * height;
235 if (mCurrentThumbnail.resize(size) < 0) {
236 ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
237 return false;
238 }
239
240 uint8_t* thumb = mCurrentThumbnail.editArray();
241 memcpy(thumb, buffer, size);
242 mThumbnailSet = true;
243 return true;
244 }
245
246 void NativeContext::setOrientation(uint16_t orientation) {
247 mOrientation = orientation;
248 }
249
250 uint16_t NativeContext::getOrientation() const {
251 return mOrientation;
252 }
253
254 void NativeContext::setDescription(const String8& desc) {
255 mDescription = desc;
256 mDescriptionSet = true;
257 }
258
259 String8 NativeContext::getDescription() const {
260 return mDescription;
261 }
262
263 bool NativeContext::hasDescription() const {
264 return mDescriptionSet;
265 }
266
267 void NativeContext::setGpsData(const GpsData& data) {
268 mGpsData = data;
269 mGpsSet = true;
270 }
271
272 GpsData NativeContext::getGpsData() const {
273 return mGpsData;
274 }
275
276 bool NativeContext::hasGpsData() const {
277 return mGpsSet;
278 }
279
280 void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
281 mFormattedCaptureTime = formattedCaptureTime;
282 mCaptureTimeSet = true;
283 }
284
285 String8 NativeContext::getCaptureTime() const {
286 return mFormattedCaptureTime;
287 }
288
289 bool NativeContext::hasCaptureTime() const {
290 return mCaptureTimeSet;
291 }
292
293 // End of NativeContext
294 // ----------------------------------------------------------------------------
295
296 /**
297 * Wrapper class for a Java OutputStream.
298 *
299 * This class is not intended to be used across JNI calls.
300 */
301 class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
302 public:
303 JniOutputStream(JNIEnv* env, jobject outStream);
304
305 virtual ~JniOutputStream();
306
307 status_t open();
308
309 status_t write(const uint8_t* buf, size_t offset, size_t count);
310
311 status_t close();
312 private:
313 enum {
314 BYTE_ARRAY_LENGTH = 4096
315 };
316 jobject mOutputStream;
317 JNIEnv* mEnv;
318 jbyteArray mByteArray;
319 };
320
321 JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
322 mEnv(env) {
323 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
324 if (mByteArray == nullptr) {
325 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
326 }
327 }
328
329 JniOutputStream::~JniOutputStream() {
330 mEnv->DeleteLocalRef(mByteArray);
331 }
332
333 status_t JniOutputStream::open() {
334 // Do nothing
335 return OK;
336 }
337
338 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
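    // Copy the native buffer into the scratch Java byte array in BYTE_ARRAY_LENGTH-sized
    // chunks, handing each chunk to OutputStream.write(byte[], int, int).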
339 while(count > 0) {
340 size_t len = BYTE_ARRAY_LENGTH;
341 len = (count > len) ? len : count;
342 mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
343
344 if (mEnv->ExceptionCheck()) {
345 return BAD_VALUE;
346 }
347
348 mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
349 0, len);
350
351 if (mEnv->ExceptionCheck()) {
352 return BAD_VALUE;
353 }
354
355 count -= len;
356 offset += len;
357 }
358 return OK;
359 }
360
361 status_t JniOutputStream::close() {
362 // Do nothing
363 return OK;
364 }
365
366 // End of JniOutputStream
367 // ----------------------------------------------------------------------------
368
369 /**
370 * Wrapper class for a Java InputStream.
371 *
372 * This class is not intended to be used across JNI calls.
373 */
374 class JniInputStream : public Input, public LightRefBase<JniInputStream> {
375 public:
376 JniInputStream(JNIEnv* env, jobject inStream);
377
378 status_t open();
379
380 status_t close();
381
382 ssize_t read(uint8_t* buf, size_t offset, size_t count);
383
384 ssize_t skip(size_t count);
385
386 virtual ~JniInputStream();
387 private:
388 enum {
389 BYTE_ARRAY_LENGTH = 4096
390 };
391 jobject mInStream;
392 JNIEnv* mEnv;
393 jbyteArray mByteArray;
394
395 };
396
397 JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
398 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
399 if (mByteArray == nullptr) {
400 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
401 }
402 }
403
404 JniInputStream::~JniInputStream() {
405 mEnv->DeleteLocalRef(mByteArray);
406 }
407
408 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
409
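    // Read up to BYTE_ARRAY_LENGTH bytes via InputStream.read(byte[], int, int) into the
    // scratch array, then copy the bytes that were actually read into the native buffer.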
410 jint realCount = BYTE_ARRAY_LENGTH;
411 if (count < BYTE_ARRAY_LENGTH) {
412 realCount = count;
413 }
414 jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
415 realCount);
416
417 if (actual < 0) {
418 return NOT_ENOUGH_DATA;
419 }
420
421 if (mEnv->ExceptionCheck()) {
422 return BAD_VALUE;
423 }
424
425 mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
426 if (mEnv->ExceptionCheck()) {
427 return BAD_VALUE;
428 }
429 return actual;
430 }
431
432 ssize_t JniInputStream::skip(size_t count) {
433 jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
434 static_cast<jlong>(count));
435
436 if (mEnv->ExceptionCheck()) {
437 return BAD_VALUE;
438 }
439 if (actual < 0) {
440 return NOT_ENOUGH_DATA;
441 }
442 return actual;
443 }
444
445 status_t JniInputStream::open() {
446 // Do nothing
447 return OK;
448 }
449
450 status_t JniInputStream::close() {
451 // Do nothing
452 return OK;
453 }
454
455 // End of JniInputStream
456 // ----------------------------------------------------------------------------
457
458 /**
459 * Wrapper class for a non-direct Java ByteBuffer.
460 *
461 * This class is not intended to be used across JNI calls.
462 */
463 class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
464 public:
465 JniInputByteBuffer(JNIEnv* env, jobject inBuf);
466
467 status_t open();
468
469 status_t close();
470
471 ssize_t read(uint8_t* buf, size_t offset, size_t count);
472
473 virtual ~JniInputByteBuffer();
474 private:
475 enum {
476 BYTE_ARRAY_LENGTH = 4096
477 };
478 jobject mInBuf;
479 JNIEnv* mEnv;
480 jbyteArray mByteArray;
481 };
482
483 JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
484 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
485 if (mByteArray == nullptr) {
486 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
487 }
488 }
489
490 JniInputByteBuffer::~JniInputByteBuffer() {
491 mEnv->DeleteLocalRef(mByteArray);
492 }
493
494 ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
495 jint realCount = BYTE_ARRAY_LENGTH;
496 if (count < BYTE_ARRAY_LENGTH) {
497 realCount = count;
498 }
499
500 jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
501 mByteArray, 0, realCount);
502 mEnv->DeleteLocalRef(chainingBuf);
503
504 if (mEnv->ExceptionCheck()) {
505 ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
506 return BAD_VALUE;
507 }
508
509 mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
510 if (mEnv->ExceptionCheck()) {
511 ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
512 return BAD_VALUE;
513 }
514 return realCount;
515 }
516
517 status_t JniInputByteBuffer::open() {
518 // Do nothing
519 return OK;
520 }
521
522 status_t JniInputByteBuffer::close() {
523 // Do nothing
524 return OK;
525 }
526
527 // End of JniInputByteBuffer
528 // ----------------------------------------------------------------------------
529
530 /**
531 * StripSource subclass for Input types.
532 *
533 * This class is not intended to be used across JNI calls.
534 */
535
536 class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
537 public:
538 InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
539 uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
540 uint32_t samplesPerPixel);
541
542 virtual ~InputStripSource();
543
544 virtual status_t writeToStream(Output& stream, uint32_t count);
545
546 virtual uint32_t getIfd() const;
547 protected:
548 uint32_t mIfd;
549 Input* mInput;
550 uint32_t mWidth;
551 uint32_t mHeight;
552 uint32_t mPixStride;
553 uint32_t mRowStride;
554 uint64_t mOffset;
555 JNIEnv* mEnv;
556 uint32_t mBytesPerSample;
557 uint32_t mSamplesPerPixel;
558 };
559
560 InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
561 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
562 uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
563 mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
564 mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
565 mSamplesPerPixel(samplesPerPixel) {}
566
567 InputStripSource::~InputStripSource() {}
568
569 status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
570 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
571 jlong offset = mOffset;
572
573 if (fullSize != count) {
574 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
575 fullSize);
576 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
577 return BAD_VALUE;
578 }
579
580 // Skip offset
581 while (offset > 0) {
582 ssize_t skipped = mInput->skip(offset);
583 if (skipped <= 0) {
584 if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
585 jniThrowExceptionFmt(mEnv, "java/io/IOException",
586 "Early EOF encountered in skip, not enough pixel data for image of size %u",
587 fullSize);
588 skipped = NOT_ENOUGH_DATA;
589 } else {
590 if (!mEnv->ExceptionCheck()) {
591 jniThrowException(mEnv, "java/io/IOException",
592 "Error encountered while skip bytes in input stream.");
593 }
594 }
595
596 return skipped;
597 }
598 offset -= skipped;
599 }
600
601 Vector<uint8_t> row;
602 if (row.resize(mRowStride) < 0) {
603 jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
604 return BAD_VALUE;
605 }
606
607 uint8_t* rowBytes = row.editArray();
608
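    // For each row, fill the row buffer with mRowStride bytes from the input, then write out
    // only the packed pixel data (mBytesPerSample * mSamplesPerPixel * mWidth bytes per row).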
609 for (uint32_t i = 0; i < mHeight; ++i) {
610 size_t rowFillAmt = 0;
611 size_t rowSize = mRowStride;
612
613 while (rowFillAmt < mRowStride) {
614 ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
615 if (bytesRead <= 0) {
616 if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
617 ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
618 __FUNCTION__, i, bytesRead);
619 jniThrowExceptionFmt(mEnv, "java/io/IOException",
620 "Early EOF encountered, not enough pixel data for image of size %"
621 PRIu32, fullSize);
622 bytesRead = NOT_ENOUGH_DATA;
623 } else {
624 if (!mEnv->ExceptionCheck()) {
625 jniThrowException(mEnv, "java/io/IOException",
626 "Error encountered while reading");
627 }
628 }
629 return bytesRead;
630 }
631 rowFillAmt += bytesRead;
632 rowSize -= bytesRead;
633 }
634
635 if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
636 ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);
637
638 if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
639 mEnv->ExceptionCheck()) {
640 if (!mEnv->ExceptionCheck()) {
641 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
642 }
643 return BAD_VALUE;
644 }
645 } else {
646 ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
647 jniThrowException(mEnv, "java/lang/IllegalStateException",
648 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
649 return BAD_VALUE;
650
651 // TODO: Add support for non-contiguous pixels if needed.
652 }
653 }
654 return OK;
655 }
656
657 uint32_t InputStripSource::getIfd() const {
658 return mIfd;
659 }
660
661 // End of InputStripSource
662 // ----------------------------------------------------------------------------
663
664 /**
665 * StripSource subclass for direct buffer types.
666 *
667 * This class is not intended to be used across JNI calls.
668 */
669
670 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
671 public:
672 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
673 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
674 uint32_t bytesPerSample, uint32_t samplesPerPixel);
675
676 virtual ~DirectStripSource();
677
678 virtual status_t writeToStream(Output& stream, uint32_t count);
679
680 virtual uint32_t getIfd() const;
681 protected:
682 uint32_t mIfd;
683 const uint8_t* mPixelBytes;
684 uint32_t mWidth;
685 uint32_t mHeight;
686 uint32_t mPixStride;
687 uint32_t mRowStride;
688 uint64_t mOffset;
689 JNIEnv* mEnv;
690 uint32_t mBytesPerSample;
691 uint32_t mSamplesPerPixel;
692 };
693
694 DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
695 uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
696 uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
697 mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
698 mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
699 mSamplesPerPixel(samplesPerPixel) {}
700
701 DirectStripSource::~DirectStripSource() {}
702
703 status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
704 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
705
706 if (fullSize != count) {
707 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
708 fullSize);
709 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
710 return BAD_VALUE;
711 }
712
713
714 if (mPixStride == mBytesPerSample * mSamplesPerPixel
715 && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
716 ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);
717
718 if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
719 if (!mEnv->ExceptionCheck()) {
720 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
721 }
722 return BAD_VALUE;
723 }
724 } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
725 ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);
726
727 for (size_t i = 0; i < mHeight; ++i) {
728 if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
729 mEnv->ExceptionCheck()) {
730 if (!mEnv->ExceptionCheck()) {
731 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
732 }
733 return BAD_VALUE;
734 }
735 }
736 } else {
737 ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);
738
739 jniThrowException(mEnv, "java/lang/IllegalStateException",
740 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
741 return BAD_VALUE;
742
743 // TODO: Add support for non-contiguous pixels if needed.
744 }
745 return OK;
746
747 }
748
749 uint32_t DirectStripSource::getIfd() const {
750 return mIfd;
751 }
752
753 // End of DirectStripSource
754 // ----------------------------------------------------------------------------
755
756 /**
757 * Calculate the default crop relative to the "active area" of the image sensor (this active area
758 * will always be the pre-correction active area rectangle), and set the DefaultCropOrigin and
 * DefaultCropSize tags in IFD 0.
759 */
760 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
761 sp<TiffWriter> writer) {
762
763 camera_metadata_ro_entry entry =
764 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
765 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
766 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
767
768 const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
769
770 if (width < margin * 2 || height < margin * 2) {
771 ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too "
772 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
773 jniThrowException(env, "java/lang/IllegalStateException",
774 "Pre-correction active area is too small.");
775 return BAD_VALUE;
776 }
777
778 uint32_t defaultCropOrigin[] = {margin, margin};
779 uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
780 height - defaultCropOrigin[1] - margin};
781
782 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
783 TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
784 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
785 TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
786
787 return OK;
788 }
789
790 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
791 const CameraMetadata& characteristics, jint width, jint height) {
792 if (width <= 0) {
793 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
794 "Image width %d is invalid", width);
795 return false;
796 }
797
798 if (height <= 0) {
799 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
800 "Image height %d is invalid", height);
801 return false;
802 }
803
804 camera_metadata_ro_entry preCorrectionEntry =
805 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
806 camera_metadata_ro_entry pixelArrayEntry =
807 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
808
809 int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
810 int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
811 int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
812 int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
813
814 bool matchesPixelArray = (pWidth == width && pHeight == height);
815 bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
816
817 if (!(matchesPixelArray || matchesPreCorrectionArray)) {
818 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
819 "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
820 "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
821 width, height, pWidth, pHeight, cWidth, cHeight);
822 return false;
823 }
824
825 return true;
826 }
827
828 static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo,
829 const Vector<uint16_t>& entries) {
830 for (size_t i = 0; i < entries.size(); ++i) {
831 uint16_t tagId = entries[i];
832 sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom);
833 if (entry.get() == nullptr) {
834 ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId,
835 ifdFrom);
836 return BAD_VALUE;
837 }
838 if (writer->addEntry(entry, ifdTo) != OK) {
839 ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId,
840 ifdTo);
841 return BAD_VALUE;
842 }
843 writer->removeEntry(tagId, ifdFrom);
844 }
845 return OK;
846 }
847
848 /**
849 * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4.
850 * Returns OK on success, or a negative error code if the CFA enum was invalid.
851 */
852 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
853 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
854 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
855 cfaEnum);
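    // DNG CFAPattern color channel codes: 0 = red, 1 = green, 2 = blue.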
856 switch(cfa) {
857 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
858 cfaOut[0] = 0;
859 cfaOut[1] = 1;
860 cfaOut[2] = 1;
861 cfaOut[3] = 2;
862 break;
863 }
864 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
865 cfaOut[0] = 1;
866 cfaOut[1] = 0;
867 cfaOut[2] = 2;
868 cfaOut[3] = 1;
869 break;
870 }
871 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
872 cfaOut[0] = 1;
873 cfaOut[1] = 2;
874 cfaOut[2] = 0;
875 cfaOut[3] = 1;
876 break;
877 }
878 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
879 cfaOut[0] = 2;
880 cfaOut[1] = 1;
881 cfaOut[2] = 1;
882 cfaOut[3] = 0;
883 break;
884 }
885 default: {
886 return BAD_VALUE;
887 }
888 }
889 return OK;
890 }
891
892 /**
893 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
894 * RGGB for an unknown enum.
895 */
896 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
897 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
898 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
899 cfaEnum);
900 switch(cfa) {
901 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
902 return OpcodeListBuilder::CFA_RGGB;
903 }
904 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
905 return OpcodeListBuilder::CFA_GRBG;
906 }
907 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
908 return OpcodeListBuilder::CFA_GBRG;
909 }
910 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
911 return OpcodeListBuilder::CFA_BGGR;
912 }
913 default: {
914 return OpcodeListBuilder::CFA_RGGB;
915 }
916 }
917 }
918
919 /**
920 * For each color plane, find the corresponding noise profile coefficients given in the
921 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color
922 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
923 *
924 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
925 * cfa - numChannels color channels corresponding to each of the per-channel noise profile
926 * coefficients.
927 * numChannels - the number of noise profile coefficient pairs and color channels given in
928 * the perChannelNoiseProfile and cfa arguments, respectively.
929 * planeColors - the color planes in the noise profile output.
930 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
931 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
932 *
933 * returns OK, or a negative error code on failure.
934 */
935 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
936 size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
937 /*out*/double* noiseProfile) {
938
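    // Example: with a GRBG CFA and planeColors = {R, G, B}, the green output plane takes the
    // (S, O) pair from whichever of the two green channels has the larger S coefficient.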
939 for (size_t p = 0; p < numPlanes; ++p) {
940 size_t S = p * 2;
941 size_t O = p * 2 + 1;
942
943 noiseProfile[S] = 0;
944 noiseProfile[O] = 0;
945 bool uninitialized = true;
946 for (size_t c = 0; c < numChannels; ++c) {
947 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
948 noiseProfile[S] = perChannelNoiseProfile[c * 2];
949 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
950 uninitialized = false;
951 }
952 }
953 if (uninitialized) {
954 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
955 __FUNCTION__, p);
956 return BAD_VALUE;
957 }
958 }
959 return OK;
960 }
961
962 // ----------------------------------------------------------------------------
963 extern "C" {
964
965 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
966 ALOGV("%s:", __FUNCTION__);
967 return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
968 gDngCreatorClassInfo.mNativeContext));
969 }
970
971 static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
972 ALOGV("%s:", __FUNCTION__);
973 NativeContext* current = DngCreator_getNativeContext(env, thiz);
974
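    // Transfer ownership to the Java object: take a strong reference on the new context and
    // drop the reference held on any previously stored context.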
975 if (context != nullptr) {
976 context->incStrong((void*) DngCreator_setNativeContext);
977 }
978
979 if (current) {
980 current->decStrong((void*) DngCreator_setNativeContext);
981 }
982
983 env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
984 reinterpret_cast<jlong>(context.get()));
985 }
986
987 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
988 ALOGV("%s:", __FUNCTION__);
989
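    // Cache the field and method IDs used by the native context accessors and the
    // stream/buffer wrapper classes defined above.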
990 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
991 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
992
993 jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
994 gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
995 outputStreamClazz, "write", "([BII)V");
996
997 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
998 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
999 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1000
1001 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1002 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1003 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1004 }
1005
1006 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1007 jobject resultsPtr, jstring formattedCaptureTime) {
1008 ALOGV("%s:", __FUNCTION__);
1009 CameraMetadata characteristics;
1010 CameraMetadata results;
1011 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1012 jniThrowException(env, "java/lang/AssertionError",
1013 "No native metadata defined for camera characteristics.");
1014 return;
1015 }
1016 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1017 jniThrowException(env, "java/lang/AssertionError",
1018 "No native metadata defined for capture results.");
1019 return;
1020 }
1021
1022 sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1023
1024 const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr);
1025
1026 size_t len = strlen(captureTime) + 1;
1027 if (len != NativeContext::DATETIME_COUNT) {
1028 jniThrowException(env, "java/lang/IllegalArgumentException",
1029 "Formatted capture time string length is not required 20 characters");
1030 return;
1031 }
1032
1033 nativeContext->setCaptureTime(String8(captureTime));
1034
1035 DngCreator_setNativeContext(env, thiz, nativeContext);
1036 }
1037
1038 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1039 uint32_t imageHeight) {
1040
1041 NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1042
1043 if (nativeContext == nullptr) {
1044 jniThrowException(env, "java/lang/AssertionError",
1045 "No native context, must call init before other operations.");
1046 return nullptr;
1047 }
1048
1049 CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1050 CameraMetadata results = *(nativeContext->getResult());
1051
1052 sp<TiffWriter> writer = new TiffWriter();
1053
1054 uint32_t preWidth = 0;
1055 uint32_t preHeight = 0;
1056 {
1057 // Check dimensions
1058 camera_metadata_entry entry =
1059 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1060 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1061 preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1062 preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1063
1064 camera_metadata_entry pixelArrayEntry =
1065 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
1066 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1067 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1068
1069 if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1070 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1071 jniThrowException(env, "java/lang/AssertionError",
1072 "Height and width of imate buffer did not match height and width of"
1073 "either the preCorrectionActiveArraySize or the pixelArraySize.");
1074 return nullptr;
1075 }
1076 }
1077
1078
1079
1080 writer->addIfd(TIFF_IFD_0);
1081
1082 status_t err = OK;
1083
1084 const uint32_t samplesPerPixel = 1;
1085 const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1086
1087 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB;
1088 uint8_t cfaPlaneColor[3] = {0, 1, 2};
1089 uint8_t cfaEnum = -1;
1090
1091 // TODO: Greensplit.
1092 // TODO: Add remaining non-essential tags
1093
1094 // Setup main image tags
1095
1096 {
1097 // Set orientation
1098 uint16_t orientation = 1; // Normal
1099 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1100 env, TAG_ORIENTATION, writer);
1101 }
1102
1103 {
1104 // Set subfiletype
1105 uint32_t subfileType = 0; // Main image
1106 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1107 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1108 }
1109
1110 {
1111 // Set bits per sample
1112 uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1113 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1114 TAG_BITSPERSAMPLE, writer);
1115 }
1116
1117 {
1118 // Set compression
1119 uint16_t compression = 1; // None
1120 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1121 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1122 }
1123
1124 {
1125 // Set dimensions
1126 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1127 env, TAG_IMAGEWIDTH, writer);
1128 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1129 env, TAG_IMAGELENGTH, writer);
1130 }
1131
1132 {
1133 // Set photometric interpretation
1134 uint16_t interpretation = 32803; // CFA
1135 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1136 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1137 }
1138
1139 {
1140 // Set blacklevel tags
1141 camera_metadata_entry entry =
1142 characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1143 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BLACKLEVEL, writer);
1144 const uint32_t* blackLevel = reinterpret_cast<const uint32_t*>(entry.data.i32);
1145 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, entry.count, blackLevel,
1146 TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1147
1148 uint16_t repeatDim[2] = {2, 2};
1149 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1150 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1151 }
1152
1153 {
1154 // Set samples per pixel
1155 uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1156 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1157 env, TAG_SAMPLESPERPIXEL, writer);
1158 }
1159
1160 {
1161 // Set planar configuration
1162 uint16_t config = 1; // Chunky
1163 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1164 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1165 }
1166
1167 {
1168 // Set CFA pattern dimensions
1169 uint16_t repeatDim[2] = {2, 2};
1170 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1171 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1172 }
1173
1174 {
1175 // Set CFA pattern
1176 camera_metadata_entry entry =
1177 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1178 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer);
1179
1180 const int cfaLength = 4;
1181 cfaEnum = entry.data.u8[0];
1182 uint8_t cfa[cfaLength];
1183 if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1184 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1185 "Invalid metadata for tag %d", TAG_CFAPATTERN);
1186 }
1187
1188 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1189 env, TAG_CFAPATTERN, writer);
1190
1191 opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1192 }
1193
1194 {
1195 // Set CFA plane color
1196 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1197 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1198 }
1199
1200 {
1201 // Set CFA layout
1202 uint16_t cfaLayout = 1;
1203 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1204 env, TAG_CFALAYOUT, writer);
1205 }
1206
1207 {
1208 // image description
1209 uint8_t imageDescription = '\0'; // empty
1210 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1211 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1212 }
1213
1214 {
1215 // make
1216 char manufacturer[PROPERTY_VALUE_MAX];
1217
1218 // Use "" to represent unknown make as suggested in TIFF/EP spec.
1219 property_get("ro.product.manufacturer", manufacturer, "");
1220 uint32_t count = static_cast<uint32_t>(strlen(manufacturer)) + 1;
1221
1222 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1223 reinterpret_cast<uint8_t*>(manufacturer), TIFF_IFD_0), env, TAG_MAKE, writer);
1224 }
1225
1226 {
1227 // model
1228 char model[PROPERTY_VALUE_MAX];
1229
1230 // Use "" to represent unknown model as suggested in TIFF/EP spec.
1231 property_get("ro.product.model", model, "");
1232 uint32_t count = static_cast<uint32_t>(strlen(model)) + 1;
1233
1234 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1235 reinterpret_cast<uint8_t*>(model), TIFF_IFD_0), env, TAG_MODEL, writer);
1236 }
1237
1238 {
1239 // x resolution
1240 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1241 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1242 env, TAG_XRESOLUTION, writer);
1243
1244 // y resolution
1245 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1246 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1247 env, TAG_YRESOLUTION, writer);
1248
1249 uint16_t unit = 2; // inches
1250 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1251 env, TAG_RESOLUTIONUNIT, writer);
1252 }
1253
1254 {
1255 // software
1256 char software[PROPERTY_VALUE_MAX];
1257 property_get("ro.build.fingerprint", software, "");
1258 uint32_t count = static_cast<uint32_t>(strlen(software)) + 1;
1259 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1260 reinterpret_cast<uint8_t*>(software), TIFF_IFD_0), env, TAG_SOFTWARE, writer);
1261 }
1262
1263 if (nativeContext->hasCaptureTime()) {
1264 // datetime
1265 String8 captureTime = nativeContext->getCaptureTime();
1266
1267 if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
1268 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1269 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1270 "Invalid metadata for tag %x", TAG_DATETIME);
1271 return nullptr;
1272 }
1273
1274 // datetime original
1275 if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
1276 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1277 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1278 "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
1279 return nullptr;
1280 }
1281 }
1282
1283 {
1284 // TIFF/EP standard id
1285 uint8_t standardId[] = { 1, 0, 0, 0 };
1286 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
1287 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
1288 }
1289
1290 {
1291 // copyright
1292 uint8_t copyright = '\0'; // empty
1293 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
1294 TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
1295 }
1296
1297 {
1298 // exposure time
1299 camera_metadata_entry entry =
1300 results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1301 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1302
1303 int64_t exposureTime = *(entry.data.i64);
1304
1305 if (exposureTime < 0) {
1306 // Should be unreachable
1307 jniThrowException(env, "java/lang/IllegalArgumentException",
1308 "Negative exposure time in metadata");
1309 return nullptr;
1310 }
1311
1312 // Ensure exposure time doesn't overflow (for exposures > 4s)
1313 uint32_t denominator = 1000000000;
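        // Halve numerator and denominator together so the rational value is preserved
        // (approximately) while fitting into 32 bits.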
1314 while (exposureTime > UINT32_MAX) {
1315 exposureTime >>= 1;
1316 denominator >>= 1;
1317 if (denominator == 0) {
1318 // Should be unreachable
1319 jniThrowException(env, "java/lang/IllegalArgumentException",
1320 "Exposure time too long");
1321 return nullptr;
1322 }
1323 }
1324
1325 uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1326 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1327 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1328
1329 }
1330
1331 {
1332 // ISO speed ratings
1333 camera_metadata_entry entry =
1334 results.find(ANDROID_SENSOR_SENSITIVITY);
1335 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1336
1337 int32_t tempIso = *(entry.data.i32);
1338 if (tempIso < 0) {
1339 jniThrowException(env, "java/lang/IllegalArgumentException",
1340 "Negative ISO value");
1341 return nullptr;
1342 }
1343
1344 if (tempIso > UINT16_MAX) {
1345 ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1346 tempIso = UINT16_MAX;
1347 }
1348
1349 uint16_t iso = static_cast<uint16_t>(tempIso);
1350 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1351 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1352 }
1353
1354 {
1355 // focal length
1356 camera_metadata_entry entry =
1357 results.find(ANDROID_LENS_FOCAL_LENGTH);
1358 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1359
1360 uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1361 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1362 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1363 }
1364
1365 {
1366 // f number
1367 camera_metadata_entry entry =
1368 results.find(ANDROID_LENS_APERTURE);
1369 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1370
1371 uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1372 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1373 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1374 }
1375
1376 {
1377 // Set DNG version information
1378 uint8_t version[4] = {1, 4, 0, 0};
1379 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1380 env, TAG_DNGVERSION, writer);
1381
1382 uint8_t backwardVersion[4] = {1, 1, 0, 0};
1383 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1384 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1385 }
1386
1387 {
1388 // Set whitelevel
1389 camera_metadata_entry entry =
1390 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1391 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1392 uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1393 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1394 env, TAG_WHITELEVEL, writer);
1395 }
1396
1397 {
1398 // Set default scale
1399 uint32_t defaultScale[4] = {1, 1, 1, 1};
1400 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1401 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1402 }
1403
1404 bool singleIlluminant = false;
1405 {
1406 // Set calibration illuminants
1407 camera_metadata_entry entry1 =
1408 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1409 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
1410 camera_metadata_entry entry2 =
1411 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1412 if (entry2.count == 0) {
1413 singleIlluminant = true;
1414 }
1415 uint16_t ref1 = entry1.data.u8[0];
1416
1417 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
1418 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
1419
1420 if (!singleIlluminant) {
1421 uint16_t ref2 = entry2.data.u8[0];
1422 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
1423 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
1424 }
1425 }
1426
1427 {
1428 // Set color transforms
1429 camera_metadata_entry entry1 =
1430 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
1431 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
1432
1433 int32_t colorTransform1[entry1.count * 2];
1434
1435 size_t ctr = 0;
1436 for(size_t i = 0; i < entry1.count; ++i) {
1437 colorTransform1[ctr++] = entry1.data.r[i].numerator;
1438 colorTransform1[ctr++] = entry1.data.r[i].denominator;
1439 }
1440
1441 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
1442 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
1443
1444 if (!singleIlluminant) {
1445 camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
1446 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
1447 int32_t colorTransform2[entry2.count * 2];
1448
1449 ctr = 0;
1450 for(size_t i = 0; i < entry2.count; ++i) {
1451 colorTransform2[ctr++] = entry2.data.r[i].numerator;
1452 colorTransform2[ctr++] = entry2.data.r[i].denominator;
1453 }
1454
1455 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
1456 colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
1457 }
1458 }
1459
1460 {
1461 // Set calibration transforms
1462 camera_metadata_entry entry1 =
1463 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1464 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
1465
1466 int32_t calibrationTransform1[entry1.count * 2];
1467
1468 size_t ctr = 0;
1469 for(size_t i = 0; i < entry1.count; ++i) {
1470 calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
1471 calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
1472 }
1473
1474 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
1475 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
1476
1477 if (!singleIlluminant) {
1478 camera_metadata_entry entry2 =
1479 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1480 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
1481 int32_t calibrationTransform2[entry2.count * 2];
1482
1483 ctr = 0;
1484 for(size_t i = 0; i < entry2.count; ++i) {
1485 calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
1486 calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
1487 }
1488
1489 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
1490 calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
1491 }
1492 }
1493
1494 {
1495 // Set forward transforms
1496 camera_metadata_entry entry1 =
1497 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
1498 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
1499
1500 int32_t forwardTransform1[entry1.count * 2];
1501
1502 size_t ctr = 0;
1503 for(size_t i = 0; i < entry1.count; ++i) {
1504 forwardTransform1[ctr++] = entry1.data.r[i].numerator;
1505 forwardTransform1[ctr++] = entry1.data.r[i].denominator;
1506 }
1507
1508 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
1509 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
1510
1511 if (!singleIlluminant) {
1512 camera_metadata_entry entry2 =
1513 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
1514 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
1515 int32_t forwardTransform2[entry2.count * 2];
1516
1517 ctr = 0;
1518 for(size_t i = 0; i < entry2.count; ++i) {
1519 forwardTransform2[ctr++] = entry2.data.r[i].numerator;
1520 forwardTransform2[ctr++] = entry2.data.r[i].denominator;
1521 }
1522
1523 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
1524 forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
1525 }
1526 }
1527
1528 {
1529 // Set camera neutral
1530 camera_metadata_entry entry =
1531 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1532 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
1533 uint32_t cameraNeutral[entry.count * 2];
1534
1535 size_t ctr = 0;
1536 for(size_t i = 0; i < entry.count; ++i) {
1537 cameraNeutral[ctr++] =
1538 static_cast<uint32_t>(entry.data.r[i].numerator);
1539 cameraNeutral[ctr++] =
1540 static_cast<uint32_t>(entry.data.r[i].denominator);
1541 }
1542
1543 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
1544 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
1545 }
1546
1547
1548 {
1549 // Set dimensions
1550 if (calculateAndSetCrop(env, characteristics, writer) != OK) {
1551 return nullptr;
1552 }
1553 camera_metadata_entry entry =
1554 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1555 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1556 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1557 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1558 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1559 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1560
1561 // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1562 // relative to the pixel array.
1563 if (imageWidth == width && imageHeight == height) {
1564 xmin = 0;
1565 ymin = 0;
1566 }
1567
1568 uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1569 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1570 env, TAG_ACTIVEAREA, writer);
1571 }
1572
1573 {
1574 // Setup unique camera model tag
1575 char model[PROPERTY_VALUE_MAX];
1576 property_get("ro.product.model", model, "");
1577
1578 char manufacturer[PROPERTY_VALUE_MAX];
1579 property_get("ro.product.manufacturer", manufacturer, "");
1580
1581 char brand[PROPERTY_VALUE_MAX];
1582 property_get("ro.product.brand", brand, "");
1583
1584 String8 cameraModel(model);
1585 cameraModel += "-";
1586 cameraModel += manufacturer;
1587 cameraModel += "-";
1588 cameraModel += brand;
1589
1590 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1591 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
1592 TAG_UNIQUECAMERAMODEL, writer);
1593 }
1594
1595 {
1596 // Setup sensor noise model
1597 camera_metadata_entry entry =
1598 results.find(ANDROID_SENSOR_NOISE_PROFILE);
1599
1600 const status_t numPlaneColors = 3;
1601 const status_t numCfaChannels = 4;
1602
1603 uint8_t cfaOut[numCfaChannels];
1604 if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1605 jniThrowException(env, "java/lang/IllegalArgumentException",
1606 "Invalid CFA from camera characteristics");
1607 return nullptr;
1608 }
1609
1610 double noiseProfile[numPlaneColors * 2];
1611
1612 if (entry.count > 0) {
1613 if (entry.count != numCfaChannels * 2) {
1614 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1615 "in characteristics, no noise profile tag written...",
1616 __FUNCTION__, entry.count);
1617 } else {
1618 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1619 cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1620
1621 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1622 numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1623 writer);
1624 } else {
1625 ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1626 " tag written...", __FUNCTION__);
1627 }
1628 }
1629 } else {
1630 ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
1631 __FUNCTION__);
1632 }
1633 }
1634
1635 {
1636 // Set up opcode List 2
1637 OpcodeListBuilder builder;
1638 status_t err = OK;
1639
1640 // Set up lens shading map
1641 camera_metadata_entry entry1 =
1642 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1643
1644 uint32_t lsmWidth = 0;
1645 uint32_t lsmHeight = 0;
1646
1647 if (entry1.count != 0) {
1648 lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1649 lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1650 }
1651
1652 camera_metadata_entry entry2 =
1653 results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1654
1655 camera_metadata_entry entry =
1656 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1657 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1658 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1659 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1660 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1661 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
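// The lens shading gain map from the result metadata is applied across the pre-correction
// active array region computed above.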
1662 if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1663 err = builder.addGainMapsForMetadata(lsmWidth,
1664 lsmHeight,
1665 ymin,
1666 xmin,
1667 height,
1668 width,
1669 opcodeCfaLayout,
1670 entry2.data.f);
1671 if (err != OK) {
1672 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1673 jniThrowRuntimeException(env, "failed to add lens shading map.");
1674 return nullptr;
1675 }
1676 }
1677
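// Serialize whatever opcodes were added into a buffer and store it in the OpcodeList2 tag.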
1678 size_t listSize = builder.getSize();
1679 uint8_t opcodeListBuf[listSize];
1680 err = builder.buildOpList(opcodeListBuf);
1681 if (err == OK) {
1682 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize, opcodeListBuf,
1683 TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
1684 } else {
1685 ALOGE("%s: Could not build list of opcodes for distortion correction and lens shading"
1686 " map.", __FUNCTION__);
1687 jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
1688 " correction and lens shading map");
1689 return nullptr;
1690 }
1691 }
1692
1693 {
1694 // Set up opcode List 3
1695 OpcodeListBuilder builder;
1696 status_t err = OK;
1697
1698 // Set up rectilinear distortion correction
1699 camera_metadata_entry entry3 =
1700 results.find(ANDROID_LENS_RADIAL_DISTORTION);
1701 camera_metadata_entry entry4 =
1702 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
1703
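// Emit the warp opcode only when both the radial distortion coefficients (6 values) and the
// intrinsic calibration [f_x, f_y, c_x, c_y, s] (5 values) are present; (c_x, c_y) is the
// optical center used to anchor the correction.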
1704 if (entry3.count == 6 && entry4.count == 5) {
1705 float cx = entry4.data.f[/*c_x*/2];
1706 float cy = entry4.data.f[/*c_y*/3];
1707 err = builder.addWarpRectilinearForMetadata(entry3.data.f, preWidth, preHeight, cx,
1708 cy);
1709 if (err != OK) {
1710 ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
1711 jniThrowRuntimeException(env, "failed to add distortion correction.");
1712 return nullptr;
1713 }
1714 }
1715
1716 size_t listSize = builder.getSize();
1717 uint8_t opcodeListBuf[listSize];
1718 err = builder.buildOpList(opcodeListBuf);
1719 if (err == OK) {
1720 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize, opcodeListBuf,
1721 TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
1722 } else {
1723 ALOGE("%s: Could not build list of opcodes for distortion correction and lens shading"
1724 " map.", __FUNCTION__);
1725 jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
1726 " correction and lens shading map");
1727 return nullptr;
1728 }
1729 }
1730
1731 {
1732 // Set up orientation tags.
1733 uint16_t orientation = nativeContext->getOrientation();
1734 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1735 env, TAG_ORIENTATION, writer);
1736
1737 }
1738
1739 if (nativeContext->hasDescription()) {
1740 // Set Description
1741 String8 description = nativeContext->getDescription();
1742 size_t len = description.bytes() + 1;
1743 if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
1744 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
1745 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1746 "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
1747 }
1748 }
1749
1750 if (nativeContext->hasGpsData()) {
1751 // Set GPS tags
1752 GpsData gpsData = nativeContext->getGpsData();
1753 if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
1754 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
1755 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
1756 TIFF_IFD_0);
1757 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
1758 return nullptr;
1759 }
1760 }
1761
1762 {
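// GPSVersionID 2.3.0.0, per the Exif specification.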
1763 uint8_t version[] = {2, 3, 0, 0};
1764 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
1765 TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
1766 }
1767
1768 {
1769 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
1770 GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
1771 TAG_GPSLATITUDEREF, writer);
1772 }
1773
1774 {
1775 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
1776 GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
1777 TAG_GPSLONGITUDEREF, writer);
1778 }
1779
1780 {
1781 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
1782 TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
1783 }
1784
1785 {
1786 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
1787 TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
1788 }
1789
1790 {
1791 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
1792 TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
1793 }
1794
1795 {
1796 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
1797 GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
1798 TAG_GPSDATESTAMP, writer);
1799 }
1800 }
1801
1802
1803 if (nativeContext->hasThumbnail()) {
1804 if (!writer->hasIfd(TIFF_IFD_SUB1)) {
1805 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
1806 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
1807 TIFF_IFD_0);
1808 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
1809 return nullptr;
1810 }
1811 }
1812
1813 Vector<uint16_t> tagsToMove;
1814 tagsToMove.add(TAG_ORIENTATION);
1815 tagsToMove.add(TAG_NEWSUBFILETYPE);
1816 tagsToMove.add(TAG_ACTIVEAREA);
1817 tagsToMove.add(TAG_BITSPERSAMPLE);
1818 tagsToMove.add(TAG_COMPRESSION);
1819 tagsToMove.add(TAG_IMAGEWIDTH);
1820 tagsToMove.add(TAG_IMAGELENGTH);
1821 tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION);
1822 tagsToMove.add(TAG_BLACKLEVEL);
1823 tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
1824 tagsToMove.add(TAG_SAMPLESPERPIXEL);
1825 tagsToMove.add(TAG_PLANARCONFIGURATION);
1826 tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
1827 tagsToMove.add(TAG_CFAPATTERN);
1828 tagsToMove.add(TAG_CFAPLANECOLOR);
1829 tagsToMove.add(TAG_CFALAYOUT);
1830 tagsToMove.add(TAG_XRESOLUTION);
1831 tagsToMove.add(TAG_YRESOLUTION);
1832 tagsToMove.add(TAG_RESOLUTIONUNIT);
1833 tagsToMove.add(TAG_WHITELEVEL);
1834 tagsToMove.add(TAG_DEFAULTSCALE);
1835 tagsToMove.add(TAG_DEFAULTCROPORIGIN);
1836 tagsToMove.add(TAG_DEFAULTCROPSIZE);
1837 tagsToMove.add(TAG_OPCODELIST2);
1838 tagsToMove.add(TAG_OPCODELIST3);
1839
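// When a thumbnail is present, the raw image tags are relocated to SubIFD 1 so that IFD 0 can
// describe the RGB thumbnail, matching the common DNG layout of a preview in IFD 0 with the
// raw data in a SubIFD.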
1840 if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
1841 jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
1842 return nullptr;
1843 }
1844
1845 // Make sure both IFDs get the same orientation tag
1846 sp<TiffEntry> orientEntry = writer->getEntry(TAG_ORIENTATION, TIFF_IFD_SUB1);
1847 if (orientEntry.get() != nullptr) {
1848 writer->addEntry(orientEntry, TIFF_IFD_0);
1849 }
1850
1851 // Setup thumbnail tags
1852
1853 {
1854 // Set photometric interpretation
1855 uint16_t interpretation = 2; // RGB
1856 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1857 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1858 }
1859
1860 {
1861 // Set planar configuration
1862 uint16_t config = 1; // Chunky
1863 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1864 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1865 }
1866
1867 {
1868 // Set samples per pixel
1869 uint16_t samples = SAMPLES_PER_RGB_PIXEL;
1870 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
1871 TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
1872 }
1873
1874 {
1875 // Set bits per sample
1876 uint16_t bits = BITS_PER_RGB_SAMPLE;
1877 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0),
1878 env, TAG_BITSPERSAMPLE, writer);
1879 }
1880
1881 {
1882 // Set subfiletype
1883 uint32_t subfileType = 1; // Thumbnail image
1884 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1885 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1886 }
1887
1888 {
1889 // Set compression
1890 uint16_t compression = 1; // None
1891 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1892 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1893 }
1894
1895 {
1896 // Set dimensions
1897 uint32_t uWidth = nativeContext->getThumbnailWidth();
1898 uint32_t uHeight = nativeContext->getThumbnailHeight();
1899 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
1900 env, TAG_IMAGEWIDTH, writer);
1901 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
1902 env, TAG_IMAGELENGTH, writer);
1903 }
1904
1905 {
1906 // x resolution
1907 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1908 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1909 env, TAG_XRESOLUTION, writer);
1910
1911 // y resolution
1912 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1913 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1914 env, TAG_YRESOLUTION, writer);
1915
1916 uint16_t unit = 2; // inches
1917 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1918 env, TAG_RESOLUTIONUNIT, writer);
1919 }
1920 }
1921
1922 if (writer->addStrip(TIFF_IFD_0) != OK) {
1923 ALOGE("%s: Could not setup strip tags for IFD 0.", __FUNCTION__);
1924 jniThrowException(env, "java/lang/IllegalStateException",
1925 "Failed to setup strip tags for IFD 0.");
1926 return nullptr;
1927 }
1928
1929 if (writer->hasIfd(TIFF_IFD_SUB1)) {
1930 if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
1931 ALOGE("%s: Could not setup main image strip tags.", __FUNCTION__);
1932 jniThrowException(env, "java/lang/IllegalStateException",
1933 "Failed to setup main image strip tags.");
1934 return nullptr;
1935 }
1936 }
1937 return writer;
1938 }
1939
1940 static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
1941 ALOGV("%s:", __FUNCTION__);
1942 DngCreator_setNativeContext(env, thiz, nullptr);
1943 }
1944
1945 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
1946 ALOGV("%s:", __FUNCTION__);
1947
1948 NativeContext* context = DngCreator_getNativeContext(env, thiz);
1949 if (context == nullptr) {
1950 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
1951 jniThrowException(env, "java/lang/AssertionError",
1952 "setOrientation called with uninitialized DngCreator");
1953 return;
1954 }
1955
1956 uint16_t orientation = static_cast<uint16_t>(orient);
1957 context->setOrientation(orientation);
1958 }
1959
1960 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
1961 ALOGV("%s:", __FUNCTION__);
1962
1963 NativeContext* context = DngCreator_getNativeContext(env, thiz);
1964 if (context == nullptr) {
1965 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
1966 jniThrowException(env, "java/lang/AssertionError",
1967 "setDescription called with uninitialized DngCreator");
1968 return;
1969 }
1970
1971 const char* desc = env->GetStringUTFChars(description, nullptr);
1972 context->setDescription(String8(desc));
1973 env->ReleaseStringUTFChars(description, desc);
1974 }
1975
1976 static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
1977 jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
1978 ALOGV("%s:", __FUNCTION__);
1979
1980 NativeContext* context = DngCreator_getNativeContext(env, thiz);
1981 if (context == nullptr) {
1982 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
1983 jniThrowException(env, "java/lang/AssertionError",
1984 "setGpsTags called with uninitialized DngCreator");
1985 return;
1986 }
1987
1988 GpsData data;
1989
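// Latitude and longitude each arrive as 6 jints forming 3 unsigned rationals (degrees, minutes,
// seconds as numerator/denominator pairs); the timestamp uses the same packing for hours,
// minutes, seconds. For example, 37 deg 23' 45.1" could be passed as {37, 1, 23, 1, 451, 10}
// (hypothetical illustration only).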
1990 jsize latLen = env->GetArrayLength(latTag);
1991 jsize longLen = env->GetArrayLength(longTag);
1992 jsize timeLen = env->GetArrayLength(timeTag);
1993 if (latLen != GpsData::GPS_VALUE_LENGTH) {
1994 jniThrowException(env, "java/lang/IllegalArgumentException",
1995 "invalid latitude tag length");
1996 return;
1997 } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
1998 jniThrowException(env, "java/lang/IllegalArgumentException",
1999 "invalid longitude tag length");
2000 return;
2001 } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
2002 jniThrowException(env, "java/lang/IllegalArgumentException",
2003 "invalid time tag length");
2004 return;
2005 }
2006
2007 env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2008 reinterpret_cast<jint*>(&data.mLatitude));
2009 env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2010 reinterpret_cast<jint*>(&data.mLongitude));
2011 env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2012 reinterpret_cast<jint*>(&data.mTimestamp));
2013
2014
2015 env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
2016 data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2017 env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
2018 data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2019 env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
2020 reinterpret_cast<char*>(&data.mDate));
2021 data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';
2022
2023 context->setGpsData(data);
2024 }
2025
2026 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2027 jint height) {
2028 ALOGV("%s:", __FUNCTION__);
2029
2030 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2031 if (context == nullptr) {
2032 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2033 jniThrowException(env, "java/lang/AssertionError",
2034 "setThumbnail called with uninitialized DngCreator");
2035 return;
2036 }
2037
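// The thumbnail must be a direct ByteBuffer holding exactly width x height packed RGB888
// pixels (3 bytes per pixel).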
2038 size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2039 jlong capacity = env->GetDirectBufferCapacity(buffer);
2040 if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2041 jniThrowExceptionFmt(env, "java/lang/AssertionError",
2042 "Invalid size %" PRId64 " for thumbnail, expected size was %zu",
2043 capacity, fullSize);
2044 return;
2045 }
2046
2047 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2048 if (pixelBytes == nullptr) {
2049 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2050 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2051 return;
2052 }
2053
2054 if (!context->setThumbnail(pixelBytes, width, height)) {
2055 jniThrowException(env, "java/lang/IllegalStateException",
2056 "Failed to set thumbnail.");
2057 return;
2058 }
2059 }
2060
2061 // TODO: Refactor out common preamble for the two nativeWrite methods.
2062 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2063 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2064 jboolean isDirect) {
2065 ALOGV("%s:", __FUNCTION__);
2066 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2067 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2068 height, rowStride, pixStride, offset);
2069 uint32_t rStride = static_cast<uint32_t>(rowStride);
2070 uint32_t pStride = static_cast<uint32_t>(pixStride);
2071 uint32_t uWidth = static_cast<uint32_t>(width);
2072 uint32_t uHeight = static_cast<uint32_t>(height);
2073 uint64_t uOffset = static_cast<uint64_t>(offset);
2074
2075 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2076 if (env->ExceptionCheck()) {
2077 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2078 return;
2079 }
2080
2081 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2082 if (context == nullptr) {
2083 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2084 jniThrowException(env, "java/lang/AssertionError",
2085 "Write called with uninitialized DngCreator");
2086 return;
2087 }
2088 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2089
2090 if (writer.get() == nullptr) {
2091 return;
2092 }
2093
2094 // Validate DNG size
2095 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2096 return;
2097 }
2098
2099 sp<JniInputByteBuffer> inBuf;
2100 Vector<StripSource*> sources;
2101 sp<DirectStripSource> thumbnailSource;
2102 uint32_t targetIfd = TIFF_IFD_0;
2103
2104 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2105
2106 if (hasThumbnail) {
2107 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2108 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2109 uint32_t thumbWidth = context->getThumbnailWidth();
2110 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2111 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2112 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2113 SAMPLES_PER_RGB_PIXEL);
2114 sources.add(thumbnailSource.get());
2115 targetIfd = TIFF_IFD_SUB1;
2116 }
2117
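// With a thumbnail occupying IFD 0, the raw image strips are written to SubIFD 1; otherwise
// they go directly into IFD 0.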
2118 if (isDirect) {
2119 size_t fullSize = rStride * uHeight;
2120 jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2121 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2122 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2123 "Invalid size %" PRId64 " for Image, size given in metadata is %zu at current stride",
2124 capacity, fullSize);
2125 return;
2126 }
2127
2128 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2129 if (pixelBytes == nullptr) {
2130 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2131 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2132 return;
2133 }
2134
2135 ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2136 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2137 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2138 sources.add(&stripSource);
2139
2140 status_t ret = OK;
2141 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2142 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2143 if (!env->ExceptionCheck()) {
2144 jniThrowExceptionFmt(env, "java/io/IOException",
2145 "Encountered error %d while writing file.", ret);
2146 }
2147 return;
2148 }
2149 } else {
2150 inBuf = new JniInputByteBuffer(env, inBuffer);
2151
2152 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2153 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2154 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2155 sources.add(&stripSource);
2156
2157 status_t ret = OK;
2158 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2159 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2160 if (!env->ExceptionCheck()) {
2161 jniThrowExceptionFmt(env, "java/io/IOException",
2162 "Encountered error %d while writing file.", ret);
2163 }
2164 return;
2165 }
2166 }
2167 }
2168
2169 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2170 jobject inStream, jint width, jint height, jlong offset) {
2171 ALOGV("%s:", __FUNCTION__);
2172
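// The InputStream path assumes tightly packed 16-bit raw samples: a 2-byte pixel stride and a
// row stride of width * 2 bytes, starting at the given offset.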
2173 uint32_t rowStride = width * BYTES_PER_SAMPLE;
2174 uint32_t pixStride = BYTES_PER_SAMPLE;
2175 uint32_t uWidth = static_cast<uint32_t>(width);
2176 uint32_t uHeight = static_cast<uint32_t>(height);
2177 uint64_t uOffset = static_cast<uint64_t>(offset);
2178
2179 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2180 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2181 height, rowStride, pixStride, offset);
2182
2183 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2184 if (env->ExceptionCheck()) {
2185 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2186 return;
2187 }
2188
2189 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2190 if (context == nullptr) {
2191 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2192 jniThrowException(env, "java/lang/AssertionError",
2193 "Write called with uninitialized DngCreator");
2194 return;
2195 }
2196 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2197
2198 if (writer.get() == nullptr) {
2199 return;
2200 }
2201
2202 // Validate DNG size
2203 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2204 return;
2205 }
2206
2207 sp<DirectStripSource> thumbnailSource;
2208 uint32_t targetIfd = TIFF_IFD_0;
2209 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2210 Vector<StripSource*> sources;
2211
2212 if (hasThumbnail) {
2213 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2214 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2215 uint32_t thumbWidth = context->getThumbnailWidth();
2216 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2217 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2218 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2219 SAMPLES_PER_RGB_PIXEL);
2220 sources.add(thumbnailSource.get());
2221 targetIfd = TIFF_IFD_SUB1;
2222 }
2223
2224 sp<JniInputStream> in = new JniInputStream(env, inStream);
2225
2226 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2227 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2228 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2229 sources.add(&stripSource);
2230
2231 status_t ret = OK;
2232 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2233 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2234 if (!env->ExceptionCheck()) {
2235 jniThrowExceptionFmt(env, "java/io/IOException",
2236 "Encountered error %d while writing file.", ret);
2237 }
2238 return;
2239 }
2240 }
2241
2242 } /*extern "C" */
2243
2244 static JNINativeMethod gDngCreatorMethods[] = {
2245 {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit},
2246 {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
2247 "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
2248 (void*) DngCreator_init},
2249 {"nativeDestroy", "()V", (void*) DngCreator_destroy},
2250 {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation},
2251 {"nativeSetDescription", "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
2252 {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
2253 (void*) DngCreator_nativeSetGpsTags},
2254 {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
2255 {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
2256 (void*) DngCreator_nativeWriteImage},
2257 {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
2258 (void*) DngCreator_nativeWriteInputStream},
2259 };
2260
2261 int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
2262 return RegisterMethodsOrDie(env,
2263 "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
2264 }
2265