// android_hardware_camera2_DngCreator.cpp revision ea1831d211ea0e6b2d161c714bb0786369ef2df5
/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 0
#define LOG_TAG "DngCreator_JNI"
#include <inttypes.h>
#include <string.h>
#include <algorithm>
#include <memory>
#include <vector>

#include <utils/Log.h>
#include <utils/Errors.h>
#include <utils/StrongPointer.h>
#include <utils/RefBase.h>
#include <utils/Vector.h>
#include <utils/String8.h>
#include <cutils/properties.h>
#include <system/camera_metadata.h>
#include <camera/CameraMetadata.h>
#include <img_utils/DngUtils.h>
#include <img_utils/TagDefinitions.h>
#include <img_utils/TiffIfd.h>
#include <img_utils/TiffWriter.h>
#include <img_utils/Output.h>
#include <img_utils/Input.h>
#include <img_utils/StripSource.h>

#include "core_jni_helpers.h"

#include "android_runtime/AndroidRuntime.h"
#include "android_runtime/android_hardware_camera2_CameraMetadata.h"

#include <jni.h>
#include <JNIHelp.h>

using namespace android;
using namespace img_utils;

// Throws IllegalArgumentException and returns false from the enclosing
// function when an img_utils call does not return OK.
#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }


// Same as BAIL_IF_INVALID_RET_BOOL, but returns nullptr (for functions
// returning a pointer or sp<>).
#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


// Same as BAIL_IF_INVALID_RET_BOOL, but returns -1 (for functions returning
// a status or index).
#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return -1; \
    }

// Throws IllegalArgumentException and returns nullptr when a metadata entry
// lookup came back empty (count == 0).
#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
    if (entry.count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


// Name of the Java field on DngCreator that holds the native context pointer.
#define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"

// Cached JNI IDs, resolved once in DngCreator_nativeClassInit.
static struct {
    jfieldID mNativeContext;
} gDngCreatorClassInfo;

static struct {
    jmethodID mWriteMethod;
} gOutputStreamClassInfo;

static struct {
    jmethodID mReadMethod;
    jmethodID mSkipMethod;
} gInputStreamClassInfo;

static struct {
    jmethodID mGetMethod;
} gInputByteBufferClassInfo;

// Pixel-format constants for RAW16 input and RGB thumbnail data, plus the
// TIFF IFD indices used throughout this file.
enum {
    BITS_PER_SAMPLE = 16,
    BYTES_PER_SAMPLE = 2,
    BYTES_PER_RGB_PIXEL = 3,
    BITS_PER_RGB_SAMPLE = 8,
    BYTES_PER_RGB_SAMPLE = 1,
    SAMPLES_PER_RGB_PIXEL = 3,
    SAMPLES_PER_RAW_PIXEL = 1,
    TIFF_IFD_0 = 0,
    TIFF_IFD_SUB1 = 1,
    TIFF_IFD_GPSINFO = 2,
};


/**
 * POD container class for GPS tag data.
 */
class GpsData {
public:
    enum {
        GPS_VALUE_LENGTH = 6,
        GPS_REF_LENGTH = 2,
        GPS_DATE_LENGTH = 11,
    };

    // Each coordinate/timestamp is stored as three TIFF RATIONALs
    // (numerator/denominator pairs), hence 6 uint32_t values.
    uint32_t mLatitude[GPS_VALUE_LENGTH];
    uint32_t mLongitude[GPS_VALUE_LENGTH];
    uint32_t mTimestamp[GPS_VALUE_LENGTH];
    uint8_t mLatitudeRef[GPS_REF_LENGTH];
    uint8_t mLongitudeRef[GPS_REF_LENGTH];
    uint8_t mDate[GPS_DATE_LENGTH];
};

// ----------------------------------------------------------------------------

/**
 * Container class for the persistent native context.
 */

class NativeContext : public LightRefBase<NativeContext> {
public:
    enum {
        // Required length of the formatted capture-time string, including the
        // NUL terminator ("YYYY:MM:DD HH:MM:SS" is 19 chars + 1).
        DATETIME_COUNT = 20,
    };

    NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
    virtual ~NativeContext();

    // Returns the TiffWriter owned by this context (never null).
    TiffWriter* getWriter();

    std::shared_ptr<const CameraMetadata> getCharacteristics() const;
    std::shared_ptr<const CameraMetadata> getResult() const;

    uint32_t getThumbnailWidth() const;
    uint32_t getThumbnailHeight() const;
    const uint8_t* getThumbnail() const;
    bool hasThumbnail() const;

    // Copies width*height RGB888 pixels out of buffer; returns false on
    // allocation failure.
    bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);

    void setOrientation(uint16_t orientation);
    uint16_t getOrientation() const;

    void setDescription(const String8& desc);
    String8 getDescription() const;
    bool hasDescription() const;

    void setGpsData(const GpsData& data);
    GpsData getGpsData() const;
    bool hasGpsData() const;

    void setCaptureTime(const String8& formattedCaptureTime);
    String8 getCaptureTime() const;
    bool hasCaptureTime() const;

private:
    Vector<uint8_t> mCurrentThumbnail;
    TiffWriter mWriter;
    std::shared_ptr<CameraMetadata> mCharacteristics;
    std::shared_ptr<CameraMetadata> mResult;
    uint32_t mThumbnailWidth;
    uint32_t mThumbnailHeight;
    uint16_t mOrientation;
    // *Set flags record whether the corresponding optional field was ever
    // assigned; getters do not check them.
    bool mThumbnailSet;
    bool mGpsSet;
    bool mDescriptionSet;
    bool mCaptureTimeSet;
    String8 mDescription;
    GpsData mGpsData;
    String8 mFormattedCaptureTime;
};

// Deep-copies both metadata packets so the context does not alias the
// caller's objects.
NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
        mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
        mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
        mThumbnailHeight(0), mOrientation(0), mThumbnailSet(false), mGpsSet(false),
        mDescriptionSet(false), mCaptureTimeSet(false) {}

NativeContext::~NativeContext() {}

TiffWriter* NativeContext::getWriter() {
    return &mWriter;
}

std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
    return mCharacteristics;
}

std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
    return mResult;
}

uint32_t NativeContext::getThumbnailWidth() const {
    return mThumbnailWidth;
}

uint32_t NativeContext::getThumbnailHeight() const {
    return mThumbnailHeight;
}

const uint8_t* NativeContext::getThumbnail() const {
    return mCurrentThumbnail.array();
}

bool NativeContext::hasThumbnail() const {
    return mThumbnailSet;
}

bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
    mThumbnailWidth = width;
    mThumbnailHeight = height;

    // RGB888: 3 bytes per pixel.
    size_t size = BYTES_PER_RGB_PIXEL * width * height;
    if (mCurrentThumbnail.resize(size) < 0) {
        ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
        return false;
    }

    uint8_t* thumb = mCurrentThumbnail.editArray();
    memcpy(thumb, buffer, size);
    mThumbnailSet = true;
    return true;
}

void NativeContext::setOrientation(uint16_t orientation) {
    mOrientation = orientation;
}

uint16_t NativeContext::getOrientation() const {
    return mOrientation;
}

void NativeContext::setDescription(const String8& desc) {
    mDescription = desc;
    mDescriptionSet = true;
}

String8 NativeContext::getDescription() const {
    return mDescription;
}

bool NativeContext::hasDescription() const {
    return mDescriptionSet;
}

void NativeContext::setGpsData(const GpsData& data) {
    mGpsData = data;
    mGpsSet = true;
}

GpsData NativeContext::getGpsData() const {
    return mGpsData;
}

bool NativeContext::hasGpsData() const {
    return mGpsSet;
}

void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
    mFormattedCaptureTime = formattedCaptureTime;
    mCaptureTimeSet = true;
}

String8 NativeContext::getCaptureTime() const {
    return mFormattedCaptureTime;
}

bool NativeContext::hasCaptureTime() const {
    return mCaptureTimeSet;
}

// End of NativeContext
// ----------------------------------------------------------------------------

/**
 * Wrapper class for a Java OutputStream.
 *
 * This class is not intended to be used across JNI calls.
 */
class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
public:
    JniOutputStream(JNIEnv* env, jobject outStream);

    virtual ~JniOutputStream();

    status_t open();

    // Copies [offset, offset+count) of buf into the Java stream via a
    // reusable 4 KiB transfer array.
    status_t write(const uint8_t* buf, size_t offset, size_t count);

    status_t close();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mOutputStream;
    JNIEnv* mEnv;
    jbyteArray mByteArray;
};

JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
        mEnv(env) {
    // A pending OutOfMemoryError is left on the env if allocation fails;
    // callers must check for a pending exception before use.
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniOutputStream::~JniOutputStream() {
    mEnv->DeleteLocalRef(mByteArray);
}

status_t JniOutputStream::open() {
    // Do nothing
    return OK;
}

status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
    // Chunk the native buffer through the Java byte[] so arbitrarily large
    // writes only ever pin BYTE_ARRAY_LENGTH bytes at a time.
    while(count > 0) {
        size_t len = BYTE_ARRAY_LENGTH;
        len = (count > len) ? len : count;
        mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));

        if (mEnv->ExceptionCheck()) {
            return BAD_VALUE;
        }

        mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
                0, len);

        if (mEnv->ExceptionCheck()) {
            return BAD_VALUE;
        }

        count -= len;
        offset += len;
    }
    return OK;
}

status_t JniOutputStream::close() {
    // Do nothing
    return OK;
}

// End of JniOutputStream
// ----------------------------------------------------------------------------

/**
 * Wrapper class for a Java InputStream.
 *
 * This class is not intended to be used across JNI calls.
 */
class JniInputStream : public Input, public LightRefBase<JniInputStream> {
public:
    JniInputStream(JNIEnv* env, jobject inStream);

    status_t open();

    status_t close();

    // Reads up to count bytes into buf+offset; returns bytes read,
    // NOT_ENOUGH_DATA on EOF, or BAD_VALUE on a pending Java exception.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    // Skips up to count bytes; same error conventions as read().
    ssize_t skip(size_t count);

    virtual ~JniInputStream();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mInStream;
    JNIEnv* mEnv;
    jbyteArray mByteArray;

};

JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputStream::~JniInputStream() {
    mEnv->DeleteLocalRef(mByteArray);
}

ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {

    // Clamp each call to the transfer-array size; a single call may return
    // fewer bytes than requested.
    jint realCount = BYTE_ARRAY_LENGTH;
    if (count < BYTE_ARRAY_LENGTH) {
        realCount = count;
    }
    jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
            realCount);

    // InputStream.read returns -1 at end of stream.
    if (actual < 0) {
        return NOT_ENOUGH_DATA;
    }

    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }

    mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }
    return actual;
}

ssize_t JniInputStream::skip(size_t count) {
    jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
            static_cast<jlong>(count));

    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }
    if (actual < 0) {
        return NOT_ENOUGH_DATA;
    }
    return actual;
}

status_t JniInputStream::open() {
    // Do nothing
    return OK;
}

status_t JniInputStream::close() {
    // Do nothing
    return OK;
}

// End of JniInputStream
// ----------------------------------------------------------------------------

/**
 * Wrapper class for a non-direct Java ByteBuffer.
 *
 * This class is not intended to be used across JNI calls.
 */
class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
public:
    JniInputByteBuffer(JNIEnv* env, jobject inBuf);

    status_t open();

    status_t close();

    // Reads exactly realCount (<= 4096) bytes from the ByteBuffer into
    // buf+offset; ByteBuffer.get([BII) throws if not enough remain.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    virtual ~JniInputByteBuffer();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096
    };
    jobject mInBuf;
    JNIEnv* mEnv;
    jbyteArray mByteArray;
};

JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputByteBuffer::~JniInputByteBuffer() {
    mEnv->DeleteLocalRef(mByteArray);
}

ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
    jint realCount = BYTE_ARRAY_LENGTH;
    if (count < BYTE_ARRAY_LENGTH) {
        realCount = count;
    }

    // ByteBuffer.get(byte[], int, int) returns `this` for chaining; drop the
    // returned local ref immediately so loops don't overflow the local table.
    jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
            mByteArray, 0, realCount);
    mEnv->DeleteLocalRef(chainingBuf);

    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }

    mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }
    return realCount;
}

status_t JniInputByteBuffer::open() {
    // Do nothing
    return OK;
}

status_t JniInputByteBuffer::close() {
    // Do nothing
    return OK;
}

// End of JniInputByteBuffer
// ----------------------------------------------------------------------------

/**
 * StripSource subclass for Input types.
 *
 * This class is not intended to be used across JNI calls.
 */

class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
public:
    InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
            uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
            uint32_t samplesPerPixel);

    virtual ~InputStripSource();

    // Streams exactly `count` bytes of pixel data from mInput to stream;
    // `count` must equal width*height*bytesPerSample*samplesPerPixel.
    virtual status_t writeToStream(Output& stream, uint32_t count);

    virtual uint32_t getIfd() const;
protected:
    uint32_t mIfd;
    Input* mInput;
    uint32_t mWidth;
    uint32_t mHeight;
    uint32_t mPixStride;
    uint32_t mRowStride;
    uint64_t mOffset;
    JNIEnv* mEnv;
    uint32_t mBytesPerSample;
    uint32_t mSamplesPerPixel;
};

InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
        uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
        uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
        mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
        mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

InputStripSource::~InputStripSource() {}

status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
    jlong offset = mOffset;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }

    // Skip offset
    while (offset > 0) {
        ssize_t skipped = mInput->skip(offset);
        if (skipped <= 0) {
            if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
                jniThrowExceptionFmt(mEnv, "java/io/IOException",
                        "Early EOF encountered in skip, not enough pixel data for image of size %u",
                        fullSize);
                skipped = NOT_ENOUGH_DATA;
            } else {
                // Don't clobber an exception the Java skip() already raised.
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException",
                            "Error encountered while skip bytes in input stream.");
                }
            }

            return skipped;
        }
        offset -= skipped;
    }

    Vector<uint8_t> row;
    if (row.resize(mRowStride) < 0) {
        jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
        return BAD_VALUE;
    }

    uint8_t* rowBytes = row.editArray();

    for (uint32_t i = 0; i < mHeight; ++i) {
        size_t rowFillAmt = 0;
        size_t rowSize = mRowStride;

        // Fill one full row; read() may return short counts.
        while (rowFillAmt < mRowStride) {
            ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
            if (bytesRead <= 0) {
                if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
                    ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
                            __FUNCTION__, i, bytesRead);
                    jniThrowExceptionFmt(mEnv, "java/io/IOException",
                            "Early EOF encountered, not enough pixel data for image of size %"
                            PRIu32, fullSize);
                    bytesRead = NOT_ENOUGH_DATA;
                } else {
                    if (!mEnv->ExceptionCheck()) {
                        jniThrowException(mEnv, "java/io/IOException",
                                "Error encountered while reading");
                    }
                }
                return bytesRead;
            }
            rowFillAmt += bytesRead;
            rowSize -= bytesRead;
        }

        if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
            ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);

            if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        } else {
            ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
            jniThrowException(mEnv, "java/lang/IllegalStateException",
                    "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
            return BAD_VALUE;

            // TODO: Add support for non-contiguous pixels if needed.
        }
    }
    return OK;
}

uint32_t InputStripSource::getIfd() const {
    return mIfd;
}

// End of InputStripSource
// ----------------------------------------------------------------------------

/**
 * StripSource subclass for direct buffer types.
 *
 * This class is not intended to be used across JNI calls.
669 */ 670 671class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> { 672public: 673 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width, 674 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset, 675 uint32_t bytesPerSample, uint32_t samplesPerPixel); 676 677 virtual ~DirectStripSource(); 678 679 virtual status_t writeToStream(Output& stream, uint32_t count); 680 681 virtual uint32_t getIfd() const; 682protected: 683 uint32_t mIfd; 684 const uint8_t* mPixelBytes; 685 uint32_t mWidth; 686 uint32_t mHeight; 687 uint32_t mPixStride; 688 uint32_t mRowStride; 689 uint16_t mOffset; 690 JNIEnv* mEnv; 691 uint32_t mBytesPerSample; 692 uint32_t mSamplesPerPixel; 693}; 694 695DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, 696 uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride, 697 uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), 698 mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride), 699 mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample), 700 mSamplesPerPixel(samplesPerPixel) {} 701 702DirectStripSource::~DirectStripSource() {} 703 704status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) { 705 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel; 706 707 if (fullSize != count) { 708 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count, 709 fullSize); 710 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write"); 711 return BAD_VALUE; 712 } 713 714 715 if (mPixStride == mBytesPerSample * mSamplesPerPixel 716 && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) { 717 ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__); 718 719 if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) { 720 if 
(!mEnv->ExceptionCheck()) { 721 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data"); 722 } 723 return BAD_VALUE; 724 } 725 } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) { 726 ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__); 727 728 for (size_t i = 0; i < mHeight; ++i) { 729 if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK || 730 mEnv->ExceptionCheck()) { 731 if (!mEnv->ExceptionCheck()) { 732 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data"); 733 } 734 return BAD_VALUE; 735 } 736 } 737 } else { 738 ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__); 739 740 jniThrowException(mEnv, "java/lang/IllegalStateException", 741 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous"); 742 return BAD_VALUE; 743 744 // TODO: Add support for non-contiguous pixels if needed. 745 } 746 return OK; 747 748} 749 750uint32_t DirectStripSource::getIfd() const { 751 return mIfd; 752} 753 754// End of DirectStripSource 755// ---------------------------------------------------------------------------- 756 757/** 758 * Calculate the default crop relative to the "active area" of the image sensor (this active area 759 * will always be the pre-correction active area rectangle), and set this. 760 */ 761static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics, 762 sp<TiffWriter> writer) { 763 764 camera_metadata_ro_entry entry = 765 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 766 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]); 767 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]); 768 769 const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation. 
770 771 if (width < margin * 2 || height < margin * 2) { 772 ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too" 773 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width); 774 jniThrowException(env, "java/lang/IllegalStateException", 775 "Pre-correction active area is too small."); 776 return BAD_VALUE; 777 } 778 779 uint32_t defaultCropOrigin[] = {margin, margin}; 780 uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin, 781 height - defaultCropOrigin[1] - margin}; 782 783 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin, 784 TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer); 785 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize, 786 TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer); 787 788 return OK; 789} 790 791static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer, 792 const CameraMetadata& characteristics, jint width, jint height) { 793 if (width <= 0) { 794 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ 795 "Image width %d is invalid", width); 796 return false; 797 } 798 799 if (height <= 0) { 800 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ 801 "Image height %d is invalid", height); 802 return false; 803 } 804 805 camera_metadata_ro_entry preCorrectionEntry = 806 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 807 camera_metadata_ro_entry pixelArrayEntry = 808 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE); 809 810 int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]); 811 int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]); 812 int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]); 813 int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]); 814 815 bool matchesPixelArray = (pWidth == width && pHeight == height); 816 bool matchesPreCorrectionArray = (cWidth == width && cHeight == height); 817 818 if (!(matchesPixelArray || 
matchesPreCorrectionArray)) { 819 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \ 820 "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel " 821 "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)", 822 width, height, pWidth, pHeight, cWidth, cHeight); 823 return false; 824 } 825 826 return true; 827} 828 829static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo, 830 const Vector<uint16_t>& entries) { 831 for (size_t i = 0; i < entries.size(); ++i) { 832 uint16_t tagId = entries[i]; 833 sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom); 834 if (entry.get() == nullptr) { 835 ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId, 836 ifdFrom); 837 return BAD_VALUE; 838 } 839 if (writer->addEntry(entry, ifdTo) != OK) { 840 ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId, 841 ifdFrom); 842 return BAD_VALUE; 843 } 844 writer->removeEntry(tagId, ifdFrom); 845 } 846 return OK; 847} 848 849/** 850 * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4. 851 * Returns OK on success, or a negative error code if the CFA enum was invalid. 
852 */ 853static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) { 854 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa = 855 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>( 856 cfaEnum); 857 switch(cfa) { 858 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: { 859 cfaOut[0] = 0; 860 cfaOut[1] = 1; 861 cfaOut[2] = 1; 862 cfaOut[3] = 2; 863 break; 864 } 865 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: { 866 cfaOut[0] = 1; 867 cfaOut[1] = 0; 868 cfaOut[2] = 2; 869 cfaOut[3] = 1; 870 break; 871 } 872 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: { 873 cfaOut[0] = 1; 874 cfaOut[1] = 2; 875 cfaOut[2] = 0; 876 cfaOut[3] = 1; 877 break; 878 } 879 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: { 880 cfaOut[0] = 2; 881 cfaOut[1] = 1; 882 cfaOut[2] = 1; 883 cfaOut[3] = 0; 884 break; 885 } 886 default: { 887 return BAD_VALUE; 888 } 889 } 890 return OK; 891} 892 893/** 894 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to 895 * RGGB for an unknown enum. 
896 */ 897static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) { 898 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa = 899 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>( 900 cfaEnum); 901 switch(cfa) { 902 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: { 903 return OpcodeListBuilder::CFA_RGGB; 904 } 905 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: { 906 return OpcodeListBuilder::CFA_GRBG; 907 } 908 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: { 909 return OpcodeListBuilder::CFA_GBRG; 910 } 911 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: { 912 return OpcodeListBuilder::CFA_BGGR; 913 } 914 default: { 915 return OpcodeListBuilder::CFA_RGGB; 916 } 917 } 918} 919 920/** 921 * For each color plane, find the corresponding noise profile coefficients given in the 922 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color 923 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient. 924 * 925 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients. 926 * cfa - numChannels color channels corresponding to each of the per-channel noise profile 927 * coefficients. 928 * numChannels - the number of noise profile coefficient pairs and color channels given in 929 * the perChannelNoiseProfile and cfa arguments, respectively. 930 * planeColors - the color planes in the noise profile output. 931 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile. 932 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients. 933 * 934 * returns OK, or a negative error code on failure. 
935 */ 936static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa, 937 size_t numChannels, const uint8_t* planeColors, size_t numPlanes, 938 /*out*/double* noiseProfile) { 939 940 for (size_t p = 0; p < numPlanes; ++p) { 941 size_t S = p * 2; 942 size_t O = p * 2 + 1; 943 944 noiseProfile[S] = 0; 945 noiseProfile[O] = 0; 946 bool uninitialized = true; 947 for (size_t c = 0; c < numChannels; ++c) { 948 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) { 949 noiseProfile[S] = perChannelNoiseProfile[c * 2]; 950 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1]; 951 uninitialized = false; 952 } 953 } 954 if (uninitialized) { 955 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu", 956 __FUNCTION__, p); 957 return BAD_VALUE; 958 } 959 } 960 return OK; 961} 962 963// ---------------------------------------------------------------------------- 964extern "C" { 965 966static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) { 967 ALOGV("%s:", __FUNCTION__); 968 return reinterpret_cast<NativeContext*>(env->GetLongField(thiz, 969 gDngCreatorClassInfo.mNativeContext)); 970} 971 972static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) { 973 ALOGV("%s:", __FUNCTION__); 974 NativeContext* current = DngCreator_getNativeContext(env, thiz); 975 976 if (context != nullptr) { 977 context->incStrong((void*) DngCreator_setNativeContext); 978 } 979 980 if (current) { 981 current->decStrong((void*) DngCreator_setNativeContext); 982 } 983 984 env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext, 985 reinterpret_cast<jlong>(context.get())); 986} 987 988static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) { 989 ALOGV("%s:", __FUNCTION__); 990 991 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env, 992 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J"); 993 994 jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream"); 995 
gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env, 996 outputStreamClazz, "write", "([BII)V"); 997 998 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream"); 999 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I"); 1000 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J"); 1001 1002 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer"); 1003 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env, 1004 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;"); 1005} 1006 1007static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr, 1008 jobject resultsPtr, jstring formattedCaptureTime) { 1009 ALOGV("%s:", __FUNCTION__); 1010 CameraMetadata characteristics; 1011 CameraMetadata results; 1012 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) { 1013 jniThrowException(env, "java/lang/AssertionError", 1014 "No native metadata defined for camera characteristics."); 1015 return; 1016 } 1017 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) { 1018 jniThrowException(env, "java/lang/AssertionError", 1019 "No native metadata defined for capture results."); 1020 return; 1021 } 1022 1023 sp<NativeContext> nativeContext = new NativeContext(characteristics, results); 1024 1025 const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr); 1026 1027 size_t len = strlen(captureTime) + 1; 1028 if (len != NativeContext::DATETIME_COUNT) { 1029 jniThrowException(env, "java/lang/IllegalArgumentException", 1030 "Formatted capture time string length is not required 20 characters"); 1031 return; 1032 } 1033 1034 nativeContext->setCaptureTime(String8(captureTime)); 1035 1036 DngCreator_setNativeContext(env, thiz, nativeContext); 1037} 1038 1039static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth, 1040 uint32_t imageHeight) { 1041 1042 
NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz); 1043 1044 if (nativeContext == nullptr) { 1045 jniThrowException(env, "java/lang/AssertionError", 1046 "No native context, must call init before other operations."); 1047 return nullptr; 1048 } 1049 1050 CameraMetadata characteristics = *(nativeContext->getCharacteristics()); 1051 CameraMetadata results = *(nativeContext->getResult()); 1052 1053 sp<TiffWriter> writer = new TiffWriter(); 1054 1055 uint32_t preWidth = 0; 1056 uint32_t preHeight = 0; 1057 { 1058 // Check dimensions 1059 camera_metadata_entry entry = 1060 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); 1061 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer); 1062 preWidth = static_cast<uint32_t>(entry.data.i32[2]); 1063 preHeight = static_cast<uint32_t>(entry.data.i32[3]); 1064 1065 camera_metadata_entry pixelArrayEntry = 1066 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE); 1067 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]); 1068 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]); 1069 1070 if (!((imageWidth == preWidth && imageHeight == preHeight) || 1071 (imageWidth == pixWidth && imageHeight == pixHeight))) { 1072 jniThrowException(env, "java/lang/AssertionError", 1073 "Height and width of imate buffer did not match height and width of" 1074 "either the preCorrectionActiveArraySize or the pixelArraySize."); 1075 return nullptr; 1076 } 1077 } 1078 1079 1080 1081 writer->addIfd(TIFF_IFD_0); 1082 1083 status_t err = OK; 1084 1085 const uint32_t samplesPerPixel = 1; 1086 const uint32_t bitsPerSample = BITS_PER_SAMPLE; 1087 1088 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB; 1089 uint8_t cfaPlaneColor[3] = {0, 1, 2}; 1090 uint8_t cfaEnum = -1; 1091 1092 // TODO: Greensplit. 
    // TODO: Add remaining non-essential tags

    // Setup main image tags.  Each scoped block writes one TIFF/DNG tag (or a
    // small related group) into IFD0; BAIL_IF_* macros throw and return null
    // on any writer failure.

    {
        // Set orientation
        uint16_t orientation = 1; // Normal
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
                env, TAG_ORIENTATION, writer);
    }

    {
        // Set subfiletype
        uint32_t subfileType = 0; // Main image
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
                TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
    }

    {
        // Set bits per sample
        uint16_t bits = static_cast<uint16_t>(bitsPerSample);
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
                TAG_BITSPERSAMPLE, writer);
    }

    {
        // Set compression
        uint16_t compression = 1; // None
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
                TIFF_IFD_0), env, TAG_COMPRESSION, writer);
    }

    {
        // Set dimensions
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
                env, TAG_IMAGEWIDTH, writer);
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
                env, TAG_IMAGELENGTH, writer);
    }

    {
        // Set photometric interpretation
        uint16_t interpretation = 32803; // CFA
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
                &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
    }

    {
        // Set blacklevel tags
        camera_metadata_entry entry =
                characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BLACKLEVEL, writer);
        const uint32_t* blackLevel = reinterpret_cast<const uint32_t*>(entry.data.i32);
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, entry.count, blackLevel,
                TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);

        // Black level pattern repeats on a 2x2 CFA cell.
        uint16_t repeatDim[2] = {2, 2};
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
                TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
    }

    {
        // Set samples per pixel
        uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
                env, TAG_SAMPLESPERPIXEL, writer);
    }

    {
        // Set planar configuration
        uint16_t config = 1; // Chunky
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
                TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
    }

    {
        // Set CFA pattern dimensions
        uint16_t repeatDim[2] = {2, 2};
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
                TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
    }

    {
        // Set CFA pattern
        camera_metadata_entry entry =
                characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer);

        const int cfaLength = 4;
        cfaEnum = entry.data.u8[0];
        uint8_t cfa[cfaLength];
        if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
                    "Invalid metadata for tag %d", TAG_CFAPATTERN);
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
                env, TAG_CFAPATTERN, writer);

        opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
    }

    {
        // Set CFA plane color
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
                TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
    }

    {
        // Set CFA layout
        uint16_t cfaLayout = 1;
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
                env, TAG_CFALAYOUT, writer);
    }

    {
        // image description
        uint8_t imageDescription = '\0'; // empty
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
                TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
    }

    {
        // make
        char manufacturer[PROPERTY_VALUE_MAX];

        // Use "" to represent unknown make as suggested in TIFF/EP spec.
        property_get("ro.product.manufacturer", manufacturer, "");
        uint32_t count = static_cast<uint32_t>(strlen(manufacturer)) + 1;

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
                reinterpret_cast<uint8_t*>(manufacturer), TIFF_IFD_0), env, TAG_MAKE, writer);
    }

    {
        // model
        char model[PROPERTY_VALUE_MAX];

        // Use "" to represent unknown model as suggested in TIFF/EP spec.
        property_get("ro.product.model", model, "");
        uint32_t count = static_cast<uint32_t>(strlen(model)) + 1;

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
                reinterpret_cast<uint8_t*>(model), TIFF_IFD_0), env, TAG_MODEL, writer);
    }

    {
        // x resolution
        uint32_t xres[] = { 72, 1 }; // default 72 ppi
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
                env, TAG_XRESOLUTION, writer);

        // y resolution
        uint32_t yres[] = { 72, 1 }; // default 72 ppi
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
                env, TAG_YRESOLUTION, writer);

        uint16_t unit = 2; // inches
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
                env, TAG_RESOLUTIONUNIT, writer);
    }

    {
        // software
        char software[PROPERTY_VALUE_MAX];
        property_get("ro.build.fingerprint", software, "");
        uint32_t count = static_cast<uint32_t>(strlen(software)) + 1;
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
                reinterpret_cast<uint8_t*>(software), TIFF_IFD_0), env, TAG_SOFTWARE, writer);
    }

    if (nativeContext->hasCaptureTime()) {
        // datetime
        String8 captureTime = nativeContext->getCaptureTime();

        if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
                reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
                    "Invalid metadata for tag %x", TAG_DATETIME);
            return nullptr;
        }

        // datetime original
        if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
                reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
                    "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
            return nullptr;
        }
    }

    {
        // TIFF/EP standard id
        uint8_t standardId[] = { 1, 0, 0, 0 };
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
                TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
    }

    {
        // copyright
        uint8_t copyright = '\0'; // empty
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
                TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
    }

    {
        // exposure time
        camera_metadata_entry entry =
                results.find(ANDROID_SENSOR_EXPOSURE_TIME);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);

        int64_t exposureTime = *(entry.data.i64);

        if (exposureTime < 0) {
            // Should be unreachable
            jniThrowException(env, "java/lang/IllegalArgumentException",
                    "Negative exposure time in metadata");
            return nullptr;
        }

        // Ensure exposure time doesn't overflow (for exposures > 4s).  Halving
        // numerator and denominator together preserves the rational's value.
        uint32_t denominator = 1000000000;
        while (exposureTime > UINT32_MAX) {
            exposureTime >>= 1;
            denominator >>= 1;
            if (denominator == 0) {
                // Should be unreachable
                jniThrowException(env, "java/lang/IllegalArgumentException",
                        "Exposure time too long");
                return nullptr;
            }
        }

        uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
                TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);

    }

    {
        // ISO speed ratings
        camera_metadata_entry entry =
                results.find(ANDROID_SENSOR_SENSITIVITY);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);

        int32_t tempIso = *(entry.data.i32);
        if (tempIso < 0) {
            jniThrowException(env, "java/lang/IllegalArgumentException",
                    "Negative ISO value");
            return nullptr;
        }

        if (tempIso > UINT16_MAX) {
            ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
            tempIso = UINT16_MAX;
        }

        uint16_t iso = static_cast<uint16_t>(tempIso);
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
                TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
    }

    {
        // focal length, stored as a rational with 1/100 mm precision
        camera_metadata_entry entry =
                results.find(ANDROID_LENS_FOCAL_LENGTH);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);

        uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
                TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
    }

    {
        // f number, stored as a rational with 1/100 precision
        camera_metadata_entry entry =
                results.find(ANDROID_LENS_APERTURE);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);

        uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
                TIFF_IFD_0), env, TAG_FNUMBER, writer);
    }

    {
        // Set DNG version information
        uint8_t version[4] = {1, 4, 0, 0};
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
                env, TAG_DNGVERSION, writer);

        uint8_t backwardVersion[4] = {1, 1, 0, 0};
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
                TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
    }

    {
        // Set whitelevel
        camera_metadata_entry entry =
                characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
        uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
                env, TAG_WHITELEVEL, writer);
    }

    {
        // Set default scale; count is 2 rationals (4 uint32 halves) = 1/1, 1/1
        uint32_t defaultScale[4] = {1, 1, 1, 1};
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
                TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
    }

    bool singleIlluminant = false;
    {
        // Set calibration illuminants; illuminant 2 (and the *2 matrices
        // below) are optional and skipped when the device reports only one.
        camera_metadata_entry entry1 =
                characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
        camera_metadata_entry entry2 =
                characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
        if (entry2.count == 0) {
            singleIlluminant = true;
        }
        uint16_t ref1 = entry1.data.u8[0];

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
                TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);

        if (!singleIlluminant) {
            uint16_t ref2 = entry2.data.u8[0];
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
                    TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
        }
    }

    {
        // Set color transforms, flattening rationals to interleaved
        // numerator/denominator pairs (addEntry counts rationals, not ints).
        camera_metadata_entry entry1 =
                characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);

        int32_t colorTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            colorTransform1[ctr++] = entry1.data.r[i].numerator;
            colorTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
                colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
            int32_t colorTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                colorTransform2[ctr++] = entry2.data.r[i].numerator;
                colorTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
                    colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
        }
    }

    {
        // Set calibration transforms
        camera_metadata_entry entry1 =
                characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);

        int32_t calibrationTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
            calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
                calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 =
                    characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
            int32_t calibrationTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
                calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
                    calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
        }
    }

    {
        // Set forward transforms
        camera_metadata_entry entry1 =
                characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);

        int32_t forwardTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            forwardTransform1[ctr++] = entry1.data.r[i].numerator;
            forwardTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
                forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 =
                    characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
            int32_t forwardTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                forwardTransform2[ctr++] = entry2.data.r[i].numerator;
                forwardTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
                    forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
        }
    }

    {
        // Set camera neutral.  Note rationals are narrowed to unsigned here.
        camera_metadata_entry entry =
                results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
        uint32_t cameraNeutral[entry.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry.count; ++i) {
            cameraNeutral[ctr++] =
                    static_cast<uint32_t>(entry.data.r[i].numerator);
            cameraNeutral[ctr++] =
                    static_cast<uint32_t>(entry.data.r[i].denominator);
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
                TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
    }


    {
        // Set dimensions
        if (calculateAndSetCrop(env, characteristics, writer) != OK) {
            return nullptr;
        }
        camera_metadata_entry entry =
                characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
        uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
        uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
        uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
        uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);

        // If we only have a buffer containing the pre-correction rectangle, ignore the offset
        // relative to the pixel array.
        if (imageWidth == width && imageHeight == height) {
            xmin = 0;
            ymin = 0;
        }

        // ActiveArea is ordered top, left, bottom, right.
        uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
                env, TAG_ACTIVEAREA, writer);
    }

    {
        // Setup unique camera model tag as "<model>-<manufacturer>-<brand>"
        char model[PROPERTY_VALUE_MAX];
        property_get("ro.product.model", model, "");

        char manufacturer[PROPERTY_VALUE_MAX];
        property_get("ro.product.manufacturer", manufacturer, "");

        char brand[PROPERTY_VALUE_MAX];
        property_get("ro.product.brand", brand, "");

        String8 cameraModel(model);
        cameraModel += "-";
        cameraModel += manufacturer;
        cameraModel += "-";
        cameraModel += brand;

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
                reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
                TAG_UNIQUECAMERAMODEL, writer);
    }

    {
        // Setup sensor noise model.  A missing or malformed noise profile only
        // logs a warning; the tag is optional.
        camera_metadata_entry entry =
                results.find(ANDROID_SENSOR_NOISE_PROFILE);

        const status_t numPlaneColors = 3;
        const status_t numCfaChannels = 4;

        uint8_t cfaOut[numCfaChannels];
        if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
            jniThrowException(env, "java/lang/IllegalArgumentException",
                    "Invalid CFA from camera characteristics");
            return nullptr;
        }

        double noiseProfile[numPlaneColors * 2];

        if (entry.count > 0) {
            if (entry.count != numCfaChannels * 2) {
                ALOGW("%s: Invalid entry count %zu for noise profile returned "
                        "in characteristics, no noise profile tag written...",
                        __FUNCTION__, entry.count);
            } else {
                if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
                        cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {

                    BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
                            numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
                            writer);
                } else {
                    ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
                            " tag written...", __FUNCTION__);
                }
            }
        } else {
            ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
                    __FUNCTION__);
        }
    }

    {
        // Set up opcode List 2
        OpcodeListBuilder builder;
        // NOTE(review): this local deliberately-or-not shadows the outer
        // `err`; all uses below refer to this inner one.
        status_t err = OK;

        // Set up lens shading map
        camera_metadata_entry entry1 =
                characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);

        uint32_t lsmWidth = 0;
        uint32_t lsmHeight = 0;

        if (entry1.count != 0) {
            lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
            lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
        }

        camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);

        camera_metadata_entry entry =
                characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
        uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
        uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
        uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
        uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
        // Shading map entries are RGGB quads, hence the * 4.
        if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
            err = builder.addGainMapsForMetadata(lsmWidth,
                    lsmHeight,
                    ymin,
                    xmin,
                    height,
                    width,
                    opcodeCfaLayout,
                    entry2.data.f);
            if (err != OK) {
                ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
                jniThrowRuntimeException(env, "failed to add lens shading map.");
                return nullptr;
            }
        }


        // Set up bad pixel correction list
        camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1681 1682 if ((entry3.count % 2) != 0) { 1683 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!", 1684 __FUNCTION__); 1685 jniThrowRuntimeException(env, "failed to add hotpixel map."); 1686 return nullptr; 1687 } 1688 1689 // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag 1690 std::vector<uint32_t> v; 1691 for (size_t i = 0; i < entry3.count; i+=2) { 1692 int32_t x = entry3.data.i32[i]; 1693 int32_t y = entry3.data.i32[i + 1]; 1694 x -= static_cast<int32_t>(xmin); 1695 y -= static_cast<int32_t>(ymin); 1696 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width || 1697 static_cast<uint32_t>(y) >= width) { 1698 continue; 1699 } 1700 v.push_back(x); 1701 v.push_back(y); 1702 } 1703 const uint32_t* badPixels = &v[0]; 1704 uint32_t badPixelCount = v.size(); 1705 1706 if (badPixelCount > 0) { 1707 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout); 1708 1709 if (err != OK) { 1710 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__); 1711 jniThrowRuntimeException(env, "failed to add hotpixel map."); 1712 return nullptr; 1713 } 1714 } 1715 1716 1717 size_t listSize = builder.getSize(); 1718 uint8_t opcodeListBuf[listSize]; 1719 err = builder.buildOpList(opcodeListBuf); 1720 if (err == OK) { 1721 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize, opcodeListBuf, 1722 TIFF_IFD_0), env, TAG_OPCODELIST2, writer); 1723 } else { 1724 ALOGE("%s: Could not build list of opcodes for distortion correction and lens shading" 1725 "map.", __FUNCTION__); 1726 jniThrowRuntimeException(env, "failed to construct opcode list for distortion" 1727 " correction and lens shading map"); 1728 return nullptr; 1729 } 1730 } 1731 1732 { 1733 // Set up opcode List 3 1734 OpcodeListBuilder builder; 1735 status_t err = OK; 1736 1737 // Set up rectilinear distortion correction 1738 camera_metadata_entry entry3 = 1739 results.find(ANDROID_LENS_RADIAL_DISTORTION); 1740 
        camera_metadata_entry entry4 =
                results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);

        // Both radial distortion (6 floats) and intrinsic calibration
        // (5 floats) must be present to emit the warp opcode.
        if (entry3.count == 6 && entry4.count == 5) {
            float cx = entry4.data.f[/*c_x*/2];
            float cy = entry4.data.f[/*c_y*/3];
            err = builder.addWarpRectilinearForMetadata(entry3.data.f, preWidth, preHeight, cx,
                    cy);
            if (err != OK) {
                ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
                jniThrowRuntimeException(env, "failed to add distortion correction.");
                return nullptr;
            }
        }

        size_t listSize = builder.getSize();
        uint8_t opcodeListBuf[listSize];
        err = builder.buildOpList(opcodeListBuf);
        if (err == OK) {
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize, opcodeListBuf,
                    TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
        } else {
            ALOGE("%s: Could not build list of opcodes for distortion correction and lens shading"
                    "map.", __FUNCTION__);
            jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
                    " correction and lens shading map");
            return nullptr;
        }
    }

    {
        // Set up orientation tags.
        uint16_t orientation = nativeContext->getOrientation();
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
                env, TAG_ORIENTATION, writer);

    }

    if (nativeContext->hasDescription()){
        // Set Description
        String8 description = nativeContext->getDescription();
        size_t len = description.bytes() + 1;
        if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
                reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
                    "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
        }
    }

    if (nativeContext->hasGpsData()) {
        // Set GPS tags in a dedicated GPSInfo sub-IFD, creating it on demand.
        GpsData gpsData = nativeContext->getGpsData();
        if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
            if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
                ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
                        TIFF_IFD_0);
                jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
                return nullptr;
            }
        }

        {
            uint8_t version[] = {2, 3, 0, 0};
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
                    TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
        }

        {
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
                    GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
                    TAG_GPSLATITUDEREF, writer);
        }

        {
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
                    GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
                    TAG_GPSLONGITUDEREF, writer);
        }

        {
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
                    TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
        }

        {
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
                    TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
        }

        {
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
                    TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
        }

        {
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
                    GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
                    TAG_GPSDATESTAMP, writer);
        }
    }


    if (nativeContext->hasThumbnail()) {
        // When a thumbnail exists, the main image tags written above are moved
        // into SubIFD1 and IFD0 is rewritten to describe the RGB thumbnail.
        if (!writer->hasIfd(TIFF_IFD_SUB1)) {
            if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
                ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
                        TIFF_IFD_0);
                jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
                return nullptr;
            }
        }

        Vector<uint16_t> tagsToMove;
        tagsToMove.add(TAG_ORIENTATION);
        tagsToMove.add(TAG_NEWSUBFILETYPE);
        tagsToMove.add(TAG_ACTIVEAREA);
        tagsToMove.add(TAG_BITSPERSAMPLE);
        tagsToMove.add(TAG_COMPRESSION);
        tagsToMove.add(TAG_IMAGEWIDTH);
        tagsToMove.add(TAG_IMAGELENGTH);
        tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION);
        tagsToMove.add(TAG_BLACKLEVEL);
        tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
        tagsToMove.add(TAG_SAMPLESPERPIXEL);
        tagsToMove.add(TAG_PLANARCONFIGURATION);
        tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
        tagsToMove.add(TAG_CFAPATTERN);
        tagsToMove.add(TAG_CFAPLANECOLOR);
        tagsToMove.add(TAG_CFALAYOUT);
        tagsToMove.add(TAG_XRESOLUTION);
        tagsToMove.add(TAG_YRESOLUTION);
        tagsToMove.add(TAG_RESOLUTIONUNIT);
        tagsToMove.add(TAG_WHITELEVEL);
        tagsToMove.add(TAG_DEFAULTSCALE);
        tagsToMove.add(TAG_DEFAULTCROPORIGIN);
        tagsToMove.add(TAG_DEFAULTCROPSIZE);
        tagsToMove.add(TAG_OPCODELIST2);
        tagsToMove.add(TAG_OPCODELIST3);

        if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
            jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
            return nullptr;
        }

        // Make sure both IFDs get the same orientation tag
        sp<TiffEntry> orientEntry = writer->getEntry(TAG_ORIENTATION, TIFF_IFD_SUB1);
        if (orientEntry.get() != nullptr) {
            writer->addEntry(orientEntry, TIFF_IFD_0);
        }

        // Setup thumbnail tags

        {
            // Set photometric interpretation
            uint16_t interpretation = 2; // RGB
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
                    &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
        }

        {
            // Set planar configuration
            uint16_t config = 1; // Chunky
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
                    TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
        }

        {
            // Set samples per pixel
            uint16_t samples = SAMPLES_PER_RGB_PIXEL;
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
                    TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
        }

        {
            // Set bits per sample
            uint16_t bits = BITS_PER_RGB_SAMPLE;
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0),
                    env, TAG_BITSPERSAMPLE, writer);
        }

        {
            // Set subfiletype
            uint32_t subfileType = 1; // Thumbnail image
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
                    TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
        }

        {
            // Set compression
            uint16_t compression = 1; // None
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
                    TIFF_IFD_0), env, TAG_COMPRESSION, writer);
        }

        {
            // Set dimensions
            uint32_t uWidth = nativeContext->getThumbnailWidth();
            uint32_t uHeight = nativeContext->getThumbnailHeight();
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
                    env, TAG_IMAGEWIDTH, writer);
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
                    env, TAG_IMAGELENGTH, writer);
        }

        {
            // x resolution
            uint32_t xres[] = { 72, 1 }; // default 72 ppi
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
                    env, TAG_XRESOLUTION, writer);

            // y resolution
            uint32_t yres[] = { 72, 1 }; // default 72 ppi
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
                    env, TAG_YRESOLUTION, writer);

            uint16_t unit = 2; // inches
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
                    env, TAG_RESOLUTIONUNIT, writer);
        }
    }

    // Add strip tags for IFD0 (the thumbnail when one exists, otherwise the
    // main image).
    if (writer->addStrip(TIFF_IFD_0) != OK) {
        ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__);
        jniThrowException(env, "java/lang/IllegalStateException",
                "Failed to setup thumbnail strip tags.");
        return nullptr;
    }

    if (writer->hasIfd(TIFF_IFD_SUB1)) {
        if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
            ALOGE("%s: Could not main image strip tags.", __FUNCTION__);
            jniThrowException(env, "java/lang/IllegalStateException",
                    "Failed to setup main image strip tags.");
            return nullptr;
        }
    }
    return writer;
}

// Drop the native context attached to this DngCreator (decrements its refcount).
static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
    ALOGV("%s:", __FUNCTION__);
    DngCreator_setNativeContext(env, thiz, nullptr);
}

// Record the TIFF orientation value to use when writing the DNG.
static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setOrientation called with uninitialized DngCreator");
        return;
    }

    uint16_t orientation = static_cast<uint16_t>(orient);
    context->setOrientation(orientation);
}

// Record the image description string to write into the DNG.
static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setDescription called with uninitialized DngCreator");
        return;
    }

    // NOTE(review): GetStringUTFChars may return null on OOM; that case is
    // not checked here before constructing the String8 — confirm upstream
    // guarantees a non-null description.
    const char* desc = env->GetStringUTFChars(description, nullptr);
    context->setDescription(String8(desc));
    env->ReleaseStringUTFChars(description, desc);
}

// Record GPS coordinates/timestamps after validating array lengths; values
// are copied into a GpsData struct with NUL-terminated ref/date strings.
static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
        jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setGpsTags called with uninitialized DngCreator");
        return;
    }

    GpsData data;

    jsize latLen = env->GetArrayLength(latTag);
    jsize longLen = env->GetArrayLength(longTag);
    jsize timeLen = env->GetArrayLength(timeTag);
    if (latLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid latitude tag length");
        return;
    } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid longitude tag length");
        return;
    } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid time tag length");
        return;
    }

    env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLatitude));
    env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLongitude));
    env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mTimestamp));


    env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
    data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
    data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
            reinterpret_cast<char*>(&data.mDate));
    data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';

    context->setGpsData(data);
}

// Record the RGB thumbnail pixels; the direct ByteBuffer capacity must match
// width * height * BYTES_PER_RGB_PIXEL exactly.
// (Function continues beyond this chunk of the file.)
static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
        jint height) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setThumbnail called with uninitialized DngCreator");
        return;
    }

    size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
    jlong capacity = env->GetDirectBufferCapacity(buffer);
    if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
        // NOTE(review): "%d" does not match the jlong/size_t varargs passed
        // here — should be "%" PRId64 / "%zu"; confirm and fix separately.
        jniThrowExceptionFmt(env, "java/lang/AssertionError",
                "Invalid size %d for thumbnail, expected size was %d",
                capacity, fullSize);
        return;
    }

    uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
    if (pixelBytes == nullptr) {
        ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
        jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid 
ByteBuffer"); 2090 return; 2091 } 2092 2093 if (!context->setThumbnail(pixelBytes, width, height)) { 2094 jniThrowException(env, "java/lang/IllegalStateException", 2095 "Failed to set thumbnail."); 2096 return; 2097 } 2098} 2099 2100// TODO: Refactor out common preamble for the two nativeWrite methods. 2101static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width, 2102 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset, 2103 jboolean isDirect) { 2104 ALOGV("%s:", __FUNCTION__); 2105 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, " 2106 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width, 2107 height, rowStride, pixStride, offset); 2108 uint32_t rStride = static_cast<uint32_t>(rowStride); 2109 uint32_t pStride = static_cast<uint32_t>(pixStride); 2110 uint32_t uWidth = static_cast<uint32_t>(width); 2111 uint32_t uHeight = static_cast<uint32_t>(height); 2112 uint64_t uOffset = static_cast<uint64_t>(offset); 2113 2114 sp<JniOutputStream> out = new JniOutputStream(env, outStream); 2115 if(env->ExceptionCheck()) { 2116 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__); 2117 return; 2118 } 2119 2120 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2121 if (context == nullptr) { 2122 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2123 jniThrowException(env, "java/lang/AssertionError", 2124 "Write called with uninitialized DngCreator"); 2125 return; 2126 } 2127 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight); 2128 2129 if (writer.get() == nullptr) { 2130 return; 2131 } 2132 2133 // Validate DNG size 2134 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) { 2135 return; 2136 } 2137 2138 sp<JniInputByteBuffer> inBuf; 2139 Vector<StripSource*> sources; 2140 sp<DirectStripSource> thumbnailSource; 2141 uint32_t targetIfd = TIFF_IFD_0; 2142 2143 bool hasThumbnail = 
writer->hasIfd(TIFF_IFD_SUB1); 2144 2145 if (hasThumbnail) { 2146 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__); 2147 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE; 2148 uint32_t thumbWidth = context->getThumbnailWidth(); 2149 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0, 2150 thumbWidth, context->getThumbnailHeight(), bytesPerPixel, 2151 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE, 2152 SAMPLES_PER_RGB_PIXEL); 2153 sources.add(thumbnailSource.get()); 2154 targetIfd = TIFF_IFD_SUB1; 2155 } 2156 2157 if (isDirect) { 2158 size_t fullSize = rStride * uHeight; 2159 jlong capacity = env->GetDirectBufferCapacity(inBuffer); 2160 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) { 2161 jniThrowExceptionFmt(env, "java/lang/IllegalStateException", 2162 "Invalid size %d for Image, size given in metadata is %d at current stride", 2163 capacity, fullSize); 2164 return; 2165 } 2166 2167 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer)); 2168 if (pixelBytes == nullptr) { 2169 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__); 2170 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer"); 2171 return; 2172 } 2173 2174 ALOGV("%s: Using direct-type strip source.", __FUNCTION__); 2175 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride, 2176 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); 2177 sources.add(&stripSource); 2178 2179 status_t ret = OK; 2180 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) { 2181 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); 2182 if (!env->ExceptionCheck()) { 2183 jniThrowExceptionFmt(env, "java/io/IOException", 2184 "Encountered error %d while writing file.", ret); 2185 } 2186 return; 2187 } 2188 } else { 2189 inBuf = new JniInputByteBuffer(env, inBuffer); 2190 2191 ALOGV("%s: 
Using input-type strip source.", __FUNCTION__); 2192 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride, 2193 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); 2194 sources.add(&stripSource); 2195 2196 status_t ret = OK; 2197 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) { 2198 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); 2199 if (!env->ExceptionCheck()) { 2200 jniThrowExceptionFmt(env, "java/io/IOException", 2201 "Encountered error %d while writing file.", ret); 2202 } 2203 return; 2204 } 2205 } 2206} 2207 2208static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream, 2209 jobject inStream, jint width, jint height, jlong offset) { 2210 ALOGV("%s:", __FUNCTION__); 2211 2212 uint32_t rowStride = width * BYTES_PER_SAMPLE; 2213 uint32_t pixStride = BYTES_PER_SAMPLE; 2214 uint32_t uWidth = static_cast<uint32_t>(width); 2215 uint32_t uHeight = static_cast<uint32_t>(height); 2216 uint64_t uOffset = static_cast<uint32_t>(offset); 2217 2218 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, " 2219 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width, 2220 height, rowStride, pixStride, offset); 2221 2222 sp<JniOutputStream> out = new JniOutputStream(env, outStream); 2223 if (env->ExceptionCheck()) { 2224 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__); 2225 return; 2226 } 2227 2228 NativeContext* context = DngCreator_getNativeContext(env, thiz); 2229 if (context == nullptr) { 2230 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__); 2231 jniThrowException(env, "java/lang/AssertionError", 2232 "Write called with uninitialized DngCreator"); 2233 return; 2234 } 2235 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight); 2236 2237 if (writer.get() == nullptr) { 2238 return; 2239 } 2240 2241 // Validate DNG size 2242 if (!validateDngHeader(env, writer, 
*(context->getCharacteristics()), width, height)) { 2243 return; 2244 } 2245 2246 sp<DirectStripSource> thumbnailSource; 2247 uint32_t targetIfd = TIFF_IFD_0; 2248 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1); 2249 Vector<StripSource*> sources; 2250 2251 if (hasThumbnail) { 2252 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__); 2253 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE; 2254 uint32_t width = context->getThumbnailWidth(); 2255 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0, 2256 width, context->getThumbnailHeight(), bytesPerPixel, 2257 bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE, 2258 SAMPLES_PER_RGB_PIXEL); 2259 sources.add(thumbnailSource.get()); 2260 targetIfd = TIFF_IFD_SUB1; 2261 } 2262 2263 sp<JniInputStream> in = new JniInputStream(env, inStream); 2264 2265 ALOGV("%s: Using input-type strip source.", __FUNCTION__); 2266 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride, 2267 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL); 2268 sources.add(&stripSource); 2269 2270 status_t ret = OK; 2271 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) { 2272 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret); 2273 if (!env->ExceptionCheck()) { 2274 jniThrowExceptionFmt(env, "java/io/IOException", 2275 "Encountered error %d while writing file.", ret); 2276 } 2277 return; 2278 } 2279} 2280 2281} /*extern "C" */ 2282 2283static const JNINativeMethod gDngCreatorMethods[] = { 2284 {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit}, 2285 {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;" 2286 "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V", 2287 (void*) DngCreator_init}, 2288 {"nativeDestroy", "()V", (void*) DngCreator_destroy}, 2289 {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation}, 2290 {"nativeSetDescription", 
"(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription}, 2291 {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V", 2292 (void*) DngCreator_nativeSetGpsTags}, 2293 {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail}, 2294 {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V", 2295 (void*) DngCreator_nativeWriteImage}, 2296 {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V", 2297 (void*) DngCreator_nativeWriteInputStream}, 2298}; 2299 2300int register_android_hardware_camera2_DngCreator(JNIEnv *env) { 2301 return RegisterMethodsOrDie(env, 2302 "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods)); 2303} 2304