1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.impl; 18 19 import android.compat.annotation.UnsupportedAppUsage; 20 import android.graphics.ImageFormat; 21 import android.graphics.Point; 22 import android.graphics.Rect; 23 import android.hardware.camera2.CameraCharacteristics; 24 import android.hardware.camera2.CameraMetadata; 25 import android.hardware.camera2.CaptureRequest; 26 import android.hardware.camera2.CaptureResult; 27 import android.hardware.camera2.marshal.MarshalQueryable; 28 import android.hardware.camera2.marshal.MarshalRegistry; 29 import android.hardware.camera2.marshal.Marshaler; 30 import android.hardware.camera2.marshal.impl.MarshalQueryableArray; 31 import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern; 32 import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean; 33 import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform; 34 import android.hardware.camera2.marshal.impl.MarshalQueryableEnum; 35 import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration; 36 import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle; 37 import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger; 38 import android.hardware.camera2.marshal.impl.MarshalQueryablePair; 39 import 
android.hardware.camera2.marshal.impl.MarshalQueryableParcelable; 40 import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive; 41 import android.hardware.camera2.marshal.impl.MarshalQueryableRange; 42 import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration; 43 import android.hardware.camera2.marshal.impl.MarshalQueryableRect; 44 import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap; 45 import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector; 46 import android.hardware.camera2.marshal.impl.MarshalQueryableSize; 47 import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF; 48 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration; 49 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration; 50 import android.hardware.camera2.marshal.impl.MarshalQueryableString; 51 import android.hardware.camera2.params.Capability; 52 import android.hardware.camera2.params.Face; 53 import android.hardware.camera2.params.HighSpeedVideoConfiguration; 54 import android.hardware.camera2.params.LensShadingMap; 55 import android.hardware.camera2.params.MandatoryStreamCombination; 56 import android.hardware.camera2.params.OisSample; 57 import android.hardware.camera2.params.RecommendedStreamConfiguration; 58 import android.hardware.camera2.params.RecommendedStreamConfigurationMap; 59 import android.hardware.camera2.params.ReprocessFormatsMap; 60 import android.hardware.camera2.params.StreamConfiguration; 61 import android.hardware.camera2.params.StreamConfigurationDuration; 62 import android.hardware.camera2.params.StreamConfigurationMap; 63 import android.hardware.camera2.params.TonemapCurve; 64 import android.hardware.camera2.utils.TypeReference; 65 import android.location.Location; 66 import android.location.LocationManager; 67 import android.os.Parcel; 68 import android.os.Parcelable; 69 import 
android.os.ServiceSpecificException; 70 import android.util.Log; 71 import android.util.Range; 72 import android.util.Size; 73 74 import dalvik.annotation.optimization.FastNative; 75 76 import java.io.IOException; 77 import java.nio.ByteBuffer; 78 import java.nio.ByteOrder; 79 import java.util.ArrayList; 80 import java.util.HashMap; 81 import java.util.List; 82 import java.util.Objects; 83 84 /** 85 * Implementation of camera metadata marshal/unmarshal across Binder to 86 * the camera service 87 */ 88 public class CameraMetadataNative implements Parcelable { 89 90 public static class Key<T> { 91 private boolean mHasTag; 92 private int mTag; 93 private long mVendorId = Long.MAX_VALUE; 94 private final Class<T> mType; 95 private final TypeReference<T> mTypeReference; 96 private final String mName; 97 private final String mFallbackName; 98 private final int mHash; 99 100 /** 101 * @hide 102 */ Key(String name, Class<T> type, long vendorId)103 public Key(String name, Class<T> type, long vendorId) { 104 if (name == null) { 105 throw new NullPointerException("Key needs a valid name"); 106 } else if (type == null) { 107 throw new NullPointerException("Type needs to be non-null"); 108 } 109 mName = name; 110 mFallbackName = null; 111 mType = type; 112 mVendorId = vendorId; 113 mTypeReference = TypeReference.createSpecializedTypeReference(type); 114 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 115 } 116 117 /** 118 * @hide 119 */ Key(String name, String fallbackName, Class<T> type)120 public Key(String name, String fallbackName, Class<T> type) { 121 if (name == null) { 122 throw new NullPointerException("Key needs a valid name"); 123 } else if (type == null) { 124 throw new NullPointerException("Type needs to be non-null"); 125 } 126 mName = name; 127 mFallbackName = fallbackName; 128 mType = type; 129 mTypeReference = TypeReference.createSpecializedTypeReference(type); 130 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 131 } 132 133 /** 134 * Visible for 
testing only. 135 * 136 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 137 * for application code or vendor-extended keys.</p> 138 */ Key(String name, Class<T> type)139 public Key(String name, Class<T> type) { 140 if (name == null) { 141 throw new NullPointerException("Key needs a valid name"); 142 } else if (type == null) { 143 throw new NullPointerException("Type needs to be non-null"); 144 } 145 mName = name; 146 mFallbackName = null; 147 mType = type; 148 mTypeReference = TypeReference.createSpecializedTypeReference(type); 149 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 150 } 151 152 /** 153 * Visible for testing only. 154 * 155 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 156 * for application code or vendor-extended keys.</p> 157 */ 158 @SuppressWarnings("unchecked") Key(String name, TypeReference<T> typeReference)159 public Key(String name, TypeReference<T> typeReference) { 160 if (name == null) { 161 throw new NullPointerException("Key needs a valid name"); 162 } else if (typeReference == null) { 163 throw new NullPointerException("TypeReference needs to be non-null"); 164 } 165 mName = name; 166 mFallbackName = null; 167 mType = (Class<T>)typeReference.getRawType(); 168 mTypeReference = typeReference; 169 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 170 } 171 172 /** 173 * Return a camelCase, period separated name formatted like: 174 * {@code "root.section[.subsections].name"}. 
175 * 176 * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."}; 177 * keys that are device/platform-specific are prefixed with {@code "com."}.</p> 178 * 179 * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would 180 * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device 181 * specific key might look like {@code "com.google.nexus.data.private"}.</p> 182 * 183 * @return String representation of the key name 184 */ getName()185 public final String getName() { 186 return mName; 187 } 188 189 /** 190 * {@inheritDoc} 191 */ 192 @Override hashCode()193 public final int hashCode() { 194 return mHash; 195 } 196 197 /** 198 * Compare this key against other native keys, request keys, result keys, and 199 * characteristics keys. 200 * 201 * <p>Two keys are considered equal if their name and type reference are equal.</p> 202 * 203 * <p>Note that the equality against non-native keys is one-way. A native key may be equal 204 * to a result key; but that same result key will not be equal to a native key.</p> 205 */ 206 @SuppressWarnings("rawtypes") 207 @Override equals(Object o)208 public final boolean equals(Object o) { 209 if (this == o) { 210 return true; 211 } 212 213 if (o == null || this.hashCode() != o.hashCode()) { 214 return false; 215 } 216 217 Key<?> lhs; 218 219 if (o instanceof CaptureResult.Key) { 220 lhs = ((CaptureResult.Key)o).getNativeKey(); 221 } else if (o instanceof CaptureRequest.Key) { 222 lhs = ((CaptureRequest.Key)o).getNativeKey(); 223 } else if (o instanceof CameraCharacteristics.Key) { 224 lhs = ((CameraCharacteristics.Key)o).getNativeKey(); 225 } else if ((o instanceof Key)) { 226 lhs = (Key<?>)o; 227 } else { 228 return false; 229 } 230 231 return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference); 232 } 233 234 /** 235 * <p> 236 * Get the tag corresponding to this key. This enables insertion into the 237 * native metadata. 
238 * </p> 239 * 240 * <p>This value is looked up the first time, and cached subsequently.</p> 241 * 242 * <p>This function may be called without cacheTag() if this is not a vendor key. 243 * If this is a vendor key, cacheTag() must be called first before getTag() can 244 * be called. Otherwise, mVendorId could be default (Long.MAX_VALUE) and vendor 245 * tag lookup could fail.</p> 246 * 247 * @return The tag numeric value corresponding to the string 248 */ 249 @UnsupportedAppUsage getTag()250 public final int getTag() { 251 if (!mHasTag) { 252 mTag = CameraMetadataNative.getTag(mName, mVendorId); 253 mHasTag = true; 254 } 255 return mTag; 256 } 257 258 /** 259 * Whether this key's tag is cached. 260 * 261 * @hide 262 */ 263 @UnsupportedAppUsage hasTag()264 public final boolean hasTag() { 265 return mHasTag; 266 } 267 268 /** 269 * Cache this key's tag. 270 * 271 * @hide 272 */ 273 @UnsupportedAppUsage cacheTag(int tag)274 public final void cacheTag(int tag) { 275 mHasTag = true; 276 mTag = tag; 277 } 278 279 /** 280 * Get the raw class backing the type {@code T} for this key. 281 * 282 * <p>The distinction is only important if {@code T} is a generic, e.g. 283 * {@code Range<Integer>} since the nested type will be erased.</p> 284 */ getType()285 public final Class<T> getType() { 286 // TODO: remove this; other places should use #getTypeReference() instead 287 return mType; 288 } 289 290 /** 291 * Get the vendor tag provider id. 292 * 293 * @hide 294 */ getVendorId()295 public final long getVendorId() { 296 return mVendorId; 297 } 298 299 /** 300 * Get the type reference backing the type {@code T} for this key. 301 * 302 * <p>The distinction is only important if {@code T} is a generic, e.g. 
303 * {@code Range<Integer>} since the nested type will be retained.</p> 304 */ getTypeReference()305 public final TypeReference<T> getTypeReference() { 306 return mTypeReference; 307 } 308 } 309 310 private static final String TAG = "CameraMetadataJV"; 311 private static final boolean DEBUG = false; 312 313 // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h 314 public static final int NATIVE_JPEG_FORMAT = 0x21; 315 316 private static final String CELLID_PROCESS = "CELLID"; 317 private static final String GPS_PROCESS = "GPS"; 318 private static final int FACE_LANDMARK_SIZE = 6; 319 translateLocationProviderToProcess(final String provider)320 private static String translateLocationProviderToProcess(final String provider) { 321 if (provider == null) { 322 return null; 323 } 324 switch(provider) { 325 case LocationManager.GPS_PROVIDER: 326 return GPS_PROCESS; 327 case LocationManager.NETWORK_PROVIDER: 328 return CELLID_PROCESS; 329 default: 330 return null; 331 } 332 } 333 translateProcessToLocationProvider(final String process)334 private static String translateProcessToLocationProvider(final String process) { 335 if (process == null) { 336 return null; 337 } 338 switch(process) { 339 case GPS_PROCESS: 340 return LocationManager.GPS_PROVIDER; 341 case CELLID_PROCESS: 342 return LocationManager.NETWORK_PROVIDER; 343 default: 344 return null; 345 } 346 } 347 CameraMetadataNative()348 public CameraMetadataNative() { 349 super(); 350 mMetadataPtr = nativeAllocate(); 351 if (mMetadataPtr == 0) { 352 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 353 } 354 } 355 356 /** 357 * Copy constructor - clone metadata 358 */ CameraMetadataNative(CameraMetadataNative other)359 public CameraMetadataNative(CameraMetadataNative other) { 360 super(); 361 mMetadataPtr = nativeAllocateCopy(other.mMetadataPtr); 362 if (mMetadataPtr == 0) { 363 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 364 } 365 } 366 367 /** 368 
* Move the contents from {@code other} into a new camera metadata instance.</p> 369 * 370 * <p>After this call, {@code other} will become empty.</p> 371 * 372 * @param other the previous metadata instance which will get pilfered 373 * @return a new metadata instance with the values from {@code other} moved into it 374 */ move(CameraMetadataNative other)375 public static CameraMetadataNative move(CameraMetadataNative other) { 376 CameraMetadataNative newObject = new CameraMetadataNative(); 377 newObject.swap(other); 378 return newObject; 379 } 380 381 public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR = 382 new Parcelable.Creator<CameraMetadataNative>() { 383 @Override 384 public CameraMetadataNative createFromParcel(Parcel in) { 385 CameraMetadataNative metadata = new CameraMetadataNative(); 386 metadata.readFromParcel(in); 387 return metadata; 388 } 389 390 @Override 391 public CameraMetadataNative[] newArray(int size) { 392 return new CameraMetadataNative[size]; 393 } 394 }; 395 396 @Override describeContents()397 public int describeContents() { 398 return 0; 399 } 400 401 @Override writeToParcel(Parcel dest, int flags)402 public void writeToParcel(Parcel dest, int flags) { 403 nativeWriteToParcel(dest, mMetadataPtr); 404 } 405 406 /** 407 * @hide 408 */ get(CameraCharacteristics.Key<T> key)409 public <T> T get(CameraCharacteristics.Key<T> key) { 410 return get(key.getNativeKey()); 411 } 412 413 /** 414 * @hide 415 */ get(CaptureResult.Key<T> key)416 public <T> T get(CaptureResult.Key<T> key) { 417 return get(key.getNativeKey()); 418 } 419 420 /** 421 * @hide 422 */ get(CaptureRequest.Key<T> key)423 public <T> T get(CaptureRequest.Key<T> key) { 424 return get(key.getNativeKey()); 425 } 426 427 /** 428 * Look-up a metadata field value by its key. 
429 * 430 * @param key a non-{@code null} key instance 431 * @return the field corresponding to the {@code key}, or {@code null} if no value was set 432 */ get(Key<T> key)433 public <T> T get(Key<T> key) { 434 Objects.requireNonNull(key, "key must not be null"); 435 436 // Check if key has been overridden to use a wrapper class on the java side. 437 GetCommand g = sGetCommandMap.get(key); 438 if (g != null) { 439 return g.getValue(this, key); 440 } 441 return getBase(key); 442 } 443 readFromParcel(Parcel in)444 public void readFromParcel(Parcel in) { 445 nativeReadFromParcel(in, mMetadataPtr); 446 } 447 448 /** 449 * Set the global client-side vendor tag descriptor to allow use of vendor 450 * tags in camera applications. 451 * 452 * @throws ServiceSpecificException 453 * @hide 454 */ setupGlobalVendorTagDescriptor()455 public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException { 456 int err = nativeSetupGlobalVendorTagDescriptor(); 457 if (err != 0) { 458 throw new ServiceSpecificException(err, "Failure to set up global vendor tags"); 459 } 460 } 461 462 /** 463 * Set the global client-side vendor tag descriptor to allow use of vendor 464 * tags in camera applications. 465 * 466 * @return int An error code corresponding to one of the 467 * {@link ICameraService} error constants, or 0 on success. 468 */ nativeSetupGlobalVendorTagDescriptor()469 private static native int nativeSetupGlobalVendorTagDescriptor(); 470 471 /** 472 * Set a camera metadata field to a value. The field definitions can be 473 * found in {@link CameraCharacteristics}, {@link CaptureResult}, and 474 * {@link CaptureRequest}. 475 * 476 * @param key The metadata field to write. 477 * @param value The value to set the field to, which must be of a matching 478 * type to the key. 
479 */ set(Key<T> key, T value)480 public <T> void set(Key<T> key, T value) { 481 SetCommand s = sSetCommandMap.get(key); 482 if (s != null) { 483 s.setValue(this, value); 484 return; 485 } 486 487 setBase(key, value); 488 } 489 set(CaptureRequest.Key<T> key, T value)490 public <T> void set(CaptureRequest.Key<T> key, T value) { 491 set(key.getNativeKey(), value); 492 } 493 set(CaptureResult.Key<T> key, T value)494 public <T> void set(CaptureResult.Key<T> key, T value) { 495 set(key.getNativeKey(), value); 496 } 497 set(CameraCharacteristics.Key<T> key, T value)498 public <T> void set(CameraCharacteristics.Key<T> key, T value) { 499 set(key.getNativeKey(), value); 500 } 501 502 // Keep up-to-date with camera_metadata.h 503 /** 504 * @hide 505 */ 506 public static final int TYPE_BYTE = 0; 507 /** 508 * @hide 509 */ 510 public static final int TYPE_INT32 = 1; 511 /** 512 * @hide 513 */ 514 public static final int TYPE_FLOAT = 2; 515 /** 516 * @hide 517 */ 518 public static final int TYPE_INT64 = 3; 519 /** 520 * @hide 521 */ 522 public static final int TYPE_DOUBLE = 4; 523 /** 524 * @hide 525 */ 526 public static final int TYPE_RATIONAL = 5; 527 /** 528 * @hide 529 */ 530 public static final int NUM_TYPES = 6; 531 close()532 private void close() { 533 // Delete native pointer, but does not clear it 534 nativeClose(mMetadataPtr); 535 mMetadataPtr = 0; 536 } 537 getBase(CameraCharacteristics.Key<T> key)538 private <T> T getBase(CameraCharacteristics.Key<T> key) { 539 return getBase(key.getNativeKey()); 540 } 541 getBase(CaptureResult.Key<T> key)542 private <T> T getBase(CaptureResult.Key<T> key) { 543 return getBase(key.getNativeKey()); 544 } 545 getBase(CaptureRequest.Key<T> key)546 private <T> T getBase(CaptureRequest.Key<T> key) { 547 return getBase(key.getNativeKey()); 548 } 549 getBase(Key<T> key)550 private <T> T getBase(Key<T> key) { 551 int tag; 552 if (key.hasTag()) { 553 tag = key.getTag(); 554 } else { 555 tag = nativeGetTagFromKeyLocal(mMetadataPtr, 
key.getName()); 556 key.cacheTag(tag); 557 } 558 byte[] values = readValues(tag); 559 if (values == null) { 560 // If the key returns null, use the fallback key if exists. 561 // This is to support old key names for the newly published keys. 562 if (key.mFallbackName == null) { 563 return null; 564 } 565 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.mFallbackName); 566 values = readValues(tag); 567 if (values == null) { 568 return null; 569 } 570 } 571 572 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 573 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 574 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 575 return marshaler.unmarshal(buffer); 576 } 577 578 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 579 // metadata. 580 private static final HashMap<Key<?>, GetCommand> sGetCommandMap = 581 new HashMap<Key<?>, GetCommand>(); 582 static { 583 sGetCommandMap.put( GetCommand()584 CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() { 585 @Override 586 @SuppressWarnings("unchecked") 587 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 588 return (T) metadata.getAvailableFormats(); 589 } 590 }); 591 sGetCommandMap.put( GetCommand()592 CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() { 593 @Override 594 @SuppressWarnings("unchecked") 595 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 596 return (T) metadata.getFaces(); 597 } 598 }); 599 sGetCommandMap.put( GetCommand()600 CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() { 601 @Override 602 @SuppressWarnings("unchecked") 603 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 604 return (T) metadata.getFaceRectangles(); 605 } 606 }); 607 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey()608 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(), 
609 new GetCommand() { 610 @Override 611 @SuppressWarnings("unchecked") 612 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 613 return (T) metadata.getStreamConfigurationMap(); 614 } 615 }); 616 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey()617 CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(), 618 new GetCommand() { 619 @Override 620 @SuppressWarnings("unchecked") 621 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 622 return (T) metadata.getMandatoryStreamCombinations(); 623 } 624 }); 625 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey()626 CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey(), 627 new GetCommand() { 628 @Override 629 @SuppressWarnings("unchecked") 630 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 631 return (T) metadata.getMandatoryConcurrentStreamCombinations(); 632 } 633 }); 634 635 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey()636 CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() { 637 @Override 638 @SuppressWarnings("unchecked") 639 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 640 return (T) metadata.getMaxRegions(key); 641 } 642 }); 643 sGetCommandMap.put( GetCommand()644 CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() { 645 @Override 646 @SuppressWarnings("unchecked") 647 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 648 return (T) metadata.getMaxRegions(key); 649 } 650 }); 651 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey()652 CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() { 653 @Override 654 @SuppressWarnings("unchecked") 655 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 656 return (T) 
metadata.getMaxRegions(key); 657 } 658 }); 659 sGetCommandMap.put( GetCommand()660 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() { 661 @Override 662 @SuppressWarnings("unchecked") 663 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 664 return (T) metadata.getMaxNumOutputs(key); 665 } 666 }); 667 sGetCommandMap.put( GetCommand()668 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() { 669 @Override 670 @SuppressWarnings("unchecked") 671 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 672 return (T) metadata.getMaxNumOutputs(key); 673 } 674 }); 675 sGetCommandMap.put( CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey()676 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(), 677 new GetCommand() { 678 @Override 679 @SuppressWarnings("unchecked") 680 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 681 return (T) metadata.getMaxNumOutputs(key); 682 } 683 }); 684 sGetCommandMap.put( GetCommand()685 CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() { 686 @Override 687 @SuppressWarnings("unchecked") 688 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 689 return (T) metadata.getTonemapCurve(); 690 } 691 }); 692 sGetCommandMap.put( GetCommand()693 CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() { 694 @Override 695 @SuppressWarnings("unchecked") 696 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 697 return (T) metadata.getGpsLocation(); 698 } 699 }); 700 sGetCommandMap.put( CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey()701 CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(), 702 new GetCommand() { 703 @Override 704 @SuppressWarnings("unchecked") 705 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 706 return (T) metadata.getLensShadingMap(); 707 } 708 }); 709 sGetCommandMap.put( 
CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey()710 CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(), 711 new GetCommand() { 712 @Override 713 @SuppressWarnings("unchecked") 714 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 715 return (T) metadata.getOisSamples(); 716 } 717 }); 718 sGetCommandMap.put( CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey()719 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey(), 720 new GetCommand() { 721 @Override 722 @SuppressWarnings("unchecked") 723 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 724 return (T) metadata.getExtendedSceneModeCapabilities(); 725 } 726 }); 727 } 728 getAvailableFormats()729 private int[] getAvailableFormats() { 730 int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS); 731 if (availableFormats != null) { 732 for (int i = 0; i < availableFormats.length; i++) { 733 // JPEG has different value between native and managed side, need override. 
734 if (availableFormats[i] == NATIVE_JPEG_FORMAT) { 735 availableFormats[i] = ImageFormat.JPEG; 736 } 737 } 738 } 739 740 return availableFormats; 741 } 742 setFaces(Face[] faces)743 private boolean setFaces(Face[] faces) { 744 if (faces == null) { 745 return false; 746 } 747 748 int numFaces = faces.length; 749 750 // Detect if all faces are SIMPLE or not; count # of valid faces 751 boolean fullMode = true; 752 for (Face face : faces) { 753 if (face == null) { 754 numFaces--; 755 Log.w(TAG, "setFaces - null face detected, skipping"); 756 continue; 757 } 758 759 if (face.getId() == Face.ID_UNSUPPORTED) { 760 fullMode = false; 761 } 762 } 763 764 Rect[] faceRectangles = new Rect[numFaces]; 765 byte[] faceScores = new byte[numFaces]; 766 int[] faceIds = null; 767 int[] faceLandmarks = null; 768 769 if (fullMode) { 770 faceIds = new int[numFaces]; 771 faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE]; 772 } 773 774 int i = 0; 775 for (Face face : faces) { 776 if (face == null) { 777 continue; 778 } 779 780 faceRectangles[i] = face.getBounds(); 781 faceScores[i] = (byte)face.getScore(); 782 783 if (fullMode) { 784 faceIds[i] = face.getId(); 785 786 int j = 0; 787 788 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x; 789 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y; 790 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x; 791 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y; 792 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x; 793 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y; 794 } 795 796 i++; 797 } 798 799 set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles); 800 set(CaptureResult.STATISTICS_FACE_IDS, faceIds); 801 set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks); 802 set(CaptureResult.STATISTICS_FACE_SCORES, faceScores); 803 804 return true; 805 } 806 getFaces()807 private Face[] getFaces() 
{ 808 Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE); 809 byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES); 810 Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES); 811 int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS); 812 int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS); 813 814 if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) { 815 return null; 816 } 817 818 if (faceDetectMode == null) { 819 Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE"); 820 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 821 } else if (faceDetectMode > CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 822 // Face detect mode is larger than FULL, assuming the mode is FULL 823 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL; 824 } else { 825 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) { 826 return new Face[0]; 827 } 828 if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE && 829 faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 830 Log.w(TAG, "Unknown face detect mode: " + faceDetectMode); 831 return new Face[0]; 832 } 833 } 834 835 // Face scores and rectangles are required by SIMPLE and FULL mode. 836 if (faceScores == null || faceRectangles == null) { 837 Log.w(TAG, "Expect face scores and rectangles to be non-null"); 838 return new Face[0]; 839 } else if (faceScores.length != faceRectangles.length) { 840 Log.w(TAG, String.format("Face score size(%d) doesn match face rectangle size(%d)!", 841 faceScores.length, faceRectangles.length)); 842 } 843 844 // To be safe, make number of faces is the minimal of all face info metadata length. 845 int numFaces = Math.min(faceScores.length, faceRectangles.length); 846 // Face id and landmarks are only required by FULL mode. 
847 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 848 if (faceIds == null || faceLandmarks == null) { 849 Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode," + 850 "fallback to SIMPLE mode"); 851 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 852 } else { 853 if (faceIds.length != numFaces || 854 faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) { 855 Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't" + 856 "match face number(%d)!", 857 faceIds.length, faceLandmarks.length * FACE_LANDMARK_SIZE, numFaces)); 858 } 859 // To be safe, make number of faces is the minimal of all face info metadata length. 860 numFaces = Math.min(numFaces, faceIds.length); 861 numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE); 862 } 863 } 864 865 ArrayList<Face> faceList = new ArrayList<Face>(); 866 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) { 867 for (int i = 0; i < numFaces; i++) { 868 if (faceScores[i] <= Face.SCORE_MAX && 869 faceScores[i] >= Face.SCORE_MIN) { 870 faceList.add(new Face(faceRectangles[i], faceScores[i])); 871 } 872 } 873 } else { 874 // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL 875 for (int i = 0; i < numFaces; i++) { 876 if (faceScores[i] <= Face.SCORE_MAX && 877 faceScores[i] >= Face.SCORE_MIN && 878 faceIds[i] >= 0) { 879 Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE], 880 faceLandmarks[i*FACE_LANDMARK_SIZE+1]); 881 Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2], 882 faceLandmarks[i*FACE_LANDMARK_SIZE+3]); 883 Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4], 884 faceLandmarks[i*FACE_LANDMARK_SIZE+5]); 885 Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i], 886 leftEye, rightEye, mouth); 887 faceList.add(face); 888 } 889 } 890 } 891 Face[] faces = new Face[faceList.size()]; 892 faceList.toArray(faces); 893 return faces; 894 } 895 896 // Face rectangles 
are defined as (left, top, right, bottom) instead of 897 // (left, top, width, height) at the native level, so the normal Rect 898 // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo 899 // that conversion here for just the faces. getFaceRectangles()900 private Rect[] getFaceRectangles() { 901 Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES); 902 if (faceRectangles == null) return null; 903 904 Rect[] fixedFaceRectangles = new Rect[faceRectangles.length]; 905 for (int i = 0; i < faceRectangles.length; i++) { 906 fixedFaceRectangles[i] = new Rect( 907 faceRectangles[i].left, 908 faceRectangles[i].top, 909 faceRectangles[i].right - faceRectangles[i].left, 910 faceRectangles[i].bottom - faceRectangles[i].top); 911 } 912 return fixedFaceRectangles; 913 } 914 getLensShadingMap()915 private LensShadingMap getLensShadingMap() { 916 float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP); 917 Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE); 918 919 // Do not warn if lsmArray is null while s is not. This is valid. 920 if (lsmArray == null) { 921 return null; 922 } 923 924 if (s == null) { 925 Log.w(TAG, "getLensShadingMap - Lens shading map size was null."); 926 return null; 927 } 928 929 LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth()); 930 return map; 931 } 932 getGpsLocation()933 private Location getGpsLocation() { 934 String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD); 935 double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES); 936 Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP); 937 938 if (areValuesAllNull(processingMethod, coords, timeStamp)) { 939 return null; 940 } 941 942 Location l = new Location(translateProcessToLocationProvider(processingMethod)); 943 if (timeStamp != null) { 944 // Location expects timestamp in [ms.] 
945 l.setTime(timeStamp * 1000); 946 } else { 947 Log.w(TAG, "getGpsLocation - No timestamp for GPS location."); 948 } 949 950 if (coords != null) { 951 l.setLatitude(coords[0]); 952 l.setLongitude(coords[1]); 953 l.setAltitude(coords[2]); 954 } else { 955 Log.w(TAG, "getGpsLocation - No coordinates for GPS location"); 956 } 957 958 return l; 959 } 960 setGpsLocation(Location l)961 private boolean setGpsLocation(Location l) { 962 if (l == null) { 963 return false; 964 } 965 966 double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() }; 967 String processMethod = translateLocationProviderToProcess(l.getProvider()); 968 //JPEG_GPS_TIMESTAMP expects sec. instead of msec. 969 long timestamp = l.getTime() / 1000; 970 971 set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp); 972 set(CaptureRequest.JPEG_GPS_COORDINATES, coords); 973 974 if (processMethod == null) { 975 Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or NETWORK" + 976 "provider"); 977 } else { 978 setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod); 979 } 980 return true; 981 } 982 parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, StreamConfigurationMap fullMap, boolean isDepth, ArrayList<ArrayList<StreamConfiguration>> streamConfigList, ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList, ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList, boolean[] supportsPrivate)983 private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, 984 StreamConfigurationMap fullMap, boolean isDepth, 985 ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList, 986 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList, 987 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList, 988 boolean[] /*out*/supportsPrivate) { 989 990 streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 991 
streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 992 streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 993 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 994 streamConfigList.add(new ArrayList<StreamConfiguration> ()); 995 streamDurationList.add(new ArrayList<StreamConfigurationDuration> ()); 996 streamStallList.add(new ArrayList<StreamConfigurationDuration> ()); 997 } 998 999 for (RecommendedStreamConfiguration c : configurations) { 1000 int width = c.getWidth(); 1001 int height = c.getHeight(); 1002 int internalFormat = c.getFormat(); 1003 int publicFormat = 1004 (isDepth) ? StreamConfigurationMap.depthFormatToPublic(internalFormat) : 1005 StreamConfigurationMap.imageFormatToPublic(internalFormat); 1006 Size sz = new Size(width, height); 1007 int usecaseBitmap = c.getUsecaseBitmap(); 1008 1009 if (!c.isInput()) { 1010 StreamConfigurationDuration minDurationConfiguration = null; 1011 StreamConfigurationDuration stallDurationConfiguration = null; 1012 1013 StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat, 1014 width, height, /*input*/ false); 1015 1016 long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz); 1017 if (minFrameDuration > 0) { 1018 minDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1019 width, height, minFrameDuration); 1020 } 1021 1022 long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz); 1023 if (stallDuration > 0) { 1024 stallDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1025 width, height, stallDuration); 1026 } 1027 1028 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1029 if ((usecaseBitmap & (1 << i)) != 0) { 1030 ArrayList<StreamConfiguration> sc = streamConfigList.get(i); 1031 sc.add(streamConfiguration); 1032 1033 if (minFrameDuration > 0) { 1034 ArrayList<StreamConfigurationDuration> scd 
= streamDurationList.get(i); 1035 scd.add(minDurationConfiguration); 1036 } 1037 1038 if (stallDuration > 0) { 1039 ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i); 1040 scs.add(stallDurationConfiguration); 1041 } 1042 1043 if ((supportsPrivate != null) && !supportsPrivate[i] && 1044 (publicFormat == ImageFormat.PRIVATE)) { 1045 supportsPrivate[i] = true; 1046 } 1047 } 1048 } 1049 } else { 1050 if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) { 1051 throw new IllegalArgumentException("Recommended input stream configurations " + 1052 "should only be advertised in the ZSL use case!"); 1053 } 1054 1055 ArrayList<StreamConfiguration> sc = streamConfigList.get( 1056 RecommendedStreamConfigurationMap.USECASE_ZSL); 1057 sc.add(new StreamConfiguration(internalFormat, 1058 width, height, /*input*/ true)); 1059 } 1060 } 1061 } 1062 1063 private class StreamConfigurationData { 1064 StreamConfiguration [] streamConfigurationArray = null; 1065 StreamConfigurationDuration [] minDurationArray = null; 1066 StreamConfigurationDuration [] stallDurationArray = null; 1067 } 1068 initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, StreamConfigurationData scData)1069 public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, 1070 ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, 1071 StreamConfigurationData /*out*/scData) { 1072 if ((scData == null) || (sc == null)) { 1073 return; 1074 } 1075 1076 scData.streamConfigurationArray = new StreamConfiguration[sc.size()]; 1077 scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray); 1078 1079 if ((scd != null) && !scd.isEmpty()) { 1080 scData.minDurationArray = new StreamConfigurationDuration[scd.size()]; 1081 scData.minDurationArray = scd.toArray(scData.minDurationArray); 1082 } else { 1083 
scData.minDurationArray = new StreamConfigurationDuration[0]; 1084 } 1085 1086 if ((scs != null) && !scs.isEmpty()) { 1087 scData.stallDurationArray = new StreamConfigurationDuration[scs.size()]; 1088 scData.stallDurationArray = scs.toArray(scData.stallDurationArray); 1089 } else { 1090 scData.stallDurationArray = new StreamConfigurationDuration[0]; 1091 } 1092 } 1093 1094 /** 1095 * Retrieve the list of recommended stream configurations. 1096 * 1097 * @return A list of recommended stream configuration maps for each common use case or null 1098 * in case the recommended stream configurations are invalid or incomplete. 1099 * @hide 1100 */ getRecommendedStreamConfigurations()1101 public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() { 1102 RecommendedStreamConfiguration[] configurations = getBase( 1103 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS); 1104 RecommendedStreamConfiguration[] depthConfigurations = getBase( 1105 CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS); 1106 if ((configurations == null) && (depthConfigurations == null)) { 1107 return null; 1108 } 1109 1110 StreamConfigurationMap fullMap = getStreamConfigurationMap(); 1111 ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations = 1112 new ArrayList<RecommendedStreamConfigurationMap> (); 1113 1114 ArrayList<ArrayList<StreamConfiguration>> streamConfigList = 1115 new ArrayList<ArrayList<StreamConfiguration>>(); 1116 ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList = 1117 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1118 ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList = 1119 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1120 boolean[] supportsPrivate = 1121 new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT]; 1122 try { 1123 if (configurations != null) { 1124 parseRecommendedConfigurations(configurations, fullMap, 
/*isDepth*/ false, 1125 streamConfigList, streamDurationList, streamStallList, supportsPrivate); 1126 } 1127 } catch (IllegalArgumentException e) { 1128 Log.e(TAG, "Failed parsing the recommended stream configurations!"); 1129 return null; 1130 } 1131 1132 ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList = 1133 new ArrayList<ArrayList<StreamConfiguration>>(); 1134 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList = 1135 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1136 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList = 1137 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1138 if (depthConfigurations != null) { 1139 try { 1140 parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true, 1141 depthStreamConfigList, depthStreamDurationList, depthStreamStallList, 1142 /*supportsPrivate*/ null); 1143 } catch (IllegalArgumentException e) { 1144 Log.e(TAG, "Failed parsing the recommended depth stream configurations!"); 1145 return null; 1146 } 1147 } 1148 1149 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1150 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP); 1151 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1152 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1153 boolean listHighResolution = isBurstSupported(); 1154 recommendedConfigurations.ensureCapacity( 1155 RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1156 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1157 StreamConfigurationData scData = new StreamConfigurationData(); 1158 if (configurations != null) { 1159 initializeStreamConfigurationData(streamConfigList.get(i), 1160 streamDurationList.get(i), streamStallList.get(i), scData); 1161 } 1162 1163 StreamConfigurationData depthScData = new StreamConfigurationData(); 1164 if (depthConfigurations != null) { 1165 
initializeStreamConfigurationData(depthStreamConfigList.get(i), 1166 depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData); 1167 } 1168 1169 if ((scData.streamConfigurationArray == null || 1170 scData.streamConfigurationArray.length == 0) && 1171 (depthScData.streamConfigurationArray == null || 1172 depthScData.streamConfigurationArray.length == 0)) { 1173 recommendedConfigurations.add(null); 1174 continue; 1175 } 1176 1177 // Dynamic depth streams involve alot of SW processing and currently cannot be 1178 // recommended. 1179 StreamConfigurationMap map = null; 1180 switch (i) { 1181 case RecommendedStreamConfigurationMap.USECASE_PREVIEW: 1182 case RecommendedStreamConfigurationMap.USECASE_RAW: 1183 case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT: 1184 case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT: 1185 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1186 scData.minDurationArray, scData.stallDurationArray, 1187 /*depthconfiguration*/ null, /*depthminduration*/ null, 1188 /*depthstallduration*/ null, 1189 /*dynamicDepthConfigurations*/ null, 1190 /*dynamicDepthMinFrameDurations*/ null, 1191 /*dynamicDepthStallDurations*/ null, 1192 /*heicconfiguration*/ null, 1193 /*heicminduration*/ null, 1194 /*heicstallduration*/ null, 1195 /*highspeedvideoconfigurations*/ null, 1196 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1197 break; 1198 case RecommendedStreamConfigurationMap.USECASE_RECORD: 1199 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1200 scData.minDurationArray, scData.stallDurationArray, 1201 /*depthconfiguration*/ null, /*depthminduration*/ null, 1202 /*depthstallduration*/ null, 1203 /*dynamicDepthConfigurations*/ null, 1204 /*dynamicDepthMinFrameDurations*/ null, 1205 /*dynamicDepthStallDurations*/ null, 1206 /*heicconfiguration*/ null, 1207 /*heicminduration*/ null, 1208 /*heicstallduration*/ null, 1209 highSpeedVideoConfigurations, 1210 
/*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1211 break; 1212 case RecommendedStreamConfigurationMap.USECASE_ZSL: 1213 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1214 scData.minDurationArray, scData.stallDurationArray, 1215 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1216 depthScData.stallDurationArray, 1217 /*dynamicDepthConfigurations*/ null, 1218 /*dynamicDepthMinFrameDurations*/ null, 1219 /*dynamicDepthStallDurations*/ null, 1220 /*heicconfiguration*/ null, 1221 /*heicminduration*/ null, 1222 /*heicstallduration*/ null, 1223 /*highSpeedVideoConfigurations*/ null, 1224 inputOutputFormatsMap, listHighResolution, supportsPrivate[i]); 1225 break; 1226 default: 1227 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1228 scData.minDurationArray, scData.stallDurationArray, 1229 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1230 depthScData.stallDurationArray, 1231 /*dynamicDepthConfigurations*/ null, 1232 /*dynamicDepthMinFrameDurations*/ null, 1233 /*dynamicDepthStallDurations*/ null, 1234 /*heicconfiguration*/ null, 1235 /*heicminduration*/ null, 1236 /*heicstallduration*/ null, 1237 /*highSpeedVideoConfigurations*/ null, 1238 /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]); 1239 } 1240 1241 recommendedConfigurations.add(new RecommendedStreamConfigurationMap(map, /*usecase*/i, 1242 supportsPrivate[i])); 1243 } 1244 1245 return recommendedConfigurations; 1246 } 1247 isBurstSupported()1248 private boolean isBurstSupported() { 1249 boolean ret = false; 1250 1251 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1252 for (int capability : capabilities) { 1253 if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE) { 1254 ret = true; 1255 break; 1256 } 1257 } 1258 1259 return ret; 1260 } 1261 getMandatoryStreamCombinationsHelper( boolean getConcurrent)1262 private 
MandatoryStreamCombination[] getMandatoryStreamCombinationsHelper( 1263 boolean getConcurrent) { 1264 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1265 ArrayList<Integer> caps = new ArrayList<Integer>(); 1266 caps.ensureCapacity(capabilities.length); 1267 for (int c : capabilities) { 1268 caps.add(new Integer(c)); 1269 } 1270 int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 1271 MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder( 1272 mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap()); 1273 1274 List<MandatoryStreamCombination> combs = null; 1275 if (getConcurrent) { 1276 combs = build.getAvailableMandatoryConcurrentStreamCombinations(); 1277 } else { 1278 combs = build.getAvailableMandatoryStreamCombinations(); 1279 } 1280 if ((combs != null) && (!combs.isEmpty())) { 1281 MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()]; 1282 combArray = combs.toArray(combArray); 1283 return combArray; 1284 } 1285 1286 return null; 1287 } 1288 getMandatoryConcurrentStreamCombinations()1289 private MandatoryStreamCombination[] getMandatoryConcurrentStreamCombinations() { 1290 if (!mHasMandatoryConcurrentStreams) { 1291 return null; 1292 } 1293 return getMandatoryStreamCombinationsHelper(true); 1294 } 1295 getMandatoryStreamCombinations()1296 private MandatoryStreamCombination[] getMandatoryStreamCombinations() { 1297 return getMandatoryStreamCombinationsHelper(false); 1298 } 1299 getStreamConfigurationMap()1300 private StreamConfigurationMap getStreamConfigurationMap() { 1301 StreamConfiguration[] configurations = getBase( 1302 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS); 1303 StreamConfigurationDuration[] minFrameDurations = getBase( 1304 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS); 1305 StreamConfigurationDuration[] stallDurations = getBase( 1306 
CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS); 1307 StreamConfiguration[] depthConfigurations = getBase( 1308 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS); 1309 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1310 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); 1311 StreamConfigurationDuration[] depthStallDurations = getBase( 1312 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); 1313 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1314 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS); 1315 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1316 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS); 1317 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1318 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS); 1319 StreamConfiguration[] heicConfigurations = getBase( 1320 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); 1321 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1322 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); 1323 StreamConfigurationDuration[] heicStallDurations = getBase( 1324 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); 1325 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1326 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1327 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1328 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP); 1329 boolean listHighResolution = isBurstSupported(); 1330 return new StreamConfigurationMap( 1331 configurations, minFrameDurations, stallDurations, 1332 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1333 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1334 dynamicDepthStallDurations, heicConfigurations, 1335 heicMinFrameDurations, heicStallDurations, 1336 
highSpeedVideoConfigurations, inputOutputFormatsMap, 1337 listHighResolution); 1338 } 1339 getMaxRegions(Key<T> key)1340 private <T> Integer getMaxRegions(Key<T> key) { 1341 final int AE = 0; 1342 final int AWB = 1; 1343 final int AF = 2; 1344 1345 // The order of the elements is: (AE, AWB, AF) 1346 int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS); 1347 1348 if (maxRegions == null) { 1349 return null; 1350 } 1351 1352 if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) { 1353 return maxRegions[AE]; 1354 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) { 1355 return maxRegions[AWB]; 1356 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) { 1357 return maxRegions[AF]; 1358 } else { 1359 throw new AssertionError("Invalid key " + key); 1360 } 1361 } 1362 getMaxNumOutputs(Key<T> key)1363 private <T> Integer getMaxNumOutputs(Key<T> key) { 1364 final int RAW = 0; 1365 final int PROC = 1; 1366 final int PROC_STALLING = 2; 1367 1368 // The order of the elements is: (raw, proc+nonstalling, proc+stalling) 1369 int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS); 1370 1371 if (maxNumOutputs == null) { 1372 return null; 1373 } 1374 1375 if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) { 1376 return maxNumOutputs[RAW]; 1377 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) { 1378 return maxNumOutputs[PROC]; 1379 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) { 1380 return maxNumOutputs[PROC_STALLING]; 1381 } else { 1382 throw new AssertionError("Invalid key " + key); 1383 } 1384 } 1385 getTonemapCurve()1386 private <T> TonemapCurve getTonemapCurve() { 1387 float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED); 1388 float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN); 1389 float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE); 1390 1391 if (areValuesAllNull(red, green, blue)) { 1392 return 
null; 1393 } 1394 1395 if (red == null || green == null || blue == null) { 1396 Log.w(TAG, "getTonemapCurve - missing tone curve components"); 1397 return null; 1398 } 1399 TonemapCurve tc = new TonemapCurve(red, green, blue); 1400 return tc; 1401 } 1402 getOisSamples()1403 private OisSample[] getOisSamples() { 1404 long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS); 1405 float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS); 1406 float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS); 1407 1408 if (timestamps == null) { 1409 if (xShifts != null) { 1410 throw new AssertionError("timestamps is null but xShifts is not"); 1411 } 1412 1413 if (yShifts != null) { 1414 throw new AssertionError("timestamps is null but yShifts is not"); 1415 } 1416 1417 return null; 1418 } 1419 1420 if (xShifts == null) { 1421 throw new AssertionError("timestamps is not null but xShifts is"); 1422 } 1423 1424 if (yShifts == null) { 1425 throw new AssertionError("timestamps is not null but yShifts is"); 1426 } 1427 1428 if (xShifts.length != timestamps.length) { 1429 throw new AssertionError(String.format( 1430 "timestamps has %d entries but xShifts has %d", timestamps.length, 1431 xShifts.length)); 1432 } 1433 1434 if (yShifts.length != timestamps.length) { 1435 throw new AssertionError(String.format( 1436 "timestamps has %d entries but yShifts has %d", timestamps.length, 1437 yShifts.length)); 1438 } 1439 1440 OisSample[] samples = new OisSample[timestamps.length]; 1441 for (int i = 0; i < timestamps.length; i++) { 1442 samples[i] = new OisSample(timestamps[i], xShifts[i], yShifts[i]); 1443 } 1444 return samples; 1445 } 1446 getExtendedSceneModeCapabilities()1447 private Capability[] getExtendedSceneModeCapabilities() { 1448 int[] maxSizes = 1449 getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES); 1450 float[] zoomRanges = getBase( 1451 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES); 1452 
Range<Float> zoomRange = getBase(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE); 1453 float maxDigitalZoom = getBase(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); 1454 1455 if (maxSizes == null) { 1456 return null; 1457 } 1458 if (maxSizes.length % 3 != 0) { 1459 throw new AssertionError("availableExtendedSceneModeMaxSizes must be tuples of " 1460 + "[mode, width, height]"); 1461 } 1462 int numExtendedSceneModes = maxSizes.length / 3; 1463 int numExtendedSceneModeZoomRanges = 0; 1464 if (zoomRanges != null) { 1465 if (zoomRanges.length % 2 != 0) { 1466 throw new AssertionError("availableExtendedSceneModeZoomRanges must be tuples of " 1467 + "[minZoom, maxZoom]"); 1468 } 1469 numExtendedSceneModeZoomRanges = zoomRanges.length / 2; 1470 if (numExtendedSceneModes - numExtendedSceneModeZoomRanges != 1) { 1471 throw new AssertionError("Number of extended scene mode zoom ranges must be 1 " 1472 + "less than number of supported modes"); 1473 } 1474 } 1475 1476 float modeOffMinZoomRatio = 1.0f; 1477 float modeOffMaxZoomRatio = maxDigitalZoom; 1478 if (zoomRange != null) { 1479 modeOffMinZoomRatio = zoomRange.getLower(); 1480 modeOffMaxZoomRatio = zoomRange.getUpper(); 1481 } 1482 1483 Capability[] capabilities = new Capability[numExtendedSceneModes]; 1484 for (int i = 0, j = 0; i < numExtendedSceneModes; i++) { 1485 int mode = maxSizes[3 * i]; 1486 int width = maxSizes[3 * i + 1]; 1487 int height = maxSizes[3 * i + 2]; 1488 if (mode != CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED 1489 && j < numExtendedSceneModeZoomRanges) { 1490 capabilities[i] = new Capability(mode, width, height, zoomRanges[2 * j], 1491 zoomRanges[2 * j + 1]); 1492 j++; 1493 } else { 1494 capabilities[i] = new Capability(mode, width, height, modeOffMinZoomRatio, 1495 modeOffMaxZoomRatio); 1496 } 1497 } 1498 1499 return capabilities; 1500 } 1501 setBase(CameraCharacteristics.Key<T> key, T value)1502 private <T> void setBase(CameraCharacteristics.Key<T> key, T value) { 1503 
setBase(key.getNativeKey(), value); 1504 } 1505 setBase(CaptureResult.Key<T> key, T value)1506 private <T> void setBase(CaptureResult.Key<T> key, T value) { 1507 setBase(key.getNativeKey(), value); 1508 } 1509 setBase(CaptureRequest.Key<T> key, T value)1510 private <T> void setBase(CaptureRequest.Key<T> key, T value) { 1511 setBase(key.getNativeKey(), value); 1512 } 1513 setBase(Key<T> key, T value)1514 private <T> void setBase(Key<T> key, T value) { 1515 int tag; 1516 if (key.hasTag()) { 1517 tag = key.getTag(); 1518 } else { 1519 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 1520 key.cacheTag(tag); 1521 } 1522 if (value == null) { 1523 // Erase the entry 1524 writeValues(tag, /*src*/null); 1525 return; 1526 } // else update the entry to a new value 1527 1528 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 1529 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 1530 int size = marshaler.calculateMarshalSize(value); 1531 1532 // TODO: Optimization. Cache the byte[] and reuse if the size is big enough. 1533 byte[] values = new byte[size]; 1534 1535 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 1536 marshaler.marshal(value, buffer); 1537 1538 writeValues(tag, values); 1539 } 1540 1541 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 1542 // metadata. 
    // Dispatch table mapping native keys with custom set() behavior to the code that
    // performs the translation before writing into the native metadata buffer.
    private static final HashMap<Key<?>, SetCommand> sSetCommandMap =
            new HashMap<Key<?>, SetCommand>();
    static {
        sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setAvailableFormats((int[]) value);
            }
        });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setFaceRectangles((Rect[]) value);
            }
        });
        sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
                new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setFaces((Face[])value);
            }
        });
        sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setTonemapCurve((TonemapCurve) value);
            }
        });
        sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() {
            @Override
            public <T> void setValue(CameraMetadataNative metadata, T value) {
                metadata.setGpsLocation((Location) value);
            }
        });
    }

    // Translate public JPEG format constants to the native JPEG format before writing
    // the available-formats list. Returns false for null so setBase() erases the entry.
    private boolean setAvailableFormats(int[] value) {
        int[] availableFormat = value;
        if (value == null) {
            // Let setBase() to handle the null value case.
            return false;
        }

        int[] newValues = new int[availableFormat.length];
        for (int i = 0; i < availableFormat.length; i++) {
            newValues[i] = availableFormat[i];
            if (availableFormat[i] == ImageFormat.JPEG) {
                newValues[i] = NATIVE_JPEG_FORMAT;
            }
        }

        setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues);
        return true;
    }

    /**
     * Convert Face Rectangles from managed side to native side as they have different definitions.
     * <p>
     * Managed side face rectangles are defined as: left, top, width, height.
     * Native side face rectangles are defined as: left, top, right, bottom.
     * The input face rectangle need to be converted to native side definition when set is called.
     * </p>
     *
     * @param faceRects Input face rectangles.
     * @return true if face rectangles can be set successfully. Otherwise, Let the caller
     *             (setBase) to handle it appropriately.
     */
    private boolean setFaceRectangles(Rect[] faceRects) {
        if (faceRects == null) {
            return false;
        }

        Rect[] newFaceRects = new Rect[faceRects.length];
        for (int i = 0; i < newFaceRects.length; i++) {
            // The managed Rect carries (l, t, w, h) in its four fields, so
            // right_native = left + width and bottom_native = top + height.
            newFaceRects[i] = new Rect(
                    faceRects[i].left,
                    faceRects[i].top,
                    faceRects[i].right + faceRects[i].left,
                    faceRects[i].bottom + faceRects[i].top);
        }

        setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects);
        return true;
    }

    // Split a TonemapCurve into its three per-channel float arrays and store each one.
    // Returns false for null so setBase() erases the entry.
    private <T> boolean setTonemapCurve(TonemapCurve tc) {
        if (tc == null) {
            return false;
        }

        float[][] curve = new float[3][];
        for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) {
            int pointCount = tc.getPointCount(i);
            curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE];
            tc.copyColorCurve(i, curve[i], 0);
        }
        setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]);
        setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]);
        setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]);

        return true;
    }

    // Per-instance state injected by the camera framework after construction.
    private int mCameraId = -1;
    private boolean mHasMandatoryConcurrentStreams = false;
    private Size mDisplaySize = new Size(0, 0);

    /**
     * Set the current camera Id.
     *
     * @param cameraId Current camera id.
     *
     * @hide
     */
    public void setCameraId(int cameraId) {
        mCameraId = cameraId;
    }

    /**
     * Set whether the metadata advertises mandatory concurrent stream combinations.
     *
     * @param hasMandatoryConcurrentStreams whether the metadata advertises mandatory concurrent
     *                                      streams.
     *
     * @hide
     */
    public void setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams) {
        mHasMandatoryConcurrentStreams = hasMandatoryConcurrentStreams;
    }

    /**
     * Set the current display size.
     *
     * @param displaySize The current display size.
     *
     * @hide
     */
    public void setDisplaySize(Size displaySize) {
        mDisplaySize = displaySize;
    }

    @UnsupportedAppUsage
    private long mMetadataPtr; // native std::shared_ptr<CameraMetadata>*

    @FastNative
    private static native long nativeAllocate();
    @FastNative
    private static native long nativeAllocateCopy(long ptr)
            throws NullPointerException;

    @FastNative
    private static synchronized native void nativeWriteToParcel(Parcel dest, long ptr);
    @FastNative
    private static synchronized native void nativeReadFromParcel(Parcel source, long ptr);
    @FastNative
    private static synchronized native void nativeSwap(long ptr, long otherPtr)
            throws NullPointerException;
    @FastNative
    private static synchronized native void nativeClose(long ptr);
    @FastNative
    private static synchronized native boolean nativeIsEmpty(long ptr);
    @FastNative
    private static synchronized native int nativeGetEntryCount(long ptr);

    @UnsupportedAppUsage
    @FastNative
    private static synchronized native byte[] nativeReadValues(int tag, long ptr);
    @FastNative
    private static synchronized native void nativeWriteValues(int tag, byte[] src, long ptr);
    private static synchronized native void nativeDump(long ptr) throws IOException; // dump to LOGD

    @FastNative
    private static synchronized native ArrayList nativeGetAllVendorKeys(long ptr, Class keyClass);
    @UnsupportedAppUsage
    @FastNative
    private static synchronized native int nativeGetTagFromKeyLocal(long ptr, String keyName)
            throws IllegalArgumentException;
    @UnsupportedAppUsage
    @FastNative
    private static synchronized native int nativeGetTypeFromTagLocal(long ptr, int tag)
            throws IllegalArgumentException;
    @FastNative
    private static native int nativeGetTagFromKey(String keyName, long vendorId)
            throws IllegalArgumentException;
    @FastNative
    private static native int nativeGetTypeFromTag(int tag, long vendorId)
            throws IllegalArgumentException;

    /**
     * <p>Perform a 0-copy swap of the internal metadata with another object.</p>
     *
     * <p>Useful to convert a CameraMetadata into e.g. a CaptureRequest.</p>
     *
     * @param other Metadata to swap with
     * @throws NullPointerException if other was null
     * @hide
     */
    public void swap(CameraMetadataNative other) {
        nativeSwap(mMetadataPtr, other.mMetadataPtr);
        // Keep the Java-side framework state in sync with the swapped native buffer.
        mCameraId = other.mCameraId;
        mHasMandatoryConcurrentStreams = other.mHasMandatoryConcurrentStreams;
        mDisplaySize = other.mDisplaySize;
    }

    /**
     * Number of entries in the native metadata buffer.
     *
     * @hide
     */
    public int getEntryCount() {
        return nativeGetEntryCount(mMetadataPtr);
    }

    /**
     * Whether this metadata contains no entries.
     *
     * @hide
     */
    public boolean isEmpty() {
        return nativeIsEmpty(mMetadataPtr);
    }


    /**
     * Retrieves the pointer to the native shared_ptr<CameraMetadata> as a Java long.
     *
     * @hide
     */
    public long getMetadataPtr() {
        return mMetadataPtr;
    }

    /**
     * Return a list containing keys of the given key class for all defined vendor tags.
     *
     * @hide
     */
    public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) {
        if (keyClass == null) {
            throw new NullPointerException();
        }
        // Unchecked cast: the native side returns a raw ArrayList whose
        // elements are expected to be keys of keyClass.
        return (ArrayList<K>) nativeGetAllVendorKeys(mMetadataPtr, keyClass);
    }

    /**
     * Convert a key string into the equivalent native tag.
     *
     * @throws IllegalArgumentException if the key was not recognized
     * @throws NullPointerException if the key was null
     *
     * @hide
     */
    public static int getTag(String key) {
        // Long.MAX_VALUE appears to act as the "no specific vendor" provider
        // id — TODO confirm against the native tag-lookup implementation.
        return nativeGetTagFromKey(key, Long.MAX_VALUE);
    }

    /**
     * Convert a key string into the equivalent native tag.
     *
     * @throws IllegalArgumentException if the key was not recognized
     * @throws NullPointerException if the key was null
     *
     * @hide
     */
    public static int getTag(String key, long vendorId) {
        return nativeGetTagFromKey(key, vendorId);
    }

    /**
     * Get the underlying native type for a tag.
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     * @param vendorId A vendor tag provider id
     * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE}
     *
     * @hide
     */
    public static int getNativeType(int tag, long vendorId) {
        return nativeGetTypeFromTag(tag, vendorId);
    }

    /**
     * <p>Updates the existing entry for tag with the new bytes pointed by src, erasing
     * the entry if src was null.</p>
     *
     * <p>An empty array can be passed in to update the entry to 0 elements.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     * @param src An array of bytes, or null to erase the entry
     *
     * @hide
     */
    public void writeValues(int tag, byte[] src) {
        nativeWriteValues(tag, src, mMetadataPtr);
    }

    /**
     * <p>Returns a byte[] of data corresponding to this tag. Use a wrapped bytebuffer to unserialize
     * the data properly.</p>
     *
     * <p>An empty array can be returned to denote an existing entry with 0 elements.</p>
     *
     * @param tag An integer tag, see e.g. {@link #getTag}
     *
     * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise.
     * @hide
     */
    public byte[] readValues(int tag) {
        // TODO: Optimization. Native code returns a ByteBuffer instead.
        return nativeReadValues(tag, mMetadataPtr);
    }

    /**
     * Dumps the native metadata contents to logcat.
     *
     * <p>Visibility for testing/debugging only. The results will not
     * include any synthesized keys, as they are invisible to the native layer.</p>
     *
     * @hide
     */
    public void dumpToLog() {
        try {
            nativeDump(mMetadataPtr);
        } catch (IOException e) {
            // Best-effort debug facility: flag the failure loudly but never
            // propagate it to the caller.
            Log.wtf(TAG, "Dump logging failed", e);
        }
    }

    // Safety net: release the native metadata if the owner never called
    // close() explicitly.
    @Override
    protected void finalize() throws Throwable {
        try {
            close();
        } finally {
            super.finalize();
        }
    }

    /**
     * Get the marshaler compatible with the {@code key} and type {@code T}.
     *
     * @throws UnsupportedOperationException
     *          if the native/managed type combination for {@code key} is not supported
     */
    private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) {
        return MarshalRegistry.getMarshaler(key.getTypeReference(),
                nativeType);
    }

    // Registers a marshaler for every supported native <-> managed type
    // mapping. Invoked exactly once, from the static initializer at the
    // bottom of this class.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private static void registerAllMarshalers() {
        if (DEBUG) {
            Log.v(TAG, "Shall register metadata marshalers");
        }

        MarshalQueryable[] queryList = new MarshalQueryable[] {
                // marshalers for standard types
                new MarshalQueryablePrimitive(),
                new MarshalQueryableEnum(),
                new MarshalQueryableArray(),

                // pseudo standard types, that expand/narrow the native type into a managed type
                new MarshalQueryableBoolean(),
                new MarshalQueryableNativeByteToInteger(),

                // marshalers for custom types
                new MarshalQueryableRect(),
                new MarshalQueryableSize(),
                new MarshalQueryableSizeF(),
                new MarshalQueryableString(),
                new MarshalQueryableReprocessFormatsMap(),
                new MarshalQueryableRange(),
                new MarshalQueryablePair(),
                new MarshalQueryableMeteringRectangle(),
                new MarshalQueryableColorSpaceTransform(),
                new MarshalQueryableStreamConfiguration(),
                new MarshalQueryableStreamConfigurationDuration(),
                new MarshalQueryableRggbChannelVector(),
                new MarshalQueryableBlackLevelPattern(),
                new MarshalQueryableHighSpeedVideoConfiguration(),
                new MarshalQueryableRecommendedStreamConfiguration(),

                // generic parcelable marshaler (MUST BE LAST since it has lowest priority)
                new MarshalQueryableParcelable(),
        };

        for (MarshalQueryable query : queryList) {
            MarshalRegistry.registerMarshalQueryable(query);
        }
        if (DEBUG) {
            Log.v(TAG, "Registered metadata marshalers");
        }
    }

    /**
     * Check if input arguments are all {@code null}.
     *
     * <p>Vacuously {@code true} when called with zero arguments.</p>
     *
     * @param objs Input arguments for null check
     * @return {@code true} if input arguments are all {@code null}, otherwise {@code false}
     */
    private static boolean areValuesAllNull(Object... objs) {
        for (Object o : objs) {
            if (o != null) return false;
        }
        return true;
    }

    // Class-load-time registration so marshalers are available before any
    // metadata get/set is performed.
    static {
        registerAllMarshalers();
    }
}