1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.impl; 18 19 import android.annotation.NonNull; 20 import android.compat.annotation.UnsupportedAppUsage; 21 import android.graphics.ImageFormat; 22 import android.graphics.Point; 23 import android.graphics.Rect; 24 import android.hardware.camera2.CameraCharacteristics; 25 import android.hardware.camera2.CameraMetadata; 26 import android.hardware.camera2.CaptureRequest; 27 import android.hardware.camera2.CaptureResult; 28 import android.hardware.camera2.marshal.MarshalQueryable; 29 import android.hardware.camera2.marshal.MarshalRegistry; 30 import android.hardware.camera2.marshal.Marshaler; 31 import android.hardware.camera2.marshal.impl.MarshalQueryableArray; 32 import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern; 33 import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean; 34 import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform; 35 import android.hardware.camera2.marshal.impl.MarshalQueryableEnum; 36 import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration; 37 import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle; 38 import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger; 39 import android.hardware.camera2.marshal.impl.MarshalQueryablePair; 40 import 
android.hardware.camera2.marshal.impl.MarshalQueryableParcelable; 41 import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive; 42 import android.hardware.camera2.marshal.impl.MarshalQueryableRange; 43 import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration; 44 import android.hardware.camera2.marshal.impl.MarshalQueryableRect; 45 import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap; 46 import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector; 47 import android.hardware.camera2.marshal.impl.MarshalQueryableSize; 48 import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF; 49 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration; 50 import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration; 51 import android.hardware.camera2.marshal.impl.MarshalQueryableString; 52 import android.hardware.camera2.params.Capability; 53 import android.hardware.camera2.params.ColorSpaceProfiles; 54 import android.hardware.camera2.params.DeviceStateSensorOrientationMap; 55 import android.hardware.camera2.params.DynamicRangeProfiles; 56 import android.hardware.camera2.params.Face; 57 import android.hardware.camera2.params.HighSpeedVideoConfiguration; 58 import android.hardware.camera2.params.LensIntrinsicsSample; 59 import android.hardware.camera2.params.LensShadingMap; 60 import android.hardware.camera2.params.MandatoryStreamCombination; 61 import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap; 62 import android.hardware.camera2.params.OisSample; 63 import android.hardware.camera2.params.RecommendedStreamConfiguration; 64 import android.hardware.camera2.params.RecommendedStreamConfigurationMap; 65 import android.hardware.camera2.params.ReprocessFormatsMap; 66 import android.hardware.camera2.params.StreamConfiguration; 67 import android.hardware.camera2.params.StreamConfigurationDuration; 68 import 
android.hardware.camera2.params.StreamConfigurationMap; 69 import android.hardware.camera2.params.TonemapCurve; 70 import android.hardware.camera2.utils.ArrayUtils; 71 import android.hardware.camera2.utils.TypeReference; 72 import android.location.Location; 73 import android.location.LocationManager; 74 import android.os.Build; 75 import android.os.Parcel; 76 import android.os.Parcelable; 77 import android.os.ServiceSpecificException; 78 import android.util.Log; 79 import android.util.Range; 80 import android.util.Size; 81 82 import com.android.internal.camera.flags.Flags; 83 84 import dalvik.annotation.optimization.FastNative; 85 import dalvik.system.VMRuntime; 86 87 import java.io.IOException; 88 import java.nio.ByteBuffer; 89 import java.nio.ByteOrder; 90 import java.util.ArrayList; 91 import java.util.Arrays; 92 import java.util.Collections; 93 import java.util.HashMap; 94 import java.util.HashSet; 95 import java.util.List; 96 import java.util.Map; 97 import java.util.Objects; 98 import java.util.Set; 99 100 /** 101 * Implementation of camera metadata marshal/unmarshal across Binder to 102 * the camera service 103 */ 104 public class CameraMetadataNative implements Parcelable { 105 106 public static class Key<T> { 107 private boolean mHasTag; 108 private int mTag; 109 private long mVendorId = Long.MAX_VALUE; 110 private final Class<T> mType; 111 private final TypeReference<T> mTypeReference; 112 private final String mName; 113 private final String mFallbackName; 114 private final int mHash; 115 116 /** 117 * @hide 118 */ Key(String name, Class<T> type, long vendorId)119 public Key(String name, Class<T> type, long vendorId) { 120 if (name == null) { 121 throw new NullPointerException("Key needs a valid name"); 122 } else if (type == null) { 123 throw new NullPointerException("Type needs to be non-null"); 124 } 125 mName = name; 126 mFallbackName = null; 127 mType = type; 128 mVendorId = vendorId; 129 mTypeReference = 
TypeReference.createSpecializedTypeReference(type); 130 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 131 } 132 133 /** 134 * @hide 135 */ Key(String name, String fallbackName, Class<T> type)136 public Key(String name, String fallbackName, Class<T> type) { 137 if (name == null) { 138 throw new NullPointerException("Key needs a valid name"); 139 } else if (type == null) { 140 throw new NullPointerException("Type needs to be non-null"); 141 } 142 mName = name; 143 mFallbackName = fallbackName; 144 mType = type; 145 mTypeReference = TypeReference.createSpecializedTypeReference(type); 146 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 147 } 148 149 /** 150 * Visible for testing only. 151 * 152 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 153 * for application code or vendor-extended keys.</p> 154 */ Key(String name, Class<T> type)155 public Key(String name, Class<T> type) { 156 if (name == null) { 157 throw new NullPointerException("Key needs a valid name"); 158 } else if (type == null) { 159 throw new NullPointerException("Type needs to be non-null"); 160 } 161 mName = name; 162 mFallbackName = null; 163 mType = type; 164 mTypeReference = TypeReference.createSpecializedTypeReference(type); 165 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 166 } 167 168 /** 169 * Visible for testing only. 
170 * 171 * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key 172 * for application code or vendor-extended keys.</p> 173 */ 174 @SuppressWarnings("unchecked") Key(String name, TypeReference<T> typeReference)175 public Key(String name, TypeReference<T> typeReference) { 176 if (name == null) { 177 throw new NullPointerException("Key needs a valid name"); 178 } else if (typeReference == null) { 179 throw new NullPointerException("TypeReference needs to be non-null"); 180 } 181 mName = name; 182 mFallbackName = null; 183 mType = (Class<T>)typeReference.getRawType(); 184 mTypeReference = typeReference; 185 mHash = mName.hashCode() ^ mTypeReference.hashCode(); 186 } 187 188 /** 189 * Return a camelCase, period separated name formatted like: 190 * {@code "root.section[.subsections].name"}. 191 * 192 * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."}; 193 * keys that are device/platform-specific are prefixed with {@code "com."}.</p> 194 * 195 * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would 196 * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device 197 * specific key might look like {@code "com.google.nexus.data.private"}.</p> 198 * 199 * @return String representation of the key name 200 */ getName()201 public final String getName() { 202 return mName; 203 } 204 205 /** 206 * {@inheritDoc} 207 */ 208 @Override hashCode()209 public final int hashCode() { 210 return mHash; 211 } 212 213 /** 214 * Compare this key against other native keys, request keys, result keys, and 215 * characteristics keys. 216 * 217 * <p>Two keys are considered equal if their name and type reference are equal.</p> 218 * 219 * <p>Note that the equality against non-native keys is one-way. 
A native key may be equal 220 * to a result key; but that same result key will not be equal to a native key.</p> 221 */ 222 @SuppressWarnings("rawtypes") 223 @Override equals(Object o)224 public final boolean equals(Object o) { 225 if (this == o) { 226 return true; 227 } 228 229 if (o == null || this.hashCode() != o.hashCode()) { 230 return false; 231 } 232 233 Key<?> lhs; 234 235 if (o instanceof CaptureResult.Key) { 236 lhs = ((CaptureResult.Key)o).getNativeKey(); 237 } else if (o instanceof CaptureRequest.Key) { 238 lhs = ((CaptureRequest.Key)o).getNativeKey(); 239 } else if (o instanceof CameraCharacteristics.Key) { 240 lhs = ((CameraCharacteristics.Key)o).getNativeKey(); 241 } else if ((o instanceof Key)) { 242 lhs = (Key<?>)o; 243 } else { 244 return false; 245 } 246 247 return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference); 248 } 249 250 /** 251 * <p> 252 * Get the tag corresponding to this key. This enables insertion into the 253 * native metadata. 254 * </p> 255 * 256 * <p>This value is looked up the first time, and cached subsequently.</p> 257 * 258 * <p>This function may be called without cacheTag() if this is not a vendor key. 259 * If this is a vendor key, cacheTag() must be called first before getTag() can 260 * be called. Otherwise, mVendorId could be default (Long.MAX_VALUE) and vendor 261 * tag lookup could fail.</p> 262 * 263 * @return The tag numeric value corresponding to the string 264 */ 265 @UnsupportedAppUsage getTag()266 public final int getTag() { 267 if (!mHasTag) { 268 mTag = CameraMetadataNative.getTag(mName, mVendorId); 269 mHasTag = true; 270 } 271 return mTag; 272 } 273 274 /** 275 * Whether this key's tag is cached. 276 * 277 * @hide 278 */ 279 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) hasTag()280 public final boolean hasTag() { 281 return mHasTag; 282 } 283 284 /** 285 * Cache this key's tag. 
286 * 287 * @hide 288 */ 289 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) cacheTag(int tag)290 public final void cacheTag(int tag) { 291 mHasTag = true; 292 mTag = tag; 293 } 294 295 /** 296 * Get the raw class backing the type {@code T} for this key. 297 * 298 * <p>The distinction is only important if {@code T} is a generic, e.g. 299 * {@code Range<Integer>} since the nested type will be erased.</p> 300 */ getType()301 public final Class<T> getType() { 302 // TODO: remove this; other places should use #getTypeReference() instead 303 return mType; 304 } 305 306 /** 307 * Get the vendor tag provider id. 308 * 309 * @hide 310 */ getVendorId()311 public final long getVendorId() { 312 return mVendorId; 313 } 314 315 /** 316 * Get the type reference backing the type {@code T} for this key. 317 * 318 * <p>The distinction is only important if {@code T} is a generic, e.g. 319 * {@code Range<Integer>} since the nested type will be retained.</p> 320 */ getTypeReference()321 public final TypeReference<T> getTypeReference() { 322 return mTypeReference; 323 } 324 } 325 326 private static final String TAG = "CameraMetadataJV"; 327 private static final boolean DEBUG = false; 328 329 // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h 330 public static final int NATIVE_JPEG_FORMAT = 0x21; 331 332 private static final String CELLID_PROCESS = "CELLID"; 333 private static final String GPS_PROCESS = "GPS"; 334 private static final int FACE_LANDMARK_SIZE = 6; 335 336 private static final int MANDATORY_STREAM_CONFIGURATIONS_DEFAULT = 0; 337 private static final int MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION = 1; 338 private static final int MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT = 2; 339 private static final int MANDATORY_STREAM_CONFIGURATIONS_10BIT = 3; 340 private static final int MANDATORY_STREAM_CONFIGURATIONS_USE_CASE = 4; 341 private static final int MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION = 5; 342 
translateLocationProviderToProcess(final String provider)343 private static String translateLocationProviderToProcess(final String provider) { 344 if (provider == null) { 345 return null; 346 } 347 switch(provider) { 348 case LocationManager.GPS_PROVIDER: 349 return GPS_PROCESS; 350 case LocationManager.NETWORK_PROVIDER: 351 return CELLID_PROCESS; 352 default: 353 return null; 354 } 355 } 356 translateProcessToLocationProvider(final String process)357 private static String translateProcessToLocationProvider(final String process) { 358 if (process == null) { 359 return null; 360 } 361 switch(process) { 362 case GPS_PROCESS: 363 return LocationManager.GPS_PROVIDER; 364 case CELLID_PROCESS: 365 return LocationManager.NETWORK_PROVIDER; 366 default: 367 return null; 368 } 369 } 370 CameraMetadataNative()371 public CameraMetadataNative() { 372 super(); 373 mMetadataPtr = nativeAllocate(); 374 if (mMetadataPtr == 0) { 375 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 376 } 377 updateNativeAllocation(); 378 } 379 380 /** 381 * Copy constructor - clone metadata 382 */ CameraMetadataNative(CameraMetadataNative other)383 public CameraMetadataNative(CameraMetadataNative other) { 384 super(); 385 mMetadataPtr = nativeAllocateCopy(other.mMetadataPtr); 386 if (mMetadataPtr == 0) { 387 throw new OutOfMemoryError("Failed to allocate native CameraMetadata"); 388 } 389 updateNativeAllocation(); 390 } 391 392 /** 393 * Move the contents from {@code other} into a new camera metadata instance.</p> 394 * 395 * <p>After this call, {@code other} will become empty.</p> 396 * 397 * @param other the previous metadata instance which will get pilfered 398 * @return a new metadata instance with the values from {@code other} moved into it 399 */ move(CameraMetadataNative other)400 public static CameraMetadataNative move(CameraMetadataNative other) { 401 CameraMetadataNative newObject = new CameraMetadataNative(); 402 newObject.swap(other); 403 return newObject; 404 } 405 
406 /** 407 * Set all metadata values in the destination argument by using the corresponding 408 * values from the source. Metadata tags present in the destination and absent 409 * from the source will remain unmodified. 410 * 411 * @param dst Destination metadata 412 * @param src Source metadata 413 * @hide 414 */ update(CameraMetadataNative dst, CameraMetadataNative src)415 public static void update(CameraMetadataNative dst, CameraMetadataNative src) { 416 nativeUpdate(dst.mMetadataPtr, src.mMetadataPtr); 417 } 418 419 public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR = 420 new Parcelable.Creator<CameraMetadataNative>() { 421 @Override 422 public CameraMetadataNative createFromParcel(Parcel in) { 423 CameraMetadataNative metadata = new CameraMetadataNative(); 424 metadata.readFromParcel(in); 425 return metadata; 426 } 427 428 @Override 429 public CameraMetadataNative[] newArray(int size) { 430 return new CameraMetadataNative[size]; 431 } 432 }; 433 434 @Override describeContents()435 public int describeContents() { 436 return 0; 437 } 438 439 @Override writeToParcel(Parcel dest, int flags)440 public void writeToParcel(Parcel dest, int flags) { 441 nativeWriteToParcel(dest, mMetadataPtr); 442 } 443 444 /** 445 * @hide 446 */ get(CameraCharacteristics.Key<T> key)447 public <T> T get(CameraCharacteristics.Key<T> key) { 448 return get(key.getNativeKey()); 449 } 450 451 /** 452 * @hide 453 */ get(CaptureResult.Key<T> key)454 public <T> T get(CaptureResult.Key<T> key) { 455 return get(key.getNativeKey()); 456 } 457 458 /** 459 * @hide 460 */ get(CaptureRequest.Key<T> key)461 public <T> T get(CaptureRequest.Key<T> key) { 462 return get(key.getNativeKey()); 463 } 464 465 /** 466 * Look-up a metadata field value by its key. 
467 * 468 * @param key a non-{@code null} key instance 469 * @return the field corresponding to the {@code key}, or {@code null} if no value was set 470 */ get(Key<T> key)471 public <T> T get(Key<T> key) { 472 Objects.requireNonNull(key, "key must not be null"); 473 474 // Check if key has been overridden to use a wrapper class on the java side. 475 GetCommand g = sGetCommandMap.get(key); 476 if (g != null) { 477 return g.getValue(this, key); 478 } 479 return getBase(key); 480 } 481 readFromParcel(Parcel in)482 public void readFromParcel(Parcel in) { 483 nativeReadFromParcel(in, mMetadataPtr); 484 updateNativeAllocation(); 485 } 486 487 /** 488 * Set the global client-side vendor tag descriptor to allow use of vendor 489 * tags in camera applications. 490 * 491 * @throws ServiceSpecificException 492 * @hide 493 */ setupGlobalVendorTagDescriptor()494 public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException { 495 int err = nativeSetupGlobalVendorTagDescriptor(); 496 if (err != 0) { 497 throw new ServiceSpecificException(err, "Failure to set up global vendor tags"); 498 } 499 } 500 501 /** 502 * Set the global client-side vendor tag descriptor to allow use of vendor 503 * tags in camera applications. 504 * 505 * @return int An error code corresponding to one of the 506 * {@link ICameraService} error constants, or 0 on success. 507 */ nativeSetupGlobalVendorTagDescriptor()508 private static native int nativeSetupGlobalVendorTagDescriptor(); 509 510 /** 511 * Set a camera metadata field to a value. The field definitions can be 512 * found in {@link CameraCharacteristics}, {@link CaptureResult}, and 513 * {@link CaptureRequest}. 514 * 515 * @param key The metadata field to write. 516 * @param value The value to set the field to, which must be of a matching 517 * type to the key. 
518 */ set(Key<T> key, T value)519 public <T> void set(Key<T> key, T value) { 520 SetCommand s = sSetCommandMap.get(key); 521 if (s != null) { 522 s.setValue(this, value); 523 return; 524 } 525 526 setBase(key, value); 527 } 528 set(CaptureRequest.Key<T> key, T value)529 public <T> void set(CaptureRequest.Key<T> key, T value) { 530 set(key.getNativeKey(), value); 531 } 532 set(CaptureResult.Key<T> key, T value)533 public <T> void set(CaptureResult.Key<T> key, T value) { 534 set(key.getNativeKey(), value); 535 } 536 set(CameraCharacteristics.Key<T> key, T value)537 public <T> void set(CameraCharacteristics.Key<T> key, T value) { 538 set(key.getNativeKey(), value); 539 } 540 541 // Keep up-to-date with camera_metadata.h 542 /** 543 * @hide 544 */ 545 public static final int TYPE_BYTE = 0; 546 /** 547 * @hide 548 */ 549 public static final int TYPE_INT32 = 1; 550 /** 551 * @hide 552 */ 553 public static final int TYPE_FLOAT = 2; 554 /** 555 * @hide 556 */ 557 public static final int TYPE_INT64 = 3; 558 /** 559 * @hide 560 */ 561 public static final int TYPE_DOUBLE = 4; 562 /** 563 * @hide 564 */ 565 public static final int TYPE_RATIONAL = 5; 566 /** 567 * @hide 568 */ 569 public static final int NUM_TYPES = 6; 570 close()571 private void close() { 572 // Delete native pointer, but does not clear it 573 nativeClose(mMetadataPtr); 574 mMetadataPtr = 0; 575 576 if (mBufferSize > 0) { 577 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 578 } 579 mBufferSize = 0; 580 } 581 getBase(CameraCharacteristics.Key<T> key)582 private <T> T getBase(CameraCharacteristics.Key<T> key) { 583 return getBase(key.getNativeKey()); 584 } 585 getBase(CaptureResult.Key<T> key)586 private <T> T getBase(CaptureResult.Key<T> key) { 587 return getBase(key.getNativeKey()); 588 } 589 getBase(CaptureRequest.Key<T> key)590 private <T> T getBase(CaptureRequest.Key<T> key) { 591 return getBase(key.getNativeKey()); 592 } 593 getBase(Key<T> key)594 private <T> T getBase(Key<T> key) { 595 int tag; 
596 if (key.hasTag()) { 597 tag = key.getTag(); 598 } else { 599 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 600 key.cacheTag(tag); 601 } 602 byte[] values = readValues(tag); 603 if (values == null) { 604 // If the key returns null, use the fallback key if exists. 605 // This is to support old key names for the newly published keys. 606 if (key.mFallbackName == null) { 607 return null; 608 } 609 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.mFallbackName); 610 values = readValues(tag); 611 if (values == null) { 612 return null; 613 } 614 } 615 616 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 617 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 618 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 619 return marshaler.unmarshal(buffer); 620 } 621 622 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 623 // metadata. 624 private static final HashMap<Key<?>, GetCommand> sGetCommandMap = 625 new HashMap<Key<?>, GetCommand>(); 626 static { 627 sGetCommandMap.put( GetCommand()628 CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() { 629 @Override 630 @SuppressWarnings("unchecked") 631 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 632 return (T) metadata.getAvailableFormats(); 633 } 634 }); 635 sGetCommandMap.put( GetCommand()636 CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() { 637 @Override 638 @SuppressWarnings("unchecked") 639 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 640 return (T) metadata.getFaces(); 641 } 642 }); 643 sGetCommandMap.put( GetCommand()644 CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() { 645 @Override 646 @SuppressWarnings("unchecked") 647 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 648 return (T) metadata.getFaceRectangles(); 649 } 650 }); 651 sGetCommandMap.put( 
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey()652 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(), 653 new GetCommand() { 654 @Override 655 @SuppressWarnings("unchecked") 656 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 657 return (T) metadata.getStreamConfigurationMap(); 658 } 659 }); 660 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey()661 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey(), 662 new GetCommand() { 663 @Override 664 @SuppressWarnings("unchecked") 665 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 666 return (T) metadata.getStreamConfigurationMapMaximumResolution(); 667 } 668 }); 669 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey()670 CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(), 671 new GetCommand() { 672 @Override 673 @SuppressWarnings("unchecked") 674 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 675 return (T) metadata.getMandatoryStreamCombinations(); 676 } 677 }); 678 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey()679 CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey(), 680 new GetCommand() { 681 @Override 682 @SuppressWarnings("unchecked") 683 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 684 return (T) metadata.getMandatoryConcurrentStreamCombinations(); 685 } 686 }); 687 688 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_TEN_BIT_OUTPUT_STREAM_COMBINATIONS.getNativeKey()689 CameraCharacteristics.SCALER_MANDATORY_TEN_BIT_OUTPUT_STREAM_COMBINATIONS.getNativeKey(), 690 new GetCommand() { 691 @Override 692 @SuppressWarnings("unchecked") 693 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 694 return (T) 
metadata.getMandatory10BitStreamCombinations(); 695 } 696 }); 697 698 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey()699 CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey(), 700 new GetCommand() { 701 @Override 702 @SuppressWarnings("unchecked") 703 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 704 return (T) metadata.getMandatoryMaximumResolutionStreamCombinations(); 705 } 706 }); 707 708 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_USE_CASE_STREAM_COMBINATIONS.getNativeKey()709 CameraCharacteristics.SCALER_MANDATORY_USE_CASE_STREAM_COMBINATIONS.getNativeKey(), 710 new GetCommand() { 711 @Override 712 @SuppressWarnings("unchecked") 713 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 714 return (T) metadata.getMandatoryUseCaseStreamCombinations(); 715 } 716 }); 717 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_PREVIEW_STABILIZATION_OUTPUT_STREAM_COMBINATIONS.getNativeKey()718 CameraCharacteristics.SCALER_MANDATORY_PREVIEW_STABILIZATION_OUTPUT_STREAM_COMBINATIONS.getNativeKey(), 719 new GetCommand() { 720 @Override 721 @SuppressWarnings("unchecked") 722 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 723 return (T) metadata.getMandatoryPreviewStabilizationStreamCombinations(); 724 } 725 }); 726 727 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey()728 CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() { 729 @Override 730 @SuppressWarnings("unchecked") 731 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 732 return (T) metadata.getMaxRegions(key); 733 } 734 }); 735 sGetCommandMap.put( GetCommand()736 CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() { 737 @Override 738 @SuppressWarnings("unchecked") 739 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 740 
return (T) metadata.getMaxRegions(key); 741 } 742 }); 743 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey()744 CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() { 745 @Override 746 @SuppressWarnings("unchecked") 747 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 748 return (T) metadata.getMaxRegions(key); 749 } 750 }); 751 sGetCommandMap.put( GetCommand()752 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() { 753 @Override 754 @SuppressWarnings("unchecked") 755 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 756 return (T) metadata.getMaxNumOutputs(key); 757 } 758 }); 759 sGetCommandMap.put( GetCommand()760 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() { 761 @Override 762 @SuppressWarnings("unchecked") 763 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 764 return (T) metadata.getMaxNumOutputs(key); 765 } 766 }); 767 sGetCommandMap.put( CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey()768 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(), 769 new GetCommand() { 770 @Override 771 @SuppressWarnings("unchecked") 772 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 773 return (T) metadata.getMaxNumOutputs(key); 774 } 775 }); 776 sGetCommandMap.put( GetCommand()777 CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() { 778 @Override 779 @SuppressWarnings("unchecked") 780 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 781 return (T) metadata.getTonemapCurve(); 782 } 783 }); 784 sGetCommandMap.put( GetCommand()785 CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() { 786 @Override 787 @SuppressWarnings("unchecked") 788 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 789 return (T) metadata.getGpsLocation(); 790 } 791 }); 792 sGetCommandMap.put( 
CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey()793 CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(), 794 new GetCommand() { 795 @Override 796 @SuppressWarnings("unchecked") 797 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 798 return (T) metadata.getLensShadingMap(); 799 } 800 }); 801 sGetCommandMap.put( CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey()802 CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey(), 803 new GetCommand() { 804 @Override 805 @SuppressWarnings("unchecked") 806 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 807 return (T) metadata.getDeviceStateOrientationMap(); 808 } 809 }); 810 sGetCommandMap.put( CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES.getNativeKey()811 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES.getNativeKey(), 812 new GetCommand() { 813 @Override 814 @SuppressWarnings("unchecked") 815 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 816 return (T) metadata.getDynamicRangeProfiles(); 817 } 818 }); 819 sGetCommandMap.put( CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES.getNativeKey()820 CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES.getNativeKey(), 821 new GetCommand() { 822 @Override 823 @SuppressWarnings("unchecked") 824 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 825 return (T) metadata.getColorSpaceProfiles(); 826 } 827 }); 828 sGetCommandMap.put( CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey()829 CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(), 830 new GetCommand() { 831 @Override 832 @SuppressWarnings("unchecked") 833 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 834 return (T) metadata.getOisSamples(); 835 } 836 }); 837 sGetCommandMap.put( CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey()838 
CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey(), 839 new GetCommand() { 840 @Override 841 @SuppressWarnings("unchecked") 842 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 843 return (T) metadata.getExtendedSceneModeCapabilities(); 844 } 845 }); 846 sGetCommandMap.put( CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey()847 CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey(), 848 new GetCommand() { 849 @Override 850 @SuppressWarnings("unchecked") 851 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 852 return (T) metadata.getMultiResolutionStreamConfigurationMap(); 853 } 854 }); 855 sGetCommandMap.put( CaptureResult.STATISTICS_LENS_INTRINSICS_SAMPLES.getNativeKey()856 CaptureResult.STATISTICS_LENS_INTRINSICS_SAMPLES.getNativeKey(), 857 new GetCommand() { 858 @Override 859 @SuppressWarnings("unchecked") 860 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 861 return (T) metadata.getLensIntrinsicSamples(); 862 } 863 }); 864 } 865 getAvailableFormats()866 private int[] getAvailableFormats() { 867 int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS); 868 if (availableFormats != null) { 869 for (int i = 0; i < availableFormats.length; i++) { 870 // JPEG has different value between native and managed side, need override. 
871 if (availableFormats[i] == NATIVE_JPEG_FORMAT) { 872 availableFormats[i] = ImageFormat.JPEG; 873 } 874 } 875 } 876 877 return availableFormats; 878 } 879 setFaces(Face[] faces)880 private boolean setFaces(Face[] faces) { 881 if (faces == null) { 882 return false; 883 } 884 885 int numFaces = faces.length; 886 887 // Detect if all faces are SIMPLE or not; count # of valid faces 888 boolean fullMode = true; 889 for (Face face : faces) { 890 if (face == null) { 891 numFaces--; 892 Log.w(TAG, "setFaces - null face detected, skipping"); 893 continue; 894 } 895 896 if (face.getId() == Face.ID_UNSUPPORTED) { 897 fullMode = false; 898 } 899 } 900 901 Rect[] faceRectangles = new Rect[numFaces]; 902 byte[] faceScores = new byte[numFaces]; 903 int[] faceIds = null; 904 int[] faceLandmarks = null; 905 906 if (fullMode) { 907 faceIds = new int[numFaces]; 908 faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE]; 909 } 910 911 int i = 0; 912 for (Face face : faces) { 913 if (face == null) { 914 continue; 915 } 916 917 faceRectangles[i] = face.getBounds(); 918 faceScores[i] = (byte)face.getScore(); 919 920 if (fullMode) { 921 faceIds[i] = face.getId(); 922 923 int j = 0; 924 925 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x; 926 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y; 927 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x; 928 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y; 929 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x; 930 faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y; 931 } 932 933 i++; 934 } 935 936 set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles); 937 set(CaptureResult.STATISTICS_FACE_IDS, faceIds); 938 set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks); 939 set(CaptureResult.STATISTICS_FACE_SCORES, faceScores); 940 941 return true; 942 } 943 getFaces()944 private Face[] getFaces() 
{ 945 Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE); 946 byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES); 947 Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES); 948 int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS); 949 int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS); 950 951 if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) { 952 return null; 953 } 954 955 if (faceDetectMode == null) { 956 Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE"); 957 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 958 } else if (faceDetectMode > CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 959 // Face detect mode is larger than FULL, assuming the mode is FULL 960 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL; 961 } else { 962 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) { 963 return new Face[0]; 964 } 965 if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE && 966 faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 967 Log.w(TAG, "Unknown face detect mode: " + faceDetectMode); 968 return new Face[0]; 969 } 970 } 971 972 // Face scores and rectangles are required by SIMPLE and FULL mode. 973 if (faceScores == null || faceRectangles == null) { 974 Log.w(TAG, "Expect face scores and rectangles to be non-null"); 975 return new Face[0]; 976 } else if (faceScores.length != faceRectangles.length) { 977 Log.w(TAG, String.format("Face score size(%d) doesn match face rectangle size(%d)!", 978 faceScores.length, faceRectangles.length)); 979 } 980 981 // To be safe, make number of faces is the minimal of all face info metadata length. 982 int numFaces = Math.min(faceScores.length, faceRectangles.length); 983 // Face id and landmarks are only required by FULL mode. 
984 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) { 985 if (faceIds == null || faceLandmarks == null) { 986 Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode," + 987 "fallback to SIMPLE mode"); 988 faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE; 989 } else { 990 if (faceIds.length != numFaces || 991 faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) { 992 Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't" + 993 "match face number(%d)!", 994 faceIds.length, faceLandmarks.length * FACE_LANDMARK_SIZE, numFaces)); 995 } 996 // To be safe, make number of faces is the minimal of all face info metadata length. 997 numFaces = Math.min(numFaces, faceIds.length); 998 numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE); 999 } 1000 } 1001 1002 ArrayList<Face> faceList = new ArrayList<Face>(); 1003 if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) { 1004 for (int i = 0; i < numFaces; i++) { 1005 if (faceScores[i] <= Face.SCORE_MAX && 1006 faceScores[i] >= Face.SCORE_MIN) { 1007 faceList.add(new Face(faceRectangles[i], faceScores[i])); 1008 } 1009 } 1010 } else { 1011 // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL 1012 for (int i = 0; i < numFaces; i++) { 1013 if (faceScores[i] <= Face.SCORE_MAX && 1014 faceScores[i] >= Face.SCORE_MIN && 1015 faceIds[i] >= 0) { 1016 Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE], 1017 faceLandmarks[i*FACE_LANDMARK_SIZE+1]); 1018 Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2], 1019 faceLandmarks[i*FACE_LANDMARK_SIZE+3]); 1020 Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4], 1021 faceLandmarks[i*FACE_LANDMARK_SIZE+5]); 1022 Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i], 1023 leftEye, rightEye, mouth); 1024 faceList.add(face); 1025 } 1026 } 1027 } 1028 Face[] faces = new Face[faceList.size()]; 1029 faceList.toArray(faces); 1030 return faces; 
1031 } 1032 1033 // Face rectangles are defined as (left, top, right, bottom) instead of 1034 // (left, top, width, height) at the native level, so the normal Rect 1035 // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo 1036 // that conversion here for just the faces. getFaceRectangles()1037 private Rect[] getFaceRectangles() { 1038 Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES); 1039 if (faceRectangles == null) return null; 1040 1041 Rect[] fixedFaceRectangles = new Rect[faceRectangles.length]; 1042 for (int i = 0; i < faceRectangles.length; i++) { 1043 fixedFaceRectangles[i] = new Rect( 1044 faceRectangles[i].left, 1045 faceRectangles[i].top, 1046 faceRectangles[i].right - faceRectangles[i].left, 1047 faceRectangles[i].bottom - faceRectangles[i].top); 1048 } 1049 return fixedFaceRectangles; 1050 } 1051 setLensShadingMap(LensShadingMap lensShadingMap)1052 private boolean setLensShadingMap(LensShadingMap lensShadingMap) { 1053 if (lensShadingMap == null) { 1054 return false; 1055 } 1056 float[] lsmArray = new float[lensShadingMap.getGainFactorCount()]; 1057 lensShadingMap.copyGainFactors(lsmArray, 0); 1058 setBase(CaptureResult.STATISTICS_LENS_SHADING_MAP, lsmArray); 1059 1060 Size s = new Size(lensShadingMap.getRowCount(), lensShadingMap.getColumnCount()); 1061 setBase(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE, s); 1062 return true; 1063 } 1064 getLensShadingMap()1065 private LensShadingMap getLensShadingMap() { 1066 float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP); 1067 Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE); 1068 1069 // Do not warn if lsmArray is null while s is not. This is valid. 
1070 if (lsmArray == null) { 1071 return null; 1072 } 1073 1074 if (s == null) { 1075 Log.w(TAG, "getLensShadingMap - Lens shading map size was null."); 1076 return null; 1077 } 1078 1079 LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth()); 1080 return map; 1081 } 1082 getDeviceStateOrientationMap()1083 private DeviceStateSensorOrientationMap getDeviceStateOrientationMap() { 1084 long[] mapArray = getBase(CameraCharacteristics.INFO_DEVICE_STATE_ORIENTATIONS); 1085 1086 // Do not warn if map is null while s is not. This is valid. 1087 if (mapArray == null) { 1088 return null; 1089 } 1090 1091 DeviceStateSensorOrientationMap map = new DeviceStateSensorOrientationMap(mapArray); 1092 return map; 1093 } 1094 getDynamicRangeProfiles()1095 private DynamicRangeProfiles getDynamicRangeProfiles() { 1096 long[] profileArray = getBase( 1097 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP); 1098 1099 if (profileArray == null) { 1100 return null; 1101 } 1102 1103 return new DynamicRangeProfiles(profileArray); 1104 } 1105 getColorSpaceProfiles()1106 private ColorSpaceProfiles getColorSpaceProfiles() { 1107 long[] profileArray = getBase( 1108 CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP); 1109 1110 if (profileArray == null) { 1111 return null; 1112 } 1113 1114 return new ColorSpaceProfiles(profileArray); 1115 } 1116 getGpsLocation()1117 private Location getGpsLocation() { 1118 String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD); 1119 double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES); 1120 Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP); 1121 1122 if (areValuesAllNull(processingMethod, coords, timeStamp)) { 1123 return null; 1124 } 1125 1126 Location l = new Location(translateProcessToLocationProvider(processingMethod)); 1127 if (timeStamp != null) { 1128 // Location expects timestamp in [ms.] 
1129 l.setTime(timeStamp * 1000); 1130 } else { 1131 Log.w(TAG, "getGpsLocation - No timestamp for GPS location."); 1132 } 1133 1134 if (coords != null) { 1135 l.setLatitude(coords[0]); 1136 l.setLongitude(coords[1]); 1137 l.setAltitude(coords[2]); 1138 } else { 1139 Log.w(TAG, "getGpsLocation - No coordinates for GPS location"); 1140 } 1141 1142 return l; 1143 } 1144 setGpsLocation(Location l)1145 private boolean setGpsLocation(Location l) { 1146 if (l == null) { 1147 // If Location value being set is null, remove corresponding keys. 1148 // This is safe because api1/client2/CameraParameters.cpp already erases 1149 // the keys for JPEG_GPS_LOCATION for certain cases. 1150 setBase(CaptureRequest.JPEG_GPS_TIMESTAMP, null); 1151 setBase(CaptureRequest.JPEG_GPS_COORDINATES, null); 1152 setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, null); 1153 return false; 1154 } 1155 1156 double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() }; 1157 String processMethod = translateLocationProviderToProcess(l.getProvider()); 1158 //JPEG_GPS_TIMESTAMP expects sec. instead of msec. 
1159 long timestamp = l.getTime() / 1000; 1160 1161 set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp); 1162 set(CaptureRequest.JPEG_GPS_COORDINATES, coords); 1163 1164 if (processMethod == null) { 1165 Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or NETWORK" + 1166 "provider"); 1167 } else { 1168 setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod); 1169 } 1170 return true; 1171 } 1172 parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, StreamConfigurationMap fullMap, boolean isDepth, ArrayList<ArrayList<StreamConfiguration>> streamConfigList, ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList, ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList, boolean[] supportsPrivate)1173 private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations, 1174 StreamConfigurationMap fullMap, boolean isDepth, 1175 ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList, 1176 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList, 1177 ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList, 1178 boolean[] /*out*/supportsPrivate) { 1179 1180 streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1181 streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1182 streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1183 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1184 streamConfigList.add(new ArrayList<StreamConfiguration> ()); 1185 streamDurationList.add(new ArrayList<StreamConfigurationDuration> ()); 1186 streamStallList.add(new ArrayList<StreamConfigurationDuration> ()); 1187 } 1188 1189 for (RecommendedStreamConfiguration c : configurations) { 1190 int width = c.getWidth(); 1191 int height = c.getHeight(); 1192 int internalFormat = c.getFormat(); 1193 int publicFormat = 1194 
(isDepth) ? StreamConfigurationMap.depthFormatToPublic(internalFormat) : 1195 StreamConfigurationMap.imageFormatToPublic(internalFormat); 1196 Size sz = new Size(width, height); 1197 int usecaseBitmap = c.getUsecaseBitmap(); 1198 1199 if (!c.isInput()) { 1200 StreamConfigurationDuration minDurationConfiguration = null; 1201 StreamConfigurationDuration stallDurationConfiguration = null; 1202 1203 StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat, 1204 width, height, /*input*/ false); 1205 1206 long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz); 1207 if (minFrameDuration > 0) { 1208 minDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1209 width, height, minFrameDuration); 1210 } 1211 1212 long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz); 1213 if (stallDuration > 0) { 1214 stallDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1215 width, height, stallDuration); 1216 } 1217 1218 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1219 if ((usecaseBitmap & (1 << i)) != 0) { 1220 ArrayList<StreamConfiguration> sc = streamConfigList.get(i); 1221 sc.add(streamConfiguration); 1222 1223 if (minFrameDuration > 0) { 1224 ArrayList<StreamConfigurationDuration> scd = streamDurationList.get(i); 1225 scd.add(minDurationConfiguration); 1226 } 1227 1228 if (stallDuration > 0) { 1229 ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i); 1230 scs.add(stallDurationConfiguration); 1231 } 1232 1233 if ((supportsPrivate != null) && !supportsPrivate[i] && 1234 (publicFormat == ImageFormat.PRIVATE)) { 1235 supportsPrivate[i] = true; 1236 } 1237 } 1238 } 1239 } else { 1240 if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) { 1241 throw new IllegalArgumentException("Recommended input stream configurations " + 1242 "should only be advertised in the ZSL use case!"); 1243 } 1244 1245 
ArrayList<StreamConfiguration> sc = streamConfigList.get( 1246 RecommendedStreamConfigurationMap.USECASE_ZSL); 1247 sc.add(new StreamConfiguration(internalFormat, 1248 width, height, /*input*/ true)); 1249 } 1250 } 1251 } 1252 1253 private class StreamConfigurationData { 1254 StreamConfiguration [] streamConfigurationArray = null; 1255 StreamConfigurationDuration [] minDurationArray = null; 1256 StreamConfigurationDuration [] stallDurationArray = null; 1257 } 1258 initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, StreamConfigurationData scData)1259 public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, 1260 ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, 1261 StreamConfigurationData /*out*/scData) { 1262 if ((scData == null) || (sc == null)) { 1263 return; 1264 } 1265 1266 scData.streamConfigurationArray = new StreamConfiguration[sc.size()]; 1267 scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray); 1268 1269 if ((scd != null) && !scd.isEmpty()) { 1270 scData.minDurationArray = new StreamConfigurationDuration[scd.size()]; 1271 scData.minDurationArray = scd.toArray(scData.minDurationArray); 1272 } else { 1273 scData.minDurationArray = new StreamConfigurationDuration[0]; 1274 } 1275 1276 if ((scs != null) && !scs.isEmpty()) { 1277 scData.stallDurationArray = new StreamConfigurationDuration[scs.size()]; 1278 scData.stallDurationArray = scs.toArray(scData.stallDurationArray); 1279 } else { 1280 scData.stallDurationArray = new StreamConfigurationDuration[0]; 1281 } 1282 } 1283 1284 /** 1285 * Retrieve the list of recommended stream configurations. 1286 * 1287 * @return A list of recommended stream configuration maps for each common use case or null 1288 * in case the recommended stream configurations are invalid or incomplete. 
1289 * @hide 1290 */ getRecommendedStreamConfigurations()1291 public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() { 1292 RecommendedStreamConfiguration[] configurations = getBase( 1293 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS); 1294 RecommendedStreamConfiguration[] depthConfigurations = getBase( 1295 CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS); 1296 if ((configurations == null) && (depthConfigurations == null)) { 1297 return null; 1298 } 1299 1300 StreamConfigurationMap fullMap = getStreamConfigurationMap(); 1301 ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations = 1302 new ArrayList<RecommendedStreamConfigurationMap> (); 1303 1304 ArrayList<ArrayList<StreamConfiguration>> streamConfigList = 1305 new ArrayList<ArrayList<StreamConfiguration>>(); 1306 ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList = 1307 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1308 ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList = 1309 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1310 boolean[] supportsPrivate = 1311 new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT]; 1312 try { 1313 if (configurations != null) { 1314 parseRecommendedConfigurations(configurations, fullMap, /*isDepth*/ false, 1315 streamConfigList, streamDurationList, streamStallList, supportsPrivate); 1316 } 1317 } catch (IllegalArgumentException e) { 1318 Log.e(TAG, "Failed parsing the recommended stream configurations!"); 1319 return null; 1320 } 1321 1322 ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList = 1323 new ArrayList<ArrayList<StreamConfiguration>>(); 1324 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList = 1325 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1326 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList = 1327 new 
ArrayList<ArrayList<StreamConfigurationDuration>>(); 1328 if (depthConfigurations != null) { 1329 try { 1330 parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true, 1331 depthStreamConfigList, depthStreamDurationList, depthStreamStallList, 1332 /*supportsPrivate*/ null); 1333 } catch (IllegalArgumentException e) { 1334 Log.e(TAG, "Failed parsing the recommended depth stream configurations!"); 1335 return null; 1336 } 1337 } 1338 1339 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1340 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP); 1341 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1342 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1343 boolean listHighResolution = isBurstSupported(); 1344 recommendedConfigurations.ensureCapacity( 1345 RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1346 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1347 StreamConfigurationData scData = new StreamConfigurationData(); 1348 if (configurations != null) { 1349 initializeStreamConfigurationData(streamConfigList.get(i), 1350 streamDurationList.get(i), streamStallList.get(i), scData); 1351 } 1352 1353 StreamConfigurationData depthScData = new StreamConfigurationData(); 1354 if (depthConfigurations != null) { 1355 initializeStreamConfigurationData(depthStreamConfigList.get(i), 1356 depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData); 1357 } 1358 1359 if ((scData.streamConfigurationArray == null || 1360 scData.streamConfigurationArray.length == 0) && 1361 (depthScData.streamConfigurationArray == null || 1362 depthScData.streamConfigurationArray.length == 0)) { 1363 recommendedConfigurations.add(null); 1364 continue; 1365 } 1366 1367 // Dynamic depth streams involve alot of SW processing and currently cannot be 1368 // recommended. 
1369 StreamConfigurationMap map = null; 1370 switch (i) { 1371 case RecommendedStreamConfigurationMap.USECASE_PREVIEW: 1372 case RecommendedStreamConfigurationMap.USECASE_RAW: 1373 case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT: 1374 case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT: 1375 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1376 scData.minDurationArray, scData.stallDurationArray, 1377 /*depthconfiguration*/ null, /*depthminduration*/ null, 1378 /*depthstallduration*/ null, 1379 /*dynamicDepthConfigurations*/ null, 1380 /*dynamicDepthMinFrameDurations*/ null, 1381 /*dynamicDepthStallDurations*/ null, 1382 /*heicconfiguration*/ null, 1383 /*heicminduration*/ null, 1384 /*heicstallduration*/ null, 1385 /*jpegRconfiguration*/ null, 1386 /*jpegRminduration*/ null, 1387 /*jpegRstallduration*/ null, 1388 /*highspeedvideoconfigurations*/ null, 1389 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1390 break; 1391 case RecommendedStreamConfigurationMap.USECASE_RECORD: 1392 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1393 scData.minDurationArray, scData.stallDurationArray, 1394 /*depthconfiguration*/ null, /*depthminduration*/ null, 1395 /*depthstallduration*/ null, 1396 /*dynamicDepthConfigurations*/ null, 1397 /*dynamicDepthMinFrameDurations*/ null, 1398 /*dynamicDepthStallDurations*/ null, 1399 /*heicconfiguration*/ null, 1400 /*heicminduration*/ null, 1401 /*heicstallduration*/ null, 1402 /*jpegRconfiguration*/ null, 1403 /*jpegRminduration*/ null, 1404 /*jpegRstallduration*/ null, 1405 highSpeedVideoConfigurations, 1406 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1407 break; 1408 case RecommendedStreamConfigurationMap.USECASE_ZSL: 1409 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1410 scData.minDurationArray, scData.stallDurationArray, 1411 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1412 
depthScData.stallDurationArray, 1413 /*dynamicDepthConfigurations*/ null, 1414 /*dynamicDepthMinFrameDurations*/ null, 1415 /*dynamicDepthStallDurations*/ null, 1416 /*heicconfiguration*/ null, 1417 /*heicminduration*/ null, 1418 /*heicstallduration*/ null, 1419 /*jpegRconfiguration*/ null, 1420 /*jpegRminduration*/ null, 1421 /*jpegRstallduration*/ null, 1422 /*highSpeedVideoConfigurations*/ null, 1423 inputOutputFormatsMap, listHighResolution, supportsPrivate[i]); 1424 break; 1425 default: 1426 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1427 scData.minDurationArray, scData.stallDurationArray, 1428 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1429 depthScData.stallDurationArray, 1430 /*dynamicDepthConfigurations*/ null, 1431 /*dynamicDepthMinFrameDurations*/ null, 1432 /*dynamicDepthStallDurations*/ null, 1433 /*heicconfiguration*/ null, 1434 /*heicminduration*/ null, 1435 /*heicstallduration*/ null, 1436 /*jpegRconfiguration*/ null, 1437 /*jpegRminduration*/ null, 1438 /*jpegRstallduration*/ null, 1439 /*highSpeedVideoConfigurations*/ null, 1440 /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]); 1441 } 1442 1443 recommendedConfigurations.add(new RecommendedStreamConfigurationMap(map, /*usecase*/i, 1444 supportsPrivate[i])); 1445 } 1446 1447 return recommendedConfigurations; 1448 } 1449 isCapabilitySupported(int capabilityRequested)1450 private boolean isCapabilitySupported(int capabilityRequested) { 1451 boolean ret = false; 1452 1453 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1454 for (int capability : capabilities) { 1455 if (capabilityRequested == capability) { 1456 ret = true; 1457 break; 1458 } 1459 } 1460 1461 return ret; 1462 } 1463 1464 /** 1465 * @hide 1466 */ isUltraHighResolutionSensor()1467 public boolean isUltraHighResolutionSensor() { 1468 return isCapabilitySupported( 1469 
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR); 1470 1471 } isBurstSupported()1472 private boolean isBurstSupported() { 1473 return isCapabilitySupported( 1474 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); 1475 } 1476 isPreviewStabilizationSupported()1477 private boolean isPreviewStabilizationSupported() { 1478 boolean ret = false; 1479 1480 int[] videoStabilizationModes = 1481 getBase(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); 1482 if (videoStabilizationModes == null) { 1483 return false; 1484 } 1485 for (int mode : videoStabilizationModes) { 1486 if (mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) { 1487 ret = true; 1488 break; 1489 } 1490 } 1491 1492 return ret; 1493 } 1494 isCroppedRawSupported()1495 private boolean isCroppedRawSupported() { 1496 boolean ret = false; 1497 1498 long[] streamUseCases = 1499 getBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES); 1500 if (streamUseCases == null) { 1501 return false; 1502 } 1503 for (long useCase : streamUseCases) { 1504 if (useCase == CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) { 1505 return true; 1506 } 1507 } 1508 1509 return ret; 1510 } 1511 getMandatoryStreamCombinationsHelper( int mandatoryStreamsType)1512 private MandatoryStreamCombination[] getMandatoryStreamCombinationsHelper( 1513 int mandatoryStreamsType) { 1514 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1515 ArrayList<Integer> caps = new ArrayList<Integer>(); 1516 caps.ensureCapacity(capabilities.length); 1517 for (int c : capabilities) { 1518 caps.add(new Integer(c)); 1519 } 1520 int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 1521 MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder( 1522 mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap(), 1523 getStreamConfigurationMapMaximumResolution(), 
isPreviewStabilizationSupported(), 1524 isCroppedRawSupported()); 1525 1526 List<MandatoryStreamCombination> combs = null; 1527 switch (mandatoryStreamsType) { 1528 case MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT: 1529 combs = build.getAvailableMandatoryConcurrentStreamCombinations(); 1530 break; 1531 case MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION: 1532 combs = build.getAvailableMandatoryMaximumResolutionStreamCombinations(); 1533 break; 1534 case MANDATORY_STREAM_CONFIGURATIONS_10BIT: 1535 combs = build.getAvailableMandatory10BitStreamCombinations(); 1536 break; 1537 case MANDATORY_STREAM_CONFIGURATIONS_USE_CASE: 1538 combs = build.getAvailableMandatoryStreamUseCaseCombinations(); 1539 break; 1540 case MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION: 1541 combs = build.getAvailableMandatoryPreviewStabilizedStreamCombinations(); 1542 break; 1543 default: 1544 combs = build.getAvailableMandatoryStreamCombinations(); 1545 } 1546 if ((combs != null) && (!combs.isEmpty())) { 1547 MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()]; 1548 combArray = combs.toArray(combArray); 1549 return combArray; 1550 } 1551 return null; 1552 } 1553 getMandatory10BitStreamCombinations()1554 private MandatoryStreamCombination[] getMandatory10BitStreamCombinations() { 1555 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_10BIT); 1556 } 1557 getMandatoryConcurrentStreamCombinations()1558 private MandatoryStreamCombination[] getMandatoryConcurrentStreamCombinations() { 1559 if (!mHasMandatoryConcurrentStreams) { 1560 return null; 1561 } 1562 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT); 1563 } 1564 getMandatoryMaximumResolutionStreamCombinations()1565 private MandatoryStreamCombination[] getMandatoryMaximumResolutionStreamCombinations() { 1566 if (!isUltraHighResolutionSensor()) { 1567 return null; 1568 } 1569 return 
getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION); 1570 } 1571 getMandatoryStreamCombinations()1572 private MandatoryStreamCombination[] getMandatoryStreamCombinations() { 1573 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_DEFAULT); 1574 } 1575 getMandatoryUseCaseStreamCombinations()1576 private MandatoryStreamCombination[] getMandatoryUseCaseStreamCombinations() { 1577 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_USE_CASE); 1578 } 1579 getMandatoryPreviewStabilizationStreamCombinations()1580 private MandatoryStreamCombination[] getMandatoryPreviewStabilizationStreamCombinations() { 1581 return getMandatoryStreamCombinationsHelper( 1582 MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION); 1583 } 1584 getStreamConfigurationMap()1585 private StreamConfigurationMap getStreamConfigurationMap() { 1586 StreamConfiguration[] configurations = getBase( 1587 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS); 1588 StreamConfigurationDuration[] minFrameDurations = getBase( 1589 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS); 1590 StreamConfigurationDuration[] stallDurations = getBase( 1591 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS); 1592 StreamConfiguration[] depthConfigurations = getBase( 1593 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS); 1594 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1595 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); 1596 StreamConfigurationDuration[] depthStallDurations = getBase( 1597 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); 1598 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1599 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS); 1600 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1601 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS); 1602 
StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1603 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS); 1604 StreamConfiguration[] heicConfigurations = getBase( 1605 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); 1606 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1607 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); 1608 StreamConfigurationDuration[] heicStallDurations = getBase( 1609 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); 1610 StreamConfiguration[] jpegRConfigurations = getBase( 1611 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS); 1612 StreamConfigurationDuration[] jpegRMinFrameDurations = getBase( 1613 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS); 1614 StreamConfigurationDuration[] jpegRStallDurations = getBase( 1615 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS); 1616 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1617 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1618 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1619 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP); 1620 boolean listHighResolution = isBurstSupported(); 1621 return new StreamConfigurationMap( 1622 configurations, minFrameDurations, stallDurations, 1623 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1624 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1625 dynamicDepthStallDurations, heicConfigurations, 1626 heicMinFrameDurations, heicStallDurations, 1627 jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, 1628 highSpeedVideoConfigurations, inputOutputFormatsMap, 1629 listHighResolution); 1630 } 1631 getStreamConfigurationMapMaximumResolution()1632 private StreamConfigurationMap getStreamConfigurationMapMaximumResolution() { 1633 StreamConfiguration[] configurations = getBase( 1634 
CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1635 StreamConfigurationDuration[] minFrameDurations = getBase( 1636 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1637 StreamConfigurationDuration[] stallDurations = getBase( 1638 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1639 // If the at least these keys haven't been advertised, there cannot be a meaningful max 1640 // resolution StreamConfigurationMap 1641 if (configurations == null || 1642 minFrameDurations == null || 1643 stallDurations == null) { 1644 return null; 1645 } 1646 1647 StreamConfiguration[] depthConfigurations = getBase( 1648 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1649 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1650 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1651 StreamConfigurationDuration[] depthStallDurations = getBase( 1652 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1653 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1654 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1655 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1656 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1657 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1658 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1659 StreamConfiguration[] heicConfigurations = getBase( 1660 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1661 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1662 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1663 StreamConfigurationDuration[] heicStallDurations = getBase( 1664 
        CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION);
StreamConfiguration[] jpegRConfigurations = getBase(
        CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION);
StreamConfigurationDuration[] jpegRMinFrameDurations = getBase(
        CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION);
StreamConfigurationDuration[] jpegRStallDurations = getBase(
        CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION);
HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
        CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION);
ReprocessFormatsMap inputOutputFormatsMap = getBase(
        CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION);
// TODO: Is this correct, burst capability shouldn't necessarily correspond to max res mode
boolean listHighResolution = isBurstSupported();
// The trailing 'false' distinguishes this from the default-resolution map constructor.
return new StreamConfigurationMap(
        configurations, minFrameDurations, stallDurations,
        depthConfigurations, depthMinFrameDurations, depthStallDurations,
        dynamicDepthConfigurations, dynamicDepthMinFrameDurations,
        dynamicDepthStallDurations, heicConfigurations,
        heicMinFrameDurations, heicStallDurations,
        jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations,
        highSpeedVideoConfigurations, inputOutputFormatsMap,
        listHighResolution, false);
}

/**
 * Extract one of the synthesized CONTROL_MAX_REGIONS_{AE,AWB,AF} values from
 * the packed CONTROL_MAX_REGIONS triple.
 *
 * @return the requested element, or {@code null} if the packed key is absent
 * @throws AssertionError if {@code key} is not one of the three max-regions keys
 */
private <T> Integer getMaxRegions(Key<T> key) {
    final int AE = 0;
    final int AWB = 1;
    final int AF = 2;

    // The order of the elements is: (AE, AWB, AF)
    int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);

    if (maxRegions == null) {
        return null;
    }

    if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
        return maxRegions[AE];
    } else if
(key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
    return maxRegions[AWB];
} else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
    return maxRegions[AF];
} else {
    throw new AssertionError("Invalid key " + key);
}
}

/**
 * Extract one of the synthesized REQUEST_MAX_NUM_OUTPUT_{RAW,PROC,PROC_STALLING}
 * values from the packed REQUEST_MAX_NUM_OUTPUT_STREAMS triple.
 *
 * @return the requested element, or {@code null} if the packed key is absent
 * @throws AssertionError if {@code key} is not one of the three output-count keys
 */
private <T> Integer getMaxNumOutputs(Key<T> key) {
    final int RAW = 0;
    final int PROC = 1;
    final int PROC_STALLING = 2;

    // The order of the elements is: (raw, proc+nonstalling, proc+stalling)
    int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);

    if (maxNumOutputs == null) {
        return null;
    }

    if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
        return maxNumOutputs[RAW];
    } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
        return maxNumOutputs[PROC];
    } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
        return maxNumOutputs[PROC_STALLING];
    } else {
        throw new AssertionError("Invalid key " + key);
    }
}

/**
 * Assemble the synthesized TONEMAP_CURVE from its three per-channel keys.
 *
 * @return the curve, or {@code null} if all three channels are absent or any
 *         single channel is missing (logged as a warning)
 */
private <T> TonemapCurve getTonemapCurve() {
    float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED);
    float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN);
    float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE);

    if (areValuesAllNull(red, green, blue)) {
        return null;
    }

    // Partially-present channels are treated as missing data, not an error.
    if (red == null || green == null || blue == null) {
        Log.w(TAG, "getTonemapCurve - missing tone curve components");
        return null;
    }
    TonemapCurve tc = new TonemapCurve(red, green, blue);
    return tc;
}

/**
 * Assemble the synthesized STATISTICS_OIS_SAMPLES array from the parallel
 * timestamp/x-shift/y-shift arrays, validating that all three agree in length.
 */
private OisSample[] getOisSamples() {
    long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS);
    float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS);
    float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS);

    if
(timestamps == null) {
    // Either all three arrays are present or none may be; anything else is a
    // malformed result from the HAL.
    if (xShifts != null) {
        throw new AssertionError("timestamps is null but xShifts is not");
    }

    if (yShifts != null) {
        throw new AssertionError("timestamps is null but yShifts is not");
    }

    return null;
}

if (xShifts == null) {
    throw new AssertionError("timestamps is not null but xShifts is");
}

if (yShifts == null) {
    throw new AssertionError("timestamps is not null but yShifts is");
}

if (xShifts.length != timestamps.length) {
    throw new AssertionError(String.format(
            "timestamps has %d entries but xShifts has %d", timestamps.length,
            xShifts.length));
}

if (yShifts.length != timestamps.length) {
    throw new AssertionError(String.format(
            "timestamps has %d entries but yShifts has %d", timestamps.length,
            yShifts.length));
}

// Zip the three parallel arrays into one OisSample per timestamp.
OisSample[] samples = new OisSample[timestamps.length];
for (int i = 0; i < timestamps.length; i++) {
    samples[i] = new OisSample(timestamps[i], xShifts[i], yShifts[i]);
}
return samples;
}

/**
 * Split the synthesized lens-intrinsics samples back into the flat timestamp
 * and intrinsics (5 floats per sample) arrays and store them under their
 * native keys. Only active behind the concert-mode feature flag.
 *
 * @return {@code true} if the samples were stored, {@code false} to let the
 *         generic setBase() path handle the value (null input or flag off)
 */
private boolean setLensIntrinsicsSamples(LensIntrinsicsSample[] samples) {
    if (samples == null) {
        return false;
    }

    if (Flags.concertMode()) {
        long[] tsArray = new long[samples.length];
        // Each sample contributes a fixed block of 5 intrinsics floats.
        float[] intrinsicsArray = new float[samples.length * 5];
        for (int i = 0; i < samples.length; i++) {
            tsArray[i] = samples[i].getTimestampNanos();
            System.arraycopy(samples[i].getLensIntrinsics(), 0, intrinsicsArray, 5 * i, 5);

        }
        setBase(CaptureResult.STATISTICS_LENS_INTRINSIC_SAMPLES, intrinsicsArray);
        setBase(CaptureResult.STATISTICS_LENS_INTRINSIC_TIMESTAMPS, tsArray);

        return true;
    } else {
        return false;
    }
}

/**
 * Assemble the synthesized lens-intrinsics samples from the flat timestamp and
 * intrinsics arrays (5 floats per sample). Returns {@code null} when the
 * concert-mode feature flag is off or no timestamps are present.
 */
private LensIntrinsicsSample[] getLensIntrinsicSamples() {
    if
(Flags.concertMode()) {
    long[] timestamps = getBase(CaptureResult.STATISTICS_LENS_INTRINSIC_TIMESTAMPS);
    float[] intrinsics = getBase(CaptureResult.STATISTICS_LENS_INTRINSIC_SAMPLES);

    // The two arrays must be jointly present and consistent: 5 intrinsics
    // floats for every timestamp entry.
    if (timestamps == null) {
        if (intrinsics != null) {
            throw new AssertionError("timestamps is null but intrinsics is not");
        }

        return null;
    }

    if (intrinsics == null) {
        throw new AssertionError("timestamps is not null but intrinsics is");
    } else if ((intrinsics.length % 5) != 0) {
        throw new AssertionError("intrinsics are not multiple of 5");
    }

    if ((intrinsics.length / 5) != timestamps.length) {
        throw new AssertionError(String.format(
                "timestamps has %d entries but intrinsics has %d", timestamps.length,
                intrinsics.length / 5));
    }

    LensIntrinsicsSample[] samples = new LensIntrinsicsSample[timestamps.length];
    for (int i = 0; i < timestamps.length; i++) {
        // Slice out this sample's 5-float intrinsics block.
        float[] currentIntrinsic = Arrays.copyOfRange(intrinsics, 5 * i, 5 * i + 5);
        samples[i] = new LensIntrinsicsSample(timestamps[i], currentIntrinsic);
    }
    return samples;
} else {
    return null;
}
}

/**
 * Assemble the synthesized extended-scene-mode capabilities from the packed
 * [mode, width, height] max-size tuples and the optional [minZoom, maxZoom]
 * range tuples advertised by the device.
 *
 * @return one Capability per advertised mode, or {@code null} if none
 * @throws AssertionError if either packed array is malformed
 */
private Capability[] getExtendedSceneModeCapabilities() {
    int[] maxSizes =
            getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES);
    float[] zoomRanges = getBase(
            CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES);
    Range<Float> zoomRange = getBase(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
    float maxDigitalZoom = getBase(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);

    if (maxSizes == null) {
        return null;
    }
    if (maxSizes.length % 3 != 0) {
        throw new AssertionError("availableExtendedSceneModeMaxSizes must be tuples of "
                + "[mode, width, height]");
    }
    int numExtendedSceneModes = maxSizes.length / 3;
    int
numExtendedSceneModeZoomRanges = 0;
if (zoomRanges != null) {
    if (zoomRanges.length % 2 != 0) {
        throw new AssertionError("availableExtendedSceneModeZoomRanges must be tuples of "
                + "[minZoom, maxZoom]");
    }
    numExtendedSceneModeZoomRanges = zoomRanges.length / 2;
    // The DISABLED mode carries no zoom-range tuple, hence the off-by-one.
    if (numExtendedSceneModes - numExtendedSceneModeZoomRanges != 1) {
        throw new AssertionError("Number of extended scene mode zoom ranges must be 1 "
                + "less than number of supported modes");
    }
}

// Fallback zoom bounds for the DISABLED mode (and when no per-mode range is
// advertised): the device's overall zoom-ratio range, else [1, maxDigitalZoom].
float modeOffMinZoomRatio = 1.0f;
float modeOffMaxZoomRatio = maxDigitalZoom;
if (zoomRange != null) {
    modeOffMinZoomRatio = zoomRange.getLower();
    modeOffMaxZoomRatio = zoomRange.getUpper();
}

// j tracks the next unconsumed zoom-range tuple; only non-DISABLED modes use one.
Capability[] capabilities = new Capability[numExtendedSceneModes];
for (int i = 0, j = 0; i < numExtendedSceneModes; i++) {
    int mode = maxSizes[3 * i];
    int width = maxSizes[3 * i + 1];
    int height = maxSizes[3 * i + 2];
    if (mode != CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED
            && j < numExtendedSceneModeZoomRanges) {
        capabilities[i] = new Capability(mode, new Size(width, height),
                new Range<Float>(zoomRanges[2 * j], zoomRanges[2 * j + 1]));
        j++;
    } else {
        capabilities[i] = new Capability(mode, new Size(width, height),
                new Range<Float>(modeOffMinZoomRatio, modeOffMaxZoomRatio));
    }
}

return capabilities;
}

// The three typed setBase overloads below unwrap the public key types to the
// shared native Key representation.
private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
    setBase(key.getNativeKey(), value);
}

private <T> void setBase(CaptureResult.Key<T> key, T value) {
    setBase(key.getNativeKey(), value);
}

private <T> void setBase(CaptureRequest.Key<T> key, T value) {
    setBase(key.getNativeKey(), value);
}

/**
 * Marshal {@code value} into the native metadata buffer under {@code key}'s
 * tag, or erase the entry when {@code value} is null.
 */
private
<T> void setBase(Key<T> key, T value) {
    int tag;
    if (key.hasTag()) {
        tag = key.getTag();
    } else {
        // First use of this key: resolve and memoize its native tag.
        tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName());
        key.cacheTag(tag);
    }
    if (value == null) {
        // Erase the entry
        writeValues(tag, /*src*/null);
        return;
    } // else update the entry to a new value

    // Marshal the managed value into a native-ordered byte buffer of exactly
    // the size the marshaler reports, then hand the bytes to native code.
    int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag);
    Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
    int size = marshaler.calculateMarshalSize(value);

    // TODO: Optimization. Cache the byte[] and reuse if the size is big enough.
    byte[] values = new byte[size];

    ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
    marshaler.marshal(value, buffer);

    writeValues(tag, values);
}

// Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden
// metadata.
private static final HashMap<Key<?>, SetCommand> sSetCommandMap =
        new HashMap<Key<?>, SetCommand>();
static {
    // Each entry routes set() for a synthesized key to its dedicated setter.
    sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setAvailableFormats((int[]) value);
                }
            });
    sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setFaceRectangles((Rect[]) value);
                }
            });
    sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setFaces((Face[])value);
                }
            });
    // Remaining command-map entries for synthesized request/result keys.
    sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
        @Override
        public <T> void setValue(CameraMetadataNative metadata, T value) {
            metadata.setTonemapCurve((TonemapCurve) value);
        }
    });
    sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() {
        @Override
        public <T> void setValue(CameraMetadataNative metadata, T value) {
            metadata.setGpsLocation((Location) value);
        }
    });
    sSetCommandMap.put(CaptureRequest.SCALER_CROP_REGION.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setScalerCropRegion((Rect) value);
                }
            });
    sSetCommandMap.put(CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setAWBRegions(value);
                }
            });
    sSetCommandMap.put(CaptureRequest.CONTROL_AF_REGIONS.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setAFRegions(value);
                }
            });
    sSetCommandMap.put(CaptureRequest.CONTROL_AE_REGIONS.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setAERegions(value);
                }
            });
    sSetCommandMap.put(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(),
            new SetCommand() {
                @Override
                public <T> void setValue(CameraMetadataNative
                        metadata, T value) {
                    metadata.setLensShadingMap((LensShadingMap) value);
                }
            });
    sSetCommandMap.put(
            CaptureResult.STATISTICS_LENS_INTRINSICS_SAMPLES.getNativeKey(),
            new SetCommand() {
                @Override
                @SuppressWarnings("unchecked")
                public <T> void setValue(CameraMetadataNative metadata, T value) {
                    metadata.setLensIntrinsicsSamples((LensIntrinsicsSample []) value);
                }
            });
}

/**
 * Store the available-formats list, translating the managed JPEG constant to
 * the native blob format before writing.
 *
 * @return {@code true} if stored here, {@code false} to let setBase() handle
 *         the null/erase case
 */
private boolean setAvailableFormats(int[] value) {
    int[] availableFormat = value;
    if (value == null) {
        // Let setBase() to handle the null value case.
        return false;
    }

    int[] newValues = new int[availableFormat.length];
    for (int i = 0; i < availableFormat.length; i++) {
        newValues[i] = availableFormat[i];
        // Managed ImageFormat.JPEG is represented natively as NATIVE_JPEG_FORMAT.
        if (availableFormat[i] == ImageFormat.JPEG) {
            newValues[i] = NATIVE_JPEG_FORMAT;
        }
    }

    setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues);
    return true;
}

/**
 * Convert Face Rectangles from managed side to native side as they have different definitions.
 * <p>
 * Managed side face rectangles are defined as: left, top, width, height.
 * Native side face rectangles are defined as: left, top, right, bottom.
 * The input face rectangle need to be converted to native side definition when set is called.
 * </p>
 *
 * @param faceRects Input face rectangles.
 * @return true if face rectangles can be set successfully. Otherwise, Let the caller
 *         (setBase) to handle it appropriately.
2061 */ setFaceRectangles(Rect[] faceRects)2062 private boolean setFaceRectangles(Rect[] faceRects) { 2063 if (faceRects == null) { 2064 return false; 2065 } 2066 2067 Rect[] newFaceRects = new Rect[faceRects.length]; 2068 for (int i = 0; i < newFaceRects.length; i++) { 2069 newFaceRects[i] = new Rect( 2070 faceRects[i].left, 2071 faceRects[i].top, 2072 faceRects[i].right + faceRects[i].left, 2073 faceRects[i].bottom + faceRects[i].top); 2074 } 2075 2076 setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects); 2077 return true; 2078 } 2079 setTonemapCurve(TonemapCurve tc)2080 private <T> boolean setTonemapCurve(TonemapCurve tc) { 2081 if (tc == null) { 2082 return false; 2083 } 2084 2085 float[][] curve = new float[3][]; 2086 for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) { 2087 int pointCount = tc.getPointCount(i); 2088 curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE]; 2089 tc.copyColorCurve(i, curve[i], 0); 2090 } 2091 setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]); 2092 setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]); 2093 setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]); 2094 2095 return true; 2096 } 2097 setScalerCropRegion(Rect cropRegion)2098 private <T> boolean setScalerCropRegion(Rect cropRegion) { 2099 if (cropRegion == null) { 2100 return false; 2101 } 2102 setBase(CaptureRequest.SCALER_CROP_REGION_SET, true); 2103 setBase(CaptureRequest.SCALER_CROP_REGION, cropRegion); 2104 return true; 2105 } 2106 setAFRegions(T afRegions)2107 private <T> boolean setAFRegions(T afRegions) { 2108 if (afRegions == null) { 2109 return false; 2110 } 2111 setBase(CaptureRequest.CONTROL_AF_REGIONS_SET, true); 2112 // The cast to CaptureRequest.Key is needed since java does not support template 2113 // specialization and we need to route this method to 2114 // setBase(CaptureRequest.Key<T> key, T value) 2115 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AF_REGIONS, afRegions); 2116 return true; 2117 } 2118 
// Flags the AE regions as explicitly set, then stores them.
private <T> boolean setAERegions(T aeRegions) {
    if (aeRegions == null) {
        return false;
    }
    setBase(CaptureRequest.CONTROL_AE_REGIONS_SET, true);
    // Raw-key cast routes this to setBase(CaptureRequest.Key<T>, T).
    setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AE_REGIONS, aeRegions);
    return true;
}

// Flags the AWB regions as explicitly set, then stores them.
private <T> boolean setAWBRegions(T awbRegions) {
    if (awbRegions == null) {
        return false;
    }
    setBase(CaptureRequest.CONTROL_AWB_REGIONS_SET, true);
    // Raw-key cast routes this to setBase(CaptureRequest.Key<T>, T).
    setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
    return true;
}

/**
 * Re-report this object's native buffer size to the runtime so GC pressure
 * tracks the actual native allocation: unregister the previously reported
 * size (if any) and register the current one.
 */
private void updateNativeAllocation() {
    long currentBufferSize = nativeGetBufferSize(mMetadataPtr);

    if (currentBufferSize != mBufferSize) {
        if (mBufferSize > 0) {
            VMRuntime.getRuntime().registerNativeFree(mBufferSize);
        }

        mBufferSize = currentBufferSize;

        if (mBufferSize > 0) {
            VMRuntime.getRuntime().registerNativeAllocation(mBufferSize);
        }
    }
}

private int mCameraId = -1;
private boolean mHasMandatoryConcurrentStreams = false;
private Size mDisplaySize = new Size(0, 0);
// Last native buffer size reported to VMRuntime; see updateNativeAllocation().
private long mBufferSize = 0;
private MultiResolutionStreamConfigurationMap mMultiResolutionStreamConfigurationMap = null;

/**
 * Set the current camera Id.
 *
 * @param cameraId Current camera id.
 *
 * @hide
 */
public void setCameraId(int cameraId) {
    mCameraId = cameraId;
}

/**
 * Set whether the metadata advertises mandatory concurrent streams.
 *
 * @param hasMandatoryConcurrentStreams whether the metadata advertises mandatory concurrent
 * streams.
 *
 * @hide
 */
public void setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams) {
    mHasMandatoryConcurrentStreams = hasMandatoryConcurrentStreams;
}

/**
 * Set the current display size.
 *
 * @param displaySize The current display size.
 *
 * @hide
 */
public void setDisplaySize(Size displaySize) {
    mDisplaySize = displaySize;
}

/**
 * Set the multi-resolution stream configuration map.
 *
 * @param multiResolutionMap The multi-resolution stream configuration map.
 *
 * @hide
 */
public void setMultiResolutionStreamConfigurationMap(
        @NonNull Map<String, StreamConfiguration[]> multiResolutionMap) {
    mMultiResolutionStreamConfigurationMap =
            new MultiResolutionStreamConfigurationMap(multiResolutionMap);
}

/**
 * Get the multi-resolution stream configuration map.
 *
 * @return The multi-resolution stream configuration map.
 *
 * @hide
 */
public MultiResolutionStreamConfigurationMap getMultiResolutionStreamConfigurationMap() {
    return mMultiResolutionStreamConfigurationMap;
}

@UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
private long mMetadataPtr; // native std::shared_ptr<CameraMetadata>*

// JNI entry points backing this wrapper. The synchronized ones serialize
// access to the shared native metadata; @FastNative ones are lightweight calls.
@FastNative
private static native long nativeAllocate();
@FastNative
private static native long nativeAllocateCopy(long ptr)
        throws NullPointerException;


@FastNative
private static native void nativeUpdate(long dst, long src);
private static synchronized native void nativeWriteToParcel(Parcel dest, long ptr);
private static synchronized native void nativeReadFromParcel(Parcel source, long ptr);
private static synchronized native void nativeSwap(long ptr, long otherPtr)
        throws NullPointerException;
@FastNative
private static native void nativeSetVendorId(long ptr, long vendorId);
private static synchronized native void nativeClose(long ptr);
private static synchronized native boolean nativeIsEmpty(long ptr);
private static synchronized native int nativeGetEntryCount(long ptr);
private static synchronized native long nativeGetBufferSize(long ptr);

@UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
private static synchronized native byte[] nativeReadValues(int tag, long ptr);
private static synchronized native void
        nativeWriteValues(int tag, byte[] src, long ptr);
private static synchronized native void nativeDump(long ptr) throws IOException; // dump to LOGD

private static synchronized native ArrayList nativeGetAllVendorKeys(long ptr, Class keyClass);
// *Local variants resolve against this instance's vendor tag provider.
@UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
private static synchronized native int nativeGetTagFromKeyLocal(long ptr, String keyName)
        throws IllegalArgumentException;
@UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
private static synchronized native int nativeGetTypeFromTagLocal(long ptr, int tag)
        throws IllegalArgumentException;
@FastNative
private static native int nativeGetTagFromKey(String keyName, long vendorId)
        throws IllegalArgumentException;
@FastNative
private static native int nativeGetTypeFromTag(int tag, long vendorId)
        throws IllegalArgumentException;

/**
 * <p>Perform a 0-copy swap of the internal metadata with another object.</p>
 *
 * <p>Useful to convert a CameraMetadata into e.g.
 * a CaptureRequest.</p>
 *
 * @param other Metadata to swap with
 * @throws NullPointerException if other was null
 * @hide
 */
public void swap(CameraMetadataNative other) {
    nativeSwap(mMetadataPtr, other.mMetadataPtr);
    // Mirror the managed-side state that shadows the native buffer.
    mCameraId = other.mCameraId;
    mHasMandatoryConcurrentStreams = other.mHasMandatoryConcurrentStreams;
    mDisplaySize = other.mDisplaySize;
    mMultiResolutionStreamConfigurationMap = other.mMultiResolutionStreamConfigurationMap;
    // Both objects' native buffer sizes changed; re-report each to the runtime.
    updateNativeAllocation();
    other.updateNativeAllocation();
}

/**
 * Set the native metadata vendor id.
 *
 * @hide
 */
public void setVendorId(long vendorId) {
    nativeSetVendorId(mMetadataPtr, vendorId);
}

/**
 * Number of entries in the native metadata buffer.
 *
 * @hide
 */
public int getEntryCount() {
    return nativeGetEntryCount(mMetadataPtr);
}

/**
 * Does this metadata contain at least 1 entry?
 *
 * @hide
 */
public boolean isEmpty() {
    return nativeIsEmpty(mMetadataPtr);
}


/**
 * Retrieves the pointer to the native shared_ptr<CameraMetadata> as a Java long.
 *
 * @hide
 */
public long getMetadataPtr() {
    return mMetadataPtr;
}

/**
 * Return a list containing keys of the given key class for all defined vendor tags.
 *
 * @hide
 */
public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) {
    if (keyClass == null) {
        throw new NullPointerException();
    }
    return (ArrayList<K>) nativeGetAllVendorKeys(mMetadataPtr, keyClass);
}

/**
 * Convert a key string into the equivalent native tag.
 *
 * @throws IllegalArgumentException if the key was not recognized
 * @throws NullPointerException if the key was null
 *
 * @hide
 */
public static int getTag(String key) {
    // Long.MAX_VALUE is the sentinel for "no specific vendor id".
    return nativeGetTagFromKey(key, Long.MAX_VALUE);
}

/**
 * Convert a key string into the equivalent native tag.
 *
 * @throws IllegalArgumentException if the key was not recognized
 * @throws NullPointerException if the key was null
 *
 * @hide
 */
public static int getTag(String key, long vendorId) {
    return nativeGetTagFromKey(key, vendorId);
}

/**
 * Get the underlying native type for a tag.
 *
 * @param tag An integer tag, see e.g. {@link #getTag}
 * @param vendorId A vendor tag provider id
 * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE}
 *
 * @hide
 */
public static int getNativeType(int tag, long vendorId) {
    return nativeGetTypeFromTag(tag, vendorId);
}

/**
 * <p>Updates the existing entry for tag with the new bytes pointed by src, erasing
 * the entry if src was null.</p>
 *
 * <p>An empty array can be passed in to update the entry to 0 elements.</p>
 *
 * @param tag An integer tag, see e.g. {@link #getTag}
 * @param src An array of bytes, or null to erase the entry
 *
 * @hide
 */
public void writeValues(int tag, byte[] src) {
    nativeWriteValues(tag, src, mMetadataPtr);
}

/**
 * <p>Returns a byte[] of data corresponding to this tag. Use a wrapped bytebuffer to unserialize
 * the data properly.</p>
 *
 * <p>An empty array can be returned to denote an existing entry with 0 elements.</p>
 *
 * @param tag An integer tag, see e.g.
 * {@link #getTag}
 *
 * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise.
 * @hide
 */
public byte[] readValues(int tag) {
    // TODO: Optimization. Native code returns a ByteBuffer instead.
    return nativeReadValues(tag, mMetadataPtr);
}

/**
 * Dumps the native metadata contents to logcat.
 *
 * <p>Visibility for testing/debugging only. The results will not
 * include any synthesized keys, as they are invisible to the native layer.</p>
 *
 * @hide
 */
public void dumpToLog() {
    try {
        nativeDump(mMetadataPtr);
    } catch (IOException e) {
        Log.wtf(TAG, "Dump logging failed", e);
    }
}

@Override
protected void finalize() throws Throwable {
    try {
        close();
    } finally {
        super.finalize();
    }
}

/**
 * Get the marshaler compatible with the {@code key} and type {@code T}.
 *
 * @throws UnsupportedOperationException
 *          if the native/managed type combination for {@code key} is not supported
 */
private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) {
    return MarshalRegistry.getMarshaler(key.getTypeReference(),
            nativeType);
}

/**
 * Register every known marshaler with the MarshalRegistry; registration order
 * determines lookup priority (earlier entries win).
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private static void registerAllMarshalers() {
    if (DEBUG) {
        Log.v(TAG, "Shall register metadata marshalers");
    }

    MarshalQueryable[] queryList = new MarshalQueryable[] {
            // marshalers for standard types
            new MarshalQueryablePrimitive(),
            new MarshalQueryableEnum(),
            new MarshalQueryableArray(),

            // pseudo standard types, that expand/narrow the native type into a managed type
            new MarshalQueryableBoolean(),
            new MarshalQueryableNativeByteToInteger(),

            // marshalers for custom types
            new MarshalQueryableRect(),
            new MarshalQueryableSize(),
            new MarshalQueryableSizeF(),
            new MarshalQueryableString(),
            new MarshalQueryableReprocessFormatsMap(),
            new MarshalQueryableRange(),
            new MarshalQueryablePair(),
            new MarshalQueryableMeteringRectangle(),
            new MarshalQueryableColorSpaceTransform(),
            new MarshalQueryableStreamConfiguration(),
            new MarshalQueryableStreamConfigurationDuration(),
            new MarshalQueryableRggbChannelVector(),
            new MarshalQueryableBlackLevelPattern(),
            new MarshalQueryableHighSpeedVideoConfiguration(),
            new MarshalQueryableRecommendedStreamConfiguration(),

            // generic parcelable marshaler (MUST BE LAST since it has lowest priority)
            new MarshalQueryableParcelable(),
    };

    for (MarshalQueryable query : queryList) {
        MarshalRegistry.registerMarshalQueryable(query);
    }
    if (DEBUG) {
        Log.v(TAG, "Registered metadata marshalers");
    }
}

/** Check if input arguments are all {@code null}.
 *
 * @param objs Input arguments for null check
 * @return {@code true} if input arguments are all {@code null}, otherwise {@code false}
 */
private static boolean areValuesAllNull(Object... objs) {
    for (Object o : objs) {
        if (o != null) return false;
    }
    return true;
}

/**
 * Return the set of physical camera ids that this logical {@link CameraDevice} is made
 * up of.
 *
 * If the camera device isn't a logical camera, return an empty set.
2491 * 2492 * @hide 2493 */ getPhysicalCameraIds()2494 public Set<String> getPhysicalCameraIds() { 2495 int[] availableCapabilities = get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 2496 if (availableCapabilities == null) { 2497 throw new AssertionError("android.request.availableCapabilities must be non-null " 2498 + "in the characteristics"); 2499 } 2500 2501 if (!ArrayUtils.contains(availableCapabilities, 2502 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)) { 2503 return Collections.emptySet(); 2504 } 2505 byte[] physicalCamIds = get(CameraCharacteristics.LOGICAL_MULTI_CAMERA_PHYSICAL_IDS); 2506 2507 String physicalCamIdString = null; 2508 try { 2509 physicalCamIdString = new String(physicalCamIds, "UTF-8"); 2510 } catch (java.io.UnsupportedEncodingException e) { 2511 throw new AssertionError("android.logicalCam.physicalIds must be UTF-8 string"); 2512 } 2513 String[] physicalCameraIdArray = physicalCamIdString.split("\0"); 2514 2515 return Collections.unmodifiableSet( 2516 new HashSet<String>(Arrays.asList(physicalCameraIdArray))); 2517 } 2518 2519 static { registerAllMarshalers()2520 registerAllMarshalers(); 2521 } 2522 } 2523