1 /*
2  * Copyright (C) 2010 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.graphics;
18 
19 import android.annotation.IntDef;
20 
21 import java.lang.annotation.Retention;
22 import java.lang.annotation.RetentionPolicy;
23 
24 public class ImageFormat {
25      /** @hide */
26      @Retention(RetentionPolicy.SOURCE)
27      @IntDef(value = {
28              UNKNOWN,
29              /*
30               * Since some APIs accept either ImageFormat or PixelFormat (and the two
31               * enums do not overlap since they're both partial versions of the
32               * internal format enum), add PixelFormat values here so linting
33               * tools won't complain when method arguments annotated with
34               * ImageFormat are provided with PixelFormat values.
35               */
36              PixelFormat.RGBA_8888,
37              PixelFormat.RGBX_8888,
38              PixelFormat.RGB_888,
39              RGB_565,
40              YV12,
41              Y8,
42              Y16,
43              YCBCR_P010,
44              NV16,
45              NV21,
46              YUY2,
47              JPEG,
48              DEPTH_JPEG,
49              YUV_420_888,
50              YUV_422_888,
51              YUV_444_888,
52              FLEX_RGB_888,
53              FLEX_RGBA_8888,
54              RAW_SENSOR,
55              RAW_PRIVATE,
56              RAW10,
57              RAW12,
58              DEPTH16,
59              DEPTH_POINT_CLOUD,
60              RAW_DEPTH,
61              RAW_DEPTH10,
62              PRIVATE,
63              HEIC,
64              JPEG_R
65      })
66      public @interface Format {
67      }
68 
69     /*
70      * these constants are chosen to be binary compatible with their previous
71      * location in PixelFormat.java
72      */
73 
74     public static final int UNKNOWN = 0;
75 
76     /**
77      * RGB format used for pictures encoded as RGB_565. See
78      * {@link android.hardware.Camera.Parameters#setPictureFormat(int)}.
79      */
80     public static final int RGB_565 = 4;
81 
82     /**
83      * <p>Android YUV format.</p>
84      *
85      * <p>This format is exposed to software decoders and applications.</p>
86      *
87      * <p>YV12 is a 4:2:0 YCrCb planar format comprised of a WxH Y plane followed
88      * by (W/2) x (H/2) Cr and Cb planes.</p>
89      *
90      * <p>This format assumes
91      * <ul>
92      * <li>an even width</li>
93      * <li>an even height</li>
94      * <li>a horizontal stride multiple of 16 pixels</li>
95      * <li>a vertical stride equal to the height</li>
96      * </ul>
97      * </p>
98      *
99      * <pre> y_size = stride * height
100      * c_stride = ALIGN(stride/2, 16)
101      * c_size = c_stride * height/2
102      * size = y_size + c_size * 2
103      * cr_offset = y_size
104      * cb_offset = y_size + c_size</pre>
105      *
106      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
107      * recommended for YUV output instead.</p>
108      *
109      * <p>For the older camera API, this format is guaranteed to be supported for
110      * {@link android.hardware.Camera} preview images since API level 12; for earlier API versions,
111      * check {@link android.hardware.Camera.Parameters#getSupportedPreviewFormats()}.
112      *
113      * <p>Note that for camera preview callback use (see
114      * {@link android.hardware.Camera#setPreviewCallback}), the
115      * <var>stride</var> value is the smallest possible; that is, it is equal
116      * to:
117      *
118      * <pre>stride = ALIGN(width, 16)</pre>
119      *
120      * @see android.hardware.Camera.Parameters#setPreviewCallback
121      * @see android.hardware.Camera.Parameters#setPreviewFormat
122      * @see android.hardware.Camera.Parameters#getSupportedPreviewFormats
123      * </p>
124      */
125     public static final int YV12 = 0x32315659;
126 
127     /**
128      * <p>Android Y8 format.</p>
129      *
130      * <p>Y8 is a YUV planar format comprised of a WxH Y plane only, with each pixel
131      * being represented by 8 bits. It is equivalent to just the Y plane from {@link #YV12}
132      * format.</p>
133      *
134      * <p>This format assumes
135      * <ul>
136      * <li>an even width</li>
137      * <li>an even height</li>
138      * <li>a horizontal stride multiple of 16 pixels</li>
139      * </ul>
140      * </p>
141      *
142      * <pre> size = stride * height </pre>
143      *
144      * <p>For example, the {@link android.media.Image} object can provide data
145      * in this format from a {@link android.hardware.camera2.CameraDevice} (if
146      * supported) through a {@link android.media.ImageReader} object. The
147      * {@link android.media.Image#getPlanes() Image#getPlanes()} will return a
148      * single plane containing the pixel data. The pixel stride is always 1 in
149      * {@link android.media.Image.Plane#getPixelStride()}, and the
150      * {@link android.media.Image.Plane#getRowStride()} describes the vertical
151      * neighboring pixel distance (in bytes) between adjacent rows.</p>
152      *
153      * @see android.media.Image
154      * @see android.media.ImageReader
155      * @see android.hardware.camera2.CameraDevice
156      */
157     public static final int Y8 = 0x20203859;
158 
159     /**
160      * <p>Android Y16 format.</p>
161      *
162      * Y16 is a YUV planar format comprised of a WxH Y plane, with each pixel
163      * being represented by 16 bits. It is just like {@link #Y8}, but has 16
164      * bits per pixel (little endian).</p>
165      *
166      * <p>This format assumes
167      * <ul>
168      * <li>an even width</li>
169      * <li>an even height</li>
170      * <li>a horizontal stride multiple of 16 pixels</li>
171      * </ul>
172      * </p>
173      *
174      * <pre> y_size = stride * height </pre>
175      *
176      * <p>For example, the {@link android.media.Image} object can provide data
177      * in this format from a {@link android.hardware.camera2.CameraDevice}
178      * through a {@link android.media.ImageReader} object if this format is
179      * supported by {@link android.hardware.camera2.CameraDevice}.</p>
180      *
181      * @see android.media.Image
182      * @see android.media.ImageReader
183      * @see android.hardware.camera2.CameraDevice
184      *
185      * @hide
186      */
187     public static final int Y16 = 0x20363159;
188 
189     /**
190      * <p>Android YUV P010 format.</p>
191      *
192      * P010 is a 4:2:0 YCbCr semiplanar format comprised of a WxH Y plane
193      * followed by a Wx(H/2) CbCr plane. Each sample is represented by a 16-bit
194      * little-endian value, with the lower 6 bits set to zero.
195      *
196      * <p>For example, the {@link android.media.Image} object can provide data
197      * in this format from a {@link android.hardware.camera2.CameraDevice}
198      * through a {@link android.media.ImageReader} object if this format is
199      * supported by {@link android.hardware.camera2.CameraDevice}.</p>
200      *
201      * @see android.media.Image
202      * @see android.media.ImageReader
203      * @see android.hardware.camera2.CameraDevice
204      *
205      */
206     public static final int YCBCR_P010 = 0x36;
207 
208     /**
209      * YCbCr format, used for video.
210      *
211      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
212      * recommended for YUV output instead.</p>
213      *
214      * <p>Whether this format is supported by the old camera API can be determined by
215      * {@link android.hardware.Camera.Parameters#getSupportedPreviewFormats()}.</p>
216      *
217      */
218     public static final int NV16 = 0x10;
219 
220     /**
221      * YCrCb format used for images, which uses the NV21 encoding format.
222      *
223      * <p>This is the default format
224      * for {@link android.hardware.Camera} preview images, when not otherwise set with
225      * {@link android.hardware.Camera.Parameters#setPreviewFormat(int)}.</p>
226      *
227      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
228      * recommended for YUV output instead.</p>
229      */
230     public static final int NV21 = 0x11;
231 
232     /**
233      * YCbCr format used for images, which uses YUYV (YUY2) encoding format.
234      *
235      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
236      * recommended for YUV output instead.</p>
237      *
238      * <p>This is an alternative format for {@link android.hardware.Camera} preview images. Whether
239      * this format is supported by the camera hardware can be determined by
240      * {@link android.hardware.Camera.Parameters#getSupportedPreviewFormats()}.</p>
241      */
242     public static final int YUY2 = 0x14;
243 
244     /**
245      * Compressed JPEG format.
246      *
247      * <p>This format is always supported as an output format for the
248      * {@link android.hardware.camera2} API, and as a picture format for the older
249      * {@link android.hardware.Camera} API</p>
250      */
251     public static final int JPEG = 0x100;
252 
253     /**
254      * Depth augmented compressed JPEG format.
255      *
256      * <p>JPEG compressed main image along with XMP embedded depth metadata
257      * following ISO 16684-1:2011(E).</p>
258      */
259     public static final int DEPTH_JPEG = 0x69656963;
260 
261     /**
262      * Compressed JPEG format that includes an embedded recovery map.
263      *
264      * <p>JPEG compressed main image along with embedded recovery map following the
265      * <a href="https://developer.android.com/guide/topics/media/hdr-image-format">Ultra HDR
266      * Image format specification</a>.</p>
267      */
268     public static final int JPEG_R = 0x1005;
269 
270     /**
271      * <p>Multi-plane Android YUV 420 format</p>
272      *
273      * <p>This format is a generic YCbCr format, capable of describing any 4:2:0
274      * chroma-subsampled planar or semiplanar buffer (but not fully interleaved),
275      * with 8 bits per color sample.</p>
276      *
277      * <p>Images in this format are always represented by three separate buffers
278      * of data, one for each color plane. Additional information always
279      * accompanies the buffers, describing the row stride and the pixel stride
280      * for each plane.</p>
281      *
282      * <p>The order of planes in the array returned by
283      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
284      * plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).</p>
285      *
286      * <p>The Y-plane is guaranteed not to be interleaved with the U/V planes
287      * (in particular, pixel stride is always 1 in
288      * {@link android.media.Image.Plane#getPixelStride() yPlane.getPixelStride()}).</p>
289      *
290      * <p>The U/V planes are guaranteed to have the same row stride and pixel stride
291      * (in particular,
292      * {@link android.media.Image.Plane#getRowStride() uPlane.getRowStride()}
293      * == {@link android.media.Image.Plane#getRowStride() vPlane.getRowStride()} and
294      * {@link android.media.Image.Plane#getPixelStride() uPlane.getPixelStride()}
295      * == {@link android.media.Image.Plane#getPixelStride() vPlane.getPixelStride()};
296      * ).</p>
297      *
298      * <p>For example, the {@link android.media.Image} object can provide data
299      * in this format from a {@link android.hardware.camera2.CameraDevice}
300      * through a {@link android.media.ImageReader} object.</p>
301      *
302      * @see android.media.Image
303      * @see android.media.ImageReader
304      * @see android.hardware.camera2.CameraDevice
305      */
306     public static final int YUV_420_888 = 0x23;
307 
308     /**
309      * <p>Multi-plane Android YUV 422 format</p>
310      *
311      * <p>This format is a generic YCbCr format, capable of describing any 4:2:2
312      * chroma-subsampled (planar, semiplanar or interleaved) format,
313      * with 8 bits per color sample.</p>
314      *
315      * <p>Images in this format are always represented by three separate buffers
316      * of data, one for each color plane. Additional information always
317      * accompanies the buffers, describing the row stride and the pixel stride
318      * for each plane.</p>
319      *
320      * <p>The order of planes in the array returned by
321      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
322      * plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).</p>
323      *
324      * <p>In contrast to the {@link #YUV_420_888} format, the Y-plane may have a pixel
325      * stride greater than 1 in
326      * {@link android.media.Image.Plane#getPixelStride() yPlane.getPixelStride()}.</p>
327      *
328      * <p>The U/V planes are guaranteed to have the same row stride and pixel stride
329      * (in particular,
330      * {@link android.media.Image.Plane#getRowStride() uPlane.getRowStride()}
331      * == {@link android.media.Image.Plane#getRowStride() vPlane.getRowStride()} and
332      * {@link android.media.Image.Plane#getPixelStride() uPlane.getPixelStride()}
333      * == {@link android.media.Image.Plane#getPixelStride() vPlane.getPixelStride()};
334      * ).</p>
335      *
336      * <p>For example, the {@link android.media.Image} object can provide data
337      * in this format from a {@link android.media.MediaCodec}
338      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
339      *
340      * @see android.media.Image
341      * @see android.media.MediaCodec
342      */
343     public static final int YUV_422_888 = 0x27;
344 
345     /**
346      * <p>Multi-plane Android YUV 444 format</p>
347      *
348      * <p>This format is a generic YCbCr format, capable of describing any 4:4:4
349      * (planar, semiplanar or interleaved) format,
350      * with 8 bits per color sample.</p>
351      *
352      * <p>Images in this format are always represented by three separate buffers
353      * of data, one for each color plane. Additional information always
354      * accompanies the buffers, describing the row stride and the pixel stride
355      * for each plane.</p>
356      *
357      * <p>The order of planes in the array returned by
358      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
359      * plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).</p>
360      *
361      * <p>In contrast to the {@link #YUV_420_888} format, the Y-plane may have a pixel
362      * stride greater than 1 in
363      * {@link android.media.Image.Plane#getPixelStride() yPlane.getPixelStride()}.</p>
364      *
365      * <p>The U/V planes are guaranteed to have the same row stride and pixel stride
366      * (in particular,
367      * {@link android.media.Image.Plane#getRowStride() uPlane.getRowStride()}
368      * == {@link android.media.Image.Plane#getRowStride() vPlane.getRowStride()} and
369      * {@link android.media.Image.Plane#getPixelStride() uPlane.getPixelStride()}
370      * == {@link android.media.Image.Plane#getPixelStride() vPlane.getPixelStride()};
371      * ).</p>
372      *
373      * <p>For example, the {@link android.media.Image} object can provide data
374      * in this format from a {@link android.media.MediaCodec}
375      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
376      *
377      * @see android.media.Image
378      * @see android.media.MediaCodec
379      */
380     public static final int YUV_444_888 = 0x28;
381 
382     /**
383      * <p>Multi-plane Android RGB format</p>
384      *
385      * <p>This format is a generic RGB format, capable of describing most RGB formats,
386      * with 8 bits per color sample.</p>
387      *
388      * <p>Images in this format are always represented by three separate buffers
389      * of data, one for each color plane. Additional information always
390      * accompanies the buffers, describing the row stride and the pixel stride
391      * for each plane.</p>
392      *
393      * <p>The order of planes in the array returned by
394      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
395      * plane #0 is always R (red), plane #1 is always G (green), and plane #2 is always B
396      * (blue).</p>
397      *
398      * <p>All three planes are guaranteed to have the same row strides and pixel strides.</p>
399      *
400      * <p>For example, the {@link android.media.Image} object can provide data
401      * in this format from a {@link android.media.MediaCodec}
402      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
403      *
404      * @see android.media.Image
405      * @see android.media.MediaCodec
406      */
407     public static final int FLEX_RGB_888 = 0x29;
408 
409     /**
410      * <p>Multi-plane Android RGBA format</p>
411      *
412      * <p>This format is a generic RGBA format, capable of describing most RGBA formats,
413      * with 8 bits per color sample.</p>
414      *
415      * <p>Images in this format are always represented by four separate buffers
416      * of data, one for each color plane. Additional information always
417      * accompanies the buffers, describing the row stride and the pixel stride
418      * for each plane.</p>
419      *
420      * <p>The order of planes in the array returned by
421      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
422      * plane #0 is always R (red), plane #1 is always G (green), plane #2 is always B (blue),
423      * and plane #3 is always A (alpha). This format may represent pre-multiplied or
424      * non-premultiplied alpha.</p>
425      *
426      * <p>All four planes are guaranteed to have the same row strides and pixel strides.</p>
427      *
428      * <p>For example, the {@link android.media.Image} object can provide data
429      * in this format from a {@link android.media.MediaCodec}
430      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
431      *
432      * @see android.media.Image
433      * @see android.media.MediaCodec
434      */
435     public static final int FLEX_RGBA_8888 = 0x2A;
436 
437     /**
438      * <p>General raw camera sensor image format, usually representing a
439      * single-channel Bayer-mosaic image. Each pixel color sample is stored with
440      * 16 bits of precision.</p>
441      *
442      * <p>The layout of the color mosaic, the maximum and minimum encoding
443      * values of the raw pixel data, the color space of the image, and all other
444      * needed information to interpret a raw sensor image must be queried from
445      * the {@link android.hardware.camera2.CameraDevice} which produced the
446      * image.</p>
447      */
448     public static final int RAW_SENSOR = 0x20;
449 
450     /**
451      * <p>Private raw camera sensor image format, a single channel image with
452      * implementation dependent pixel layout.</p>
453      *
454      * <p>RAW_PRIVATE is a format for unprocessed raw image buffers coming from an
455      * image sensor. The actual structure of buffers of this format is
456      * implementation-dependent.</p>
457      *
458      */
459     public static final int RAW_PRIVATE = 0x24;
460 
461     /**
462      * <p>
463      * Android 10-bit raw format
464      * </p>
465      * <p>
466      * This is a single-plane, 10-bit per pixel, densely packed (in each row),
467      * unprocessed format, usually representing raw Bayer-pattern images coming
468      * from an image sensor.
469      * </p>
470      * <p>
471      * In an image buffer with this format, starting from the first pixel of
472      * each row, each 4 consecutive pixels are packed into 5 bytes (40 bits).
473      * Each one of the first 4 bytes contains the top 8 bits of each pixel, The
474      * fifth byte contains the 2 least significant bits of the 4 pixels, the
475      * exact layout data for each 4 consecutive pixels is illustrated below
476      * ({@code Pi[j]} stands for the jth bit of the ith pixel):
477      * </p>
478      * <table>
479      * <thead>
480      * <tr>
481      * <th align="center"></th>
482      * <th align="center">bit 7</th>
483      * <th align="center">bit 6</th>
484      * <th align="center">bit 5</th>
485      * <th align="center">bit 4</th>
486      * <th align="center">bit 3</th>
487      * <th align="center">bit 2</th>
488      * <th align="center">bit 1</th>
489      * <th align="center">bit 0</th>
490      * </tr>
491      * </thead> <tbody>
492      * <tr>
493      * <td align="center">Byte 0:</td>
494      * <td align="center">P0[9]</td>
495      * <td align="center">P0[8]</td>
496      * <td align="center">P0[7]</td>
497      * <td align="center">P0[6]</td>
498      * <td align="center">P0[5]</td>
499      * <td align="center">P0[4]</td>
500      * <td align="center">P0[3]</td>
501      * <td align="center">P0[2]</td>
502      * </tr>
503      * <tr>
504      * <td align="center">Byte 1:</td>
505      * <td align="center">P1[9]</td>
506      * <td align="center">P1[8]</td>
507      * <td align="center">P1[7]</td>
508      * <td align="center">P1[6]</td>
509      * <td align="center">P1[5]</td>
510      * <td align="center">P1[4]</td>
511      * <td align="center">P1[3]</td>
512      * <td align="center">P1[2]</td>
513      * </tr>
514      * <tr>
515      * <td align="center">Byte 2:</td>
516      * <td align="center">P2[9]</td>
517      * <td align="center">P2[8]</td>
518      * <td align="center">P2[7]</td>
519      * <td align="center">P2[6]</td>
520      * <td align="center">P2[5]</td>
521      * <td align="center">P2[4]</td>
522      * <td align="center">P2[3]</td>
523      * <td align="center">P2[2]</td>
524      * </tr>
525      * <tr>
526      * <td align="center">Byte 3:</td>
527      * <td align="center">P3[9]</td>
528      * <td align="center">P3[8]</td>
529      * <td align="center">P3[7]</td>
530      * <td align="center">P3[6]</td>
531      * <td align="center">P3[5]</td>
532      * <td align="center">P3[4]</td>
533      * <td align="center">P3[3]</td>
534      * <td align="center">P3[2]</td>
535      * </tr>
536      * <tr>
537      * <td align="center">Byte 4:</td>
538      * <td align="center">P3[1]</td>
539      * <td align="center">P3[0]</td>
540      * <td align="center">P2[1]</td>
541      * <td align="center">P2[0]</td>
542      * <td align="center">P1[1]</td>
543      * <td align="center">P1[0]</td>
544      * <td align="center">P0[1]</td>
545      * <td align="center">P0[0]</td>
546      * </tr>
547      * </tbody>
548      * </table>
549      * <p>
550      * This format assumes
551      * <ul>
552      * <li>a width multiple of 4 pixels</li>
553      * <li>an even height</li>
554      * </ul>
555      * </p>
556      *
557      * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
558      * not pixels.
559      *
560      * <p>
561      * Since this is a densely packed format, the pixel stride is always 0. The
562      * application must use the pixel data layout defined in above table to
563      * access each row data. When row stride is equal to {@code width * (10 / 8)}, there
564      * will be no padding bytes at the end of each row, the entire image data is
565      * densely packed. When stride is larger than {@code width * (10 / 8)}, padding
566      * bytes will be present at the end of each row.
567      * </p>
568      * <p>
569      * For example, the {@link android.media.Image} object can provide data in
570      * this format from a {@link android.hardware.camera2.CameraDevice} (if
571      * supported) through a {@link android.media.ImageReader} object. The
572      * {@link android.media.Image#getPlanes() Image#getPlanes()} will return a
573      * single plane containing the pixel data. The pixel stride is always 0 in
574      * {@link android.media.Image.Plane#getPixelStride()}, and the
575      * {@link android.media.Image.Plane#getRowStride()} describes the vertical
576      * neighboring pixel distance (in bytes) between adjacent rows.
577      * </p>
578      *
579      * @see android.media.Image
580      * @see android.media.ImageReader
581      * @see android.hardware.camera2.CameraDevice
582      */
583     public static final int RAW10 = 0x25;
584 
585     /**
586      * <p>
587      * Android 12-bit raw format
588      * </p>
589      * <p>
590      * This is a single-plane, 12-bit per pixel, densely packed (in each row),
591      * unprocessed format, usually representing raw Bayer-pattern images coming
592      * from an image sensor.
593      * </p>
594      * <p>
595      * In an image buffer with this format, starting from the first pixel of each
596      * row, each two consecutive pixels are packed into 3 bytes (24 bits). The first
597      * and second byte contains the top 8 bits of first and second pixel. The third
598      * byte contains the 4 least significant bits of the two pixels, the exact layout
599      * data for each two consecutive pixels is illustrated below (Pi[j] stands for
600      * the jth bit of the ith pixel):
601      * </p>
602      * <table>
603      * <thead>
604      * <tr>
605      * <th align="center"></th>
606      * <th align="center">bit 7</th>
607      * <th align="center">bit 6</th>
608      * <th align="center">bit 5</th>
609      * <th align="center">bit 4</th>
610      * <th align="center">bit 3</th>
611      * <th align="center">bit 2</th>
612      * <th align="center">bit 1</th>
613      * <th align="center">bit 0</th>
614      * </tr>
615      * </thead> <tbody>
616      * <tr>
617      * <td align="center">Byte 0:</td>
618      * <td align="center">P0[11]</td>
619      * <td align="center">P0[10]</td>
620      * <td align="center">P0[ 9]</td>
621      * <td align="center">P0[ 8]</td>
622      * <td align="center">P0[ 7]</td>
623      * <td align="center">P0[ 6]</td>
624      * <td align="center">P0[ 5]</td>
625      * <td align="center">P0[ 4]</td>
626      * </tr>
627      * <tr>
628      * <td align="center">Byte 1:</td>
629      * <td align="center">P1[11]</td>
630      * <td align="center">P1[10]</td>
631      * <td align="center">P1[ 9]</td>
632      * <td align="center">P1[ 8]</td>
633      * <td align="center">P1[ 7]</td>
634      * <td align="center">P1[ 6]</td>
635      * <td align="center">P1[ 5]</td>
636      * <td align="center">P1[ 4]</td>
637      * </tr>
638      * <tr>
639      * <td align="center">Byte 2:</td>
640      * <td align="center">P1[ 3]</td>
641      * <td align="center">P1[ 2]</td>
642      * <td align="center">P1[ 1]</td>
643      * <td align="center">P1[ 0]</td>
644      * <td align="center">P0[ 3]</td>
645      * <td align="center">P0[ 2]</td>
646      * <td align="center">P0[ 1]</td>
647      * <td align="center">P0[ 0]</td>
648      * </tr>
649      * </tbody>
650      * </table>
651      * <p>
652      * This format assumes
653      * <ul>
654      * <li>a width multiple of 4 pixels</li>
655      * <li>an even height</li>
656      * </ul>
657      * </p>
658      *
659      * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
660      * not pixels.
661      *
662      * <p>
663      * Since this is a densely packed format, the pixel stride is always 0. The
664      * application must use the pixel data layout defined in above table to
665      * access each row data. When row stride is equal to {@code width * (12 / 8)}, there
666      * will be no padding bytes at the end of each row, the entire image data is
667      * densely packed. When stride is larger than {@code width * (12 / 8)}, padding
668      * bytes will be present at the end of each row.
669      * </p>
670      * <p>
671      * For example, the {@link android.media.Image} object can provide data in
672      * this format from a {@link android.hardware.camera2.CameraDevice} (if
673      * supported) through a {@link android.media.ImageReader} object. The
674      * {@link android.media.Image#getPlanes() Image#getPlanes()} will return a
675      * single plane containing the pixel data. The pixel stride is always 0 in
676      * {@link android.media.Image.Plane#getPixelStride()}, and the
677      * {@link android.media.Image.Plane#getRowStride()} describes the vertical
678      * neighboring pixel distance (in bytes) between adjacent rows.
679      * </p>
680      *
681      * @see android.media.Image
682      * @see android.media.ImageReader
683      * @see android.hardware.camera2.CameraDevice
684      */
685     public static final int RAW12 = 0x26;
686 
687     /**
688      * <p>Android dense depth image format.</p>
689      *
690      * <p>Each pixel is 16 bits, representing a depth ranging measurement from a depth camera or
691      * similar sensor. The 16-bit sample consists of a confidence value and the actual ranging
692      * measurement.</p>
693      *
694      * <p>The confidence value is an estimate of correctness for this sample.  It is encoded in the
695      * 3 most significant bits of the sample, with a value of 0 representing 100% confidence, a
696      * value of 1 representing 0% confidence, a value of 2 representing 1/7, a value of 3
697      * representing 2/7, and so on.</p>
698      *
699      * <p>As an example, the following sample extracts the range and confidence from the first pixel
700      * of a DEPTH16-format {@link android.media.Image}, and converts the confidence to a
701      * floating-point value between 0 and 1.f inclusive, with 1.f representing maximum confidence:
702      *
703      * <pre>
704      *    ShortBuffer shortDepthBuffer = img.getPlanes()[0].getBuffer().asShortBuffer();
705      *    short depthSample = shortDepthBuffer.get()
706      *    short depthRange = (short) (depthSample & 0x1FFF);
707      *    short depthConfidence = (short) ((depthSample >> 13) & 0x7);
708      *    float depthPercentage = depthConfidence == 0 ? 1.f : (depthConfidence - 1) / 7.f;
709      * </pre>
710      * </p>
711      *
712      * <p>This format assumes
713      * <ul>
714      * <li>an even width</li>
715      * <li>an even height</li>
716      * <li>a horizontal stride multiple of 16 pixels</li>
717      * </ul>
718      * </p>
719      *
720      * <pre> y_size = stride * height </pre>
721      *
722      * When produced by a camera, the units for the range are millimeters.
723      */
724     public static final int DEPTH16 = 0x44363159;
725 
726     /**
727      * Android sparse depth point cloud format.
728      *
729      * <p>A variable-length list of 3D points plus a confidence value, with each point represented
730      * by four floats; first the X, Y, Z position coordinates, and then the confidence value.</p>
731      *
732      * <p>The number of points is {@code (size of the buffer in bytes) / 16}.
733      *
734      * <p>The coordinate system and units of the position values depend on the source of the point
735      * cloud data. The confidence value is between 0.f and 1.f, inclusive, with 0 representing 0%
736      * confidence and 1.f representing 100% confidence in the measured position values.</p>
737      *
738      * <p>As an example, the following code extracts the first depth point in a DEPTH_POINT_CLOUD
739      * format {@link android.media.Image}:
740      * <pre>
741      *    FloatBuffer floatDepthBuffer = img.getPlanes()[0].getBuffer().asFloatBuffer();
742      *    float x = floatDepthBuffer.get();
743      *    float y = floatDepthBuffer.get();
744      *    float z = floatDepthBuffer.get();
745      *    float confidence = floatDepthBuffer.get();
746      * </pre>
747      *
748      * For camera devices that support the
749      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT DEPTH_OUTPUT}
750      * capability, DEPTH_POINT_CLOUD coordinates have units of meters, and the coordinate system is
751      * defined by the camera's pose transforms:
752      * {@link android.hardware.camera2.CameraCharacteristics#LENS_POSE_TRANSLATION} and
753      * {@link android.hardware.camera2.CameraCharacteristics#LENS_POSE_ROTATION}. That means the origin is
754      * the optical center of the camera device, and the positive Z axis points along the camera's optical axis,
755      * toward the scene.
756      */
757     public static final int DEPTH_POINT_CLOUD = 0x101;
758 
    /**
     * Unprocessed implementation-dependent raw
     * depth measurements, opaque with 16 bit
     * samples.
     *
     * <p>Pixel layout is device-specific; this format is not part of the
     * public {@code ImageFormat} API surface.</p>
     *
     * @hide
     */
    public static final int RAW_DEPTH = 0x1002;
767 
    /**
     * Unprocessed implementation-dependent raw
     * depth measurements, opaque with 10 bit
     * samples and device specific bit layout.
     *
     * <p>Like {@link #RAW_DEPTH}, the layout is device-specific and this
     * format is not part of the public {@code ImageFormat} API surface.</p>
     *
     * @hide
     */
    public static final int RAW_DEPTH10 = 0x1003;
776 
    /**
     * Android private opaque image format.
     * <p>
     * The choices of the actual format and pixel data layout are entirely up to
     * the device-specific and framework internal implementations, and may vary
     * depending on use cases even for the same device. The buffers of this
     * format can be produced by components like
     * {@link android.media.ImageWriter ImageWriter} , and interpreted correctly
     * by consumers like {@link android.hardware.camera2.CameraDevice
     * CameraDevice} based on the device/framework private information. However,
     * these buffers are not directly accessible to the application.
     * </p>
     * <p>
     * When an {@link android.media.Image Image} of this format is obtained from
     * an {@link android.media.ImageReader ImageReader} or
     * {@link android.media.ImageWriter ImageWriter}, the
     * {@link android.media.Image#getPlanes() getPlanes()} method will return an
     * empty {@link android.media.Image.Plane Plane} array.
     * </p>
     * <p>
     * If a buffer of this format is to be used as an OpenGL ES texture, the
     * framework will assume that sampling the texture will always return an
     * alpha value of 1.0 (i.e. the buffer contains only opaque pixel values).
     * </p>
     */
    // NOTE(review): 0x22 presumably mirrors the HAL's implementation-defined
    // pixel format value — confirm against the internal format enum.
    public static final int PRIVATE = 0x22;
803 
    /**
     * Compressed HEIC format.
     *
     * <p>This format defines the HEIC brand of High Efficiency Image File
     * Format as described in ISO/IEC 23008-12.</p>
     */
    // The constant's bytes 0x48 0x45 0x49 0x46, read most-significant byte
    // first, spell ASCII "HEIF".
    public static final int HEIC = 0x48454946;
811 
812     /**
813      * Use this function to retrieve the number of bits per pixel of an
814      * ImageFormat.
815      *
816      * @param format
817      * @return the number of bits per pixel of the given format or -1 if the
818      *         format doesn't exist or is not supported.
819      */
getBitsPerPixel(@ormat int format)820     public static int getBitsPerPixel(@Format int format) {
821         switch (format) {
822             case RGB_565:
823                 return 16;
824             case NV16:
825                 return 16;
826             case YUY2:
827                 return 16;
828             case YV12:
829                 return 12;
830             case Y8:
831                 return 8;
832             case Y16:
833             case DEPTH16:
834                 return 16;
835             case NV21:
836                 return 12;
837             case YUV_420_888:
838                 return 12;
839             case YUV_422_888:
840                 return 16;
841             case YUV_444_888:
842                 return 24;
843             case FLEX_RGB_888:
844                 return 24;
845             case FLEX_RGBA_8888:
846                 return 32;
847             case RAW_DEPTH:
848             case RAW_SENSOR:
849                 return 16;
850             case YCBCR_P010:
851                 return 24;
852             case RAW_DEPTH10:
853             case RAW10:
854                 return 10;
855             case RAW12:
856                 return 12;
857         }
858         return -1;
859     }
860 
861     /**
862      * Determine whether or not this is a public-visible {@code format}.
863      *
864      * <p>In particular, {@code @hide} formats will return {@code false}.</p>
865      *
866      * <p>Any other formats (including UNKNOWN) will return {@code false}.</p>
867      *
868      * @param format an integer format
869      * @return a boolean
870      *
871      * @hide
872      */
isPublicFormat(@ormat int format)873     public static boolean isPublicFormat(@Format int format) {
874         switch (format) {
875             case RGB_565:
876             case NV16:
877             case YUY2:
878             case YV12:
879             case JPEG:
880             case NV21:
881             case YUV_420_888:
882             case YUV_422_888:
883             case YUV_444_888:
884             case YCBCR_P010:
885             case FLEX_RGB_888:
886             case FLEX_RGBA_8888:
887             case RAW_SENSOR:
888             case RAW_PRIVATE:
889             case RAW10:
890             case RAW12:
891             case DEPTH16:
892             case DEPTH_POINT_CLOUD:
893             case PRIVATE:
894             case RAW_DEPTH:
895             case RAW_DEPTH10:
896             case Y8:
897             case DEPTH_JPEG:
898             case HEIC:
899             case JPEG_R:
900                 return true;
901         }
902 
903         return false;
904     }
905 }
906