/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import java.nio.ByteBuffer;
import java.lang.AutoCloseable;

import android.graphics.Rect;

/**
 * <p>A single complete image buffer to use with a media source such as a
 * {@link MediaCodec} or a
 * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
 *
 * <p>This class allows for efficient direct application access to the pixel
 * data of the Image through one or more
 * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
 * {@link Plane} that describes the layout of the pixel data in that plane. Due
 * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
 * Images are not directly usable as UI resources.</p>
 *
 * <p>Since Images are often directly produced or consumed by hardware
 * components, they are a limited resource shared across the system, and should
 * be closed as soon as they are no longer needed.</p>
 *
 * <p>For example, when using the {@link ImageReader} class to read out Images
 * from various media sources, not closing old Image objects will prevent the
 * availability of new Images once
 * {@link ImageReader#getMaxImages the maximum outstanding image count} is
 * reached. When this happens, the function acquiring new Images will typically
 * throw an {@link IllegalStateException}.</p>
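 *
 * <p>A minimal usage sketch (assuming an {@link ImageReader} instance named
 * {@code reader} has already been set up elsewhere) that releases each Image
 * promptly via try-with-resources:</p>
 *
 * <pre>{@code
 * try (Image image = reader.acquireLatestImage()) {
 *     if (image != null) {
 *         // Access the pixel data only while the Image is open.
 *         Image.Plane[] planes = image.getPlanes();
 *         // ... process the planes ...
 *     }
 * } // The Image is closed automatically, freeing its buffer for reuse.
 * }</pre>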
 *
 * @see ImageReader
 */
public abstract class Image implements AutoCloseable {
    /**
     * @hide
     */
    protected boolean mIsImageValid = false;

    /**
     * @hide
     */
    protected Image() {
    }

    /**
     * Throw IllegalStateException if the image is invalid (already closed).
     *
     * @hide
     */
    protected void throwISEIfImageIsInvalid() {
        if (!mIsImageValid) {
            throw new IllegalStateException("Image is already closed");
        }
    }

    /**
     * Get the format for this image. This format determines the number of
     * ByteBuffers needed to represent the image, and the general layout of the
     * pixel data in each ByteBuffer.
     *
     * <p>
     * The format is one of the values from
     * {@link android.graphics.ImageFormat ImageFormat}. The mapping between the
     * formats and the planes is as follows:
     * </p>
     *
     * <table>
     * <tr>
     *   <th>Format</th>
     *   <th>Plane count</th>
     *   <th>Layout details</th>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
     *   <td>1</td>
     *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *      {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and height of the luminance
     *     plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and the full height of the luminance
     *     plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have the same width and height as the luminance
     *     plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
     *   <td>3</td>
     *   <td>An R (red) plane followed by the G (green) and B (blue) planes.
     *     All planes have the same widths and heights.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
     *   <td>4</td>
     *   <td>An R (red) plane followed by the G (green), B (blue), and
     *     A (alpha) planes. All planes have the same widths and heights.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data, with 16 bits per color
     *     sample. The details of the layout need to be queried from the source of
     *     the raw sensor data, such as
     *     {@link android.hardware.camera2.CameraDevice CameraDevice}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data with a private layout.
     *   The details of the layout are implementation specific. Row stride and
     *   pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
     *   or {@link Plane#getPixelStride()} on a RAW_PRIVATE image will cause an
     *   UnsupportedOperationException to be thrown.
     *   </td>
     * </tr>
     * </table>
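     *
     * <p>A minimal sketch (assuming {@code image} refers to an open {@code Image}
     * obtained elsewhere) that checks the reported format before touching the
     * planes:</p>
     *
     * <pre>{@code
     * int format = image.getFormat();
     * if (format == ImageFormat.YUV_420_888) {
     *     // Three planes, in order: Y, Cb (U), Cr (V).
     *     ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
     *     ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
     *     ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
     * } else if (format == ImageFormat.JPEG) {
     *     // A single plane of compressed data; row and pixel strides are 0.
     *     ByteBuffer jpegBuffer = image.getPlanes()[0].getBuffer();
     * }
     * }</pre>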
     *
     * @see android.graphics.ImageFormat
     */
    public abstract int getFormat();

    /**
     * The width of the image in pixels. For formats where some color channels
     * are subsampled, this is the width of the largest-resolution plane.
     */
    public abstract int getWidth();

    /**
     * The height of the image in pixels. For formats where some color channels
     * are subsampled, this is the height of the largest-resolution plane.
     */
    public abstract int getHeight();

    /**
     * Get the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases and therefore may not be comparable. The specific meaning
     * and timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     */
    public abstract long getTimestamp();

    /**
     * Set the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases and therefore may not be comparable. The specific meaning
     * and timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     * <p>
     * For images dequeued from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, it is up to the application to
     * set the timestamps correctly before sending them back to the
     * {@link ImageWriter}; otherwise the timestamp will be generated automatically when
     * {@link ImageWriter#queueInputImage queueInputImage()} is called.
     * </p>
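     * <p>A minimal sketch (assuming an {@link ImageWriter} instance named
     * {@code writer} and an application-chosen timestamp value
     * {@code frameTimestampNs}, both illustrative) that stamps an input image
     * before queueing it:</p>
     *
     * <pre>{@code
     * Image image = writer.dequeueInputImage();
     * // ... fill in the pixel data for the image ...
     * image.setTimestamp(frameTimestampNs); // timestamp in the application's timebase
     * writer.queueInputImage(image); // the image is closed automatically
     * }</pre>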
     *
     * @param timestamp The timestamp to be set for this image.
     */
    public void setTimestamp(long timestamp) {
        throwISEIfImageIsInvalid();
    }

    private Rect mCropRect;

    /**
     * Get the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     * </p>
     */
    public Rect getCropRect() {
        throwISEIfImageIsInvalid();

        if (mCropRect == null) {
            return new Rect(0, 0, getWidth(), getHeight());
        } else {
            return new Rect(mCropRect); // return a copy
        }
    }

    /**
     * Set the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane. The rectangle is
     * clamped to the image bounds; a rectangle that does not intersect the
     * image becomes empty.
     * </p>
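     * <p>For example, to mark only the centered quarter of the image as valid
     * (a sketch, assuming {@code image} refers to an open, writable Image):</p>
     *
     * <pre>{@code
     * int w = image.getWidth();
     * int h = image.getHeight();
     * image.setCropRect(new Rect(w / 4, h / 4, w * 3 / 4, h * 3 / 4));
     * }</pre>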
     */
    public void setCropRect(Rect cropRect) {
        throwISEIfImageIsInvalid();

        if (cropRect != null) {
            cropRect = new Rect(cropRect);  // make a copy
            if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
                cropRect.setEmpty();
            }
        }
        mCropRect = cropRect;
    }

    /**
     * Get the array of pixel planes for this Image. The number of planes is
     * determined by the format of the Image. The application will get an empty
     * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
     * PRIVATE}, because the image pixel data is not directly accessible. The
     * application can check the image format by calling
     * {@link Image#getFormat()}.
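     *
     * <p>A minimal sketch (assuming {@code image} refers to an open {@code Image})
     * that handles the empty-array case before reading plane data:</p>
     *
     * <pre>{@code
     * Image.Plane[] planes = image.getPlanes();
     * if (planes.length == 0) {
     *     // ImageFormat.PRIVATE: the pixel data is not directly accessible.
     * } else {
     *     for (Image.Plane plane : planes) {
     *         ByteBuffer buffer = plane.getBuffer();
     *         // ... read the data for this plane ...
     *     }
     * }
     * }</pre>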
     */
    public abstract Plane[] getPlanes();

    /**
     * Free up this frame for reuse.
     * <p>
     * After calling this method, calling any methods on this {@code Image} will
     * result in an {@link IllegalStateException}, and attempting to read from
     * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
     * {@link Plane#getBuffer} call will have undefined behavior. If the image
     * was obtained from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
     * image data filled by the application will be lost and the image will be
     * returned to {@link ImageWriter} for reuse. Images given to
     * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
     * closed.
     * </p>
     */
    @Override
    public abstract void close();

    /**
     * <p>
     * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return true if the image is attachable to a new owner, false if the image is still attached
     *         to its current owner, or the image is a stand-alone image and is not attachable to
     *         a new owner.
     */
    boolean isAttachable() {
        throwISEIfImageIsInvalid();

        return false;
    }

    /**
     * <p>
     * Get the owner of the {@link Image}.
     * </p>
     * <p>
     * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
     * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
     * if the image is a stand-alone image or the owner is unknown.
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return The owner of the Image.
     */
    Object getOwner() {
        throwISEIfImageIsInvalid();

        return null;
    }

    /**
     * Get native context (buffer pointer) associated with this image.
     * <p>
     * This is a package private method that is only used internally. It can be
     * used to get the native buffer pointer and pass it to native code, which may
     * pass it to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
     * JNI call.
     * </p>
     *
     * @return native context associated with this Image.
     */
    long getNativeContext() {
        throwISEIfImageIsInvalid();

        return 0;
    }

    /**
     * <p>A single color plane of image data.</p>
     *
     * <p>The number and meaning of the planes in an Image are determined by the
     * format of the Image.</p>
     *
     * <p>Once the Image has been closed, any access to the plane's
     * ByteBuffer will fail.</p>
     *
     * @see #getFormat
     */
    public static abstract class Plane {
        /**
         * @hide
         */
        protected Plane() {
        }

        /**
         * <p>The row stride for this color plane, in bytes.</p>
         *
         * <p>This is the distance between the start of two consecutive rows of
         * pixels in the image. Note that row stride is undefined for some formats
         * such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getRowStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where row stride is well defined, the row stride
         * is always greater than 0.</p>
         */
        public abstract int getRowStride();

        /**
         * <p>The distance between adjacent pixel samples, in bytes.</p>
         *
         * <p>This is the distance between two consecutive pixel values in a row
         * of pixels. It may be larger than the size of a single pixel to
         * account for interleaved image data or padded formats.
         * Note that pixel stride is undefined for some formats such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getPixelStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where pixel stride is well defined, the pixel stride
         * is always greater than 0.</p>
         */
        public abstract int getPixelStride();

        /**
         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
         * containing the frame data.</p>
         *
         * <p>In particular, the buffer returned will always have
         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
         * the underlying data could be mapped as a pointer in JNI without doing
         * any copies with {@code GetDirectBufferAddress}.</p>
         *
         * <p>For raw formats, each plane is only guaranteed to contain data
         * up to the last pixel in the last row. In other words, the stride
         * after the last row may not be mapped into the buffer. This is a
         * necessary requirement for any interleaved format.</p>
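         *
         * <p>A minimal sketch (assuming {@code plane} is a Plane of an open,
         * uncompressed 8-bit-per-sample Image, and {@code row} and {@code col} are
         * in-bounds sample coordinates for this plane) that locates one sample:</p>
         *
         * <pre>{@code
         * ByteBuffer buffer = plane.getBuffer();
         * int offset = row * plane.getRowStride() + col * plane.getPixelStride();
         * byte sample = buffer.get(offset);
         * }</pre>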
         *
         * @return the byte buffer containing the image data for this plane.
         */
        public abstract ByteBuffer getBuffer();
    }

}