1 /*
2  * Copyright (C) 2023 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define FAILURE_DEBUG_PREFIX "FakeRotatingCamera"
18 
19 #include <log/log.h>
20 #include <android-base/properties.h>
21 #include <system/camera_metadata.h>
22 #include <ui/GraphicBuffer.h>
23 #include <ui/GraphicBufferAllocator.h>
24 #include <ui/GraphicBufferMapper.h>
25 
26 #include <gralloc_cb_bp.h>
27 #include <qemu_pipe_bp.h>
28 
29 #define GL_GLEXT_PROTOTYPES
30 #define EGL_EGLEXT_PROTOTYPES
31 #include <EGL/egl.h>
32 #include <EGL/eglext.h>
33 #include <GLES2/gl2.h>
34 #include <GLES2/gl2ext.h>
35 #undef EGL_EGLEXT_PROTOTYPES
36 #undef GL_GLEXT_PROTOTYPES
37 
38 #include "acircles_pattern_512_512.h"
39 #include "converters.h"
40 #include "debug.h"
41 #include "FakeRotatingCamera.h"
42 #include "jpeg.h"
43 #include "metadata_utils.h"
44 #include "utils.h"
45 #include "yuv.h"
46 
47 namespace android {
48 namespace hardware {
49 namespace camera {
50 namespace provider {
51 namespace implementation {
52 namespace hw {
53 
54 using base::unique_fd;
55 
56 namespace {
57 constexpr char kClass[] = "FakeRotatingCamera";
58 
59 constexpr int kMinFPS = 2;
60 constexpr int kMedFPS = 15;
61 constexpr int kMaxFPS = 30;
62 constexpr int64_t kOneSecondNs = 1000000000;
63 
64 constexpr float kDefaultFocalLength = 2.8;
65 
66 constexpr int64_t kMinFrameDurationNs = kOneSecondNs / kMaxFPS;
67 constexpr int64_t kMaxFrameDurationNs = kOneSecondNs / kMinFPS;
68 constexpr int64_t kDefaultFrameDurationNs = kOneSecondNs / kMedFPS;
69 
70 constexpr int64_t kDefaultSensorExposureTimeNs = kOneSecondNs / 100;
71 constexpr int64_t kMinSensorExposureTimeNs = kDefaultSensorExposureTimeNs / 100;
72 constexpr int64_t kMaxSensorExposureTimeNs = kDefaultSensorExposureTimeNs * 10;
73 
74 constexpr int32_t kDefaultJpegQuality = 85;
75 
// Combines two BufferUsage flag sets. BufferUsage is a scoped enum, so it
// has no built-in operator|; go through the underlying 64-bit representation.
constexpr BufferUsage usageOr(const BufferUsage a, const BufferUsage b) {
    const uint64_t rawA = static_cast<uint64_t>(a);
    const uint64_t rawB = static_cast<uint64_t>(b);
    return static_cast<BufferUsage>(rawA | rawB);
}
79 
// Returns true when the two BufferUsage flag sets share at least one bit
// (i.e. `a` contains any of the flags in `b`).
constexpr bool usageTest(const BufferUsage a, const BufferUsage b) {
    const uint64_t rawA = static_cast<uint64_t>(a);
    const uint64_t rawB = static_cast<uint64_t>(b);
    return (rawA & rawB) != 0;
}
83 
// Packs normalized [0, 1] float RGB components into a single RGB565 texel:
// 5 bits of red (high), 6 bits of green, 5 bits of blue (low).
constexpr uint16_t toR5G6B5(float r, float g, float b) {
    const uint16_t r5 = uint16_t(r * 31);
    const uint16_t g6 = uint16_t(g * 63);
    const uint16_t b5 = uint16_t(b * 31);
    return (r5 << 11) | (g6 << 5) | b5;
}
87 
// Packs 8-bit RGBA components into one 32-bit texel with R in the lowest
// byte and A in the highest (little-endian RGBA8888 memory layout).
constexpr uint32_t toR8G8B8A8(uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
    uint32_t texel = uint32_t(a);
    texel = (texel << 8) | b;
    texel = (texel << 8) | g;
    texel = (texel << 8) | r;
    return texel;
}
91 
// Converts an angle from degrees to radians.
constexpr double degrees2rad(const double degreeValue) {
    // Keep the multiply-then-divide order so results match exactly.
    return degreeValue * M_PI / 180.0;
}
95 
// This texture is useful to debug camera orientation and image aspect ratio:
// it is an 8x8 RGB565 bitmap of a letter-'A'-like glyph, so flips and
// rotations of the rendered image are immediately visible.
abc3d::AutoTexture loadTestPatternTextureA() {
    // Two-entry palette: dark gray background (B) and red strokes (R).
    constexpr uint16_t B = toR5G6B5(.4, .4, .4);
    constexpr uint16_t R = toR5G6B5( 1, .1, .1);

    static const uint16_t texels[] = {
        B, R, R, R, R, R, B, B,
        R, B, B, B, B, B, R, B,
        B, B, B, B, B, B, R, B,
        B, R, R, R, R, R, B, B,
        R, B, B, B, B, B, R, B,
        R, B, B, B, B, B, R, B,
        R, B, B, B, B, B, R, B,
        B, R, R, R, R, R, B, R,
    };

    abc3d::AutoTexture tex(GL_TEXTURE_2D, GL_RGB, 8, 8,
                           GL_RGB, GL_UNSIGNED_SHORT_5_6_5, texels);
    // NEAREST filtering keeps the hard pixel edges of the debug glyph sharp.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    return tex;
}
119 
// This texture is useful to debug camera dataspace: an 8x8 RGBA8888 bitmap
// where each row is an intensity ramp of one color family (red, yellow,
// green, cyan, blue, magenta, orange, gray), so color-space/dataspace
// mix-ups show up as visibly wrong hues or levels.
abc3d::AutoTexture loadTestPatternTextureColors() {
    static const uint32_t texels[] = {
        toR8G8B8A8(32, 0, 0, 255), toR8G8B8A8(64, 0, 0, 255), toR8G8B8A8(96, 0, 0, 255), toR8G8B8A8(128, 0, 0, 255),
        toR8G8B8A8(160, 0, 0, 255), toR8G8B8A8(192, 0, 0, 255), toR8G8B8A8(224, 0, 0, 255), toR8G8B8A8(255, 0, 0, 255),

        toR8G8B8A8(32, 32, 0, 255), toR8G8B8A8(64, 64, 0, 255), toR8G8B8A8(96, 96, 0, 255), toR8G8B8A8(128, 128, 0, 255),
        toR8G8B8A8(160, 160, 0, 255), toR8G8B8A8(192, 192, 0, 255), toR8G8B8A8(224, 224, 0, 255), toR8G8B8A8(255, 255, 0, 255),

        toR8G8B8A8(0, 32, 0, 255), toR8G8B8A8(0, 64, 0, 255), toR8G8B8A8(0, 96, 0, 255), toR8G8B8A8(0, 128, 0, 255),
        toR8G8B8A8(0, 160, 0, 255), toR8G8B8A8(0, 192, 0, 255), toR8G8B8A8(0, 224, 0, 255), toR8G8B8A8(0, 255, 0, 255),

        toR8G8B8A8(0, 32, 32, 255), toR8G8B8A8(0, 64, 64, 255), toR8G8B8A8(0, 96, 96, 255), toR8G8B8A8(0, 128, 128, 255),
        toR8G8B8A8(0, 160, 160, 255), toR8G8B8A8(0, 192, 192, 255), toR8G8B8A8(0, 224, 224, 255), toR8G8B8A8(0, 255, 255, 255),

        toR8G8B8A8(0, 0, 32, 255), toR8G8B8A8(0, 0, 64, 255), toR8G8B8A8(0, 0, 96, 255), toR8G8B8A8(0, 0, 128, 255),
        toR8G8B8A8(0, 0, 160, 255), toR8G8B8A8(0, 0, 192, 255), toR8G8B8A8(0, 0, 224, 255), toR8G8B8A8(0, 0, 255, 255),

        toR8G8B8A8(32, 0, 32, 255), toR8G8B8A8(64, 0, 64, 255), toR8G8B8A8(96, 0, 96, 255), toR8G8B8A8(128, 0, 128, 255),
        toR8G8B8A8(160, 0, 160, 255), toR8G8B8A8(192, 0, 192, 255), toR8G8B8A8(224, 0, 224, 255), toR8G8B8A8(255, 0, 255, 255),

        // Note: the 4th texel of this row is pure white, breaking the ramp —
        // presumably an intentional marker; confirm before "fixing".
        toR8G8B8A8(32, 128, 0, 255), toR8G8B8A8(64, 128, 0, 255), toR8G8B8A8(96, 128, 0, 255), toR8G8B8A8(255, 255, 255, 255),
        toR8G8B8A8(160, 128, 0, 255), toR8G8B8A8(192, 128, 0, 255), toR8G8B8A8(224, 128, 0, 255), toR8G8B8A8(255, 128, 0, 255),

        toR8G8B8A8(0, 0, 0, 255), toR8G8B8A8(32, 32, 32, 255), toR8G8B8A8(64, 64, 64, 255), toR8G8B8A8(96, 96, 96, 255),
        toR8G8B8A8(128, 128, 128, 255), toR8G8B8A8(160, 160, 160, 255), toR8G8B8A8(192, 192, 192, 255), toR8G8B8A8(224, 224, 224, 255),
    };

    abc3d::AutoTexture tex(GL_TEXTURE_2D, GL_RGBA, 8, 8,
                           GL_RGBA, GL_UNSIGNED_BYTE, texels);
    // NEAREST filtering so the discrete color patches are not blended.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    return tex;
}
155 
// This texture is used to pass CtsVerifier: it decodes the run-length
// encoded "acircles" calibration pattern (kAcirclesPatternRLE from
// acircles_pattern_512_512.h) into a square RGB565 texture of side
// kAcirclesPatternWidth.
abc3d::AutoTexture loadTestPatternTextureAcircles() {
    // Palette indices 0..3 are selectable by the RLE stream; index 4 (white)
    // is the implicit color of all "long run" codes.
    constexpr uint16_t kPalette[] = {
        toR5G6B5(0, 0, 0),
        toR5G6B5(.25, .25, .25),
        toR5G6B5(.5, .5, .5),
        toR5G6B5(1, 1, 0),
        toR5G6B5(1, 1, 1),
    };

    std::vector<uint16_t> texels;
    texels.reserve(kAcirclesPatternWidth * kAcirclesPatternWidth);

    // RLE decoding. Each code byte `x` is one of:
    //   bit0 == 1: short colored run — count = (x >> 3) + 1,
    //              palette index = (x >> 1) & 3;
    //   bit0 == 0, bit1 == 1: long white run — count uses the next byte as
    //              high bits: ((next << 6) | (x >> 2)) + 1;
    //   bit0 == 0, bit1 == 0: short white run — count = (x >> 2) + 1.
    auto i = std::begin(kAcirclesPatternRLE);
    const auto end = std::end(kAcirclesPatternRLE);
    while (i < end) {
        const unsigned x = *i;
        ++i;
        unsigned n;
        uint16_t color;
        if (x & 1) {
            n = (x >> 3) + 1;
            color = kPalette[(x >> 1) & 3];
        } else {
            if (x & 2) {
                n = ((unsigned(*i) << 6) | (x >> 2)) + 1;
                ++i;
            } else {
                n = (x >> 2) + 1;
            }
            color = kPalette[4];
        }
        texels.insert(texels.end(), n, color);
    }

    abc3d::AutoTexture tex(GL_TEXTURE_2D, GL_RGB,
                           kAcirclesPatternWidth, kAcirclesPatternWidth,
                           GL_RGB, GL_UNSIGNED_SHORT_5_6_5, texels.data());
    // LINEAR filtering: this pattern is meant to look like a real scene, so
    // smooth sampling is desirable (unlike the NEAREST debug patterns).
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    return tex;
}
199 
loadTestPatternTexture()200 abc3d::AutoTexture loadTestPatternTexture() {
201     std::string valueStr =
202         base::GetProperty("vendor.qemu.FakeRotatingCamera.scene", "");
203     if (valueStr.empty()) {
204         valueStr =
205             base::GetProperty("ro.boot.qemu.FakeRotatingCamera.scene", "");
206     }
207 
208     if (strcmp(valueStr.c_str(), "a") == 0) {
209         return loadTestPatternTextureA();
210     } else if (strcmp(valueStr.c_str(), "colors") == 0) {
211         return loadTestPatternTextureColors();
212     } else {
213         return loadTestPatternTextureAcircles();
214     }
215 }
216 
// Wraps a raw NV21 image (`nv21data`, sized for `imageSize`) in the
// android_ycbcr plane layout expected by HwCamera::compressJpeg and
// compresses it into `jpegBuffer` (capacity `jpegBufferSize`), embedding
// JPEG-related fields from `metadata`. Returns true on success.
bool compressNV21IntoJpeg(const Rect<uint16_t> imageSize,
                          const uint8_t* nv21data,
                          const CameraMetadata& metadata,
                          const native_handle_t* jpegBuffer,
                          const size_t jpegBufferSize) {
    // NV21init only computes plane pointers/strides into the existing
    // buffer; the const_cast is safe because compression only reads it.
    const android_ycbcr imageYcbcr = yuv::NV21init(imageSize.width, imageSize.height,
                                                   const_cast<uint8_t*>(nv21data));

    return HwCamera::compressJpeg(imageSize, imageYcbcr, metadata,
                                  jpegBuffer, jpegBufferSize);
}
228 
229 }  // namespace
230 
// Constructs the fake camera; `isBackFacing` selects the advertised lens
// facing. The AFStateMachine is seeded with tuning constants (200, 1, 2) —
// see AFStateMachine for their meaning (presumably timing/step parameters;
// confirm against its declaration).
FakeRotatingCamera::FakeRotatingCamera(const bool isBackFacing)
        : mIsBackFacing(isBackFacing)
        , mAFStateMachine(200, 1, 2) {}
234 
// Tears down everything: stream cache, GL objects, EGL context, and the
// qemu sensor pipe (closeImpl(true) == release all resources).
FakeRotatingCamera::~FakeRotatingCamera() {
    closeImpl(true);
}
238 
// Maps a client-requested (format, usage, dataspace) triple to what this HAL
// actually implements. Returns {actualFormat, usage | HAL-required bits,
// actualDataspace, N} where N appears to be the max number of in-flight
// buffers for the stream (8 for video-encoder consumers, 4 otherwise), or a
// negative kError* value when the request is rejected.
std::tuple<PixelFormat, BufferUsage, Dataspace, int32_t>
FakeRotatingCamera::overrideStreamParams(const PixelFormat format,
                                         const BufferUsage usage,
                                         const Dataspace dataspace) const {
    // Extra usage bits the HAL needs on top of the client's request:
    // RGBA streams are rendered into by the GPU directly, while YUV and
    // BLOB (JPEG) streams are written on the CPU after conversion.
    constexpr BufferUsage kRgbaExtraUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                                    BufferUsage::GPU_RENDER_TARGET);
    constexpr BufferUsage kYuvExtraUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                                   BufferUsage::CPU_WRITE_OFTEN);
    constexpr BufferUsage kBlobExtraUsage = usageOr(BufferUsage::CAMERA_OUTPUT,
                                                    BufferUsage::CPU_WRITE_OFTEN);

    switch (format) {
    case PixelFormat::YCBCR_420_888:
        return {PixelFormat::YCBCR_420_888, usageOr(usage, kYuvExtraUsage),
                Dataspace::JFIF, (usageTest(usage, BufferUsage::VIDEO_ENCODER) ? 8 : 4)};

    case PixelFormat::IMPLEMENTATION_DEFINED:
        // Resolve IMPLEMENTATION_DEFINED by consumer: encoders get YUV,
        // everything else gets RGBA.
        if (usageTest(usage, BufferUsage::VIDEO_ENCODER)) {
            return {PixelFormat::YCBCR_420_888, usageOr(usage, kYuvExtraUsage),
                    Dataspace::JFIF, 8};
        } else {
            return {PixelFormat::RGBA_8888, usageOr(usage, kRgbaExtraUsage),
                    Dataspace::UNKNOWN, 4};
        }

    case PixelFormat::RGBA_8888:
        return {PixelFormat::RGBA_8888, usageOr(usage, kRgbaExtraUsage),
                Dataspace::UNKNOWN, (usageTest(usage, BufferUsage::VIDEO_ENCODER) ? 8 : 4)};

    case PixelFormat::BLOB:
        // BLOB is only supported for JPEG (JFIF dataspace).
        switch (dataspace) {
        case Dataspace::JFIF:
            return {PixelFormat::BLOB, usageOr(usage, kBlobExtraUsage),
                    Dataspace::JFIF, 4};  // JPEG
        default:
            return {format, usage, dataspace, FAILURE(kErrorBadDataspace)};
        }

    default:
        return {format, usage, dataspace, FAILURE(kErrorBadFormat)};
    }
}
281 
// Configures a capture session: applies session parameters, (lazily) opens
// the qemu sensor pipe, ensures the GL pipeline is up, and builds the
// per-stream info cache. For every non-RGBA stream an intermediate RGBA
// gralloc buffer is allocated, used as the GPU render target from which the
// CPU later converts to YUV/JPEG. Returns false on any failure.
bool FakeRotatingCamera::configure(const CameraMetadata& sessionParams,
                                   size_t nStreams,
                                   const Stream* streams,
                                   const HalStream* halStreams) {
    // Drop the previous session's stream state, but keep the GL context and
    // program alive (closeImpl(false)) so reconfiguration is cheap.
    closeImpl(false);

    applyMetadata(sessionParams);

    // Open the emulator sensor pipe once; it survives reconfiguration.
    if (!mQemuChannel.ok()) {
        static const char kPipeName[] = "FakeRotatingCameraSensor";
        mQemuChannel.reset(qemu_pipe_open_ns(NULL, kPipeName, O_RDWR));
        if (!mQemuChannel.ok()) {
            ALOGE("%s:%s:%d qemu_pipe_open_ns failed for '%s'",
                  kClass, __func__, __LINE__, kPipeName);
            return FAILURE(false);
        }
    }

    const abc3d::EglCurrentContext currentContext = initOpenGL();
    if (!currentContext.ok()) {
        return FAILURE(false);
    }

    // closeImpl above must have emptied the cache.
    LOG_ALWAYS_FATAL_IF(!mStreamInfoCache.empty());
    for (; nStreams > 0; --nStreams, ++streams, ++halStreams) {
        const int32_t id = streams->id;
        // streams[] and halStreams[] are expected to be parallel arrays.
        LOG_ALWAYS_FATAL_IF(halStreams->id != id);
        StreamInfo& si = mStreamInfoCache[id];
        si.usage = halStreams->producerUsage;
        si.size.width = streams->width;
        si.size.height = streams->height;
        si.pixelFormat = halStreams->overrideFormat;
        si.blobBufferSize = streams->bufferSize;

        // Non-RGBA outputs (YUV, BLOB) need a scratch RGBA buffer that the
        // GPU renders into and the CPU reads back for conversion.
        if (si.pixelFormat != PixelFormat::RGBA_8888) {
            const native_handle_t* buffer;
            GraphicBufferAllocator& gba = GraphicBufferAllocator::get();
            uint32_t stride;

            if (gba.allocate(si.size.width, si.size.height,
                    static_cast<int>(PixelFormat::RGBA_8888), 1,
                    static_cast<uint64_t>(usageOr(BufferUsage::GPU_RENDER_TARGET,
                                                  usageOr(BufferUsage::CPU_READ_OFTEN,
                                                          BufferUsage::CAMERA_OUTPUT))),
                    &buffer, &stride, kClass) == NO_ERROR) {
                si.rgbaBuffer.reset(buffer);
            } else {
                // Roll back: leave no partially-configured streams behind.
                mStreamInfoCache.clear();
                return FAILURE(false);
            }
        }
    }

    return true;
}
337 
// Fully closes the camera: same teardown as the destructor.
void FakeRotatingCamera::close() {
    closeImpl(true);
}
341 
initOpenGL()342 abc3d::EglCurrentContext FakeRotatingCamera::initOpenGL() {
343     if (mGlProgram.ok()) {
344         return mEglContext.getCurrentContext();
345     }
346 
347     abc3d::EglContext context;
348     abc3d::EglCurrentContext currentContext = context.init();
349     if (!currentContext.ok()) {
350         return abc3d::EglCurrentContext();
351     }
352 
353     abc3d::AutoTexture testPatternTexture = loadTestPatternTexture();
354     if (!testPatternTexture.ok()) {
355         return abc3d::EglCurrentContext();
356     }
357 
358     const char kVertexShaderStr[] = R"CODE(
359 attribute vec4 a_position;
360 attribute vec2 a_texCoord;
361 uniform mat4 u_pvmMatrix;
362 varying vec2 v_texCoord;
363 void main() {
364     gl_Position = u_pvmMatrix * a_position;
365     v_texCoord = a_texCoord;
366 }
367 )CODE";
368     abc3d::AutoShader vertexShader;
369     if (!vertexShader.compile(GL_VERTEX_SHADER, kVertexShaderStr)) {
370         return abc3d::EglCurrentContext();
371     }
372 
373     const char kFragmentShaderStr[] = R"CODE(
374 precision mediump float;
375 varying vec2 v_texCoord;
376 uniform sampler2D u_texture;
377 void main() {
378     gl_FragColor = texture2D(u_texture, v_texCoord);
379 }
380 )CODE";
381     abc3d::AutoShader fragmentShader;
382     if (!fragmentShader.compile(GL_FRAGMENT_SHADER, kFragmentShaderStr)) {
383         return abc3d::EglCurrentContext();
384     }
385 
386     abc3d::AutoProgram program;
387     if (!program.link(vertexShader.get(), fragmentShader.get())) {
388         return abc3d::EglCurrentContext();
389     }
390 
391     const GLint programAttrPositionLoc = program.getAttribLocation("a_position");
392     if (programAttrPositionLoc < 0) {
393         return abc3d::EglCurrentContext();
394     }
395     const GLint programAttrTexCoordLoc = program.getAttribLocation("a_texCoord");
396     if (programAttrTexCoordLoc < 0) {
397         return abc3d::EglCurrentContext();
398     }
399     const GLint programUniformTextureLoc = program.getUniformLocation("u_texture");
400     if (programUniformTextureLoc < 0) {
401         return abc3d::EglCurrentContext();
402     }
403     const GLint programUniformPvmMatrixLoc = program.getUniformLocation("u_pvmMatrix");
404     if (programUniformPvmMatrixLoc < 0) {
405         return abc3d::EglCurrentContext();
406     }
407 
408     mEglContext = std::move(context);
409     mGlTestPatternTexture = std::move(testPatternTexture);
410     mGlProgramAttrPositionLoc = programAttrPositionLoc;
411     mGlProgramAttrTexCoordLoc = programAttrTexCoordLoc;
412     mGlProgramUniformTextureLoc = programUniformTextureLoc;
413     mGlProgramUniformPvmMatrixLoc = programUniformPvmMatrixLoc;
414     mGlProgram = std::move(program);
415 
416     return std::move(currentContext);
417 }
418 
// Releases session state. With everything=false only the per-stream cache
// is dropped (GL program/texture and EGL context are kept for the next
// configure()); with everything=true the GL objects, the EGL context and
// the qemu sensor pipe are torn down too.
void FakeRotatingCamera::closeImpl(const bool everything) {
    {
        // GL object destruction requires a current EGL context; make it
        // current for the duration of this scope.
        const abc3d::EglCurrentContext currentContext = mEglContext.getCurrentContext();
        // If there are cached streams there must be a working context to
        // destroy their GL-backed resources with.
        LOG_ALWAYS_FATAL_IF(!mStreamInfoCache.empty() && !currentContext.ok());
        mStreamInfoCache.clear();

        if (everything) {
            mGlProgram.clear();
            mGlTestPatternTexture.clear();
        }
    }

    // The context itself is released only after everything that needed it
    // is gone.
    if (everything) {
        mEglContext.clear();
        mQemuChannel.reset();
    }
}
436 
// Handles one capture request: updates result metadata, reads the current
// rotation from the emulator sensor pipe, positions the virtual camera on a
// sphere around the scene, and renders/converts each requested buffer.
// Returns {frameDurationNs, exposureNs, resultMetadata, completed buffers,
// delayed (JPEG) buffers}; on failure every buffer is returned failed and
// the frame duration slot carries -1.
std::tuple<int64_t, int64_t, CameraMetadata,
           std::vector<StreamBuffer>, std::vector<DelayedStreamBuffer>>
FakeRotatingCamera::processCaptureRequest(CameraMetadata metadataUpdate,
                                          Span<CachedStreamBuffer*> csbs) {
    // An empty update means "no new settings": just refresh the dynamic
    // result metadata; otherwise apply the new settings first.
    CameraMetadata resultMetadata = metadataUpdate.metadata.empty() ?
        updateCaptureResultMetadata() :
        applyMetadata(std::move(metadataUpdate));

    const size_t csbsSize = csbs.size();
    std::vector<StreamBuffer> outputBuffers;
    std::vector<DelayedStreamBuffer> delayedOutputBuffers;
    outputBuffers.reserve(csbsSize);

    // All rendering below needs the EGL context current on this thread.
    const abc3d::EglCurrentContext currentContext = mEglContext.getCurrentContext();
    if (!currentContext.ok()) {
        goto fail;
    }

    RenderParams renderParams;
    {
        SensorValues sensorValues;
        if (readSensors(&sensorValues)) {
            static_assert(sizeof(renderParams.cameraParams.rotXYZ3) ==
                          sizeof(sensorValues.rotation));

            memcpy(renderParams.cameraParams.rotXYZ3, sensorValues.rotation,
                   sizeof(sensorValues.rotation));
        } else {
            goto fail;
        }

        // Orbit radius: the camera sits on a sphere of radius kR around the
        // scene origin, positioned by the device's rotation angles so that
        // physically rotating the (virtual) device pans around the scene.
        constexpr double kR = 5.0;

        float* pos3 = renderParams.cameraParams.pos3;
        pos3[0] = -kR * sin(sensorValues.rotation[0]) * sin(sensorValues.rotation[1]);
        pos3[1] = -kR * sin(sensorValues.rotation[0]) * cos(sensorValues.rotation[1]);
        pos3[2] = kR * cos(sensorValues.rotation[0]);
    }

    for (size_t i = 0; i < csbsSize; ++i) {
        CachedStreamBuffer* csb = csbs[i];
        LOG_ALWAYS_FATAL_IF(!csb);  // otherwise mNumBuffersInFlight will be hard

        // Resolve (and memoize on the buffer) the stream info this buffer
        // belongs to; an unknown stream id fails just that one buffer.
        const StreamInfo* si = csb->getStreamInfo<StreamInfo>();
        if (!si) {
            const auto sii = mStreamInfoCache.find(csb->getStreamId());
            if (sii == mStreamInfoCache.end()) {
                ALOGE("%s:%s:%d could not find stream=%d in the cache",
                      kClass, __func__, __LINE__, csb->getStreamId());
            } else {
                si = &sii->second;
                csb->setStreamInfo(si);
            }
        }

        if (si) {
            captureFrame(*si, renderParams, csb, &outputBuffers, &delayedOutputBuffers);
        } else {
            outputBuffers.push_back(csb->finish(false));
        }
    }

    return make_tuple(mFrameDurationNs, kDefaultSensorExposureTimeNs,
                      std::move(resultMetadata), std::move(outputBuffers),
                      std::move(delayedOutputBuffers));

fail:
    // Every buffer must be returned to the framework, even on failure,
    // so the in-flight accounting stays balanced.
    for (size_t i = 0; i < csbsSize; ++i) {
        CachedStreamBuffer* csb = csbs[i];
        LOG_ALWAYS_FATAL_IF(!csb);  // otherwise mNumBuffersInFlight will be hard
        outputBuffers.push_back(csb->finish(false));
    }

    return make_tuple(FAILURE(-1), 0,
                      std::move(resultMetadata), std::move(outputBuffers),
                      std::move(delayedOutputBuffers));
}
514 
captureFrame(const StreamInfo & si,const RenderParams & renderParams,CachedStreamBuffer * csb,std::vector<StreamBuffer> * outputBuffers,std::vector<DelayedStreamBuffer> * delayedOutputBuffers) const515 void FakeRotatingCamera::captureFrame(const StreamInfo& si,
516                                       const RenderParams& renderParams,
517                                       CachedStreamBuffer* csb,
518                                       std::vector<StreamBuffer>* outputBuffers,
519                                       std::vector<DelayedStreamBuffer>* delayedOutputBuffers) const {
520     switch (si.pixelFormat) {
521     case PixelFormat::RGBA_8888:
522         outputBuffers->push_back(csb->finish(captureFrameRGBA(si, renderParams, csb)));
523         break;
524 
525     case PixelFormat::YCBCR_420_888:
526         outputBuffers->push_back(csb->finish(captureFrameYUV(si, renderParams, csb)));
527         break;
528 
529     case PixelFormat::BLOB:
530         delayedOutputBuffers->push_back(captureFrameJpeg(si, renderParams, csb));
531         break;
532 
533     default:
534         ALOGE("%s:%s:%d: unexpected pixelFormat=%" PRIx32,
535               kClass, __func__, __LINE__, static_cast<uint32_t>(si.pixelFormat));
536         outputBuffers->push_back(csb->finish(false));
537         break;
538     }
539 }
540 
// Captures directly into the client's RGBA buffer: waits (up to half a
// frame duration, converted from ns to ms) for the acquire fence, then
// renders the scene into the buffer on the GPU. Returns true on success.
bool FakeRotatingCamera::captureFrameRGBA(const StreamInfo& si,
                                            const RenderParams& renderParams,
                                            CachedStreamBuffer* csb) const {
    // mFrameDurationNs / 2000000 == half the frame duration in milliseconds.
    if (!csb->waitAcquireFence(mFrameDurationNs / 2000000)) {
        return FAILURE(false);
    }

    return renderIntoRGBA(si, renderParams, csb->getBuffer());
}
550 
// Captures a YUV frame: renders the scene into the stream's scratch RGBA
// buffer on the GPU, then locks both buffers and converts RGBA -> YCbCr on
// the CPU into the client's buffer. Returns true on success.
bool FakeRotatingCamera::captureFrameYUV(const StreamInfo& si,
                                         const RenderParams& renderParams,
                                         CachedStreamBuffer* csb) const {
    // configure() allocates the scratch buffer for every non-RGBA stream.
    LOG_ALWAYS_FATAL_IF(!si.rgbaBuffer);
    if (!renderIntoRGBA(si, renderParams, si.rgbaBuffer.get())) {
        return false;
    }

    // Wait up to half a frame duration (ns -> ms) for the client buffer.
    if (!csb->waitAcquireFence(mFrameDurationNs / 2000000)) {
        return false;
    }

    // Map the rendered RGBA pixels for CPU reading...
    void* rgba = nullptr;
    if (GraphicBufferMapper::get().lock(
            si.rgbaBuffer.get(), static_cast<uint32_t>(BufferUsage::CPU_READ_OFTEN),
            {si.size.width, si.size.height}, &rgba) != NO_ERROR) {
        return FAILURE(false);
    }

    // ...and the destination buffer's YCbCr planes for CPU writing.
    android_ycbcr ycbcr;
    if (GraphicBufferMapper::get().lockYCbCr(
            csb->getBuffer(), static_cast<uint32_t>(BufferUsage::CPU_WRITE_OFTEN),
            {si.size.width, si.size.height}, &ycbcr) != NO_ERROR) {
        // Don't leak the first lock on the error path.
        LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(si.rgbaBuffer.get()) != NO_ERROR);
        return FAILURE(false);
    }

    const bool converted = conv::rgba2yuv(si.size.width, si.size.height,
                                          static_cast<const uint32_t*>(rgba),
                                          ycbcr);

    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(csb->getBuffer()) != NO_ERROR);
    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(si.rgbaBuffer.get()) != NO_ERROR);

    return converted;
}
587 
// Captures a JPEG (BLOB) frame. The expensive part — rendering and RGBA ->
// NV21 conversion — happens now; JPEG compression is packaged into a
// DelayedStreamBuffer closure the caller invokes later (with ok=false to
// abort), keeping the request path fast.
DelayedStreamBuffer FakeRotatingCamera::captureFrameJpeg(const StreamInfo& si,
                                                         const RenderParams& renderParams,
                                                         CachedStreamBuffer* csb) const {
    // Empty on render/convert failure; checked inside the closure.
    std::vector<uint8_t> nv21data = captureFrameForCompressing(si, renderParams);

    // Snapshot everything the closure needs by value so it does not depend
    // on `this` or on `si` staying alive.
    const Rect<uint16_t> imageSize = si.size;
    const uint32_t jpegBufferSize = si.blobBufferSize;
    const int64_t frameDurationNs = mFrameDurationNs;
    CameraMetadata metadata = mCaptureResultMetadata;

    return [csb, imageSize, nv21data = std::move(nv21data), metadata = std::move(metadata),
            jpegBufferSize, frameDurationNs](const bool ok) -> StreamBuffer {
        StreamBuffer sb;
        // Full frame duration (ns -> ms) as the acquire-fence timeout here.
        if (ok && !nv21data.empty() && csb->waitAcquireFence(frameDurationNs / 1000000)) {
            sb = csb->finish(compressNV21IntoJpeg(imageSize, nv21data.data(), metadata,
                                                  csb->getBuffer(), jpegBufferSize));
        } else {
            sb = csb->finish(false);
        }

        return sb;
    };
}
611 
// Renders the scene into the stream's scratch RGBA buffer and converts it
// to an NV21 byte vector suitable for JPEG compression. Returns an empty
// vector on any failure.
std::vector<uint8_t>
FakeRotatingCamera::captureFrameForCompressing(const StreamInfo& si,
                                               const RenderParams& renderParams) const {
    if (!renderIntoRGBA(si, renderParams, si.rgbaBuffer.get())) {
        return {};
    }

    // Map the rendered pixels for CPU reading.
    void* rgba = nullptr;
    if (GraphicBufferMapper::get().lock(
            si.rgbaBuffer.get(), static_cast<uint32_t>(BufferUsage::CPU_READ_OFTEN),
            {si.size.width, si.size.height}, &rgba) != NO_ERROR) {
        return {};
    }

    // Allocate the NV21 output and set up its plane pointers into it.
    std::vector<uint8_t> nv21data(yuv::NV21size(si.size.width, si.size.height));
    const android_ycbcr ycbcr = yuv::NV21init(si.size.width, si.size.height,
                                              nv21data.data());

    const bool converted = conv::rgba2yuv(si.size.width, si.size.height,
                                          static_cast<const uint32_t*>(rgba),
                                          ycbcr);

    LOG_ALWAYS_FATAL_IF(GraphicBufferMapper::get().unlock(si.rgbaBuffer.get()) != NO_ERROR);

    if (converted) {
        return nv21data;
    } else {
        return {};
    }
}
642 
// Renders one frame of the scene at the given output size: builds the
// combined projection*view matrix (including Android-specific workarounds),
// sets the viewport, draws, and blocks in glFinish() so the pixels are
// complete before any CPU readback. Returns the result of drawSceneImpl.
bool FakeRotatingCamera::drawScene(const Rect<uint16_t> imageSize,
                                   const RenderParams& renderParams,
                                   const bool isHardwareBuffer) const {
    float pvMatrix44[16];
    {
        float projectionMatrix44[16];
        float viewMatrix44[16];

        // This matrix takes into account specific behaviors below:
        // * The Y axis when rendering into an AHardwareBuffer goes down,
        //   while it goes up everywhere else (e.g. when rendering to
        //   `EGLSurface`).
        // * We set `sensorOrientation=90` because a lot of places in Android
        //   and 3Ps assume this and don't work properly with
        //   `sensorOrientation=0`.
        const float workaroundMatrix44[16] = {
            0, (isHardwareBuffer ? -1.0f : 1.0f), 0, 0,
           -1,                                 0, 0, 0,
            0,                                 0, 1, 0,
            0,                                 0, 0, 1,
        };

        {
            constexpr double kNear = 1.0;
            constexpr double kFar = 10.0;

            // We use `height` to calculate `right` because the image is
            // 90 degrees rotated (sensorOrientation=90). The frustum edge is
            // derived from the physical sensor size and focal length so the
            // field of view matches the advertised camera characteristics.
            const double right = kNear * (.5 * getSensorSize().height / getSensorDPI() / getDefaultFocalLength());
            const double top = right / imageSize.width * imageSize.height;
            abc3d::frustum(pvMatrix44, -right, right, -top, top,
                           kNear, kFar);
        }

        // projection = frustum * workaround (pvMatrix44 is reused as scratch
        // for the frustum result above).
        abc3d::mulM44(projectionMatrix44, pvMatrix44, workaroundMatrix44);

        {
            const auto& cam = renderParams.cameraParams;
            abc3d::lookAtXyzRot(viewMatrix44, cam.pos3, cam.rotXYZ3);
        }

        // Final combined matrix: projection * view.
        abc3d::mulM44(pvMatrix44, projectionMatrix44, viewMatrix44);
    }

    glViewport(0, 0, imageSize.width, imageSize.height);
    const bool result = drawSceneImpl(pvMatrix44);
    // Ensure rendering is finished before the caller maps the buffer.
    glFinish();
    return result;
}
690 
// Draws the scene: a single 2x2 textured quad lying in the XZ plane at
// y=0, centered at the origin, textured with the test pattern, over a
// dark greenish clear color.
bool FakeRotatingCamera::drawSceneImpl(const float pvMatrix44[]) const {
    // Quad center (kX, kY, kZ) and half-size kS.
    constexpr float kX = 0;
    constexpr float kY = 0;
    constexpr float kZ = 0;
    constexpr float kS = 1;

    // Interleaved vertex data: 3 position floats + 2 texcoord floats per
    // vertex (stride of 5 floats).
    const GLfloat vVertices[] = {
       -kS + kX, kY, kZ - kS,   // Position 0
        0,  0,                  // TexCoord 0
       -kS + kX, kY, kZ + kS,   // Position 1
        0,  1,                  // TexCoord 1
        kS + kX, kY, kZ + kS,   // Position 2
        1,  1,                  // TexCoord 2
        kS + kX, kY, kZ - kS,   // Position 3
        1,  0                   // TexCoord 3
    };

    // Two triangles forming the quad.
    static const GLushort indices[] = { 0, 1, 2, 0, 2, 3 };

    glClearColor(0.2, 0.3, 0.2, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    glUseProgram(mGlProgram.get());
    glVertexAttribPointer(mGlProgramAttrPositionLoc, 3, GL_FLOAT, GL_FALSE,
                          5 * sizeof(GLfloat), &vVertices[0]);
    glEnableVertexAttribArray(mGlProgramAttrPositionLoc);
    glVertexAttribPointer(mGlProgramAttrTexCoordLoc, 2, GL_FLOAT, GL_FALSE,
                          5 * sizeof(GLfloat), &vVertices[3]);
    glEnableVertexAttribArray(mGlProgramAttrTexCoordLoc);
    // NOTE(review): strict OpenGL ES 2.0 requires transpose == GL_FALSE for
    // glUniformMatrix4fv; passing `true` relies on the emulator's host GL
    // accepting it — confirm this is intentional.
    glUniformMatrix4fv(mGlProgramUniformPvmMatrixLoc, 1, true, pvMatrix44);

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, mGlTestPatternTexture.get());
    glUniform1i(mGlProgramUniformTextureLoc, 0);

    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);

    return true;
}
730 
// Renders the scene into an arbitrary gralloc RGBA buffer by wrapping it as
// a GraphicBuffer/AHardwareBuffer, importing it as an EGLImage bound to an
// FBO color attachment, and drawing into that FBO. Returns true on success.
bool FakeRotatingCamera::renderIntoRGBA(const StreamInfo& si,
                                        const RenderParams& renderParams,
                                        const native_handle_t* rgbaBuffer) const {
    // The goldfish gralloc handle carries the stride needed to wrap the
    // buffer correctly.
    const cb_handle_t* const cb = cb_handle_t::from(rgbaBuffer);
    if (!cb) {
        return FAILURE(false);
    }

    // WRAP_HANDLE: the GraphicBuffer borrows the handle without taking
    // ownership of it.
    const auto gb = sp<GraphicBuffer>::make(
        rgbaBuffer, GraphicBuffer::WRAP_HANDLE, si.size.width,
        si.size.height, static_cast<int>(si.pixelFormat), 1,
        static_cast<uint64_t>(si.usage), cb->stride);

    const EGLClientBuffer clientBuf =
        eglGetNativeClientBufferANDROID(gb->toAHardwareBuffer());
    if (!clientBuf) {
        return FAILURE(false);
    }

    const abc3d::AutoImageKHR eglImage(mEglContext.getDisplay(), clientBuf);
    if (!eglImage.ok()) {
        return false;
    }

    // Bind the EGLImage to a texture and attach it as the FBO color buffer
    // so rendering goes straight into the gralloc buffer.
    abc3d::AutoTexture fboTex(GL_TEXTURE_2D);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, eglImage.get());

    abc3d::AutoFrameBuffer fbo;
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, fboTex.get(), 0);

    // drawing into EGLClientBuffer is Y-flipped on Android
    return drawScene(si.size, renderParams, true);
}
765 
// Reads the current fake-sensor values from the host over the qemu pipe.
// Wire protocol: every message is a 4-byte payload-length header followed by
// the payload.  We send the "get" command (its trailing NUL included) and
// expect back exactly sizeof(SensorValues) bytes.  Returns false -- with
// logging via FAILURE -- on any I/O error or response-size mismatch.
bool FakeRotatingCamera::readSensors(SensorValues* vals) {
    static const char kReadCommand[] = "get";

    // Send the length prefix; `len` counts the command's NUL terminator too.
    uint32_t len = sizeof(kReadCommand);
    if (qemu_pipe_write_fully(mQemuChannel.get(), &len, sizeof(len))) {
        return FAILURE(false);
    }
    if (qemu_pipe_write_fully(mQemuChannel.get(), &kReadCommand[0], sizeof(kReadCommand))) {
        return FAILURE(false);
    }
    // Read the response length and validate it before reading the payload
    // straight into *vals.
    if (qemu_pipe_read_fully(mQemuChannel.get(), &len, sizeof(len))) {
        return FAILURE(false);
    }
    if (len != sizeof(*vals)) {
        return FAILURE(false);
    }
    if (qemu_pipe_read_fully(mQemuChannel.get(), vals, len)) {
        return FAILURE(false);
    }

    return true;
}
788 
applyMetadata(const CameraMetadata & metadata)789 CameraMetadata FakeRotatingCamera::applyMetadata(const CameraMetadata& metadata) {
790     const camera_metadata_t* const raw =
791         reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data());
792 
793     mFrameDurationNs = getFrameDuration(raw, kDefaultFrameDurationNs,
794                                         kMinFrameDurationNs, kMaxFrameDurationNs);
795 
796     camera_metadata_ro_entry_t entry;
797     const camera_metadata_enum_android_control_af_mode_t afMode =
798         find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_MODE, &entry) ?
799             ANDROID_CONTROL_AF_MODE_OFF :
800             static_cast<camera_metadata_enum_android_control_af_mode_t>(entry.data.u8[0]);
801 
802     const camera_metadata_enum_android_control_af_trigger_t afTrigger =
803         find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_TRIGGER, &entry) ?
804             ANDROID_CONTROL_AF_TRIGGER_IDLE :
805             static_cast<camera_metadata_enum_android_control_af_trigger_t>(entry.data.u8[0]);
806 
807     const auto af = mAFStateMachine(afMode, afTrigger);
808 
809     CameraMetadataMap m = parseCameraMetadataMap(metadata);
810 
811     m[ANDROID_CONTROL_AE_STATE] = uint8_t(ANDROID_CONTROL_AE_STATE_CONVERGED);
812     m[ANDROID_CONTROL_AF_STATE] = uint8_t(af.first);
813     m[ANDROID_CONTROL_AWB_STATE] = uint8_t(ANDROID_CONTROL_AWB_STATE_CONVERGED);
814     m[ANDROID_FLASH_STATE] = uint8_t(ANDROID_FLASH_STATE_UNAVAILABLE);
815     m[ANDROID_LENS_APERTURE] = getDefaultAperture();
816     m[ANDROID_LENS_FOCUS_DISTANCE] = af.second;
817     m[ANDROID_LENS_STATE] = uint8_t(getAfLensState(af.first));
818     m[ANDROID_REQUEST_PIPELINE_DEPTH] = uint8_t(4);
819     m[ANDROID_SENSOR_FRAME_DURATION] = mFrameDurationNs;
820     m[ANDROID_SENSOR_EXPOSURE_TIME] = kDefaultSensorExposureTimeNs;
821     m[ANDROID_SENSOR_SENSITIVITY] = getDefaultSensorSensitivity();
822     m[ANDROID_SENSOR_TIMESTAMP] = int64_t(0);
823     m[ANDROID_SENSOR_ROLLING_SHUTTER_SKEW] = kMinSensorExposureTimeNs;
824     m[ANDROID_STATISTICS_SCENE_FLICKER] = uint8_t(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
825 
826     std::optional<CameraMetadata> maybeSerialized =
827         serializeCameraMetadataMap(m);
828 
829     if (maybeSerialized) {
830         mCaptureResultMetadata = std::move(maybeSerialized.value());
831     }
832 
833     {   // reset ANDROID_CONTROL_AF_TRIGGER to IDLE
834         camera_metadata_t* const raw =
835             reinterpret_cast<camera_metadata_t*>(mCaptureResultMetadata.metadata.data());
836 
837         camera_metadata_ro_entry_t entry;
838         const auto newTriggerValue = ANDROID_CONTROL_AF_TRIGGER_IDLE;
839 
840         if (find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_TRIGGER, &entry)) {
841             return mCaptureResultMetadata;
842         } else if (entry.data.i32[0] == newTriggerValue) {
843             return mCaptureResultMetadata;
844         } else {
845             CameraMetadata result = mCaptureResultMetadata;
846 
847             if (update_camera_metadata_entry(raw, entry.index, &newTriggerValue, 1, nullptr)) {
848                 ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_CONTROL_AF_TRIGGER) "
849                       "failed", kClass, __func__, __LINE__);
850             }
851 
852             return result;
853         }
854     }
855 }
856 
updateCaptureResultMetadata()857 CameraMetadata FakeRotatingCamera::updateCaptureResultMetadata() {
858     camera_metadata_t* const raw =
859         reinterpret_cast<camera_metadata_t*>(mCaptureResultMetadata.metadata.data());
860 
861     const auto af = mAFStateMachine();
862 
863     camera_metadata_ro_entry_t entry;
864 
865     if (find_camera_metadata_ro_entry(raw, ANDROID_CONTROL_AF_STATE, &entry)) {
866         ALOGW("%s:%s:%d: find_camera_metadata_ro_entry(ANDROID_CONTROL_AF_STATE) failed",
867               kClass, __func__, __LINE__);
868     } else if (update_camera_metadata_entry(raw, entry.index, &af.first, 1, nullptr)) {
869         ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_CONTROL_AF_STATE) failed",
870               kClass, __func__, __LINE__);
871     }
872 
873     if (find_camera_metadata_ro_entry(raw, ANDROID_LENS_FOCUS_DISTANCE, &entry)) {
874         ALOGW("%s:%s:%d: find_camera_metadata_ro_entry(ANDROID_LENS_FOCUS_DISTANCE) failed",
875               kClass, __func__, __LINE__);
876     } else if (update_camera_metadata_entry(raw, entry.index, &af.second, 1, nullptr)) {
877         ALOGW("%s:%s:%d: update_camera_metadata_entry(ANDROID_LENS_FOCUS_DISTANCE) failed",
878               kClass, __func__, __LINE__);
879     }
880 
881     return metadataCompact(mCaptureResultMetadata);
882 }
883 
884 ////////////////////////////////////////////////////////////////////////////////
885 
getTargetFpsRanges() const886 Span<const std::pair<int32_t, int32_t>> FakeRotatingCamera::getTargetFpsRanges() const {
887     // ordered to satisfy testPreviewFpsRangeByCamera
888     static const std::pair<int32_t, int32_t> targetFpsRanges[] = {
889         {kMinFPS, kMedFPS},
890         {kMedFPS, kMedFPS},
891         {kMinFPS, kMaxFPS},
892         {kMaxFPS, kMaxFPS},
893     };
894 
895     return targetFpsRanges;
896 }
897 
getAvailableThumbnailSizes() const898 Span<const Rect<uint16_t>> FakeRotatingCamera::getAvailableThumbnailSizes() const {
899     static const Rect<uint16_t> availableThumbnailSizes[] = {
900         {0, 0},
901         {11 * 4, 9 * 4},
902         {16 * 4, 9 * 4},
903         {4 * 16, 3 * 16},
904     };
905 
906     return availableThumbnailSizes;
907 }
908 
// Returns whether this camera instance was configured as back-facing.
bool FakeRotatingCamera::isBackFacing() const {
    return mIsBackFacing;
}
912 
getAvailableFocalLength() const913 Span<const float> FakeRotatingCamera::getAvailableFocalLength() const {
914     static const float availableFocalLengths[] = {
915         kDefaultFocalLength
916     };
917 
918     return availableFocalLengths;
919 }
920 
getMaxNumOutputStreams() const921 std::tuple<int32_t, int32_t, int32_t> FakeRotatingCamera::getMaxNumOutputStreams() const {
922     return {
923         0,  // raw
924         2,  // processed
925         1,  // jpeg
926     };
927 }
928 
getSupportedPixelFormats() const929 Span<const PixelFormat> FakeRotatingCamera::getSupportedPixelFormats() const {
930     static const PixelFormat supportedPixelFormats[] = {
931         PixelFormat::IMPLEMENTATION_DEFINED,
932         PixelFormat::YCBCR_420_888,
933         PixelFormat::RGBA_8888,
934         PixelFormat::BLOB,
935     };
936 
937     return {supportedPixelFormats};
938 }
939 
// Shortest supported frame duration in ns (i.e. the duration at kMaxFPS).
int64_t FakeRotatingCamera::getMinFrameDurationNs() const {
    return kMinFrameDurationNs;
}
943 
// Pixel-array size of the fake sensor: {width, height}.
Rect<uint16_t> FakeRotatingCamera::getSensorSize() const {
    return {1920, 1080};
}
947 
// Frames are rendered as full RGB images rather than sampled through a
// Bayer mosaic, so advertise the RGB color filter arrangement.
uint8_t FakeRotatingCamera::getSensorColorFilterArrangement() const {
    return ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB;
}
951 
// Supported sensor exposure time range in ns: {min, max}.
std::pair<int64_t, int64_t> FakeRotatingCamera::getSensorExposureTimeRange() const {
    return {kMinSensorExposureTimeNs, kMaxSensorExposureTimeNs};
}
955 
// Longest supported frame duration in ns (i.e. the duration at kMinFPS).
int64_t FakeRotatingCamera::getSensorMaxFrameDuration() const {
    return kMaxFrameDurationNs;
}
959 
getSupportedResolutions() const960 Span<const Rect<uint16_t>> FakeRotatingCamera::getSupportedResolutions() const {
961     static const Rect<uint16_t> supportedResolutions[] = {
962         {176, 144},
963         {320, 240},
964         {640, 480},
965         {1024, 576},
966         {1280, 720},
967         {1600, 900},
968         {1920, 1080},
969     };
970 
971     return supportedResolutions;
972 }
973 
getDefaultTargetFpsRange(const RequestTemplate tpl) const974 std::pair<int32_t, int32_t> FakeRotatingCamera::getDefaultTargetFpsRange(const RequestTemplate tpl) const {
975     switch (tpl) {
976     case RequestTemplate::PREVIEW:
977     case RequestTemplate::VIDEO_RECORD:
978     case RequestTemplate::VIDEO_SNAPSHOT:
979         return {kMaxFPS, kMaxFPS};
980 
981     default:
982         return {kMinFPS, kMaxFPS};
983     }
984 }
985 
// Default sensor exposure time in ns, used when the request does not
// specify one.
int64_t FakeRotatingCamera::getDefaultSensorExpTime() const {
    return kDefaultSensorExposureTimeNs;
}
989 
// Default frame duration in ns (the fastest supported, i.e. kMaxFPS).
int64_t FakeRotatingCamera::getDefaultSensorFrameDuration() const {
    return kMinFrameDurationNs;
}
993 
// Default focal length in mm; matches the single advertised focal length.
float FakeRotatingCamera::getDefaultFocalLength() const {
    return kDefaultFocalLength;
}
997 
998 }  // namespace hw
999 }  // namespace implementation
1000 }  // namespace provider
1001 }  // namespace camera
1002 }  // namespace hardware
1003 }  // namespace android
1004