/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0

#include "Encoder.h"

#include <DeviceAsWebcamNative.h>
#include <condition_variable>
#include <libyuv/convert.h>
#include <libyuv/convert_from.h>
#include <libyuv/rotate.h>
#include <log/log.h>
#include <queue>
#include <sched.h>

namespace android {
namespace webcam {

Encoder::Encoder(CameraConfig& config, EncoderCallback* cb)
    : mConfig(config), mCb(cb) {
    // Initialize intermediate buffers here.
    mI420.y = std::make_unique<uint8_t[]>(config.width * config.height);

    // TODO(b/267794640): Can the size be width * height / 4 since the chroma planes are
    //                    subsampled in both width and height?
    mI420.u = std::make_unique<uint8_t[]>(config.width * config.height / 2);
    mI420.v = std::make_unique<uint8_t[]>(config.width * config.height / 2);
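
    // For example, at 1920x1080 the Y plane is 1920 * 1080 = 2,073,600 bytes, while the U and
    // V planes are allocated 1,036,800 bytes each even though I420 only needs
    // width / 2 * height / 2 = 518,400 bytes per chroma plane (the question raised in the
    // TODO above).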

    mI420.yRowStride = config.width;
    mI420.uRowStride = config.width / 2;
    mI420.vRowStride = config.width / 2;

    if (mI420.y == nullptr || mI420.u == nullptr || mI420.v == nullptr) {
        ALOGE("%s Failed to allocate memory for intermediate I420 buffers", __FUNCTION__);
        return;
    }

    mInited = true;
}

bool Encoder::isInited() const {
    return mInited;
}

Encoder::~Encoder() {
    mContinueEncoding = false;
    if (mEncoderThread.joinable()) {
        mEncoderThread.join();
    }
}

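// Consumer loop for the encoder thread: waits (in 50ms slices so mContinueEncoding is
// re-checked) for requests on mRequestQueue, encodes each one, and on shutdown returns any
// still-queued requests through failure callbacks.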
void Encoder::encodeThreadLoop() {
    using namespace std::chrono_literals;
    ALOGV("%s Starting encode threadLoop", __FUNCTION__);
    EncodeRequest request;
    while (mContinueEncoding) {
        {
            std::unique_lock<std::mutex> l(mRequestLock);
            while (mRequestQueue.empty()) {
                mRequestCondition.wait_for(l, 50ms);
                if (!mContinueEncoding) {
                    return;
                }
            }
            request = mRequestQueue.front();
            mRequestQueue.pop();
        }
        encode(request);
    }

    // Thread signalled to exit.
    ALOGV("%s Encode thread now exiting", __FUNCTION__);
    std::unique_lock<std::mutex> l(mRequestLock);
    // Return any pending buffers with encode failure callbacks.
    while (!mRequestQueue.empty()) {
        request = mRequestQueue.front();
        mRequestQueue.pop();
        mCb->onEncoded(request.dstBuffer, request.srcBuffer, /*success*/ false);
    }
}

void Encoder::queueRequest(EncodeRequest& request) {
    std::unique_lock<std::mutex> l(mRequestLock);
    mRequestQueue.emplace(request);
    mRequestCondition.notify_one();
}
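
// Typical usage, as a sketch (the actual call sites live elsewhere in the DeviceAsWebcam
// service; the field assignments below mirror how the request is read in this file, but the
// exact construction is an assumption):
//
//   Encoder encoder(config, callback);
//   if (!encoder.isInited()) { /* handle allocation failure */ }
//   encoder.startEncoderThread();
//   ...
//   EncodeRequest request;
//   request.srcBuffer = srcDesc;         // HardwareBufferDesc of the captured frame
//   request.dstBuffer = dstBuffer;       // Buffer that receives the YUYV / MJPEG output
//   request.rotationDegrees = rotation;  // 0 or 180 are the rotations handled here
//   encoder.queueRequest(request);       // onEncoded() is later invoked on the encoder thread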

bool Encoder::checkError(const char* msg, j_common_ptr jpeg_error_info_) {
    if (jpeg_error_info_) {
        char err_buffer[JMSG_LENGTH_MAX];
        jpeg_error_info_->err->format_message(jpeg_error_info_, err_buffer);
        ALOGE("%s: %s: %s", __FUNCTION__, msg, err_buffer);
        jpeg_error_info_ = nullptr;
        return true;
    }

    return false;
}

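// Compresses the intermediate I420 planes into JPEG using libjpeg's raw-data interface,
// writing directly into the request's destination buffer. Returns the number of encoded
// bytes, or 0 on failure.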
uint32_t Encoder::i420ToJpeg(EncodeRequest& request) {
    ALOGV("%s: E cpu : %d", __FUNCTION__, sched_getcpu());
    j_common_ptr jpegErrorInfo;
    auto dstBuffer = request.dstBuffer;
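    // Custom destination manager so libjpeg writes compressed bytes directly into the
    // caller-supplied destination buffer instead of allocating its own.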
    struct CustomJpegDestMgr : public jpeg_destination_mgr {
        JOCTET* buffer;
        size_t bufferSize;
        size_t encodedSize;
        bool success;
    } dmgr;

    // Set up error management
    jpegErrorInfo = nullptr;
    jpeg_error_mgr jErr;
    auto jpegCompressDeleter =
          [] (jpeg_compress_struct *c) {
              jpeg_destroy_compress(c);
              delete c;
          };

    std::unique_ptr<jpeg_compress_struct, decltype(jpegCompressDeleter)> cInfo(
            new jpeg_compress_struct(), jpegCompressDeleter);

    cInfo->err = jpeg_std_error(&jErr);
    cInfo->err->error_exit = [](j_common_ptr cInfo) {
        (*cInfo->err->output_message)(cInfo);
        if (cInfo->client_data) {
            auto& dmgr = *static_cast<CustomJpegDestMgr*>(cInfo->client_data);
            dmgr.success = false;
        }
    };

    jpeg_create_compress(cInfo.get());
    if (checkError("Error initializing compression", jpegErrorInfo)) {
        return 0;
    }

    dmgr.buffer = static_cast<JOCTET*>(dstBuffer->getMem());
    dmgr.bufferSize = dstBuffer->getLength();
    dmgr.encodedSize = 0;
    dmgr.success = true;
    cInfo->client_data = static_cast<void*>(&dmgr);
    dmgr.init_destination = [](j_compress_ptr cInfo) {
        auto& dmgr = static_cast<CustomJpegDestMgr&>(*cInfo->dest);
        dmgr.next_output_byte = dmgr.buffer;
        dmgr.free_in_buffer = dmgr.bufferSize;
    };

    dmgr.empty_output_buffer = [](j_compress_ptr) {
        ALOGE("%s:%d Out of buffer", __FUNCTION__, __LINE__);
        return 0;
    };

    dmgr.term_destination = [](j_compress_ptr cInfo) {
        auto& dmgr = static_cast<CustomJpegDestMgr&>(*cInfo->dest);
        dmgr.encodedSize = dmgr.bufferSize - dmgr.free_in_buffer;
        ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.encodedSize);
    };

    cInfo->dest = &dmgr;

    // Set up compression parameters
    cInfo->image_width = mConfig.width;
    cInfo->image_height = mConfig.height;
    cInfo->input_components = 3;
    cInfo->in_color_space = JCS_YCbCr;

    jpeg_set_defaults(cInfo.get());
    if (checkError("Error configuring defaults", jpegErrorInfo)) {
        return 0;
    }

    jpeg_set_colorspace(cInfo.get(), JCS_YCbCr);
    if (checkError("Error configuring color space", jpegErrorInfo)) {
        return 0;
    }

    cInfo->raw_data_in = 1;

    // YUV420 planar with chroma subsampling.
    // Configure sampling factors. The sampling factors are set for JPEG 4:2:0 subsampling
    // because the source format is YUV420. Note that libjpeg sampling factors are relative:
    // Y=2, U=1, V=1 means there is 1 U and 1 V value for every 2 Y values in each direction.
    cInfo->comp_info[0].h_samp_factor = 2; // Y horizontal sampling
    cInfo->comp_info[0].v_samp_factor = 2; // Y vertical sampling
    cInfo->comp_info[1].h_samp_factor = 1; // U horizontal sampling
    cInfo->comp_info[1].v_samp_factor = 1; // U vertical sampling
    cInfo->comp_info[2].h_samp_factor = 1; // V horizontal sampling
    cInfo->comp_info[2].v_samp_factor = 1; // V vertical sampling

    // This vertical subsampling is the same for both Cb and Cr components as defined in
    // cInfo->comp_info.
    int cvSubSampling = cInfo->comp_info[0].v_samp_factor / cInfo->comp_info[1].v_samp_factor;
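    // With the 4:2:0 factors above this evaluates to 2, i.e. one chroma row per two luma rows.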

    // Start compression
    jpeg_start_compress(cInfo.get(), TRUE);
    if (checkError("Error starting compression", jpegErrorInfo)) {
        return 0;
    }

    // Compute our macroblock height, so we can pad our input to be vertically
    // macroblock aligned.
    int maxVSampFactor =
            std::max({cInfo->comp_info[0].v_samp_factor, cInfo->comp_info[1].v_samp_factor,
                      cInfo->comp_info[2].v_samp_factor});
    size_t mcuV = DCTSIZE * maxVSampFactor;
    size_t paddedHeight = mcuV * ((cInfo->image_height + mcuV - 1) / mcuV);
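    // e.g. with the 4:2:0 factors above, maxVSampFactor = 2 and mcuV = 8 * 2 = 16, so a
    // 1080-row image is padded up to 16 * 68 = 1088 rows.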

    std::vector<JSAMPROW> yLines(paddedHeight);
    std::vector<JSAMPROW> cbLines(paddedHeight / cvSubSampling);
    std::vector<JSAMPROW> crLines(paddedHeight / cvSubSampling);

    uint8_t* pY = mI420.y.get();
    uint8_t* pCr = mI420.v.get();
    uint8_t* pCb = mI420.u.get();

    uint32_t cbCrStride = mConfig.width / 2;
    uint32_t yStride = mConfig.width;

    for (uint32_t i = 0; i < paddedHeight; i++) {
        // Once we are in the padding territory, we keep pointing to the last valid line,
        // effectively replicating it several times (similar to CLAMP_TO_EDGE).
        uint32_t li = std::min(i, cInfo->image_height - 1);
        yLines[i] = static_cast<JSAMPROW>(pY + li * yStride);
        if (i < paddedHeight / cvSubSampling) {
            li = std::min(i, (cInfo->image_height - 1) / cvSubSampling);
            crLines[i] = static_cast<JSAMPROW>(pCr + li * cbCrStride);
            cbLines[i] = static_cast<JSAMPROW>(pCb + li * cbCrStride);
        }
    }

    const uint32_t batchSize = DCTSIZE * maxVSampFactor;
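    // jpeg_write_raw_data() consumes one iMCU row per call, i.e. DCTSIZE * max_v_samp_factor
    // (16 here) luma scanlines plus the matching chroma rows.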
    while (cInfo->next_scanline < cInfo->image_height) {
        JSAMPARRAY planes[3]{&yLines[cInfo->next_scanline],
                             &cbLines[cInfo->next_scanline / cvSubSampling],
                             &crLines[cInfo->next_scanline / cvSubSampling]};

        jpeg_write_raw_data(cInfo.get(), planes, batchSize);
        if (checkError("Error while compressing", jpegErrorInfo)) {
            return 0;
        }
    }

    jpeg_finish_compress(cInfo.get());
    if (checkError("Error while finishing compression", jpegErrorInfo)) {
        return 0;
    }

    ALOGV("%s: X", __FUNCTION__);
    return dmgr.encodedSize;
}

void Encoder::encodeToMJpeg(EncodeRequest& request) {
    // First fill the intermediate I420 buffers.
    // TODO(b/267794640): Can we skip this conversion and encode to JPEG directly?
    if (convertToI420(request) != 0) {
        ALOGE("%s: Encode from YUV_420 to I420 failed", __FUNCTION__);
        mCb->onEncoded(request.dstBuffer, request.srcBuffer, /*success*/ false);
        return;
    }

    // Now encode the I420 to JPEG
    uint32_t encodedSize = i420ToJpeg(request);
    if (encodedSize == 0) {
        ALOGE("%s: Encode from I420 to JPEG failed", __FUNCTION__);
        mCb->onEncoded(request.dstBuffer, request.srcBuffer, /*success*/ false);
        return;
    }
    request.dstBuffer->setBytesUsed(encodedSize);

    mCb->onEncoded(request.dstBuffer, request.srcBuffer, /*success*/ true);
}

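// Converts the source hardware buffer (either the ARGB or the YUV variant of
// HardwareBufferDesc) into the tightly packed intermediate I420 planes, applying a
// 180-degree rotation when requested. Returns 0 on success (libyuv convention).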
int Encoder::convertToI420(EncodeRequest& request) {
    HardwareBufferDesc& src = request.srcBuffer;
    uint8_t* dstY = mI420.y.get();
    uint8_t* dstU = mI420.u.get();
    uint8_t* dstV = mI420.v.get();
    int32_t dstYRowStride = mConfig.width;
    int32_t dstURowStride = mConfig.width / 2;
    int32_t dstVRowStride = mConfig.width / 2;
    libyuv::RotationMode rotationMode = request.rotationDegrees == 180 ?
        libyuv::kRotate180 : libyuv::kRotate0;
    if (request.srcBuffer.format == AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM) {
        ARGBHardwareBufferDesc desc = std::get<ARGBHardwareBufferDesc>(src.bufferDesc);
        return libyuv::ARGBToI420(desc.buf, desc.rowStride, dstY,
                                  dstYRowStride, dstU, dstURowStride, dstV,
                                  dstVRowStride, mConfig.width, mConfig.height);
    }
    YuvHardwareBufferDesc desc = std::get<YuvHardwareBufferDesc>(src.bufferDesc);
    return libyuv::Android420ToI420Rotate(desc.yData, desc.yRowStride, desc.uData, desc.uRowStride,
                                    desc.vData, desc.vRowStride, desc.uvPixelStride, dstY,
                                    dstYRowStride, dstU, dstURowStride, dstV, dstVRowStride,
                                    mConfig.width, mConfig.height, rotationMode);
}

void Encoder::encodeToYUYV(EncodeRequest& r) {
    Buffer* dstBuffer = r.dstBuffer;
    // First convert the source buffer to I420.
    if (convertToI420(r) != 0) {
        ALOGE("%s: Encode from YUV_420 to I420 failed", __FUNCTION__);
        mCb->onEncoded(r.dstBuffer, r.srcBuffer, /*success*/ false);
        return;
    }

    int32_t dstYRowStride = mConfig.width;
    int32_t dstURowStride = mConfig.width / 2;
    int32_t dstVRowStride = mConfig.width / 2;
    uint8_t* dstY = mI420.y.get();
    uint8_t* dstU = mI420.u.get();
    uint8_t* dstV = mI420.v.get();

    // Now convert from I420 to YUYV
    if (libyuv::I420ToYUY2(dstY, dstYRowStride, dstU, dstURowStride, dstV, dstVRowStride,
                           reinterpret_cast<uint8_t*>(dstBuffer->getMem()),
                           /*rowStride*/ mConfig.width * 2, mConfig.width, mConfig.height) != 0) {
        ALOGE("%s: Encode from I420 to YUYV failed", __FUNCTION__);
        mCb->onEncoded(r.dstBuffer, r.srcBuffer, /*success*/ false);
        return;
    }
    dstBuffer->setBytesUsed(mConfig.width * mConfig.height * 2);
    // Call the callback
    mCb->onEncoded(r.dstBuffer, r.srcBuffer, /*success*/ true);
}

void Encoder::encode(EncodeRequest& encodeRequest) {
    // Dispatch based on the configured output format.
    switch (mConfig.fcc) {
        case V4L2_PIX_FMT_YUYV:
            encodeToYUYV(encodeRequest);
            break;
        case V4L2_PIX_FMT_MJPEG:
            encodeToMJpeg(encodeRequest);
            break;
        default:
            ALOGE("%s: Fourcc %u not supported for encoding", __FUNCTION__, mConfig.fcc);
    }
}

void Encoder::startEncoderThread() {
    // mEncoderThread can call into Java as a part of EncoderCallback.
    mEncoderThread =
            DeviceAsWebcamNative::createJniAttachedThread(&Encoder::encodeThreadLoop, this);
    ALOGV("Started new Encoder Thread");
}

}  // namespace webcam
}  // namespace android