// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// #define LOG_NDEBUG 0
#define LOG_TAG "MediaCodecEncoder"

#include "mediacodec_encoder.h"

#include <memory>
#include <string>
#include <utility>
#include <vector>

#include <log/log.h>
#include <media/NdkMediaFormat.h>

namespace android {
namespace {
// These values are defined at
// <android_root>/frameworks/base/media/java/android/media/MediaCodecInfo.java.
constexpr int32_t COLOR_FormatYUV420Planar = 19;
constexpr int32_t BITRATE_MODE_CBR = 2;
constexpr int32_t AVCProfileBaseline = 0x01;
constexpr int32_t VP8ProfileMain = 0x01;
constexpr int32_t VP9Profile0 = 0x01;

// The time interval between two key frames.
constexpr int32_t kIFrameIntervalSec = 10;

// The timeout of AMediaCodec function calls.
constexpr int kTimeoutUs = 1000;  // 1ms.

// The tolerance period between enqueuing two consecutive input buffers, and
// between submitting the EOS input buffer and receiving the EOS output
// buffer.
constexpr int kBufferPeriodTimeoutUs = 1000000;  // 1 sec

// Helper function to get possible C2 hardware encoder names from |type|.
// Note: A single test APK is built for both ARC++ and ARCVM, so both the C2 VEA encoder and the new
// V4L2 encoder names need to be specified here.
std::vector<const char*> GetHWVideoEncoderNames(VideoCodecType type) {
    switch (type) {
        case VideoCodecType::H264:
            return {"c2.v4l2.avc.encoder", "c2.vea.avc.encoder"};
        case VideoCodecType::VP8:
            return {"c2.v4l2.vp8.encoder"};  // Only supported on ARCVM
        case VideoCodecType::VP9:
            return {"c2.v4l2.vp9.encoder"};  // Only supported on ARCVM
        default:
            return {};
    }
}

// Helper function to get possible software encoder names from |type|.
// Note: A single test APK is built for both ARC++ and ARCVM, so both the OMX encoder used on
// Android P and the c2.android encoder used on Android R need to be specified here.
std::vector<const char*> GetSWVideoEncoderNames(VideoCodecType type) {
    switch (type) {
        case VideoCodecType::H264:
            return {"c2.android.avc.encoder", "OMX.google.h264.encoder"};
        case VideoCodecType::VP8:
            return {"c2.android.vp8.encoder", "OMX.google.vp8.encoder"};
        case VideoCodecType::VP9:
            return {"c2.android.vp9.encoder", "OMX.google.vp9.encoder"};
        default:
            return {};
    }
}

// Helper function to get the profile associated with the specified codec.
int32_t GetProfile(VideoCodecType type) {
    switch (type) {
        case VideoCodecType::H264:
            return AVCProfileBaseline;
        case VideoCodecType::VP8:
            return VP8ProfileMain;
        case VideoCodecType::VP9:
            return VP9Profile0;
        default:
            return AVCProfileBaseline;
    }
}

}  // namespace

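// The encoding flow exercised by this file is: Create() an encoder, Configure() it
// with a bitrate and frame rate, Start() it, run Encode(), then Stop(). An
// illustrative sketch of that call sequence (callback setup and error handling
// are omitted):
//
//   auto encoder = MediaCodecEncoder::Create(input_path, type, visible_size,
//                                            false /* use_sw_encoder */);
//   if (encoder && encoder->Configure(bitrate, framerate) && encoder->Start()) {
//       encoder->Encode();
//       encoder->Stop();
//   }
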
// static
std::unique_ptr<MediaCodecEncoder> MediaCodecEncoder::Create(std::string input_path,
                                                             VideoCodecType type, Size visible_size,
                                                             bool use_sw_encoder) {
    if (visible_size.width <= 0 || visible_size.height <= 0 || visible_size.width % 2 == 1 ||
        visible_size.height % 2 == 1) {
        ALOGE("Size is not valid: %dx%d", visible_size.width, visible_size.height);
        return nullptr;
    }
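    // An I420 frame holds width * height bytes of luma plus two chroma planes
    // subsampled by 2 in each dimension, i.e. 3 / 2 bytes per pixel.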
    size_t buffer_size = visible_size.width * visible_size.height * 3 / 2;

    std::unique_ptr<CachedInputFileStream> input_file(new CachedInputFileStream(input_path));
    if (!input_file->IsValid()) {
        ALOGE("Failed to open file: %s", input_path.c_str());
        return nullptr;
    }
    int file_size = input_file->GetLength();
    if (file_size < 0 || file_size % buffer_size != 0) {
        ALOGE("Stream byte size (%d) is not a multiple of frame byte size (%zu).", file_size,
              buffer_size);
        return nullptr;
    }

    AMediaCodec* codec = nullptr;
    auto encoder_names =
            use_sw_encoder ? GetSWVideoEncoderNames(type) : GetHWVideoEncoderNames(type);
    for (const auto& encoder_name : encoder_names) {
        codec = AMediaCodec_createCodecByName(encoder_name);
        if (codec) {
            ALOGD("Created mediacodec encoder by name: %s", encoder_name);
            break;
        }
    }
    if (!codec) {
        ALOGE("Failed to create mediacodec encoder.");
        return nullptr;
    }

    return std::unique_ptr<MediaCodecEncoder>(
            new MediaCodecEncoder(codec, type, std::move(input_file), visible_size, buffer_size,
                                  file_size / buffer_size));
}

MediaCodecEncoder::MediaCodecEncoder(AMediaCodec* codec, VideoCodecType type,
                                     std::unique_ptr<CachedInputFileStream> input_file, Size size,
                                     size_t buffer_size, size_t num_total_frames)
      : kVisibleSize(size),
        kBufferSize(buffer_size),
        kNumTotalFrames(num_total_frames),
        codec_(codec),
        type_(type),
        num_encoded_frames_(num_total_frames),
        input_file_(std::move(input_file)) {}

MediaCodecEncoder::~MediaCodecEncoder() {
    if (codec_ != nullptr) {
        AMediaCodec_delete(codec_);
    }
}

void MediaCodecEncoder::SetEncodeInputBufferCb(const EncodeInputBufferCb& cb) {
    encode_input_buffer_cb_ = cb;
}

void MediaCodecEncoder::SetOutputBufferReadyCb(const OutputBufferReadyCb& cb) {
    output_buffer_ready_cb_ = cb;
}

void MediaCodecEncoder::Rewind() {
    input_frame_index_ = 0;
    input_file_->Rewind();
}

bool MediaCodecEncoder::Configure(int32_t bitrate, int32_t framerate) {
    ALOGV("Configure encoder bitrate=%d, framerate=%d", bitrate, framerate);
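    // Build the encoder format: raw I420 input at |kVisibleSize|, encoded with
    // constant-bitrate rate control at the requested bitrate and frame rate.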
    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, GetMimeType(type_));
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_PROFILE, GetProfile(type_));
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, COLOR_FormatYUV420Planar);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BITRATE_MODE, BITRATE_MODE_CBR);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, kIFrameIntervalSec);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, kVisibleSize.width);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, kVisibleSize.height);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, framerate);
    bool ret = AMediaCodec_configure(codec_, format, nullptr /* surface */, nullptr /* crypto */,
                                     AMEDIACODEC_CONFIGURE_FLAG_ENCODE) == AMEDIA_OK;
    AMediaFormat_delete(format);
    if (ret) {
        bitrate_ = bitrate;
        framerate_ = framerate;
    }
    return ret;
}

bool MediaCodecEncoder::Start() {
    return AMediaCodec_start(codec_) == AMEDIA_OK;
}

bool MediaCodecEncoder::Encode() {
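    // When |run_at_fps_| is set, input frames are paced at the configured frame rate
    // (one frame every 1000000 / framerate_ microseconds); otherwise they are fed as
    // fast as the codec accepts them.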
    const int64_t input_period = run_at_fps_ ? (1000000 / framerate_) : 0;
    const int64_t start_time = GetNowUs();

    bool input_done = false;
    bool output_done = false;
    int64_t last_enqueue_input_time = start_time;
    int64_t send_eos_time;
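    // |last_enqueue_input_time| and |send_eos_time| track progress so a stalled codec
    // can be detected: if no buffer is exchanged within kBufferPeriodTimeoutUs, abort.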
    while (!output_done) {
        // Feed input stream to the encoder.
        ssize_t index;
        if (!input_done && (GetNowUs() - start_time >= input_frame_index_ * input_period)) {
            index = AMediaCodec_dequeueInputBuffer(codec_, kTimeoutUs);
            if (index == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
                if (GetNowUs() - last_enqueue_input_time > kBufferPeriodTimeoutUs) {
                    ALOGE("Timeout to dequeue next input buffer.");
                    return false;
                }
            } else if (index >= 0) {
                ALOGV("input buffer index: %zu", index);
                if (input_frame_index_ == num_encoded_frames_) {
                    if (!FeedEOSInputBuffer(index)) return false;

                    input_done = true;
                    send_eos_time = GetNowUs();
                } else {
                    if (!FeedInputBuffer(index)) return false;

                    last_enqueue_input_time = GetNowUs();
                }
            }
        }

        // Retrieve the encoded output buffer.
        AMediaCodecBufferInfo info;
        index = AMediaCodec_dequeueOutputBuffer(codec_, &info, kTimeoutUs);
        if (index == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
            if (input_done && GetNowUs() - send_eos_time > kBufferPeriodTimeoutUs) {
                ALOGE("Timeout to receive EOS output buffer.");
                return false;
            }
        } else if (index >= 0) {
            ALOGV("output buffer index: %zu", index);
            if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) output_done = true;
            if (!ReceiveOutputBuffer(index, info)) return false;
        }
    }
    return true;
}

bool MediaCodecEncoder::Stop() {
    return AMediaCodec_stop(codec_) == AMEDIA_OK;
}

void MediaCodecEncoder::set_num_encoded_frames(size_t num_encoded_frames) {
    num_encoded_frames_ = num_encoded_frames;
}

size_t MediaCodecEncoder::num_encoded_frames() const {
    return num_encoded_frames_;
}

void MediaCodecEncoder::set_run_at_fps(bool run_at_fps) {
    run_at_fps_ = run_at_fps;
}

bool MediaCodecEncoder::FeedInputBuffer(size_t index) {
    ALOGV("input buffer index: %zu", index);
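    // The presentation timestamp is derived from the frame index at the configured frame rate.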
    uint64_t time_us = input_frame_index_ * 1000000 / framerate_;

    size_t out_size;
    uint8_t* buf = AMediaCodec_getInputBuffer(codec_, index, &out_size);
    if (!buf || out_size < kBufferSize) {
        ALOGE("Failed to getInputBuffer: index=%zu, buf=%p, out_size=%zu", index, buf, out_size);
        return false;
    }

    if (input_file_->Read(reinterpret_cast<char*>(buf), kBufferSize) != kBufferSize) {
        ALOGE("Failed to read buffer from file.");
        return false;
    }

    // Encode the video stream circularly: rewind the input file once all of its frames
    // have been consumed, in case more frames are requested than the file contains.
    ++input_frame_index_;
    if (input_frame_index_ % kNumTotalFrames == 0) {
        input_file_->Rewind();
    }

    if (encode_input_buffer_cb_) encode_input_buffer_cb_(time_us);

    media_status_t status = AMediaCodec_queueInputBuffer(codec_, index, 0 /* offset */, kBufferSize,
                                                         time_us, 0 /* flag */);
    if (status != AMEDIA_OK) {
        ALOGE("Failed to queueInputBuffer: %d", static_cast<int>(status));
        return false;
    }
    return true;
}

bool MediaCodecEncoder::FeedEOSInputBuffer(size_t index) {
    ALOGV("input buffer index: %zu", index);
    uint64_t time_us = input_frame_index_ * 1000000 / framerate_;

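    // Queue the buffer with the END_OF_STREAM flag so the codec knows no more input will follow.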
    media_status_t status =
            AMediaCodec_queueInputBuffer(codec_, index, 0 /* offset */, kBufferSize, time_us,
                                         AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
    if (status != AMEDIA_OK) {
        ALOGE("Failed to queueInputBuffer: %d", static_cast<int>(status));
        return false;
    }
    return true;
}

bool MediaCodecEncoder::ReceiveOutputBuffer(size_t index, const AMediaCodecBufferInfo& info) {
    size_t out_size;
    uint8_t* buf = AMediaCodec_getOutputBuffer(codec_, index, &out_size);
    if (!buf) {
        ALOGE("Failed to getOutputBuffer.");
        return false;
    }

    if (output_buffer_ready_cb_) output_buffer_ready_cb_(buf, info);

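    // Release without rendering; no output surface was configured for this encoder.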
    media_status_t status = AMediaCodec_releaseOutputBuffer(codec_, index, false /* render */);
    if (status != AMEDIA_OK) {
        ALOGE("Failed to releaseOutputBuffer: %d", static_cast<int>(status));
        return false;
    }
    return true;
}

}  // namespace android