1 /*
2 * Copyright 2012, The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #include "hidl/HidlSupport.h"
19 #define LOG_TAG "MediaCodec"
20 #include <utils/Log.h>
21
22 #include <dlfcn.h>
23 #include <inttypes.h>
24 #include <future>
25 #include <random>
26 #include <set>
27 #include <string>
28
29 #include <C2Buffer.h>
30
31 #include "include/SoftwareRenderer.h"
32
33 #include <android_media_codec.h>
34
35 #include <android/api-level.h>
36 #include <android/content/pm/IPackageManagerNative.h>
37 #include <android/hardware/cas/native/1.0/IDescrambler.h>
38 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
39
40 #include <aidl/android/media/BnResourceManagerClient.h>
41 #include <aidl/android/media/IResourceManagerService.h>
42 #include <android/binder_ibinder.h>
43 #include <android/binder_manager.h>
44 #include <android/dlext.h>
45 #include <android-base/stringprintf.h>
46 #include <binder/IMemory.h>
47 #include <binder/IServiceManager.h>
48 #include <binder/MemoryDealer.h>
49 #include <cutils/properties.h>
50 #include <gui/BufferQueue.h>
51 #include <gui/Surface.h>
52 #include <hidlmemory/FrameworkUtils.h>
53 #include <mediadrm/ICrypto.h>
54 #include <media/IOMX.h>
55 #include <media/MediaCodecBuffer.h>
56 #include <media/MediaCodecInfo.h>
57 #include <media/MediaMetricsItem.h>
58 #include <media/MediaResource.h>
59 #include <media/NdkMediaErrorPriv.h>
60 #include <media/NdkMediaFormat.h>
61 #include <media/NdkMediaFormatPriv.h>
62 #include <media/formatshaper/FormatShaper.h>
63 #include <media/stagefright/foundation/ABuffer.h>
64 #include <media/stagefright/foundation/ADebug.h>
65 #include <media/stagefright/foundation/AMessage.h>
66 #include <media/stagefright/foundation/AString.h>
67 #include <media/stagefright/foundation/AUtils.h>
68 #include <media/stagefright/foundation/avc_utils.h>
69 #include <media/stagefright/foundation/hexdump.h>
70 #include <media/stagefright/ACodec.h>
71 #include <media/stagefright/BatteryChecker.h>
72 #include <media/stagefright/BufferProducerWrapper.h>
73 #include <media/stagefright/CCodec.h>
74 #include <media/stagefright/CryptoAsync.h>
75 #include <media/stagefright/MediaCodec.h>
76 #include <media/stagefright/MediaCodecConstants.h>
77 #include <media/stagefright/MediaCodecList.h>
78 #include <media/stagefright/MediaDefs.h>
79 #include <media/stagefright/MediaErrors.h>
80 #include <media/stagefright/OMXClient.h>
81 #include <media/stagefright/PersistentSurface.h>
82 #include <media/stagefright/RenderedFrameInfo.h>
83 #include <media/stagefright/SurfaceUtils.h>
84 #include <nativeloader/dlext_namespaces.h>
85 #include <private/android_filesystem_config.h>
86 #include <server_configurable_flags/get_flags.h>
87 #include <utils/Singleton.h>
88
89 namespace android {
90
91 using Status = ::ndk::ScopedAStatus;
92 using aidl::android::media::BnResourceManagerClient;
93 using aidl::android::media::IResourceManagerClient;
94 using aidl::android::media::IResourceManagerService;
95 using aidl::android::media::ClientInfoParcel;
96 using server_configurable_flags::GetServerConfigurableFlag;
97 using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
98 using JudderEvent = VideoRenderQualityTracker::JudderEvent;
99
// key for media statistics
static const char *kCodecKeyName = "codec";
// attrs for media statistics
// NB: these are matched with public Java API constants defined
// in frameworks/base/media/java/android/media/MediaCodec.java
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec"; /* e.g. OMX.google.aac.decoder */
static const char *kCodecId = "android.media.mediacodec.id";
static const char *kCodecMime = "android.media.mediacodec.mime"; /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode"; /* audio, video */
static const char *kCodecModeVideo = "video"; /* values returned for kCodecMode */
static const char *kCodecModeAudio = "audio";
static const char *kCodecModeImage = "image";
static const char *kCodecModeUnknown = "unknown";
static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
static const char *kCodecSecure = "android.media.mediacodec.secure"; /* 0, 1 */
static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
static const char *kCodecWidth = "android.media.mediacodec.width"; /* 0..n */
static const char *kCodecHeight = "android.media.mediacodec.height"; /* 0..n */
static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees"; /* 0/90/180/270 */
static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";

// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";

// Min/Max QP after shaping
static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";

// NB: These are not yet exposed as public Java API constants.
static const char *kCodecCrypto = "android.media.mediacodec.crypto"; /* 0,1 */
static const char *kCodecProfile = "android.media.mediacodec.profile"; /* 0..n */
static const char *kCodecLevel = "android.media.mediacodec.level"; /* 0..n */
static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode"; /* CQ/VBR/CBR */
static const char *kCodecBitrate = "android.media.mediacodec.bitrate"; /* 0..n */
static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate"; /* 0..n */
static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth"; /* 0..n */
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight"; /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs"; /* 0..n ms*/
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max"; /* in us */
static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min"; /* in us */
static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg"; /* in us */
static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";

// Low-latency mode transition counters.
static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame"; /* 0..n */
static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
// HDR metrics
static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
// array/sync/async/block modes
static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
// max size configured by the app
static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
// max size actually used
static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
// max size suggested by the codec
static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";

// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min"; /* in us */
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */

/* -1: shaper disabled
   >=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";

// Render metrics
static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
// Freeze
static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
static const char *kCodecFreezeDurationMsHistogram =
        "android.media.mediacodec.freeze-duration-ms-histogram";
static const char *kCodecFreezeDurationMsHistogramBuckets =
        "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
static const char *kCodecFreezeDistanceMsHistogram =
        "android.media.mediacodec.freeze-distance-ms-histogram";
static const char *kCodecFreezeDistanceMsHistogramBuckets =
        "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
// Judder
static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
static const char *kCodecJudderScoreHistogramBuckets =
        "android.media.mediacodec.judder-score-histogram-buckets";
// Freeze event
static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
static const char *kFreezeEventKeyName = "videofreeze";
static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
static const char *kFreezeEventDetailsDurationMs =
        "android.media.mediacodec.freeze.details-duration-ms";
static const char *kFreezeEventDetailsDistanceMs =
        "android.media.mediacodec.freeze.details-distance-ms";
// Judder event
static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
static const char *kJudderEventKeyName = "videojudder";
static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
static const char *kJudderEventDetailsActualDurationUs =
        "android.media.mediacodec.judder.details-actual-duration-us";
static const char *kJudderEventDetailsContentDurationUs =
        "android.media.mediacodec.judder.details-content-duration-us";
static const char *kJudderEventDetailsDistanceMs =
        "android.media.mediacodec.judder.details-distance-ms";

// XXX suppress until we get our representation right
static bool kEmitHistogram = false;

// Holds a list of per-access-unit info; consumers read the list through the
// wrapper's `value` member (see generateFlagsFromAccessUnitInfo below).
typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
277
278 // Multi access unit helpers
generateFlagsFromAccessUnitInfo(sp<AMessage> & msg,const sp<BufferInfosWrapper> & bufferInfos)279 static status_t generateFlagsFromAccessUnitInfo(
280 sp<AMessage> &msg, const sp<BufferInfosWrapper> &bufferInfos) {
281 msg->setInt64("timeUs", bufferInfos->value[0].mTimestamp);
282 msg->setInt32("flags", bufferInfos->value[0].mFlags);
283 // will prevent any access-unit info copy.
284 if (bufferInfos->value.size() > 1) {
285 uint32_t bufferFlags = 0;
286 uint32_t flagsInAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODEC_CONFIG;
287 uint32_t andFlags = flagsInAllAU;
288 int infoIdx = 0;
289 bool foundEndOfStream = false;
290 for ( ; infoIdx < bufferInfos->value.size() && !foundEndOfStream; ++infoIdx) {
291 bufferFlags |= bufferInfos->value[infoIdx].mFlags;
292 andFlags &= bufferInfos->value[infoIdx].mFlags;
293 if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
294 foundEndOfStream = true;
295 }
296 }
297 bufferFlags = bufferFlags & (andFlags | (~flagsInAllAU));
298 if (infoIdx != bufferInfos->value.size()) {
299 ALOGE("Error: incorrect access-units");
300 return -EINVAL;
301 }
302 msg->setInt32("flags", bufferFlags);
303 }
304 return OK;
305 }
306
getId(IResourceManagerClient const * client)307 static int64_t getId(IResourceManagerClient const * client) {
308 return (int64_t) client;
309 }
310
// Convenience overload: resolve the identifier from the shared_ptr wrapper.
static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
    return getId(client.get());
}
314
isResourceError(status_t err)315 static bool isResourceError(status_t err) {
316 return (err == NO_MEMORY);
317 }
318
areRenderMetricsEnabled()319 static bool areRenderMetricsEnabled() {
320 std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
321 return v == "true";
322 }
323
// NOTE(review): call sites for these limits are outside this chunk; names
// suggest retry/reclaim bounds and a buffer-count alignment — verify usage.
static const int kMaxRetry = 2;
static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
static const int kNumBuffersAlign = 16;

// Default CPU read/write usage for Codec2 buffer allocation.
static const C2MemoryUsage kDefaultReadWriteUsage{
        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
330
331 ////////////////////////////////////////////////////////////////////////////////
332
333 /*
334 * Implementation of IResourceManagerClient interrface that facilitates
335 * MediaCodec reclaim for the ResourceManagerService.
336 */
337 struct ResourceManagerClient : public BnResourceManagerClient {
ResourceManagerClientandroid::ResourceManagerClient338 explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
339 mMediaCodec(codec), mPid(pid), mUid(uid) {}
340
reclaimResourceandroid::ResourceManagerClient341 Status reclaimResource(bool* _aidl_return) override {
342 sp<MediaCodec> codec = mMediaCodec.promote();
343 if (codec == NULL) {
344 // Codec is already gone, so remove the resources as well
345 ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
346 std::shared_ptr<IResourceManagerService> service =
347 IResourceManagerService::fromBinder(binder);
348 if (service == nullptr) {
349 ALOGE("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
350 *_aidl_return = false;
351 return Status::fromStatus(STATUS_INVALID_OPERATION);
352 }
353 ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
354 .uid = static_cast<int32_t>(mUid),
355 .id = getId(this)};
356 service->removeClient(clientInfo);
357 *_aidl_return = true;
358 return Status::ok();
359 }
360 status_t err = codec->reclaim();
361 if (err == WOULD_BLOCK) {
362 ALOGD("Wait for the client to release codec.");
363 usleep(kMaxReclaimWaitTimeInUs);
364 ALOGD("Try to reclaim again.");
365 err = codec->reclaim(true /* force */);
366 }
367 if (err != OK) {
368 ALOGW("ResourceManagerClient failed to release codec with err %d", err);
369 }
370 *_aidl_return = (err == OK);
371 return Status::ok();
372 }
373
getNameandroid::ResourceManagerClient374 Status getName(::std::string* _aidl_return) override {
375 _aidl_return->clear();
376 sp<MediaCodec> codec = mMediaCodec.promote();
377 if (codec == NULL) {
378 // codec is already gone.
379 return Status::ok();
380 }
381
382 AString name;
383 if (codec->getName(&name) == OK) {
384 *_aidl_return = name.c_str();
385 }
386 return Status::ok();
387 }
388
~ResourceManagerClientandroid::ResourceManagerClient389 virtual ~ResourceManagerClient() {}
390
391 private:
392 wp<MediaCodec> mMediaCodec;
393 int32_t mPid;
394 int32_t mUid;
395
396 DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
397 };
398
399 /*
400 * Proxy for ResourceManagerService that communicates with the
401 * ResourceManagerService for MediaCodec
402 */
403 struct MediaCodec::ResourceManagerServiceProxy :
404 public std::enable_shared_from_this<ResourceManagerServiceProxy> {
405
406 // BinderDiedContext defines the cookie that is passed as DeathRecipient.
407 // Since this can maintain more context than a raw pointer, we can
408 // validate the scope of ResourceManagerServiceProxy,
409 // before deferencing it upon the binder death.
410 struct BinderDiedContext {
411 std::weak_ptr<ResourceManagerServiceProxy> mRMServiceProxy;
412 };
413
414 ResourceManagerServiceProxy(pid_t pid, uid_t uid,
415 const std::shared_ptr<IResourceManagerClient> &client);
416 ~ResourceManagerServiceProxy();
417 status_t init();
418 void addResource(const MediaResourceParcel &resource);
419 void removeResource(const MediaResourceParcel &resource);
420 void removeClient();
421 void markClientForPendingRemoval();
422 bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
423 void notifyClientCreated();
424 void notifyClientStarted(ClientConfigParcel& clientConfig);
425 void notifyClientStopped(ClientConfigParcel& clientConfig);
426 void notifyClientConfigChanged(ClientConfigParcel& clientConfig);
427
setCodecNameandroid::MediaCodec::ResourceManagerServiceProxy428 inline void setCodecName(const char* name) {
429 mCodecName = name;
430 }
431
setImportanceandroid::MediaCodec::ResourceManagerServiceProxy432 inline void setImportance(int importance) {
433 mImportance = importance;
434 }
435
436 private:
437 // To get the binder interface to ResourceManagerService.
getServiceandroid::MediaCodec::ResourceManagerServiceProxy438 void getService() {
439 std::scoped_lock lock{mLock};
440 getService_l();
441 }
442
443 std::shared_ptr<IResourceManagerService> getService_l();
444
445 // To add/register all the resources currently added/registered with
446 // the ResourceManagerService.
447 // This function will be called right after the death of the Resource
448 // Manager to make sure that the newly started ResourceManagerService
449 // knows about the current resource usage.
450 void reRegisterAllResources_l();
451
deinitandroid::MediaCodec::ResourceManagerServiceProxy452 void deinit() {
453 std::scoped_lock lock{mLock};
454 // Unregistering from DeathRecipient notification.
455 if (mService != nullptr) {
456 AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);
457 mService = nullptr;
458 }
459 }
460
461 // For binder death handling
462 static void BinderDiedCallback(void* cookie);
463 static void BinderUnlinkedCallback(void* cookie);
464
binderDiedandroid::MediaCodec::ResourceManagerServiceProxy465 void binderDied() {
466 std::scoped_lock lock{mLock};
467 ALOGE("ResourceManagerService died.");
468 mService = nullptr;
469 mBinderDied = true;
470 // start an async operation that will reconnect with the RM and
471 // re-registers all the resources.
472 mGetServiceFuture = std::async(std::launch::async, [this] { getService(); });
473 }
474
475 /**
476 * Get the ClientInfo to communicate with the ResourceManager.
477 *
478 * ClientInfo includes:
479 * - {pid, uid} of the process
480 * - identifier for the client
481 * - name of the client/codec
482 * - importance associated with the client
483 */
getClientInfoandroid::MediaCodec::ResourceManagerServiceProxy484 inline ClientInfoParcel getClientInfo() const {
485 ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
486 .uid = static_cast<int32_t>(mUid),
487 .id = getId(mClient),
488 .name = mCodecName,
489 .importance = mImportance};
490 return std::move(clientInfo);
491 }
492
493 private:
494 std::mutex mLock;
495 bool mBinderDied = false;
496 pid_t mPid;
497 uid_t mUid;
498 int mImportance = 0;
499 std::string mCodecName;
500 /**
501 * Reconnecting with the ResourceManagerService, after its binder interface dies,
502 * is done asynchronously. It will also make sure that, all the resources
503 * asssociated with this Proxy (MediaCodec) is added with the new instance
504 * of the ResourceManagerService to persist the state of resources.
505 * We must store the reference of the furture to guarantee real asynchronous operation.
506 */
507 std::future<void> mGetServiceFuture;
508 // To maintain the list of all the resources currently added/registered with
509 // the ResourceManagerService.
510 std::set<MediaResourceParcel> mMediaResourceParcel;
511 std::shared_ptr<IResourceManagerClient> mClient;
512 ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
513 std::shared_ptr<IResourceManagerService> mService;
514 BinderDiedContext* mCookie;
515 };
516
// Constructs the proxy; falls back to the binder calling pid/uid when the
// caller passed kNoPid/kNoUid. The death recipient is created here but only
// linked to the service in getService_l().
MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
        pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client) :
        mPid(pid), mUid(uid), mClient(client),
        mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
                AIBinder_DeathRecipient_new(BinderDiedCallback))),
        mCookie(nullptr) {
    if (mUid == MediaCodec::kNoUid) {
        mUid = AIBinder_getCallingUid();
    }
    if (mPid == MediaCodec::kNoPid) {
        mPid = AIBinder_getCallingPid();
    }
    // Setting callback notification when DeathRecipient gets deleted.
    AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
}
532
// Unlinks from binder death notification before the proxy goes away.
MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
    deinit();
}
536
init()537 status_t MediaCodec::ResourceManagerServiceProxy::init() {
538 std::scoped_lock lock{mLock};
539
540 int callerPid = AIBinder_getCallingPid();
541 int callerUid = AIBinder_getCallingUid();
542
543 if (mPid != callerPid || mUid != callerUid) {
544 // Media processes don't need special permissions to act on behalf of other processes.
545 if (callerUid != AID_MEDIA) {
546 char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
547 if (!checkCallingPermission(String16(permission))) {
548 ALOGW("%s is required to override the caller's PID for media resource management.",
549 permission);
550 return PERMISSION_DENIED;
551 }
552 }
553 }
554
555 mService = getService_l();
556 if (mService == nullptr) {
557 return DEAD_OBJECT;
558 }
559
560 // Kill clients pending removal.
561 mService->reclaimResourcesFromClientsPendingRemoval(mPid);
562 return OK;
563 }
564
// Returns the cached ResourceManagerService handle, (re)connecting when
// needed. Caller must hold mLock (the _l suffix convention). On reconnect
// after a service death, previously registered resources are re-registered.
std::shared_ptr<IResourceManagerService> MediaCodec::ResourceManagerServiceProxy::getService_l() {
    if (mService != nullptr) {
        return mService;
    }

    // Get binder interface to resource manager.
    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
    mService = IResourceManagerService::fromBinder(binder);
    if (mService == nullptr) {
        ALOGE("Failed to get ResourceManagerService");
        return mService;
    }

    // Create the context that is passed as cookie to the binder death notification.
    // The context gets deleted at BinderUnlinkedCallback.
    mCookie = new BinderDiedContext{.mRMServiceProxy = weak_from_this()};
    // Register for the callbacks by linking to death notification.
    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);

    // If the RM was restarted, re-register all the resources.
    if (mBinderDied) {
        reRegisterAllResources_l();
        mBinderDied = false;
    }
    return mService;
}
591
reRegisterAllResources_l()592 void MediaCodec::ResourceManagerServiceProxy::reRegisterAllResources_l() {
593 if (mMediaResourceParcel.empty()) {
594 ALOGV("No resources to add");
595 return;
596 }
597
598 if (mService == nullptr) {
599 ALOGW("Service isn't available");
600 return;
601 }
602
603 std::vector<MediaResourceParcel> resources;
604 std::copy(mMediaResourceParcel.begin(), mMediaResourceParcel.end(),
605 std::back_inserter(resources));
606 mService->addResource(getClientInfo(), mClient, resources);
607 }
608
BinderDiedCallback(void * cookie)609 void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
610 BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
611
612 // Validate the context and check if the ResourceManagerServiceProxy object is still in scope.
613 if (context != nullptr) {
614 std::shared_ptr<ResourceManagerServiceProxy> thiz = context->mRMServiceProxy.lock();
615 if (thiz != nullptr) {
616 thiz->binderDied();
617 } else {
618 ALOGI("ResourceManagerServiceProxy is out of scope already");
619 }
620 }
621 }
622
BinderUnlinkedCallback(void * cookie)623 void MediaCodec::ResourceManagerServiceProxy::BinderUnlinkedCallback(void* cookie) {
624 BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
625 // Since we don't need the context anymore, we are deleting it now.
626 delete context;
627 }
628
addResource(const MediaResourceParcel & resource)629 void MediaCodec::ResourceManagerServiceProxy::addResource(
630 const MediaResourceParcel &resource) {
631 std::scoped_lock lock{mLock};
632 std::shared_ptr<IResourceManagerService> service = getService_l();
633 if (service == nullptr) {
634 ALOGW("Service isn't available");
635 return;
636 }
637 std::vector<MediaResourceParcel> resources;
638 resources.push_back(resource);
639 service->addResource(getClientInfo(), mClient, resources);
640 mMediaResourceParcel.emplace(resource);
641 }
642
removeResource(const MediaResourceParcel & resource)643 void MediaCodec::ResourceManagerServiceProxy::removeResource(
644 const MediaResourceParcel &resource) {
645 std::scoped_lock lock{mLock};
646 std::shared_ptr<IResourceManagerService> service = getService_l();
647 if (service == nullptr) {
648 ALOGW("Service isn't available");
649 return;
650 }
651 std::vector<MediaResourceParcel> resources;
652 resources.push_back(resource);
653 service->removeResource(getClientInfo(), resources);
654 mMediaResourceParcel.erase(resource);
655 }
656
removeClient()657 void MediaCodec::ResourceManagerServiceProxy::removeClient() {
658 std::scoped_lock lock{mLock};
659 std::shared_ptr<IResourceManagerService> service = getService_l();
660 if (service == nullptr) {
661 ALOGW("Service isn't available");
662 return;
663 }
664 service->removeClient(getClientInfo());
665 mMediaResourceParcel.clear();
666 }
667
markClientForPendingRemoval()668 void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
669 std::scoped_lock lock{mLock};
670 std::shared_ptr<IResourceManagerService> service = getService_l();
671 if (service == nullptr) {
672 ALOGW("Service isn't available");
673 return;
674 }
675 service->markClientForPendingRemoval(getClientInfo());
676 mMediaResourceParcel.clear();
677 }
678
reclaimResource(const std::vector<MediaResourceParcel> & resources)679 bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
680 const std::vector<MediaResourceParcel> &resources) {
681 std::scoped_lock lock{mLock};
682 std::shared_ptr<IResourceManagerService> service = getService_l();
683 if (service == nullptr) {
684 ALOGW("Service isn't available");
685 return false;
686 }
687 bool success;
688 Status status = service->reclaimResource(getClientInfo(), resources, &success);
689 return status.isOk() && success;
690 }
691
notifyClientCreated()692 void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
693 std::scoped_lock lock{mLock};
694 std::shared_ptr<IResourceManagerService> service = getService_l();
695 if (service == nullptr) {
696 ALOGW("Service isn't available");
697 return;
698 }
699 service->notifyClientCreated(getClientInfo());
700 }
701
notifyClientStarted(ClientConfigParcel & clientConfig)702 void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
703 ClientConfigParcel& clientConfig) {
704 std::scoped_lock lock{mLock};
705 std::shared_ptr<IResourceManagerService> service = getService_l();
706 if (service == nullptr) {
707 ALOGW("Service isn't available");
708 return;
709 }
710 clientConfig.clientInfo = getClientInfo();
711 service->notifyClientStarted(clientConfig);
712 }
713
notifyClientStopped(ClientConfigParcel & clientConfig)714 void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
715 ClientConfigParcel& clientConfig) {
716 std::scoped_lock lock{mLock};
717 std::shared_ptr<IResourceManagerService> service = getService_l();
718 if (service == nullptr) {
719 ALOGW("Service isn't available");
720 return;
721 }
722 clientConfig.clientInfo = getClientInfo();
723 service->notifyClientStopped(clientConfig);
724 }
725
notifyClientConfigChanged(ClientConfigParcel & clientConfig)726 void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
727 ClientConfigParcel& clientConfig) {
728 std::scoped_lock lock{mLock};
729 std::shared_ptr<IResourceManagerService> service = getService_l();
730 if (service == nullptr) {
731 ALOGW("Service isn't available");
732 return;
733 }
734 clientConfig.clientInfo = getClientInfo();
735 service->notifyClientConfigChanged(clientConfig);
736 }
737
738 ////////////////////////////////////////////////////////////////////////////////
739
BufferInfo()740 MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
741
742 ////////////////////////////////////////////////////////////////////////////////
743
// A throwaway output surface used during codec release: its consumer acquires
// and immediately releases every incoming frame, letting the codec drain
// without a real display sink.
class MediaCodec::ReleaseSurface {
public:
    explicit ReleaseSurface(uint64_t usage) {
        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
        mSurface = new Surface(mProducer, false /* controlledByApp */);
        // Consumer-side listener that drops every frame as soon as it arrives.
        struct ConsumerListener : public BnConsumerListener {
            ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
                mConsumer = consumer;
            }
            void onFrameAvailable(const BufferItem&) override {
                BufferItem buffer;
                // consume buffer
                sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
                if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
                    consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
                                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
                }
            }

            // Held weakly so the listener does not keep the consumer alive.
            wp<IGraphicBufferConsumer> mConsumer;
            void onBuffersReleased() override {}
            void onSidebandStreamChanged() override {}
        };
        sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
        mConsumer->consumerConnect(listener, false);
        mConsumer->setConsumerName(String8{"MediaCodec.release"});
        mConsumer->setConsumerUsageBits(usage);
    }

    // Returns the producer-side surface the codec should render into.
    const sp<Surface> &getSurface() {
        return mSurface;
    }

private:
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;
    sp<Surface> mSurface;
};
782
783 ////////////////////////////////////////////////////////////////////////////////
784
785 namespace {
786
// Looper message ids ("what" codes) used internally by MediaCodec, encoded as
// FourCC-style multi-character constants for readable logs.
enum {
    kWhatFillThisBuffer = 'fill',
    kWhatDrainThisBuffer = 'drai',
    kWhatEOS = 'eos ',
    kWhatStartCompleted = 'Scom',
    kWhatStopCompleted = 'scom',
    kWhatReleaseCompleted = 'rcom',
    kWhatFlushCompleted = 'fcom',
    kWhatError = 'erro',
    kWhatCryptoError = 'ercp',
    kWhatComponentAllocated = 'cAll',
    kWhatComponentConfigured = 'cCon',
    kWhatInputSurfaceCreated = 'isfc',
    kWhatInputSurfaceAccepted = 'isfa',
    kWhatSignaledInputEOS = 'seos',
    kWhatOutputFramesRendered = 'outR',
    kWhatOutputBuffersChanged = 'outC',
    kWhatFirstTunnelFrameReady = 'ftfR',
    kWhatPollForRenderedBuffers = 'plrb',
    kWhatMetricsUpdated = 'mtru',
};
808
809 class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
810 public:
811
CryptoAsyncCallback(const sp<AMessage> & notify)812 explicit CryptoAsyncCallback(const sp<AMessage> & notify):mNotify(notify) {
813 }
814
~CryptoAsyncCallback()815 ~CryptoAsyncCallback() {}
816
onDecryptComplete(const sp<AMessage> & result)817 void onDecryptComplete(const sp<AMessage> &result) override {
818 (void)result;
819 }
820
onDecryptError(const std::list<sp<AMessage>> & errorMsgs)821 void onDecryptError(const std::list<sp<AMessage>> &errorMsgs) override {
822 // This error may be decrypt/queue error.
823 status_t errorCode ;
824 for (auto &emsg : errorMsgs) {
825 sp<AMessage> notify(mNotify->dup());
826 if(emsg->findInt32("err", &errorCode)) {
827 if (isCryptoError(errorCode)) {
828 notify->setInt32("what", kWhatCryptoError);
829 } else {
830 notify->setInt32("what", kWhatError);
831 }
832 notify->extend(emsg);
833 notify->post();
834 } else {
835 ALOGW("Buffers with no errorCode are not expected");
836 }
837 }
838 }
839 private:
840 const sp<AMessage> mNotify;
841 };
842
843 class OnBufferReleasedListener : public ::android::BnProducerListener{
844 private:
845 uint32_t mGeneration;
846 std::weak_ptr<BufferChannelBase> mBufferChannel;
847
notifyBufferReleased()848 void notifyBufferReleased() {
849 auto p = mBufferChannel.lock();
850 if (p) {
851 p->onBufferReleasedFromOutputSurface(mGeneration);
852 }
853 }
854
855 public:
OnBufferReleasedListener(uint32_t generation,const std::shared_ptr<BufferChannelBase> & bufferChannel)856 explicit OnBufferReleasedListener(
857 uint32_t generation,
858 const std::shared_ptr<BufferChannelBase> &bufferChannel)
859 : mGeneration(generation), mBufferChannel(bufferChannel) {}
860
861 virtual ~OnBufferReleasedListener() = default;
862
onBufferReleased()863 void onBufferReleased() override {
864 notifyBufferReleased();
865 }
866
onBufferDetached(int slot)867 void onBufferDetached([[maybe_unused]] int slot) override {
868 notifyBufferReleased();
869 }
870
needsReleaseNotify()871 bool needsReleaseNotify() override { return true; }
872 };
873
// Receives buffer-availability callbacks from CodecBase and reposts each one
// as a message to MediaCodec's looper thread.
class BufferCallback : public CodecBase::BufferCallback {
public:
    explicit BufferCallback(const sp<AMessage> &notify);
    virtual ~BufferCallback() = default;

    virtual void onInputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
    virtual void onOutputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
private:
    // Template message; each callback posts a dup() of it.
    const sp<AMessage> mNotify;
};
886
BufferCallback(const sp<AMessage> & notify)887 BufferCallback::BufferCallback(const sp<AMessage> ¬ify)
888 : mNotify(notify) {}
889
onInputBufferAvailable(size_t index,const sp<MediaCodecBuffer> & buffer)890 void BufferCallback::onInputBufferAvailable(
891 size_t index, const sp<MediaCodecBuffer> &buffer) {
892 sp<AMessage> notify(mNotify->dup());
893 notify->setInt32("what", kWhatFillThisBuffer);
894 notify->setSize("index", index);
895 notify->setObject("buffer", buffer);
896 notify->post();
897 }
898
onOutputBufferAvailable(size_t index,const sp<MediaCodecBuffer> & buffer)899 void BufferCallback::onOutputBufferAvailable(
900 size_t index, const sp<MediaCodecBuffer> &buffer) {
901 sp<AMessage> notify(mNotify->dup());
902 notify->setInt32("what", kWhatDrainThisBuffer);
903 notify->setSize("index", index);
904 notify->setObject("buffer", buffer);
905 notify->post();
906 }
907
// Receives lifecycle/event callbacks from CodecBase and reposts each one as a
// message to MediaCodec's looper thread.
class CodecCallback : public CodecBase::CodecCallback {
public:
    explicit CodecCallback(const sp<AMessage> &notify);
    virtual ~CodecCallback() = default;

    virtual void onEos(status_t err) override;
    virtual void onStartCompleted() override;
    virtual void onStopCompleted() override;
    virtual void onReleaseCompleted() override;
    virtual void onFlushCompleted() override;
    virtual void onError(status_t err, enum ActionCode actionCode) override;
    virtual void onComponentAllocated(const char *componentName) override;
    virtual void onComponentConfigured(
            const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceCreated(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat,
            const sp<BufferProducerWrapper> &inputSurface) override;
    virtual void onInputSurfaceCreationFailed(status_t err) override;
    virtual void onInputSurfaceAccepted(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceDeclined(status_t err) override;
    virtual void onSignaledInputEOS(status_t err) override;
    virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
    virtual void onOutputBuffersChanged() override;
    virtual void onFirstTunnelFrameReady() override;
    virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
private:
    // Template message; each callback posts a dup() of it.
    const sp<AMessage> mNotify;
};
939
CodecCallback(const sp<AMessage> & notify)940 CodecCallback::CodecCallback(const sp<AMessage> ¬ify) : mNotify(notify) {}
941
onEos(status_t err)942 void CodecCallback::onEos(status_t err) {
943 sp<AMessage> notify(mNotify->dup());
944 notify->setInt32("what", kWhatEOS);
945 notify->setInt32("err", err);
946 notify->post();
947 }
948
onStartCompleted()949 void CodecCallback::onStartCompleted() {
950 sp<AMessage> notify(mNotify->dup());
951 notify->setInt32("what", kWhatStartCompleted);
952 notify->post();
953 }
954
onStopCompleted()955 void CodecCallback::onStopCompleted() {
956 sp<AMessage> notify(mNotify->dup());
957 notify->setInt32("what", kWhatStopCompleted);
958 notify->post();
959 }
960
onReleaseCompleted()961 void CodecCallback::onReleaseCompleted() {
962 sp<AMessage> notify(mNotify->dup());
963 notify->setInt32("what", kWhatReleaseCompleted);
964 notify->post();
965 }
966
onFlushCompleted()967 void CodecCallback::onFlushCompleted() {
968 sp<AMessage> notify(mNotify->dup());
969 notify->setInt32("what", kWhatFlushCompleted);
970 notify->post();
971 }
972
onError(status_t err,enum ActionCode actionCode)973 void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
974 sp<AMessage> notify(mNotify->dup());
975 notify->setInt32("what", kWhatError);
976 notify->setInt32("err", err);
977 notify->setInt32("actionCode", actionCode);
978 notify->post();
979 }
980
onComponentAllocated(const char * componentName)981 void CodecCallback::onComponentAllocated(const char *componentName) {
982 sp<AMessage> notify(mNotify->dup());
983 notify->setInt32("what", kWhatComponentAllocated);
984 notify->setString("componentName", componentName);
985 notify->post();
986 }
987
onComponentConfigured(const sp<AMessage> & inputFormat,const sp<AMessage> & outputFormat)988 void CodecCallback::onComponentConfigured(
989 const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
990 sp<AMessage> notify(mNotify->dup());
991 notify->setInt32("what", kWhatComponentConfigured);
992 notify->setMessage("input-format", inputFormat);
993 notify->setMessage("output-format", outputFormat);
994 notify->post();
995 }
996
onInputSurfaceCreated(const sp<AMessage> & inputFormat,const sp<AMessage> & outputFormat,const sp<BufferProducerWrapper> & inputSurface)997 void CodecCallback::onInputSurfaceCreated(
998 const sp<AMessage> &inputFormat,
999 const sp<AMessage> &outputFormat,
1000 const sp<BufferProducerWrapper> &inputSurface) {
1001 sp<AMessage> notify(mNotify->dup());
1002 notify->setInt32("what", kWhatInputSurfaceCreated);
1003 notify->setMessage("input-format", inputFormat);
1004 notify->setMessage("output-format", outputFormat);
1005 notify->setObject("input-surface", inputSurface);
1006 notify->post();
1007 }
1008
onInputSurfaceCreationFailed(status_t err)1009 void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
1010 sp<AMessage> notify(mNotify->dup());
1011 notify->setInt32("what", kWhatInputSurfaceCreated);
1012 notify->setInt32("err", err);
1013 notify->post();
1014 }
1015
onInputSurfaceAccepted(const sp<AMessage> & inputFormat,const sp<AMessage> & outputFormat)1016 void CodecCallback::onInputSurfaceAccepted(
1017 const sp<AMessage> &inputFormat,
1018 const sp<AMessage> &outputFormat) {
1019 sp<AMessage> notify(mNotify->dup());
1020 notify->setInt32("what", kWhatInputSurfaceAccepted);
1021 notify->setMessage("input-format", inputFormat);
1022 notify->setMessage("output-format", outputFormat);
1023 notify->post();
1024 }
1025
onInputSurfaceDeclined(status_t err)1026 void CodecCallback::onInputSurfaceDeclined(status_t err) {
1027 sp<AMessage> notify(mNotify->dup());
1028 notify->setInt32("what", kWhatInputSurfaceAccepted);
1029 notify->setInt32("err", err);
1030 notify->post();
1031 }
1032
onSignaledInputEOS(status_t err)1033 void CodecCallback::onSignaledInputEOS(status_t err) {
1034 sp<AMessage> notify(mNotify->dup());
1035 notify->setInt32("what", kWhatSignaledInputEOS);
1036 if (err != OK) {
1037 notify->setInt32("err", err);
1038 }
1039 notify->post();
1040 }
1041
onOutputFramesRendered(const std::list<RenderedFrameInfo> & done)1042 void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
1043 sp<AMessage> notify(mNotify->dup());
1044 notify->setInt32("what", kWhatOutputFramesRendered);
1045 if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
1046 notify->post();
1047 }
1048 }
1049
onOutputBuffersChanged()1050 void CodecCallback::onOutputBuffersChanged() {
1051 sp<AMessage> notify(mNotify->dup());
1052 notify->setInt32("what", kWhatOutputBuffersChanged);
1053 notify->post();
1054 }
1055
onFirstTunnelFrameReady()1056 void CodecCallback::onFirstTunnelFrameReady() {
1057 sp<AMessage> notify(mNotify->dup());
1058 notify->setInt32("what", kWhatFirstTunnelFrameReady);
1059 notify->post();
1060 }
1061
onMetricsUpdated(const sp<AMessage> & updatedMetrics)1062 void CodecCallback::onMetricsUpdated(const sp<AMessage> &updatedMetrics) {
1063 sp<AMessage> notify(mNotify->dup());
1064 notify->setInt32("what", kWhatMetricsUpdated);
1065 notify->setMessage("updated-metrics", updatedMetrics);
1066 notify->post();
1067 }
1068
toMediaResourceSubType(bool isHardware,MediaCodec::Domain domain)1069 static MediaResourceSubType toMediaResourceSubType(bool isHardware, MediaCodec::Domain domain) {
1070 switch (domain) {
1071 case MediaCodec::DOMAIN_VIDEO:
1072 return isHardware? MediaResourceSubType::kHwVideoCodec :
1073 MediaResourceSubType::kSwVideoCodec;
1074 case MediaCodec::DOMAIN_AUDIO:
1075 return isHardware? MediaResourceSubType::kHwAudioCodec :
1076 MediaResourceSubType::kSwAudioCodec;
1077 case MediaCodec::DOMAIN_IMAGE:
1078 return isHardware? MediaResourceSubType::kHwImageCodec :
1079 MediaResourceSubType::kSwImageCodec;
1080 default:
1081 return MediaResourceSubType::kUnspecifiedSubType;
1082 }
1083 }
1084
toCodecMode(MediaCodec::Domain domain)1085 static const char * toCodecMode(MediaCodec::Domain domain) {
1086 switch (domain) {
1087 case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
1088 case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
1089 case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
1090 default: return kCodecModeUnknown;
1091 }
1092 }
1093
1094 } // namespace
1095
1096 ////////////////////////////////////////////////////////////////////////////////
1097
1098 // static
// Convenience overload: create a codec by MIME type with no format hints.
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid) {
    // Delegate to the full overload with a null format message.
    sp<AMessage> format;
    return CreateByType(looper, mime, encoder, err, pid, uid, format);
}
1105
// Creates a codec for the given MIME type by trying each matching component in
// rank order until one initializes successfully. On failure returns nullptr;
// *err (when provided) holds the last init error, or NAME_NOT_FOUND when no
// component matched at all.
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid, sp<AMessage> format) {
    Vector<AString> matchingCodecs;
    MediaCodecList::findMatchingCodecs(
            mime.c_str(), encoder, 0 /* flags */, format, &matchingCodecs);

    if (err != nullptr) {
        *err = NAME_NOT_FOUND;
    }
    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
        const AString &componentName = matchingCodecs[i];
        const status_t ret = codec->init(componentName);
        if (err != nullptr) {
            *err = ret;
        }
        if (ret == OK) {
            return codec;
        }
        ALOGD("Allocating component '%s' failed (%d), try next one.",
                componentName.c_str(), ret);
    }
    return NULL;
}
1136
1137 // static
// Creates a codec for an explicitly named component. Returns nullptr on init
// failure (dropping the last reference deallocates the codec); *err (when
// provided) receives the init status.
sp<MediaCodec> MediaCodec::CreateByComponentName(
        const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);

    const status_t ret = codec->init(name);
    if (err != nullptr) {
        *err = ret;
    }
    if (ret != OK) {
        return NULL; // NULL deallocates codec.
    }
    return codec;
}
1148
1149 // static
CreatePersistentInputSurface()1150 sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
1151 sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
1152 if (pluginSurface != nullptr) {
1153 return pluginSurface;
1154 }
1155
1156 OMXClient client;
1157 if (client.connect() != OK) {
1158 ALOGE("Failed to connect to OMX to create persistent input surface.");
1159 return NULL;
1160 }
1161
1162 sp<IOMX> omx = client.interface();
1163
1164 sp<IGraphicBufferProducer> bufferProducer;
1165 sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;
1166
1167 status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);
1168
1169 if (err != OK) {
1170 ALOGE("Failed to create persistent input surface.");
1171 return NULL;
1172 }
1173
1174 return new PersistentSurface(bufferProducer, bufferSource);
1175 }
1176
1177 // GenerateCodecId generates a 64bit Random ID for each codec that is created.
1178 // The Codec ID is generated as:
1179 // - A process-unique random high 32bits
1180 // - An atomic sequence low 32bits
1181 //
// GenerateCodecId generates a 64-bit random ID for each codec that is created:
// the high 32 bits are a per-process random value chosen once on first use,
// and the low 32 bits are a monotonically increasing sequence number.
static uint64_t GenerateCodecId() {
    static std::atomic_uint64_t sId = [] {
        std::random_device rd;
        std::mt19937 gen(rd());
        std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
        // Place the random value in the high word; the low word starts at 0.
        return static_cast<uint64_t>(distrib(gen)) << 32;
    }();
    return sId++;
}
1193
// Constructs a MediaCodec bound to the given looper and client identity.
// The two optional factories (getCodecBase / getCodecInfo) exist for testing;
// when empty, defaults that consult MediaCodecList are installed below.
MediaCodec::MediaCodec(
        const sp<ALooper> &looper, pid_t pid, uid_t uid,
        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
    : mState(UNINITIALIZED),
      mReleasedByResourceManager(false),
      mLooper(looper),
      mCodec(NULL),
      mReplyID(0),
      mFlags(0),
      mStickyError(OK),
      mSoftRenderer(NULL),
      mDomain(DOMAIN_UNKNOWN),
      mWidth(0),
      mHeight(0),
      mRotationDegrees(0),
      mDequeueInputTimeoutGeneration(0),
      mDequeueInputReplyID(0),
      mDequeueOutputTimeoutGeneration(0),
      mDequeueOutputReplyID(0),
      mTunneledInputWidth(0),
      mTunneledInputHeight(0),
      mTunneled(false),
      mTunnelPeekState(TunnelPeekState::kLegacyMode),
      mTunnelPeekEnabled(false),
      mHaveInputSurface(false),
      mHavePendingInputBuffers(false),
      mCpuBoostRequested(false),
      mIsSurfaceToDisplay(false),
      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
      mVideoRenderQualityTracker(
              VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
                      GetServerConfigurableFlag)),
      mLatencyUnknown(0),
      mBytesEncoded(0),
      mEarliestEncodedPtsUs(INT64_MAX),
      mLatestEncodedPtsUs(INT64_MIN),
      mFramesEncoded(0),
      mNumLowLatencyEnables(0),
      mNumLowLatencyDisables(0),
      mIsLowLatencyModeOn(false),
      mIndexOfFirstFrameWhenLowLatencyOn(-1),
      mInputBufferCounter(0),
      mGetCodecBase(getCodecBase),
      mGetCodecInfo(getCodecInfo) {
    mCodecId = GenerateCodecId();
    mResourceManagerProxy = std::make_shared<ResourceManagerServiceProxy>(pid, uid,
            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
    // Default factory: resolve a CodecBase implementation from name + owner.
    if (!mGetCodecBase) {
        mGetCodecBase = [](const AString &name, const char *owner) {
            return GetCodecBase(name, owner);
        };
    }
    // Default lookup: find the MediaCodecInfo for a component name, retrying
    // with any ".secure" suffix stripped.
    if (!mGetCodecInfo) {
        mGetCodecInfo = [&log = mErrorLog](const AString &name,
                                      sp<MediaCodecInfo> *info) -> status_t {
            *info = nullptr;
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            if (!mcl) {
                log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
                return NO_INIT; // if called from Java should raise IOException
            }
            AString tmp = name;
            if (tmp.endsWith(".secure")) {
                tmp.erase(tmp.size() - 7, 7);
            }
            for (const AString &codecName : { name, tmp }) {
                ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
                if (codecIdx < 0) {
                    continue;
                }
                *info = mcl->getCodecInfo(codecIdx);
                return OK;
            }
            log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
                    name.c_str()));
            return NAME_NOT_FOUND;
        };
    }

    // we want an empty metrics record for any early getMetrics() call
    // this should be the *only* initMediametrics() call that's not on the Looper thread
    initMediametrics();
}
1278
~MediaCodec()1279 MediaCodec::~MediaCodec() {
1280 CHECK_EQ(mState, UNINITIALIZED);
1281 mResourceManagerProxy->removeClient();
1282
1283 flushMediametrics();
1284
1285 // clean any saved metrics info we stored as part of configure()
1286 if (mConfigureMsg != nullptr) {
1287 mediametrics_handle_t metricsHandle;
1288 if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
1289 mediametrics_delete(metricsHandle);
1290 }
1291 }
1292 }
1293
1294 // except for in constructor, called from the looper thread (and therefore mutexed)
initMediametrics()1295 void MediaCodec::initMediametrics() {
1296 if (mMetricsHandle == 0) {
1297 mMetricsHandle = mediametrics_create(kCodecKeyName);
1298 }
1299
1300 mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
1301
1302 {
1303 Mutex::Autolock al(mRecentLock);
1304 for (int i = 0; i<kRecentLatencyFrames; i++) {
1305 mRecentSamples[i] = kRecentSampleInvalid;
1306 }
1307 mRecentHead = 0;
1308 }
1309
1310 {
1311 Mutex::Autolock al(mLatencyLock);
1312 mBuffersInFlight.clear();
1313 mNumLowLatencyEnables = 0;
1314 mNumLowLatencyDisables = 0;
1315 mIsLowLatencyModeOn = false;
1316 mIndexOfFirstFrameWhenLowLatencyOn = -1;
1317 mInputBufferCounter = 0;
1318 }
1319
1320 mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
1321 resetMetricsFields();
1322 }
1323
resetMetricsFields()1324 void MediaCodec::resetMetricsFields() {
1325 mHdrInfoFlags = 0;
1326
1327 mApiUsageMetrics = ApiUsageMetrics();
1328 mReliabilityContextMetrics = ReliabilityContextMetrics();
1329 }
1330
// Pushes the current codec statistics (API usage, render quality, latency,
// encoder output, low-latency mode) into the mediametrics record. No-op when
// no metrics handle exists.
void MediaCodec::updateMediametrics() {
    if (mMetricsHandle == 0) {
        ALOGV("no metrics handle found");
        return;
    }

    Mutex::Autolock _lock(mMetricsLock);

    mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
    // Derive the operation mode from the async/block-model flags.
    mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
            ((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
                    : ApiUsageMetrics::kAsynchronousMode)
            : ApiUsageMetrics::kSynchronousMode;
    mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
    mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
            mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);

    mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
            mApiUsageMetrics.inputBufferSize.appMax);
    mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
            mApiUsageMetrics.inputBufferSize.usedMax);
    mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
            mApiUsageMetrics.inputBufferSize.codecMax);

    mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
    mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
            mReliabilityContextMetrics.setOutputSurfaceCount);
    mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
            mReliabilityContextMetrics.resolutionChangeCount);

    // Video rendering quality metrics
    {
        const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
        if (m.frameReleasedCount > 0) {
            mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
            mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
        }
        // Freeze duration histogram (only emitted when freezes occurred).
        if (m.freezeDurationMsHistogram.getCount() >= 1) {
            const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
            mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
            mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
            mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
                    h.emitBuckets());
        }
        // Freeze distance histogram.
        if (m.freezeDistanceMsHistogram.getCount() >= 1) {
            const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
                    h.emitBuckets());
        }
        // Judder score histogram.
        if (m.judderScoreHistogram.getCount() >= 1) {
            const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
            mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
            mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
            mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
                    h.emitBuckets());
        }
        if (m.freezeEventCount != 0) {
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
        }
        if (m.judderEventCount != 0) {
            mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
        }
    }

    if (mLatencyHist.getCount() != 0 ) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());

        if (kEmitHistogram) {
            // and the histogram itself
            std::string hist = mLatencyHist.emit();
            mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
        }
    }
    if (mLatencyUnknown > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
    }
    int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
    if (playbackDurationSec > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
    }
    if (mLifetimeStartNs > 0) {
        nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
        lifetime = lifetime / (1000 * 1000);        // emitted in ms, truncated not rounded
        mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
    }

    // Encoder output statistics (only when something was encoded).
    if (mBytesEncoded) {
        Mutex::Autolock al(mOutputStatsLock);

        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
        int64_t duration = 0;
        if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
            duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
        }
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
    }

    {
        Mutex::Autolock al(mLatencyLock);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
        mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
                mIndexOfFirstFrameWhenLowLatencyOn);
    }

#if 0
    // enable for short term, only while debugging
    updateEphemeralMediametrics(mMetricsHandle);
#endif
}
1464
// Records HDR-related attributes (color standard/range/transfer, static and
// HDR10+ info, derived HDR format) in the mediametrics record. `isConfig`
// selects the "config" vs "parsed" metric keys. Only meaningful for video and
// image codecs; no-op otherwise or without a metrics handle.
void MediaCodec::updateHdrMetrics(bool isConfig) {
    if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
        return;
    }

    int32_t colorStandard = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
    }
    int32_t colorRange = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
    }
    int32_t colorTransfer = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
    }
    // HDR static info: sticky flag set once valid info is seen.
    HDRStaticInfo info;
    if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
            && ColorUtils::isHDRStaticInfoValid(&info)) {
        mHdrInfoFlags |= kFlagHasHdrStaticInfo;
    }
    mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
            (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
    // HDR10+ dynamic metadata: sticky flag set once a non-empty buffer is seen.
    sp<ABuffer> hdr10PlusInfo;
    if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
            && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
        mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
    }
    mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
            (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);

    // hdr format
    sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;

    AString mime;
    int32_t profile = -1;

    if (codedFormat->findString("mime", &mime)
            && codedFormat->findInt32(KEY_PROFILE, &profile)
            && colorTransfer != -1) {
        hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
        mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
    }
}
1513
getHdrFormat(const AString & mime,const int32_t profile,const int32_t colorTransfer)1514 hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
1515 const int32_t colorTransfer) {
1516 return (mFlags & kFlagIsEncoder)
1517 ? getHdrFormatForEncoder(mime, profile, colorTransfer)
1518 : getHdrFormatForDecoder(mime, profile, colorTransfer);
1519 }
1520
getHdrFormatForEncoder(const AString & mime,const int32_t profile,const int32_t colorTransfer)1521 hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
1522 const int32_t colorTransfer) {
1523 switch (colorTransfer) {
1524 case COLOR_TRANSFER_ST2084:
1525 if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1526 switch (profile) {
1527 case VP9Profile2HDR:
1528 return HDR_FORMAT_HDR10;
1529 case VP9Profile2HDR10Plus:
1530 return HDR_FORMAT_HDR10PLUS;
1531 default:
1532 return HDR_FORMAT_NONE;
1533 }
1534 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1535 switch (profile) {
1536 case AV1ProfileMain10HDR10:
1537 return HDR_FORMAT_HDR10;
1538 case AV1ProfileMain10HDR10Plus:
1539 return HDR_FORMAT_HDR10PLUS;
1540 default:
1541 return HDR_FORMAT_NONE;
1542 }
1543 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1544 switch (profile) {
1545 case HEVCProfileMain10HDR10:
1546 return HDR_FORMAT_HDR10;
1547 case HEVCProfileMain10HDR10Plus:
1548 return HDR_FORMAT_HDR10PLUS;
1549 default:
1550 return HDR_FORMAT_NONE;
1551 }
1552 } else {
1553 return HDR_FORMAT_NONE;
1554 }
1555 case COLOR_TRANSFER_HLG:
1556 if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1557 return HDR_FORMAT_HLG;
1558 } else {
1559 // TODO: DOLBY format
1560 return HDR_FORMAT_NONE;
1561 }
1562 default:
1563 return HDR_FORMAT_NONE;
1564 }
1565 }
1566
getHdrFormatForDecoder(const AString & mime,const int32_t profile,const int32_t colorTransfer)1567 hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
1568 const int32_t colorTransfer) {
1569 switch (colorTransfer) {
1570 case COLOR_TRANSFER_ST2084:
1571 if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
1572 return HDR_FORMAT_NONE;
1573 }
1574 return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
1575 case COLOR_TRANSFER_HLG:
1576 if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1577 return HDR_FORMAT_HLG;
1578 }
1579 // TODO: DOLBY format
1580 }
1581 return HDR_FORMAT_NONE;
1582 }
1583
profileSupport10Bits(const AString & mime,const int32_t profile)1584 bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
1585 if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1586 return true;
1587 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1588 switch (profile) {
1589 case VP9Profile2:
1590 case VP9Profile3:
1591 case VP9Profile2HDR:
1592 case VP9Profile3HDR:
1593 case VP9Profile2HDR10Plus:
1594 case VP9Profile3HDR10Plus:
1595 return true;
1596 }
1597 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1598 switch (profile) {
1599 case HEVCProfileMain10:
1600 case HEVCProfileMain10HDR10:
1601 case HEVCProfileMain10HDR10Plus:
1602 return true;
1603 }
1604 }
1605 return false;
1606 }
1607
1608
1609 // called to update info being passed back via getMetrics(), which is a
1610 // unique copy for that call, no concurrent access worries.
updateEphemeralMediametrics(mediametrics_handle_t item)1611 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
1612 ALOGD("MediaCodec::updateEphemeralMediametrics()");
1613
1614 if (item == 0) {
1615 return;
1616 }
1617
1618 // build an empty histogram
1619 MediaHistogram<int64_t> recentHist;
1620 recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
1621
1622 // stuff it with the samples in the ring buffer
1623 {
1624 Mutex::Autolock al(mRecentLock);
1625
1626 for (int i = 0; i < kRecentLatencyFrames; i++) {
1627 if (mRecentSamples[i] != kRecentSampleInvalid) {
1628 recentHist.insert(mRecentSamples[i]);
1629 }
1630 }
1631 }
1632
1633 // spit the data (if any) into the supplied analytics record
1634 if (recentHist.getCount() != 0 ) {
1635 mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
1636 mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
1637 mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
1638 mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());
1639
1640 if (kEmitHistogram) {
1641 // and the histogram itself
1642 std::string hist = recentHist.emit();
1643 mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
1644 }
1645 }
1646 }
1647
/**
 * Serialize a vector of ints as a comma-separated string, e.g. {1,2,3} ->
 * "1,2,3"; an empty vector yields an empty string.
 *
 * Takes the vector by const reference: the previous by-value signature made a
 * full copy of the (potentially long) event-detail vectors on every report.
 */
static std::string emitVector(const std::vector<int32_t> &vector) {
    std::ostringstream sstr;
    for (size_t i = 0; i < vector.size(); ++i) {
        if (i != 0) {
            sstr << ',';  // separator between elements, none before the first
        }
        sstr << vector[i];
    }
    return sstr.str();
}
1658
reportToMediaMetricsIfValid(const FreezeEvent & e)1659 static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
1660 if (e.valid) {
1661 mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
1662 mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
1663 mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
1664 mediametrics_setInt64(handle, kFreezeEventCount, e.count);
1665 mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
1666 mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
1667 mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
1668 emitVector(e.details.durationMs));
1669 mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
1670 emitVector(e.details.distanceMs));
1671 mediametrics_selfRecord(handle);
1672 mediametrics_delete(handle);
1673 }
1674 }
1675
reportToMediaMetricsIfValid(const JudderEvent & e)1676 static void reportToMediaMetricsIfValid(const JudderEvent &e) {
1677 if (e.valid) {
1678 mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
1679 mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
1680 mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
1681 mediametrics_setInt64(handle, kJudderEventCount, e.count);
1682 mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
1683 mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
1684 mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
1685 emitVector(e.details.actualRenderDurationUs));
1686 mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
1687 emitVector(e.details.contentRenderDurationUs));
1688 mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
1689 emitVector(e.details.distanceMs));
1690 mediametrics_selfRecord(handle);
1691 mediametrics_delete(handle);
1692 }
1693 }
1694
// Upload any accumulated metrics to the mediametrics service and release the
// metrics handle, then emit pending freeze/judder events. Safe to call when
// no metrics handle exists.
void MediaCodec::flushMediametrics() {
    ALOGV("flushMediametrics");

    // update does its own mutex locking
    updateMediametrics();
    resetMetricsFields();

    // ensure mutex while we do our own work
    Mutex::Autolock _lock(mMetricsLock);
    if (mMetricsHandle != 0) {
        // only record items that are marked for upload and actually carry data
        if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
            mediametrics_selfRecord(mMetricsHandle);
        }
        mediametrics_delete(mMetricsHandle);
        mMetricsHandle = 0;
    }
    // we no longer have anything pending upload
    mMetricsToUpload = false;

    // Freeze and judder events are reported separately
    reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
    reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
}
1718
updateLowLatency(const sp<AMessage> & msg)1719 void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
1720 int32_t lowLatency = 0;
1721 if (msg->findInt32("low-latency", &lowLatency)) {
1722 Mutex::Autolock al(mLatencyLock);
1723 if (lowLatency > 0) {
1724 ++mNumLowLatencyEnables;
1725 // This is just an estimate since low latency mode change happens ONLY at key frame
1726 mIsLowLatencyModeOn = true;
1727 } else if (lowLatency == 0) {
1728 ++mNumLowLatencyDisables;
1729 // This is just an estimate since low latency mode change happens ONLY at key frame
1730 mIsLowLatencyModeOn = false;
1731 }
1732 }
1733 }
1734
updateCodecImportance(const sp<AMessage> & msg)1735 void MediaCodec::updateCodecImportance(const sp<AMessage>& msg) {
1736 // Update the codec importance.
1737 int32_t importance = 0;
1738 if (msg->findInt32(KEY_IMPORTANCE, &importance)) {
1739 // Ignoring the negative importance.
1740 if (importance >= 0) {
1741 // Notify RM about the change in the importance.
1742 mResourceManagerProxy->setImportance(importance);
1743 ClientConfigParcel clientConfig;
1744 initClientConfigParcel(clientConfig);
1745 mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
1746 }
1747 }
1748 }
1749
asString(TunnelPeekState state,const char * default_string)1750 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
1751 switch(state) {
1752 case TunnelPeekState::kLegacyMode:
1753 return "LegacyMode";
1754 case TunnelPeekState::kEnabledNoBuffer:
1755 return "EnabledNoBuffer";
1756 case TunnelPeekState::kDisabledNoBuffer:
1757 return "DisabledNoBuffer";
1758 case TunnelPeekState::kBufferDecoded:
1759 return "BufferDecoded";
1760 case TunnelPeekState::kBufferRendered:
1761 return "BufferRendered";
1762 case TunnelPeekState::kDisabledQueued:
1763 return "DisabledQueued";
1764 case TunnelPeekState::kEnabledQueued:
1765 return "EnabledQueued";
1766 default:
1767 return default_string;
1768 }
1769 }
1770
// Handle a "tunnel-peek" parameter change: advance the tunnel-peek state
// machine and annotate |msg| with the internal keys the codec component uses
// to apply the transition. States that cannot accept the change are logged
// and left untouched.
void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
    int32_t tunnelPeek = 0;
    if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
        return;
    }

    TunnelPeekState previousState = mTunnelPeekState;
    if(tunnelPeek == 0){
        mTunnelPeekEnabled = false;
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                // leaving legacy mode: tell the component explicitly, then
                // fall through into the regular disable transition
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kEnabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
                break;
            case TunnelPeekState::kEnabledQueued:
                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    } else {
        mTunnelPeekEnabled = true;
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                // leaving legacy mode, then continue with the enable path
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kDisabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
                break;
            case TunnelPeekState::kDisabledQueued:
                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
                break;
            case TunnelPeekState::kBufferDecoded:
                // a frame is already decoded: request an immediate peek and
                // account for it as rendered
                msg->setInt32("android._trigger-tunnel-peek", 1);
                mTunnelPeekState = TunnelPeekState::kBufferRendered;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    }

    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}
1818
// Handle a kWhatOutputFramesRendered notification: accumulate playback
// duration and feed per-frame render times into the video render quality
// tracker, reporting any freeze/judder events it produces.
void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
    int what = 0;
    msg->findInt32("what", &what);
    if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
        // log the unexpected message type only once, to avoid log spam
        static bool logged = false;
        if (!logged) {
            logged = true;
            ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
        }
        return;
    }
    // Rendered frames only matter if they're being sent to the display
    if (mIsSurfaceToDisplay) {
        int64_t renderTimeNs;
        // the message carries one entry per frame under "<index>-..." keys;
        // iterate until the next index is absent
        for (size_t index = 0;
             msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
             index++) {
            // Capture metrics for playback duration
            mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
            // Capture metrics for quality
            int64_t mediaTimeUs = 0;
            if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
                ALOGE("processRenderedFrames: no media time found");
                continue;
            }
            // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
            // rendered frame.
            if (!mTunneled || mediaTimeUs != INT64_MAX) {
                FreezeEvent freezeEvent;
                JudderEvent judderEvent;
                mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
                                                           &judderEvent);
                reportToMediaMetricsIfValid(freezeEvent);
                reportToMediaMetricsIfValid(judderEvent);
            }
        }
    }
}
1857
1858 // when we send a buffer to the codec;
// Bookkeeping for each input buffer queued to the codec: battery accounting,
// encoder-input stats, and the in-flight list used later by
// statsBufferReceived() to measure round-trip latency.
void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {

    // only enqueue if we have a legitimate time
    if (presentationUs <= 0) {
        ALOGV("presentation time: %" PRId64, presentationUs);
        return;
    }

    if (mBatteryChecker != nullptr) {
        // register a video battery resource with the resource manager while
        // the codec is active
        mBatteryChecker->onCodecActivity([this] () {
            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
        });
    }

    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
        mBytesInput += buffer->size();
        mFramesInput++;
    }

    // mutex access to mBuffersInFlight and other stats
    Mutex::Autolock al(mLatencyLock);

    // XXX: we *could* make sure that the time is later than the end of queue
    // as part of a consistency check...
    if (!mTunneled) {
        const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
        BufferFlightTiming_t startdata = { presentationUs, nowNs };
        mBuffersInFlight.push_back(startdata);
    }

    if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
        // remember the first input buffer submitted after low-latency mode
        // turned on, for metrics
        mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
    }
    ++mInputBufferCounter;
}
1894
1895 // when we get a buffer back from the codec
// Bookkeeping for each output buffer returned by the codec: encoder-output
// stats, plus round-trip latency computed against the matching entry that
// statsBufferSent() pushed onto the in-flight list.
void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {

    CHECK_NE(mState, UNINITIALIZED);

    if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
        int32_t flags = 0;
        (void) buffer->meta()->findInt32("flags", &flags);

        // some of these frames, we don't want to count
        // standalone EOS.... has an invalid timestamp
        if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
            mBytesEncoded += buffer->size();
            mFramesEncoded++;

            Mutex::Autolock al(mOutputStatsLock);
            int64_t timeUs = 0;
            if (buffer->meta()->findInt64("timeUs", &timeUs)) {
                if (timeUs > mLatestEncodedPtsUs) {
                    mLatestEncodedPtsUs = timeUs;
                }
                // can't chain as an else-if or this never triggers
                if (timeUs < mEarliestEncodedPtsUs) {
                    mEarliestEncodedPtsUs = timeUs;
                }
            }
        }
    }

    // mutex access to mBuffersInFlight and other stats
    Mutex::Autolock al(mLatencyLock);

    // how long this buffer took for the round trip through the codec
    // NB: pipelining can/will make these times larger. e.g., if each packet
    // is always 2 msec and we have 3 in flight at any given time, we're going to
    // see "6 msec" as an answer.

    // ignore stuff with no presentation time
    if (presentationUs <= 0) {
        ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
        mLatencyUnknown++;
        return;
    }

    if (mBatteryChecker != nullptr) {
        mBatteryChecker->onCodecActivity([this] () {
            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
        });
    }

    // scan the in-flight list (in submission order) for the entry whose
    // presentation time matches this output buffer, dropping older entries
    // whose outputs we evidently missed
    BufferFlightTiming_t startdata;
    bool valid = false;
    while (mBuffersInFlight.size() > 0) {
        startdata = *mBuffersInFlight.begin();
        ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
              startdata.presentationUs, startdata.startedNs);
        if (startdata.presentationUs == presentationUs) {
            // a match
            ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            valid = true;
            break;
        } else if (startdata.presentationUs < presentationUs) {
            // we must have missed the match for this, drop it and keep looking
            ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            continue;
        } else {
            // head is after, so we don't have a frame for ourselves
            ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
                  " we have nothing to pair with",
                  startdata.presentationUs, presentationUs);
            mLatencyUnknown++;
            return;
        }
    }
    if (!valid) {
        ALOGV("-- empty queue, so ignore that.");
        mLatencyUnknown++;
        return;
    }

    // now start our calculations
    const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
    // ns -> us, rounded to nearest
    int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;

    mLatencyHist.insert(latencyUs);

    // push into the recent samples
    {
        Mutex::Autolock al(mRecentLock);

        // mRecentSamples is a fixed-size ring buffer; wrap the head index
        if (mRecentHead >= kRecentLatencyFrames) {
            mRecentHead = 0;
        }
        mRecentSamples[mRecentHead++] = latencyUs;
    }
}
1995
discardDecodeOnlyOutputBuffer(size_t index)1996 bool MediaCodec::discardDecodeOnlyOutputBuffer(size_t index) {
1997 Mutex::Autolock al(mBufferLock);
1998 BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
1999 sp<MediaCodecBuffer> buffer = info->mData;
2000 int32_t flags;
2001 CHECK(buffer->meta()->findInt32("flags", &flags));
2002 if (flags & BUFFER_FLAG_DECODE_ONLY) {
2003 info->mOwnedByClient = false;
2004 info->mData.clear();
2005 mBufferChannel->discardBuffer(buffer);
2006 return true;
2007 }
2008 return false;
2009 }
2010
2011 // static
PostAndAwaitResponse(const sp<AMessage> & msg,sp<AMessage> * response)2012 status_t MediaCodec::PostAndAwaitResponse(
2013 const sp<AMessage> &msg, sp<AMessage> *response) {
2014 status_t err = msg->postAndAwaitResponse(response);
2015
2016 if (err != OK) {
2017 return err;
2018 }
2019
2020 if (!(*response)->findInt32("err", &err)) {
2021 err = OK;
2022 }
2023
2024 return err;
2025 }
2026
PostReplyWithError(const sp<AMessage> & msg,int32_t err)2027 void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
2028 sp<AReplyToken> replyID;
2029 CHECK(msg->senderAwaitsResponse(&replyID));
2030 PostReplyWithError(replyID, err);
2031 }
2032
PostReplyWithError(const sp<AReplyToken> & replyID,int32_t err)2033 void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
2034 int32_t finalErr = err;
2035 if (mReleasedByResourceManager) {
2036 // override the err code if MediaCodec has been released by ResourceManager.
2037 finalErr = DEAD_OBJECT;
2038 }
2039
2040 sp<AMessage> response = new AMessage;
2041 response->setInt32("err", finalErr);
2042 response->postReply(replyID);
2043 }
2044
CreateCCodec()2045 static CodecBase *CreateCCodec() {
2046 return new CCodec;
2047 }
2048
2049 //static
GetCodecBase(const AString & name,const char * owner)2050 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
2051 if (owner) {
2052 if (strcmp(owner, "default") == 0) {
2053 return new ACodec;
2054 } else if (strncmp(owner, "codec2", 6) == 0) {
2055 return CreateCCodec();
2056 }
2057 }
2058
2059 if (name.startsWithIgnoreCase("c2.")) {
2060 return CreateCCodec();
2061 } else if (name.startsWithIgnoreCase("omx.")) {
2062 // at this time only ACodec specifies a mime type.
2063 return new ACodec;
2064 } else {
2065 return NULL;
2066 }
2067 }
2068
// Builds an immutable codec-name -> MediaCodecInfo map once, at construction,
// from the global MediaCodecList.
struct CodecListCache {
    CodecListCache()
        : mCodecInfoMap{[] {
            // immediately-invoked lambda so the const member can be
            // initialized with the fully-populated map
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            size_t count = mcl->countCodecs();
            std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
            for (size_t i = 0; i < count; ++i) {
                sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
                codecInfoMap.emplace(info->getCodecName(), info);
            }
            return codecInfoMap;
        }()} {
    }

    // keyed by MediaCodecInfo::getCodecName()
    const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
};
2085
GetCodecListCache()2086 static const CodecListCache &GetCodecListCache() {
2087 static CodecListCache sCache{};
2088 return sCache;
2089 }
2090
// One-time initialization with codec |name|: resolves the codec info,
// instantiates the CodecBase implementation, wires loopers and callbacks,
// posts kWhatInit (retrying after resource reclaim on resource errors), and
// registers the new client with the resource manager on success.
status_t MediaCodec::init(const AString &name) {
    status_t err = mResourceManagerProxy->init();
    if (err != OK) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "Fatal error: failed to initialize ResourceManager (err=%d)", err));
        mCodec = NULL; // remove the codec
        return err;
    }

    // save init parameters for reset
    mInitName = name;

    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs, until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.

    mCodecInfo.clear();

    bool secureCodec = false;
    const char *owner = "";
    // "android.filter." components have no codec-list entry; skip the lookup
    if (!name.startsWith("android.filter.")) {
        err = mGetCodecInfo(name, &mCodecInfo);
        if (err != OK) {
            mErrorLog.log(LOG_TAG, base::StringPrintf(
                    "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
            mCodec = NULL; // remove the codec.
            return err;
        }
        if (mCodecInfo == nullptr) {
            mErrorLog.log(LOG_TAG, base::StringPrintf(
                    "Getting codec info with name '%s' failed", name.c_str()));
            return NAME_NOT_FOUND;
        }
        secureCodec = name.endsWith(".secure");
        Vector<AString> mediaTypes;
        mCodecInfo->getSupportedMediaTypes(&mediaTypes);
        // classify the codec domain from the first recognized media type
        for (size_t i = 0; i < mediaTypes.size(); ++i) {
            if (mediaTypes[i].startsWith("video/")) {
                mDomain = DOMAIN_VIDEO;
                break;
            } else if (mediaTypes[i].startsWith("audio/")) {
                mDomain = DOMAIN_AUDIO;
                break;
            } else if (mediaTypes[i].startsWith("image/")) {
                mDomain = DOMAIN_IMAGE;
                break;
            }
        }
        owner = mCodecInfo->getOwnerName();
    }

    mCodec = mGetCodecBase(name, owner);
    if (mCodec == NULL) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
        return NAME_NOT_FOUND;
    }

    if (mDomain == DOMAIN_VIDEO) {
        // video codec needs dedicated looper
        if (mCodecLooper == NULL) {
            status_t err = OK;
            mCodecLooper = new ALooper;
            mCodecLooper->setName("CodecLooper");
            err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
            if (OK != err) {
                mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
                return err;
            }
        }

        mCodecLooper->registerHandler(mCodec);
    } else {
        mLooper->registerHandler(mCodec);
    }

    mLooper->registerHandler(this);

    // all codec and buffer events funnel back to this handler as
    // kWhatCodecNotify messages
    mCodec->setCallback(
            std::unique_ptr<CodecBase::CodecCallback>(
                    new CodecCallback(new AMessage(kWhatCodecNotify, this))));
    mBufferChannel = mCodec->getBufferChannel();
    mBufferChannel->setCallback(
            std::unique_ptr<CodecBase::BufferCallback>(
                    new BufferCallback(new AMessage(kWhatCodecNotify, this))));
    sp<AMessage> msg = new AMessage(kWhatInit, this);
    if (mCodecInfo) {
        msg->setObject("codecInfo", mCodecInfo);
        // name may be different from mCodecInfo->getCodecName() if we stripped
        // ".secure"
    }
    msg->setString("name", name);

    // initial naming setup covers the period before the first call to ::configure().
    // after that, we manage this through ::configure() and the setup message.
    if (mMetricsHandle != 0) {
        mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
        mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
    }

    if (mDomain == DOMAIN_VIDEO) {
        mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
    }

    // If the ComponentName is not set yet, use the name passed by the user.
    if (mComponentName.empty()) {
        mIsHardware = !MediaCodecList::isSoftwareCodec(name);
        mResourceManagerProxy->setCodecName(name.c_str());
    }

    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(secureCodec,
            toMediaResourceSubType(mIsHardware, mDomain)));

    // on a resource error, ask the resource manager to reclaim and retry, up
    // to kMaxRetry additional attempts
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }

    if (OK == err) {
        // Notify the ResourceManager that, this codec has been created
        // (initialized) successfully.
        mResourceManagerProxy->notifyClientCreated();
    }
    return err;
}
2228
setCallback(const sp<AMessage> & callback)2229 status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
2230 sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
2231 msg->setMessage("callback", callback);
2232
2233 sp<AMessage> response;
2234 return PostAndAwaitResponse(msg, &response);
2235 }
2236
setOnFrameRenderedNotification(const sp<AMessage> & notify)2237 status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> ¬ify) {
2238 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2239 msg->setMessage("on-frame-rendered", notify);
2240 return msg->post();
2241 }
2242
setOnFirstTunnelFrameReadyNotification(const sp<AMessage> & notify)2243 status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> ¬ify) {
2244 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2245 msg->setMessage("first-tunnel-frame-ready", notify);
2246 return msg->post();
2247 }
2248
2249 /*
2250 * MediaFormat Shaping forward declarations
2251 * including the property name we use for control.
2252 */
// Format shaping is on by default; the system property below can override it
// at runtime.
static int enableMediaFormatShapingDefault = 1;
static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
// forward declaration; defined later in this file
static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
        bool reverse);
2257
createMediaMetrics(const sp<AMessage> & format,uint32_t flags,status_t * err)2258 mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format,
2259 uint32_t flags,
2260 status_t* err) {
2261 *err = OK;
2262 mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
2263 bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
2264
2265 // TODO: validity check log-session-id: it should be a 32-hex-digit.
2266 format->findString("log-session-id", &mLogSessionId);
2267
2268 if (nextMetricsHandle != 0) {
2269 mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
2270 int32_t profile = 0;
2271 if (format->findInt32("profile", &profile)) {
2272 mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
2273 }
2274 int32_t level = 0;
2275 if (format->findInt32("level", &level)) {
2276 mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
2277 }
2278 mediametrics_setInt32(nextMetricsHandle, kCodecEncoder, isEncoder);
2279
2280 if (!mLogSessionId.empty()) {
2281 mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
2282 }
2283
2284 // moved here from ::init()
2285 mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
2286 mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
2287 }
2288
2289 if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
2290 format->findInt32("width", &mWidth);
2291 format->findInt32("height", &mHeight);
2292 if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
2293 mRotationDegrees = 0;
2294 }
2295 if (nextMetricsHandle != 0) {
2296 mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
2297 mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
2298 mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
2299 int32_t maxWidth = 0;
2300 if (format->findInt32("max-width", &maxWidth)) {
2301 mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
2302 }
2303 int32_t maxHeight = 0;
2304 if (format->findInt32("max-height", &maxHeight)) {
2305 mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
2306 }
2307 int32_t colorFormat = -1;
2308 if (format->findInt32("color-format", &colorFormat)) {
2309 mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
2310 }
2311 int32_t appMaxInputSize = -1;
2312 if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
2313 mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
2314 }
2315 if (mDomain == DOMAIN_VIDEO) {
2316 float frameRate = -1.0;
2317 if (format->findFloat("frame-rate", &frameRate)) {
2318 mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
2319 }
2320 float captureRate = -1.0;
2321 if (format->findFloat("capture-rate", &captureRate)) {
2322 mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
2323 }
2324 float operatingRate = -1.0;
2325 if (format->findFloat("operating-rate", &operatingRate)) {
2326 mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
2327 }
2328 int32_t priority = -1;
2329 if (format->findInt32("priority", &priority)) {
2330 mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
2331 }
2332 }
2333 }
2334
2335 // Prevent possible integer overflow in downstream code.
2336 if (mWidth < 0 || mHeight < 0 ||
2337 (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
2338 mErrorLog.log(LOG_TAG, base::StringPrintf(
2339 "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
2340 mediametrics_delete(nextMetricsHandle);
2341 // Set the error code and return null handle.
2342 *err = BAD_VALUE;
2343 return 0;
2344 }
2345
2346 } else {
2347 if (nextMetricsHandle != 0) {
2348 int32_t channelCount;
2349 if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
2350 mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
2351 }
2352 int32_t sampleRate;
2353 if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
2354 mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
2355 }
2356 }
2357 }
2358
2359 if (isEncoder) {
2360 int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
2361 enableMediaFormatShapingDefault);
2362 if (!enableShaping) {
2363 ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
2364 if (nextMetricsHandle != 0) {
2365 mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
2366 }
2367 } else {
2368 (void) shapeMediaFormat(format, flags, nextMetricsHandle);
2369 // XXX: do we want to do this regardless of shaping enablement?
2370 mapFormat(mComponentName, format, nullptr, false);
2371 }
2372 }
2373
2374 // push min/max QP to MediaMetrics after shaping
2375 if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
2376 int32_t qpIMin = -1;
2377 if (format->findInt32("video-qp-i-min", &qpIMin)) {
2378 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
2379 }
2380 int32_t qpIMax = -1;
2381 if (format->findInt32("video-qp-i-max", &qpIMax)) {
2382 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
2383 }
2384 int32_t qpPMin = -1;
2385 if (format->findInt32("video-qp-p-min", &qpPMin)) {
2386 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
2387 }
2388 int32_t qpPMax = -1;
2389 if (format->findInt32("video-qp-p-max", &qpPMax)) {
2390 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
2391 }
2392 int32_t qpBMin = -1;
2393 if (format->findInt32("video-qp-b-min", &qpBMin)) {
2394 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
2395 }
2396 int32_t qpBMax = -1;
2397 if (format->findInt32("video-qp-b-max", &qpBMax)) {
2398 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
2399 }
2400 }
2401
2402 updateLowLatency(format);
2403
2404 return nextMetricsHandle;
2405 }
2406
configure(const sp<AMessage> & format,const sp<Surface> & nativeWindow,const sp<ICrypto> & crypto,uint32_t flags)2407 status_t MediaCodec::configure(
2408 const sp<AMessage> &format,
2409 const sp<Surface> &nativeWindow,
2410 const sp<ICrypto> &crypto,
2411 uint32_t flags) {
2412 return configure(format, nativeWindow, crypto, NULL, flags);
2413 }
2414
// Configure the codec (synchronous).
// 'format' carries the media parameters, 'surface' the optional output
// surface; at most one of crypto/descrambler is attached (if both are
// non-null, only crypto is used). On resource errors the configure
// message is retried up to kMaxRetry times after asking the resource
// manager to reclaim resources from other clients; on other fatal errors
// (except INVALID_OPERATION) the codec is reset() back to INITIALIZED
// for backward compatibility.
status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        const sp<IDescrambler> &descrambler,
        uint32_t flags) {

    // Update the codec importance.
    updateCodecImportance(format);

    // Create and set up metrics for this codec.
    status_t err = OK;
    mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
    if (err != OK) {
        return err;
    }

    sp<AMessage> msg = new AMessage(kWhatConfigure, this);
    msg->setMessage("format", format);
    msg->setInt32("flags", flags);
    msg->setObject("surface", surface);

    if (crypto != NULL || descrambler != NULL) {
        // NOTE(review): raw pointers are handed to the message here;
        // presumably the kWhatConfigure handler takes its own strong
        // reference — confirm before relying on lifetime.
        if (crypto != NULL) {
            msg->setPointer("crypto", crypto.get());
        } else {
            msg->setPointer("descrambler", descrambler.get());
        }
        if (nextMetricsHandle != 0) {
            mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
        }
    } else if (mFlags & kFlagIsSecure) {
        ALOGW("Crypto or descrambler should be given for secure codec");
    }

    if (mConfigureMsg != nullptr) {
        // if re-configuring, we have one of these from before.
        // Recover the space before we discard the old mConfigureMsg
        mediametrics_handle_t metricsHandle;
        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
            mediametrics_delete(metricsHandle);
        }
    }
    msg->setInt64("metrics", nextMetricsHandle);

    // save msg for reset
    mConfigureMsg = msg;

    // Snapshot the callback so it can be re-installed after a reset()
    // in the retry path below.
    sp<AMessage> callback = mCallback;

    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
            toMediaResourceSubType(mIsHardware, mDomain)));
    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
        // Don't know the buffer size at this point, but it's fine to use 1 because
        // the reclaimResource call doesn't consider the requester's buffer size for now.
        resources.push_back(MediaResource::GraphicMemoryResource(1));
    }
    for (int i = 0; i <= kMaxRetry; ++i) {
        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (err != OK && err != INVALID_OPERATION) {
            // Give up immediately if the resource manager could not
            // reclaim anything for us.
            if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
            // MediaCodec now set state to UNINITIALIZED upon any fatal error.
            // To maintain backward-compatibility, do a reset() to put codec
            // back into INITIALIZED state.
            // But don't reset if the err is INVALID_OPERATION, which means
            // the configure failure is due to wrong state.

            ALOGE("configure failed with err 0x%08x, resetting...", err);
            status_t err2 = reset();
            if (err2 != OK) {
                ALOGE("retrying configure: failed to reset codec (%08x)", err2);
                break;
            }
            if (callback != nullptr) {
                err2 = setCallback(callback);
                if (err2 != OK) {
                    ALOGE("retrying configure: failed to set callback (%08x)", err2);
                    break;
                }
            }
        }
        // Only resource errors are worth retrying; anything else is final.
        if (!isResourceError(err)) {
            break;
        }
    }

    return err;
}
2507
2508 // Media Format Shaping support
2509 //
2510
// Entry points into the dynamically loaded media format shaper library;
// stays NULL until connectFormatShaper() has resolved (and version-checked)
// the "shaper_ops" symbol.
static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
// Whether this device looks like a handheld (has a touchscreen and is not
// automotive/TV/watch); computed once in connectFormatShaper() and passed
// to the shaper as an eligibility feature in loadCodecProperties().
static bool sIsHandheld = true;
2513
// Load the media format shaper library (once per process) and determine
// whether this device is a handheld. On success sShaperOps points at the
// library's operations table; on any failure it stays NULL and shaping is
// silently disabled. Always returns true.
static bool connectFormatShaper() {
    static std::once_flag sCheckOnce;

    ALOGV("connectFormatShaper...");

    std::call_once(sCheckOnce, [&](){

        void *libHandle = NULL;
        nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);

        // prefer any copy in the mainline module
        //
        android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
        AString libraryName = "libmediaformatshaper.so";

        if (mediaNs != NULL) {
            static const android_dlextinfo dlextinfo = {
                .flags = ANDROID_DLEXT_USE_NAMESPACE,
                .library_namespace = mediaNs,
            };

            AString libraryMainline = "/apex/com.android.media/";
#if __LP64__
            libraryMainline.append("lib64/");
#else
            libraryMainline.append("lib/");
#endif
            libraryMainline.append(libraryName);

            libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
                                           &dlextinfo);

            if (libHandle != NULL) {
                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
                                dlsym(libHandle, "shaper_ops");
            } else {
                ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
                      libraryMainline.c_str());
            }
        } else {
            ALOGV("connectFormatShaper: couldn't find media namespace.");
        }

        // fall back to the system partition, if present.
        //
        if (sShaperOps == NULL) {

            libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);

            if (libHandle != NULL) {
                sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
                                dlsym(libHandle, "shaper_ops");
            } else {
                ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
            }
        }

        // refuse to use a library whose ops-table version we don't understand
        if (sShaperOps != nullptr
            && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
            ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
                  sShaperOps->version);
            sShaperOps = nullptr;
        }

        if (sShaperOps != nullptr) {
            ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
        }

        nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
        ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
              (loading_finished - loading_started)/1000);


        // we also want to know whether this is a handheld device
        // start with assumption that the device is handheld.
        sIsHandheld = true;
        sp<IServiceManager> serviceMgr = defaultServiceManager();
        sp<content::pm::IPackageManagerNative> packageMgr;
        if (serviceMgr.get() != nullptr) {
            sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
            packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
        }
        // if we didn't get serviceMgr, we'll leave packageMgr as default null
        if (packageMgr != nullptr) {

            // MUST have these
            static const String16 featuresNeeded[] = {
                String16("android.hardware.touchscreen")
            };
            // these must be present to be a handheld
            for (::android::String16 required : featuresNeeded) {
                bool hasFeature = false;
                binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
                if (!status.isOk()) {
                    // on a binder failure, skip this feature rather than
                    // declaring the device non-handheld
                    ALOGE("%s: hasSystemFeature failed: %s",
                          __func__, status.exceptionMessage().c_str());
                    continue;
                }
                ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
                if (!hasFeature) {
                    ALOGV("... which means we are not handheld");
                    sIsHandheld = false;
                    break;
                }
            }

            // MUST NOT have these
            static const String16 featuresDisallowed[] = {
                String16("android.hardware.type.automotive"),
                String16("android.hardware.type.television"),
                String16("android.hardware.type.watch")
            };
            // any of these present -- we aren't a handheld
            for (::android::String16 forbidden : featuresDisallowed) {
                bool hasFeature = false;
                binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
                if (!status.isOk()) {
                    ALOGE("%s: hasSystemFeature failed: %s",
                          __func__, status.exceptionMessage().c_str());
                    continue;
                }
                ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
                if (hasFeature) {
                    ALOGV("... which means we are not handheld");
                    sIsHandheld = false;
                    break;
                }
            }
        }

    });

    return true;
}
2648
2649
2650 #if 0
2651 // a construct to force the above dlopen() to run very early.
2652 // goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
2653 // failure of this means that cold start of those apps is slower by the time to dlopen()
2654 // TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
2655 //
2656 static bool forceEarlyLoadingShaper = connectFormatShaper();
2657 #endif
2658
2659 // parse the codec's properties: mapping, whether it meets min quality, etc
2660 // and pass them into the video quality code
2661 //
loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,sp<MediaCodecInfo> codecInfo,AString mediaType)2662 static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
2663 sp<MediaCodecInfo> codecInfo, AString mediaType) {
2664
2665 sp<MediaCodecInfo::Capabilities> capabilities =
2666 codecInfo->getCapabilitiesFor(mediaType.c_str());
2667 if (capabilities == nullptr) {
2668 ALOGI("no capabilities as part of the codec?");
2669 } else {
2670 const sp<AMessage> &details = capabilities->getDetails();
2671 AString mapTarget;
2672 int count = details->countEntries();
2673 for(int ix = 0; ix < count; ix++) {
2674 AMessage::Type entryType;
2675 const char *mapSrc = details->getEntryNameAt(ix, &entryType);
2676 // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
2677 //
2678 static const char *featurePrefix = "feature-";
2679 static const int featurePrefixLen = strlen(featurePrefix);
2680 static const char *tuningPrefix = "tuning-";
2681 static const int tuningPrefixLen = strlen(tuningPrefix);
2682 static const char *mappingPrefix = "mapping-";
2683 static const int mappingPrefixLen = strlen(mappingPrefix);
2684
2685 if (mapSrc == NULL) {
2686 continue;
2687 } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
2688 int32_t intValue;
2689 if (details->findInt32(mapSrc, &intValue)) {
2690 ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
2691 (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
2692 intValue);
2693 }
2694 continue;
2695 } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
2696 AString value;
2697 if (details->findString(mapSrc, &value)) {
2698 ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
2699 (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
2700 value.c_str());
2701 }
2702 continue;
2703 } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
2704 AString target;
2705 if (details->findString(mapSrc, &target)) {
2706 ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
2707 target.c_str());
2708 // key is really "kind-key"
2709 // separate that, so setMap() sees the triple kind, key, value
2710 const char *kind = &mapSrc[mappingPrefixLen];
2711 const char *sep = strchr(kind, '-');
2712 const char *key = sep+1;
2713 if (sep != NULL) {
2714 std::string xkind = std::string(kind, sep-kind);
2715 (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
2716 key, target.c_str());
2717 }
2718 }
2719 }
2720 }
2721 }
2722
2723 // we also carry in the codec description whether we are on a handheld device.
2724 // this info is eventually used by both the Codec and the C2 machinery to inform
2725 // the underlying codec whether to do any shaping.
2726 //
2727 if (sIsHandheld) {
2728 // set if we are indeed a handheld device (or in future 'any eligible device'
2729 // missing on devices that aren't eligible for minimum quality enforcement.
2730 (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
2731 // strictly speaking, it's a tuning, but those are strings and feature stores int
2732 (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
2733 }
2734 }
2735
setupFormatShaper(AString mediaType)2736 status_t MediaCodec::setupFormatShaper(AString mediaType) {
2737 ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
2738 mComponentName.c_str(), mediaType.c_str());
2739
2740 nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
2741
2742 // someone might have beaten us to it.
2743 mediaformatshaper::shaperHandle_t shaperHandle;
2744 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2745 if (shaperHandle != nullptr) {
2746 ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
2747 return OK;
2748 }
2749
2750 // we get to build & register one
2751 shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
2752 if (shaperHandle == nullptr) {
2753 ALOGW("unable to create a shaper for cocodec %s mediaType %s",
2754 mComponentName.c_str(), mediaType.c_str());
2755 return OK;
2756 }
2757
2758 (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
2759
2760 shaperHandle = sShaperOps->registerShaper(shaperHandle,
2761 mComponentName.c_str(), mediaType.c_str());
2762
2763 nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
2764 ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
2765 mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
2766
2767 return OK;
2768 }
2769
2770
2771 // Format Shaping
2772 // Mapping and Manipulation of encoding parameters
2773 //
2774 // All of these decisions are pushed into the shaper instead of here within MediaCodec.
2775 // this includes decisions based on whether the codec implements minimum quality bars
2776 // itself or needs to be shaped outside of the codec.
2777 // This keeps all those decisions in one place.
2778 // It also means that we push some extra decision information (is this a handheld device
2779 // or one that is otherwise eligible for minimum quality manipulation, which generational
2780 // quality target is in force, etc). This allows those values to be cached in the
2781 // per-codec structures that are done 1 time within a process instead of for each
2782 // codec instantiation.
2783 //
2784
// Run the format shaper over an encoder's configuration format and fold
// any resulting parameter changes back into 'format' in place. Best-effort
// throughout: every early-out (not an encoder, no codec info, no mime, no
// shaper library/handle) returns OK and leaves 'format' untouched.
// 'metricsHandle' (0 = none) receives the change count plus the original
// values of any bitrate/QP fields about to be overwritten.
status_t MediaCodec::shapeMediaFormat(
        const sp<AMessage> &format,
        uint32_t flags,
        mediametrics_handle_t metricsHandle) {
    ALOGV("shapeMediaFormat entry");

    if (!(flags & CONFIGURE_FLAG_ENCODE)) {
        ALOGW("shapeMediaFormat: not encoder");
        return OK;
    }
    if (mCodecInfo == NULL) {
        ALOGW("shapeMediaFormat: no codecinfo");
        return OK;
    }

    AString mediaType;
    if (!format->findString("mime", &mediaType)) {
        ALOGW("shapeMediaFormat: no mediaType information");
        return OK;
    }

    // make sure we have the function entry points for the shaper library
    //

    connectFormatShaper();
    if (sShaperOps == nullptr) {
        ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
        return OK;
    }

    // find the shaper information for this codec+mediaType pair
    //
    mediaformatshaper::shaperHandle_t shaperHandle;
    shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
    if (shaperHandle == nullptr) {
        // not registered yet; build it, then look again
        setupFormatShaper(mediaType);
        shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
    }
    if (shaperHandle == nullptr) {
        ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
              mComponentName.c_str(), mediaType.c_str());
        return OK;
    }

    // run the shaper
    //

    ALOGV("Shaping input: %s", format->debugString(0).c_str());

    // the shaper operates on an NDK AMediaFormat wrapped around a copy
    // of 'format'; the wrapper is released below via AMediaFormat_delete
    sp<AMessage> updatedFormat = format->dup();
    AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);

    int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
    if (result == 0) {
        AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);

        sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
        size_t changeCount = deltas->countEntries();
        ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
        if (metricsHandle != 0) {
            mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
        }
        if (changeCount > 0) {
            if (metricsHandle != 0) {
                // save some old properties before we fold in the new ones
                int32_t bitrate;
                if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
                    mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
                }
                int32_t qpIMin = -1;
                if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
                }
                int32_t qpIMax = -1;
                if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
                }
                int32_t qpPMin = -1;
                if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
                }
                int32_t qpPMax = -1;
                if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
                }
                int32_t qpBMin = -1;
                if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
                }
                int32_t qpBMax = -1;
                if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
                }
            }
            // NB: for any field in both format and deltas, the deltas copy wins
            format->extend(deltas);
        }
    }

    AMediaFormat_delete(updatedNdkFormat);
    return OK;
}
2887
mapFormat(AString componentName,const sp<AMessage> & format,const char * kind,bool reverse)2888 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2889 bool reverse) {
2890 AString mediaType;
2891 if (!format->findString("mime", &mediaType)) {
2892 ALOGV("mapFormat: no mediaType information");
2893 return;
2894 }
2895 ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
2896 mediaType.c_str(), kind ? kind : "<all>", reverse);
2897
2898 // make sure we have the function entry points for the shaper library
2899 //
2900
2901 #if 0
2902 // let's play the faster "only do mapping if we've already loaded the library
2903 connectFormatShaper();
2904 #endif
2905 if (sShaperOps == nullptr) {
2906 ALOGV("mapFormat: no MediaFormatShaper hooks available");
2907 return;
2908 }
2909
2910 // find the shaper information for this codec+mediaType pair
2911 //
2912 mediaformatshaper::shaperHandle_t shaperHandle;
2913 shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
2914 if (shaperHandle == nullptr) {
2915 ALOGV("mapFormat: no shaper handle");
2916 return;
2917 }
2918
2919 const char **mappings;
2920 if (reverse)
2921 mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
2922 else
2923 mappings = sShaperOps->getMappings(shaperHandle, kind);
2924
2925 if (mappings == nullptr) {
2926 ALOGV("no mappings returned");
2927 return;
2928 }
2929
2930 ALOGV("Pre-mapping: %s", format->debugString(2).c_str());
2931 // do the mapping
2932 //
2933 int entries = format->countEntries();
2934 for (int i = 0; ; i += 2) {
2935 if (mappings[i] == nullptr) {
2936 break;
2937 }
2938
2939 size_t ix = format->findEntryByName(mappings[i]);
2940 if (ix < entries) {
2941 ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
2942 status_t status = format->setEntryNameAt(ix, mappings[i+1]);
2943 if (status != OK) {
2944 ALOGW("Unable to map from '%s' to '%s': status %d",
2945 mappings[i], mappings[i+1], status);
2946 }
2947 }
2948 }
2949 ALOGV("Post-mapping: %s", format->debugString(2).c_str());
2950
2951
2952 // reclaim the mapping memory
2953 for (int i = 0; ; i += 2) {
2954 if (mappings[i] == nullptr) {
2955 break;
2956 }
2957 free((void*)mappings[i]);
2958 free((void*)mappings[i + 1]);
2959 }
2960 free(mappings);
2961 mappings = nullptr;
2962 }
2963
2964 //
2965 // end of Format Shaping hooks within MediaCodec
2966 //
2967
releaseCrypto()2968 status_t MediaCodec::releaseCrypto()
2969 {
2970 ALOGV("releaseCrypto");
2971
2972 sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
2973
2974 sp<AMessage> response;
2975 status_t status = msg->postAndAwaitResponse(&response);
2976
2977 if (status == OK && response != NULL) {
2978 CHECK(response->findInt32("status", &status));
2979 ALOGV("releaseCrypto ret: %d ", status);
2980 }
2981 else {
2982 ALOGE("releaseCrypto err: %d", status);
2983 }
2984
2985 return status;
2986 }
2987
onReleaseCrypto(const sp<AMessage> & msg)2988 void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
2989 {
2990 status_t status = INVALID_OPERATION;
2991 if (mCrypto != NULL) {
2992 ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
2993 mBufferChannel->setCrypto(NULL);
2994 // TODO change to ALOGV
2995 ALOGD("onReleaseCrypto: [before clear] mCrypto: %p (%d)",
2996 mCrypto.get(), mCrypto->getStrongCount());
2997 mCrypto.clear();
2998
2999 status = OK;
3000 }
3001 else {
3002 ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
3003 }
3004
3005 sp<AMessage> response = new AMessage;
3006 response->setInt32("status", status);
3007
3008 sp<AReplyToken> replyID;
3009 CHECK(msg->senderAwaitsResponse(&replyID));
3010 response->postReply(replyID);
3011 }
3012
setInputSurface(const sp<PersistentSurface> & surface)3013 status_t MediaCodec::setInputSurface(
3014 const sp<PersistentSurface> &surface) {
3015 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
3016 msg->setObject("input-surface", surface.get());
3017
3018 sp<AMessage> response;
3019 return PostAndAwaitResponse(msg, &response);
3020 }
3021
detachOutputSurface()3022 status_t MediaCodec::detachOutputSurface() {
3023 sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
3024
3025 sp<AMessage> response;
3026 return PostAndAwaitResponse(msg, &response);
3027 }
3028
setSurface(const sp<Surface> & surface)3029 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
3030 sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
3031 msg->setObject("surface", surface);
3032
3033 sp<AMessage> response;
3034 return PostAndAwaitResponse(msg, &response);
3035 }
3036
createInputSurface(sp<IGraphicBufferProducer> * bufferProducer)3037 status_t MediaCodec::createInputSurface(
3038 sp<IGraphicBufferProducer>* bufferProducer) {
3039 sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
3040
3041 sp<AMessage> response;
3042 status_t err = PostAndAwaitResponse(msg, &response);
3043 if (err == NO_ERROR) {
3044 // unwrap the sp<IGraphicBufferProducer>
3045 sp<RefBase> obj;
3046 bool found = response->findObject("input-surface", &obj);
3047 CHECK(found);
3048 sp<BufferProducerWrapper> wrapper(
3049 static_cast<BufferProducerWrapper*>(obj.get()));
3050 *bufferProducer = wrapper->getBufferProducer();
3051 } else {
3052 ALOGW("createInputSurface failed, err=%d", err);
3053 }
3054 return err;
3055 }
3056
getGraphicBufferSize()3057 uint64_t MediaCodec::getGraphicBufferSize() {
3058 if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
3059 return 0;
3060 }
3061
3062 uint64_t size = 0;
3063 size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
3064 for (size_t i = 0; i < portNum; ++i) {
3065 // TODO: this is just an estimation, we should get the real buffer size from ACodec.
3066 size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
3067 }
3068 return size;
3069 }
3070
// Transition the codec into the Executing state (synchronous).
// On resource errors the start is retried up to kMaxRetry times: each
// retry first asks the resource manager to reclaim resources from other
// clients, then rebuilds codec state (reset + re-set callback +
// re-configure from the saved mConfigureMsg) before posting kWhatStart
// again. Returns the result of the last attempt.
status_t MediaCodec::start() {
    sp<AMessage> msg = new AMessage(kWhatStart, this);

    sp<AMessage> callback;

    status_t err;
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
            toMediaResourceSubType(mIsHardware, mDomain)));
    if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
        // Don't know the buffer size at this point, but it's fine to use 1 because
        // the reclaimResource call doesn't consider the requester's buffer size for now.
        resources.push_back(MediaResource::GraphicMemoryResource(1));
    }
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
            // Recover codec from previous error before retry start.
            err = reset();
            if (err != OK) {
                ALOGE("retrying start: failed to reset codec");
                break;
            }
            if (callback != nullptr) {
                err = setCallback(callback);
                if (err != OK) {
                    ALOGE("retrying start: failed to set callback");
                    break;
                }
                ALOGD("succeed to set callback for reclaim");
            }
            sp<AMessage> response;
            err = PostAndAwaitResponse(mConfigureMsg, &response);
            if (err != OK) {
                ALOGE("retrying start: failed to configure codec");
                break;
            }
        }

        // Keep callback message after the first iteration if necessary.
        if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
            callback = mCallback;
            ALOGD("keep callback message for reclaim");
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        // Only resource errors are worth retrying.
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}
3127
stop()3128 status_t MediaCodec::stop() {
3129 sp<AMessage> msg = new AMessage(kWhatStop, this);
3130
3131 sp<AMessage> response;
3132 return PostAndAwaitResponse(msg, &response);
3133 }
3134
hasPendingBuffer(int portIndex)3135 bool MediaCodec::hasPendingBuffer(int portIndex) {
3136 return std::any_of(
3137 mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
3138 [](const BufferInfo &info) { return info.mOwnedByClient; });
3139 }
3140
hasPendingBuffer()3141 bool MediaCodec::hasPendingBuffer() {
3142 return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
3143 }
3144
reclaim(bool force)3145 status_t MediaCodec::reclaim(bool force) {
3146 ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
3147 sp<AMessage> msg = new AMessage(kWhatRelease, this);
3148 msg->setInt32("reclaimed", 1);
3149 msg->setInt32("force", force ? 1 : 0);
3150
3151 sp<AMessage> response;
3152 status_t ret = PostAndAwaitResponse(msg, &response);
3153 if (ret == -ENOENT) {
3154 ALOGD("MediaCodec looper is gone, skip reclaim");
3155 ret = OK;
3156 }
3157 return ret;
3158 }
3159
release()3160 status_t MediaCodec::release() {
3161 sp<AMessage> msg = new AMessage(kWhatRelease, this);
3162 sp<AMessage> response;
3163 return PostAndAwaitResponse(msg, &response);
3164 }
3165
releaseAsync(const sp<AMessage> & notify)3166 status_t MediaCodec::releaseAsync(const sp<AMessage> ¬ify) {
3167 sp<AMessage> msg = new AMessage(kWhatRelease, this);
3168 msg->setMessage("async", notify);
3169 sp<AMessage> response;
3170 return PostAndAwaitResponse(msg, &response);
3171 }
3172
// Return the codec to its freshly-initialized state.
// Externally-facing MediaCodec objects are created already initialized,
// so this is effectively release() + init() plus clearing sticky state
// that setState(UNINITIALIZED) does not touch.
status_t MediaCodec::reset() {
    /* When external-facing MediaCodec object is created,
       it is already initialized. Thus, reset is essentially
       release() followed by init(), plus clearing the state */

    status_t err = release();

    // unregister handlers
    if (mCodec != NULL) {
        // the codec handler lives on mCodecLooper when one exists,
        // otherwise on the main looper
        if (mCodecLooper != NULL) {
            mCodecLooper->unregisterHandler(mCodec->id());
        } else {
            mLooper->unregisterHandler(mCodec->id());
        }
        mCodec = NULL;
    }
    mLooper->unregisterHandler(id());

    mFlags = 0; // clear all flags
    mStickyError = OK;

    // reset state not reset by setState(UNINITIALIZED)
    mDequeueInputReplyID = 0;
    mDequeueOutputReplyID = 0;
    mDequeueInputTimeoutGeneration = 0;
    mDequeueOutputTimeoutGeneration = 0;
    mHaveInputSurface = false;

    // only re-initialize if the release itself succeeded
    if (err == OK) {
        err = init(mInitName);
    }
    return err;
}
3206
queueInputBuffer(size_t index,size_t offset,size_t size,int64_t presentationTimeUs,uint32_t flags,AString * errorDetailMsg)3207 status_t MediaCodec::queueInputBuffer(
3208 size_t index,
3209 size_t offset,
3210 size_t size,
3211 int64_t presentationTimeUs,
3212 uint32_t flags,
3213 AString *errorDetailMsg) {
3214 if (errorDetailMsg != NULL) {
3215 errorDetailMsg->clear();
3216 }
3217
3218 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3219 msg->setSize("index", index);
3220 msg->setSize("offset", offset);
3221 msg->setSize("size", size);
3222 msg->setInt64("timeUs", presentationTimeUs);
3223 msg->setInt32("flags", flags);
3224 msg->setPointer("errorDetailMsg", errorDetailMsg);
3225 sp<AMessage> response;
3226 return PostAndAwaitResponse(msg, &response);
3227 }
3228
queueInputBuffers(size_t index,size_t offset,size_t size,const sp<BufferInfosWrapper> & infos,AString * errorDetailMsg)3229 status_t MediaCodec::queueInputBuffers(
3230 size_t index,
3231 size_t offset,
3232 size_t size,
3233 const sp<BufferInfosWrapper> &infos,
3234 AString *errorDetailMsg) {
3235 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3236 uint32_t bufferFlags = 0;
3237 uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
3238 uint32_t andFlags = flagsinAllAU;
3239 if (infos == nullptr || infos->value.empty()) {
3240 ALOGE("ERROR: Large Audio frame with no BufferInfo");
3241 return BAD_VALUE;
3242 }
3243 int infoIdx = 0;
3244 std::vector<AccessUnitInfo> &accessUnitInfo = infos->value;
3245 int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
3246 bool foundEndOfStream = false;
3247 for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
3248 bufferFlags |= accessUnitInfo[infoIdx].mFlags;
3249 andFlags &= accessUnitInfo[infoIdx].mFlags;
3250 if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
3251 foundEndOfStream = true;
3252 }
3253 }
3254 bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
3255 if (infoIdx != accessUnitInfo.size()) {
3256 ALOGE("queueInputBuffers has incorrect access-units");
3257 return -EINVAL;
3258 }
3259 msg->setSize("index", index);
3260 msg->setSize("offset", offset);
3261 msg->setSize("size", size);
3262 msg->setInt64("timeUs", minTimeUs);
3263 // Make this represent flags for the entire buffer
3264 // decodeOnly Flag is set only when all buffers are decodeOnly
3265 msg->setInt32("flags", bufferFlags);
3266 msg->setObject("accessUnitInfo", infos);
3267 msg->setPointer("errorDetailMsg", errorDetailMsg);
3268 sp<AMessage> response;
3269 return PostAndAwaitResponse(msg, &response);
3270 }
3271
queueSecureInputBuffer(size_t index,size_t offset,const CryptoPlugin::SubSample * subSamples,size_t numSubSamples,const uint8_t key[16],const uint8_t iv[16],CryptoPlugin::Mode mode,const CryptoPlugin::Pattern & pattern,int64_t presentationTimeUs,uint32_t flags,AString * errorDetailMsg)3272 status_t MediaCodec::queueSecureInputBuffer(
3273 size_t index,
3274 size_t offset,
3275 const CryptoPlugin::SubSample *subSamples,
3276 size_t numSubSamples,
3277 const uint8_t key[16],
3278 const uint8_t iv[16],
3279 CryptoPlugin::Mode mode,
3280 const CryptoPlugin::Pattern &pattern,
3281 int64_t presentationTimeUs,
3282 uint32_t flags,
3283 AString *errorDetailMsg) {
3284 if (errorDetailMsg != NULL) {
3285 errorDetailMsg->clear();
3286 }
3287
3288 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3289 msg->setSize("index", index);
3290 msg->setSize("offset", offset);
3291 msg->setPointer("subSamples", (void *)subSamples);
3292 msg->setSize("numSubSamples", numSubSamples);
3293 msg->setPointer("key", (void *)key);
3294 msg->setPointer("iv", (void *)iv);
3295 msg->setInt32("mode", mode);
3296 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
3297 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
3298 msg->setInt64("timeUs", presentationTimeUs);
3299 msg->setInt32("flags", flags);
3300 msg->setPointer("errorDetailMsg", errorDetailMsg);
3301
3302 sp<AMessage> response;
3303 status_t err = PostAndAwaitResponse(msg, &response);
3304
3305 return err;
3306 }
3307
queueSecureInputBuffers(size_t index,size_t offset,size_t size,const sp<BufferInfosWrapper> & auInfo,const sp<CryptoInfosWrapper> & cryptoInfos,AString * errorDetailMsg)3308 status_t MediaCodec::queueSecureInputBuffers(
3309 size_t index,
3310 size_t offset,
3311 size_t size,
3312 const sp<BufferInfosWrapper> &auInfo,
3313 const sp<CryptoInfosWrapper> &cryptoInfos,
3314 AString *errorDetailMsg) {
3315 if (errorDetailMsg != NULL) {
3316 errorDetailMsg->clear();
3317 }
3318 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3319 uint32_t bufferFlags = 0;
3320 uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
3321 uint32_t andFlags = flagsinAllAU;
3322 if (auInfo == nullptr
3323 || auInfo->value.empty()
3324 || cryptoInfos == nullptr
3325 || cryptoInfos->value.empty()) {
3326 ALOGE("ERROR: Large Audio frame with no BufferInfo/CryptoInfo");
3327 return BAD_VALUE;
3328 }
3329 int infoIdx = 0;
3330 std::vector<AccessUnitInfo> &accessUnitInfo = auInfo->value;
3331 int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
3332 bool foundEndOfStream = false;
3333 for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
3334 bufferFlags |= accessUnitInfo[infoIdx].mFlags;
3335 andFlags &= accessUnitInfo[infoIdx].mFlags;
3336 if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
3337 foundEndOfStream = true;
3338 }
3339 }
3340 bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
3341 if (infoIdx != accessUnitInfo.size()) {
3342 ALOGE("queueInputBuffers has incorrect access-units");
3343 return -EINVAL;
3344 }
3345 msg->setSize("index", index);
3346 msg->setSize("offset", offset);
3347 msg->setSize("ssize", size);
3348 msg->setInt64("timeUs", minTimeUs);
3349 msg->setInt32("flags", bufferFlags);
3350 msg->setObject("accessUnitInfo", auInfo);
3351 msg->setObject("cryptoInfos", cryptoInfos);
3352 msg->setPointer("errorDetailMsg", errorDetailMsg);
3353
3354 sp<AMessage> response;
3355 status_t err = PostAndAwaitResponse(msg, &response);
3356
3357 return err;
3358 }
3359
queueBuffer(size_t index,const std::shared_ptr<C2Buffer> & buffer,const sp<BufferInfosWrapper> & bufferInfos,const sp<AMessage> & tunings,AString * errorDetailMsg)3360 status_t MediaCodec::queueBuffer(
3361 size_t index,
3362 const std::shared_ptr<C2Buffer> &buffer,
3363 const sp<BufferInfosWrapper> &bufferInfos,
3364 const sp<AMessage> &tunings,
3365 AString *errorDetailMsg) {
3366 if (errorDetailMsg != NULL) {
3367 errorDetailMsg->clear();
3368 }
3369 if (bufferInfos == nullptr || bufferInfos->value.empty()) {
3370 return BAD_VALUE;
3371 }
3372 status_t err = OK;
3373 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3374 msg->setSize("index", index);
3375 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
3376 new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
3377 msg->setObject("c2buffer", obj);
3378 if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
3379 return err;
3380 }
3381 msg->setObject("accessUnitInfo", bufferInfos);
3382 if (tunings && tunings->countEntries() > 0) {
3383 msg->setMessage("tunings", tunings);
3384 }
3385 msg->setPointer("errorDetailMsg", errorDetailMsg);
3386 sp<AMessage> response;
3387 err = PostAndAwaitResponse(msg, &response);
3388
3389 return err;
3390 }
3391
queueEncryptedBuffer(size_t index,const sp<hardware::HidlMemory> & buffer,size_t offset,size_t size,const sp<BufferInfosWrapper> & bufferInfos,const sp<CryptoInfosWrapper> & cryptoInfos,const sp<AMessage> & tunings,AString * errorDetailMsg)3392 status_t MediaCodec::queueEncryptedBuffer(
3393 size_t index,
3394 const sp<hardware::HidlMemory> &buffer,
3395 size_t offset,
3396 size_t size,
3397 const sp<BufferInfosWrapper> &bufferInfos,
3398 const sp<CryptoInfosWrapper> &cryptoInfos,
3399 const sp<AMessage> &tunings,
3400 AString *errorDetailMsg) {
3401 if (errorDetailMsg != NULL) {
3402 errorDetailMsg->clear();
3403 }
3404 if (bufferInfos == nullptr || bufferInfos->value.empty()) {
3405 return BAD_VALUE;
3406 }
3407 status_t err = OK;
3408 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3409 msg->setSize("index", index);
3410 sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
3411 new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
3412 msg->setObject("memory", memory);
3413 msg->setSize("offset", offset);
3414 if (cryptoInfos != nullptr) {
3415 msg->setSize("ssize", size);
3416 msg->setObject("cryptoInfos", cryptoInfos);
3417 } else {
3418 msg->setSize("size", size);
3419 }
3420 msg->setObject("accessUnitInfo", bufferInfos);
3421 if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
3422 return err;
3423 }
3424 if (tunings && tunings->countEntries() > 0) {
3425 msg->setMessage("tunings", tunings);
3426 }
3427 msg->setPointer("errorDetailMsg", errorDetailMsg);
3428
3429 sp<AMessage> response;
3430 err = PostAndAwaitResponse(msg, &response);
3431
3432 return err;
3433 }
3434
dequeueInputBuffer(size_t * index,int64_t timeoutUs)3435 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
3436 sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
3437 msg->setInt64("timeoutUs", timeoutUs);
3438
3439 sp<AMessage> response;
3440 status_t err;
3441 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3442 return err;
3443 }
3444
3445 CHECK(response->findSize("index", index));
3446
3447 return OK;
3448 }
3449
dequeueOutputBuffer(size_t * index,size_t * offset,size_t * size,int64_t * presentationTimeUs,uint32_t * flags,int64_t timeoutUs)3450 status_t MediaCodec::dequeueOutputBuffer(
3451 size_t *index,
3452 size_t *offset,
3453 size_t *size,
3454 int64_t *presentationTimeUs,
3455 uint32_t *flags,
3456 int64_t timeoutUs) {
3457 sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
3458 msg->setInt64("timeoutUs", timeoutUs);
3459
3460 sp<AMessage> response;
3461 status_t err;
3462 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3463 return err;
3464 }
3465
3466 CHECK(response->findSize("index", index));
3467 CHECK(response->findSize("offset", offset));
3468 CHECK(response->findSize("size", size));
3469 CHECK(response->findInt64("timeUs", presentationTimeUs));
3470 CHECK(response->findInt32("flags", (int32_t *)flags));
3471
3472 return OK;
3473 }
3474
renderOutputBufferAndRelease(size_t index)3475 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
3476 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3477 msg->setSize("index", index);
3478 msg->setInt32("render", true);
3479
3480 sp<AMessage> response;
3481 return PostAndAwaitResponse(msg, &response);
3482 }
3483
renderOutputBufferAndRelease(size_t index,int64_t timestampNs)3484 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
3485 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3486 msg->setSize("index", index);
3487 msg->setInt32("render", true);
3488 msg->setInt64("timestampNs", timestampNs);
3489
3490 sp<AMessage> response;
3491 return PostAndAwaitResponse(msg, &response);
3492 }
3493
releaseOutputBuffer(size_t index)3494 status_t MediaCodec::releaseOutputBuffer(size_t index) {
3495 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3496 msg->setSize("index", index);
3497
3498 sp<AMessage> response;
3499 return PostAndAwaitResponse(msg, &response);
3500 }
3501
signalEndOfInputStream()3502 status_t MediaCodec::signalEndOfInputStream() {
3503 sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
3504
3505 sp<AMessage> response;
3506 return PostAndAwaitResponse(msg, &response);
3507 }
3508
getOutputFormat(sp<AMessage> * format) const3509 status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
3510 sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
3511
3512 sp<AMessage> response;
3513 status_t err;
3514 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3515 return err;
3516 }
3517
3518 CHECK(response->findMessage("format", format));
3519
3520 return OK;
3521 }
3522
getInputFormat(sp<AMessage> * format) const3523 status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
3524 sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
3525
3526 sp<AMessage> response;
3527 status_t err;
3528 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3529 return err;
3530 }
3531
3532 CHECK(response->findMessage("format", format));
3533
3534 return OK;
3535 }
3536
getName(AString * name) const3537 status_t MediaCodec::getName(AString *name) const {
3538 sp<AMessage> msg = new AMessage(kWhatGetName, this);
3539
3540 sp<AMessage> response;
3541 status_t err;
3542 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3543 return err;
3544 }
3545
3546 CHECK(response->findString("name", name));
3547
3548 return OK;
3549 }
3550
getCodecInfo(sp<MediaCodecInfo> * codecInfo) const3551 status_t MediaCodec::getCodecInfo(sp<MediaCodecInfo> *codecInfo) const {
3552 sp<AMessage> msg = new AMessage(kWhatGetCodecInfo, this);
3553
3554 sp<AMessage> response;
3555 status_t err;
3556 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3557 return err;
3558 }
3559
3560 sp<RefBase> obj;
3561 CHECK(response->findObject("codecInfo", &obj));
3562 *codecInfo = static_cast<MediaCodecInfo *>(obj.get());
3563
3564 return OK;
3565 }
3566
3567 // this is the user-callable entry point
getMetrics(mediametrics_handle_t & reply)3568 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
3569
3570 reply = 0;
3571
3572 sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
3573 sp<AMessage> response;
3574 status_t err;
3575 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3576 return err;
3577 }
3578
3579 CHECK(response->findInt64("metrics", &reply));
3580
3581 return OK;
3582 }
3583
3584 // runs on the looper thread (for mutex purposes)
onGetMetrics(const sp<AMessage> & msg)3585 void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
3586
3587 mediametrics_handle_t results = 0;
3588
3589 sp<AReplyToken> replyID;
3590 CHECK(msg->senderAwaitsResponse(&replyID));
3591
3592 if (mMetricsHandle != 0) {
3593 updateMediametrics();
3594 results = mediametrics_dup(mMetricsHandle);
3595 updateEphemeralMediametrics(results);
3596 } else {
3597 results = mediametrics_dup(mMetricsHandle);
3598 }
3599
3600 sp<AMessage> response = new AMessage;
3601 response->setInt64("metrics", results);
3602 response->postReply(replyID);
3603 }
3604
getInputBuffers(Vector<sp<MediaCodecBuffer>> * buffers) const3605 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
3606 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
3607 msg->setInt32("portIndex", kPortIndexInput);
3608 msg->setPointer("buffers", buffers);
3609
3610 sp<AMessage> response;
3611 return PostAndAwaitResponse(msg, &response);
3612 }
3613
getOutputBuffers(Vector<sp<MediaCodecBuffer>> * buffers) const3614 status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
3615 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
3616 msg->setInt32("portIndex", kPortIndexOutput);
3617 msg->setPointer("buffers", buffers);
3618
3619 sp<AMessage> response;
3620 return PostAndAwaitResponse(msg, &response);
3621 }
3622
getOutputBuffer(size_t index,sp<MediaCodecBuffer> * buffer)3623 status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
3624 sp<AMessage> format;
3625 return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
3626 }
3627
getOutputFormat(size_t index,sp<AMessage> * format)3628 status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
3629 sp<MediaCodecBuffer> buffer;
3630 return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
3631 }
3632
getInputBuffer(size_t index,sp<MediaCodecBuffer> * buffer)3633 status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
3634 sp<AMessage> format;
3635 return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
3636 }
3637
isExecuting() const3638 bool MediaCodec::isExecuting() const {
3639 return mState == STARTED || mState == FLUSHED;
3640 }
3641
/**
 * Looks up the client-owned buffer at |index| on |portIndex| and returns it
 * together with its format. Only valid while executing; the buffer must be
 * currently owned by the client (i.e. previously dequeued and not yet
 * queued/released back). On failure the reason is recorded in mErrorLog.
 */
status_t MediaCodec::getBufferAndFormat(
        size_t portIndex, size_t index,
        sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
    // use mutex instead of a context switch
    if (mReleasedByResourceManager) {
        // The resource manager reclaimed this codec; all accessors fail.
        mErrorLog.log(LOG_TAG, "resource already released");
        return DEAD_OBJECT;
    }

    if (buffer == NULL) {
        mErrorLog.log(LOG_TAG, "null buffer");
        return INVALID_OPERATION;
    }

    if (format == NULL) {
        mErrorLog.log(LOG_TAG, "null format");
        return INVALID_OPERATION;
    }

    // Start from a clean slate so callers never see stale values on error.
    buffer->clear();
    format->clear();

    if (!isExecuting()) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "Invalid to call %s; only valid in Executing states",
                apiStateString().c_str()));
        return INVALID_OPERATION;
    }

    // we do not want mPortBuffers to change during this section
    // we also don't want mOwnedByClient to change during this
    Mutex::Autolock al(mBufferLock);

    std::vector<BufferInfo> &buffers = mPortBuffers[portIndex];
    if (index >= buffers.size()) {
        ALOGE("getBufferAndFormat - trying to get buffer with "
              "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
        mErrorLog.log(LOG_TAG, base::StringPrintf("Bad index (index=%zu)", index));
        return INVALID_OPERATION;
    }

    const BufferInfo &info = buffers[index];
    if (!info.mOwnedByClient) {
        // Buffer exists but is currently held by the codec, not the client.
        ALOGE("getBufferAndFormat - invalid operation "
              "(the index %zu is not owned by client)", index);
        mErrorLog.log(LOG_TAG, base::StringPrintf("index %zu is not owned by client", index));
        return INVALID_OPERATION;
    }

    *buffer = info.mData;
    *format = info.mData->format();

    return OK;
}
3696
flush()3697 status_t MediaCodec::flush() {
3698 sp<AMessage> msg = new AMessage(kWhatFlush, this);
3699
3700 sp<AMessage> response;
3701 return PostAndAwaitResponse(msg, &response);
3702 }
3703
requestIDRFrame()3704 status_t MediaCodec::requestIDRFrame() {
3705 (new AMessage(kWhatRequestIDRFrame, this))->post();
3706
3707 return OK;
3708 }
3709
querySupportedVendorParameters(std::vector<std::string> * names)3710 status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
3711 return mCodec->querySupportedParameters(names);
3712 }
3713
describeParameter(const std::string & name,CodecParameterDescriptor * desc)3714 status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
3715 return mCodec->describeParameter(name, desc);
3716 }
3717
subscribeToVendorParameters(const std::vector<std::string> & names)3718 status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
3719 return mCodec->subscribeToParameters(names);
3720 }
3721
unsubscribeFromVendorParameters(const std::vector<std::string> & names)3722 status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
3723 return mCodec->unsubscribeFromParameters(names);
3724 }
3725
requestActivityNotification(const sp<AMessage> & notify)3726 void MediaCodec::requestActivityNotification(const sp<AMessage> ¬ify) {
3727 sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
3728 msg->setMessage("notify", notify);
3729 msg->post();
3730 }
3731
requestCpuBoostIfNeeded()3732 void MediaCodec::requestCpuBoostIfNeeded() {
3733 if (mCpuBoostRequested) {
3734 return;
3735 }
3736 int32_t colorFormat;
3737 if (mOutputFormat->contains("hdr-static-info")
3738 && mOutputFormat->findInt32("color-format", &colorFormat)
3739 // check format for OMX only, for C2 the format is always opaque since the
3740 // software rendering doesn't go through client
3741 && ((mSoftRenderer != NULL && colorFormat == OMX_COLOR_FormatYUV420Planar16)
3742 || mOwnerName.equalsIgnoreCase("codec2::software"))) {
3743 int32_t left, top, right, bottom, width, height;
3744 int64_t totalPixel = 0;
3745 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
3746 totalPixel = (right - left + 1) * (bottom - top + 1);
3747 } else if (mOutputFormat->findInt32("width", &width)
3748 && mOutputFormat->findInt32("height", &height)) {
3749 totalPixel = width * height;
3750 }
3751 if (totalPixel >= 1920 * 1080) {
3752 mResourceManagerProxy->addResource(MediaResource::CpuBoostResource());
3753 mCpuBoostRequested = true;
3754 }
3755 }
3756 }
3757
// Tracks codec activity to decide when to report battery "on"/"off" stats.
// |msg| is the template message reposted as the periodic inactivity checker;
// |timeoutUs| is the inactivity window before battery is reported off.
BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
    : mTimeoutUs(timeoutUs)
    , mLastActivityTimeUs(-1ll)          // -1 = no activity since last checker post
    , mBatteryStatNotified(false)        // whether "battery on" has been reported
    , mBatteryCheckerGeneration(0)       // bumped to invalidate in-flight checkers
    , mIsExecuting(false)
    , mBatteryCheckerMsg(msg) {}
3765
onCodecActivity(std::function<void ()> batteryOnCb)3766 void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
3767 if (!isExecuting()) {
3768 // ignore if not executing
3769 return;
3770 }
3771 if (!mBatteryStatNotified) {
3772 batteryOnCb();
3773 mBatteryStatNotified = true;
3774 sp<AMessage> msg = mBatteryCheckerMsg->dup();
3775 msg->setInt32("generation", mBatteryCheckerGeneration);
3776
3777 // post checker and clear last activity time
3778 msg->post(mTimeoutUs);
3779 mLastActivityTimeUs = -1ll;
3780 } else {
3781 // update last activity time
3782 mLastActivityTimeUs = ALooper::GetNowUs();
3783 }
3784 }
3785
onCheckBatteryTimer(const sp<AMessage> & msg,std::function<void ()> batteryOffCb)3786 void BatteryChecker::onCheckBatteryTimer(
3787 const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
3788 // ignore if this checker already expired because the client resource was removed
3789 int32_t generation;
3790 if (!msg->findInt32("generation", &generation)
3791 || generation != mBatteryCheckerGeneration) {
3792 return;
3793 }
3794
3795 if (mLastActivityTimeUs < 0ll) {
3796 // timed out inactive, do not repost checker
3797 batteryOffCb();
3798 mBatteryStatNotified = false;
3799 } else {
3800 // repost checker and clear last activity time
3801 msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
3802 mLastActivityTimeUs = -1ll;
3803 }
3804 }
3805
onClientRemoved()3806 void BatteryChecker::onClientRemoved() {
3807 mBatteryStatNotified = false;
3808 mBatteryCheckerGeneration++;
3809 }
3810
3811 ////////////////////////////////////////////////////////////////////////////////
3812
// Fails any pending synchronous dequeueInput/OutputBuffer request with
// INVALID_OPERATION, bumps the timeout generations so stale timeout
// messages are ignored, and clears the pending flags.
void MediaCodec::cancelPendingDequeueOperations() {
    if (mFlags & kFlagDequeueInputPending) {
        mErrorLog.log(LOG_TAG, "Pending dequeue input buffer request cancelled");
        PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);

        // Invalidate any in-flight dequeue-input timeout message.
        ++mDequeueInputTimeoutGeneration;
        mDequeueInputReplyID = 0;
        mFlags &= ~kFlagDequeueInputPending;
    }

    if (mFlags & kFlagDequeueOutputPending) {
        mErrorLog.log(LOG_TAG, "Pending dequeue output buffer request cancelled");
        PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);

        // Invalidate any in-flight dequeue-output timeout message.
        ++mDequeueOutputTimeoutGeneration;
        mDequeueOutputReplyID = 0;
        mFlags &= ~kFlagDequeueOutputPending;
    }
}
3832
handleDequeueInputBuffer(const sp<AReplyToken> & replyID,bool newRequest)3833 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
3834 if (!isExecuting()) {
3835 mErrorLog.log(LOG_TAG, base::StringPrintf(
3836 "Invalid to call %s; only valid in executing state",
3837 apiStateString().c_str()));
3838 PostReplyWithError(replyID, INVALID_OPERATION);
3839 } else if (mFlags & kFlagIsAsync) {
3840 mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
3841 PostReplyWithError(replyID, INVALID_OPERATION);
3842 } else if (newRequest && (mFlags & kFlagDequeueInputPending)) {
3843 mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue input request is pending");
3844 PostReplyWithError(replyID, INVALID_OPERATION);
3845 return true;
3846 } else if (mFlags & kFlagStickyError) {
3847 PostReplyWithError(replyID, getStickyError());
3848 return true;
3849 }
3850
3851 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3852
3853 if (index < 0) {
3854 CHECK_EQ(index, -EAGAIN);
3855 return false;
3856 }
3857
3858 sp<AMessage> response = new AMessage;
3859 response->setSize("index", index);
3860 response->postReply(replyID);
3861
3862 return true;
3863 }
3864
/**
 * Attempts to satisfy a synchronous-mode dequeueOutputBuffer request.
 * Replies to |replyID| with either a buffer description, an INFO_* code
 * (buffers/format changed), or an error; returns kNoBuffer when nothing is
 * available yet so the caller may keep the request pending, and
 * kDiscardedBuffer when a decode-only buffer was consumed internally.
 */
MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
        const sp<AReplyToken> &replyID, bool newRequest) {
    if (!isExecuting()) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "Invalid to call %s; only valid in executing state",
                apiStateString().c_str()));
        PostReplyWithError(replyID, INVALID_OPERATION);
    } else if (mFlags & kFlagIsAsync) {
        mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
        PostReplyWithError(replyID, INVALID_OPERATION);
    } else if (newRequest && (mFlags & kFlagDequeueOutputPending)) {
        mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue output request is pending");
        PostReplyWithError(replyID, INVALID_OPERATION);
    } else if (mFlags & kFlagStickyError) {
        PostReplyWithError(replyID, getStickyError());
    } else if (mFlags & kFlagOutputBuffersChanged) {
        // Legacy INFO code: the output buffer set changed; report once.
        PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
        mFlags &= ~kFlagOutputBuffersChanged;
    } else {
        sp<AMessage> response = new AMessage;
        // Peek (not dequeue) first so a format-change can be reported
        // before the buffer itself is handed out.
        BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
        if (!info) {
            return DequeueOutputResult::kNoBuffer;
        }

        // In synchronous mode, output format change should be handled
        // at dequeue to put the event at the correct order.

        const sp<MediaCodecBuffer> &buffer = info->mData;
        handleOutputFormatChangeIfNeeded(buffer);
        if (mFlags & kFlagOutputFormatChanged) {
            // Report INFO_FORMAT_CHANGED now; the buffer stays queued and
            // will be returned on the next dequeue call.
            PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
            mFlags &= ~kFlagOutputFormatChanged;
            return DequeueOutputResult::kRepliedWithError;
        }

        ssize_t index = dequeuePortBuffer(kPortIndexOutput);
        if (discardDecodeOnlyOutputBuffer(index)) {
            // Decode-only buffers are consumed internally, never surfaced.
            return DequeueOutputResult::kDiscardedBuffer;
        }

        response->setSize("index", index);
        response->setSize("offset", buffer->offset());
        response->setSize("size", buffer->size());

        int64_t timeUs;
        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

        response->setInt64("timeUs", timeUs);

        int32_t flags;
        CHECK(buffer->meta()->findInt32("flags", &flags));

        response->setInt32("flags", flags);

        // Record latency/frame statistics for this output buffer.
        statsBufferReceived(timeUs, buffer);

        response->postReply(replyID);
        return DequeueOutputResult::kSuccess;
    }

    // All error branches above posted a reply already.
    return DequeueOutputResult::kRepliedWithError;
}
3928
3929
initClientConfigParcel(ClientConfigParcel & clientConfig)3930 inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
3931 clientConfig.codecType = toMediaResourceSubType(mIsHardware, mDomain);
3932 clientConfig.isEncoder = mFlags & kFlagIsEncoder;
3933 clientConfig.width = mWidth;
3934 clientConfig.height = mHeight;
3935 clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
3936 clientConfig.id = mCodecId;
3937 }
3938
onMessageReceived(const sp<AMessage> & msg)3939 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
3940 switch (msg->what()) {
3941 case kWhatCodecNotify:
3942 {
3943 int32_t what;
3944 CHECK(msg->findInt32("what", &what));
3945 AString codecErrorState;
3946 switch (what) {
3947 case kWhatError:
3948 case kWhatCryptoError:
3949 {
3950 int32_t err, actionCode;
3951 CHECK(msg->findInt32("err", &err));
3952 CHECK(msg->findInt32("actionCode", &actionCode));
3953
3954 ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
3955 err, StrMediaError(err).c_str(), actionCode,
3956 mState, stateString(mState).c_str());
3957 if (err == DEAD_OBJECT) {
3958 mFlags |= kFlagSawMediaServerDie;
3959 mFlags &= ~kFlagIsComponentAllocated;
3960 }
3961 bool sendErrorResponse = true;
3962 std::string origin;
3963 if (what == kWhatCryptoError) {
3964 origin = "kWhatCryptoError:";
3965 } else {
3966 origin = "kWhatError:";
3967 //TODO: add a new error state
3968 }
3969 codecErrorState = kCodecErrorState;
3970 origin += stateString(mState);
3971 if (mCryptoAsync) {
3972 //TODO: do some book keeping on the buffers
3973 mCryptoAsync->stop();
3974 }
3975 switch (mState) {
3976 case INITIALIZING:
3977 {
3978 // Resource error during INITIALIZING state needs to be logged
3979 // through metrics, to be able to track such occurrences.
3980 if (isResourceError(err)) {
3981 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3982 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3983 stateString(mState).c_str());
3984 flushMediametrics();
3985 initMediametrics();
3986 }
3987 setState(UNINITIALIZED);
3988 break;
3989 }
3990
3991 case CONFIGURING:
3992 {
3993 if (actionCode == ACTION_CODE_FATAL) {
3994 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3995 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3996 stateString(mState).c_str());
3997 flushMediametrics();
3998 initMediametrics();
3999 }
4000 setState(actionCode == ACTION_CODE_FATAL ?
4001 UNINITIALIZED : INITIALIZED);
4002 break;
4003 }
4004
4005 case STARTING:
4006 {
4007 if (actionCode == ACTION_CODE_FATAL) {
4008 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4009 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4010 stateString(mState).c_str());
4011 flushMediametrics();
4012 initMediametrics();
4013 }
4014 setState(actionCode == ACTION_CODE_FATAL ?
4015 UNINITIALIZED : CONFIGURED);
4016 break;
4017 }
4018
4019 case RELEASING:
4020 {
4021 // Ignore the error, assuming we'll still get
4022 // the shutdown complete notification. If we
4023 // don't, we'll timeout and force release.
4024 sendErrorResponse = false;
4025 FALLTHROUGH_INTENDED;
4026 }
4027 case STOPPING:
4028 {
4029 if (mFlags & kFlagSawMediaServerDie) {
4030 if (mState == RELEASING && !mReplyID) {
4031 ALOGD("Releasing asynchronously, so nothing to reply here.");
4032 }
4033 // MediaServer died, there definitely won't
4034 // be a shutdown complete notification after
4035 // all.
4036
4037 // note that we may be directly going from
4038 // STOPPING->UNINITIALIZED, instead of the
4039 // usual STOPPING->INITIALIZED state.
4040 setState(UNINITIALIZED);
4041 if (mState == RELEASING) {
4042 mComponentName.clear();
4043 }
4044 if (mReplyID) {
4045 postPendingRepliesAndDeferredMessages(origin + ":dead");
4046 } else {
4047 ALOGD("no pending replies: %s:dead following %s",
4048 origin.c_str(), mLastReplyOrigin.c_str());
4049 }
4050 sendErrorResponse = false;
4051 } else if (!mReplyID) {
4052 sendErrorResponse = false;
4053 }
4054 break;
4055 }
4056
4057 case FLUSHING:
4058 {
4059 if (actionCode == ACTION_CODE_FATAL) {
4060 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4061 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4062 stateString(mState).c_str());
4063 flushMediametrics();
4064 initMediametrics();
4065
4066 setState(UNINITIALIZED);
4067 } else {
4068 setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
4069 }
4070 break;
4071 }
4072
4073 case FLUSHED:
4074 case STARTED:
4075 {
4076 sendErrorResponse = (mReplyID != nullptr);
4077
4078 setStickyError(err);
4079 postActivityNotificationIfPossible();
4080
4081 cancelPendingDequeueOperations();
4082
4083 if (mFlags & kFlagIsAsync) {
4084 if (what == kWhatError) {
4085 onError(err, actionCode);
4086 } else if (what == kWhatCryptoError) {
4087 onCryptoError(msg);
4088 }
4089 }
4090 switch (actionCode) {
4091 case ACTION_CODE_TRANSIENT:
4092 break;
4093 case ACTION_CODE_RECOVERABLE:
4094 setState(INITIALIZED);
4095 break;
4096 default:
4097 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4098 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4099 stateString(mState).c_str());
4100 flushMediametrics();
4101 initMediametrics();
4102 setState(UNINITIALIZED);
4103 break;
4104 }
4105 break;
4106 }
4107
4108 default:
4109 {
4110 sendErrorResponse = (mReplyID != nullptr);
4111
4112 setStickyError(err);
4113 postActivityNotificationIfPossible();
4114
4115 // actionCode in an uninitialized state is always fatal.
4116 if (mState == UNINITIALIZED) {
4117 actionCode = ACTION_CODE_FATAL;
4118 }
4119 if (mFlags & kFlagIsAsync) {
4120 if (what == kWhatError) {
4121 onError(err, actionCode);
4122 } else if (what == kWhatCryptoError) {
4123 onCryptoError(msg);
4124 }
4125 }
4126 switch (actionCode) {
4127 case ACTION_CODE_TRANSIENT:
4128 break;
4129 case ACTION_CODE_RECOVERABLE:
4130 setState(INITIALIZED);
4131 break;
4132 default:
4133 setState(UNINITIALIZED);
4134 break;
4135 }
4136 break;
4137 }
4138 }
4139
4140 if (sendErrorResponse) {
4141 // TRICKY: replicate PostReplyWithError logic for
4142 // err code override
4143 int32_t finalErr = err;
4144 if (mReleasedByResourceManager) {
4145 // override the err code if MediaCodec has been
4146 // released by ResourceManager.
4147 finalErr = DEAD_OBJECT;
4148 }
4149 postPendingRepliesAndDeferredMessages(origin, finalErr);
4150 }
4151 break;
4152 }
4153
4154 case kWhatComponentAllocated:
4155 {
4156 if (mState == RELEASING || mState == UNINITIALIZED) {
4157 // In case a kWhatError or kWhatRelease message came in and replied,
4158 // we log a warning and ignore.
4159 ALOGW("allocate interrupted by error or release, current state %d/%s",
4160 mState, stateString(mState).c_str());
4161 break;
4162 }
4163 CHECK_EQ(mState, INITIALIZING);
4164 setState(INITIALIZED);
4165 mFlags |= kFlagIsComponentAllocated;
4166
4167 CHECK(msg->findString("componentName", &mComponentName));
4168
4169 if (mComponentName.c_str()) {
4170 mIsHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
4171 mediametrics_setCString(mMetricsHandle, kCodecCodec,
4172 mComponentName.c_str());
4173 // Update the codec name.
4174 mResourceManagerProxy->setCodecName(mComponentName.c_str());
4175 }
4176
4177 const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
4178 if (mComponentName.startsWith("OMX.google.")
4179 && strncmp(owner, "default", 8) == 0) {
4180 mFlags |= kFlagUsesSoftwareRenderer;
4181 } else {
4182 mFlags &= ~kFlagUsesSoftwareRenderer;
4183 }
4184 mOwnerName = owner;
4185
4186 if (mComponentName.endsWith(".secure")) {
4187 mFlags |= kFlagIsSecure;
4188 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 1);
4189 } else {
4190 mFlags &= ~kFlagIsSecure;
4191 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
4192 }
4193
4194 mediametrics_setInt32(mMetricsHandle, kCodecHardware,
4195 MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
4196
4197 mResourceManagerProxy->addResource(MediaResource::CodecResource(
4198 mFlags & kFlagIsSecure, toMediaResourceSubType(mIsHardware, mDomain)));
4199
4200 postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
4201 break;
4202 }
4203
4204 case kWhatComponentConfigured:
4205 {
4206 if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
4207 // In case a kWhatError or kWhatRelease message came in and replied,
4208 // we log a warning and ignore.
4209 ALOGW("configure interrupted by error or release, current state %d/%s",
4210 mState, stateString(mState).c_str());
4211 break;
4212 }
4213 CHECK_EQ(mState, CONFIGURING);
4214
4215 // reset input surface flag
4216 mHaveInputSurface = false;
4217
4218 CHECK(msg->findMessage("input-format", &mInputFormat));
4219 CHECK(msg->findMessage("output-format", &mOutputFormat));
4220
4221 // limit to confirming the opt-in behavior to minimize any behavioral change
4222 if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
4223 // signal frame dropping mode in the input format as this may also be
4224 // meaningful and confusing for an encoder in a transcoder scenario
4225 mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
4226 }
4227 sp<AMessage> interestingFormat =
4228 (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
4229 ALOGV("[%s] configured as input format: %s, output format: %s",
4230 mComponentName.c_str(),
4231 mInputFormat->debugString(4).c_str(),
4232 mOutputFormat->debugString(4).c_str());
4233 int32_t usingSwRenderer;
4234 if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
4235 && usingSwRenderer) {
4236 mFlags |= kFlagUsesSoftwareRenderer;
4237 }
4238 setState(CONFIGURED);
4239 postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
4240
4241 // augment our media metrics info, now that we know more things
4242 // such as what the codec extracted from any CSD passed in.
4243 if (mMetricsHandle != 0) {
4244 sp<AMessage> format;
4245 if (mConfigureMsg != NULL &&
4246 mConfigureMsg->findMessage("format", &format)) {
4247 // format includes: mime
4248 AString mime;
4249 if (format->findString("mime", &mime)) {
4250 mediametrics_setCString(mMetricsHandle, kCodecMime,
4251 mime.c_str());
4252 }
4253 }
4254 // perhaps video only?
4255 int32_t profile = 0;
4256 if (interestingFormat->findInt32("profile", &profile)) {
4257 mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
4258 }
4259 int32_t level = 0;
4260 if (interestingFormat->findInt32("level", &level)) {
4261 mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
4262 }
4263 sp<AMessage> uncompressedFormat =
4264 (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
4265 int32_t componentColorFormat = -1;
4266 if (uncompressedFormat->findInt32("android._color-format",
4267 &componentColorFormat)) {
4268 mediametrics_setInt32(mMetricsHandle,
4269 kCodecComponentColorFormat, componentColorFormat);
4270 }
4271 updateHdrMetrics(true /* isConfig */);
4272 int32_t codecMaxInputSize = -1;
4273 if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
4274 mApiUsageMetrics.inputBufferSize.codecMax = codecMaxInputSize;
4275 }
4276 // bitrate and bitrate mode, encoder only
4277 if (mFlags & kFlagIsEncoder) {
4278 // encoder specific values
4279 int32_t bitrate_mode = -1;
4280 if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
4281 mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
4282 asString_BitrateMode(bitrate_mode));
4283 }
4284 int32_t bitrate = -1;
4285 if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
4286 mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
4287 }
4288 } else {
4289 // decoder specific values
4290 }
4291 }
4292 break;
4293 }
4294
4295 case kWhatInputSurfaceCreated:
4296 {
4297 if (mState != CONFIGURED) {
4298 // state transitioned unexpectedly; we should have replied already.
4299 ALOGD("received kWhatInputSurfaceCreated message in state %s",
4300 stateString(mState).c_str());
4301 break;
4302 }
4303 // response to initiateCreateInputSurface()
4304 status_t err = NO_ERROR;
4305 sp<AMessage> response = new AMessage;
4306 if (!msg->findInt32("err", &err)) {
4307 sp<RefBase> obj;
4308 msg->findObject("input-surface", &obj);
4309 CHECK(msg->findMessage("input-format", &mInputFormat));
4310 CHECK(msg->findMessage("output-format", &mOutputFormat));
4311 ALOGV("[%s] input surface created as input format: %s, output format: %s",
4312 mComponentName.c_str(),
4313 mInputFormat->debugString(4).c_str(),
4314 mOutputFormat->debugString(4).c_str());
4315 CHECK(obj != NULL);
4316 response->setObject("input-surface", obj);
4317 mHaveInputSurface = true;
4318 } else {
4319 response->setInt32("err", err);
4320 }
4321 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
4322 break;
4323 }
4324
4325 case kWhatInputSurfaceAccepted:
4326 {
4327 if (mState != CONFIGURED) {
4328 // state transitioned unexpectedly; we should have replied already.
4329 ALOGD("received kWhatInputSurfaceAccepted message in state %s",
4330 stateString(mState).c_str());
4331 break;
4332 }
4333 // response to initiateSetInputSurface()
4334 status_t err = NO_ERROR;
4335 sp<AMessage> response = new AMessage();
4336 if (!msg->findInt32("err", &err)) {
4337 CHECK(msg->findMessage("input-format", &mInputFormat));
4338 CHECK(msg->findMessage("output-format", &mOutputFormat));
4339 mHaveInputSurface = true;
4340 } else {
4341 response->setInt32("err", err);
4342 }
4343 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
4344 break;
4345 }
4346
4347 case kWhatSignaledInputEOS:
4348 {
4349 if (!isExecuting()) {
4350 // state transitioned unexpectedly; we should have replied already.
4351 ALOGD("received kWhatSignaledInputEOS message in state %s",
4352 stateString(mState).c_str());
4353 break;
4354 }
4355 // response to signalEndOfInputStream()
4356 sp<AMessage> response = new AMessage;
4357 status_t err;
4358 if (msg->findInt32("err", &err)) {
4359 response->setInt32("err", err);
4360 }
4361 postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
4362 break;
4363 }
4364
4365 case kWhatStartCompleted:
4366 {
4367 if (mState == RELEASING || mState == UNINITIALIZED) {
4368 // In case a kWhatRelease message came in and replied,
4369 // we log a warning and ignore.
4370 ALOGW("start interrupted by release, current state %d/%s",
4371 mState, stateString(mState).c_str());
4372 break;
4373 }
4374
4375 CHECK_EQ(mState, STARTING);
4376 if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
4377 mResourceManagerProxy->addResource(
4378 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
4379 }
4380 // Notify the RM that the codec is in use (has been started).
4381 ClientConfigParcel clientConfig;
4382 initClientConfigParcel(clientConfig);
4383 mResourceManagerProxy->notifyClientStarted(clientConfig);
4384
4385 setState(STARTED);
4386 postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
4387
4388 // Now that the codec has started, configure, by default, the peek behavior to
4389 // be undefined for backwards compatibility with older releases. Later, if an
4390 // app explicitly enables or disables peek, the parameter will be turned off and
4391 // the legacy undefined behavior is disallowed.
4392 // See updateTunnelPeek called in onSetParameters for more details.
4393 if (mTunneled && mTunnelPeekState == TunnelPeekState::kLegacyMode) {
4394 sp<AMessage> params = new AMessage;
4395 params->setInt32("android._tunnel-peek-set-legacy", 1);
4396 mCodec->signalSetParameters(params);
4397 }
4398 break;
4399 }
4400
4401 case kWhatOutputBuffersChanged:
4402 {
4403 mFlags |= kFlagOutputBuffersChanged;
4404 postActivityNotificationIfPossible();
4405 break;
4406 }
4407
4408 case kWhatOutputFramesRendered:
4409 {
4410 // ignore these in all states except running
4411 if (mState != STARTED) {
4412 break;
4413 }
4414 TunnelPeekState previousState = mTunnelPeekState;
4415 if (mTunnelPeekState != TunnelPeekState::kLegacyMode) {
4416 mTunnelPeekState = TunnelPeekState::kBufferRendered;
4417 ALOGV("TunnelPeekState: %s -> %s",
4418 asString(previousState),
4419 asString(TunnelPeekState::kBufferRendered));
4420 }
4421 processRenderedFrames(msg);
4422 // check that we have a notification set
4423 if (mOnFrameRenderedNotification != NULL) {
4424 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
4425 notify->setMessage("data", msg);
4426 notify->post();
4427 }
4428 break;
4429 }
4430
4431 case kWhatFirstTunnelFrameReady:
4432 {
4433 if (mState != STARTED) {
4434 break;
4435 }
4436 TunnelPeekState previousState = mTunnelPeekState;
4437 switch(mTunnelPeekState) {
4438 case TunnelPeekState::kDisabledNoBuffer:
4439 case TunnelPeekState::kDisabledQueued:
4440 mTunnelPeekState = TunnelPeekState::kBufferDecoded;
4441 ALOGV("First tunnel frame ready");
4442 ALOGV("TunnelPeekState: %s -> %s",
4443 asString(previousState),
4444 asString(mTunnelPeekState));
4445 break;
4446 case TunnelPeekState::kEnabledNoBuffer:
4447 case TunnelPeekState::kEnabledQueued:
4448 {
4449 sp<AMessage> parameters = new AMessage();
4450 parameters->setInt32("android._trigger-tunnel-peek", 1);
4451 mCodec->signalSetParameters(parameters);
4452 }
4453 mTunnelPeekState = TunnelPeekState::kBufferRendered;
4454 ALOGV("First tunnel frame ready");
4455 ALOGV("TunnelPeekState: %s -> %s",
4456 asString(previousState),
4457 asString(mTunnelPeekState));
4458 break;
4459 default:
4460 ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
4461 asString(mTunnelPeekState));
4462 break;
4463 }
4464
4465 if (mOnFirstTunnelFrameReadyNotification != nullptr) {
4466 sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
4467 notify->setMessage("data", msg);
4468 notify->post();
4469 }
4470 break;
4471 }
4472
4473 case kWhatFillThisBuffer:
4474 {
4475 /* size_t index = */updateBuffers(kPortIndexInput, msg);
4476
4477 if (mState == FLUSHING
4478 || mState == STOPPING
4479 || mState == RELEASING) {
4480 returnBuffersToCodecOnPort(kPortIndexInput);
4481 break;
4482 }
4483
4484 if (!mCSD.empty()) {
4485 ssize_t index = dequeuePortBuffer(kPortIndexInput);
4486 CHECK_GE(index, 0);
4487
4488 // If codec specific data had been specified as
4489 // part of the format in the call to configure and
4490 // if there's more csd left, we submit it here
4491 // clients only get access to input buffers once
4492 // this data has been exhausted.
4493
4494 status_t err = queueCSDInputBuffer(index);
4495
4496 if (err != OK) {
4497 ALOGE("queueCSDInputBuffer failed w/ error %d",
4498 err);
4499
4500 setStickyError(err);
4501 postActivityNotificationIfPossible();
4502
4503 cancelPendingDequeueOperations();
4504 }
4505 break;
4506 }
4507 if (!mLeftover.empty()) {
4508 ssize_t index = dequeuePortBuffer(kPortIndexInput);
4509 CHECK_GE(index, 0);
4510
4511 status_t err = handleLeftover(index);
4512 if (err != OK) {
4513 setStickyError(err);
4514 postActivityNotificationIfPossible();
4515 cancelPendingDequeueOperations();
4516 }
4517 break;
4518 }
4519
4520 if (mFlags & kFlagIsAsync) {
4521 if (!mHaveInputSurface) {
4522 if (mState == FLUSHED) {
4523 mHavePendingInputBuffers = true;
4524 } else {
4525 onInputBufferAvailable();
4526 }
4527 }
4528 } else if (mFlags & kFlagDequeueInputPending) {
4529 CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
4530
4531 ++mDequeueInputTimeoutGeneration;
4532 mFlags &= ~kFlagDequeueInputPending;
4533 mDequeueInputReplyID = 0;
4534 } else {
4535 postActivityNotificationIfPossible();
4536 }
4537 break;
4538 }
4539
4540 case kWhatDrainThisBuffer:
4541 {
4542 if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
4543 sp<RefBase> obj;
4544 CHECK(msg->findObject("buffer", &obj));
4545 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4546 if (mFlags & kFlagIsAsync) {
4547 // In asynchronous mode, output format change is processed immediately.
4548 handleOutputFormatChangeIfNeeded(buffer);
4549 } else {
4550 postActivityNotificationIfPossible();
4551 }
4552 mBufferChannel->discardBuffer(buffer);
4553 break;
4554 }
4555
4556 /* size_t index = */updateBuffers(kPortIndexOutput, msg);
4557
4558 if (mState == FLUSHING
4559 || mState == STOPPING
4560 || mState == RELEASING) {
4561 returnBuffersToCodecOnPort(kPortIndexOutput);
4562 break;
4563 }
4564
4565 if (mFlags & kFlagIsAsync) {
4566 sp<RefBase> obj;
4567 CHECK(msg->findObject("buffer", &obj));
4568 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4569
4570 // In asynchronous mode, output format change is processed immediately.
4571 handleOutputFormatChangeIfNeeded(buffer);
4572 onOutputBufferAvailable();
4573 } else if (mFlags & kFlagDequeueOutputPending) {
4574 DequeueOutputResult dequeueResult =
4575 handleDequeueOutputBuffer(mDequeueOutputReplyID);
4576 switch (dequeueResult) {
4577 case DequeueOutputResult::kNoBuffer:
4578 TRESPASS();
4579 break;
4580 case DequeueOutputResult::kDiscardedBuffer:
4581 break;
4582 case DequeueOutputResult::kRepliedWithError:
4583 [[fallthrough]];
4584 case DequeueOutputResult::kSuccess:
4585 {
4586 ++mDequeueOutputTimeoutGeneration;
4587 mFlags &= ~kFlagDequeueOutputPending;
4588 mDequeueOutputReplyID = 0;
4589 break;
4590 }
4591 default:
4592 TRESPASS();
4593 }
4594 } else {
4595 postActivityNotificationIfPossible();
4596 }
4597
4598 break;
4599 }
4600
4601 case kWhatMetricsUpdated:
4602 {
4603 sp<AMessage> updatedMetrics;
4604 CHECK(msg->findMessage("updated-metrics", &updatedMetrics));
4605
4606 size_t numEntries = updatedMetrics->countEntries();
4607 AMessage::Type type;
4608 for (size_t i = 0; i < numEntries; ++i) {
4609 const char *name = updatedMetrics->getEntryNameAt(i, &type);
4610 AMessage::ItemData itemData = updatedMetrics->getEntryAt(i);
4611 switch (type) {
4612 case AMessage::kTypeInt32: {
4613 int32_t metricValue;
4614 itemData.find(&metricValue);
4615 mediametrics_setInt32(mMetricsHandle, name, metricValue);
4616 break;
4617 }
4618 case AMessage::kTypeInt64: {
4619 int64_t metricValue;
4620 itemData.find(&metricValue);
4621 mediametrics_setInt64(mMetricsHandle, name, metricValue);
4622 break;
4623 }
4624 case AMessage::kTypeDouble: {
4625 double metricValue;
4626 itemData.find(&metricValue);
4627 mediametrics_setDouble(mMetricsHandle, name, metricValue);
4628 break;
4629 }
4630 case AMessage::kTypeString: {
4631 AString metricValue;
4632 itemData.find(&metricValue);
4633 mediametrics_setCString(mMetricsHandle, name, metricValue.c_str());
4634 break;
4635 }
4636 // ToDo: add support for other types
4637 default:
4638 ALOGW("Updated metrics type not supported.");
4639 }
4640 }
4641 break;
4642 }
4643
4644 case kWhatEOS:
4645 {
4646 // We already notify the client of this by using the
4647 // corresponding flag in "onOutputBufferReady".
4648 break;
4649 }
4650
4651 case kWhatStopCompleted:
4652 {
4653 if (mState != STOPPING) {
4654 ALOGW("Received kWhatStopCompleted in state %d/%s",
4655 mState, stateString(mState).c_str());
4656 break;
4657 }
4658
4659 if (mIsSurfaceToDisplay) {
4660 mVideoRenderQualityTracker.resetForDiscontinuity();
4661 }
4662
4663 // Notify the RM that the codec has been stopped.
4664 ClientConfigParcel clientConfig;
4665 initClientConfigParcel(clientConfig);
4666 mResourceManagerProxy->notifyClientStopped(clientConfig);
4667
4668 setState(INITIALIZED);
4669 if (mReplyID) {
4670 postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
4671 } else {
4672 ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
4673 "but the operation completed anyway. (last reply origin=%s)",
4674 mLastReplyOrigin.c_str());
4675 }
4676 break;
4677 }
4678
4679 case kWhatReleaseCompleted:
4680 {
4681 if (mState != RELEASING) {
4682 ALOGW("Received kWhatReleaseCompleted in state %d/%s",
4683 mState, stateString(mState).c_str());
4684 break;
4685 }
4686 setState(UNINITIALIZED);
4687 mComponentName.clear();
4688
4689 mFlags &= ~kFlagIsComponentAllocated;
4690
4691 // off since we're removing all resources including the battery on
4692 if (mBatteryChecker != nullptr) {
4693 mBatteryChecker->onClientRemoved();
4694 }
4695
4696 mResourceManagerProxy->removeClient();
4697 mDetachedSurface.reset();
4698
4699 if (mReplyID != nullptr) {
4700 postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
4701 }
4702 if (mAsyncReleaseCompleteNotification != nullptr) {
4703 flushMediametrics();
4704 mAsyncReleaseCompleteNotification->post();
4705 mAsyncReleaseCompleteNotification.clear();
4706 }
4707 break;
4708 }
4709
4710 case kWhatFlushCompleted:
4711 {
4712 if (mState != FLUSHING) {
4713 ALOGW("received FlushCompleted message in state %d/%s",
4714 mState, stateString(mState).c_str());
4715 break;
4716 }
4717
4718 if (mIsSurfaceToDisplay) {
4719 mVideoRenderQualityTracker.resetForDiscontinuity();
4720 }
4721
4722 if (mFlags & kFlagIsAsync) {
4723 setState(FLUSHED);
4724 } else {
4725 setState(STARTED);
4726 mCodec->signalResume();
4727 }
4728 mReliabilityContextMetrics.flushCount++;
4729
4730 postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
4731 break;
4732 }
4733
4734 default:
4735 TRESPASS();
4736 }
4737 break;
4738 }
4739
4740 case kWhatInit:
4741 {
4742 if (mState != UNINITIALIZED) {
4743 PostReplyWithError(msg, INVALID_OPERATION);
4744 break;
4745 }
4746
4747 if (mReplyID) {
4748 mDeferredMessages.push_back(msg);
4749 break;
4750 }
4751 sp<AReplyToken> replyID;
4752 CHECK(msg->senderAwaitsResponse(&replyID));
4753
4754 mReplyID = replyID;
4755 setState(INITIALIZING);
4756
4757 sp<RefBase> codecInfo;
4758 (void)msg->findObject("codecInfo", &codecInfo);
4759 AString name;
4760 CHECK(msg->findString("name", &name));
4761
4762 sp<AMessage> format = new AMessage;
4763 if (codecInfo) {
4764 format->setObject("codecInfo", codecInfo);
4765 }
4766 format->setString("componentName", name);
4767
4768 mCodec->initiateAllocateComponent(format);
4769 break;
4770 }
4771
4772 case kWhatSetNotification:
4773 {
4774 sp<AMessage> notify;
4775 if (msg->findMessage("on-frame-rendered", ¬ify)) {
4776 mOnFrameRenderedNotification = notify;
4777 }
4778 if (msg->findMessage("first-tunnel-frame-ready", ¬ify)) {
4779 mOnFirstTunnelFrameReadyNotification = notify;
4780 }
4781 break;
4782 }
4783
4784 case kWhatSetCallback:
4785 {
4786 sp<AReplyToken> replyID;
4787 CHECK(msg->senderAwaitsResponse(&replyID));
4788
4789 if (mState == UNINITIALIZED
4790 || mState == INITIALIZING
4791 || isExecuting()) {
4792 // callback can't be set after codec is executing,
4793 // or before it's initialized (as the callback
4794 // will be cleared when it goes to INITIALIZED)
4795 mErrorLog.log(LOG_TAG, base::StringPrintf(
4796 "Invalid to call %s; only valid at Initialized state",
4797 apiStateString().c_str()));
4798 PostReplyWithError(replyID, INVALID_OPERATION);
4799 break;
4800 }
4801
4802 sp<AMessage> callback;
4803 CHECK(msg->findMessage("callback", &callback));
4804
4805 mCallback = callback;
4806
4807 if (mCallback != NULL) {
4808 ALOGI("MediaCodec will operate in async mode");
4809 mFlags |= kFlagIsAsync;
4810 } else {
4811 mFlags &= ~kFlagIsAsync;
4812 }
4813
4814 sp<AMessage> response = new AMessage;
4815 response->postReply(replyID);
4816 break;
4817 }
4818
4819 case kWhatGetMetrics:
4820 {
4821 onGetMetrics(msg);
4822 break;
4823 }
4824
4825
4826 case kWhatConfigure:
4827 {
4828 if (mState != INITIALIZED) {
4829 mErrorLog.log(LOG_TAG, base::StringPrintf(
4830 "configure() is valid only at Initialized state; currently %s",
4831 apiStateString().c_str()));
4832 PostReplyWithError(msg, INVALID_OPERATION);
4833 break;
4834 }
4835
4836 if (mReplyID) {
4837 mDeferredMessages.push_back(msg);
4838 break;
4839 }
4840 sp<AReplyToken> replyID;
4841 CHECK(msg->senderAwaitsResponse(&replyID));
4842
4843 sp<RefBase> obj;
4844 CHECK(msg->findObject("surface", &obj));
4845
4846 sp<AMessage> format;
4847 CHECK(msg->findMessage("format", &format));
4848
4849 // start with a copy of the passed metrics info for use in this run
4850 mediametrics_handle_t handle;
4851 CHECK(msg->findInt64("metrics", &handle));
4852 if (handle != 0) {
4853 if (mMetricsHandle != 0) {
4854 flushMediametrics();
4855 }
4856 mMetricsHandle = mediametrics_dup(handle);
4857 // and set some additional metrics values
4858 initMediametrics();
4859 }
4860
4861 // from this point forward, in this configure/use/release lifecycle, we want to
4862 // upload our data
4863 mMetricsToUpload = true;
4864
4865 int32_t push;
4866 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
4867 mFlags |= kFlagPushBlankBuffersOnShutdown;
4868 }
4869
4870 uint32_t flags;
4871 CHECK(msg->findInt32("flags", (int32_t *)&flags));
4872
4873 if (android::media::codec::provider_->null_output_surface_support()) {
4874 if (obj == nullptr
4875 && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
4876 && !(flags & CONFIGURE_FLAG_ENCODE)) {
4877 sp<Surface> surface = getOrCreateDetachedSurface();
4878 if (surface == nullptr) {
4879 mErrorLog.log(
4880 LOG_TAG, "Detached surface mode is not supported by this codec");
4881 PostReplyWithError(replyID, INVALID_OPERATION);
4882 }
4883 obj = surface;
4884 }
4885 }
4886
4887 if (obj != NULL) {
4888 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
4889 // allow frame dropping by surface by default
4890 mAllowFrameDroppingBySurface = true;
4891 }
4892
4893 format->setObject("native-window", obj);
4894 status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
4895 if (err != OK) {
4896 PostReplyWithError(replyID, err);
4897 break;
4898 }
4899 uint32_t generation = mSurfaceGeneration;
4900 format->setInt32("native-window-generation", generation);
4901 } else {
4902 // we are not using surface so this variable is not used, but initialize sensibly anyway
4903 mAllowFrameDroppingBySurface = false;
4904
4905 handleSetSurface(NULL);
4906 }
4907
4908 mApiUsageMetrics.isUsingOutputSurface = true;
4909
4910 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
4911 flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
4912 if (!(mFlags & kFlagIsAsync)) {
4913 mErrorLog.log(
4914 LOG_TAG, "Block model is only valid with callback set (async mode)");
4915 PostReplyWithError(replyID, INVALID_OPERATION);
4916 break;
4917 }
4918 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
4919 mFlags |= kFlagUseBlockModel;
4920 }
4921 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
4922 mFlags |= kFlagUseCryptoAsync;
4923 if ((mFlags & kFlagUseBlockModel)) {
4924 ALOGW("CrytoAsync not yet enabled for block model, "
4925 "falling back to normal");
4926 }
4927 }
4928 }
4929 int32_t largeFrameParamMax = 0, largeFrameParamThreshold = 0;
4930 if (format->findInt32(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, &largeFrameParamMax) ||
4931 format->findInt32(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
4932 &largeFrameParamThreshold)) {
4933 if (largeFrameParamMax > 0 || largeFrameParamThreshold > 0) {
4934 if(mComponentName.startsWith("OMX")) {
4935 mErrorLog.log(LOG_TAG,
4936 "Large Frame params are not supported on OMX codecs."
4937 "Currently only supported on C2 audio codec.");
4938 PostReplyWithError(replyID, INVALID_OPERATION);
4939 break;
4940 }
4941 AString mime;
4942 CHECK(format->findString("mime", &mime));
4943 if (!mime.startsWith("audio")) {
4944 mErrorLog.log(LOG_TAG,
4945 "Large Frame params only works with audio codec");
4946 PostReplyWithError(replyID, INVALID_OPERATION);
4947 break;
4948 }
4949 if (!(mFlags & kFlagIsAsync)) {
4950 mErrorLog.log(LOG_TAG, "Large Frame audio" \
4951 "config works only with async mode");
4952 PostReplyWithError(replyID, INVALID_OPERATION);
4953 break;
4954 }
4955 }
4956 }
4957
4958 mReplyID = replyID;
4959 setState(CONFIGURING);
4960
4961 void *crypto;
4962 if (!msg->findPointer("crypto", &crypto)) {
4963 crypto = NULL;
4964 }
4965
4966 ALOGV("kWhatConfigure: Old mCrypto: %p (%d)",
4967 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4968
4969 mCrypto = static_cast<ICrypto *>(crypto);
4970 mBufferChannel->setCrypto(mCrypto);
4971
4972 ALOGV("kWhatConfigure: New mCrypto: %p (%d)",
4973 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4974
4975 void *descrambler;
4976 if (!msg->findPointer("descrambler", &descrambler)) {
4977 descrambler = NULL;
4978 }
4979
4980 mDescrambler = static_cast<IDescrambler *>(descrambler);
4981 mBufferChannel->setDescrambler(mDescrambler);
4982 if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
4983 // set kFlagUseCryptoAsync but do-not use this for block model
4984 // this is to propagate the error in onCryptoError()
4985 // TODO (b/274628160): Enable Use of CONFIG_FLAG_USE_CRYPTO_ASYNC
4986 // with CONFIGURE_FLAG_USE_BLOCK_MODEL)
4987 if (!(mFlags & kFlagUseBlockModel)) {
4988 mCryptoAsync = new CryptoAsync(mBufferChannel);
4989 mCryptoAsync->setCallback(
4990 std::make_unique<CryptoAsyncCallback>(new AMessage(kWhatCodecNotify, this)));
4991 mCryptoLooper = new ALooper();
4992 mCryptoLooper->setName("CryptoAsyncLooper");
4993 mCryptoLooper->registerHandler(mCryptoAsync);
4994 status_t err = mCryptoLooper->start();
4995 if (err != OK) {
4996 ALOGE("Crypto Looper failed to start");
4997 mCryptoAsync = nullptr;
4998 mCryptoLooper = nullptr;
4999 }
5000 }
5001 }
5002
5003 format->setInt32("flags", flags);
5004 if (flags & CONFIGURE_FLAG_ENCODE) {
5005 format->setInt32("encoder", true);
5006 mFlags |= kFlagIsEncoder;
5007 }
5008
5009 extractCSD(format);
5010
5011 int32_t tunneled;
5012 if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
5013 ALOGI("Configuring TUNNELED video playback.");
5014 mTunneled = true;
5015 } else {
5016 mTunneled = false;
5017 }
5018 mediametrics_setInt32(mMetricsHandle, kCodecTunneled, mTunneled ? 1 : 0);
5019
5020 int32_t background = 0;
5021 if (format->findInt32("android._background-mode", &background) && background) {
5022 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
5023 }
5024
5025 mCodec->initiateConfigureComponent(format);
5026 break;
5027 }
5028
5029 case kWhatDetachSurface:
5030 {
5031 // detach surface is equivalent to setSurface(mDetachedSurface)
5032 sp<Surface> surface = getOrCreateDetachedSurface();
5033
5034 if (surface == nullptr) {
5035 sp<AReplyToken> replyID;
5036 CHECK(msg->senderAwaitsResponse(&replyID));
5037 mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
5038 PostReplyWithError(replyID, INVALID_OPERATION);
5039 break;
5040 }
5041
5042 msg->setObject("surface", surface);
5043 }
5044 [[fallthrough]];
5045
5046 case kWhatSetSurface:
5047 {
5048 sp<AReplyToken> replyID;
5049 CHECK(msg->senderAwaitsResponse(&replyID));
5050
5051 status_t err = OK;
5052
5053 switch (mState) {
5054 case CONFIGURED:
5055 case STARTED:
5056 case FLUSHED:
5057 {
5058 sp<RefBase> obj;
5059 (void)msg->findObject("surface", &obj);
5060 sp<Surface> surface = static_cast<Surface *>(obj.get());
5061 if (mSurface == NULL) {
5062 // do not support setting surface if it was not set
5063 mErrorLog.log(LOG_TAG, base::StringPrintf(
5064 "Cannot %s surface if the codec is not configured with "
5065 "a surface already",
5066 msg->what() == kWhatDetachSurface ? "detach" : "set"));
5067 err = INVALID_OPERATION;
5068 } else if (obj == NULL) {
5069 // do not support unsetting surface
5070 mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
5071 err = BAD_VALUE;
5072 } else if (android::media::codec::provider_->null_output_surface_support()) {
5073 err = handleSetSurface(surface, true /* callCodec */);
5074 } else {
5075 uint32_t generation;
5076 err = connectToSurface(surface, &generation);
5077 if (err == ALREADY_EXISTS) {
5078 // reconnecting to same surface
5079 err = OK;
5080 } else {
5081 if (err == OK) {
5082 if (mFlags & kFlagUsesSoftwareRenderer) {
5083 if (mSoftRenderer != NULL
5084 && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
5085 pushBlankBuffersToNativeWindow(mSurface.get());
5086 }
5087 surface->setDequeueTimeout(-1);
5088 mSoftRenderer = new SoftwareRenderer(surface);
5089 // TODO: check if this was successful
5090 } else {
5091 err = mCodec->setSurface(surface, generation);
5092 }
5093 }
5094 if (err == OK) {
5095 (void)disconnectFromSurface();
5096 mSurface = surface;
5097 mSurfaceGeneration = generation;
5098 }
5099 mReliabilityContextMetrics.setOutputSurfaceCount++;
5100 }
5101 }
5102 break;
5103 }
5104
5105 default:
5106 mErrorLog.log(LOG_TAG, base::StringPrintf(
5107 "%sSurface() is valid only at Executing states; currently %s",
5108 msg->what() == kWhatDetachSurface ? "detach" : "set",
5109 apiStateString().c_str()));
5110 err = INVALID_OPERATION;
5111 break;
5112 }
5113
5114 PostReplyWithError(replyID, err);
5115 break;
5116 }
5117
5118 case kWhatCreateInputSurface:
5119 case kWhatSetInputSurface:
5120 {
5121 // Must be configured, but can't have been started yet.
5122 if (mState != CONFIGURED) {
5123 mErrorLog.log(LOG_TAG, base::StringPrintf(
5124 "setInputSurface() is valid only at Configured state; currently %s",
5125 apiStateString().c_str()));
5126 PostReplyWithError(msg, INVALID_OPERATION);
5127 break;
5128 }
5129
5130 if (mReplyID) {
5131 mDeferredMessages.push_back(msg);
5132 break;
5133 }
5134 sp<AReplyToken> replyID;
5135 CHECK(msg->senderAwaitsResponse(&replyID));
5136
5137 mReplyID = replyID;
5138 if (msg->what() == kWhatCreateInputSurface) {
5139 mCodec->initiateCreateInputSurface();
5140 } else {
5141 sp<RefBase> obj;
5142 CHECK(msg->findObject("input-surface", &obj));
5143
5144 mCodec->initiateSetInputSurface(
5145 static_cast<PersistentSurface *>(obj.get()));
5146 }
5147 break;
5148 }
5149 case kWhatStart:
5150 {
5151 if (mState == FLUSHED) {
5152 setState(STARTED);
5153 if (mHavePendingInputBuffers) {
5154 onInputBufferAvailable();
5155 mHavePendingInputBuffers = false;
5156 }
5157 mCodec->signalResume();
5158 PostReplyWithError(msg, OK);
5159 break;
5160 } else if (mState != CONFIGURED) {
5161 mErrorLog.log(LOG_TAG, base::StringPrintf(
5162 "start() is valid only at Configured state; currently %s",
5163 apiStateString().c_str()));
5164 PostReplyWithError(msg, INVALID_OPERATION);
5165 break;
5166 }
5167
5168 if (mReplyID) {
5169 mDeferredMessages.push_back(msg);
5170 break;
5171 }
5172 sp<AReplyToken> replyID;
5173 CHECK(msg->senderAwaitsResponse(&replyID));
5174 TunnelPeekState previousState = mTunnelPeekState;
5175 if (previousState != TunnelPeekState::kLegacyMode) {
5176 mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
5177 TunnelPeekState::kDisabledNoBuffer;
5178 ALOGV("TunnelPeekState: %s -> %s",
5179 asString(previousState),
5180 asString(mTunnelPeekState));
5181 }
5182
5183 mReplyID = replyID;
5184 setState(STARTING);
5185
5186 mCodec->initiateStart();
5187 break;
5188 }
5189
5190 case kWhatStop: {
5191 if (mReplyID) {
5192 mDeferredMessages.push_back(msg);
5193 break;
5194 }
5195 [[fallthrough]];
5196 }
5197 case kWhatRelease:
5198 {
5199 State targetState =
5200 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
5201
5202 if ((mState == RELEASING && targetState == UNINITIALIZED)
5203 || (mState == STOPPING && targetState == INITIALIZED)) {
5204 mDeferredMessages.push_back(msg);
5205 break;
5206 }
5207
5208 sp<AReplyToken> replyID;
5209 CHECK(msg->senderAwaitsResponse(&replyID));
5210 if (mCryptoAsync) {
5211 mCryptoAsync->stop();
5212 }
5213 sp<AMessage> asyncNotify;
5214 (void)msg->findMessage("async", &asyncNotify);
5215 // post asyncNotify if going out of scope.
5216 struct AsyncNotifyPost {
5217 AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
5218 ~AsyncNotifyPost() {
5219 if (mAsyncNotify) {
5220 mAsyncNotify->post();
5221 }
5222 }
5223 void clear() { mAsyncNotify.clear(); }
5224 private:
5225 sp<AMessage> mAsyncNotify;
5226 } asyncNotifyPost{asyncNotify};
5227
5228 // already stopped/released
5229 if (mState == UNINITIALIZED && mReleasedByResourceManager) {
5230 sp<AMessage> response = new AMessage;
5231 response->setInt32("err", OK);
5232 response->postReply(replyID);
5233 break;
5234 }
5235
5236 int32_t reclaimed = 0;
5237 msg->findInt32("reclaimed", &reclaimed);
5238 if (reclaimed) {
5239 if (!mReleasedByResourceManager) {
5240 // notify the async client
5241 if (mFlags & kFlagIsAsync) {
5242 onError(DEAD_OBJECT, ACTION_CODE_FATAL);
5243 }
5244 mErrorLog.log(LOG_TAG, "Released by resource manager");
5245 mReleasedByResourceManager = true;
5246 }
5247
5248 int32_t force = 0;
5249 msg->findInt32("force", &force);
5250 if (!force && hasPendingBuffer()) {
5251 ALOGW("Can't reclaim codec right now due to pending buffers.");
5252
5253 // return WOULD_BLOCK to ask resource manager to retry later.
5254 sp<AMessage> response = new AMessage;
5255 response->setInt32("err", WOULD_BLOCK);
5256 response->postReply(replyID);
5257
5258 break;
5259 }
5260 }
5261
5262 bool isReleasingAllocatedComponent =
5263 (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
5264 if (!isReleasingAllocatedComponent // See 1
5265 && mState != INITIALIZED
5266 && mState != CONFIGURED && !isExecuting()) {
5267 // 1) Permit release to shut down the component if allocated.
5268 //
5269 // 2) We may be in "UNINITIALIZED" state already and
5270 // also shutdown the encoder/decoder without the
5271 // client being aware of this if media server died while
5272 // we were being stopped. The client would assume that
5273 // after stop() returned, it would be safe to call release()
5274 // and it should be in this case, no harm to allow a release()
5275 // if we're already uninitialized.
5276 sp<AMessage> response = new AMessage;
5277 // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
5278 // the previous stop/release completes and then reply with OK.
5279 status_t err = mState == targetState ? OK : INVALID_OPERATION;
5280 response->setInt32("err", err);
5281 // TODO: mErrorLog
5282 if (err == OK && targetState == UNINITIALIZED) {
5283 mComponentName.clear();
5284 }
5285 response->postReply(replyID);
5286 break;
5287 }
5288
5289 // If we're flushing, configuring or starting but
5290 // received a release request, post the reply for the pending call
5291 // first, and consider it done. The reply token will be replaced
5292 // after this, and we'll no longer be able to reply.
5293 if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
5294 // mReply is always set if in these states.
5295 postPendingRepliesAndDeferredMessages(
5296 std::string("kWhatRelease:") + stateString(mState));
5297 }
5298 // If we're stopping but received a release request, post the reply
5299 // for the pending call if necessary. Note that the reply may have been
5300 // already posted due to an error.
5301 if (mState == STOPPING && mReplyID) {
5302 postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
5303 }
5304
5305 if (mFlags & kFlagSawMediaServerDie) {
5306 // It's dead, Jim. Don't expect initiateShutdown to yield
5307 // any useful results now...
5308 // Any pending reply would have been handled at kWhatError.
5309 setState(UNINITIALIZED);
5310 if (targetState == UNINITIALIZED) {
5311 mComponentName.clear();
5312 }
5313 (new AMessage)->postReply(replyID);
5314 break;
5315 }
5316
5317 // If we already have an error, component may not be able to
5318 // complete the shutdown properly. If we're stopping, post the
5319 // reply now with an error to unblock the client, client can
5320 // release after the failure (instead of ANR).
5321 if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
5322 // Any pending reply would have been handled at kWhatError.
5323 PostReplyWithError(replyID, getStickyError());
5324 break;
5325 }
5326
5327 bool forceSync = false;
5328 if (asyncNotify != nullptr && mSurface != NULL) {
5329 if (android::media::codec::provider_->null_output_surface_support()) {
5330 if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
5331 true /* onShutDown */) != OK) {
5332 // We were not able to detach the surface, so force
5333 // synchronous release.
5334 forceSync = true;
5335 }
5336 } else {
5337 if (!mDetachedSurface) {
5338 uint64_t usage = 0;
5339 if (mSurface->getConsumerUsage(&usage) != OK) {
5340 usage = 0;
5341 }
5342 mDetachedSurface.reset(new ReleaseSurface(usage));
5343 }
5344 if (mSurface != mDetachedSurface->getSurface()) {
5345 uint32_t generation;
5346 status_t err =
5347 connectToSurface(mDetachedSurface->getSurface(), &generation);
5348 ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
5349 if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
5350 err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
5351 ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
5352 }
5353 if (err == OK) {
5354 (void)disconnectFromSurface();
5355 mSurface = mDetachedSurface->getSurface();
5356 mSurfaceGeneration = generation;
5357 } else {
5358 // We were not able to switch the surface, so force
5359 // synchronous release.
5360 forceSync = true;
5361 }
5362 }
5363 }
5364 }
5365
5366 if (mReplyID) {
5367 // State transition replies are handled above, so this reply
5368 // would not be related to state transition. As we are
5369 // shutting down the component, just fail the operation.
5370 postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
5371 }
5372 mReplyID = replyID;
5373 setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
5374
5375 mCodec->initiateShutdown(
5376 msg->what() == kWhatStop /* keepComponentAllocated */);
5377
5378 returnBuffersToCodec(reclaimed);
5379
5380 if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
5381 pushBlankBuffersToNativeWindow(mSurface.get());
5382 }
5383
5384 if (asyncNotify != nullptr) {
5385 if (!forceSync) {
5386 mResourceManagerProxy->markClientForPendingRemoval();
5387 postPendingRepliesAndDeferredMessages("kWhatRelease:async");
5388 }
5389 asyncNotifyPost.clear();
5390 mAsyncReleaseCompleteNotification = asyncNotify;
5391 }
5392
5393 break;
5394 }
5395
5396 case kWhatDequeueInputBuffer:
5397 {
5398 sp<AReplyToken> replyID;
5399 CHECK(msg->senderAwaitsResponse(&replyID));
5400
5401 if (mFlags & kFlagIsAsync) {
5402 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used in async mode");
5403 PostReplyWithError(replyID, INVALID_OPERATION);
5404 break;
5405 }
5406
5407 if (mHaveInputSurface) {
5408 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used with input surface");
5409 PostReplyWithError(replyID, INVALID_OPERATION);
5410 break;
5411 }
5412
5413 if (handleDequeueInputBuffer(replyID, true /* new request */)) {
5414 break;
5415 }
5416
5417 int64_t timeoutUs;
5418 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
5419
5420 if (timeoutUs == 0LL) {
5421 PostReplyWithError(replyID, -EAGAIN);
5422 break;
5423 }
5424
5425 mFlags |= kFlagDequeueInputPending;
5426 mDequeueInputReplyID = replyID;
5427
5428 if (timeoutUs > 0LL) {
5429 sp<AMessage> timeoutMsg =
5430 new AMessage(kWhatDequeueInputTimedOut, this);
5431 timeoutMsg->setInt32(
5432 "generation", ++mDequeueInputTimeoutGeneration);
5433 timeoutMsg->post(timeoutUs);
5434 }
5435 break;
5436 }
5437
5438 case kWhatDequeueInputTimedOut:
5439 {
5440 int32_t generation;
5441 CHECK(msg->findInt32("generation", &generation));
5442
5443 if (generation != mDequeueInputTimeoutGeneration) {
5444 // Obsolete
5445 break;
5446 }
5447
5448 CHECK(mFlags & kFlagDequeueInputPending);
5449
5450 PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
5451
5452 mFlags &= ~kFlagDequeueInputPending;
5453 mDequeueInputReplyID = 0;
5454 break;
5455 }
5456
5457 case kWhatQueueInputBuffer:
5458 {
5459 sp<AReplyToken> replyID;
5460 CHECK(msg->senderAwaitsResponse(&replyID));
5461
5462 if (!isExecuting()) {
5463 mErrorLog.log(LOG_TAG, base::StringPrintf(
5464 "queueInputBuffer() is valid only at Executing states; currently %s",
5465 apiStateString().c_str()));
5466 PostReplyWithError(replyID, INVALID_OPERATION);
5467 break;
5468 } else if (mFlags & kFlagStickyError) {
5469 PostReplyWithError(replyID, getStickyError());
5470 break;
5471 }
5472
5473 status_t err = UNKNOWN_ERROR;
5474 if (!mLeftover.empty()) {
5475 mLeftover.push_back(msg);
5476 size_t index;
5477 msg->findSize("index", &index);
5478 err = handleLeftover(index);
5479 } else {
5480 err = onQueueInputBuffer(msg);
5481 }
5482
5483 PostReplyWithError(replyID, err);
5484 break;
5485 }
5486
5487 case kWhatDequeueOutputBuffer:
5488 {
5489 sp<AReplyToken> replyID;
5490 CHECK(msg->senderAwaitsResponse(&replyID));
5491
5492 if (mFlags & kFlagIsAsync) {
5493 mErrorLog.log(LOG_TAG, "dequeueOutputBuffer can't be used in async mode");
5494 PostReplyWithError(replyID, INVALID_OPERATION);
5495 break;
5496 }
5497
5498 DequeueOutputResult dequeueResult =
5499 handleDequeueOutputBuffer(replyID, true /* new request */);
5500 switch (dequeueResult) {
5501 case DequeueOutputResult::kNoBuffer:
5502 [[fallthrough]];
5503 case DequeueOutputResult::kDiscardedBuffer:
5504 {
5505 int64_t timeoutUs;
5506 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
5507
5508 if (timeoutUs == 0LL) {
5509 PostReplyWithError(replyID, -EAGAIN);
5510 break;
5511 }
5512
5513 mFlags |= kFlagDequeueOutputPending;
5514 mDequeueOutputReplyID = replyID;
5515
5516 if (timeoutUs > 0LL) {
5517 sp<AMessage> timeoutMsg =
5518 new AMessage(kWhatDequeueOutputTimedOut, this);
5519 timeoutMsg->setInt32(
5520 "generation", ++mDequeueOutputTimeoutGeneration);
5521 timeoutMsg->post(timeoutUs);
5522 }
5523 break;
5524 }
5525 case DequeueOutputResult::kRepliedWithError:
5526 [[fallthrough]];
5527 case DequeueOutputResult::kSuccess:
5528 break;
5529 default:
5530 TRESPASS();
5531 }
5532 break;
5533 }
5534
5535 case kWhatDequeueOutputTimedOut:
5536 {
5537 int32_t generation;
5538 CHECK(msg->findInt32("generation", &generation));
5539
5540 if (generation != mDequeueOutputTimeoutGeneration) {
5541 // Obsolete
5542 break;
5543 }
5544
5545 CHECK(mFlags & kFlagDequeueOutputPending);
5546
5547 PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
5548
5549 mFlags &= ~kFlagDequeueOutputPending;
5550 mDequeueOutputReplyID = 0;
5551 break;
5552 }
5553
5554 case kWhatReleaseOutputBuffer:
5555 {
5556 sp<AReplyToken> replyID;
5557 CHECK(msg->senderAwaitsResponse(&replyID));
5558
5559 if (!isExecuting()) {
5560 mErrorLog.log(LOG_TAG, base::StringPrintf(
5561 "releaseOutputBuffer() is valid only at Executing states; currently %s",
5562 apiStateString().c_str()));
5563 PostReplyWithError(replyID, INVALID_OPERATION);
5564 break;
5565 } else if (mFlags & kFlagStickyError) {
5566 PostReplyWithError(replyID, getStickyError());
5567 break;
5568 }
5569
5570 status_t err = onReleaseOutputBuffer(msg);
5571
5572 PostReplyWithError(replyID, err);
5573 break;
5574 }
5575
5576 case kWhatPollForRenderedBuffers:
5577 {
5578 if (isExecuting()) {
5579 mBufferChannel->pollForRenderedBuffers();
5580 }
5581 break;
5582 }
5583
5584 case kWhatSignalEndOfInputStream:
5585 {
5586 if (!isExecuting()) {
5587 mErrorLog.log(LOG_TAG, base::StringPrintf(
5588 "signalEndOfInputStream() is valid only at Executing states; currently %s",
5589 apiStateString().c_str()));
5590 PostReplyWithError(msg, INVALID_OPERATION);
5591 break;
5592 } else if (!mHaveInputSurface) {
5593 mErrorLog.log(
5594 LOG_TAG, "signalEndOfInputStream() called without an input surface set");
5595 PostReplyWithError(msg, INVALID_OPERATION);
5596 break;
5597 } else if (mFlags & kFlagStickyError) {
5598 PostReplyWithError(msg, getStickyError());
5599 break;
5600 }
5601
5602 if (mReplyID) {
5603 mDeferredMessages.push_back(msg);
5604 break;
5605 }
5606 sp<AReplyToken> replyID;
5607 CHECK(msg->senderAwaitsResponse(&replyID));
5608
5609 mReplyID = replyID;
5610 mCodec->signalEndOfInputStream();
5611 break;
5612 }
5613
5614 case kWhatGetBuffers:
5615 {
5616 sp<AReplyToken> replyID;
5617 CHECK(msg->senderAwaitsResponse(&replyID));
5618 if (!isExecuting()) {
5619 mErrorLog.log(LOG_TAG, base::StringPrintf(
5620 "getInput/OutputBuffers() is valid only at Executing states; currently %s",
5621 apiStateString().c_str()));
5622 PostReplyWithError(replyID, INVALID_OPERATION);
5623 break;
5624 } else if (mFlags & kFlagIsAsync) {
5625 mErrorLog.log(LOG_TAG, "getInput/OutputBuffers() is not supported with callbacks");
5626 PostReplyWithError(replyID, INVALID_OPERATION);
5627 break;
5628 } else if (mFlags & kFlagStickyError) {
5629 PostReplyWithError(replyID, getStickyError());
5630 break;
5631 }
5632
5633 int32_t portIndex;
5634 CHECK(msg->findInt32("portIndex", &portIndex));
5635
5636 Vector<sp<MediaCodecBuffer> > *dstBuffers;
5637 CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
5638
5639 dstBuffers->clear();
5640 // If we're using input surface (either non-persistent created by
5641 // createInputSurface(), or persistent set by setInputSurface()),
5642 // give the client an empty input buffers array.
5643 if (portIndex != kPortIndexInput || !mHaveInputSurface) {
5644 if (portIndex == kPortIndexInput) {
5645 mBufferChannel->getInputBufferArray(dstBuffers);
5646 } else {
5647 mBufferChannel->getOutputBufferArray(dstBuffers);
5648 }
5649 }
5650
5651 mApiUsageMetrics.isArrayMode = true;
5652
5653 (new AMessage)->postReply(replyID);
5654 break;
5655 }
5656
5657 case kWhatFlush:
5658 {
5659 if (!isExecuting()) {
5660 mErrorLog.log(LOG_TAG, base::StringPrintf(
5661 "flush() is valid only at Executing states; currently %s",
5662 apiStateString().c_str()));
5663 PostReplyWithError(msg, INVALID_OPERATION);
5664 break;
5665 } else if (mFlags & kFlagStickyError) {
5666 PostReplyWithError(msg, getStickyError());
5667 break;
5668 }
5669
5670 if (mReplyID) {
5671 mDeferredMessages.push_back(msg);
5672 break;
5673 }
5674 sp<AReplyToken> replyID;
5675 CHECK(msg->senderAwaitsResponse(&replyID));
5676
5677 mReplyID = replyID;
5678 // TODO: skip flushing if already FLUSHED
5679 setState(FLUSHING);
5680 if (mCryptoAsync) {
5681 std::list<sp<AMessage>> pendingBuffers;
5682 mCryptoAsync->stop(&pendingBuffers);
5683 //TODO: do something with these buffers
5684 }
5685 mCodec->signalFlush();
5686 returnBuffersToCodec();
5687 TunnelPeekState previousState = mTunnelPeekState;
5688 if (previousState != TunnelPeekState::kLegacyMode) {
5689 mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
5690 TunnelPeekState::kDisabledNoBuffer;
5691 ALOGV("TunnelPeekState: %s -> %s",
5692 asString(previousState),
5693 asString(mTunnelPeekState));
5694 }
5695 break;
5696 }
5697
5698 case kWhatGetInputFormat:
5699 case kWhatGetOutputFormat:
5700 {
5701 sp<AMessage> format =
5702 (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
5703
5704 sp<AReplyToken> replyID;
5705 CHECK(msg->senderAwaitsResponse(&replyID));
5706
5707 if (mState != CONFIGURED && mState != STARTING &&
5708 mState != STARTED && mState != FLUSHING &&
5709 mState != FLUSHED) {
5710 mErrorLog.log(LOG_TAG, base::StringPrintf(
5711 "getInput/OutputFormat() is valid at Executing states "
5712 "and Configured state; currently %s",
5713 apiStateString().c_str()));
5714 PostReplyWithError(replyID, INVALID_OPERATION);
5715 break;
5716 } else if (format == NULL) {
5717 mErrorLog.log(LOG_TAG, "Fatal error: format is not initialized");
5718 PostReplyWithError(replyID, INVALID_OPERATION);
5719 break;
5720 } else if (mFlags & kFlagStickyError) {
5721 PostReplyWithError(replyID, getStickyError());
5722 break;
5723 }
5724
5725 sp<AMessage> response = new AMessage;
5726 response->setMessage("format", format);
5727 response->postReply(replyID);
5728 break;
5729 }
5730
5731 case kWhatRequestIDRFrame:
5732 {
5733 mCodec->signalRequestIDRFrame();
5734 break;
5735 }
5736
5737 case kWhatRequestActivityNotification:
5738 {
5739 CHECK(mActivityNotify == NULL);
5740 CHECK(msg->findMessage("notify", &mActivityNotify));
5741
5742 postActivityNotificationIfPossible();
5743 break;
5744 }
5745
5746 case kWhatGetName:
5747 {
5748 sp<AReplyToken> replyID;
5749 CHECK(msg->senderAwaitsResponse(&replyID));
5750
5751 if (mComponentName.empty()) {
5752 mErrorLog.log(LOG_TAG, "Fatal error: name is not set");
5753 PostReplyWithError(replyID, INVALID_OPERATION);
5754 break;
5755 }
5756
5757 sp<AMessage> response = new AMessage;
5758 response->setString("name", mComponentName.c_str());
5759 response->postReply(replyID);
5760 break;
5761 }
5762
5763 case kWhatGetCodecInfo:
5764 {
5765 sp<AReplyToken> replyID;
5766 CHECK(msg->senderAwaitsResponse(&replyID));
5767
5768 sp<AMessage> response = new AMessage;
5769 response->setObject("codecInfo", mCodecInfo);
5770 response->postReply(replyID);
5771 break;
5772 }
5773
5774 case kWhatSetParameters:
5775 {
5776 sp<AReplyToken> replyID;
5777 CHECK(msg->senderAwaitsResponse(&replyID));
5778
5779 sp<AMessage> params;
5780 CHECK(msg->findMessage("params", ¶ms));
5781
5782 status_t err = onSetParameters(params);
5783
5784 PostReplyWithError(replyID, err);
5785 break;
5786 }
5787
5788 case kWhatDrmReleaseCrypto:
5789 {
5790 onReleaseCrypto(msg);
5791 break;
5792 }
5793
5794 case kWhatCheckBatteryStats:
5795 {
5796 if (mBatteryChecker != nullptr) {
5797 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
5798 mResourceManagerProxy->removeResource(
5799 MediaResource::VideoBatteryResource(mIsHardware));
5800 });
5801 }
5802 break;
5803 }
5804
5805 default:
5806 TRESPASS();
5807 }
5808 }
5809
// Detects whether the format attached to an output buffer differs from the
// cached output format and, if so, adopts the new format and performs all
// follow-up work: annotating the buffer with changed keys (block model),
// configuring the output surface / software renderer for dataspace and HDR
// metadata, amending encoder CSD, notifying the client, and propagating
// resolution changes to mCrypto and the resource manager.
void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
    sp<AMessage> format = buffer->format();
    // Fast path: same format object as before, nothing to do.
    if (mOutputFormat == format) {
        return;
    }
    if (mFlags & kFlagUseBlockModel) {
        // Block model: compute the symmetric difference of the two formats
        // (keys changed in either direction) and attach the key set to the
        // buffer's metadata so the client can see exactly what changed.
        sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
        sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
        std::set<std::string> keys;
        size_t numEntries = diff1->countEntries();
        AMessage::Type type;
        for (size_t i = 0; i < numEntries; ++i) {
            keys.emplace(diff1->getEntryNameAt(i, &type));
        }
        numEntries = diff2->countEntries();
        for (size_t i = 0; i < numEntries; ++i) {
            keys.emplace(diff2->getEntryNameAt(i, &type));
        }
        sp<WrapperObject<std::set<std::string>>> changedKeys{
            new WrapperObject<std::set<std::string>>{std::move(keys)}};
        buffer->meta()->setObject("changedKeys", changedKeys);
    }
    mOutputFormat = format;
    mapFormat(mComponentName, format, nullptr, true);
    ALOGV("[%s] output format changed to: %s",
            mComponentName.c_str(), mOutputFormat->debugString(4).c_str());

    // Software-rendered output going to a surface: push dataspace/HDR
    // metadata to the native window and lazily create the renderer.
    if (mSoftRenderer == NULL &&
            mSurface != NULL &&
            (mFlags & kFlagUsesSoftwareRenderer)) {
        AString mime;
        CHECK(mOutputFormat->findString("mime", &mime));

        // TODO: propagate color aspects to software renderer to allow better
        // color conversion to RGB. For now, just mark dataspace for YUV
        // rendering.
        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            ALOGD("[%s] setting dataspace on output surface to %#x",
                    mComponentName.c_str(), dataSpace);
            int err = native_window_set_buffers_data_space(
                    mSurface.get(), (android_dataspace)dataSpace);
            ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
        }
        if (mOutputFormat->contains("hdr-static-info")) {
            HDRStaticInfo info;
            if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
                setNativeWindowHdrMetadata(mSurface.get(), &info);
            }
        }

        // Forward HDR10+ dynamic metadata, when present and non-empty.
        sp<ABuffer> hdr10PlusInfo;
        if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
                && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
            native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
                    hdr10PlusInfo->size(), hdr10PlusInfo->data());
        }

        if (mime.startsWithIgnoreCase("video/")) {
            // Only video output needs a software renderer instance.
            mSurface->setDequeueTimeout(-1);
            mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
        }
    }

    requestCpuBoostIfNeeded();

    if (mFlags & kFlagIsEncoder) {
        // Before we announce the format change we should
        // collect codec specific data and amend the output
        // format as necessary.
        int32_t flags = 0;
        (void) buffer->meta()->findInt32("flags", &flags);
        if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
                && !mOwnerName.startsWith("codec2::")) {
            status_t err =
                amendOutputFormatWithCodecSpecificData(buffer);

            if (err != OK) {
                ALOGE("Codec spit out malformed codec "
                      "specific data!");
            }
        }
    }
    // Async mode notifies the client callback immediately; otherwise record
    // the change in mFlags and post an activity notification if possible.
    if (mFlags & kFlagIsAsync) {
        onOutputFormatChanged();
    } else {
        mFlags |= kFlagOutputFormatChanged;
        postActivityNotificationIfPossible();
    }

    // Update the width and the height.
    int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
    bool resolutionChanged = false;
    if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
        // Crop rect takes precedence; bounds are treated as inclusive,
        // hence the +1 on each axis.
        mWidth = right - left + 1;
        mHeight = bottom - top + 1;
        resolutionChanged = true;
    } else if (mOutputFormat->findInt32("width", &width) &&
               mOutputFormat->findInt32("height", &height)) {
        mWidth = width;
        mHeight = height;
        resolutionChanged = true;
    }

    // Notify mCrypto and the RM of video resolution changes
    if (resolutionChanged) {
        if (mCrypto != NULL) {
            mCrypto->notifyResolution(mWidth, mHeight);
        }
        ClientConfigParcel clientConfig;
        initClientConfigParcel(clientConfig);
        mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
        mReliabilityContextMetrics.resolutionChangeCount++;
    }

    updateHdrMetrics(false /* isConfig */);
}
5927
extractCSD(const sp<AMessage> & format)5928 void MediaCodec::extractCSD(const sp<AMessage> &format) {
5929 mCSD.clear();
5930
5931 size_t i = 0;
5932 for (;;) {
5933 sp<ABuffer> csd;
5934 if (!format->findBuffer(base::StringPrintf("csd-%zu", i).c_str(), &csd)) {
5935 break;
5936 }
5937 if (csd->size() == 0) {
5938 ALOGW("csd-%zu size is 0", i);
5939 }
5940
5941 mCSD.push_back(csd);
5942 ++i;
5943 }
5944
5945 ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
5946 }
5947
queueCSDInputBuffer(size_t bufferIndex)5948 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
5949 CHECK(!mCSD.empty());
5950
5951 sp<ABuffer> csd = *mCSD.begin();
5952 mCSD.erase(mCSD.begin());
5953 std::shared_ptr<C2Buffer> c2Buffer;
5954 sp<hardware::HidlMemory> memory;
5955
5956 if (mFlags & kFlagUseBlockModel) {
5957 if (hasCryptoOrDescrambler()) {
5958 constexpr size_t kInitialDealerCapacity = 1048576; // 1MB
5959 thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
5960 kInitialDealerCapacity, "CSD(1MB)");
5961 sp<IMemory> mem = sDealer->allocate(csd->size());
5962 if (mem == nullptr) {
5963 size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
5964 while (csd->size() * 2 > newDealerCapacity) {
5965 newDealerCapacity *= 2;
5966 }
5967 sDealer = new MemoryDealer(
5968 newDealerCapacity,
5969 base::StringPrintf("CSD(%zuMB)", newDealerCapacity / 1048576).c_str());
5970 mem = sDealer->allocate(csd->size());
5971 }
5972 memcpy(mem->unsecurePointer(), csd->data(), csd->size());
5973 ssize_t heapOffset;
5974 memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
5975 } else {
5976 std::shared_ptr<C2LinearBlock> block =
5977 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
5978 C2WriteView view{block->map().get()};
5979 if (view.error() != C2_OK) {
5980 mErrorLog.log(LOG_TAG, "Fatal error: failed to allocate and map a block");
5981 return -EINVAL;
5982 }
5983 if (csd->size() > view.capacity()) {
5984 mErrorLog.log(LOG_TAG, base::StringPrintf(
5985 "Fatal error: allocated block is too small "
5986 "(csd size %zu; block cap %u)",
5987 csd->size(), view.capacity()));
5988 return -EINVAL;
5989 }
5990 memcpy(view.base(), csd->data(), csd->size());
5991 c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
5992 }
5993 } else {
5994 const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
5995 const sp<MediaCodecBuffer> &codecInputData = info.mData;
5996
5997 if (csd->size() > codecInputData->capacity()) {
5998 mErrorLog.log(LOG_TAG, base::StringPrintf(
5999 "CSD is too large to fit in input buffer "
6000 "(csd size %zu; buffer cap %zu)",
6001 csd->size(), codecInputData->capacity()));
6002 return -EINVAL;
6003 }
6004 if (codecInputData->data() == NULL) {
6005 ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
6006 mErrorLog.log(LOG_TAG, base::StringPrintf(
6007 "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
6008 return -EINVAL;
6009 }
6010
6011 memcpy(codecInputData->data(), csd->data(), csd->size());
6012 }
6013
6014 AString errorDetailMsg;
6015
6016 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
6017 msg->setSize("index", bufferIndex);
6018 msg->setSize("offset", 0);
6019 msg->setSize("size", csd->size());
6020 msg->setInt64("timeUs", 0LL);
6021 msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
6022 msg->setPointer("errorDetailMsg", &errorDetailMsg);
6023 if (c2Buffer) {
6024 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
6025 new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
6026 msg->setObject("c2buffer", obj);
6027 } else if (memory) {
6028 sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
6029 new WrapperObject<sp<hardware::HidlMemory>>{memory}};
6030 msg->setObject("memory", obj);
6031 }
6032
6033 return onQueueInputBuffer(msg);
6034 }
6035
// Transitions the codec state machine to |newState| and performs the
// cleanup tied to that transition: tearing down the software renderer,
// crypto/descrambler, surface and cached formats when entering
// INITIALIZED/UNINITIALIZED, returning straggling buffers on
// UNINITIALIZED, updating the battery checker, and cancelling any pending
// dequeue operations (which cannot survive a state change).
void MediaCodec::setState(State newState) {
    if (newState == INITIALIZED || newState == UNINITIALIZED) {
        // Component-level teardown: the renderer, crypto objects, surface
        // and formats belong to the previous configuration.
        delete mSoftRenderer;
        mSoftRenderer = NULL;

        if ( mCrypto != NULL ) {
            ALOGV("setState: ~mCrypto: %p (%d)",
                    mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
        }
        mCrypto.clear();
        mDescrambler.clear();
        handleSetSurface(NULL);

        mInputFormat.clear();
        mOutputFormat.clear();
        // Drop configuration-scoped flags and sticky error state.
        mFlags &= ~kFlagOutputFormatChanged;
        mFlags &= ~kFlagOutputBuffersChanged;
        mFlags &= ~kFlagStickyError;
        mFlags &= ~kFlagIsEncoder;
        mFlags &= ~kFlagIsAsync;
        mStickyError = OK;

        mActivityNotify.clear();
        mCallback.clear();
        mErrorLog.clear();
    }

    // The set_state_early feature flag controls whether mState is updated
    // before or after the UNINITIALIZED cleanup below; exactly one of the
    // two guarded assignments executes.
    if (android::media::codec::provider_->set_state_early()) {
        mState = newState;
    }

    if (newState == UNINITIALIZED) {
        // return any straggling buffers, e.g. if we got here on an error
        returnBuffersToCodec();

        // The component is gone, mediaserver's probably back up already
        // but should definitely be back up should we try to instantiate
        // another component.. and the cycle continues.
        mFlags &= ~kFlagSawMediaServerDie;
    }

    if (!android::media::codec::provider_->set_state_early()) {
        mState = newState;
    }

    if (mBatteryChecker != nullptr) {
        mBatteryChecker->setExecuting(isExecuting());
    }

    cancelPendingDequeueOperations();
}
6087
returnBuffersToCodec(bool isReclaim)6088 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
6089 returnBuffersToCodecOnPort(kPortIndexInput, isReclaim);
6090 returnBuffersToCodecOnPort(kPortIndexOutput, isReclaim);
6091 }
6092
returnBuffersToCodecOnPort(int32_t portIndex,bool isReclaim)6093 void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex, bool isReclaim) {
6094 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6095 Mutex::Autolock al(mBufferLock);
6096
6097 if (portIndex == kPortIndexInput) {
6098 mLeftover.clear();
6099 }
6100 for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
6101 BufferInfo *info = &mPortBuffers[portIndex][i];
6102
6103 if (info->mData != nullptr) {
6104 sp<MediaCodecBuffer> buffer = info->mData;
6105 if (isReclaim && info->mOwnedByClient) {
6106 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
6107 portIndex, i);
6108 } else {
6109 info->mOwnedByClient = false;
6110 info->mData.clear();
6111 }
6112 mBufferChannel->discardBuffer(buffer);
6113 }
6114 }
6115
6116 mAvailPortBuffers[portIndex].clear();
6117 }
6118
updateBuffers(int32_t portIndex,const sp<AMessage> & msg)6119 size_t MediaCodec::updateBuffers(
6120 int32_t portIndex, const sp<AMessage> &msg) {
6121 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6122 size_t index;
6123 CHECK(msg->findSize("index", &index));
6124 sp<RefBase> obj;
6125 CHECK(msg->findObject("buffer", &obj));
6126 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
6127
6128 {
6129 Mutex::Autolock al(mBufferLock);
6130 if (mPortBuffers[portIndex].size() <= index) {
6131 mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
6132 }
6133 mPortBuffers[portIndex][index].mData = buffer;
6134 }
6135 mAvailPortBuffers[portIndex].push_back(index);
6136
6137 return index;
6138 }
6139
// Handles a (possibly deferred) queueInputBuffer/queueSecureInputBuffer
// request. Supports three input flavors: legacy byte-buffer (offset/size on
// the message), block-model C2Buffer, and block-model HidlMemory; secure
// content is routed through the crypto/descrambler path, possibly
// asynchronously via mCryptoAsync.
status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
    // Required fields on every queue request.
    size_t index;
    size_t offset = 0;
    size_t size = 0;
    int64_t timeUs = 0;
    uint32_t flags = 0;
    CHECK(msg->findSize("index", &index));
    CHECK(msg->findInt64("timeUs", &timeUs));
    CHECK(msg->findInt32("flags", (int32_t *)&flags));
    // Block-model clients attach either a C2Buffer or a HidlMemory region;
    // the legacy path carries only an offset.
    std::shared_ptr<C2Buffer> c2Buffer;
    sp<hardware::HidlMemory> memory;
    sp<RefBase> obj;
    if (msg->findObject("c2buffer", &obj)) {
        CHECK(obj);
        c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
    } else if (msg->findObject("memory", &obj)) {
        CHECK(obj);
        memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
        CHECK(msg->findSize("offset", &offset));
    } else {
        CHECK(msg->findSize("offset", &offset));
    }
    const CryptoPlugin::SubSample *subSamples;
    size_t numSubSamples = 0;
    const uint8_t *key = NULL;
    const uint8_t *iv = NULL;
    CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;

    // We allow the simpler queueInputBuffer API to be used even in
    // secure mode, by fabricating a single unencrypted subSample.
    CryptoPlugin::SubSample ss;
    CryptoPlugin::Pattern pattern;

    if (msg->findSize("size", &size)) {
        if (hasCryptoOrDescrambler()) {
            // Plain queueInputBuffer() on a secure codec: fabricate one
            // all-clear subsample covering the whole range.
            ss.mNumBytesOfClearData = size;
            ss.mNumBytesOfEncryptedData = 0;

            subSamples = &ss;
            numSubSamples = 1;
            pattern.mEncryptBlocks = 0;
            pattern.mSkipBlocks = 0;
        }
    } else if (!c2Buffer) {
        // queueSecureInputBuffer() path: no "size" field, so the total size
        // is derived from the crypto metadata below.
        if (!hasCryptoOrDescrambler()) {
            ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
                    mComponentName.c_str());
            mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
            return -EINVAL;
        }
        sp<RefBase> obj;
        if (msg->findObject("cryptoInfos", &obj)) {
            // Multi-access-unit path: per-AU crypto data travels in the
            // "cryptoInfos" object; only the total size is read here.
            CHECK(msg->findSize("ssize", &size));
        } else {
            CHECK(msg->findPointer("subSamples", (void **)&subSamples));
            CHECK(msg->findSize("numSubSamples", &numSubSamples));
            CHECK(msg->findPointer("key", (void **)&key));
            CHECK(msg->findPointer("iv", (void **)&iv));
            CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
            CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));

            int32_t tmp;
            CHECK(msg->findInt32("mode", &tmp));

            mode = (CryptoPlugin::Mode)tmp;
            // Total payload size is the sum of clear + encrypted bytes over
            // all subsamples.
            size = 0;
            for (size_t i = 0; i < numSubSamples; ++i) {
                size += subSamples[i].mNumBytesOfClearData;
                size += subSamples[i].mNumBytesOfEncryptedData;
            }
        }
    }

    if (index >= mPortBuffers[kPortIndexInput].size()) {
        // NOTE(review): this message passes the buffer count where "index="
        // is printed — the offending index is not logged; confirm intent.
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "index out of range (index=%zu)", mPortBuffers[kPortIndexInput].size()));
        return -ERANGE;
    }

    BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
    sp<MediaCodecBuffer> buffer = info->mData;
    if (buffer == nullptr) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "Fatal error: failed to fetch buffer for index %zu", index));
        return -EACCES;
    }
    if (!info->mOwnedByClient) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "client does not own the buffer #%zu", index));
        return -EACCES;
    }
    // Stamps timestamp/flag-derived metadata onto the buffer and updates
    // tunnel-peek bookkeeping for tunneled playback.
    auto setInputBufferParams = [this, &msg, &buffer]
        (int64_t timeUs, uint32_t flags = 0) -> status_t {
        status_t err = OK;
        sp<RefBase> obj;
        if (msg->findObject("accessUnitInfo", &obj)) {
            buffer->meta()->setObject("accessUnitInfo", obj);
        }
        buffer->meta()->setInt64("timeUs", timeUs);
        if (flags & BUFFER_FLAG_EOS) {
            buffer->meta()->setInt32("eos", true);
        }

        if (flags & BUFFER_FLAG_CODECCONFIG) {
            buffer->meta()->setInt32("csd", true);
        }
        bool isBufferDecodeOnly = ((flags & BUFFER_FLAG_DECODE_ONLY) != 0);
        if (isBufferDecodeOnly) {
            buffer->meta()->setInt32("decode-only", true);
        }
        // Only displayable frames (not decode-only, not codec config)
        // participate in tunnel-peek state transitions.
        if (mTunneled && !isBufferDecodeOnly && !(flags & BUFFER_FLAG_CODECCONFIG)) {
            TunnelPeekState previousState = mTunnelPeekState;
            switch(mTunnelPeekState){
                case TunnelPeekState::kEnabledNoBuffer:
                    buffer->meta()->setInt32("tunnel-first-frame", 1);
                    mTunnelPeekState = TunnelPeekState::kEnabledQueued;
                    ALOGV("TunnelPeekState: %s -> %s",
                          asString(previousState),
                          asString(mTunnelPeekState));
                    break;
                case TunnelPeekState::kDisabledNoBuffer:
                    buffer->meta()->setInt32("tunnel-first-frame", 1);
                    mTunnelPeekState = TunnelPeekState::kDisabledQueued;
                    ALOGV("TunnelPeekState: %s -> %s",
                          asString(previousState),
                          asString(mTunnelPeekState));
                    break;
                default:
                    break;
            }
        }
        return err;
    };
    // Builds the AMessage consumed by CryptoAsync (and by onCryptoError for
    // error details), carrying the buffer and all decryption parameters.
    auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
        // set decrypt Action
        cryptoInfo->setInt32("action", action);
        cryptoInfo->setObject("buffer", buffer);
        cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
        sp<RefBase> obj;
        if (msg->findObject("cryptoInfos", &obj)) {
            // this object is a standalone object when created (no copy required here)
            buffer->meta()->setObject("cryptoInfos", obj);
        } else {
            // Keys and IVs are 16 bytes when present.
            size_t key_len = (key != nullptr)? 16 : 0;
            size_t iv_len = (iv != nullptr)? 16 : 0;
            sp<ABuffer> shared_key;
            sp<ABuffer> shared_iv;
            if (key_len > 0) {
                shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
            }
            if (iv_len > 0) {
                shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
            }
            // Copy the caller's subsample array so it remains valid after
            // this request's stack frame is gone.
            sp<ABuffer> subSamples_buffer =
                new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
            CryptoPlugin::SubSample * samples =
                (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
            for (int s = 0 ; s < numSubSamples ; s++) {
                samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
                samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
            }
            cryptoInfo->setBuffer("key", shared_key);
            cryptoInfo->setBuffer("iv", shared_iv);
            cryptoInfo->setInt32("mode", (int)mode);
            cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
            cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
            cryptoInfo->setBuffer("subSamples", subSamples_buffer);
            cryptoInfo->setSize("numSubSamples", numSubSamples);
        }
    };
    if (c2Buffer || memory) {
        // Block model: attach the client-provided backing memory to the
        // codec-owned buffer before queueing.
        sp<AMessage> tunings = NULL;
        if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
            onSetParameters(tunings);
        }
        status_t err = OK;
        if (c2Buffer) {
            err = mBufferChannel->attachBuffer(c2Buffer, buffer);
            // to prevent unnecessary copy for single info case.
            if (msg->findObject("accessUnitInfo", &obj)) {
                sp<BufferInfosWrapper> infos{(BufferInfosWrapper*)(obj.get())};
                if (infos->value.size() == 1) {
                    msg->removeEntryByName("accessUnitInfo");
                }
            }
        } else if (memory) {
            AString errorDetailMsg;
            if (msg->findObject("cryptoInfos", &obj)) {
                buffer->meta()->setSize("ssize", size);
                buffer->meta()->setObject("cryptoInfos", obj);
                if (msg->findObject("accessUnitInfo", &obj)) {
                    // the reference will be same here and
                    // setBufferParams
                    buffer->meta()->setObject("accessUnitInfo", obj);
                }
                err = mBufferChannel->attachEncryptedBuffers(
                    memory,
                    offset,
                    buffer,
                    (mFlags & kFlagIsSecure),
                    &errorDetailMsg);
            } else {
                err = mBufferChannel->attachEncryptedBuffer(
                        memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
                        offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
            }
            if (err != OK && hasCryptoOrDescrambler()
                    && (mFlags & kFlagUseCryptoAsync)) {
                // create error detail
                sp<AMessage> cryptoErrorInfo = new AMessage();
                buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
                cryptoErrorInfo->setInt32("err", err);
                cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
                cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
                onCryptoError(cryptoErrorInfo);
                // we want cryptoError to be in the callback
                // but Codec IllegalStateException to be triggered.
                err = INVALID_OPERATION;
            }
        } else {
            mErrorLog.log(LOG_TAG, "Fatal error: invalid queue request without a buffer");
            err = UNKNOWN_ERROR;
        }
        if (err == OK && !buffer->asC2Buffer()
                && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
            // The codec buffer is smaller than the client's linear block:
            // wrap the remainder in a new C2Buffer and queue it against the
            // next available input buffer (see handleLeftover()).
            C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
            if (block.size() > buffer->size()) {
                C2ConstLinearBlock leftover = block.subBlock(
                        block.offset() + buffer->size(), block.size() - buffer->size());
                sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
                    new WrapperObject<std::shared_ptr<C2Buffer>>{
                        C2Buffer::CreateLinearBuffer(leftover)}};
                msg->setObject("c2buffer", obj);
                mLeftover.push_front(msg);
                // Not sending EOS if we have leftovers
                flags &= ~BUFFER_FLAG_EOS;
            }
        }
        // After attach, range information comes from the attached buffer.
        offset = buffer->offset();
        size = buffer->size();
        if (err != OK) {
            ALOGE("block model buffer attach failed: err = %s (%d)",
                    StrMediaError(err).c_str(), err);
            return err;
        }
    }

    if (offset + size > buffer->capacity()) {
        mErrorLog.log(LOG_TAG, base::StringPrintf(
                "buffer offset and size goes beyond the capacity: "
                "offset=%zu, size=%zu, cap=%zu",
                offset, size, buffer->capacity()));
        return -EINVAL;
    }
    buffer->setRange(offset, size);
    status_t err = OK;
    err = setInputBufferParams(timeUs, flags);
    if (err != OK) {
        return -EINVAL;
    }

    // Track the largest input size the app has used (API usage metrics).
    int32_t usedMaxInputSize = mApiUsageMetrics.inputBufferSize.usedMax;
    mApiUsageMetrics.inputBufferSize.usedMax = size > usedMaxInputSize ? size : usedMaxInputSize;

    if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
        // Legacy secure path: decrypt (possibly asynchronously) and queue.
        AString *errorDetailMsg;
        CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
        // Notify mCrypto of video resolution changes
        if (mTunneled && mCrypto != NULL) {
            int32_t width, height;
            if (mInputFormat->findInt32("width", &width) &&
                mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
                if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
                    mTunneledInputWidth = width;
                    mTunneledInputHeight = height;
                    mCrypto->notifyResolution(width, height);
                }
            }
        }
        if (mCryptoAsync) {
            // prepare a message and enqueue
            sp<AMessage> cryptoInfo = new AMessage();
            buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
            mCryptoAsync->decrypt(cryptoInfo);
        } else if (msg->findObject("cryptoInfos", &obj)) {
            buffer->meta()->setObject("cryptoInfos", obj);
            err = mBufferChannel->queueSecureInputBuffers(
                    buffer,
                    (mFlags & kFlagIsSecure),
                    errorDetailMsg);
        } else {
            err = mBufferChannel->queueSecureInputBuffer(
                    buffer,
                    (mFlags & kFlagIsSecure),
                    key,
                    iv,
                    mode,
                    pattern,
                    subSamples,
                    numSubSamples,
                    errorDetailMsg);
        }
        if (err != OK) {
            mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
            ALOGW("Log queueSecureInputBuffer error: %d", err);
        }
    } else {
        err = mBufferChannel->queueInputBuffer(buffer);
        if (err != OK) {
            mediametrics_setInt32(mMetricsHandle, kCodecQueueInputBufferError, err);
            ALOGW("Log queueInputBuffer error: %d", err);
        }
    }

    if (err == OK) {
        if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
            mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
        }

        // synchronization boundary for getBufferAndFormat
        Mutex::Autolock al(mBufferLock);
        // Ownership returns to the codec on a successful queue.
        info->mOwnedByClient = false;
        info->mData.clear();

        statsBufferSent(timeUs, buffer);
    }

    return err;
}
6469
handleLeftover(size_t index)6470 status_t MediaCodec::handleLeftover(size_t index) {
6471 if (mLeftover.empty()) {
6472 return OK;
6473 }
6474 sp<AMessage> msg = mLeftover.front();
6475 mLeftover.pop_front();
6476 msg->setSize("index", index);
6477 return onQueueInputBuffer(msg);
6478 }
6479
6480 template<typename T>
CreateFramesRenderedMessageInternal(const std::list<T> & done,sp<AMessage> & msg)6481 static size_t CreateFramesRenderedMessageInternal(const std::list<T> &done, sp<AMessage> &msg) {
6482 size_t index = 0;
6483 for (typename std::list<T>::const_iterator it = done.cbegin(); it != done.cend(); ++it) {
6484 if (it->getRenderTimeNs() < 0) {
6485 continue; // dropped frame from tracking
6486 }
6487 msg->setInt64(base::StringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
6488 msg->setInt64(base::StringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
6489 ++index;
6490 }
6491 return index;
6492 }
6493
//static
// Public wrapper for the RenderedFrameInfo flavor of render tracking; see
// CreateFramesRenderedMessageInternal for the message layout.
size_t MediaCodec::CreateFramesRenderedMessage(
        const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg) {
    return CreateFramesRenderedMessageInternal(done, msg);
}
6499
//static
// Public wrapper for the FrameRenderTracker::Info flavor of render tracking;
// see CreateFramesRenderedMessageInternal for the message layout.
size_t MediaCodec::CreateFramesRenderedMessage(
        const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
    return CreateFramesRenderedMessageInternal(done, msg);
}
6505
onReleaseOutputBuffer(const sp<AMessage> & msg)6506 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
6507 size_t index;
6508 CHECK(msg->findSize("index", &index));
6509
6510 int32_t render;
6511 if (!msg->findInt32("render", &render)) {
6512 render = 0;
6513 }
6514
6515 if (!isExecuting()) {
6516 mErrorLog.log(LOG_TAG, base::StringPrintf(
6517 "releaseOutputBuffer() is valid at Executing states; currently %s",
6518 apiStateString().c_str()));
6519 return -EINVAL;
6520 }
6521
6522 if (index >= mPortBuffers[kPortIndexOutput].size()) {
6523 mErrorLog.log(LOG_TAG, base::StringPrintf(
6524 "index out of range (index=%zu)", mPortBuffers[kPortIndexOutput].size()));
6525 return -ERANGE;
6526 }
6527
6528 BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
6529
6530 if (!info->mOwnedByClient) {
6531 mErrorLog.log(LOG_TAG, base::StringPrintf(
6532 "client does not own the buffer #%zu", index));
6533 return -EACCES;
6534 }
6535 if (info->mData == nullptr) {
6536 mErrorLog.log(LOG_TAG, base::StringPrintf(
6537 "Fatal error: null buffer for index %zu", index));
6538 return -EACCES;
6539 }
6540
6541 // synchronization boundary for getBufferAndFormat
6542 sp<MediaCodecBuffer> buffer;
6543 {
6544 Mutex::Autolock al(mBufferLock);
6545 info->mOwnedByClient = false;
6546 buffer = info->mData;
6547 info->mData.clear();
6548 }
6549
6550 if (render && buffer->size() != 0) {
6551 int64_t mediaTimeUs = INT64_MIN;
6552 buffer->meta()->findInt64("timeUs", &mediaTimeUs);
6553
6554 bool noRenderTime = false;
6555 int64_t renderTimeNs = 0;
6556 if (!msg->findInt64("timestampNs", &renderTimeNs)) {
6557 // use media timestamp if client did not request a specific render timestamp
6558 ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
6559 renderTimeNs = mediaTimeUs * 1000;
6560 noRenderTime = true;
6561 }
6562
6563 if (mSoftRenderer != NULL) {
6564 std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
6565 buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
6566 mPortBuffers[kPortIndexOutput].size(), buffer->format());
6567
6568 // if we are running, notify rendered frames
6569 if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
6570 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
6571 sp<AMessage> data = new AMessage;
6572 if (CreateFramesRenderedMessage(doneFrames, data)) {
6573 notify->setMessage("data", data);
6574 notify->post();
6575 }
6576 }
6577 }
6578
6579 // If rendering to the screen, then schedule a time in the future to poll to see if this
6580 // frame was ever rendered to seed onFrameRendered callbacks.
6581 if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
6582 if (mediaTimeUs != INT64_MIN) {
6583 noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
6584 : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
6585 renderTimeNs);
6586 }
6587 // can't initialize this in the constructor because the Looper parent class needs to be
6588 // initialized first
6589 if (mMsgPollForRenderedBuffers == nullptr) {
6590 mMsgPollForRenderedBuffers = new AMessage(kWhatPollForRenderedBuffers, this);
6591 }
6592 // Schedule the poll to occur 100ms after the render time - should be safe for
6593 // determining if the frame was ever rendered. If no render time was specified, the
6594 // presentation timestamp is used instead, which almost certainly occurs in the past,
6595 // since it's almost always a zero-based offset from the start of the stream. In these
6596 // scenarios, we expect the frame to be rendered with no delay.
6597 int64_t nowUs = ALooper::GetNowUs();
6598 int64_t renderTimeUs = renderTimeNs / 1000;
6599 int64_t delayUs = renderTimeUs < nowUs ? 0 : renderTimeUs - nowUs;
6600 delayUs += 100 * 1000; /* 100ms in microseconds */
6601 status_t err =
6602 mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
6603 delayUs);
6604 if (err != OK) {
6605 ALOGE("unexpected failure to post pollForRenderedBuffers: %d", err);
6606 }
6607 }
6608 status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
6609
6610 if (err == NO_INIT) {
6611 mErrorLog.log(LOG_TAG, "rendering to non-initialized(obsolete) surface");
6612 return err;
6613 }
6614 if (err != OK) {
6615 ALOGI("rendring output error %d", err);
6616 }
6617 } else {
6618 if (mIsSurfaceToDisplay && buffer->size() != 0) {
6619 int64_t mediaTimeUs = INT64_MIN;
6620 if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
6621 mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
6622 }
6623 }
6624 mBufferChannel->discardBuffer(buffer);
6625 }
6626
6627 return OK;
6628 }
6629
peekNextPortBuffer(int32_t portIndex)6630 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
6631 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6632
6633 std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
6634
6635 if (availBuffers->empty()) {
6636 return nullptr;
6637 }
6638
6639 return &mPortBuffers[portIndex][*availBuffers->begin()];
6640 }
6641
// Removes the next available buffer on |portIndex| from the free list and
// transfers its ownership to the client, copying image-data/crop metadata from
// the buffer's format onto its meta. Returns the buffer index, or -EAGAIN when
// no buffer is currently available.
ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    BufferInfo *info = peekNextPortBuffer(portIndex);
    if (!info) {
        return -EAGAIN;
    }

    std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
    size_t index = *availBuffers->begin();
    // peekNextPortBuffer() must have returned the head of the free list.
    CHECK_EQ(info, &mPortBuffers[portIndex][index]);
    availBuffers->erase(availBuffers->begin());

    {
        // mBufferLock guards the ownership handoff (synchronization boundary
        // for getBufferAndFormat).
        Mutex::Autolock al(mBufferLock);
        CHECK(!info->mOwnedByClient);
        info->mOwnedByClient = true;

        // set image-data
        if (info->mData->format() != NULL) {
            sp<ABuffer> imageData;
            if (info->mData->format()->findBuffer("image-data", &imageData)) {
                info->mData->meta()->setBuffer("image-data", imageData);
            }
            int32_t left, top, right, bottom;
            if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
                info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
            }
        }
    }

    return index;
}
6675
getOrCreateDetachedSurface()6676 sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
6677 if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
6678 return nullptr;
6679 }
6680
6681 if (!mDetachedSurface) {
6682 uint64_t usage = 0;
6683 if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
6684 // TODO: should we use a/the default consumer usage?
6685 usage = 0;
6686 }
6687 mDetachedSurface.reset(new ReleaseSurface(usage));
6688 }
6689
6690 return mDetachedSurface->getSurface();
6691 }
6692
// Connects the codec to |surface| as a buffer producer and assigns it a fresh
// generation number (returned via |*generation|). Returns ALREADY_EXISTS only
// when |surface| is the surface we are already connected to; any internal
// ALREADY_EXISTS from the connect calls is mapped to BAD_VALUE.
status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
    status_t err = OK;
    if (surface != NULL) {
        uint64_t oldId, newId;
        // Compare unique IDs to detect a reconnect to the identical surface.
        if (mSurface != NULL
                && surface->getUniqueId(&newId) == NO_ERROR
                && mSurface->getUniqueId(&oldId) == NO_ERROR
                && newId == oldId) {
            ALOGI("[%s] connecting to the same surface. Nothing to do.", mComponentName.c_str());
            return ALREADY_EXISTS;
        }

        // in case we don't connect, ensure that we don't signal the surface is
        // connected to the screen
        mIsSurfaceToDisplay = false;

        err = nativeWindowConnect(surface.get(), "connectToSurface");
        if (err == OK) {
            // Require a fresh set of buffers after each connect by using a unique generation
            // number. Rely on the fact that max supported process id by Linux is 2^22.
            // PID is never 0 so we don't have to worry that we use the default generation of 0.
            // TODO: come up with a unique scheme if other producers also set the generation number.
            static uint32_t sSurfaceGeneration = 0;
            // Generation = pid in the high bits | 10-bit wrapping counter.
            *generation = (getpid() << 10) | (++sSurfaceGeneration & ((1 << 10) - 1));
            surface->setGenerationNumber(*generation);
            ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), *generation);

            // HACK: clear any free buffers. Remove when connect will automatically do this.
            // This is needed as the consumer may be holding onto stale frames that it can reattach
            // to this surface after disconnect/connect, and those free frames would inherit the new
            // generation number. Disconnecting after setting a unique generation prevents this.
            nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
            sp<IProducerListener> listener =
                    new OnBufferReleasedListener(*generation, mBufferChannel);
            err = surfaceConnectWithListener(
                    surface, listener, "connectToSurface(reconnect-with-listener)");
        }

        if (err != OK) {
            // Invalidate the generation so callers don't store a stale value.
            *generation = 0;
            ALOGE("nativeWindowConnect/surfaceConnectWithListener returned an error: %s (%d)",
                    strerror(-err), err);
        } else {
            if (!mAllowFrameDroppingBySurface) {
                disableLegacyBufferDropPostQ(surface);
            }
            // keep track whether or not the buffers of the connected surface go to the screen
            int result = 0;
            surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
            mIsSurfaceToDisplay = result != 0;
        }
    }
    // do not return ALREADY_EXISTS unless surfaces are the same
    return err == ALREADY_EXISTS ? BAD_VALUE : err;
}
6748
disconnectFromSurface()6749 status_t MediaCodec::disconnectFromSurface() {
6750 status_t err = OK;
6751 if (mSurface != NULL) {
6752 // Resetting generation is not technically needed, but there is no need to keep it either
6753 mSurface->setGenerationNumber(0);
6754 err = nativeWindowDisconnect(mSurface.get(), "disconnectFromSurface");
6755 if (err != OK) {
6756 ALOGW("nativeWindowDisconnect returned an error: %s (%d)", strerror(-err), err);
6757 }
6758 // assume disconnected even on error
6759 mSurface.clear();
6760 mSurfaceGeneration = 0;
6761 mIsSurfaceToDisplay = false;
6762 }
6763 return err;
6764 }
6765
// Core surface-switching logic: connects to the new surface, optionally
// informs the codec (|callCodec|), then disconnects the previous surface only
// after the new one succeeded. |onShutDown| suppresses creating a new
// software renderer during release(). Reconnecting to the same surface is a
// no-op returning OK.
status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
    uint32_t generation;
    status_t err = OK;
    if (surface != nullptr) {
        err = connectToSurface(surface, &generation);
        if (err == ALREADY_EXISTS) {
            // reconnecting to same surface
            return OK;
        }

        if (err == OK && callCodec) {
            if (mFlags & kFlagUsesSoftwareRenderer) {
                // Flush a blank frame through the old surface if configured.
                if (mSoftRenderer != NULL
                        && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
                    pushBlankBuffersToNativeWindow(mSurface.get());
                }
                // do not create a new software renderer on shutdown (release)
                // as it will not be used anyway
                if (!onShutDown) {
                    surface->setDequeueTimeout(-1);
                    mSoftRenderer = new SoftwareRenderer(surface);
                    // TODO: check if this was successful
                }
            } else {
                err = mCodec->setSurface(surface, generation);
            }

            mReliabilityContextMetrics.setOutputSurfaceCount++;
        }
    }

    if (err == OK) {
        // Tear down the old surface only after the new one is connected.
        if (mSurface != NULL) {
            (void)disconnectFromSurface();
        }

        if (surface != NULL) {
            mSurface = surface;
            mSurfaceGeneration = generation;
        }
    }

    return err;
}
6810
handleSetSurface(const sp<Surface> & surface)6811 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
6812 if (android::media::codec::provider_->null_output_surface_support()) {
6813 return handleSetSurface(surface, false /* callCodec */);
6814 }
6815
6816 status_t err = OK;
6817 if (mSurface != NULL) {
6818 (void)disconnectFromSurface();
6819 }
6820 if (surface != NULL) {
6821 uint32_t generation;
6822 err = connectToSurface(surface, &generation);
6823 if (err == OK) {
6824 mSurface = surface;
6825 mSurfaceGeneration = generation;
6826 }
6827 }
6828 return err;
6829 }
6830
onInputBufferAvailable()6831 void MediaCodec::onInputBufferAvailable() {
6832 int32_t index;
6833 while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
6834 sp<AMessage> msg = mCallback->dup();
6835 msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
6836 msg->setInt32("index", index);
6837 msg->post();
6838 }
6839 }
6840
// Drains all available output buffers, posting one callback message per
// buffer. Decode-only buffers are discarded internally; buffers carrying
// access-unit info use the large-frame callback instead.
void MediaCodec::onOutputBufferAvailable() {
    int32_t index;
    while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
        if (discardDecodeOnlyOutputBuffer(index)) {
            continue;
        }
        sp<AMessage> msg = mCallback->dup();
        const sp<MediaCodecBuffer> &buffer =
            mPortBuffers[kPortIndexOutput][index].mData;
        int32_t outputCallbackID = CB_OUTPUT_AVAILABLE;
        sp<RefBase> accessUnitInfoObj;
        msg->setInt32("index", index);
        msg->setSize("offset", buffer->offset());
        msg->setSize("size", buffer->size());

        int64_t timeUs;
        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

        msg->setInt64("timeUs", timeUs);

        int32_t flags;
        CHECK(buffer->meta()->findInt32("flags", &flags));

        msg->setInt32("flags", flags);
        buffer->meta()->findObject("accessUnitInfo", &accessUnitInfoObj);
        if (accessUnitInfoObj) {
            // Large-frame output: switch to the large-frame callback and
            // propagate EOS onto the last access unit's flags.
            outputCallbackID = CB_LARGE_FRAME_OUTPUT_AVAILABLE;
            msg->setObject("accessUnitInfo", accessUnitInfoObj);
            sp<BufferInfosWrapper> auInfo(
                    (decltype(auInfo.get()))accessUnitInfoObj.get());
            auInfo->value.back().mFlags |= flags & BUFFER_FLAG_END_OF_STREAM;
        }
        msg->setInt32("callbackID", outputCallbackID);

        statsBufferReceived(timeUs, buffer);

        msg->post();
    }
}
onCryptoError(const sp<AMessage> & msg)6880 void MediaCodec::onCryptoError(const sp<AMessage> & msg) {
6881 if (mCallback != NULL) {
6882 sp<AMessage> cb_msg = mCallback->dup();
6883 cb_msg->setInt32("callbackID", CB_CRYPTO_ERROR);
6884 cb_msg->extend(msg);
6885 cb_msg->post();
6886 }
6887 }
onError(status_t err,int32_t actionCode,const char * detail)6888 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
6889 if (mCallback != NULL) {
6890 sp<AMessage> msg = mCallback->dup();
6891 msg->setInt32("callbackID", CB_ERROR);
6892 msg->setInt32("err", err);
6893 msg->setInt32("actionCode", actionCode);
6894
6895 if (detail != NULL) {
6896 msg->setString("detail", detail);
6897 }
6898
6899 msg->post();
6900 }
6901 }
6902
onOutputFormatChanged()6903 void MediaCodec::onOutputFormatChanged() {
6904 if (mCallback != NULL) {
6905 sp<AMessage> msg = mCallback->dup();
6906 msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
6907 msg->setMessage("format", mOutputFormat);
6908 msg->post();
6909 }
6910 }
6911
postActivityNotificationIfPossible()6912 void MediaCodec::postActivityNotificationIfPossible() {
6913 if (mActivityNotify == NULL) {
6914 return;
6915 }
6916
6917 bool isErrorOrOutputChanged =
6918 (mFlags & (kFlagStickyError
6919 | kFlagOutputBuffersChanged
6920 | kFlagOutputFormatChanged));
6921
6922 if (isErrorOrOutputChanged
6923 || !mAvailPortBuffers[kPortIndexInput].empty()
6924 || !mAvailPortBuffers[kPortIndexOutput].empty()) {
6925 mActivityNotify->setInt32("input-buffers",
6926 mAvailPortBuffers[kPortIndexInput].size());
6927
6928 if (isErrorOrOutputChanged) {
6929 // we want consumer to dequeue as many times as it can
6930 mActivityNotify->setInt32("output-buffers", INT32_MAX);
6931 } else {
6932 mActivityNotify->setInt32("output-buffers",
6933 mAvailPortBuffers[kPortIndexOutput].size());
6934 }
6935 mActivityNotify->post();
6936 mActivityNotify.clear();
6937 }
6938 }
6939
setParameters(const sp<AMessage> & params)6940 status_t MediaCodec::setParameters(const sp<AMessage> ¶ms) {
6941 sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
6942 msg->setMessage("params", params);
6943
6944 sp<AMessage> response;
6945 return PostAndAwaitResponse(msg, &response);
6946 }
6947
// Applies runtime parameters on the looper thread: updates low-latency,
// importance, and tunnel-peek bookkeeping, then forwards the (mapped)
// parameters to the codec. Fails with NO_INIT before initialization.
status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
    if (mState == UNINITIALIZED || mState == INITIALIZING) {
        return NO_INIT;
    }
    updateLowLatency(params);
    updateCodecImportance(params);
    // Translate SDK-level keys to component-level keys in place.
    mapFormat(mComponentName, params, nullptr, false);
    updateTunnelPeek(params);
    mCodec->signalSetParameters(params);

    return OK;
}
6960
// Stores codec-specific data from |buffer| into mOutputFormat: for AVC the
// two NAL units (SPS and PPS) become "csd-0"/"csd-1"; for all other mime
// types the whole buffer is stored as "csd-0". Returns ERROR_MALFORMED if an
// AVC config buffer does not contain exactly two NAL units.
status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
        const sp<MediaCodecBuffer> &buffer) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
        // Codec specific data should be SPS and PPS in a single buffer,
        // each prefixed by a startcode (0x00 0x00 0x00 0x01).
        // We separate the two and put them into the output format
        // under the keys "csd-0" and "csd-1".

        unsigned csdIndex = 0;

        const uint8_t *data = buffer->data();
        size_t size = buffer->size();

        const uint8_t *nalStart;
        size_t nalSize;
        while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
            // Re-prefix each NAL unit with a 4-byte startcode before storing.
            sp<ABuffer> csd = new ABuffer(nalSize + 4);
            memcpy(csd->data(), "\x00\x00\x00\x01", 4);
            memcpy(csd->data() + 4, nalStart, nalSize);

            mOutputFormat->setBuffer(
                    base::StringPrintf("csd-%u", csdIndex).c_str(), csd);

            ++csdIndex;
        }

        if (csdIndex != 2) {
            mErrorLog.log(LOG_TAG, base::StringPrintf(
                    "codec config data contains %u NAL units; expected 2.", csdIndex));
            return ERROR_MALFORMED;
        }
    } else {
        // For everything else we just stash the codec specific data into
        // the output format as a single piece of csd under "csd-0".
        sp<ABuffer> csd = new ABuffer(buffer->size());
        memcpy(csd->data(), buffer->data(), buffer->size());
        csd->setRange(0, buffer->size());
        mOutputFormat->setBuffer("csd-0", csd);
    }

    return OK;
}
7006
postPendingRepliesAndDeferredMessages(std::string origin,status_t err)7007 void MediaCodec::postPendingRepliesAndDeferredMessages(
7008 std::string origin, status_t err /* = OK */) {
7009 sp<AMessage> response{new AMessage};
7010 if (err != OK) {
7011 response->setInt32("err", err);
7012 }
7013 postPendingRepliesAndDeferredMessages(origin, response);
7014 }
7015
postPendingRepliesAndDeferredMessages(std::string origin,const sp<AMessage> & response)7016 void MediaCodec::postPendingRepliesAndDeferredMessages(
7017 std::string origin, const sp<AMessage> &response) {
7018 LOG_ALWAYS_FATAL_IF(
7019 !mReplyID,
7020 "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
7021 origin.c_str(),
7022 mLastReplyOrigin.c_str());
7023 mLastReplyOrigin = origin;
7024 response->postReply(mReplyID);
7025 mReplyID.clear();
7026 ALOGV_IF(!mDeferredMessages.empty(),
7027 "posting %zu deferred messages", mDeferredMessages.size());
7028 for (sp<AMessage> msg : mDeferredMessages) {
7029 msg->post();
7030 }
7031 mDeferredMessages.clear();
7032 }
7033
// Returns a human-readable phrase describing the current API-visible state,
// used to compose error messages such as
// "start() is valid at Configured state; currently <phrase>".
std::string MediaCodec::apiStateString() {
    const char *rval = NULL;
    char rawbuffer[16]; // room for "%d"

    switch (mState) {
        case UNINITIALIZED:
            rval = (mFlags & kFlagStickyError) ? "at Error state" : "at Released state";
            break;
        case INITIALIZING: rval = "while constructing"; break;
        case INITIALIZED: rval = "at Uninitialized state"; break;
        case CONFIGURING: rval = "during configure()"; break;
        case CONFIGURED: rval = "at Configured state"; break;
        case STARTING: rval = "during start()"; break;
        case STARTED: rval = "at Running state"; break;
        case FLUSHING: rval = "during flush()"; break;
        case FLUSHED: rval = "at Flushed state"; break;
        case STOPPING: rval = "during stop()"; break;
        case RELEASING: rval = "during release()"; break;
        default:
            // Unknown state: format the numeric value instead.
            snprintf(rawbuffer, sizeof(rawbuffer), "at %d", mState);
            rval = rawbuffer;
            break;
    }
    // rval may point into rawbuffer; the returned std::string copies it
    // before the local buffer goes out of scope.
    return rval;
}
7059
stateString(State state)7060 std::string MediaCodec::stateString(State state) {
7061 const char *rval = NULL;
7062 char rawbuffer[16]; // room for "%d"
7063
7064 switch (state) {
7065 case UNINITIALIZED: rval = "UNINITIALIZED"; break;
7066 case INITIALIZING: rval = "INITIALIZING"; break;
7067 case INITIALIZED: rval = "INITIALIZED"; break;
7068 case CONFIGURING: rval = "CONFIGURING"; break;
7069 case CONFIGURED: rval = "CONFIGURED"; break;
7070 case STARTING: rval = "STARTING"; break;
7071 case STARTED: rval = "STARTED"; break;
7072 case FLUSHING: rval = "FLUSHING"; break;
7073 case FLUSHED: rval = "FLUSHED"; break;
7074 case STOPPING: rval = "STOPPING"; break;
7075 case RELEASING: rval = "RELEASING"; break;
7076 default:
7077 snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
7078 rval = rawbuffer;
7079 break;
7080 }
7081 return rval;
7082 }
7083
7084 // static
CanFetchLinearBlock(const std::vector<std::string> & names,bool * isCompatible)7085 status_t MediaCodec::CanFetchLinearBlock(
7086 const std::vector<std::string> &names, bool *isCompatible) {
7087 *isCompatible = false;
7088 if (names.size() == 0) {
7089 *isCompatible = true;
7090 return OK;
7091 }
7092 const CodecListCache &cache = GetCodecListCache();
7093 for (const std::string &name : names) {
7094 auto it = cache.mCodecInfoMap.find(name);
7095 if (it == cache.mCodecInfoMap.end()) {
7096 return NAME_NOT_FOUND;
7097 }
7098 const char *owner = it->second->getOwnerName();
7099 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
7100 *isCompatible = false;
7101 return OK;
7102 } else if (strncmp(owner, "codec2::", 8) != 0) {
7103 return NAME_NOT_FOUND;
7104 }
7105 }
7106 return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
7107 }
7108
// static
// Allocates a C2 linear block of `capacity` bytes that is usable by every
// codec listed in `names`, with the default read/write buffer usage.
// Thin delegation to CCodec; callers should gate on CanFetchLinearBlock().
std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
        size_t capacity, const std::vector<std::string> &names) {
    return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
}
7114
7115 // static
CanFetchGraphicBlock(const std::vector<std::string> & names,bool * isCompatible)7116 status_t MediaCodec::CanFetchGraphicBlock(
7117 const std::vector<std::string> &names, bool *isCompatible) {
7118 *isCompatible = false;
7119 if (names.size() == 0) {
7120 *isCompatible = true;
7121 return OK;
7122 }
7123 const CodecListCache &cache = GetCodecListCache();
7124 for (const std::string &name : names) {
7125 auto it = cache.mCodecInfoMap.find(name);
7126 if (it == cache.mCodecInfoMap.end()) {
7127 return NAME_NOT_FOUND;
7128 }
7129 const char *owner = it->second->getOwnerName();
7130 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
7131 *isCompatible = false;
7132 return OK;
7133 } else if (strncmp(owner, "codec2.", 7) != 0) {
7134 return NAME_NOT_FOUND;
7135 }
7136 }
7137 return CCodec::CanFetchGraphicBlock(names, isCompatible);
7138 }
7139
// static
// Allocates a C2 graphic block with the given dimensions, pixel format and
// gralloc usage bits, usable by every codec listed in `names`.
// Thin delegation to CCodec; callers should gate on CanFetchGraphicBlock().
std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
        int32_t width,
        int32_t height,
        int32_t format,
        uint64_t usage,
        const std::vector<std::string> &names) {
    return CCodec::FetchGraphicBlock(width, height, format, usage, names);
}
7149
7150 } // namespace android
7151