1 /*
2 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <string.h>
18 #include "VideoEncoderLog.h"
19 #include "VideoEncoderBase.h"
20 #include "IntelMetadataBuffer.h"
21 #include <va/va_tpi.h>
22 #include <va/va_android.h>
23
24 VideoEncoderBase::VideoEncoderBase()
25 :mInitialized(true)
26 ,mStarted(false)
27 ,mVADisplay(NULL)
28 ,mVAContext(VA_INVALID_ID)
29 ,mVAConfig(VA_INVALID_ID)
30 ,mVAEntrypoint(VAEntrypointEncSlice)
31 ,mNewHeader(false)
32 ,mRenderMaxSliceSize(false)
33 ,mRenderQP (false)
34 ,mRenderAIR(false)
35 ,mRenderCIR(false)
36 ,mRenderFrameRate(false)
37 ,mRenderBitRate(false)
38 ,mRenderHrd(false)
39 ,mRenderMultiTemporal(false)
40 ,mForceKFrame(false)
41 ,mSeqParamBuf(0)
42 ,mPicParamBuf(0)
43 ,mSliceParamBuf(0)
44 ,mAutoRefSurfaces(NULL)
45 ,mRefSurface(VA_INVALID_SURFACE)
46 ,mRecSurface(VA_INVALID_SURFACE)
47 ,mFrameNum(0)
48 ,mCodedBufSize(0)
49 ,mAutoReference(false)
50 ,mAutoReferenceSurfaceNum(4)
51 ,mEncPackedHeaders(VA_ATTRIB_NOT_SUPPORTED)
52 ,mSliceSizeOverflow(false)
53 ,mCurOutputTask(NULL)
54 ,mOutCodedBuffer(0)
55 ,mOutCodedBufferPtr(NULL)
56 ,mCurSegment(NULL)
57 ,mOffsetInSeg(0)
58 ,mTotalSize(0)
59 ,mTotalSizeCopied(0)
60 ,mFrameSkipped(false)
61 ,mSupportedSurfaceMemType(0)
62 ,mVASurfaceMappingAction(0)
63 #ifdef INTEL_VIDEO_XPROC_SHARING
64 ,mSessionFlag(0)
65 #endif
66 {
67
68 VAStatus vaStatus = VA_STATUS_SUCCESS;
69 // the display handle can be any value; this one is used
70 // only for consistency, so it is not defined as a named constant
71 unsigned int display = 0x18C34078;
72 int majorVersion = -1;
73 int minorVersion = -1;
74
75 setDefaultParams();
76
77 LOG_V("vaGetDisplay \n");
78 mVADisplay = vaGetDisplay(&display);
79 if (mVADisplay == NULL) {
80 LOG_E("vaGetDisplay failed.");
81 }
82
83 vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
84 LOG_V("vaInitialize \n");
85 if (vaStatus != VA_STATUS_SUCCESS) {
86 LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
87 mInitialized = false;
88 }
89 }
90
91 VideoEncoderBase::~VideoEncoderBase() {
92
93 VAStatus vaStatus = VA_STATUS_SUCCESS;
94
95 stop();
96
97 vaStatus = vaTerminate(mVADisplay);
98 LOG_V( "vaTerminate\n");
99 if (vaStatus != VA_STATUS_SUCCESS) {
100 LOG_W( "Failed vaTerminate, vaStatus = %d\n", vaStatus);
101 } else {
102 mVADisplay = NULL;
103 }
104
105 #ifdef INTEL_VIDEO_XPROC_SHARING
106 IntelMetadataBuffer::ClearContext(mSessionFlag, false);
107 #endif
108 }
109
110 Encode_Status VideoEncoderBase::start() {
111
112 Encode_Status ret = ENCODE_SUCCESS;
113 VAStatus vaStatus = VA_STATUS_SUCCESS;
114
115 if (!mInitialized) {
116 LOGE("Encoder initialization failed, cannot start");
117 return ENCODE_DRIVER_FAIL;
118 }
119
120 if (mStarted) {
121 LOG_V("Encoder has been started\n");
122 return ENCODE_ALREADY_INIT;
123 }
124
125 if (mComParams.rawFormat != RAW_FORMAT_NV12)
126 #ifdef IMG_GFX
127 mVASurfaceMappingAction |= MAP_ACTION_COLORCONVERT;
128 #else
129 return ENCODE_NOT_SUPPORTED;
130 #endif
131
132 if (mComParams.resolution.width > 2048 || mComParams.resolution.height > 2048){
133 LOGE("Unsupported resolution width %d, height %d\n",
134 mComParams.resolution.width, mComParams.resolution.height);
135 return ENCODE_NOT_SUPPORTED;
136 }
137 queryAutoReferenceConfig(mComParams.profile);
138
139 VAConfigAttrib vaAttrib_tmp[6],vaAttrib[VAConfigAttribTypeMax];
140 int vaAttribNumber = 0;
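// Note: supported attribute values are first queried into vaAttrib_tmp via vaGetConfigAttributes();
// only the attributes the driver actually supports are then copied into vaAttrib, which is the
// array passed to vaCreateConfig() further below.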
141 vaAttrib_tmp[0].type = VAConfigAttribRTFormat;
142 vaAttrib_tmp[1].type = VAConfigAttribRateControl;
143 vaAttrib_tmp[2].type = VAConfigAttribEncAutoReference;
144 vaAttrib_tmp[3].type = VAConfigAttribEncPackedHeaders;
145 vaAttrib_tmp[4].type = VAConfigAttribEncMaxRefFrames;
146 vaAttrib_tmp[5].type = VAConfigAttribEncRateControlExt;
147
148 vaStatus = vaGetConfigAttributes(mVADisplay, mComParams.profile,
149 VAEntrypointEncSlice, &vaAttrib_tmp[0], 6);
150 CHECK_VA_STATUS_RETURN("vaGetConfigAttributes");
151
152 if((vaAttrib_tmp[0].value & VA_RT_FORMAT_YUV420) != 0)
153 {
154 vaAttrib[vaAttribNumber].type = VAConfigAttribRTFormat;
155 vaAttrib[vaAttribNumber].value = VA_RT_FORMAT_YUV420;
156 vaAttribNumber++;
157 }
158
159 vaAttrib[vaAttribNumber].type = VAConfigAttribRateControl;
160 vaAttrib[vaAttribNumber].value = mComParams.rcMode;
161 vaAttribNumber++;
162
163 vaAttrib[vaAttribNumber].type = VAConfigAttribEncAutoReference;
164 vaAttrib[vaAttribNumber].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED;
165 vaAttribNumber++;
166
167 if(vaAttrib_tmp[3].value != VA_ATTRIB_NOT_SUPPORTED)
168 {
169 vaAttrib[vaAttribNumber].type = VAConfigAttribEncPackedHeaders;
170 vaAttrib[vaAttribNumber].value = vaAttrib_tmp[3].value;
171 vaAttribNumber++;
172 mEncPackedHeaders = vaAttrib_tmp[3].value;
173 }
174
175 if(vaAttrib_tmp[4].value != VA_ATTRIB_NOT_SUPPORTED)
176 {
177 vaAttrib[vaAttribNumber].type = VAConfigAttribEncMaxRefFrames;
178 vaAttrib[vaAttribNumber].value = vaAttrib_tmp[4].value;
179 vaAttribNumber++;
180 mEncMaxRefFrames = vaAttrib_tmp[4].value;
181 }
182
183 if(vaAttrib_tmp[5].value != VA_ATTRIB_NOT_SUPPORTED)
184 {
185 vaAttrib[vaAttribNumber].type = VAConfigAttribEncRateControlExt;
186 vaAttrib[vaAttribNumber].value = mComParams.numberOfLayer;
187 vaAttribNumber++;
188 }
189
190 LOG_V( "======VA Configuration======\n");
191 LOG_V( "profile = %d\n", mComParams.profile);
192 LOG_V( "mVAEntrypoint = %d\n", mVAEntrypoint);
193 LOG_V( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
194 LOG_V( "vaAttrib[1].type = %d\n", vaAttrib[1].type);
195 LOG_V( "vaAttrib[2].type = %d\n", vaAttrib[2].type);
196 LOG_V( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value);
197 LOG_V( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value);
198 LOG_V( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value);
199 LOG_V( "vaAttribNumber is %d\n", vaAttribNumber);
200 LOG_V( "mComParams.numberOfLayer is %d\n", mComParams.numberOfLayer);
201
202 LOG_V( "vaCreateConfig\n");
203
204 vaStatus = vaCreateConfig(
205 mVADisplay, mComParams.profile, mVAEntrypoint,
206 &vaAttrib[0], vaAttribNumber, &(mVAConfig));
207 // &vaAttrib[0], 3, &(mVAConfig)); //uncomment this after psb_video supports
208 CHECK_VA_STATUS_RETURN("vaCreateConfig");
209
210 querySupportedSurfaceMemTypes();
211
212 if (mComParams.rcMode == VA_RC_VCM) {
213 // Following three features are only enabled in VCM mode
214 mRenderMaxSliceSize = true;
215 mRenderAIR = true;
216 mRenderBitRate = true;
217 }
218
219 LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n");
220
221 uint32_t stride_aligned, height_aligned;
222 if(mAutoReference == false){
223 stride_aligned = (mComParams.resolution.width + 15) & ~15;
224 height_aligned = (mComParams.resolution.height + 15) & ~15;
225 }else{
226 // this alignment is used for AVC. For vp8 encode, driver will handle the alignment
227 if(mComParams.profile == VAProfileVP8Version0_3)
228 {
229 stride_aligned = mComParams.resolution.width;
230 height_aligned = mComParams.resolution.height;
231 mVASurfaceMappingAction |= MAP_ACTION_COPY;
232 }
233 else
234 {
235 stride_aligned = (mComParams.resolution.width + 63) & ~63; //on Merr, stride must be 64 aligned.
236 height_aligned = (mComParams.resolution.height + 31) & ~31;
237 mVASurfaceMappingAction |= MAP_ACTION_ALIGN64;
238 }
239 }
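// Example of the alignment math above: a 1920x1080 source gives
// stride_aligned = (1920 + 63) & ~63 = 1920 and height_aligned = (1080 + 31) & ~31 = 1088
// on the 64/32-aligned path, and 1920x1088 on the 16-aligned path as well.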
240
241 if(mAutoReference == false){
242 mRefSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
243 mRecSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
244
245 }else {
246 mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum];
247 for(uint32_t i = 0; i < mAutoReferenceSurfaceNum; i ++)
248 mAutoRefSurfaces[i] = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
249 }
250 CHECK_VA_STATUS_RETURN("vaCreateSurfaces");
251
252 //Prepare all Surfaces to be added into Context
253 uint32_t contextSurfaceCnt;
254 if(mAutoReference == false )
255 contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
256 else
257 contextSurfaceCnt = mAutoReferenceSurfaceNum + mSrcSurfaceMapList.size();
258
259 VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
260 int32_t index = -1;
261 android::List<VASurfaceMap *>::iterator map_node;
262
263 for(map_node = mSrcSurfaceMapList.begin(); map_node != mSrcSurfaceMapList.end(); map_node++)
264 {
265 contextSurfaces[++index] = (*map_node)->getVASurface();
266 (*map_node)->setTracked();
267 }
268
269 if(mAutoReference == false){
270 contextSurfaces[++index] = mRefSurface;
271 contextSurfaces[++index] = mRecSurface;
272 } else {
273 for (uint32_t i=0; i < mAutoReferenceSurfaceNum; i++)
274 contextSurfaces[++index] = mAutoRefSurfaces[i];
275 }
276
277 //Initialize and save the VA context ID
278 LOG_V( "vaCreateContext\n");
279 vaStatus = vaCreateContext(mVADisplay, mVAConfig,
280 #ifdef IMG_GFX
281 mComParams.resolution.width,
282 mComParams.resolution.height,
283 #else
284 stride_aligned,
285 height_aligned,
286 #endif
287 VA_PROGRESSIVE, contextSurfaces, contextSurfaceCnt,
288 &(mVAContext));
289 CHECK_VA_STATUS_RETURN("vaCreateContext");
290
291 delete [] contextSurfaces;
292
293 LOG_I("Successfully created libva context, width %d, height %d\n",
294 mComParams.resolution.width, mComParams.resolution.height);
295
296 uint32_t maxSize = 0;
297 ret = getMaxOutSize(&maxSize);
298 CHECK_ENCODE_STATUS_RETURN("getMaxOutSize");
299
300 // Create CodedBuffer for output
301 VABufferID VACodedBuffer;
302
303 for(uint32_t i = 0; i <mComParams.codedBufNum; i++) {
304 vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
305 VAEncCodedBufferType,
306 mCodedBufSize,
307 1, NULL,
308 &VACodedBuffer);
309 CHECK_VA_STATUS_RETURN("vaCreateBuffer::VAEncCodedBufferType");
310
311 mVACodedBufferList.push_back(VACodedBuffer);
312 }
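// The coded buffers created above form a small pool guarded by mCodedBuffer_Lock and
// mCodedBuffer_Cond: encode() takes one buffer per frame, and getOutput()/cleanupForOutput()
// signals the condition variable when a buffer is returned to the pool.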
313
314 if (ret == ENCODE_SUCCESS)
315 mStarted = true;
316
317 LOG_V( "end\n");
318 return ret;
319 }
320
321 Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout) {
322
323 Encode_Status ret = ENCODE_SUCCESS;
324 VAStatus vaStatus = VA_STATUS_SUCCESS;
325
326 if (!mStarted) {
327 LOG_E("Encoder has not been initialized yet\n");
328 return ENCODE_NOT_INIT;
329 }
330
331 CHECK_NULL_RETURN_IFFAIL(inBuffer);
332
333 //======Prepare all resources encoder needed=====.
334
335 //Prepare encode vaSurface
336 VASurfaceID sid = VA_INVALID_SURFACE;
337 ret = manageSrcSurface(inBuffer, &sid);
338 CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
339
340 //Prepare CodedBuffer
341 mCodedBuffer_Lock.lock();
342 if(mVACodedBufferList.empty()){
343 if(timeout == FUNC_BLOCK)
344 mCodedBuffer_Cond.wait(mCodedBuffer_Lock);
345 else if (timeout > 0) {
346 if(NO_ERROR != mCodedBuffer_Cond.waitRelative(mCodedBuffer_Lock, 1000000*timeout)){
347 mCodedBuffer_Lock.unlock();
348 LOG_E("Time out wait for Coded buffer.\n");
349 return ENCODE_DEVICE_BUSY;
350 }
351 }
352 else {//Nonblock
353 mCodedBuffer_Lock.unlock();
354 LOG_E("Coded buffer is not ready now.\n");
355 return ENCODE_DEVICE_BUSY;
356 }
357 }
358
359 if(mVACodedBufferList.empty()){
360 mCodedBuffer_Lock.unlock();
361 return ENCODE_DEVICE_BUSY;
362 }
363 VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin());
364 mVACodedBufferList.erase(mVACodedBufferList.begin());
365 mCodedBuffer_Lock.unlock();
366
367 LOG_V("CodedBuffer ID 0x%08x\n", coded_buf);
368
369 //All resources are ready, start to assemble EncodeTask
370 EncodeTask* task = new EncodeTask();
371
372 task->completed = false;
373 task->enc_surface = sid;
374 task->coded_buffer = coded_buf;
375 task->timestamp = inBuffer->timeStamp;
376 task->priv = inBuffer->priv;
377
378 //Setup frame info, like flag ( SYNCFRAME), frame number, type etc
379 task->type = inBuffer->type;
380 task->flag = inBuffer->flag;
381 PrepareFrameInfo(task);
382
383 if(mAutoReference == false){
384 //Setup ref/rec frames
385 //TODO: B frame support; temporarily using the same logic
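// The non-auto-reference path ping-pongs two surfaces: unless the previous frame was
// skipped, the last reconstructed surface becomes the reference for this frame and the
// other surface receives the new reconstruction.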
386 switch (inBuffer->type) {
387 case FTYPE_UNKNOWN:
388 case FTYPE_IDR:
389 case FTYPE_I:
390 case FTYPE_P:
391 {
392 if(!mFrameSkipped) {
393 VASurfaceID tmpSurface = mRecSurface;
394 mRecSurface = mRefSurface;
395 mRefSurface = tmpSurface;
396 }
397
398 task->ref_surface = mRefSurface;
399 task->rec_surface = mRecSurface;
400
401 break;
402 }
403 case FTYPE_B:
404 default:
405 LOG_V("Something is wrong: B frames are not supported in this mode\n");
406 ret = ENCODE_NOT_SUPPORTED;
407 goto CLEAN_UP;
408 }
409 }else {
410 task->ref_surface = VA_INVALID_SURFACE;
411 task->rec_surface = VA_INVALID_SURFACE;
412 }
413 //======Start Encoding, add task to list======
414 LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
415
416 vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface);
417 CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture");
418
419 ret = sendEncodeCommand(task);
420 CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand");
421
422 vaStatus = vaEndPicture(mVADisplay, mVAContext);
423 CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture");
424
425 LOG_V("Add Task %p into Encode Task list\n", task);
426 mEncodeTask_Lock.lock();
427 mEncodeTaskList.push_back(task);
428 mEncodeTask_Cond.signal();
429 mEncodeTask_Lock.unlock();
430
431 mFrameNum ++;
432
433 LOG_V("encode return Success\n");
434
435 return ENCODE_SUCCESS;
436
437 CLEAN_UP:
438
439 delete task;
440 mCodedBuffer_Lock.lock();
441 mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used
442 mCodedBuffer_Cond.signal();
443 mCodedBuffer_Lock.unlock();
444
445 LOG_V("encode return error=%x\n", ret);
446
447 return ret;
448 }
449
450 /*
451 1. First check whether a task is already outputting data; if so, continue with it, otherwise try to get one from the list.
452 2. For the three modes (block / non-block / block with timeout): if the task is not yet completed, sync the surface; once ready,
453 start outputting data.
454 3. mCurOutputTask records the task getOutput() is working on, so the task is not pushed back again when a failure occurs
455 in non-block or block-with-timeout mode.
456 4. Once all output data has been returned, mCurOutputTask is reset to NULL.
457 */
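/*
 Illustrative caller flow (sketch only; error handling, buffer setup and the concrete
 codec subclass are omitted, and the variable names are hypothetical):

     VideoEncoderBase *enc = ...;              // some codec-specific subclass instance
     enc->setParameters(&paramsCommon);        // must happen before start()
     enc->start();
     enc->encode(&inBuffer, FUNC_BLOCK);       // queues an EncodeTask
     enc->getOutput(&outBuffer, FUNC_BLOCK);   // drains the coded data for that task
     enc->stop();
*/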
458 Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) {
459
460 Encode_Status ret = ENCODE_SUCCESS;
461 VAStatus vaStatus = VA_STATUS_SUCCESS;
462 bool useLocalBuffer = false;
463
464 CHECK_NULL_RETURN_IFFAIL(outBuffer);
465
466 if (mCurOutputTask == NULL) {
467 mEncodeTask_Lock.lock();
468 if(mEncodeTaskList.empty()) {
469 LOG_V("getOutput CurrentTask is NULL\n");
470 if(timeout == FUNC_BLOCK) {
471 LOG_V("waiting for task....\n");
472 mEncodeTask_Cond.wait(mEncodeTask_Lock);
473 } else if (timeout > 0) {
474 LOG_V("waiting for task in %i ms....\n", timeout);
475 if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) {
476 mEncodeTask_Lock.unlock();
477 LOG_E("Time out wait for encode task.\n");
478 return ENCODE_NO_REQUEST_DATA;
479 }
480 } else {//Nonblock
481 mEncodeTask_Lock.unlock();
482 return ENCODE_NO_REQUEST_DATA;
483 }
484 }
485
486 if(mEncodeTaskList.empty()){
487 mEncodeTask_Lock.unlock();
488 return ENCODE_DATA_NOT_READY;
489 }
490 mCurOutputTask = *(mEncodeTaskList.begin());
491 mEncodeTaskList.erase(mEncodeTaskList.begin());
492 mEncodeTask_Lock.unlock();
493 }
494
495 //sync/query/wait task if not completed
496 if (mCurOutputTask->completed == false) {
497 VASurfaceStatus vaSurfaceStatus;
498
499 if (timeout == FUNC_BLOCK) {
500 //block mode, direct sync surface to output data
501
502 mOutCodedBuffer = mCurOutputTask->coded_buffer;
503
504 // Check frame skip
505 // Need encoding to be completed before calling query surface below to
506 // get the right skip frame flag for current frame
507 // It is a requirement of video driver
508 // vaSyncSurface syncs the wrong frame when rendering the same surface multiple times,
509 // so use vaMapBuffer instead
510 LOG_V ("block mode, vaMapBuffer ID = 0x%08x\n", mOutCodedBuffer);
511 if (mOutCodedBufferPtr == NULL) {
512 vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
513 CHECK_VA_STATUS_GOTO_CLEANUP("vaMapBuffer");
514 CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
515 }
516
517 vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus);
518 CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
519 mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
520
521 mCurOutputTask->completed = true;
522
523 } else {
524 //For both block with timeout and non-block mode, query surface, if ready, output data
525 LOG_V ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface);
526
527 vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface, &vaSurfaceStatus);
528 if (vaSurfaceStatus & VASurfaceReady) {
529 mOutCodedBuffer = mCurOutputTask->coded_buffer;
530 mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
531 mCurOutputTask->completed = true;
532 // TODO: is another vaSyncSurface call needed here?
533
534 } else { //encoding not complete yet; keep all context and return directly
535 return ENCODE_DATA_NOT_READY;
536 }
537
538 }
539
540 }
541
542 //start to output data
543 ret = prepareForOutput(outBuffer, &useLocalBuffer);
544 CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
545
546 //copy all flags to outBuffer
547 outBuffer->offset = 0;
548 outBuffer->flag = mCurOutputTask->flag;
549 outBuffer->type = mCurOutputTask->type;
550 outBuffer->timeStamp = mCurOutputTask->timestamp;
551 outBuffer->priv = mCurOutputTask->priv;
552
553 if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) {
554 ret = outputAllData(outBuffer);
555 CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
556 }else {
557 ret = getExtFormatOutput(outBuffer);
558 CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput");
559 }
560
561 LOG_V("out size for this getOutput call = %d\n", outBuffer->dataSize);
562
563 ret = cleanupForOutput();
564 CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput");
565
566 LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped);
567
568 return ENCODE_SUCCESS;
569
570 CLEAN_UP:
571
572 if (outBuffer->data && (useLocalBuffer == true)) {
573 delete[] outBuffer->data;
574 outBuffer->data = NULL;
575 useLocalBuffer = false;
576 }
577
578 if (mOutCodedBufferPtr != NULL) {
579 vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
580 mOutCodedBufferPtr = NULL;
581 mCurSegment = NULL;
582 }
583
584 delete mCurOutputTask;
585 mCurOutputTask = NULL;
586 mCodedBuffer_Lock.lock();
587 mVACodedBufferList.push_back(mOutCodedBuffer);
588 mCodedBuffer_Cond.signal();
589 mCodedBuffer_Lock.unlock();
590
591 LOG_V("getOutput return error=%x\n", ret);
592 return ret;
593 }
594
595 void VideoEncoderBase::flush() {
596
597 LOG_V( "Begin\n");
598
599 // reset the properties
600 mFrameNum = 0;
601
602 LOG_V( "end\n");
603 }
604
605 Encode_Status VideoEncoderBase::stop() {
606
607 VAStatus vaStatus = VA_STATUS_SUCCESS;
608 Encode_Status ret = ENCODE_SUCCESS;
609
610 LOG_V( "Begin\n");
611
612 // It is possible that above pointers have been allocated
613 // before we set mStarted to true
614 if (!mStarted) {
615 LOG_V("Encoder has been stopped\n");
616 return ENCODE_SUCCESS;
617 }
618 if (mAutoRefSurfaces) {
619 delete[] mAutoRefSurfaces;
620 mAutoRefSurfaces = NULL;
621 }
622
623 mCodedBuffer_Lock.lock();
624 mVACodedBufferList.clear();
625 mCodedBuffer_Lock.unlock();
626 mCodedBuffer_Cond.broadcast();
627
628 //Delete all uncompleted tasks
629 mEncodeTask_Lock.lock();
630 while(! mEncodeTaskList.empty())
631 {
632 delete *mEncodeTaskList.begin();
633 mEncodeTaskList.erase(mEncodeTaskList.begin());
634 }
635 mEncodeTask_Lock.unlock();
636 mEncodeTask_Cond.broadcast();
637
638 //Release Src Surface Buffer Map, destroy surface manually since it is not added into context
639 LOG_V( "Release Src Surface Map\n");
640 while(! mSrcSurfaceMapList.empty())
641 {
642 delete (*mSrcSurfaceMapList.begin());
643 mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
644 }
645
646 LOG_V( "vaDestroyContext\n");
647 if (mVAContext != VA_INVALID_ID) {
648 vaStatus = vaDestroyContext(mVADisplay, mVAContext);
649 CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
650 }
651
652 LOG_V( "vaDestroyConfig\n");
653 if (mVAConfig != VA_INVALID_ID) {
654 vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
655 CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
656 }
657
658 CLEAN_UP:
659
660 mStarted = false;
661 mSliceSizeOverflow = false;
662 mCurOutputTask= NULL;
663 mOutCodedBuffer = 0;
664 mCurSegment = NULL;
665 mOffsetInSeg =0;
666 mTotalSize = 0;
667 mTotalSizeCopied = 0;
668 mFrameSkipped = false;
669 mSupportedSurfaceMemType = 0;
670
671 LOG_V( "end\n");
672 return ret;
673 }
674
675 Encode_Status VideoEncoderBase::prepareForOutput(
676 VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) {
677
678 VAStatus vaStatus = VA_STATUS_SUCCESS;
679 VACodedBufferSegment *vaCodedSeg = NULL;
680 uint32_t status = 0;
681
682 LOG_V( "begin\n");
683 // Parameters are not checked here, as the caller has already validated them
684 // mCurSegment == NULL means this is the first call after a frame finished encoding
685 if (mCurSegment == NULL) {
686 if (mOutCodedBufferPtr == NULL) {
687 vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
688 CHECK_VA_STATUS_RETURN("vaMapBuffer");
689 CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
690 }
691
692 LOG_V("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer);
693
694 mTotalSize = 0;
695 mOffsetInSeg = 0;
696 mTotalSizeCopied = 0;
697 vaCodedSeg = (VACodedBufferSegment *)mOutCodedBufferPtr;
698 mCurSegment = (VACodedBufferSegment *)mOutCodedBufferPtr;
699
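// Walk the driver's linked list of VACodedBufferSegment entries: accumulate the total
// coded size, skip any leading 0xFF padding bytes (non-IMG_GFX path), and record whether
// any slice overflowed its size limit.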
700 while (1) {
701
702 mTotalSize += vaCodedSeg->size;
703 status = vaCodedSeg->status;
704 #ifndef IMG_GFX
705 uint8_t *pTemp;
706 uint32_t ii;
707 pTemp = (uint8_t*)vaCodedSeg->buf;
708 for(ii = 0; ii < 16;){
709 if (*(pTemp + ii) == 0xFF)
710 ii++;
711 else
712 break;
713 }
714 if (ii > 0) {
715 mOffsetInSeg = ii;
716 }
717 #endif
718 if (!mSliceSizeOverflow) {
719 mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
720 }
721
722 if (vaCodedSeg->next == NULL)
723 break;
724
725 vaCodedSeg = (VACodedBufferSegment *)vaCodedSeg->next;
726 }
727 }
728
729 // Two buffer allocation modes are supported:
730 // either the application allocates the buffer and passes it to the encoder,
731 // or the encoder allocates the memory itself
732 
733 // outBuffer->data == NULL means the app did not allocate the buffer, so the encoder allocates it
734 if (outBuffer->data == NULL) {
735 *useLocalBuffer = true;
736 outBuffer->data = new uint8_t[mTotalSize - mTotalSizeCopied + 100];
737 if (outBuffer->data == NULL) {
738 LOG_E( "outBuffer->data == NULL\n");
739 return ENCODE_NO_MEMORY;
740 }
741 outBuffer->bufferSize = mTotalSize + 100;
742 outBuffer->dataSize = 0;
743 }
744
745 // Clear all flag for every call
746 outBuffer->flag = 0;
747 if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
748
749 if (!mCurSegment)
750 return ENCODE_FAIL;
751
752 if (mCurSegment->size < mOffsetInSeg) {
753 LOG_E("mCurSegment->size < mOffsetInSeg\n");
754 return ENCODE_FAIL;
755 }
756
757 // Make sure we have data in current segment
758 if (mCurSegment->size == mOffsetInSeg) {
759 if (mCurSegment->next != NULL) {
760 mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
761 mOffsetInSeg = 0;
762 } else {
763 LOG_V("No more data available\n");
764 outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
765 outBuffer->dataSize = 0;
766 mCurSegment = NULL;
767 return ENCODE_NO_REQUEST_DATA;
768 }
769 }
770
771 LOG_V( "end\n");
772 return ENCODE_SUCCESS;
773 }
774
775 Encode_Status VideoEncoderBase::cleanupForOutput() {
776
777 VAStatus vaStatus = VA_STATUS_SUCCESS;
778
779 //mCurSegment is NULL means all data has been copied out
780 if (mCurSegment == NULL && mOutCodedBufferPtr) {
781 vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
782 CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
783 mOutCodedBufferPtr = NULL;
784 mTotalSize = 0;
785 mOffsetInSeg = 0;
786 mTotalSizeCopied = 0;
787
788 delete mCurOutputTask;
789 mCurOutputTask = NULL;
790 mCodedBuffer_Lock.lock();
791 mVACodedBufferList.push_back(mOutCodedBuffer);
792 mCodedBuffer_Cond.signal();
793 mCodedBuffer_Lock.unlock();
794
795 LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer);
796 }
797 return ENCODE_SUCCESS;
798 }
799
800 Encode_Status VideoEncoderBase::queryProfileLevelConfig(VADisplay dpy, VAProfile profile) {
801
802 VAStatus vaStatus = VA_STATUS_SUCCESS;
803 VAEntrypoint entryPtr[8];
804 int i, entryPtrNum;
805
806 if(profile == VAProfileH264Main) //need to be fixed
807 return ENCODE_NOT_SUPPORTED;
808
809 vaStatus = vaQueryConfigEntrypoints(dpy, profile, entryPtr, &entryPtrNum);
810 CHECK_VA_STATUS_RETURN("vaQueryConfigEntrypoints");
811
812 for(i=0; i<entryPtrNum; i++){
813 if(entryPtr[i] == VAEntrypointEncSlice)
814 return ENCODE_SUCCESS;
815 }
816
817 return ENCODE_NOT_SUPPORTED;
818 }
819
820 Encode_Status VideoEncoderBase::queryAutoReferenceConfig(VAProfile profile) {
821
822 VAStatus vaStatus = VA_STATUS_SUCCESS;
823 VAConfigAttrib attrib_list;
824 attrib_list.type = VAConfigAttribEncAutoReference;
825 attrib_list.value = VA_ATTRIB_NOT_SUPPORTED;
826
827 vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointEncSlice, &attrib_list, 1);
828 if(attrib_list.value == VA_ATTRIB_NOT_SUPPORTED )
829 mAutoReference = false;
830 else
831 mAutoReference = true;
832
833 return ENCODE_SUCCESS;
834 }
835
836 Encode_Status VideoEncoderBase::querySupportedSurfaceMemTypes() {
837
838 VAStatus vaStatus = VA_STATUS_SUCCESS;
839
840 unsigned int num = 0;
841
842 VASurfaceAttrib* attribs = NULL;
843
844 //get attribs number
845 vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
846 CHECK_VA_STATUS_RETURN("vaQuerySurfaceAttributes");
847
848 if (num == 0)
849 return ENCODE_SUCCESS;
850
851 attribs = new VASurfaceAttrib[num];
852
853 vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
854 CHECK_VA_STATUS_RETURN("vaQuerySurfaceAttributes");
855
856 for(uint32_t i = 0; i < num; i ++) {
857 if (attribs[i].type == VASurfaceAttribMemoryType) {
858 mSupportedSurfaceMemType = attribs[i].value.value.i;
859 break;
860 }
861 else
862 continue;
863 }
864
865 delete[] attribs;
866
867 return ENCODE_SUCCESS;
868 }
869
870 Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
871
872 // Data size copied during this single call
873 uint32_t sizeCopiedHere = 0;
874 uint32_t sizeToBeCopied = 0;
875
876 CHECK_NULL_RETURN_IFFAIL(outBuffer->data);
877
878 while (1) {
879
880 LOG_V("mCurSegment->size = %d, mOffsetInSeg = %d\n", mCurSegment->size, mOffsetInSeg);
881 LOG_V("outBuffer->bufferSize = %d, sizeCopiedHere = %d, mTotalSizeCopied = %d\n",
882 outBuffer->bufferSize, sizeCopiedHere, mTotalSizeCopied);
883
884 if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
885 LOG_E("mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere\n");
886 return ENCODE_FAIL;
887 }
888
889 if ((mCurSegment->size - mOffsetInSeg) <= outBuffer->bufferSize - sizeCopiedHere) {
890 sizeToBeCopied = mCurSegment->size - mOffsetInSeg;
891 memcpy(outBuffer->data + sizeCopiedHere,
892 (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
893 sizeCopiedHere += sizeToBeCopied;
894 mTotalSizeCopied += sizeToBeCopied;
895 mOffsetInSeg = 0;
896 } else {
897 sizeToBeCopied = outBuffer->bufferSize - sizeCopiedHere;
898 memcpy(outBuffer->data + sizeCopiedHere,
899 (uint8_t *)mCurSegment->buf + mOffsetInSeg, outBuffer->bufferSize - sizeCopiedHere);
900 mTotalSizeCopied += sizeToBeCopied;
901 mOffsetInSeg += sizeToBeCopied;
902 outBuffer->dataSize = outBuffer->bufferSize;
903 outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
904 outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
905 return ENCODE_BUFFER_TOO_SMALL;
906 }
907
908 if (mCurSegment->next == NULL) {
909 outBuffer->dataSize = sizeCopiedHere;
910 outBuffer->remainingSize = 0;
911 outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
912 mCurSegment = NULL;
913 return ENCODE_SUCCESS;
914 }
915
916 mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
917 mOffsetInSeg = 0;
918 }
919 }
920
921 void VideoEncoderBase::setDefaultParams() {
922
923 // Set default value for input parameters
924 mComParams.profile = VAProfileH264Baseline;
925 mComParams.level = 41;
926 mComParams.rawFormat = RAW_FORMAT_NV12;
927 mComParams.frameRate.frameRateNum = 30;
928 mComParams.frameRate.frameRateDenom = 1;
929 mComParams.resolution.width = 0;
930 mComParams.resolution.height = 0;
931 mComParams.intraPeriod = 30;
932 mComParams.rcMode = RATE_CONTROL_NONE;
933 mComParams.rcParams.initQP = 15;
934 mComParams.rcParams.minQP = 0;
935 mComParams.rcParams.maxQP = 0;
936 mComParams.rcParams.I_minQP = 0;
937 mComParams.rcParams.I_maxQP = 0;
938 mComParams.rcParams.bitRate = 640000;
939 mComParams.rcParams.targetPercentage= 0;
940 mComParams.rcParams.windowSize = 0;
941 mComParams.rcParams.disableFrameSkip = 0;
942 mComParams.rcParams.disableBitsStuffing = 1;
943 mComParams.rcParams.enableIntraFrameQPControl = 0;
944 mComParams.rcParams.temporalFrameRate = 0;
945 mComParams.rcParams.temporalID = 0;
946 mComParams.cyclicFrameInterval = 30;
947 mComParams.refreshType = VIDEO_ENC_NONIR;
948 mComParams.airParams.airMBs = 0;
949 mComParams.airParams.airThreshold = 0;
950 mComParams.airParams.airAuto = 1;
951 mComParams.disableDeblocking = 2;
952 mComParams.syncEncMode = false;
953 mComParams.codedBufNum = 2;
954 mComParams.numberOfLayer = 1;
955 mComParams.nPeriodicity = 0;
956 memset(mComParams.nLayerID,0,32*sizeof(uint32_t));
957
958 mHrdParam.bufferSize = 0;
959 mHrdParam.initBufferFullness = 0;
960
961 mStoreMetaDataInBuffers.isEnabled = false;
962 }
963
964 Encode_Status VideoEncoderBase::setParameters(
965 VideoParamConfigSet *videoEncParams) {
966
967 Encode_Status ret = ENCODE_SUCCESS;
968 CHECK_NULL_RETURN_IFFAIL(videoEncParams);
969 LOG_V("Config type = %x\n", (int)videoEncParams->type);
970
971 if (mStarted) {
972 LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
973 return ENCODE_ALREADY_INIT;
974 }
975
976 switch (videoEncParams->type) {
977 case VideoParamsTypeCommon: {
978
979 VideoParamsCommon *paramsCommon =
980 reinterpret_cast <VideoParamsCommon *> (videoEncParams);
981 if (paramsCommon->size != sizeof (VideoParamsCommon)) {
982 return ENCODE_INVALID_PARAMS;
983 }
984 if(paramsCommon->codedBufNum < 2)
985 paramsCommon->codedBufNum =2;
986 mComParams = *paramsCommon;
987 break;
988 }
989
990 case VideoParamsTypeUpSteamBuffer: {
991
992 VideoParamsUpstreamBuffer *upStreamBuffer =
993 reinterpret_cast <VideoParamsUpstreamBuffer *> (videoEncParams);
994
995 if (upStreamBuffer->size != sizeof (VideoParamsUpstreamBuffer)) {
996 return ENCODE_INVALID_PARAMS;
997 }
998
999 ret = setUpstreamBuffer(upStreamBuffer);
1000 break;
1001 }
1002
1003 case VideoParamsTypeUsrptrBuffer: {
1004
1005 // usrptr can only be queried (via getParameters), not set;
1006 // this case should not happen
1007 break;
1008 }
1009
1010 case VideoParamsTypeHRD: {
1011 VideoParamsHRD *hrd =
1012 reinterpret_cast <VideoParamsHRD *> (videoEncParams);
1013
1014 if (hrd->size != sizeof (VideoParamsHRD)) {
1015 return ENCODE_INVALID_PARAMS;
1016 }
1017
1018 mHrdParam.bufferSize = hrd->bufferSize;
1019 mHrdParam.initBufferFullness = hrd->initBufferFullness;
1020 mRenderHrd = true;
1021
1022 break;
1023 }
1024
1025 case VideoParamsTypeStoreMetaDataInBuffers: {
1026 VideoParamsStoreMetaDataInBuffers *metadata =
1027 reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
1028
1029 if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
1030 return ENCODE_INVALID_PARAMS;
1031 }
1032
1033 mStoreMetaDataInBuffers.isEnabled = metadata->isEnabled;
1034
1035 break;
1036 }
1037
1038 case VideoParamsTypeTemporalLayer:{
1039 VideoParamsTemporalLayer *temporallayer =
1040 reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
1041
1042 if (temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
1043 return ENCODE_INVALID_PARAMS;
1044 }
1045
1046 mComParams.numberOfLayer = temporallayer->numberOfLayer;
1047 mComParams.nPeriodicity = temporallayer->nPeriodicity;
1048 for(uint32_t i=0;i<temporallayer->nPeriodicity;i++)
1049 mComParams.nLayerID[i] = temporallayer->nLayerID[i];
1050 mRenderMultiTemporal = true;
1051 break;
1052 }
1053
1054 case VideoParamsTypeAVC:
1055 case VideoParamsTypeH263:
1056 case VideoParamsTypeMP4:
1057 case VideoParamsTypeVC1:
1058 case VideoParamsTypeVP8: {
1059 ret = derivedSetParams(videoEncParams);
1060 break;
1061 }
1062
1063 default: {
1064 LOG_E ("Wrong ParamType here\n");
1065 return ENCODE_INVALID_PARAMS;
1066 }
1067 }
1068 return ret;
1069 }
1070
1071 Encode_Status VideoEncoderBase::getParameters(
1072 VideoParamConfigSet *videoEncParams) {
1073
1074 Encode_Status ret = ENCODE_SUCCESS;
1075 CHECK_NULL_RETURN_IFFAIL(videoEncParams);
1076 LOG_V("Config type = %d\n", (int)videoEncParams->type);
1077
1078 switch (videoEncParams->type) {
1079 case VideoParamsTypeCommon: {
1080
1081 VideoParamsCommon *paramsCommon =
1082 reinterpret_cast <VideoParamsCommon *> (videoEncParams);
1083
1084 if (paramsCommon->size != sizeof (VideoParamsCommon)) {
1085 return ENCODE_INVALID_PARAMS;
1086 }
1087 *paramsCommon = mComParams;
1088 break;
1089 }
1090
1091 case VideoParamsTypeUpSteamBuffer: {
1092
1093 // Getting the upstream buffer parameters is possible
1094 // but not particularly meaningful
1095 break;
1096 }
1097
1098 case VideoParamsTypeUsrptrBuffer: {
1099 VideoParamsUsrptrBuffer *usrptrBuffer =
1100 reinterpret_cast <VideoParamsUsrptrBuffer *> (videoEncParams);
1101
1102 if (usrptrBuffer->size != sizeof (VideoParamsUsrptrBuffer)) {
1103 return ENCODE_INVALID_PARAMS;
1104 }
1105
1106 ret = getNewUsrptrFromSurface(
1107 usrptrBuffer->width, usrptrBuffer->height, usrptrBuffer->format,
1108 usrptrBuffer->expectedSize, &(usrptrBuffer->actualSize),
1109 &(usrptrBuffer->stride), &(usrptrBuffer->usrPtr));
1110
1111 break;
1112 }
1113
1114 case VideoParamsTypeHRD: {
1115 VideoParamsHRD *hrd =
1116 reinterpret_cast <VideoParamsHRD *> (videoEncParams);
1117
1118 if (hrd->size != sizeof (VideoParamsHRD)) {
1119 return ENCODE_INVALID_PARAMS;
1120 }
1121
1122 hrd->bufferSize = mHrdParam.bufferSize;
1123 hrd->initBufferFullness = mHrdParam.initBufferFullness;
1124
1125 break;
1126 }
1127
1128 case VideoParamsTypeStoreMetaDataInBuffers: {
1129 VideoParamsStoreMetaDataInBuffers *metadata =
1130 reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
1131
1132 if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
1133 return ENCODE_INVALID_PARAMS;
1134 }
1135
1136 metadata->isEnabled = mStoreMetaDataInBuffers.isEnabled;
1137
1138 break;
1139 }
1140
1141 case VideoParamsTypeProfileLevel: {
1142 VideoParamsProfileLevel *profilelevel =
1143 reinterpret_cast <VideoParamsProfileLevel *> (videoEncParams);
1144
1145 if (profilelevel->size != sizeof (VideoParamsProfileLevel)) {
1146 return ENCODE_INVALID_PARAMS;
1147 }
1148
1149 profilelevel->level = 0;
1150 if(queryProfileLevelConfig(mVADisplay, profilelevel->profile) == ENCODE_SUCCESS){
1151 profilelevel->isSupported = true;
1152 if(profilelevel->profile == VAProfileH264High)
1153 profilelevel->level = 42;
1154 else if(profilelevel->profile == VAProfileH264Main)
1155 profilelevel->level = 42;
1156 else if(profilelevel->profile == VAProfileH264Baseline)
1157 profilelevel->level = 41;
1158 else{
1159 profilelevel->level = 0;
1160 profilelevel->isSupported = false;
1161 }
1162 }
1163 break;
}
1164
1165 case VideoParamsTypeTemporalLayer:{
1166 VideoParamsTemporalLayer *temporallayer =
1167 reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
1168
1169 if(temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
1170 return ENCODE_INVALID_PARAMS;
1171 }
1172
1173 temporallayer->numberOfLayer = mComParams.numberOfLayer;
1174
1175 break;
1176 }
1177
1178 case VideoParamsTypeAVC:
1179 case VideoParamsTypeH263:
1180 case VideoParamsTypeMP4:
1181 case VideoParamsTypeVC1:
1182 case VideoParamsTypeVP8: {
1183 derivedGetParams(videoEncParams);
1184 break;
1185 }
1186
1187 default: {
1188 LOG_E ("Wrong ParamType here\n");
1189 break;
1190 }
1191
1192 }
1193 return ENCODE_SUCCESS;
1194 }
1195
1196 Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) {
1197
1198 Encode_Status ret = ENCODE_SUCCESS;
1199 CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
1200 LOG_V("Config type = %d\n", (int)videoEncConfig->type);
1201
1202 // workaround
1203 #if 0
1204 if (!mStarted) {
1205 LOG_E("Encoder has not initialized yet, can't call setConfig\n");
1206 return ENCODE_NOT_INIT;
1207 }
1208 #endif
1209
1210 switch (videoEncConfig->type) {
1211 case VideoConfigTypeFrameRate: {
1212 VideoConfigFrameRate *configFrameRate =
1213 reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
1214
1215 if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
1216 return ENCODE_INVALID_PARAMS;
1217 }
1218 mComParams.frameRate = configFrameRate->frameRate;
1219 mRenderFrameRate = true;
1220 break;
1221 }
1222
1223 case VideoConfigTypeBitRate: {
1224 VideoConfigBitRate *configBitRate =
1225 reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
1226
1227 if (configBitRate->size != sizeof (VideoConfigBitRate)) {
1228 return ENCODE_INVALID_PARAMS;
1229 }
1230
1231 if(mComParams.numberOfLayer == 1)
1232 {
1233 mComParams.rcParams = configBitRate->rcParams;
1234 mRenderBitRate = true;
1235 }
1236 else
1237 {
1238 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].nLayerID = configBitRate->rcParams.temporalID;
1239 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].bitRate = configBitRate->rcParams.bitRate;
1240 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].frameRate = configBitRate->rcParams.temporalFrameRate;
1241 }
1242 break;
1243 }
1244
1245 case VideoConfigTypeResolution: {
1246
1247 // Not Implemented
1248 break;
1249 }
1250 case VideoConfigTypeIntraRefreshType: {
1251
1252 VideoConfigIntraRefreshType *configIntraRefreshType =
1253 reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
1254
1255 if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
1256 return ENCODE_INVALID_PARAMS;
1257 }
1258 mComParams.refreshType = configIntraRefreshType->refreshType;
1259 break;
1260 }
1261
1262 case VideoConfigTypeCyclicFrameInterval: {
1263 VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
1264 reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
1265 if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
1266 return ENCODE_INVALID_PARAMS;
1267 }
1268
1269 mComParams.cyclicFrameInterval = configCyclicFrameInterval->cyclicFrameInterval;
1270 break;
1271 }
1272
1273 case VideoConfigTypeAIR: {
1274
1275 VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
1276
1277 if (configAIR->size != sizeof (VideoConfigAIR)) {
1278 return ENCODE_INVALID_PARAMS;
1279 }
1280
1281 mComParams.airParams = configAIR->airParams;
1282 mRenderAIR = true;
1283 break;
1284 }
1285 case VideoConfigTypeCIR: {
1286
1287 VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
1288
1289 if (configCIR->size != sizeof (VideoConfigCIR)) {
1290 return ENCODE_INVALID_PARAMS;
1291 }
1292
1293 mComParams.cirParams = configCIR->cirParams;
1294 mRenderCIR = true;
1295 break;
1296 }
1297 case VideoConfigTypeAVCIntraPeriod:
1298 case VideoConfigTypeNALSize:
1299 case VideoConfigTypeIDRRequest:
1300 case VideoConfigTypeSliceNum:
1301 case VideoConfigTypeVP8:
1302 case VideoConfigTypeVP8ReferenceFrame:
1303 case VideoConfigTypeVP8MaxFrameSizeRatio:{
1304 ret = derivedSetConfig(videoEncConfig);
1305 break;
1306 }
1307 default: {
1308 LOG_E ("Wrong Config Type here\n");
1309 break;
1310 }
1311 }
1312 return ret;
1313 }
1314
1315 Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) {
1316
1317 Encode_Status ret = ENCODE_SUCCESS;
1318 CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
1319 LOG_V("Config type = %d\n", (int)videoEncConfig->type);
1320
1321 switch (videoEncConfig->type) {
1322 case VideoConfigTypeFrameRate: {
1323 VideoConfigFrameRate *configFrameRate =
1324 reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
1325
1326 if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
1327 return ENCODE_INVALID_PARAMS;
1328 }
1329
1330 configFrameRate->frameRate = mComParams.frameRate;
1331 break;
1332 }
1333
1334 case VideoConfigTypeBitRate: {
1335 VideoConfigBitRate *configBitRate =
1336 reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
1337
1338 if (configBitRate->size != sizeof (VideoConfigBitRate)) {
1339 return ENCODE_INVALID_PARAMS;
1340 }
1341 configBitRate->rcParams = mComParams.rcParams;
1342
1343
1344 break;
1345 }
1346 case VideoConfigTypeResolution: {
1347 // Not Implemented
1348 break;
1349 }
1350 case VideoConfigTypeIntraRefreshType: {
1351
1352 VideoConfigIntraRefreshType *configIntraRefreshType =
1353 reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
1354
1355 if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
1356 return ENCODE_INVALID_PARAMS;
1357 }
1358 configIntraRefreshType->refreshType = mComParams.refreshType;
1359 break;
1360 }
1361
1362 case VideoConfigTypeCyclicFrameInterval: {
1363 VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
1364 reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
1365 if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
1366 return ENCODE_INVALID_PARAMS;
1367 }
1368
1369 configCyclicFrameInterval->cyclicFrameInterval = mComParams.cyclicFrameInterval;
1370 break;
1371 }
1372
1373 case VideoConfigTypeAIR: {
1374
1375 VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
1376
1377 if (configAIR->size != sizeof (VideoConfigAIR)) {
1378 return ENCODE_INVALID_PARAMS;
1379 }
1380
1381 configAIR->airParams = mComParams.airParams;
1382 break;
1383 }
1384 case VideoConfigTypeCIR: {
1385
1386 VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
1387
1388 if (configCIR->size != sizeof (VideoConfigCIR)) {
1389 return ENCODE_INVALID_PARAMS;
1390 }
1391
1392 configCIR->cirParams = mComParams.cirParams;
1393 break;
1394 }
1395 case VideoConfigTypeAVCIntraPeriod:
1396 case VideoConfigTypeNALSize:
1397 case VideoConfigTypeIDRRequest:
1398 case VideoConfigTypeSliceNum:
1399 case VideoConfigTypeVP8: {
1400
1401 ret = derivedGetConfig(videoEncConfig);
1402 break;
1403 }
1404 default: {
1405 LOG_E ("Wrong ParamType here\n");
1406 break;
1407 }
1408 }
1409 return ret;
1410 }
1411
1412 void VideoEncoderBase:: PrepareFrameInfo (EncodeTask* task) {
1413 if (mNewHeader) mFrameNum = 0;
1414 LOG_V( "mFrameNum = %d ", mFrameNum);
1415
1416 updateFrameInfo(task) ;
1417 }
1418
1419 Encode_Status VideoEncoderBase:: updateFrameInfo (EncodeTask* task) {
1420
1421 task->type = FTYPE_P;
1422
1423 // determine the picture type
1424 if (mFrameNum == 0)
1425 task->type = FTYPE_I;
1426 if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0))
1427 task->type = FTYPE_I;
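// e.g. with intraPeriod == 30, frames 0, 30, 60, ... are coded as I frames and flagged
// as sync frames below; all other frames remain P frames.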
1428
1429 if (task->type == FTYPE_I)
1430 task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
1431
1432 return ENCODE_SUCCESS;
1433 }
1434
1435 Encode_Status VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) {
1436
1437 uint32_t size = mComParams.resolution.width * mComParams.resolution.height;
1438
1439 if (maxSize == NULL) {
1440 LOG_E("maxSize == NULL\n");
1441 return ENCODE_NULL_PTR;
1442 }
1443
1444 LOG_V( "Begin\n");
1445
1446 if (mCodedBufSize > 0) {
1447 *maxSize = mCodedBufSize;
1448 LOG_V ("Max encoded size already calculated, returning the cached value");
1449 return ENCODE_SUCCESS;
1450 }
1451
1452 // here, VP8 is different from AVC/H263
1453 if(mComParams.profile == VAProfileVP8Version0_3) // for VP8 encode
1454 {
1455 // According to VIED suggestions, in CBR mode, coded buffer should be the size of 3 bytes per luma pixel
1456 // in CBR_HRD mode, coded buffer size should be 5 * rc_buf_sz * rc_target_bitrate;
1457 // now we just hardcode mCodedBufSize as 2M to work around the coded buffer size issue;
1458 /*
1459 if(mComParams.rcMode == VA_RC_CBR) // CBR_HRD mode
1460 mCodedBufSize = 5 * mComParams.rcParams.bitRate * 6000;
1461 else // CBR mode
1462 mCodedBufSize = 3 * mComParams.resolution.width * mComParams.resolution.height;
1463 */
1464 mCodedBufSize = (2 * 1024 * 1024 + 31) & (~31);
1465 }
1466 else // for AVC/H263/MPEG4 encode
1467 {
1468 // based on the rate control mode, calculate the default encoded buffer size
1469 if (mComParams.rcMode == VA_RC_NONE) {
1470 mCodedBufSize = (size * 400) / (16 * 16);
1471 // set to value according to QP
1472 } else {
1473 mCodedBufSize = mComParams.rcParams.bitRate / 4;
1474 }
1475
1476 mCodedBufSize = max (mCodedBufSize , (size * 400) / (16 * 16));
1477
1478 // in case got a very large user input bit rate value
1479 mCodedBufSize = min(mCodedBufSize, (size * 1.5 * 8));
1480 mCodedBufSize = (mCodedBufSize + 15) &(~15);
1481 }
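// Worked example for the AVC path (assumed values): 1280x720 with rate control enabled and
// bitRate = 4 Mbps gives bitRate/4 = 1000000, max(1000000, 921600*400/256) = 1440000,
// min(1440000, 921600*1.5*8) = 1440000, which is already 16-byte aligned.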
1482
1483 *maxSize = mCodedBufSize;
1484 return ENCODE_SUCCESS;
1485 }
1486
1487 Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
1488 uint32_t width, uint32_t height, uint32_t format,
1489 uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
1490
1491 Encode_Status ret = ENCODE_FAIL;
1492 VAStatus vaStatus = VA_STATUS_SUCCESS;
1493
1494 VASurfaceID surface = VA_INVALID_SURFACE;
1495 VAImage image;
1496 uint32_t index = 0;
1497
1498 LOG_V( "Begin\n");
1499 // If encode session has been configured, we can not request surface creation anymore
1500 if (mStarted) {
1501 LOG_E( "Already Initialized, can not request VA surface anymore\n");
1502 return ENCODE_WRONG_STATE;
1503 }
1504 if (width<=0 || height<=0 ||outsize == NULL ||stride == NULL || usrptr == NULL) {
1505 LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL ||usrptr == NULL\n");
1506 return ENCODE_NULL_PTR;
1507 }
1508
1509 // Currently only NV12 is supported in the VA API
1510 // The format determines the number of planes
1511 if (format != STRING_TO_FOURCC("NV12")) {
1512 LOG_W ("Format is not supported\n");
1513 return ENCODE_NOT_SUPPORTED;
1514 }
1515
1516 surface = CreateNewVASurface(mVADisplay, width, height);
1517 if (surface == VA_INVALID_SURFACE)
1518 return ENCODE_DRIVER_FAIL;
1519
1520 vaStatus = vaDeriveImage(mVADisplay, surface, &image);
1521 CHECK_VA_STATUS_RETURN("vaDeriveImage");
1522 LOG_V( "vaDeriveImage Done\n");
1523 vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr);
1524 CHECK_VA_STATUS_RETURN("vaMapBuffer");
1525
1526 // make sure the physical pages have been allocated by touching each 4K page
1527 for (index = 0; index < image.data_size; index = index + 4096) {
1528 unsigned char tmp = *(*usrptr + index);
1529 if (tmp == 0)
1530 *(*usrptr + index) = 0;
1531 }
1532
1533 *outsize = image.data_size;
1534 *stride = image.pitches[0];
1535
1536 LOG_V( "surface = 0x%08x\n",(uint32_t)surface);
1537 LOG_V("image->pitches[0] = %d\n", image.pitches[0]);
1538 LOG_V("image->pitches[1] = %d\n", image.pitches[1]);
1539 LOG_V("image->offsets[0] = %d\n", image.offsets[0]);
1540 LOG_V("image->offsets[1] = %d\n", image.offsets[1]);
1541 LOG_V("image->num_planes = %d\n", image.num_planes);
1542 LOG_V("image->width = %d\n", image.width);
1543 LOG_V("image->height = %d\n", image.height);
1544 LOG_V("data_size = %d\n", image.data_size);
1545 LOG_V("usrptr = 0x%p\n", *usrptr);
1546
1547 vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
1548 CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1549 vaStatus = vaDestroyImage(mVADisplay, image.image_id);
1550 CHECK_VA_STATUS_RETURN("vaDestroyImage");
1551
1552 if (*outsize < expectedSize) {
1553 LOG_E ("Allocated buffer size is smaller than the expected size, destroy the surface");
1554 LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expectedSize);
1555 vaStatus = vaDestroySurfaces(mVADisplay, &surface, 1);
1556 CHECK_VA_STATUS_RETURN("vaDestroySurfaces");
1557 return ENCODE_FAIL;
1558 }
1559
1560 VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1561 if (map == NULL) {
1562 LOG_E( "new VASurfaceMap failed\n");
1563 return ENCODE_NO_MEMORY;
1564 }
1565
1566 map->setVASurface(surface); //special case: the VA surface is already set, so doMapping() has nothing to do
1567 // map->setType(MetadataBufferTypeEncoder);
1568 map->setValue((intptr_t)*usrptr);
1569 ValueInfo vinfo;
1570 memset(&vinfo, 0, sizeof(ValueInfo));
1571 vinfo.mode = (MemMode)MEM_MODE_USRPTR;
1572 vinfo.handle = 0;
1573 vinfo.size = 0;
1574 vinfo.width = width;
1575 vinfo.height = height;
1576 vinfo.lumaStride = width;
1577 vinfo.chromStride = width;
1578 vinfo.format = VA_FOURCC_NV12;
1579 vinfo.s3dformat = 0xffffffff;
1580 map->setValueInfo(vinfo);
1581 map->doMapping();
1582
1583 mSrcSurfaceMapList.push_back(map);
1584
1585 ret = ENCODE_SUCCESS;
1586
1587 return ret;
1588 }
1589
1590 Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) {
1591
1592 Encode_Status status = ENCODE_SUCCESS;
1593
1594 CHECK_NULL_RETURN_IFFAIL(upStreamBuffer);
1595 if (upStreamBuffer->bufCnt == 0) {
1596 LOG_E("bufCnt == 0\n");
1597 return ENCODE_FAIL;
1598 }
1599
1600 for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) {
1601 if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL) //already mapped
1602 continue;
1603
1604 //wrap upstream buffer into vaSurface
1605 VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1606
1607 // map->setType(MetadataBufferTypeUser);
1608 map->setValue(upStreamBuffer->bufList[i]);
1609 ValueInfo vinfo;
1610 memset(&vinfo, 0, sizeof(ValueInfo));
1611 vinfo.mode = (MemMode)upStreamBuffer->bufferMode;
1612 vinfo.handle = (intptr_t)upStreamBuffer->display;
1613 vinfo.size = 0;
1614 if (upStreamBuffer->bufAttrib) {
1615 vinfo.width = upStreamBuffer->bufAttrib->realWidth;
1616 vinfo.height = upStreamBuffer->bufAttrib->realHeight;
1617 vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride;
1618 vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride;
1619 vinfo.format = upStreamBuffer->bufAttrib->format;
1620 }
1621 vinfo.s3dformat = 0xFFFFFFFF;
1622 map->setValueInfo(vinfo);
1623 status = map->doMapping();
1624
1625 if (status == ENCODE_SUCCESS)
1626 mSrcSurfaceMapList.push_back(map);
1627 else
1628 delete map;
1629 }
1630
1631 return status;
1632 }
1633
1634 Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) {
1635
1636 Encode_Status ret = ENCODE_SUCCESS;
1637 IntelMetadataBufferType type;
1638 intptr_t value;
1639 ValueInfo vinfo;
1640 ValueInfo *pvinfo = &vinfo;
1641 intptr_t *extravalues = NULL;
1642 unsigned int extravalues_count = 0;
1643
1644 IntelMetadataBuffer imb;
1645 VASurfaceMap *map = NULL;
1646
1647 memset(&vinfo, 0, sizeof(ValueInfo));
1648 if (mStoreMetaDataInBuffers.isEnabled) {
1649 //metadatabuffer mode
1650 LOG_V("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
1651 if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) {
1652 //fail to parse buffer
1653 return ENCODE_NO_REQUEST_DATA;
1654 }
1655
1656 imb.GetType(type);
1657 imb.GetValue(value);
1658 } else {
1659 //raw mode
1660 LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
1661 if (! inBuffer->data || inBuffer->size == 0) {
1662 return ENCODE_NULL_PTR;
1663 }
1664
1665 type = IntelMetadataBufferTypeUser;
1666 value = (intptr_t)inBuffer->data;
1667 }
1668
1669 #ifdef INTEL_VIDEO_XPROC_SHARING
1670 uint32_t sflag = mSessionFlag;
1671 imb.GetSessionFlag(mSessionFlag);
1672 if (mSessionFlag != sflag) {
1673 //new sharing session, flush buffer sharing cache
1674 IntelMetadataBuffer::ClearContext(sflag, false);
1675 //flush surfacemap cache
1676 LOG_V( "Flush Src Surface Map\n");
1677 while(! mSrcSurfaceMapList.empty())
1678 {
1679 delete (*mSrcSurfaceMapList.begin());
1680 mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
1681 }
1682 }
1683 #endif
1684
1685 //find if mapped
1686 map = (VASurfaceMap*) findSurfaceMapByValue(value);
1687
1688 if (map) {
1689 //has mapped, get surfaceID directly and do all necessary actions
1690 LOG_V("direct find surface %d from value %i\n", map->getVASurface(), value);
1691 *sid = map->getVASurface();
1692 map->doMapping();
1693 return ret;
1694 }
1695
1696 //not found in the list, try to map the value with the available parameters
1697 LOG_V("surface for value %i not found in cache, start mapping if there is enough information\n", value);
1698
1699 if (mStoreMetaDataInBuffers.isEnabled) {
1700
1701 //if type is IntelMetadataBufferTypeGrallocSource, use default parameters since no ValueInfo
1702 if (type == IntelMetadataBufferTypeGrallocSource) {
1703 vinfo.mode = MEM_MODE_GFXHANDLE;
1704 vinfo.handle = 0;
1705 vinfo.size = 0;
1706 vinfo.width = mComParams.resolution.width;
1707 vinfo.height = mComParams.resolution.height;
1708 vinfo.lumaStride = mComParams.resolution.width;
1709 vinfo.chromStride = mComParams.resolution.width;
1710 vinfo.format = VA_FOURCC_NV12;
1711 vinfo.s3dformat = 0xFFFFFFFF;
1712 } else {
1713 //get all info mapping needs
1714 imb.GetValueInfo(pvinfo);
1715 imb.GetExtraValues(extravalues, extravalues_count);
1716 }
1717
1718 } else {
1719
1720 //raw mode
1721 vinfo.mode = MEM_MODE_MALLOC;
1722 vinfo.handle = 0;
1723 vinfo.size = inBuffer->size;
1724 vinfo.width = mComParams.resolution.width;
1725 vinfo.height = mComParams.resolution.height;
1726 vinfo.lumaStride = mComParams.resolution.width;
1727 vinfo.chromStride = mComParams.resolution.width;
1728 vinfo.format = VA_FOURCC_NV12;
1729 vinfo.s3dformat = 0xFFFFFFFF;
1730 }
1731
1732 /* Start mapping: if pvinfo is not NULL, there is enough info to map;
1733 * if extravalues is not NULL, the additional values need to be mapped with the same ValueInfo
1734 */
1735 if (pvinfo){
1736 //map according info, and add to surfacemap list
1737 map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1738 map->setValue(value);
1739 map->setValueInfo(*pvinfo);
1740 map->setAction(mVASurfaceMappingAction);
1741
1742 ret = map->doMapping();
1743 if (ret == ENCODE_SUCCESS) {
1744 LOG_V("surface mapping success, map value %i into surface %d\n", value, map->getVASurface());
1745 mSrcSurfaceMapList.push_back(map);
1746 } else {
1747 delete map;
1748 LOG_E("surface mapping failed, wrong info or meet serious error\n");
1749 return ret;
1750 }
1751
1752 *sid = map->getVASurface();
1753
1754 } else {
1755 //can't map due to no info
1756 LOG_E("surface mapping failed, missing information\n");
1757 return ENCODE_NO_REQUEST_DATA;
1758 }
1759
1760 if (extravalues) {
1761 //map more using same ValueInfo
1762 for(unsigned int i=0; i<extravalues_count; i++) {
1763 map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1764 map->setValue(extravalues[i]);
1765 map->setValueInfo(vinfo);
1766
1767 ret = map->doMapping();
1768 if (ret == ENCODE_SUCCESS) {
1769 LOG_V("surface mapping extravalue success, map value %i into surface %d\n", extravalues[i], map->getVASurface());
1770 mSrcSurfaceMapList.push_back(map);
1771 } else {
1772 delete map;
1773 map = NULL;
1774 LOG_E( "surface mapping extravalue failed, extravalue is %i\n", extravalues[i]);
1775 }
1776 }
1777 }
1778
1779 return ret;
1780 }
1781
1782 Encode_Status VideoEncoderBase::renderDynamicBitrate(EncodeTask* task) {
1783 VAStatus vaStatus = VA_STATUS_SUCCESS;
1784
1785 LOG_V( "Begin\n\n");
1786 // disabling bit stuffing and frame skipping applies to all rate control modes
1787
1788 VAEncMiscParameterBuffer *miscEncParamBuf;
1789 VAEncMiscParameterRateControl *bitrateControlParam;
1790 VABufferID miscParamBufferID;
1791
1792 vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
1793 VAEncMiscParameterBufferType,
1794 sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
1795 1, NULL,
1796 &miscParamBufferID);
1797
1798 CHECK_VA_STATUS_RETURN("vaCreateBuffer");
1799
1800 vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
1801 CHECK_VA_STATUS_RETURN("vaMapBuffer");
1802
1803 miscEncParamBuf->type = VAEncMiscParameterTypeRateControl;
1804 bitrateControlParam = (VAEncMiscParameterRateControl *)miscEncParamBuf->data;
1805
1806 bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate;
1807 bitrateControlParam->initial_qp = mComParams.rcParams.initQP;
1808 if(mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I)) {
1809 bitrateControlParam->min_qp = mComParams.rcParams.I_minQP;
1810 bitrateControlParam->max_qp = mComParams.rcParams.I_maxQP;
1811 mRenderBitRate = true;
1812 LOG_I("apply I min/max qp for IDR or I frame\n");
1813 } else {
1814 bitrateControlParam->min_qp = mComParams.rcParams.minQP;
1815 bitrateControlParam->max_qp = mComParams.rcParams.maxQP;
1816 mRenderBitRate = false;
1817 LOG_I("revert to original min/max qp after IDR or I frame\n");
1818 }
1819 bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage;
1820 bitrateControlParam->window_size = mComParams.rcParams.windowSize;
1821 bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip;
1822 bitrateControlParam->rc_flags.bits.disable_bit_stuffing = mComParams.rcParams.disableBitsStuffing;
1823 bitrateControlParam->basic_unit_size = 0;
1824
1825 LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second);
1826 LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp);
1827 LOG_I("min_qp = %d\n", bitrateControlParam->min_qp);
1828 LOG_I("max_qp = %d\n", bitrateControlParam->max_qp);
1829 LOG_I("target_percentage = %d\n", bitrateControlParam->target_percentage);
1830 LOG_I("window_size = %d\n", bitrateControlParam->window_size);
1831 LOG_I("disable_frame_skip = %d\n", bitrateControlParam->rc_flags.bits.disable_frame_skip);
1832 LOG_I("disable_bit_stuffing = %d\n", bitrateControlParam->rc_flags.bits.disable_bit_stuffing);
1833
1834 vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
1835 CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1836
1837 vaStatus = vaRenderPicture(mVADisplay, mVAContext,
1838 &miscParamBufferID, 1);
1839 CHECK_VA_STATUS_RETURN("vaRenderPicture");
1840
1841 return ENCODE_SUCCESS;
1842 }
1843
1844
1845 Encode_Status VideoEncoderBase::renderDynamicFrameRate() {
1846
1847 VAStatus vaStatus = VA_STATUS_SUCCESS;
1848
1849 if (mComParams.rcMode != RATE_CONTROL_VCM) {
1850
1851 LOG_W("Not in VCM mode, but renderDynamicFrameRate is called\n");
1852 return ENCODE_SUCCESS;
1853 }
1854
1855 VAEncMiscParameterBuffer *miscEncParamBuf;
1856 VAEncMiscParameterFrameRate *frameRateParam;
1857 VABufferID miscParamBufferID;
1858
1859 vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
1860 VAEncMiscParameterBufferType,
1861 sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
1862 1, NULL, &miscParamBufferID);
1863 CHECK_VA_STATUS_RETURN("vaCreateBuffer");
1864
1865 vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
1866 CHECK_VA_STATUS_RETURN("vaMapBuffer");
1867
1868 miscEncParamBuf->type = VAEncMiscParameterTypeFrameRate;
1869 frameRateParam = (VAEncMiscParameterFrameRate *)miscEncParamBuf->data;
1870 frameRateParam->framerate =
1871 (unsigned int) (mComParams.frameRate.frameRateNum + mComParams.frameRate.frameRateDenom/2)
1872 / mComParams.frameRate.frameRateDenom;
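// e.g. frameRateNum = 30000, frameRateDenom = 1001 gives (30000 + 500) / 1001 = 30 fps,
// i.e. the rational frame rate rounded to the nearest integer.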
1873
1874 vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
1875 CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1876
1877 vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
1878 CHECK_VA_STATUS_RETURN("vaRenderPicture");
1879
1880 LOG_I( "frame rate = %d\n", frameRateParam->framerate);
1881 return ENCODE_SUCCESS;
1882 }
1883
1884 Encode_Status VideoEncoderBase::renderHrd() {
1885
1886 VAStatus vaStatus = VA_STATUS_SUCCESS;
1887
1888 VAEncMiscParameterBuffer *miscEncParamBuf;
1889 VAEncMiscParameterHRD *hrdParam;
1890 VABufferID miscParamBufferID;
1891
1892 vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
1893 VAEncMiscParameterBufferType,
1894 sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
1895 1, NULL, &miscParamBufferID);
1896 CHECK_VA_STATUS_RETURN("vaCreateBuffer");
1897
1898 vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
1899 CHECK_VA_STATUS_RETURN("vaMapBuffer");
1900
1901 miscEncParamBuf->type = VAEncMiscParameterTypeHRD;
1902 hrdParam = (VAEncMiscParameterHRD *)miscEncParamBuf->data;
1903
1904 hrdParam->buffer_size = mHrdParam.bufferSize;
1905 hrdParam->initial_buffer_fullness = mHrdParam.initBufferFullness;
1906
1907 vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
1908 CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1909
1910 vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
1911 CHECK_VA_STATUS_RETURN("vaRenderPicture");
1912
1913 return ENCODE_SUCCESS;
1914 }
1915
1916 VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(intptr_t value) {
1917 android::List<VASurfaceMap *>::iterator node;
1918
1919 for(node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++)
1920 {
1921 if ((*node)->getValue() == value)
1922 return *node;
1923 else
1924 continue;
1925 }
1926
1927 return NULL;
1928 }
1929