1 /*
2 * Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 
17 #include <string.h>
18 #include "VideoEncoderLog.h"
19 #include "VideoEncoderBase.h"
20 #include "IntelMetadataBuffer.h"
21 #include <va/va_tpi.h>
22 #include <va/va_android.h>
23 
24 #define min(X,Y) (((X) < (Y)) ? (X) : (Y))
25 #define max(X,Y) (((X) > (Y)) ? (X) : (Y))
26 
27 VideoEncoderBase::VideoEncoderBase()
28     :mInitialized(true)
29     ,mStarted(false)
30     ,mVADisplay(NULL)
31     ,mVAContext(VA_INVALID_ID)
32     ,mVAConfig(VA_INVALID_ID)
33     ,mVAEntrypoint(VAEntrypointEncSlice)
34     ,mNewHeader(false)
35     ,mRenderMaxSliceSize(false)
36     ,mRenderQP (false)
37     ,mRenderAIR(false)
38     ,mRenderCIR(false)
39     ,mRenderFrameRate(false)
40     ,mRenderBitRate(false)
41     ,mRenderHrd(false)
42     ,mRenderMultiTemporal(false)
43     ,mForceKFrame(false)
44     ,mSeqParamBuf(0)
45     ,mPicParamBuf(0)
46     ,mSliceParamBuf(0)
47     ,mAutoRefSurfaces(NULL)
48     ,mRefSurface(VA_INVALID_SURFACE)
49     ,mRecSurface(VA_INVALID_SURFACE)
50     ,mFrameNum(0)
51     ,mCodedBufSize(0)
52     ,mAutoReference(false)
53     ,mAutoReferenceSurfaceNum(4)
54     ,mEncPackedHeaders(VA_ATTRIB_NOT_SUPPORTED)
55     ,mSliceSizeOverflow(false)
56     ,mCurOutputTask(NULL)
57     ,mOutCodedBuffer(0)
58     ,mOutCodedBufferPtr(NULL)
59     ,mCurSegment(NULL)
60     ,mOffsetInSeg(0)
61     ,mTotalSize(0)
62     ,mTotalSizeCopied(0)
63     ,mFrameSkipped(false)
64     ,mSupportedSurfaceMemType(0)
65     ,mVASurfaceMappingAction(0)
66 #ifdef INTEL_VIDEO_XPROC_SHARING
67     ,mSessionFlag(0)
68 #endif
69     {
70 
71     VAStatus vaStatus = VA_STATUS_SUCCESS;
72     // The native display handle can be any value here; the one below is used
73     // only for consistency, so it is not defined as a named constant
74     unsigned int display = 0x18C34078;
75     int majorVersion = -1;
76     int minorVersion = -1;
77 
78     setDefaultParams();
79 
80     LOG_V("vaGetDisplay \n");
81     mVADisplay = vaGetDisplay(&display);
82     if (mVADisplay == NULL) {
83         LOG_E("vaGetDisplay failed.");
84     }
85 
86     vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
87     LOG_V("vaInitialize \n");
88     if (vaStatus != VA_STATUS_SUCCESS) {
89         LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
90         mInitialized = false;
91     }
92 }
93 
94 VideoEncoderBase::~VideoEncoderBase() {
95 
96     VAStatus vaStatus = VA_STATUS_SUCCESS;
97 
98     stop();
99 
100     vaStatus = vaTerminate(mVADisplay);
101     LOG_V( "vaTerminate\n");
102     if (vaStatus != VA_STATUS_SUCCESS) {
103         LOG_W( "Failed vaTerminate, vaStatus = %d\n", vaStatus);
104     } else {
105         mVADisplay = NULL;
106     }
107 
108 #ifdef INTEL_VIDEO_XPROC_SHARING
109     IntelMetadataBuffer::ClearContext(mSessionFlag, false);
110 #endif
111 }
112 
113 Encode_Status VideoEncoderBase::start() {
114 
115     Encode_Status ret = ENCODE_SUCCESS;
116     VAStatus vaStatus = VA_STATUS_SUCCESS;
117 
118     if (!mInitialized) {
119         LOG_E("Encoder initialization failed, cannot start\n");
120         return ENCODE_DRIVER_FAIL;
121     }
122 
123     if (mStarted) {
124         LOG_V("Encoder has been started\n");
125         return ENCODE_ALREADY_INIT;
126     }
127 
128     if (mComParams.rawFormat != RAW_FORMAT_NV12)
129 #ifdef IMG_GFX
130         mVASurfaceMappingAction |= MAP_ACTION_COLORCONVERT;
131 #else
132         return ENCODE_NOT_SUPPORTED;
133 #endif
134 
135     if (mComParams.resolution.width > 2048 || mComParams.resolution.height > 2048){
136         LOG_E("Unsupported resolution width %d, height %d\n",
137             mComParams.resolution.width, mComParams.resolution.height);
138         return ENCODE_NOT_SUPPORTED;
139     }
140     queryAutoReferenceConfig(mComParams.profile);
141 
142     VAConfigAttrib vaAttrib_tmp[6],vaAttrib[VAConfigAttribTypeMax];
143     int vaAttribNumber = 0;
144     vaAttrib_tmp[0].type = VAConfigAttribRTFormat;
145     vaAttrib_tmp[1].type = VAConfigAttribRateControl;
146     vaAttrib_tmp[2].type = VAConfigAttribEncAutoReference;
147     vaAttrib_tmp[3].type = VAConfigAttribEncPackedHeaders;
148     vaAttrib_tmp[4].type = VAConfigAttribEncMaxRefFrames;
149     vaAttrib_tmp[5].type = VAConfigAttribEncRateControlExt;
150 
151     vaStatus = vaGetConfigAttributes(mVADisplay, mComParams.profile,
152             VAEntrypointEncSlice, &vaAttrib_tmp[0], 6);
153     CHECK_VA_STATUS_RETURN("vaGetConfigAttributes");
154 
155     if((vaAttrib_tmp[0].value & VA_RT_FORMAT_YUV420) != 0)
156     {
157         vaAttrib[vaAttribNumber].type = VAConfigAttribRTFormat;
158         vaAttrib[vaAttribNumber].value = VA_RT_FORMAT_YUV420;
159         vaAttribNumber++;
160     }
161 
162     vaAttrib[vaAttribNumber].type = VAConfigAttribRateControl;
163     vaAttrib[vaAttribNumber].value = mComParams.rcMode;
164     vaAttribNumber++;
165 
166     vaAttrib[vaAttribNumber].type = VAConfigAttribEncAutoReference;
167     vaAttrib[vaAttribNumber].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED;
168     vaAttribNumber++;
169 
170     if(vaAttrib_tmp[3].value != VA_ATTRIB_NOT_SUPPORTED)
171     {
172         vaAttrib[vaAttribNumber].type = VAConfigAttribEncPackedHeaders;
173         vaAttrib[vaAttribNumber].value = vaAttrib_tmp[3].value;
174         vaAttribNumber++;
175         mEncPackedHeaders = vaAttrib_tmp[3].value;
176     }
177 
178     if(vaAttrib_tmp[4].value != VA_ATTRIB_NOT_SUPPORTED)
179     {
180         vaAttrib[vaAttribNumber].type = VAConfigAttribEncMaxRefFrames;
181         vaAttrib[vaAttribNumber].value = vaAttrib_tmp[4].value;
182         vaAttribNumber++;
183         mEncMaxRefFrames = vaAttrib_tmp[4].value;
184     }
185 
186     if(vaAttrib_tmp[5].value != VA_ATTRIB_NOT_SUPPORTED)
187     {
188         vaAttrib[vaAttribNumber].type = VAConfigAttribEncRateControlExt;
189         vaAttrib[vaAttribNumber].value = mComParams.numberOfLayer;
190         vaAttribNumber++;
191     }
192 
193     LOG_V( "======VA Configuration======\n");
194     LOG_V( "profile = %d\n", mComParams.profile);
195     LOG_V( "mVAEntrypoint = %d\n", mVAEntrypoint);
196     LOG_V( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
197     LOG_V( "vaAttrib[1].type = %d\n", vaAttrib[1].type);
198     LOG_V( "vaAttrib[2].type = %d\n", vaAttrib[2].type);
199     LOG_V( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value);
200     LOG_V( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value);
201     LOG_V( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value);
202     LOG_V( "vaAttribNumber is %d\n", vaAttribNumber);
203     LOG_V( "mComParams.numberOfLayer is %d\n", mComParams.numberOfLayer);
204 
205     LOG_V( "vaCreateConfig\n");
206 
207     vaStatus = vaCreateConfig(
208             mVADisplay, mComParams.profile, mVAEntrypoint,
209             &vaAttrib[0], vaAttribNumber, &(mVAConfig));
210 //            &vaAttrib[0], 3, &(mVAConfig));  //uncomment this after psb_video supports
211     CHECK_VA_STATUS_RETURN("vaCreateConfig");
212 
213     querySupportedSurfaceMemTypes();
214 
215     if (mComParams.rcMode == VA_RC_VCM) {
216         // Following three features are only enabled in VCM mode
217         mRenderMaxSliceSize = true;
218         mRenderAIR = true;
219         mRenderBitRate = true;
220     }
221 
222     LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n");
223 
224     uint32_t stride_aligned, height_aligned;
225     if(mAutoReference == false){
226         stride_aligned = (mComParams.resolution.width + 15) & ~15;
227         height_aligned = (mComParams.resolution.height + 15) & ~15;
228     }else{
229         // this alignment is used for AVC; for VP8 encoding, the driver handles the alignment
230         if(mComParams.profile == VAProfileVP8Version0_3)
231         {
232             stride_aligned = mComParams.resolution.width;
233             height_aligned = mComParams.resolution.height;
234             mVASurfaceMappingAction |= MAP_ACTION_COPY;
235         }
236         else
237         {
238             stride_aligned = (mComParams.resolution.width + 63) & ~63;  //on Merr, stride must be 64 aligned.
239             height_aligned = (mComParams.resolution.height + 31) & ~31;
240             mVASurfaceMappingAction |= MAP_ACTION_ALIGN64;
241         }
242     }
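    // Note: (x + N - 1) & ~(N - 1) rounds x up to the next multiple of N when N is
    // a power of two. For example, assuming a 1280x720 stream taking the
    // auto-reference AVC branch above:
    //   stride_aligned = (1280 + 63) & ~63 = 1280
    //   height_aligned = (720 + 31) & ~31  = 736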
243 
244     if(mAutoReference == false){
245         mRefSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
246         mRecSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
247 
248     }else {
249         mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum];
250         for(uint32_t i = 0; i < mAutoReferenceSurfaceNum; i ++)
251             mAutoRefSurfaces[i] = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
252     }
253     CHECK_VA_STATUS_RETURN("vaCreateSurfaces");
254 
255     //Prepare all Surfaces to be added into Context
256     uint32_t contextSurfaceCnt;
257     if(mAutoReference == false )
258         contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
259     else
260         contextSurfaceCnt = mAutoReferenceSurfaceNum + mSrcSurfaceMapList.size();
261 
262     VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
263     int32_t index = -1;
264     android::List<VASurfaceMap *>::iterator map_node;
265 
266     for(map_node = mSrcSurfaceMapList.begin(); map_node !=  mSrcSurfaceMapList.end(); map_node++)
267     {
268         contextSurfaces[++index] = (*map_node)->getVASurface();
269         (*map_node)->setTracked();
270     }
271 
272     if(mAutoReference == false){
273         contextSurfaces[++index] = mRefSurface;
274         contextSurfaces[++index] = mRecSurface;
275     } else {
276         for (uint32_t i=0; i < mAutoReferenceSurfaceNum; i++)
277             contextSurfaces[++index] = mAutoRefSurfaces[i];
278     }
279 
280     //Initialize and save the VA context ID
281     LOG_V( "vaCreateContext\n");
282     vaStatus = vaCreateContext(mVADisplay, mVAConfig,
283 #ifdef IMG_GFX
284             mComParams.resolution.width,
285             mComParams.resolution.height,
286 #else
287             stride_aligned,
288             height_aligned,
289 #endif
290             VA_PROGRESSIVE, contextSurfaces, contextSurfaceCnt,
291             &(mVAContext));
292     CHECK_VA_STATUS_RETURN("vaCreateContext");
293 
294     delete [] contextSurfaces;
295 
296     LOG_I("Successfully created libva context, width %d, height %d\n",
297           mComParams.resolution.width, mComParams.resolution.height);
298 
299     uint32_t maxSize = 0;
300     ret = getMaxOutSize(&maxSize);
301     CHECK_ENCODE_STATUS_RETURN("getMaxOutSize");
302 
303     // Create CodedBuffer for output
304     VABufferID VACodedBuffer;
305 
306     for(uint32_t i = 0; i < mComParams.codedBufNum; i++) {
307         vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
308                 VAEncCodedBufferType,
309                 mCodedBufSize,
310                 1, NULL,
311                 &VACodedBuffer);
312         CHECK_VA_STATUS_RETURN("vaCreateBuffer::VAEncCodedBufferType");
313 
314         mVACodedBufferList.push_back(VACodedBuffer);
315     }
316 
317     if (ret == ENCODE_SUCCESS)
318         mStarted = true;
319 
320     LOG_V( "end\n");
321     return ret;
322 }
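
// A minimal usage sketch, assuming a caller that already holds a concrete encoder
// instance; the resolution and rate-control values below are assumptions for
// illustration only:
//
//     VideoParamsCommon params;                  // type/size assumed preset by its ctor
//     encoder->getParameters(&params);
//     params.resolution.width  = 1280;
//     params.resolution.height = 720;
//     params.rcMode = RATE_CONTROL_CBR;          // assumed rate-control choice
//     encoder->setParameters(&params);           // must be called before start()
//     encoder->start();
//     // ... encode()/getOutput() loop, then:
//     encoder->stop();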
323 
324 Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout) {
325 
326     Encode_Status ret = ENCODE_SUCCESS;
327     VAStatus vaStatus = VA_STATUS_SUCCESS;
328 
329     if (!mStarted) {
330         LOG_E("Encoder has not been initialized yet\n");
331         return ENCODE_NOT_INIT;
332     }
333 
334     CHECK_NULL_RETURN_IFFAIL(inBuffer);
335 
336     //======Prepare all resources the encoder needs======
337 
338     //Prepare encode vaSurface
339     VASurfaceID sid = VA_INVALID_SURFACE;
340     ret = manageSrcSurface(inBuffer, &sid);
341     CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
342 
343     //Prepare CodedBuffer
344     mCodedBuffer_Lock.lock();
345     if(mVACodedBufferList.empty()){
346         if(timeout == FUNC_BLOCK)
347             mCodedBuffer_Cond.wait(mCodedBuffer_Lock);
348         else if (timeout > 0) {
349             if(NO_ERROR != mCodedBuffer_Cond.waitRelative(mCodedBuffer_Lock, 1000000*timeout)){
350                 mCodedBuffer_Lock.unlock();
351                 LOG_E("Timed out waiting for a coded buffer.\n");
352                 return ENCODE_DEVICE_BUSY;
353             }
354         }
355         else {//Nonblock
356             mCodedBuffer_Lock.unlock();
357             LOG_E("Coded buffer is not ready now.\n");
358             return ENCODE_DEVICE_BUSY;
359         }
360     }
361 
362     if(mVACodedBufferList.empty()){
363         mCodedBuffer_Lock.unlock();
364         return ENCODE_DEVICE_BUSY;
365     }
366     VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin());
367     mVACodedBufferList.erase(mVACodedBufferList.begin());
368     mCodedBuffer_Lock.unlock();
369 
370     LOG_V("CodedBuffer ID 0x%08x\n", coded_buf);
371 
372     //All resources are ready, start to assemble EncodeTask
373     EncodeTask* task = new EncodeTask();
374 
375     task->completed = false;
376     task->enc_surface = sid;
377     task->coded_buffer = coded_buf;
378     task->timestamp = inBuffer->timeStamp;
379     task->priv = inBuffer->priv;
380 
381     //Setup frame info, like flag ( SYNCFRAME), frame number, type etc
382     task->type = inBuffer->type;
383     task->flag = inBuffer->flag;
384     PrepareFrameInfo(task);
385 
386     if(mAutoReference == false){
387         //Setup ref /rec frames
388         //TODO: B frame support, temporary use same logic
389         switch (inBuffer->type) {
390             case FTYPE_UNKNOWN:
391             case FTYPE_IDR:
392             case FTYPE_I:
393             case FTYPE_P:
394             {
395                 if(!mFrameSkipped) {
396                     VASurfaceID tmpSurface = mRecSurface;
397                     mRecSurface = mRefSurface;
398                     mRefSurface = tmpSurface;
399                 }
400 
401                 task->ref_surface = mRefSurface;
402                 task->rec_surface = mRecSurface;
403 
404                 break;
405             }
406             case FTYPE_B:
407             default:
408                 LOG_V("Something is wrong; B frames are not supported in this mode\n");
409                 ret = ENCODE_NOT_SUPPORTED;
410                 goto CLEAN_UP;
411         }
412     }else {
413         task->ref_surface = VA_INVALID_SURFACE;
414         task->rec_surface = VA_INVALID_SURFACE;
415     }
416     //======Start Encoding, add task to list======
417     LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
418 
419     vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface);
420     CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture");
421 
422     ret = sendEncodeCommand(task);
423     CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand");
424 
425     vaStatus = vaEndPicture(mVADisplay, mVAContext);
426     CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture");
427 
428     LOG_V("Add Task %p into Encode Task list\n", task);
429     mEncodeTask_Lock.lock();
430     mEncodeTaskList.push_back(task);
431     mEncodeTask_Cond.signal();
432     mEncodeTask_Lock.unlock();
433 
434     mFrameNum ++;
435 
436     LOG_V("encode return Success\n");
437 
438     return ENCODE_SUCCESS;
439 
440 CLEAN_UP:
441 
442     delete task;
443     mCodedBuffer_Lock.lock();
444     mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used
445     mCodedBuffer_Cond.signal();
446     mCodedBuffer_Lock.unlock();
447 
448     LOG_V("encode return error=%x\n", ret);
449 
450     return ret;
451 }
452 
453 /*
454   1. First check whether a task is already outputting data; if so, continue with it, otherwise try to get one from the list.
455   2. For the three modes (block / non-block / block with timeout): if the task is not completed, sync or query the surface,
456      and once it is ready start outputting data.
457   3. Use mCurOutputTask to record the task getOutput() is working on, so it is not pushed back again when a failure occurs
458      in non-block or block-with-timeout mode.
459   4. Once all output data has been returned, mCurOutputTask is reset to NULL.
460 */
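// A caller-side sketch of the three timeout modes described above; outBuf setup
// (data pointer, buffer size, format) is assumed to be done by the application:
//
//     encoder->getOutput(&outBuf, FUNC_BLOCK);   // block until a frame is ready
//     encoder->getOutput(&outBuf, 40);           // block at most 40 ms, else
//                                                //   ENCODE_NO_REQUEST_DATA
//     encoder->getOutput(&outBuf, 0);            // non-block: return immediately
//                                                //   if no task is queued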
461 Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) {
462 
463     Encode_Status ret = ENCODE_SUCCESS;
464     VAStatus vaStatus = VA_STATUS_SUCCESS;
465     bool useLocalBuffer = false;
466 
467     CHECK_NULL_RETURN_IFFAIL(outBuffer);
468 
469     if (mCurOutputTask == NULL) {
470         mEncodeTask_Lock.lock();
471         if(mEncodeTaskList.empty()) {
472             LOG_V("getOutput CurrentTask is NULL\n");
473             if(timeout == FUNC_BLOCK) {
474                 LOG_V("waiting for task....\n");
475                 mEncodeTask_Cond.wait(mEncodeTask_Lock);
476             } else if (timeout > 0) {
477                 LOG_V("waiting for task in %i ms....\n", timeout);
478                 if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) {
479                     mEncodeTask_Lock.unlock();
480                     LOG_E("Timed out waiting for an encode task.\n");
481                     return ENCODE_NO_REQUEST_DATA;
482                 }
483             } else {//Nonblock
484                 mEncodeTask_Lock.unlock();
485                 return ENCODE_NO_REQUEST_DATA;
486             }
487         }
488 
489         if(mEncodeTaskList.empty()){
490             mEncodeTask_Lock.unlock();
491             return ENCODE_DATA_NOT_READY;
492         }
493         mCurOutputTask =  *(mEncodeTaskList.begin());
494         mEncodeTaskList.erase(mEncodeTaskList.begin());
495         mEncodeTask_Lock.unlock();
496     }
497 
498     //sync/query/wait task if not completed
499     if (mCurOutputTask->completed == false) {
500         VASurfaceStatus vaSurfaceStatus;
501 
502         if (timeout == FUNC_BLOCK) {
503             //block mode, direct sync surface to output data
504 
505             mOutCodedBuffer = mCurOutputTask->coded_buffer;
506 
507             // Check frame skip
508             // Encoding must be completed before calling vaQuerySurfaceStatus below
509             // so that the skip-frame flag for the current frame is correct;
510             // this is a requirement of the video driver.
511             // vaSyncSurface syncs the wrong frame when the same surface is rendered multiple times,
512             // so use vaMapBuffer instead
513             LOG_V ("block mode, vaMapBuffer ID = 0x%08x\n", mOutCodedBuffer);
514             if (mOutCodedBufferPtr == NULL) {
515                 vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
516                 CHECK_VA_STATUS_GOTO_CLEANUP("vaMapBuffer");
517                 CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
518             }
519 
520             vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface,  &vaSurfaceStatus);
521             CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
522             mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
523 
524             mCurOutputTask->completed = true;
525 
526         } else {
527             //For both block with timeout and non-block mode, query surface, if ready, output data
528             LOG_V ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface);
529 
530             vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface,  &vaSurfaceStatus);
531             if (vaSurfaceStatus & VASurfaceReady) {
532                 mOutCodedBuffer = mCurOutputTask->coded_buffer;
533                 mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
534                 mCurOutputTask->completed = true;
535                 // is another vaSyncSurface call needed here?
536 
537             } else { // encoding not complete yet; keep all context and return directly
538                 return ENCODE_DATA_NOT_READY;
539             }
540 
541         }
542 
543     }
544 
545     //start to output data
546     ret = prepareForOutput(outBuffer, &useLocalBuffer);
547     CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
548 
549     //copy all flags to outBuffer
550     outBuffer->offset = 0;
551     outBuffer->flag = mCurOutputTask->flag;
552     outBuffer->type = mCurOutputTask->type;
553     outBuffer->timeStamp = mCurOutputTask->timestamp;
554     outBuffer->priv = mCurOutputTask->priv;
555 
556     if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) {
557         ret = outputAllData(outBuffer);
558         CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
559     }else {
560         ret = getExtFormatOutput(outBuffer);
561         CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput");
562     }
563 
564     LOG_V("out size for this getOutput call = %d\n", outBuffer->dataSize);
565 
566     ret = cleanupForOutput();
567     CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput");
568 
569     LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped);
570 
571     return ENCODE_SUCCESS;
572 
573 CLEAN_UP:
574 
575     if (outBuffer->data && (useLocalBuffer == true)) {
576         delete[] outBuffer->data;
577         outBuffer->data = NULL;
578         useLocalBuffer = false;
579     }
580 
581     if (mOutCodedBufferPtr != NULL) {
582         vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
583         mOutCodedBufferPtr = NULL;
584         mCurSegment = NULL;
585     }
586 
587     delete mCurOutputTask;
588     mCurOutputTask = NULL;
589     mCodedBuffer_Lock.lock();
590     mVACodedBufferList.push_back(mOutCodedBuffer);
591     mCodedBuffer_Cond.signal();
592     mCodedBuffer_Lock.unlock();
593 
594     LOG_V("getOutput return error=%x\n", ret);
595     return ret;
596 }
597 
598 void VideoEncoderBase::flush() {
599 
600     LOG_V( "Begin\n");
601 
602     // reset the properties
603     mFrameNum = 0;
604 
605     LOG_V( "end\n");
606 }
607 
608 Encode_Status VideoEncoderBase::stop() {
609 
610     VAStatus vaStatus = VA_STATUS_SUCCESS;
611     Encode_Status ret = ENCODE_SUCCESS;
612 
613     LOG_V( "Begin\n");
614 
615     // It is possible that above pointers have been allocated
616     // before we set mStarted to true
617     if (!mStarted) {
618         LOG_V("Encoder has been stopped\n");
619         return ENCODE_SUCCESS;
620     }
621     if (mAutoRefSurfaces) {
622         delete[] mAutoRefSurfaces;
623         mAutoRefSurfaces = NULL;
624     }
625 
626     mCodedBuffer_Lock.lock();
627     mVACodedBufferList.clear();
628     mCodedBuffer_Lock.unlock();
629     mCodedBuffer_Cond.broadcast();
630 
631     //Delete all uncompleted tasks
632     mEncodeTask_Lock.lock();
633     while(! mEncodeTaskList.empty())
634     {
635         delete *mEncodeTaskList.begin();
636         mEncodeTaskList.erase(mEncodeTaskList.begin());
637     }
638     mEncodeTask_Lock.unlock();
639     mEncodeTask_Cond.broadcast();
640 
641     //Release Src Surface Buffer Map, destroy surface manually since it is not added into context
642     LOG_V( "Release Src Surface Map\n");
643     while(! mSrcSurfaceMapList.empty())
644     {
645         delete (*mSrcSurfaceMapList.begin());
646         mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
647     }
648 
649     LOG_V( "vaDestroyContext\n");
650     if (mVAContext != VA_INVALID_ID) {
651         vaStatus = vaDestroyContext(mVADisplay, mVAContext);
652         CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
653     }
654 
655     LOG_V( "vaDestroyConfig\n");
656     if (mVAConfig != VA_INVALID_ID) {
657         vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
658         CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
659     }
660 
661 CLEAN_UP:
662 
663     mStarted = false;
664     mSliceSizeOverflow = false;
665     mCurOutputTask = NULL;
666     mOutCodedBuffer = 0;
667     mCurSegment = NULL;
668     mOffsetInSeg = 0;
669     mTotalSize = 0;
670     mTotalSizeCopied = 0;
671     mFrameSkipped = false;
672     mSupportedSurfaceMemType = 0;
673 
674     LOG_V( "end\n");
675     return ret;
676 }
677 
678 Encode_Status VideoEncoderBase::prepareForOutput(
679         VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) {
680 
681     VAStatus vaStatus = VA_STATUS_SUCCESS;
682     VACodedBufferSegment *vaCodedSeg = NULL;
683     uint32_t status = 0;
684 
685     LOG_V( "begin\n");
686     // Won't check parameters here as the caller already checked them
687     // A NULL mCurSegment means this is the first call after a frame finished encoding
688     if (mCurSegment == NULL) {
689         if (mOutCodedBufferPtr == NULL) {
690             vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
691             CHECK_VA_STATUS_RETURN("vaMapBuffer");
692             CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
693         }
694 
695         LOG_V("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer);
696 
697         mTotalSize = 0;
698         mOffsetInSeg = 0;
699         mTotalSizeCopied = 0;
700         vaCodedSeg = (VACodedBufferSegment *)mOutCodedBufferPtr;
701         mCurSegment = (VACodedBufferSegment *)mOutCodedBufferPtr;
702 
703         while (1) {
704 
705             mTotalSize += vaCodedSeg->size;
706             status = vaCodedSeg->status;
707 #ifndef IMG_GFX
708             uint8_t *pTemp;
709             uint32_t ii;
710             pTemp = (uint8_t*)vaCodedSeg->buf;
711             for(ii = 0; ii < 16;){
712                 if (*(pTemp + ii) == 0xFF)
713                     ii++;
714                 else
715                     break;
716             }
717             if (ii > 0) {
718                 mOffsetInSeg = ii;
719             }
720 #endif
721             if (!mSliceSizeOverflow) {
722                 mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
723             }
724 
725             if (vaCodedSeg->next == NULL)
726                 break;
727 
728             vaCodedSeg = (VACodedBufferSegment *)vaCodedSeg->next;
729         }
730     }
731 
732     // Two buffer allocation modes are supported:
733     // either the application allocates the buffer and passes it to the encoder,
734     // or the encoder allocates the memory itself
735 
736     // outBuffer->data == NULL means the app did not allocate the buffer, so the encoder allocates it here
737     if (outBuffer->data == NULL) {
738         *useLocalBuffer = true;
739         outBuffer->data = new  uint8_t[mTotalSize - mTotalSizeCopied + 100];
740         if (outBuffer->data == NULL) {
741             LOG_E( "outBuffer->data == NULL\n");
742             return ENCODE_NO_MEMORY;
743         }
744         outBuffer->bufferSize = mTotalSize + 100;
745         outBuffer->dataSize = 0;
746     }
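    // Illustrative sketch of the two modes from the caller's point of view
    // (myBuffer/myBufferSize are hypothetical application variables):
    //
    //     outBuf.data = myBuffer;            // app-allocated: data is copied into it
    //     outBuf.bufferSize = myBufferSize;
    //     // or
    //     outBuf.data = NULL;                // encoder-allocated: a local buffer of
    //                                        //   mTotalSize + 100 bytes is created above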
747 
748     // Clear all flag for every call
749     outBuffer->flag = 0;
750     if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
751 
752     if (!mCurSegment)
753         return ENCODE_FAIL;
754 
755     if (mCurSegment->size < mOffsetInSeg) {
756         LOG_E("mCurSegment->size < mOffsetInSeg\n");
757         return ENCODE_FAIL;
758     }
759 
760     // Make sure we have data in current segment
761     if (mCurSegment->size == mOffsetInSeg) {
762         if (mCurSegment->next != NULL) {
763             mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
764             mOffsetInSeg = 0;
765         } else {
766             LOG_V("No more data available\n");
767             outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
768             outBuffer->dataSize = 0;
769             mCurSegment = NULL;
770             return ENCODE_NO_REQUEST_DATA;
771         }
772     }
773 
774     LOG_V( "end\n");
775     return ENCODE_SUCCESS;
776 }
777 
778 Encode_Status VideoEncoderBase::cleanupForOutput() {
779 
780     VAStatus vaStatus = VA_STATUS_SUCCESS;
781 
782     //mCurSegment is NULL means all data has been copied out
783     if (mCurSegment == NULL && mOutCodedBufferPtr) {
784         vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
785         CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
786         mOutCodedBufferPtr = NULL;
787         mTotalSize = 0;
788         mOffsetInSeg = 0;
789         mTotalSizeCopied = 0;
790 
791         delete mCurOutputTask;
792         mCurOutputTask = NULL;
793         mCodedBuffer_Lock.lock();
794         mVACodedBufferList.push_back(mOutCodedBuffer);
795         mCodedBuffer_Cond.signal();
796         mCodedBuffer_Lock.unlock();
797 
798         LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer);
799     }
800     return ENCODE_SUCCESS;
801 }
802 
803 Encode_Status VideoEncoderBase::queryProfileLevelConfig(VADisplay dpy, VAProfile profile) {
804 
805     VAStatus vaStatus = VA_STATUS_SUCCESS;
806     VAEntrypoint entryPtr[8];
807     int i, entryPtrNum;
808 
809     if(profile ==  VAProfileH264Main) //need to be fixed
810         return ENCODE_NOT_SUPPORTED;
811 
812     vaStatus = vaQueryConfigEntrypoints(dpy, profile, entryPtr, &entryPtrNum);
813     CHECK_VA_STATUS_RETURN("vaQueryConfigEntrypoints");
814 
815     for(i=0; i<entryPtrNum; i++){
816         if(entryPtr[i] == VAEntrypointEncSlice)
817             return ENCODE_SUCCESS;
818     }
819 
820     return ENCODE_NOT_SUPPORTED;
821 }
822 
823 Encode_Status VideoEncoderBase::queryAutoReferenceConfig(VAProfile profile) {
824 
825     VAStatus vaStatus = VA_STATUS_SUCCESS;
826     VAConfigAttrib attrib_list;
827     attrib_list.type = VAConfigAttribEncAutoReference;
828     attrib_list.value = VA_ATTRIB_NOT_SUPPORTED;
829 
830     vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointEncSlice, &attrib_list, 1);
831     CHECK_VA_STATUS_RETURN("vaGetConfigAttributes");
832 
833     if(attrib_list.value == VA_ATTRIB_NOT_SUPPORTED )
834         mAutoReference = false;
835     else
836         mAutoReference = true;
837 
838     return ENCODE_SUCCESS;
839 }
840 
841 Encode_Status VideoEncoderBase::querySupportedSurfaceMemTypes() {
842 
843     VAStatus vaStatus = VA_STATUS_SUCCESS;
844 
845     unsigned int num = 0;
846 
847     VASurfaceAttrib* attribs = NULL;
848 
849     // the first call retrieves only the number of attributes
850     vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
851     CHECK_VA_STATUS_RETURN("vaQuerySurfaceAttributes");
852 
853     if (num == 0)
854         return ENCODE_SUCCESS;
855 
856     attribs = new VASurfaceAttrib[num];
857 
858     vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
859     CHECK_VA_STATUS_RETURN("vaQuerySurfaceAttributes");
860 
861     for(uint32_t i = 0; i < num; i++) {
862         if (attribs[i].type == VASurfaceAttribMemoryType) {
863             mSupportedSurfaceMemType = attribs[i].value.value.i;
864             break;
865         }
868     }
869 
870     delete[] attribs;
871 
872     return ENCODE_SUCCESS;
873 }
874 
875 Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
876 
877     // Data size copied out in this single call
878     uint32_t sizeCopiedHere = 0;
879     uint32_t sizeToBeCopied = 0;
880 
881     CHECK_NULL_RETURN_IFFAIL(outBuffer->data);
882 
883     while (1) {
884 
885         LOG_V("mCurSegment->size = %d, mOffsetInSeg = %d\n", mCurSegment->size, mOffsetInSeg);
886         LOG_V("outBuffer->bufferSize = %d, sizeCopiedHere = %d, mTotalSizeCopied = %d\n",
887               outBuffer->bufferSize, sizeCopiedHere, mTotalSizeCopied);
888 
889         if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
890             LOG_E("mCurSegment->size < mOffsetInSeg  || outBuffer->bufferSize < sizeCopiedHere\n");
891             return ENCODE_FAIL;
892         }
893 
894         if ((mCurSegment->size - mOffsetInSeg) <= outBuffer->bufferSize - sizeCopiedHere) {
895             sizeToBeCopied = mCurSegment->size - mOffsetInSeg;
896             memcpy(outBuffer->data + sizeCopiedHere,
897                    (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
898             sizeCopiedHere += sizeToBeCopied;
899             mTotalSizeCopied += sizeToBeCopied;
900             mOffsetInSeg = 0;
901         } else {
902             sizeToBeCopied = outBuffer->bufferSize - sizeCopiedHere;
903             memcpy(outBuffer->data + sizeCopiedHere,
904                    (uint8_t *)mCurSegment->buf + mOffsetInSeg, outBuffer->bufferSize - sizeCopiedHere);
905             mTotalSizeCopied += sizeToBeCopied;
906             mOffsetInSeg += sizeToBeCopied;
907             outBuffer->dataSize = outBuffer->bufferSize;
908             outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
909             outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
910             return ENCODE_BUFFER_TOO_SMALL;
911         }
912 
913         if (mCurSegment->next == NULL) {
914             outBuffer->dataSize = sizeCopiedHere;
915             outBuffer->remainingSize = 0;
916             outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
917             mCurSegment = NULL;
918             return ENCODE_SUCCESS;
919         }
920 
921         mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
922         mOffsetInSeg = 0;
923     }
924 }
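
// A caller-side sketch of draining one encoded frame when the application buffer is
// smaller than the coded data, using the PARTIALFRAME flag and remainingSize reported
// above (outBuf setup and the consume() helper are assumptions for illustration):
//
//     Encode_Status s;
//     do {
//         s = encoder->getOutput(&outBuf, FUNC_BLOCK);
//         if (s == ENCODE_SUCCESS || s == ENCODE_BUFFER_TOO_SMALL)
//             consume(outBuf.data, outBuf.dataSize);
//     } while (s == ENCODE_BUFFER_TOO_SMALL);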
925 
926 void VideoEncoderBase::setDefaultParams() {
927 
928     // Set default value for input parameters
929     mComParams.profile = VAProfileH264Baseline;
930     mComParams.level = 41;
931     mComParams.rawFormat = RAW_FORMAT_NV12;
932     mComParams.frameRate.frameRateNum = 30;
933     mComParams.frameRate.frameRateDenom = 1;
934     mComParams.resolution.width = 0;
935     mComParams.resolution.height = 0;
936     mComParams.intraPeriod = 30;
937     mComParams.rcMode = RATE_CONTROL_NONE;
938     mComParams.rcParams.initQP = 15;
939     mComParams.rcParams.minQP = 0;
940     mComParams.rcParams.maxQP = 0;
941     mComParams.rcParams.I_minQP = 0;
942     mComParams.rcParams.I_maxQP = 0;
943     mComParams.rcParams.bitRate = 640000;
944     mComParams.rcParams.targetPercentage= 0;
945     mComParams.rcParams.windowSize = 0;
946     mComParams.rcParams.disableFrameSkip = 0;
947     mComParams.rcParams.disableBitsStuffing = 1;
948     mComParams.rcParams.enableIntraFrameQPControl = 0;
949     mComParams.rcParams.temporalFrameRate = 0;
950     mComParams.rcParams.temporalID = 0;
951     mComParams.cyclicFrameInterval = 30;
952     mComParams.refreshType = VIDEO_ENC_NONIR;
953     mComParams.airParams.airMBs = 0;
954     mComParams.airParams.airThreshold = 0;
955     mComParams.airParams.airAuto = 1;
956     mComParams.disableDeblocking = 2;
957     mComParams.syncEncMode = false;
958     mComParams.codedBufNum = 2;
959     mComParams.numberOfLayer = 1;
960     mComParams.nPeriodicity = 0;
961     memset(mComParams.nLayerID,0,32*sizeof(uint32_t));
962 
963     mHrdParam.bufferSize = 0;
964     mHrdParam.initBufferFullness = 0;
965 
966     mStoreMetaDataInBuffers.isEnabled = false;
967 }
968 
969 Encode_Status VideoEncoderBase::setParameters(
970         VideoParamConfigSet *videoEncParams) {
971 
972     Encode_Status ret = ENCODE_SUCCESS;
973     CHECK_NULL_RETURN_IFFAIL(videoEncParams);
974     LOG_V("Config type = %x\n", (int)videoEncParams->type);
975 
976     if (mStarted) {
977         LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
978         return ENCODE_ALREADY_INIT;
979     }
980 
981     switch (videoEncParams->type) {
982         case VideoParamsTypeCommon: {
983 
984             VideoParamsCommon *paramsCommon =
985                     reinterpret_cast <VideoParamsCommon *> (videoEncParams);
986             if (paramsCommon->size != sizeof (VideoParamsCommon)) {
987                 return ENCODE_INVALID_PARAMS;
988             }
989             if(paramsCommon->codedBufNum < 2)
990                 paramsCommon->codedBufNum =2;
991             mComParams = *paramsCommon;
992             break;
993         }
994 
995         case VideoParamsTypeUpSteamBuffer: {
996 
997             VideoParamsUpstreamBuffer *upStreamBuffer =
998                     reinterpret_cast <VideoParamsUpstreamBuffer *> (videoEncParams);
999 
1000             if (upStreamBuffer->size != sizeof (VideoParamsUpstreamBuffer)) {
1001                 return ENCODE_INVALID_PARAMS;
1002             }
1003 
1004             ret = setUpstreamBuffer(upStreamBuffer);
1005             break;
1006         }
1007 
1008         case VideoParamsTypeUsrptrBuffer: {
1009 
1010             // usrptr can only be retrieved via getParameters(),
1011             // so this case should not happen
1012             break;
1013         }
1014 
1015         case VideoParamsTypeHRD: {
1016             VideoParamsHRD *hrd =
1017                     reinterpret_cast <VideoParamsHRD *> (videoEncParams);
1018 
1019             if (hrd->size != sizeof (VideoParamsHRD)) {
1020                 return ENCODE_INVALID_PARAMS;
1021             }
1022 
1023             mHrdParam.bufferSize = hrd->bufferSize;
1024             mHrdParam.initBufferFullness = hrd->initBufferFullness;
1025             mRenderHrd = true;
1026 
1027             break;
1028         }
1029 
1030         case VideoParamsTypeStoreMetaDataInBuffers: {
1031             VideoParamsStoreMetaDataInBuffers *metadata =
1032                     reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
1033 
1034             if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
1035                 return ENCODE_INVALID_PARAMS;
1036             }
1037 
1038             mStoreMetaDataInBuffers.isEnabled = metadata->isEnabled;
1039 
1040             break;
1041         }
1042 
1043         case VideoParamsTypeTemporalLayer:{
1044             VideoParamsTemporalLayer *temporallayer =
1045                     reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
1046 
1047             if (temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
1048                  return ENCODE_INVALID_PARAMS;
1049             }
1050 
1051             mComParams.numberOfLayer = temporallayer->numberOfLayer;
1052             mComParams.nPeriodicity = temporallayer->nPeriodicity;
1053             for(uint32_t i=0;i<temporallayer->nPeriodicity;i++)
1054                 mComParams.nLayerID[i] = temporallayer->nLayerID[i];
1055             mRenderMultiTemporal = true;
1056             break;
1057         }
1058 
1059         case VideoParamsTypeAVC:
1060         case VideoParamsTypeH263:
1061         case VideoParamsTypeMP4:
1062         case VideoParamsTypeVC1:
1063         case VideoParamsTypeVP8: {
1064             ret = derivedSetParams(videoEncParams);
1065             break;
1066         }
1067 
1068         default: {
1069             LOG_E ("Wrong ParamType here\n");
1070             return ENCODE_INVALID_PARAMS;
1071         }
1072     }
1073     return ret;
1074 }
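
// Sketch of the size-check convention enforced above, assuming the caller fills in
// the type/size header fields itself (the HRD values are illustrative):
//
//     VideoParamsHRD hrd;
//     hrd.type = VideoParamsTypeHRD;
//     hrd.size = sizeof(VideoParamsHRD);         // mismatch => ENCODE_INVALID_PARAMS
//     hrd.bufferSize = 1024 * 1024;
//     hrd.initBufferFullness = 512 * 1024;
//     encoder->setParameters(&hrd);              // only allowed before start()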
1075 
1076 Encode_Status VideoEncoderBase::getParameters(
1077         VideoParamConfigSet *videoEncParams) {
1078 
1079     Encode_Status ret = ENCODE_SUCCESS;
1080     CHECK_NULL_RETURN_IFFAIL(videoEncParams);
1081     LOG_V("Config type = %d\n", (int)videoEncParams->type);
1082 
1083     switch (videoEncParams->type) {
1084         case VideoParamsTypeCommon: {
1085 
1086             VideoParamsCommon *paramsCommon =
1087                     reinterpret_cast <VideoParamsCommon *> (videoEncParams);
1088 
1089             if (paramsCommon->size != sizeof (VideoParamsCommon)) {
1090                 return ENCODE_INVALID_PARAMS;
1091             }
1092             *paramsCommon = mComParams;
1093             break;
1094         }
1095 
1096         case VideoParamsTypeUpSteamBuffer: {
1097 
1098             // Getting the upstream buffer can happen,
1099             // but it is not very meaningful
1100             break;
1101         }
1102 
1103         case VideoParamsTypeUsrptrBuffer: {
1104             VideoParamsUsrptrBuffer *usrptrBuffer =
1105                     reinterpret_cast <VideoParamsUsrptrBuffer *> (videoEncParams);
1106 
1107             if (usrptrBuffer->size != sizeof (VideoParamsUsrptrBuffer)) {
1108                 return ENCODE_INVALID_PARAMS;
1109             }
1110 
1111             ret = getNewUsrptrFromSurface(
1112                     usrptrBuffer->width, usrptrBuffer->height, usrptrBuffer->format,
1113                     usrptrBuffer->expectedSize, &(usrptrBuffer->actualSize),
1114                     &(usrptrBuffer->stride), &(usrptrBuffer->usrPtr));
1115 
1116             break;
1117         }
1118 
1119         case VideoParamsTypeHRD: {
1120             VideoParamsHRD *hrd =
1121                     reinterpret_cast <VideoParamsHRD *> (videoEncParams);
1122 
1123             if (hrd->size != sizeof (VideoParamsHRD)) {
1124                 return ENCODE_INVALID_PARAMS;
1125             }
1126 
1127             hrd->bufferSize = mHrdParam.bufferSize;
1128             hrd->initBufferFullness = mHrdParam.initBufferFullness;
1129 
1130             break;
1131         }
1132 
1133         case VideoParamsTypeStoreMetaDataInBuffers: {
1134             VideoParamsStoreMetaDataInBuffers *metadata =
1135                     reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
1136 
1137             if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
1138                 return ENCODE_INVALID_PARAMS;
1139             }
1140 
1141             metadata->isEnabled = mStoreMetaDataInBuffers.isEnabled;
1142 
1143             break;
1144         }
1145 
1146         case VideoParamsTypeProfileLevel: {
1147             VideoParamsProfileLevel *profilelevel =
1148                 reinterpret_cast <VideoParamsProfileLevel *> (videoEncParams);
1149 
1150             if (profilelevel->size != sizeof (VideoParamsProfileLevel)) {
1151                 return ENCODE_INVALID_PARAMS;
1152             }
1153 
1154             profilelevel->level = 0;
1155             if(queryProfileLevelConfig(mVADisplay, profilelevel->profile) == ENCODE_SUCCESS){
1156                 profilelevel->isSupported = true;
1157                 if(profilelevel->profile == VAProfileH264High)
1158                     profilelevel->level = 42;
1159                 else if(profilelevel->profile == VAProfileH264Main)
1160                      profilelevel->level = 42;
1161                 else if(profilelevel->profile == VAProfileH264Baseline)
1162                      profilelevel->level = 41;
1163                 else{
1164                     profilelevel->level = 0;
1165                     profilelevel->isSupported = false;
1166                 }
1167             }
            break;
1168         }
1169 
1170         case VideoParamsTypeTemporalLayer:{
1171             VideoParamsTemporalLayer *temporallayer =
1172                 reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
1173 
1174             if(temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
1175                 return ENCODE_INVALID_PARAMS;
1176             }
1177 
1178             temporallayer->numberOfLayer = mComParams.numberOfLayer;
1179 
1180             break;
1181         }
1182 
1183         case VideoParamsTypeAVC:
1184         case VideoParamsTypeH263:
1185         case VideoParamsTypeMP4:
1186         case VideoParamsTypeVC1:
1187         case VideoParamsTypeVP8: {
1188             derivedGetParams(videoEncParams);
1189             break;
1190         }
1191 
1192         default: {
1193             LOG_E ("Wrong ParamType here\n");
1194             break;
1195         }
1196 
1197     }
1198     return ret;
1199 }
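
// Sketch of requesting a usrptr-backed input buffer through getParameters(), which is
// serviced by getNewUsrptrFromSurface() later in this file (resolution and expected
// size are illustrative assumptions):
//
//     VideoParamsUsrptrBuffer up;
//     up.type = VideoParamsTypeUsrptrBuffer;
//     up.size = sizeof(VideoParamsUsrptrBuffer);
//     up.width = 1280;
//     up.height = 720;
//     up.format = STRING_TO_FOURCC("NV12");
//     up.expectedSize = 1280 * 720 * 3 / 2;
//     if (encoder->getParameters(&up) == ENCODE_SUCCESS)
//         fillNV12Frame(up.usrPtr, up.stride);   // hypothetical producer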
1200 
1201 Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) {
1202 
1203     Encode_Status ret = ENCODE_SUCCESS;
1204     CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
1205     LOG_V("Config type = %d\n", (int)videoEncConfig->type);
1206 
1207    // workaround
1208 #if 0
1209     if (!mStarted) {
1210         LOG_E("Encoder has not initialized yet, can't call setConfig\n");
1211         return ENCODE_NOT_INIT;
1212     }
1213 #endif
1214 
1215     switch (videoEncConfig->type) {
1216         case VideoConfigTypeFrameRate: {
1217             VideoConfigFrameRate *configFrameRate =
1218                     reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
1219 
1220             if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
1221                 return ENCODE_INVALID_PARAMS;
1222             }
1223             mComParams.frameRate = configFrameRate->frameRate;
1224             mRenderFrameRate = true;
1225             break;
1226         }
1227 
1228         case VideoConfigTypeBitRate: {
1229             VideoConfigBitRate *configBitRate =
1230                     reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
1231 
1232             if (configBitRate->size != sizeof (VideoConfigBitRate)) {
1233                 return ENCODE_INVALID_PARAMS;
1234             }
1235 
1236             if(mComParams.numberOfLayer == 1)
1237             {
1238                 mComParams.rcParams = configBitRate->rcParams;
1239                 mRenderBitRate = true;
1240             }
1241             else
1242             {
1243                 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].nLayerID = configBitRate->rcParams.temporalID;
1244                 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].bitRate = configBitRate->rcParams.bitRate;
1245                 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].frameRate = configBitRate->rcParams.temporalFrameRate;
1246             }
1247             break;
1248         }
1249 
1250         case VideoConfigTypeResolution: {
1251 
1252             // Not Implemented
1253             break;
1254         }
1255         case VideoConfigTypeIntraRefreshType: {
1256 
1257             VideoConfigIntraRefreshType *configIntraRefreshType =
1258                     reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
1259 
1260             if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
1261                 return ENCODE_INVALID_PARAMS;
1262             }
1263             mComParams.refreshType = configIntraRefreshType->refreshType;
1264             break;
1265         }
1266 
1267         case VideoConfigTypeCyclicFrameInterval: {
1268             VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
1269                     reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
1270             if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
1271                 return ENCODE_INVALID_PARAMS;
1272             }
1273 
1274             mComParams.cyclicFrameInterval = configCyclicFrameInterval->cyclicFrameInterval;
1275             break;
1276         }
1277 
1278         case VideoConfigTypeAIR: {
1279 
1280             VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
1281 
1282             if (configAIR->size != sizeof (VideoConfigAIR)) {
1283                 return ENCODE_INVALID_PARAMS;
1284             }
1285 
1286             mComParams.airParams = configAIR->airParams;
1287             mRenderAIR = true;
1288             break;
1289         }
1290         case VideoConfigTypeCIR: {
1291 
1292             VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
1293 
1294             if (configCIR->size != sizeof (VideoConfigCIR)) {
1295                 return ENCODE_INVALID_PARAMS;
1296             }
1297 
1298             mComParams.cirParams = configCIR->cirParams;
1299             mRenderCIR = true;
1300             break;
1301         }
1302         case VideoConfigTypeAVCIntraPeriod:
1303         case VideoConfigTypeNALSize:
1304         case VideoConfigTypeIDRRequest:
1305         case VideoConfigTypeSliceNum:
1306         case VideoConfigTypeVP8:
1307         case VideoConfigTypeVP8ReferenceFrame:
1308         case VideoConfigTypeVP8MaxFrameSizeRatio:{
1309             ret = derivedSetConfig(videoEncConfig);
1310             break;
1311         }
1312         default: {
1313             LOG_E ("Wrong Config Type here\n");
1314             break;
1315         }
1316     }
1317     return ret;
1318 }
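
// Sketch of a runtime bit-rate update through setConfig() for the single-layer path
// above; the starting rcParams are assumed to come from a prior getConfig() call and
// the new bit rate is an illustrative value:
//
//     VideoConfigBitRate cfg;
//     cfg.type = VideoConfigTypeBitRate;
//     cfg.size = sizeof(VideoConfigBitRate);
//     encoder->getConfig(&cfg);                  // fetch current rcParams
//     cfg.rcParams.bitRate = 2000000;            // new target, in bps
//     encoder->setConfig(&cfg);                  // applied via mRenderBitRate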
1319 
1320 Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) {
1321 
1322     Encode_Status ret = ENCODE_SUCCESS;
1323     CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
1324     LOG_V("Config type = %d\n", (int)videoEncConfig->type);
1325 
1326     switch (videoEncConfig->type) {
1327         case VideoConfigTypeFrameRate: {
1328             VideoConfigFrameRate *configFrameRate =
1329                     reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
1330 
1331             if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
1332                 return ENCODE_INVALID_PARAMS;
1333             }
1334 
1335             configFrameRate->frameRate = mComParams.frameRate;
1336             break;
1337         }
1338 
1339         case VideoConfigTypeBitRate: {
1340             VideoConfigBitRate *configBitRate =
1341                     reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
1342 
1343             if (configBitRate->size != sizeof (VideoConfigBitRate)) {
1344                 return ENCODE_INVALID_PARAMS;
1345             }
1346             configBitRate->rcParams = mComParams.rcParams;
1347 
1348 
1349             break;
1350         }
1351         case VideoConfigTypeResolution: {
1352             // Not Implemented
1353             break;
1354         }
1355         case VideoConfigTypeIntraRefreshType: {
1356 
1357             VideoConfigIntraRefreshType *configIntraRefreshType =
1358                     reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
1359 
1360             if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
1361                 return ENCODE_INVALID_PARAMS;
1362             }
1363             configIntraRefreshType->refreshType = mComParams.refreshType;
1364             break;
1365         }
1366 
1367         case VideoConfigTypeCyclicFrameInterval: {
1368             VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
1369                     reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
1370             if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
1371                 return ENCODE_INVALID_PARAMS;
1372             }
1373 
1374             configCyclicFrameInterval->cyclicFrameInterval = mComParams.cyclicFrameInterval;
1375             break;
1376         }
1377 
1378         case VideoConfigTypeAIR: {
1379 
1380             VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
1381 
1382             if (configAIR->size != sizeof (VideoConfigAIR)) {
1383                 return ENCODE_INVALID_PARAMS;
1384             }
1385 
1386             configAIR->airParams = mComParams.airParams;
1387             break;
1388         }
1389         case VideoConfigTypeCIR: {
1390 
1391             VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
1392 
1393             if (configCIR->size != sizeof (VideoConfigCIR)) {
1394                 return ENCODE_INVALID_PARAMS;
1395             }
1396 
1397             configCIR->cirParams = mComParams.cirParams;
1398             break;
1399         }
1400         case VideoConfigTypeAVCIntraPeriod:
1401         case VideoConfigTypeNALSize:
1402         case VideoConfigTypeIDRRequest:
1403         case VideoConfigTypeSliceNum:
1404         case VideoConfigTypeVP8: {
1405 
1406             ret = derivedGetConfig(videoEncConfig);
1407             break;
1408         }
1409         default: {
1410             LOG_E ("Wrong ParamType here\n");
1411             break;
1412         }
1413     }
1414     return ret;
1415 }
1416 
1417 void VideoEncoderBase::PrepareFrameInfo(EncodeTask* task) {
1418     if (mNewHeader) mFrameNum = 0;
1419     LOG_V( "mFrameNum = %d   ", mFrameNum);
1420 
1421     updateFrameInfo(task) ;
1422 }
1423 
1424 Encode_Status VideoEncoderBase::updateFrameInfo(EncodeTask* task) {
1425 
1426     task->type = FTYPE_P;
1427 
1428     // determine the picture type
1429     if (mFrameNum == 0)
1430         task->type = FTYPE_I;
1431     if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0))
1432         task->type = FTYPE_I;
1433 
1434     if (task->type == FTYPE_I)
1435         task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
1436 
1437     return ENCODE_SUCCESS;
1438 }
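
// For example, with the default intraPeriod of 30, frames 0, 30, 60, ... are marked
// FTYPE_I and flagged ENCODE_BUFFERFLAG_SYNCFRAME; every other frame defaults to FTYPE_P.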
1439 
1440 Encode_Status VideoEncoderBase::getMaxOutSize(uint32_t *maxSize) {
1441 
1442     uint32_t size = mComParams.resolution.width * mComParams.resolution.height;
1443 
1444     if (maxSize == NULL) {
1445         LOG_E("maxSize == NULL\n");
1446         return ENCODE_NULL_PTR;
1447     }
1448 
1449     LOG_V( "Begin\n");
1450 
1451     if (mCodedBufSize > 0) {
1452         *maxSize = mCodedBufSize;
1453         LOG_V ("Max encoded size already calculated, returning the cached value");
1454         return ENCODE_SUCCESS;
1455     }
1456 
1457     // here, VP8 is different from AVC/H263
1458     if(mComParams.profile == VAProfileVP8Version0_3) // for VP8 encode
1459     {
1460         // According to VIED suggestions, in CBR mode the coded buffer should be 3 bytes per luma pixel;
1461         // in CBR_HRD mode, the coded buffer size should be 5 * rc_buf_sz * rc_target_bitrate.
1462         // For now mCodedBufSize is hardcoded to 2MB to work around a coded buffer size issue;
1463         /*
1464         if(mComParams.rcMode == VA_RC_CBR) // CBR_HRD mode
1465             mCodedBufSize = 5 * mComParams.rcParams.bitRate * 6000;
1466         else // CBR mode
1467             mCodedBufSize = 3 * mComParams.resolution.width * mComParams.resolution.height;
1468         */
1469         mCodedBufSize = (2 * 1024 * 1024 + 31) & (~31);
1470     }
1471     else // for AVC/H263/MPEG4 encode
1472     {
1473         // based on the rate control mode, calculate the default encoded buffer size
1474         if (mComParams.rcMode == VA_RC_NONE) {
1475              mCodedBufSize = (size * 400) / (16 * 16);
1476              // set to value according to QP
1477         } else {
1478              mCodedBufSize = mComParams.rcParams.bitRate / 4;
1479         }
1480 
1481         mCodedBufSize = max (mCodedBufSize , (size * 400) / (16 * 16));
1482 
1483         // in case got a very large user input bit rate value
1484         mCodedBufSize = min(mCodedBufSize, (size * 1.5 * 8));
1485         mCodedBufSize =  (mCodedBufSize + 15) &(~15);
1486     }
1487 
1488     *maxSize = mCodedBufSize;
1489     return ENCODE_SUCCESS;
1490 }
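
// Worked example, assuming a 1280x720 stream with rcMode == VA_RC_NONE:
// size = 1280 * 720 = 921600, so mCodedBufSize = (921600 * 400) / 256 = 1440000 bytes.
// With rate control enabled, bitRate / 4 is used instead, clamped between the
// 400-bytes-per-macroblock floor and the size * 1.5 * 8 ceiling, then rounded up to a
// multiple of 16.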
1491 
1492 Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
1493     uint32_t width, uint32_t height, uint32_t format,
1494     uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
1495 
1496     Encode_Status ret = ENCODE_FAIL;
1497     VAStatus vaStatus = VA_STATUS_SUCCESS;
1498 
1499     VASurfaceID surface = VA_INVALID_SURFACE;
1500     VAImage image;
1501     uint32_t index = 0;
1502 
1503     LOG_V( "Begin\n");
1504     // If encode session has been configured, we can not request surface creation anymore
1505     if (mStarted) {
1506         LOG_E( "Already Initialized, can not request VA surface anymore\n");
1507         return ENCODE_WRONG_STATE;
1508     }
1509     if (width == 0 || height == 0 || outsize == NULL || stride == NULL || usrptr == NULL) {
1510         LOG_E("invalid parameter: width/height is 0 or an output pointer is NULL\n");
1511         return ENCODE_NULL_PTR;
1512     }
1513 
1514     // Currently only NV12 is supported by the VA API
1515     // The fourcc format determines the number of planes
1516     if (format != STRING_TO_FOURCC("NV12")) {
1517         LOG_W ("Format is not supported\n");
1518         return ENCODE_NOT_SUPPORTED;
1519     }
1520 
1521     surface = CreateNewVASurface(mVADisplay, width, height);
1522     if (surface == VA_INVALID_SURFACE)
1523         return ENCODE_DRIVER_FAIL;
1524 
1525     vaStatus = vaDeriveImage(mVADisplay, surface, &image);
1526     CHECK_VA_STATUS_RETURN("vaDeriveImage");
1527     LOG_V( "vaDeriveImage Done\n");
1528     vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr);
1529     CHECK_VA_STATUS_RETURN("vaMapBuffer");
1530 
1531     // touch one byte per 4KB page so the physical pages are actually allocated (the write below keeps the content unchanged)
1532     for (index = 0; index < image.data_size; index = index + 4096) {
1533         unsigned char tmp =  *(*usrptr + index);
1534         if (tmp == 0)
1535             *(*usrptr + index) = 0;
1536     }
1537 
1538     *outsize = image.data_size;
1539     *stride = image.pitches[0];
1540 
1541     LOG_V( "surface = 0x%08x\n",(uint32_t)surface);
1542     LOG_V("image->pitches[0] = %d\n", image.pitches[0]);
1543     LOG_V("image->pitches[1] = %d\n", image.pitches[1]);
1544     LOG_V("image->offsets[0] = %d\n", image.offsets[0]);
1545     LOG_V("image->offsets[1] = %d\n", image.offsets[1]);
1546     LOG_V("image->num_planes = %d\n", image.num_planes);
1547     LOG_V("image->width = %d\n", image.width);
1548     LOG_V("image->height = %d\n", image.height);
1549     LOG_V("data_size = %d\n", image.data_size);
1550     LOG_V("usrptr = 0x%p\n", *usrptr);
1551 
1552     vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
1553     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1554     vaStatus = vaDestroyImage(mVADisplay, image.image_id);
1555     CHECK_VA_STATUS_RETURN("vaDestroyImage");
1556 
1557     if (*outsize < expectedSize) {
1558         LOG_E("Allocated buffer size is smaller than the expected size, destroying the surface");
1559         LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expectedSize);
1560         vaStatus = vaDestroySurfaces(mVADisplay, &surface, 1);
1561         CHECK_VA_STATUS_RETURN("vaDestroySurfaces");
1562         return ENCODE_FAIL;
1563     }
1564 
1565     VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1566     if (map == NULL) {
1567         LOG_E( "new VASurfaceMap failed\n");
1568         return ENCODE_NO_MEMORY;
1569     }
1570 
1571     map->setVASurface(surface);  //special case: the VA surface is already set, so doMapping() has nothing to create
1572 //    map->setType(MetadataBufferTypeEncoder);
1573     map->setValue((intptr_t)*usrptr);
1574     ValueInfo vinfo;
1575     memset(&vinfo, 0, sizeof(ValueInfo));
1576     vinfo.mode = (MemMode)MEM_MODE_USRPTR;
1577     vinfo.handle = 0;
1578     vinfo.size = 0;
1579     vinfo.width = width;
1580     vinfo.height = height;
1581     vinfo.lumaStride = width;
1582     vinfo.chromStride = width;
1583     vinfo.format = VA_FOURCC_NV12;
1584     vinfo.s3dformat = 0xffffffff;
1585     map->setValueInfo(vinfo);
1586     map->doMapping();
1587 
1588     mSrcSurfaceMapList.push_back(map);
1589 
1590     ret = ENCODE_SUCCESS;
1591 
1592     return ret;
1593 }
1594 
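// Pre-register a set of upstream buffers: each buffer handle is wrapped in a
// VASurfaceMap and mapped to a VA surface up front, so no surface creation is
// needed at encode time.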
1595 Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) {
1596 
1597     Encode_Status status = ENCODE_SUCCESS;
1598 
1599     CHECK_NULL_RETURN_IFFAIL(upStreamBuffer);
1600     if (upStreamBuffer->bufCnt == 0) {
1601         LOG_E("bufCnt == 0\n");
1602         return ENCODE_FAIL;
1603     }
1604 
1605     for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) {
1606         if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL)  //already mapped
1607             continue;
1608 
1609         //wrap upstream buffer into vaSurface
1610         VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1611 
1612 //        map->setType(MetadataBufferTypeUser);
1613         map->setValue(upStreamBuffer->bufList[i]);
1614         ValueInfo vinfo;
1615         memset(&vinfo, 0, sizeof(ValueInfo));
1616         vinfo.mode = (MemMode)upStreamBuffer->bufferMode;
1617         vinfo.handle = (intptr_t)upStreamBuffer->display;
1618         vinfo.size = 0;
1619         if (upStreamBuffer->bufAttrib) {
1620             vinfo.width = upStreamBuffer->bufAttrib->realWidth;
1621             vinfo.height = upStreamBuffer->bufAttrib->realHeight;
1622             vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride;
1623             vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride;
1624             vinfo.format = upStreamBuffer->bufAttrib->format;
1625         }
1626         vinfo.s3dformat = 0xFFFFFFFF;
1627         map->setValueInfo(vinfo);
1628         status = map->doMapping();
1629 
1630         if (status == ENCODE_SUCCESS)
1631             mSrcSurfaceMapList.push_back(map);
1632         else
1633            delete map;
1634     }
1635 
1636     return status;
1637 }
1638 
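// Resolve an input buffer (metadata mode or raw mode) to a VA surface ID.
// Values that were mapped before are served from mSrcSurfaceMapList; new
// values are mapped here using the ValueInfo carried by (or derived for) the
// buffer.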
1639 Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) {
1640 
1641     Encode_Status ret = ENCODE_SUCCESS;
1642     IntelMetadataBufferType type;
1643     intptr_t value;
1644     ValueInfo vinfo;
1645     ValueInfo *pvinfo = &vinfo;
1646     intptr_t *extravalues = NULL;
1647     unsigned int extravalues_count = 0;
1648 
1649     IntelMetadataBuffer imb;
1650     VASurfaceMap *map = NULL;
1651 
1652     memset(&vinfo, 0, sizeof(ValueInfo));
1653     if (mStoreMetaDataInBuffers.isEnabled) {
1654         //metadatabuffer mode
1655         LOG_V("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
1656         if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) {
1657             //fail to parse buffer
1658             return ENCODE_NO_REQUEST_DATA;
1659         }
1660 
1661         imb.GetType(type);
1662         imb.GetValue(value);
1663     } else {
1664         //raw mode
1665         LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
1666         if (! inBuffer->data || inBuffer->size == 0) {
1667             return ENCODE_NULL_PTR;
1668         }
1669 
1670         type = IntelMetadataBufferTypeUser;
1671         value = (intptr_t)inBuffer->data;
1672     }
1673 
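    // With cross-process buffer sharing, a changed session flag means the
    // previous sharing context and the cached surface maps are stale, so both
    // are flushed before continuing.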
1674 #ifdef INTEL_VIDEO_XPROC_SHARING
1675     uint32_t sflag = mSessionFlag;
1676     imb.GetSessionFlag(mSessionFlag);
1677     if (mSessionFlag != sflag) {
1678         //new sharing session, flush buffer sharing cache
1679         IntelMetadataBuffer::ClearContext(sflag, false);
1680         //flush surfacemap cache
1681         LOG_V( "Flush Src Surface Map\n");
1682         while(! mSrcSurfaceMapList.empty())
1683         {
1684             delete (*mSrcSurfaceMapList.begin());
1685             mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
1686         }
1687     }
1688 #endif
1689 
1690     //find if mapped
1691     map = (VASurfaceMap*) findSurfaceMapByValue(value);
1692 
1693     if (map) {
1694         //already mapped: get the surface ID directly and refresh the mapping
1695         LOG_V("found existing surface %d for value %p\n", map->getVASurface(), (void*)value);
1696         *sid = map->getVASurface();
1697         map->doMapping();
1698         return ret;
1699     }
1700 
1701     //not found in the list, try to map the value with the available parameters
1702     LOG_V("no cached surface for value %p, start mapping if there is enough information\n", (void*)value);
1703 
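    // Build the ValueInfo used for mapping: gralloc sources get default NV12
    // parameters at the encode resolution, other metadata buffers carry their
    // own ValueInfo (and possibly extra values), and raw buffers are treated
    // as malloc'ed NV12 data.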
1704     if (mStoreMetaDataInBuffers.isEnabled) {
1705 
1706         //if type is IntelMetadataBufferTypeGrallocSource, use default parameters since no ValueInfo
1707         if (type == IntelMetadataBufferTypeGrallocSource) {
1708             vinfo.mode = MEM_MODE_GFXHANDLE;
1709             vinfo.handle = 0;
1710             vinfo.size = 0;
1711             vinfo.width = mComParams.resolution.width;
1712             vinfo.height = mComParams.resolution.height;
1713             vinfo.lumaStride = mComParams.resolution.width;
1714             vinfo.chromStride = mComParams.resolution.width;
1715             vinfo.format = VA_FOURCC_NV12;
1716             vinfo.s3dformat = 0xFFFFFFFF;
1717         } else {
1718             //get all info mapping needs
1719             imb.GetValueInfo(pvinfo);
1720             imb.GetExtraValues(extravalues, extravalues_count);
1721         }
1722 
1723     } else {
1724 
1725         //raw mode
1726         vinfo.mode = MEM_MODE_MALLOC;
1727         vinfo.handle = 0;
1728         vinfo.size = inBuffer->size;
1729         vinfo.width = mComParams.resolution.width;
1730         vinfo.height = mComParams.resolution.height;
1731         vinfo.lumaStride = mComParams.resolution.width;
1732         vinfo.chromStride = mComParams.resolution.width;
1733         vinfo.format = VA_FOURCC_NV12;
1734         vinfo.s3dformat = 0xFFFFFFFF;
1735     }
1736 
1737     /* Start mapping: if pvinfo is not NULL, there is enough info to map;
1738      * if extravalues is not NULL, additional mappings are needed.
1739      */
1740     if (pvinfo) {
1741         //map according to the info and add the result to the surface map list
1742         map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1743         map->setValue(value);
1744         map->setValueInfo(*pvinfo);
1745         map->setAction(mVASurfaceMappingAction);
1746 
1747         ret = map->doMapping();
1748         if (ret == ENCODE_SUCCESS) {
1749             LOG_V("surface mapping succeeded, mapped value %p to surface %d\n", (void*)value, map->getVASurface());
1750             mSrcSurfaceMapList.push_back(map);
1751         } else {
1752             delete map;
1753             LOG_E("surface mapping failed, wrong info or a serious error occurred\n");
1754             return ret;
1755         }
1756 
1757         *sid = map->getVASurface();
1758 
1759     } else {
1760         //cannot map: no ValueInfo available
1761         LOG_E("surface mapping failed, missing information\n");
1762         return ENCODE_NO_REQUEST_DATA;
1763     }
1764 
1765     if (extravalues) {
1766         //map the extra values using the same ValueInfo
1767         for(unsigned int i=0; i<extravalues_count; i++) {
1768             map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
1769             map->setValue(extravalues[i]);
1770             map->setValueInfo(vinfo);
1771 
1772             ret = map->doMapping();
1773             if (ret == ENCODE_SUCCESS) {
1774                 LOG_V("extra value mapping succeeded, mapped value %p to surface %d\n", (void*)extravalues[i], map->getVASurface());
1775                 mSrcSurfaceMapList.push_back(map);
1776             } else {
1777                 delete map;
1778                 map = NULL;
1779                 LOG_E("extra value mapping failed, value is %p\n", (void*)extravalues[i]);
1780             }
1781         }
1782     }
1783 
1784     return ret;
1785 }
1786 
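// Push updated rate control parameters (bit rate, initial/min/max QP, target
// percentage, window size, frame skip and bit stuffing flags) to the driver
// through a VAEncMiscParameterRateControl buffer.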
1787 Encode_Status VideoEncoderBase::renderDynamicBitrate(EncodeTask* task) {
1788     VAStatus vaStatus = VA_STATUS_SUCCESS;
1789 
1790     LOG_V( "Begin\n\n");
1791     // disabling bit stuffing and frame skipping applies to all rate control modes
1792 
1793     VAEncMiscParameterBuffer   *miscEncParamBuf;
1794     VAEncMiscParameterRateControl *bitrateControlParam;
1795     VABufferID miscParamBufferID;
1796 
1797     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
1798             VAEncMiscParameterBufferType,
1799             sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
1800             1, NULL,
1801             &miscParamBufferID);
1802 
1803     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
1804 
1805     vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
1806     CHECK_VA_STATUS_RETURN("vaMapBuffer");
1807 
1808     miscEncParamBuf->type = VAEncMiscParameterTypeRateControl;
1809     bitrateControlParam = (VAEncMiscParameterRateControl *)miscEncParamBuf->data;
1810 
1811     bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate;
1812     bitrateControlParam->initial_qp = mComParams.rcParams.initQP;
1813     if(mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I)) {
1814         bitrateControlParam->min_qp = mComParams.rcParams.I_minQP;
1815         bitrateControlParam->max_qp = mComParams.rcParams.I_maxQP;
1816         mRenderBitRate = true;
1817         LOG_I("apply I min/max qp for IDR or I frame\n");
1818     } else {
1819         bitrateControlParam->min_qp = mComParams.rcParams.minQP;
1820         bitrateControlParam->max_qp = mComParams.rcParams.maxQP;
1821         mRenderBitRate = false;
1822         LOG_I("revert to original min/max qp after IDR or I frame\n");
1823     }
1824     bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage;
1825     bitrateControlParam->window_size = mComParams.rcParams.windowSize;
1826     bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip;
1827     bitrateControlParam->rc_flags.bits.disable_bit_stuffing = mComParams.rcParams.disableBitsStuffing;
1828     bitrateControlParam->basic_unit_size = 0;
1829 
1830     LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second);
1831     LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp);
1832     LOG_I("min_qp = %d\n", bitrateControlParam->min_qp);
1833     LOG_I("max_qp = %d\n", bitrateControlParam->max_qp);
1834     LOG_I("target_percentage = %d\n", bitrateControlParam->target_percentage);
1835     LOG_I("window_size = %d\n", bitrateControlParam->window_size);
1836     LOG_I("disable_frame_skip = %d\n", bitrateControlParam->rc_flags.bits.disable_frame_skip);
1837     LOG_I("disable_bit_stuffing = %d\n", bitrateControlParam->rc_flags.bits.disable_bit_stuffing);
1838 
1839     vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
1840     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1841 
1842     vaStatus = vaRenderPicture(mVADisplay, mVAContext,
1843             &miscParamBufferID, 1);
1844     CHECK_VA_STATUS_RETURN("vaRenderPicture");
1845 
1846     return ENCODE_SUCCESS;
1847 }
1848 
1849 
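// Push the rounded frame rate to the driver through a
// VAEncMiscParameterFrameRate buffer; only applicable in VCM rate control
// mode.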
1850 Encode_Status VideoEncoderBase::renderDynamicFrameRate() {
1851 
1852     VAStatus vaStatus = VA_STATUS_SUCCESS;
1853 
1854     if (mComParams.rcMode != RATE_CONTROL_VCM) {
1855 
1856         LOG_W("Not in VCM mode, but renderDynamicFrameRate was called\n");
1857         return ENCODE_SUCCESS;
1858     }
1859 
1860     VAEncMiscParameterBuffer   *miscEncParamBuf;
1861     VAEncMiscParameterFrameRate *frameRateParam;
1862     VABufferID miscParamBufferID;
1863 
1864     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
1865             VAEncMiscParameterBufferType,
1866             sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
1867             1, NULL, &miscParamBufferID);
1868     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
1869 
1870     vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
1871     CHECK_VA_STATUS_RETURN("vaMapBuffer");
1872 
1873     miscEncParamBuf->type = VAEncMiscParameterTypeFrameRate;
1874     frameRateParam = (VAEncMiscParameterFrameRate *)miscEncParamBuf->data;
1875     frameRateParam->framerate =
1876             (unsigned int) (mComParams.frameRate.frameRateNum + mComParams.frameRate.frameRateDenom/2)
1877             / mComParams.frameRate.frameRateDenom;
1878 
1879     vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
1880     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1881 
1882     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
1883     CHECK_VA_STATUS_RETURN("vaRenderPicture");
1884 
1885     LOG_I( "frame rate = %d\n", frameRateParam->framerate);
1886     return ENCODE_SUCCESS;
1887 }
1888 
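// Push the HRD (hypothetical reference decoder) buffer size and initial
// fullness to the driver through a VAEncMiscParameterHRD buffer.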
1889 Encode_Status VideoEncoderBase::renderHrd() {
1890 
1891     VAStatus vaStatus = VA_STATUS_SUCCESS;
1892 
1893     VAEncMiscParameterBuffer *miscEncParamBuf;
1894     VAEncMiscParameterHRD *hrdParam;
1895     VABufferID miscParamBufferID;
1896 
1897     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
1898             VAEncMiscParameterBufferType,
1899             sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
1900             1, NULL, &miscParamBufferID);
1901     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
1902 
1903     vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
1904     CHECK_VA_STATUS_RETURN("vaMapBuffer");
1905 
1906     miscEncParamBuf->type = VAEncMiscParameterTypeHRD;
1907     hrdParam = (VAEncMiscParameterHRD *)miscEncParamBuf->data;
1908 
1909     hrdParam->buffer_size = mHrdParam.bufferSize;
1910     hrdParam->initial_buffer_fullness = mHrdParam.initBufferFullness;
1911 
1912     vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
1913     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
1914 
1915     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
1916     CHECK_VA_STATUS_RETURN("vaRenderPicture");
1917 
1918     return ENCODE_SUCCESS;
1919 }
1920 
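// Linear search of the source surface map list by user value; returns NULL if
// the value has not been mapped yet.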
1921 VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(intptr_t value) {
1922     android::List<VASurfaceMap *>::iterator node;
1923 
1924     for (node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++)
1925     {
1926         if ((*node)->getValue() == value)
1927             return *node;
1930     }
1931 
1932     return NULL;
1933 }
1934