1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "WebmFrameThread"
19 
20 #include "WebmConstants.h"
21 #include "WebmFrameThread.h"
22 
23 #include <media/stagefright/MetaData.h>
24 #include <media/stagefright/foundation/ADebug.h>
25 
26 #include <utils/Log.h>
27 #include <inttypes.h>
28 
29 using namespace webm;
30 
31 namespace android {
32 
wrap(void * arg)33 void *WebmFrameThread::wrap(void *arg) {
34     WebmFrameThread *worker = reinterpret_cast<WebmFrameThread*>(arg);
35     worker->run();
36     return NULL;
37 }
38 
start()39 status_t WebmFrameThread::start() {
40     pthread_attr_t attr;
41     pthread_attr_init(&attr);
42     pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
43     pthread_create(&mThread, &attr, WebmFrameThread::wrap, this);
44     pthread_attr_destroy(&attr);
45     return OK;
46 }
47 
stop()48 status_t WebmFrameThread::stop() {
49     void *status;
50     pthread_join(mThread, &status);
51     return (status_t)(intptr_t)status;
52 }
53 
54 //=================================================================================================
55 
// Base for threads that produce WebmFrames: remembers the track type
// (audio/video) and the queue into which produced frames are pushed.
WebmFrameSourceThread::WebmFrameSourceThread(
    int type,
    LinkedBlockingQueue<const sp<WebmFrame> >& sink)
    : mType(type), mSink(sink) {
}
61 
62 //=================================================================================================
63 
// Constructs the muxer thread that drains the source threads' frame queues
// and writes webm clusters to the output file.
//
// fd:  output file descriptor (borrowed, not owned here).
// off: byte offset where segment data starts; cue point positions are
//      recorded relative to this offset (see flushFrames()).
// videoThread/audioThread: source threads whose mSink queues supply frames.
// cues: caller-owned list that accumulates cue points for the writer.
//
// mDone starts true so the thread counts as stopped until start() is called.
WebmFrameSinkThread::WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        sp<WebmFrameSourceThread> videoThread,
        sp<WebmFrameSourceThread> audioThread,
        List<sp<WebmElement> >& cues)
    : mFd(fd),
      mSegmentDataStart(off),
      mVideoFrames(videoThread->mSink),
      mAudioFrames(audioThread->mSink),
      mCues(cues),
      mDone(true) {
}
77 
// Alternative constructor taking the audio/video frame queues directly
// instead of the source threads that own them; otherwise identical to the
// constructor above.
WebmFrameSinkThread::WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
        LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
        List<sp<WebmElement> >& cues)
    : mFd(fd),
      mSegmentDataStart(off),
      mVideoFrames(videoSource),
      mAudioFrames(audioSource),
      mCues(cues),
      mDone(true) {
}
91 
92 // Initializes a webm cluster with its starting timecode.
93 //
94 // frames:
95 //   sequence of input audio/video frames received from the source.
96 //
97 // clusterTimecodeL:
98 //   the starting timecode of the cluster; this is the timecode of the first
99 //   frame since frames are ordered by timestamp.
100 //
101 // children:
102 //   list to hold child elements in a webm cluster (start timecode and
103 //   simple blocks).
104 //
105 // static
initCluster(List<const sp<WebmFrame>> & frames,uint64_t & clusterTimecodeL,List<sp<WebmElement>> & children)106 void WebmFrameSinkThread::initCluster(
107     List<const sp<WebmFrame> >& frames,
108     uint64_t& clusterTimecodeL,
109     List<sp<WebmElement> >& children) {
110     CHECK(!frames.empty() && children.empty());
111 
112     const sp<WebmFrame> f = *(frames.begin());
113     clusterTimecodeL = f->mAbsTimecode;
114     WebmUnsigned *clusterTimecode = new WebmUnsigned(kMkvTimecode, clusterTimecodeL);
115     children.clear();
116     children.push_back(clusterTimecode);
117 }
118 
writeCluster(List<sp<WebmElement>> & children)119 void WebmFrameSinkThread::writeCluster(List<sp<WebmElement> >& children) {
120     // children must contain at least one simpleblock and its timecode
121     CHECK_GE(children.size(), 2);
122 
123     uint64_t size;
124     sp<WebmElement> cluster = new WebmMaster(kMkvCluster, children);
125     cluster->write(mFd, size);
126     children.clear();
127 }
128 
129 // Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
130 //
131 // last:
132 //   current flush is triggered by EOS instead of a second outstanding video key frame.
void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
    if (frames.empty()) {
        return;
    }

    // Open the first cluster at the timecode of the earliest frame.
    uint64_t clusterTimecodeL;
    List<sp<WebmElement> > children;
    initCluster(frames, clusterTimecodeL, children);

    // The cue point references the byte position where this flush starts
    // writing, so capture the file offset before anything is written.
    uint64_t cueTime = clusterTimecodeL;
    off_t fpos = ::lseek(mFd, 0, SEEK_CUR);
    size_t n = frames.size();
    if (!last) {
        // If we are not flushing the last sequence of outstanding frames, flushFrames
        // must have been called right after we have pushed a second outstanding video key
        // frame (the last frame), which belongs to the next cluster; also hold back on
        // flushing the second to last frame before we check its type. A audio frame
        // should precede the aforementioned video key frame in the next sequence, a video
        // frame should be the last frame in the current (to-be-flushed) sequence.
        CHECK_GE(n, 2);
        n -= 2;
    }

    for (size_t i = 0; i < n; i++) {
        const sp<WebmFrame> f = *(frames.begin());
        if (f->mType == kVideoType && f->mKey) {
            // The cue should reference the most recent video key frame.
            cueTime = f->mAbsTimecode;
        }

        // SimpleBlock timecodes are stored as offsets from the cluster
        // timecode; start a new cluster before the offset exceeds what a
        // signed 16-bit field can hold.
        if (f->mAbsTimecode - clusterTimecodeL > INT16_MAX) {
            writeCluster(children);
            initCluster(frames, clusterTimecodeL, children);
        }

        frames.erase(frames.begin());
        children.push_back(f->SimpleBlock(clusterTimecodeL));
    }

    // equivalent to last==false
    if (!frames.empty()) {
        // decide whether to write out the second to last frame.
        const sp<WebmFrame> secondLastFrame = *(frames.begin());
        if (secondLastFrame->mType == kVideoType) {
            frames.erase(frames.begin());
            children.push_back(secondLastFrame->SimpleBlock(clusterTimecodeL));
        }
    }

    // Flush the final (possibly only) cluster and record one cue point for
    // this flush, pointing at track 1 and the flush's starting byte offset
    // relative to the segment data start.
    writeCluster(children);
    sp<WebmElement> cuePoint = WebmElement::CuePointEntry(cueTime, 1, fpos - mSegmentDataStart);
    mCues.push_back(cuePoint);
}
185 
// Clears the done flag before spawning the muxer thread (run() loops on it).
status_t WebmFrameSinkThread::start() {
    mDone = false;
    return WebmFrameThread::start();
}
190 
// Signals the muxer loop to exit, then joins the thread.
status_t WebmFrameSinkThread::stop() {
    mDone = true;
    // Push EOS markers so run() is unblocked if it is waiting in peek()
    // on either queue.
    mVideoFrames.push(WebmFrame::EOS);
    mAudioFrames.push(WebmFrame::EOS);
    return WebmFrameThread::stop();
}
197 
// Muxer main loop: repeatedly peeks the head of the audio and video queues,
// takes whichever frame compares earlier, and accumulates frames until a
// second video key frame is seen — at which point everything before it is
// flushed as one or more clusters. Exits when both queues report EOS (or
// mDone is set), then flushes the remaining frames as the last cluster.
void WebmFrameSinkThread::run() {
    int numVideoKeyFrames = 0;
    List<const sp<WebmFrame> > outstandingFrames;
    while (!mDone) {
        ALOGV("wait v frame");
        const sp<WebmFrame> videoFrame = mVideoFrames.peek();
        ALOGV("v frame: %p", videoFrame.get());

        ALOGV("wait a frame");
        const sp<WebmFrame> audioFrame = mAudioFrames.peek();
        ALOGV("a frame: %p", audioFrame.get());

        // Both tracks have ended; leave the EOS markers in the queues.
        if (videoFrame->mEos && audioFrame->mEos) {
            break;
        }

        // Merge the two streams in order of WebmFrame comparison (peek() +
        // take() ensures the losing frame stays queued for the next pass).
        // NOTE(review): assumes an EOS frame compares after any real frame
        // so a finished track never blocks the other — confirm against
        // WebmFrame's operator<.
        if (*audioFrame < *videoFrame) {
            ALOGV("take a frame");
            mAudioFrames.take();
            outstandingFrames.push_back(audioFrame);
        } else {
            ALOGV("take v frame");
            mVideoFrames.take();
            outstandingFrames.push_back(videoFrame);
            if (videoFrame->mKey)
                numVideoKeyFrames++;
        }

        // A second video key frame begins the next cluster: flush everything
        // collected so far, holding back the frames that belong to it
        // (see the !last branch in flushFrames()).
        if (numVideoKeyFrames == 2) {
            flushFrames(outstandingFrames, /* last = */ false);
            numVideoKeyFrames--;
        }
    }
    ALOGV("flushing last cluster (size %zu)", outstandingFrames.size());
    flushFrames(outstandingFrames, /* last = */ true);
    mDone = true;
}
235 
236 //=================================================================================================
237 
238 static const int64_t kInitialDelayTimeUs = 700000LL;
239 
// Resets all state flags to their "not started" values; called from the
// constructor before any thread exists.
void WebmFrameMediaSourceThread::clearFlags() {
    mDone = false;
    mPaused = false;
    mResumed = false;
    mStarted = false;
    mReachedEOS = false;
}
247 
// Source thread that reads encoded samples from a MediaSource and converts
// them into WebmFrames pushed onto `sink`.
//
// timeCodeScale: divisor used when converting sample timestamps to webm
//                timecode units (see run()).
// startTimeRealUs / startTimeOffsetMs: base start time and optional client
//                offset; a negative offset means "not set".
WebmFrameMediaSourceThread::WebmFrameMediaSourceThread(
        const sp<MediaSource>& source,
        int type,
        LinkedBlockingQueue<const sp<WebmFrame> >& sink,
        uint64_t timeCodeScale,
        int64_t startTimeRealUs,
        int32_t startTimeOffsetMs,
        int numTracks,
        bool realTimeRecording)
    : WebmFrameSourceThread(type, sink),
      mSource(source),
      mTimeCodeScale(timeCodeScale),
      mTrackDurationUs(0) {
    clearFlags();
    mStartTimeUs = startTimeRealUs;
    if (realTimeRecording && numTracks > 1) {
        /*
         * Copied from MPEG4Writer
         *
         * This extra delay of accepting incoming audio/video signals
         * helps to align a/v start time at the beginning of a recording
         * session, and it also helps eliminate the "recording" sound for
         * camcorder applications.
         *
         * If client does not set the start time offset, we fall back to
         * use the default initial delay value.
         */
        int64_t startTimeOffsetUs = startTimeOffsetMs * 1000LL;
        if (startTimeOffsetUs < 0) {  // Start time offset was not set
            startTimeOffsetUs = kInitialDelayTimeUs;
        }
        mStartTimeUs += startTimeOffsetUs;
        ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
    }
}
283 
start()284 status_t WebmFrameMediaSourceThread::start() {
285     sp<MetaData> meta = new MetaData;
286     meta->setInt64(kKeyTime, mStartTimeUs);
287     status_t err = mSource->start(meta.get());
288     if (err != OK) {
289         mDone = true;
290         mReachedEOS = true;
291         return err;
292     } else {
293         mStarted = true;
294         return WebmFrameThread::start();
295     }
296 }
297 
resume()298 status_t WebmFrameMediaSourceThread::resume() {
299     if (!mDone && mPaused) {
300         mPaused = false;
301         mResumed = true;
302     }
303     return OK;
304 }
305 
pause()306 status_t WebmFrameMediaSourceThread::pause() {
307     if (mStarted) {
308         mPaused = true;
309     }
310     return OK;
311 }
312 
// Stops the media source and joins the reader thread; safe to call when the
// thread was never started.
status_t WebmFrameMediaSourceThread::stop() {
    if (mStarted) {
        mStarted = false;
        mDone = true;
        // Stopping the source makes a read() blocked in run() return an
        // error, which terminates the loop so the join below can complete.
        mSource->stop();
        return WebmFrameThread::stop();
    }
    return OK;
}
322 
// Reader loop: pulls buffers from the MediaSource, normalizes timestamps
// (re-based at the first sample, minus accumulated pause time), wraps each
// sample in a WebmFrame and pushes it into mSink. Exits when mDone is set
// or read() fails (including EOS), then pushes an EOS marker downstream.
void WebmFrameMediaSourceThread::run() {
    int32_t count = 0;
    int64_t timestampUs = 0xdeadbeef;
    int64_t lastTimestampUs = 0; // Previous sample time stamp
    int64_t lastDurationUs = 0; // Previous sample duration
    int64_t previousPausedDurationUs = 0;

    const uint64_t kUninitialized = 0xffffffffffffffffL;
    // mStartTimeUs is re-purposed here to hold the timestamp of the first
    // sample, so that all emitted timecodes are relative to it.
    mStartTimeUs = kUninitialized;

    status_t err = OK;
    MediaBuffer *buffer;
    while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) {
        // Skip zero-length buffers.
        if (buffer->range_length() == 0) {
            buffer->release();
            buffer = NULL;
            continue;
        }

        sp<MetaData> md = buffer->meta_data();
        CHECK(md->findInt64(kKeyTime, &timestampUs));
        if (mStartTimeUs == kUninitialized) {
            mStartTimeUs = timestampUs;
        }
        timestampUs -= mStartTimeUs;

        // While paused (and not yet resumed): drop the sample, but keep
        // tracking the last timestamp/duration so the length of the pause
        // can be computed when resuming.
        if (mPaused && !mResumed) {
            lastDurationUs = timestampUs - lastTimestampUs;
            lastTimestampUs = timestampUs;
            buffer->release();
            buffer = NULL;
            continue;
        }
        ++count;

        // adjust time-stamps after pause/resume
        if (mResumed) {
            int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
            CHECK_GE(durExcludingEarlierPausesUs, 0ll);
            int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
            CHECK_GE(pausedDurationUs, lastDurationUs);
            previousPausedDurationUs += pausedDurationUs - lastDurationUs;
            mResumed = false;
        }
        timestampUs -= previousPausedDurationUs;
        CHECK_GE(timestampUs, 0ll);

        // Absence of the sync-frame key leaves isSync false (non-key frame).
        int32_t isSync = false;
        md->findInt32(kKeyIsSyncFrame, &isSync);
        // Convert microseconds to webm timecode units: us * 1000 gives ns,
        // divided by mTimeCodeScale — presumably ns per timecode tick;
        // confirm against the writer that sets the scale.
        const sp<WebmFrame> f = new WebmFrame(
            mType,
            isSync,
            timestampUs * 1000 / mTimeCodeScale,
            buffer);
        mSink.push(f);

        ALOGV(
            "%s %s frame at %" PRId64 " size %zu\n",
            mType == kVideoType ? "video" : "audio",
            isSync ? "I" : "P",
            timestampUs * 1000 / mTimeCodeScale,
            buffer->range_length());

        buffer->release();
        buffer = NULL;

        if (timestampUs > mTrackDurationUs) {
            mTrackDurationUs = timestampUs;
        }
        lastDurationUs = timestampUs - lastTimestampUs;
        lastTimestampUs = timestampUs;
    }

    // Account for the duration of the final sample, then signal EOS.
    mTrackDurationUs += lastDurationUs;
    mSink.push(WebmFrame::EOS);
}
399 }
400