1 /*
2  * Copyright 2015, The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "MediaSync-JNI"
19 #include <utils/Log.h>
20 
21 #include "android_media_MediaSync.h"
22 
23 #include "android_media_AudioTrack.h"
24 #include "android_media_PlaybackParams.h"
25 #include "android_media_SyncParams.h"
26 #include "android_runtime/AndroidRuntime.h"
27 #include "android_runtime/android_view_Surface.h"
28 #include "jni.h"
29 #include "JNIHelp.h"
30 
31 #include <gui/Surface.h>
32 
33 #include <media/AudioResamplerPublic.h>
34 #include <media/AudioTrack.h>
35 #include <media/stagefright/MediaClock.h>
36 #include <media/stagefright/MediaSync.h>
37 #include <media/stagefright/foundation/ADebug.h>
38 #include <media/stagefright/foundation/AString.h>
39 
40 #include <nativehelper/ScopedLocalRef.h>
41 
42 namespace android {
43 
// Cached JNI field IDs, resolved once in android_media_MediaSync_native_init().
struct fields_t {
    jfieldID context;                      // MediaSync.mNativeContext ("J")
    jfieldID mediaTimestampMediaTimeUsID;  // MediaTimestamp.mediaTimeUs ("J")
    jfieldID mediaTimestampNanoTimeID;     // MediaTimestamp.nanoTime ("J")
    jfieldID mediaTimestampClockRateID;    // MediaTimestamp.clockRate ("F")
};

static fields_t gFields;
// Field-ID caches for the helper structs that marshal PlaybackParams /
// SyncParams between Java and native; initialized in native_init().
static PlaybackParams::fields_t gPlaybackParamsFields;
static SyncParams::fields_t gSyncParamsFields;
54 
55 ////////////////////////////////////////////////////////////////////////////////
56 
// JMediaSync is a thin native wrapper that owns one MediaSync instance
// (mSync) and forwards every call to it. Its lifetime is controlled by
// RefBase strong references taken/dropped from the Java side (see
// setMediaSync below).
JMediaSync::JMediaSync() {
    mSync = MediaSync::create();
}

JMediaSync::~JMediaSync() {
}

// Forwards the output surface's buffer producer (may be NULL) to MediaSync.
status_t JMediaSync::setSurface(const sp<IGraphicBufferProducer> &bufferProducer) {
    return mSync->setSurface(bufferProducer);
}

// Forwards the audio track (may be NULL) to MediaSync.
status_t JMediaSync::setAudioTrack(const sp<AudioTrack> &audioTrack) {
    return mSync->setAudioTrack(audioTrack);
}

// On success, *bufferProducer receives the producer of the created surface.
status_t JMediaSync::createInputSurface(
        sp<IGraphicBufferProducer>* bufferProducer) {
    return mSync->createInputSurface(bufferProducer);
}

// Returns the media clock; callers in this file null-check the result.
sp<const MediaClock> JMediaSync::getMediaClock() {
    return mSync->getMediaClock();
}

status_t JMediaSync::setPlaybackParams(const AudioPlaybackRate& rate) {
    return mSync->setPlaybackSettings(rate);
}

void JMediaSync::getPlaybackParams(AudioPlaybackRate* rate /* nonnull */) {
    mSync->getPlaybackSettings(rate);
}

status_t JMediaSync::setSyncParams(const AVSyncSettings& syncParams) {
    return mSync->setSyncSettings(syncParams);
}

void JMediaSync::getSyncParams(AVSyncSettings* syncParams /* nonnull */) {
    mSync->getSyncSettings(syncParams);
}

status_t JMediaSync::setVideoFrameRateHint(float rate) {
    return mSync->setVideoFrameRateHint(rate);
}

float JMediaSync::getVideoFrameRate() {
    return mSync->getVideoFrameRate();
}

void JMediaSync::flush() {
    mSync->flush();
}

// Reports audio data queued to the track so MediaSync can track position.
status_t JMediaSync::updateQueuedAudioData(
        int sizeInBytes, int64_t presentationTimeUs) {
    return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
}

status_t JMediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) {
    return mSync->getPlayTimeForPendingAudioFrames(outTimeUs);
}
117 
118 }  // namespace android
119 
120 ////////////////////////////////////////////////////////////////////////////////
121 
122 using namespace android;
123 
// Swaps the JMediaSync stored in the Java object's mNativeContext field.
// Takes a strong reference on the new instance BEFORE dropping the old
// one, so passing the currently-stored instance is safe. Returns the
// previously stored instance (possibly NULL).
static sp<JMediaSync> setMediaSync(JNIEnv *env, jobject thiz, const sp<JMediaSync> &sync) {
    sp<JMediaSync> old = (JMediaSync *)env->GetLongField(thiz, gFields.context);
    if (sync != NULL) {
        sync->incStrong(thiz);
    }
    if (old != NULL) {
        old->decStrong(thiz);
    }

    env->SetLongField(thiz, gFields.context, (jlong)sync.get());

    return old;
}
137 
// Reads the native JMediaSync pointer back out of the Java object's
// mNativeContext field; returns NULL after release.
static sp<JMediaSync> getMediaSync(JNIEnv *env, jobject thiz) {
    return (JMediaSync *)env->GetLongField(thiz, gFields.context);
}
141 
android_media_MediaSync_release(JNIEnv * env,jobject thiz)142 static void android_media_MediaSync_release(JNIEnv *env, jobject thiz) {
143     setMediaSync(env, thiz, NULL);
144 }
145 
throwExceptionAsNecessary(JNIEnv * env,status_t err,const char * msg=NULL)146 static void throwExceptionAsNecessary(
147         JNIEnv *env, status_t err, const char *msg = NULL) {
148     switch (err) {
149         case NO_ERROR:
150             break;
151 
152         case BAD_VALUE:
153             jniThrowException(env, "java/lang/IllegalArgumentException", msg);
154             break;
155 
156         case NO_INIT:
157         case INVALID_OPERATION:
158         default:
159             if (err > 0) {
160                 break;
161             }
162             AString msgWithErrorCode(msg);
163             msgWithErrorCode.append(" error:");
164             msgWithErrorCode.append(err);
165             jniThrowException(env, "java/lang/IllegalStateException", msgWithErrorCode.c_str());
166             break;
167     }
168 }
169 
android_media_MediaSync_native_setSurface(JNIEnv * env,jobject thiz,jobject jsurface)170 static void android_media_MediaSync_native_setSurface(
171         JNIEnv *env, jobject thiz, jobject jsurface) {
172     ALOGV("android_media_MediaSync_setSurface");
173 
174     sp<JMediaSync> sync = getMediaSync(env, thiz);
175     if (sync == NULL) {
176         throwExceptionAsNecessary(env, INVALID_OPERATION);
177         return;
178     }
179 
180     sp<IGraphicBufferProducer> bufferProducer;
181     if (jsurface != NULL) {
182         sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
183         if (surface != NULL) {
184             bufferProducer = surface->getIGraphicBufferProducer();
185         } else {
186             throwExceptionAsNecessary(env, BAD_VALUE, "The surface has been released");
187             return;
188         }
189     }
190 
191     status_t err = sync->setSurface(bufferProducer);
192 
193     if (err == INVALID_OPERATION) {
194         throwExceptionAsNecessary(
195                 env, INVALID_OPERATION, "Surface has already been configured");
196     } if (err != NO_ERROR) {
197         AString msg("Failed to connect to surface with error ");
198         msg.append(err);
199         throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
200     }
201 }
202 
android_media_MediaSync_native_setAudioTrack(JNIEnv * env,jobject thiz,jobject jaudioTrack)203 static void android_media_MediaSync_native_setAudioTrack(
204         JNIEnv *env, jobject thiz, jobject jaudioTrack) {
205     ALOGV("android_media_MediaSync_setAudioTrack");
206 
207     sp<JMediaSync> sync = getMediaSync(env, thiz);
208     if (sync == NULL) {
209         throwExceptionAsNecessary(env, INVALID_OPERATION);
210         return;
211     }
212 
213     sp<AudioTrack> audioTrack;
214     if (jaudioTrack != NULL) {
215         audioTrack = android_media_AudioTrack_getAudioTrack(env, jaudioTrack);
216         if (audioTrack == NULL) {
217             throwExceptionAsNecessary(env, BAD_VALUE, "The audio track has been released");
218             return;
219         }
220     }
221 
222     status_t err = sync->setAudioTrack(audioTrack);
223 
224     if (err == INVALID_OPERATION) {
225         throwExceptionAsNecessary(
226                 env, INVALID_OPERATION, "Audio track has already been configured");
227     } if (err != NO_ERROR) {
228         AString msg("Failed to configure audio track with error ");
229         msg.append(err);
230         throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
231     }
232 }
233 
android_media_MediaSync_createInputSurface(JNIEnv * env,jobject thiz)234 static jobject android_media_MediaSync_createInputSurface(
235         JNIEnv* env, jobject thiz) {
236     ALOGV("android_media_MediaSync_createInputSurface");
237 
238     sp<JMediaSync> sync = getMediaSync(env, thiz);
239     if (sync == NULL) {
240         throwExceptionAsNecessary(env, INVALID_OPERATION);
241         return NULL;
242     }
243 
244     // Tell the MediaSync that we want to use a Surface as input.
245     sp<IGraphicBufferProducer> bufferProducer;
246     status_t err = sync->createInputSurface(&bufferProducer);
247     if (err != NO_ERROR) {
248         throwExceptionAsNecessary(env, INVALID_OPERATION);
249         return NULL;
250     }
251 
252     // Wrap the IGBP in a Java-language Surface.
253     return android_view_Surface_createFromIGraphicBufferProducer(env,
254             bufferProducer);
255 }
256 
android_media_MediaSync_native_updateQueuedAudioData(JNIEnv * env,jobject thiz,jint sizeInBytes,jlong presentationTimeUs)257 static void android_media_MediaSync_native_updateQueuedAudioData(
258         JNIEnv *env, jobject thiz, jint sizeInBytes, jlong presentationTimeUs) {
259     sp<JMediaSync> sync = getMediaSync(env, thiz);
260     if (sync == NULL) {
261         throwExceptionAsNecessary(env, INVALID_OPERATION);
262         return;
263     }
264 
265     status_t err = sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
266     if (err != NO_ERROR) {
267         throwExceptionAsNecessary(env, err);
268         return;
269     }
270 }
271 
android_media_MediaSync_native_getTimestamp(JNIEnv * env,jobject thiz,jobject timestamp)272 static jboolean android_media_MediaSync_native_getTimestamp(
273         JNIEnv *env, jobject thiz, jobject timestamp) {
274     sp<JMediaSync> sync = getMediaSync(env, thiz);
275     if (sync == NULL) {
276         throwExceptionAsNecessary(env, INVALID_OPERATION);
277         return JNI_FALSE;
278     }
279 
280     sp<const MediaClock> mediaClock = sync->getMediaClock();
281     if (mediaClock == NULL) {
282         return JNI_FALSE;
283     }
284 
285     int64_t nowUs = ALooper::GetNowUs();
286     int64_t mediaUs = 0;
287     if (mediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
288         return JNI_FALSE;
289     }
290 
291     env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID,
292             (jlong)mediaUs);
293     env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID,
294             (jlong)(nowUs * 1000));
295     env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID,
296             (jfloat)mediaClock->getPlaybackRate());
297     return JNI_TRUE;
298 }
299 
android_media_MediaSync_native_getPlayTimeForPendingAudioFrames(JNIEnv * env,jobject thiz)300 static jlong android_media_MediaSync_native_getPlayTimeForPendingAudioFrames(
301         JNIEnv *env, jobject thiz) {
302     sp<JMediaSync> sync = getMediaSync(env, thiz);
303     if (sync == NULL) {
304         throwExceptionAsNecessary(env, INVALID_OPERATION);
305     }
306 
307     int64_t playTimeUs = 0;
308     status_t err = sync->getPlayTimeForPendingAudioFrames(&playTimeUs);
309     if (err != NO_ERROR) {
310         throwExceptionAsNecessary(env, err);
311     }
312     return (jlong)playTimeUs;
313 }
314 
// native_setPlaybackParams: merges only the fields explicitly set on the
// Java PlaybackParams over the currently applied native rate, applies the
// result, and returns the media clock's resulting playback rate (0.f on
// error or when no clock exists yet).
static jfloat android_media_MediaSync_setPlaybackParams(
        JNIEnv *env, jobject thiz, jobject params) {
    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return (jfloat)0.f;
    }

    PlaybackParams pbs;
    pbs.fillFromJobject(env, gPlaybackParamsFields, params);
    ALOGV("setPlaybackParams: %d:%f %d:%f %d:%u %d:%u",
            pbs.speedSet, pbs.audioRate.mSpeed,
            pbs.pitchSet, pbs.audioRate.mPitch,
            pbs.audioFallbackModeSet, pbs.audioRate.mFallbackMode,
            pbs.audioStretchModeSet, pbs.audioRate.mStretchMode);

    // Start from the current rate and overwrite only the "set" fields so
    // partial updates leave the other parameters untouched.
    AudioPlaybackRate rate;
    sync->getPlaybackParams(&rate);
    bool updatedRate = false;
    if (pbs.speedSet) {
        rate.mSpeed = pbs.audioRate.mSpeed;
        updatedRate = true;
    }
    if (pbs.pitchSet) {
        rate.mPitch = pbs.audioRate.mPitch;
        updatedRate = true;
    }
    if (pbs.audioFallbackModeSet) {
        rate.mFallbackMode = pbs.audioRate.mFallbackMode;
        updatedRate = true;
    }
    if (pbs.audioStretchModeSet) {
        rate.mStretchMode = pbs.audioRate.mStretchMode;
        updatedRate = true;
    }
    if (updatedRate) {
        status_t err = sync->setPlaybackParams(rate);
        if (err != OK) {
            throwExceptionAsNecessary(env, err);
            return (jfloat)0.f;
        }
    }

    sp<const MediaClock> mediaClock = sync->getMediaClock();
    if (mediaClock == NULL) {
        return (jfloat)0.f;
    }

    return (jfloat)mediaClock->getPlaybackRate();
}
365 
// getPlaybackParams: reads the currently applied audio playback rate and
// returns it to Java as a fully-populated PlaybackParams object (all
// "set" flags true, since every field reflects an actual value).
static jobject android_media_MediaSync_getPlaybackParams(
        JNIEnv *env, jobject thiz) {
    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return NULL;
    }

    PlaybackParams pbs;
    AudioPlaybackRate &audioRate = pbs.audioRate;
    sync->getPlaybackParams(&audioRate);
    ALOGV("getPlaybackParams: %f %f %d %d",
            audioRate.mSpeed, audioRate.mPitch, audioRate.mFallbackMode, audioRate.mStretchMode);

    pbs.speedSet = true;
    pbs.pitchSet = true;
    pbs.audioFallbackModeSet = true;
    pbs.audioStretchModeSet = true;

    return pbs.asJobject(env, gPlaybackParamsFields);
}
387 
// native_setSyncParams: merges only the fields explicitly set on the Java
// SyncParams over the current native sync settings, applies them (and the
// frame-rate hint, if set), then returns the media clock's resulting
// playback rate (0.f on error or when no clock exists yet).
static jfloat android_media_MediaSync_setSyncParams(
        JNIEnv *env, jobject thiz, jobject params) {
    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return (jfloat)0.f;
    }

    SyncParams scs;
    scs.fillFromJobject(env, gSyncParamsFields, params);
    ALOGV("setSyncParams: %d:%d %d:%d %d:%f %d:%f",
            scs.syncSourceSet, scs.sync.mSource,
            scs.audioAdjustModeSet, scs.sync.mAudioAdjustMode,
            scs.toleranceSet, scs.sync.mTolerance,
            scs.frameRateSet, scs.frameRate);

    // Start from the current settings and overwrite only "set" fields so
    // partial updates leave other parameters untouched.
    AVSyncSettings avsync;
    sync->getSyncParams(&avsync);
    bool updatedSync = false;
    status_t err = OK;
    if (scs.syncSourceSet) {
        avsync.mSource = scs.sync.mSource;
        updatedSync = true;
    }
    if (scs.audioAdjustModeSet) {
        avsync.mAudioAdjustMode = scs.sync.mAudioAdjustMode;
        updatedSync = true;
    }
    if (scs.toleranceSet) {
        avsync.mTolerance = scs.sync.mTolerance;
        updatedSync = true;
    }
    if (updatedSync) {
        err = sync->setSyncParams(avsync);
    }

    // The frame-rate hint is applied separately, and only if the sync
    // settings themselves were accepted.
    if (scs.frameRateSet && err == OK) {
        err = sync->setVideoFrameRateHint(scs.frameRate);
    }
    if (err != OK) {
        throwExceptionAsNecessary(env, err);
        return (jfloat)0.f;
    }

    sp<const MediaClock> mediaClock = sync->getMediaClock();
    if (mediaClock == NULL) {
        return (jfloat)0.f;
    }

    return (jfloat)mediaClock->getPlaybackRate();
}
439 
// getSyncParams: reads the current native sync settings plus the video
// frame-rate hint and returns them to Java as a SyncParams object.
// Throws IllegalStateException if the native values are out of range.
static jobject android_media_MediaSync_getSyncParams(JNIEnv *env, jobject thiz) {
    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return NULL;
    }

    SyncParams scs;
    sync->getSyncParams(&scs.sync);
    scs.frameRate = sync->getVideoFrameRate();

    ALOGV("getSyncParams: %d %d %f %f",
            scs.sync.mSource, scs.sync.mAudioAdjustMode, scs.sync.mTolerance, scs.frameRate);

    // sanity check params
    if (scs.sync.mSource >= AVSYNC_SOURCE_MAX
            || scs.sync.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX
            || scs.sync.mTolerance < 0.f
            || scs.sync.mTolerance >= AVSYNC_TOLERANCE_MAX) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return NULL;
    }

    scs.syncSourceSet = true;
    scs.audioAdjustModeSet = true;
    scs.toleranceSet = true;
    // A negative frame rate means "no hint"; only mark it set when valid.
    scs.frameRateSet = scs.frameRate >= 0.f;

    return scs.asJobject(env, gSyncParamsFields);
}
470 
android_media_MediaSync_native_flush(JNIEnv * env,jobject thiz)471 static void android_media_MediaSync_native_flush(JNIEnv *env, jobject thiz) {
472     sp<JMediaSync> sync = getMediaSync(env, thiz);
473     if (sync == NULL) {
474         throwExceptionAsNecessary(env, INVALID_OPERATION);
475         return;
476     }
477 
478     sync->flush();
479 }
480 
// native_init: resolves and caches the JNI field IDs used by this file
// (MediaSync.mNativeContext and the MediaTimestamp fields) and initializes
// the PlaybackParams/SyncParams field caches. CHECK() aborts the process
// if the framework Java classes no longer match these expectations.
static void android_media_MediaSync_native_init(JNIEnv *env) {
    ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync"));
    CHECK(clazz.get() != NULL);

    gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J");
    CHECK(gFields.context != NULL);

    clazz.reset(env->FindClass("android/media/MediaTimestamp"));
    CHECK(clazz.get() != NULL);

    gFields.mediaTimestampMediaTimeUsID =
        env->GetFieldID(clazz.get(), "mediaTimeUs", "J");
    CHECK(gFields.mediaTimestampMediaTimeUsID != NULL);

    gFields.mediaTimestampNanoTimeID =
        env->GetFieldID(clazz.get(), "nanoTime", "J");
    CHECK(gFields.mediaTimestampNanoTimeID != NULL);

    gFields.mediaTimestampClockRateID =
        env->GetFieldID(clazz.get(), "clockRate", "F");
    CHECK(gFields.mediaTimestampClockRateID != NULL);

    gSyncParamsFields.init(env);
    gPlaybackParamsFields.init(env);
}
506 
android_media_MediaSync_native_setup(JNIEnv * env,jobject thiz)507 static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) {
508     sp<JMediaSync> sync = new JMediaSync();
509 
510     setMediaSync(env, thiz, sync);
511 }
512 
// native_finalize: last-resort cleanup from the Java finalizer; identical
// to an explicit native_release().
static void android_media_MediaSync_native_finalize(JNIEnv *env, jobject thiz) {
    android_media_MediaSync_release(env, thiz);
}
516 
// Table mapping android.media.MediaSync's Java native methods (name +
// JNI signature) to the C++ implementations above; registered in
// register_android_media_MediaSync().
static JNINativeMethod gMethods[] = {
    { "native_setSurface",
      "(Landroid/view/Surface;)V",
      (void *)android_media_MediaSync_native_setSurface },

    { "native_setAudioTrack",
      "(Landroid/media/AudioTrack;)V",
      (void *)android_media_MediaSync_native_setAudioTrack },

    { "createInputSurface", "()Landroid/view/Surface;",
      (void *)android_media_MediaSync_createInputSurface },

    { "native_updateQueuedAudioData",
      "(IJ)V",
      (void *)android_media_MediaSync_native_updateQueuedAudioData },

    { "native_getTimestamp",
      "(Landroid/media/MediaTimestamp;)Z",
      (void *)android_media_MediaSync_native_getTimestamp },

    { "native_getPlayTimeForPendingAudioFrames",
      "()J",
      (void *)android_media_MediaSync_native_getPlayTimeForPendingAudioFrames },

    { "native_flush", "()V", (void *)android_media_MediaSync_native_flush },

    { "native_init", "()V", (void *)android_media_MediaSync_native_init },

    { "native_setup", "()V", (void *)android_media_MediaSync_native_setup },

    { "native_release", "()V", (void *)android_media_MediaSync_release },

    { "native_setPlaybackParams", "(Landroid/media/PlaybackParams;)F",
      (void *)android_media_MediaSync_setPlaybackParams },

    { "getPlaybackParams", "()Landroid/media/PlaybackParams;",
      (void *)android_media_MediaSync_getPlaybackParams },

    { "native_setSyncParams", "(Landroid/media/SyncParams;)F",
      (void *)android_media_MediaSync_setSyncParams },

    { "getSyncParams", "()Landroid/media/SyncParams;",
      (void *)android_media_MediaSync_getSyncParams },

    { "native_finalize", "()V", (void *)android_media_MediaSync_native_finalize },
};
563 
// Registers gMethods with the android.media.MediaSync Java class; called
// during runtime startup. Returns a negative value on failure.
int register_android_media_MediaSync(JNIEnv *env) {
    return AndroidRuntime::registerNativeMethods(
                   env, "android/media/MediaSync", gMethods, NELEM(gMethods));
}
568