1. OpenSL ES development flow and key interfaces
Working through the native-audio sample in the android-ndk shows that applications built on OpenSL ES follow a fairly fixed pattern:
1.1 The OpenSL ES development flow has the following six main steps:
1. Create the engine object
2. Create the output mix (mixer)
3. Create the player (or recorder)
4. Set up the buffer queue and register a callback
5. Set the play state
6. Kick off the callback

Steps 4 and 6 are only needed when playing raw data formats such as PCM.
1.2 SLObjectItf, the most important interface in OpenSL ES
Through the SLObjectItf interface we create every other kind of interface object we need, for example: the engine object (SLObjectItf engineObject), the output mix object (SLObjectItf outputMixObject), the player object (SLObjectItf playerObject), and so on. All of them are created as SLObjectItf; once you know this rule, reading the source code becomes much easier.
1.3 Obtaining concrete interface instances from an SLObjectItf
OpenSL ES also defines concrete interface types, for example the engine (SLEngineItf), the player (SLPlayItf), the volume control (SLVolumeItf), and so on.
1.4 Creating and realizing the engine
The first step of every OpenSL ES program is to declare an engine object engineObject of type SLObjectItf and create it with slCreateEngine. Once created, the object must be realized with SLObjectItf's Realize method; finally, SLObjectItf's GetInterface method initializes the SLEngineItf instance. For example:

SLObjectItf engineObject = NULL; // engine object, declared as SLObjectItf
SLEngineItf engineEngine = NULL; // concrete engine interface instance

void createEngine()
{
    SLresult result; // return code
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);                     // step 1: create the engine object
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);                  // realize engineObject
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine); // initialize engineEngine via GetInterface
}
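The snippets in this article mostly ignore the returned SLresult; the NDK native-audio sample instead checks every call with assert. A minimal checked variant of createEngine might look like this (a sketch; createEngineChecked is just an illustrative name):

#include <assert.h>
#include <SLES/OpenSLES.h>

static SLObjectItf engineObject = NULL; // engine object
static SLEngineItf engineEngine = NULL; // engine interface

static void createEngineChecked(void)
{
    SLresult result;

    // create the engine object
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    assert(SL_RESULT_SUCCESS == result);

    // realize the engine object synchronously
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);

    // obtain the engine interface
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    assert(SL_RESULT_SUCCESS == result);
}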
1.5 Creating the other interface objects from the engine
All of the other interface objects (SLObjectItf outputMixObject, SLObjectItf playerObject, and so on) are created from the engine interface object (the parameters each object needs are not covered here; see the NDK sample). For example:

// output mix
SLObjectItf outputMixObject = NULL;                           // output mix object, declared as SLObjectItf
SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL; // concrete reverb interface instance
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mids, mreq); // create the output mix from the engine
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);                  // realize the output mix
result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB, &outputMixEnvironmentalReverb); // initialize the concrete reverb instance

// player
SLObjectItf playerObject = NULL; // player object, declared as SLObjectItf
SLPlayItf playerPlay = NULL;     // concrete play interface instance
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &playerObject, &audioSrc, &audioSnk, 3, ids, req); // create the player from the engine
result = (*playerObject)->Realize(playerObject, SL_BOOLEAN_FALSE);              // realize the player
result = (*playerObject)->GetInterface(playerObject, SL_IID_PLAY, &playerPlay); // initialize the concrete play instance
1.6 Finally, use the concrete interface instances obtained this way to implement the actual functionality, as in the sketch below.
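For instance, once the SLPlayItf and SLVolumeItf instances have been obtained with GetInterface, playback is controlled directly through them. A minimal sketch (the helper names are illustrative, not part of the NDK sample):

#include <SLES/OpenSLES.h>

// playerPlay / playerVolume were obtained via GetInterface with SL_IID_PLAY / SL_IID_VOLUME
static void startPlayback(SLPlayItf playerPlay, SLVolumeItf playerVolume)
{
    // SetVolumeLevel takes millibels: 0 is full volume, negative values attenuate
    (*playerVolume)->SetVolumeLevel(playerVolume, -1000); // roughly -10 dB

    // start playing
    (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_PLAYING);
}

static void stopPlayback(SLPlayItf playerPlay)
{
    (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_PAUSED);  // pause
    (*playerPlay)->SetPlayState(playerPlay, SL_PLAYSTATE_STOPPED); // stop
}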
2. Concrete examples
First, link against OpenSL ES and the other required libraries:
target_link_libraries( # Specifies the target library.
        openslaudioywl5320
        OpenSLES
        android
        # Links the target library to the log library
        # included in the NDK.
        ${log-lib} )

2.1 Playing an asset file
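The native source in the examples below also needs the corresponding NDK headers, plus a LOGI logging macro that is used later but never defined in these snippets. A sketch of that boilerplate (the log tag is an arbitrary choice):

#include <jni.h>
#include <stdio.h>

#include <SLES/OpenSLES.h>              // core OpenSL ES API
#include <SLES/OpenSLES_Android.h>      // Android extensions (simple buffer queue, fd/uri locators)

#include <android/asset_manager.h>      // AAssetManager / AAsset
#include <android/asset_manager_jni.h>  // AAssetManager_fromJava
#include <android/log.h>                // __android_log_print

#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "openslaudio", FORMAT, ##__VA_ARGS__)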
Step 1: create the engine
Step 2: create the output mix
Step 3: create the player
Step 4: set the play state
JNIEXPORT void JNICALL
Java_com_ywl5320_openslaudio_MainActivity_playAudioByOpenSL_1assets(JNIEnv *env, jobject instance,
                                                                    jobject assetManager, jstring filename)
{
    release();
    const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);

    // use asset manager to open asset by filename
    AAssetManager* mgr = AAssetManager_fromJava(env, assetManager);
    AAsset* asset = AAssetManager_open(mgr, utf8, AASSET_MODE_UNKNOWN);
    (*env)->ReleaseStringUTFChars(env, filename, utf8);

    // open asset as file descriptor
    off_t start, length;
    int fd = AAsset_openFileDescriptor(asset, &start, &length);
    AAsset_close(asset);

    SLresult result;

    // step 1: create the engine
    createEngine();

    // step 2: create the output mix
    const SLInterfaceID mids[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean mreq[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mids, mreq);
    (void)result;
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    (void)result;
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                              &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
        (void)result;
    }

    // step 3: configure the player parameters and create the player
    // 1. configure audio source
    SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_fd, &format_mime};

    // 2. configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};

    // create the audio player
    const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &fdPlayerObject, &audioSrc, &audioSnk,
                                                3, ids, req);
    (void)result;

    // realize the player
    result = (*fdPlayerObject)->Realize(fdPlayerObject, SL_BOOLEAN_FALSE);
    (void)result;

    // get the play interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_PLAY, &fdPlayerPlay);
    (void)result;

    // get the volume interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_VOLUME, &fdPlayerVolume);
    (void)result;

    // step 4: set the play state
    if (NULL != fdPlayerPlay) {
        result = (*fdPlayerPlay)->SetPlayState(fdPlayerPlay, SL_PLAYSTATE_PLAYING);
        (void)result;
    }

    // set the playback volume in millibels (100 * -50 would be mute)
    (*fdPlayerVolume)->SetVolumeLevel(fdPlayerVolume, 20 * -50);
}
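Both JNI entry points call release() before creating a new player. Its body is not shown here, but a typical implementation destroys any existing objects in reverse order of creation, roughly as in this sketch (which globals you tear down, and whether you keep the engine alive between calls, depends on your own code):

static void release(void)
{
    // destroying an object invalidates every interface obtained from it,
    // so the interface pointers are cleared as well

    if (fdPlayerObject != NULL) {            // asset / file-descriptor player
        (*fdPlayerObject)->Destroy(fdPlayerObject);
        fdPlayerObject = NULL;
        fdPlayerPlay = NULL;
        fdPlayerVolume = NULL;
    }

    if (uriPlayerObject != NULL) {           // URI player
        (*uriPlayerObject)->Destroy(uriPlayerObject);
        uriPlayerObject = NULL;
        uriPlayerPlay = NULL;
        uriPlayerVolume = NULL;
    }

    if (outputMixObject != NULL) {           // output mix
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
        outputMixEnvironmentalReverb = NULL;
    }

    if (engineObject != NULL) {              // engine
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }
}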
2.2 Playing from a URI
Step 1: create the engine
Step 2: create the output mix
Step 3: create the player
Step 4: set the play state
JNIEXPORT void JNICALL
Java_com_ywl5320_openslaudio_MainActivity_playAudioByOpenSL_1uri(JNIEnv *env, jobject instance,
                                                                 jstring uri)
{
    SLresult result;
    release();

    // convert Java string to UTF-8
    const char *utf8 = (*env)->GetStringUTFChars(env, uri, NULL);

    // step 1: create the engine
    createEngine();

    // step 2: create the output mix
    const SLInterfaceID mids[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean mreq[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mids, mreq);
    (void)result;
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    (void)result;
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                              &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
        (void)result;
    }

    // step 3: configure the player parameters and create the player
    // configure audio source
    // (requires the INTERNET permission depending on the uri parameter)
    SLDataLocator_URI loc_uri = {SL_DATALOCATOR_URI, (SLchar *) utf8};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_uri, &format_mime};

    // configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};

    // create the audio player
    const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &uriPlayerObject, &audioSrc, &audioSnk,
                                                3, ids, req);
    (void)result;

    // release the Java string and UTF-8
    (*env)->ReleaseStringUTFChars(env, uri, utf8);

    // realize the player
    result = (*uriPlayerObject)->Realize(uriPlayerObject, SL_BOOLEAN_FALSE);
    // this will always succeed on Android, but we check result for portability to other platforms
    if (SL_RESULT_SUCCESS != result) {
        (*uriPlayerObject)->Destroy(uriPlayerObject);
        uriPlayerObject = NULL;
        return;
    }

    // get the play interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_PLAY, &uriPlayerPlay);
    (void)result;

    // get the volume interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_VOLUME, &uriPlayerVolume);
    (void)result;

    // step 4: set the play state
    if (NULL != uriPlayerPlay) {
        result = (*uriPlayerPlay)->SetPlayState(uriPlayerPlay, SL_PLAYSTATE_PLAYING);
        (void)result;
    }

    // set the playback volume in millibels (100 * -50 would be mute)
    // (*uriPlayerVolume)->SetVolumeLevel(uriPlayerVolume, 0 * -50);
}
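Since the player above was created with SL_IID_SEEK among its requested interfaces, a seek interface can be fetched the same way, for example to loop the whole stream. A sketch (uriPlayerSeek is a hypothetical variable, not part of the snippets above):

SLSeekItf uriPlayerSeek = NULL;

// get the seek interface (available because SL_IID_SEEK was requested at creation)
result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_SEEK, &uriPlayerSeek);
(void)result;

// enable looping over the whole stream
if (NULL != uriPlayerSeek) {
    (*uriPlayerSeek)->SetLoop(uriPlayerSeek, SL_BOOLEAN_TRUE, 0, SL_TIME_UNKNOWN);
}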
2.3 Playing a PCM file (when integrated with FFmpeg, what gets played is likewise the PCM data that FFmpeg decodes); here, to keep the demo simple, a PCM audio file is read directly.
2.3.1 Creating the engine and the output mix
// step 1: create the engine
createEngine();

// step 2: create the output mix
const SLInterfaceID mids[1] = {SL_IID_ENVIRONMENTALREVERB};
const SLboolean mreq[1] = {SL_BOOLEAN_FALSE};
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mids, mreq);
(void)result;
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
(void)result;
result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                          &outputMixEnvironmentalReverb);
if (SL_RESULT_SUCCESS == result) {
    result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
            outputMixEnvironmentalReverb, &reverbSettings);
    (void)result;
}
SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&outputMix, NULL};
2.3.2 Configuring the PCM format (sample rate, bit depth, etc.) and creating the player
// step 3: configure the PCM format
SLDataLocator_AndroidSimpleBufferQueue android_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM pcm = {
        SL_DATAFORMAT_PCM,                              // PCM data
        2,                                              // 2 channels (stereo)
        SL_SAMPLINGRATE_44_1,                           // 44100 Hz sample rate
        SL_PCMSAMPLEFORMAT_FIXED_16,                    // 16 bits per sample
        SL_PCMSAMPLEFORMAT_FIXED_16,                    // container size, same as the sample size
        SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT, // stereo channel mask (front left + front right)
        SL_BYTEORDER_LITTLEENDIAN                       // byte order (little-endian)
};
SLDataSource slDataSource = {&android_queue, &pcm};

const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, SL_IID_VOLUME};
const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &pcmPlayerObject, &slDataSource, &audioSnk,
                                            3, ids, req);

// realize the player
(*pcmPlayerObject)->Realize(pcmPlayerObject, SL_BOOLEAN_FALSE);

// get the play interface
(*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_PLAY, &pcmPlayerPlay);
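The format descriptor changes with the audio layout, and numChannels must stay consistent with channelMask. For example, mono 16-bit PCM at 8 kHz would be described roughly like this (an illustrative sketch, not used elsewhere in this article):

// mono, 16-bit, 8000 Hz PCM (illustrative)
SLDataFormat_PCM pcm_mono = {
        SL_DATAFORMAT_PCM,
        1,                           // 1 channel (mono)
        SL_SAMPLINGRATE_8,           // 8000 Hz
        SL_PCMSAMPLEFORMAT_FIXED_16, // 16 bits per sample
        SL_PCMSAMPLEFORMAT_FIXED_16, // container size, same as the sample size
        SL_SPEAKER_FRONT_CENTER,     // channel mask consistent with 1 channel
        SL_BYTEORDER_LITTLEENDIAN    // byte order
};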
2.3.3 Setting up the buffer queue and the callback
// get the buffer queue interface
(*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_BUFFERQUEUE, &pcmBufferQueue);
// register the buffer queue callback
(*pcmBufferQueue)->RegisterCallback(pcmBufferQueue, pcmBufferCallBack, NULL);
The callback function:
void pcmBufferCallBack(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    // assert(NULL == context);
    getPcmData(&buffer);
    // for streaming playback, replace this test by logic to find and fill the next buffer
    if (NULL != buffer) {
        SLresult result;
        // enqueue another buffer
        result = (*pcmBufferQueue)->Enqueue(pcmBufferQueue, buffer, 44100 * 2 * 2);
        // the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
        // which for this code example would indicate a programming error
        (void)result;
    }
}
Reading the PCM file:
void getPcmData(void **pcm)
{
    while (!feof(pcmFile)) {
        // read up to one second of 44.1 kHz, 16-bit, stereo audio
        size_t bytesRead = fread(out_buffer, 1, 44100 * 2 * 2, pcmFile);
        if (bytesRead == 0) {
            LOGI("%s", "read end");
            *pcm = NULL; // nothing left to play, so the callback stops enqueueing
            break;
        } else {
            LOGI("%s", "reading");
        }
        *pcm = out_buffer;
        break;
    }
}
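getPcmData relies on the globals pcmFile, out_buffer, and buffer, which are never initialized in the snippets above. A minimal sketch of that setup (createPcmBuffer is a hypothetical helper, called once before playback starts):

#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>

static FILE *pcmFile = NULL;        // PCM file handle read by getPcmData
static void *buffer = NULL;         // pointer handed to Enqueue by the callback
static uint8_t *out_buffer = NULL;  // staging buffer filled by fread

// hypothetical helper: open the PCM file and allocate one second of 44.1 kHz, 16-bit, stereo audio
static int createPcmBuffer(const char *path)
{
    pcmFile = fopen(path, "rb");
    if (pcmFile == NULL) {
        return -1; // file could not be opened
    }
    out_buffer = (uint8_t *) malloc(44100 * 2 * 2);
    return (out_buffer != NULL) ? 0 : -1;
}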
2.3.4 Setting the play state and manually invoking the callback to start playback
// set the play state to playing
(*pcmPlayerPlay)->SetPlayState(pcmPlayerPlay, SL_PLAYSTATE_PLAYING);

// manually invoke the callback once to start feeding the queue
pcmBufferCallBack(pcmBufferQueue, NULL);

Note: in the callback, result = (*pcmBufferQueue)->Enqueue(pcmBufferQueue, buffer, 44100 * 2 * 2) enqueues a fixed 44100 * 2 * 2 bytes (one second of 44.1 kHz, 16-bit, stereo audio); the size passed to Enqueue should match the amount of valid data actually in the buffer.
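To avoid enqueueing stale or padded data on the last read, one option is to have the read function report how many bytes it actually produced and pass exactly that size to Enqueue. A sketch of such a variant (getPcmDataSized and pcmBufferCallBackSized are hypothetical names, not part of the original sample):

// hypothetical variant: fills *pcm and returns the number of bytes read, 0 at end of file
static size_t getPcmDataSized(void **pcm)
{
    size_t bytesRead = 0;
    if (!feof(pcmFile)) {
        bytesRead = fread(out_buffer, 1, 44100 * 2 * 2, pcmFile);
    }
    *pcm = (bytesRead > 0) ? out_buffer : NULL;
    return bytesRead;
}

static void pcmBufferCallBackSized(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    void *data = NULL;
    size_t size = getPcmDataSized(&data);
    if (data != NULL && size > 0) {
        // enqueue exactly the number of valid bytes instead of a fixed 44100 * 2 * 2
        (*bq)->Enqueue(bq, data, (SLuint32) size);
    }
}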