https://github.com/BelledonneCommunications/mediastreamer2 https://github.com/BelledonneCommunications/linphone-android啓動Linphone MediaStreamer2音頻和視頻流一起
只使用mediastreamer2庫我能夠開始與遠程IP和指定端口,並呼籲各audiostream.c方法音頻呼叫。
我還需要開始視頻通話。所以初始化videostream.c和 使用其各自的方法,並提供表面渲染遠程和本地攝像頭的飼料。我能夠用遠程端口和ip給出成功啓動視頻流。
但問題是當我開始兩個流在一起。聲音停止,視頻流也停止。只有本地攝像頭供稿。
所以我有一個神奇的方法,爲我做這一切。如果我評論它的視頻部分。音頻通話工作正常,如果評論音頻部分,視頻通話工作正常。但是當我開始時。沒有聲音沒有流媒體。 但我們獲得AudioStream啓動成功並且VideoStream啓動成功登錄。
有人可以通過linphone的經驗幫助找出正確的方法順序嗎?或者我們做錯了什麼。這是我們的方法。
JNIEXPORT jint JNICALL Java_com_myapp_services_LinPhoneMSEngine_LinPhoneMSVE_1AudioStreamStartFull
(JNIEnv *env, jclass self, jstring remote_ip, jint remote_port, jint localport, jint payloadindex, jboolean isLowEnd)
{
int bRetVal = 0;
MSVideoSize size = {320, 240};
char rtcp_tool[128]={0};
int ret;
//jboolean copy;
char cname[128]={0};
const char *cremote_ip;
ortp_warning("Audio Stream Start Full");
LOGD("Audio Stream Start Full");
cremote_ip = (*env)->GetStringUTFChars(env, remote_ip, NULL);
ortp_warning("Cremote_ip= %s", cremote_ip);
LOGD("Cremote_ip= %s", cremote_ip);
// ms_filter_enable_statistics(TRUE);
veData->queue = ortp_ev_queue_new();
veData->soundCard = NULL;
set_playback_device();
ortp_warning("sound: playback_dev_id: %s", ms_snd_card_get_string_id(veData->soundCard));
LOGD("sound: playback_dev_id: %s", ms_snd_card_get_string_id(veData->soundCard));
veData->CaptureCard = NULL;
set_capture_device();
ortp_warning("sound: capture_dev_id: %s", ms_snd_card_get_string_id(veData->CaptureCard));
LOGD("sound: capture_dev_id: %s", ms_snd_card_get_string_id(veData->CaptureCard));
veData->audioStream = audio_stream_new(msFactory ,localport, localport + 1, false);
audio_stream_enable_adaptive_bitrate_control(veData->audioStream, true);
audio_stream_enable_adaptive_jittcomp(veData->audioStream, true);
rtp_session_set_jitter_compensation(veData->audioStream->ms.sessions.rtp_session, 50);
rtp_session_enable_rtcp_mux(veData->audioStream->ms.sessions.rtp_session, true);
ret=AUDIO_STREAM_FEATURE_VOL_SND | \
AUDIO_STREAM_FEATURE_VOL_RCV;
if (!isLowEnd)
{
ret = ret | AUDIO_STREAM_FEATURE_EC | AUDIO_STREAM_FEATURE_EQUALIZER | AUDIO_STREAM_FEATURE_DTMF | AUDIO_STREAM_FEATURE_DTMF_ECHO;
audio_stream_set_features(veData->audioStream, ret);
ortp_warning("Setting Echo Canceller params");
LOGD("Setting Echo Canceller params");
rtp_session_enable_jitter_buffer(veData->audioStream->ms.sessions.rtp_session, TRUE);
audio_stream_set_echo_canceller_params(veData->audioStream, 60, 0, 128);
audio_stream_enable_gain_control(veData->audioStream, true);
audio_stream_enable_automatic_gain_control(veData->audioStream, true);
}
else
{
audio_stream_set_features(veData->audioStream, ret);
ortp_warning("No Echo Canceller params!!");
LOGD("No Echo Canceller params!!");
rtp_session_enable_jitter_buffer(veData->audioStream->ms.sessions.rtp_session, FALSE);
}
if(veData->audioStream == NULL){
ortp_warning("AudioStream is Null");
LOGD("AudioStream is Null");
bRetVal = -1;
return -1;
}
audio_stream_play_received_dtmfs(veData->audioStream, true);
snprintf(rtcp_tool,sizeof(rtcp_tool)-1,"%s-%s","Android","2.8.0");
snprintf(cname,sizeof(cname)-1,"%s-%d", cremote_ip, remote_port);
ortp_warning("cname value: %s",cname);
LOGD("cname value: %s",cname);
audio_stream_prepare_sound(veData->audioStream, veData->soundCard, veData->CaptureCard);
if(0== audio_stream_start_full(veData->audioStream,veData->prof, cremote_ip, remote_port, cremote_ip, remote_port + 1, 114, 50,NULL,NULL,veData->soundCard,veData->CaptureCard, !isLowEnd))
{
veData->rtpSession = veData->audioStream->ms.sessions.rtp_session;
ortp_warning("AudioStreamStartFull Success");
post_audio_config(veData->audioStream);
audio_stream_set_rtcp_information(veData->audioStream, cname, rtcp_tool);
}
else
{
ortp_warning("AudioStream start failed");
bRetVal = -1;
}
// init video stream
veData->videoStream = video_stream_new(msFactory, localport,localport+1,false);
video_stream_enable_adaptive_bitrate_control(veData->videoStream, true);
video_stream_enable_adaptive_jittcomp(veData->videoStream, true);
rtp_session_enable_rtcp_mux(veData->videoStream->ms.sessions.rtp_session, true);
video_stream_use_video_preset(veData->videoStream, "custom");
video_stream_set_sent_video_size(veData->videoStream, size);
video_stream_set_preview_size(veData->videoStream, size);
video_stream_enable_self_view(veData->videoStream, TRUE);
ortp_message("Video Stream : [%p] & native window id : [%p]",veData->videoStream, veData->native_window_id);
video_stream_set_native_window_id(veData->videoStream, veData->native_window_id);
ortp_message("Video Stream : [%p] & preview window id : [%p]",veData->videoStream, veData->native_preview_window_id);
video_stream_set_native_preview_window_id(veData->videoStream, veData->native_preview_window_id);
video_stream_use_preview_video_window(veData->videoStream, TRUE);
video_stream_set_device_rotation(veData->videoStream, 0);
video_stream_set_fps(veData->videoStream, 10.0);
// link audio with video
audio_stream_link_video(veData->audioStream, veData->videoStream);
ms_message("Setting webcam as %p", veData->msWebCam);
if(bRetVal != -1 && video_stream_start(veData->videoStream, veData->prof,
cremote_ip,
remote_port,
cremote_ip,
remote_port + 1,
101,
60,
veData->msWebCam) >=0) {
ortp_warning("VideoStream started successfully");
veData->rtpSession = veData->videoStream->ms.sessions.rtp_session;
video_stream_set_rtcp_information(veData->videoStream, cname,rtcp_tool);
}
else
{
ortp_warning("VideoStream start failed");
bRetVal = -1;
}
(*env)->ReleaseStringUTFChars(env, remote_ip, cremote_ip);
return bRetVal;
}
我也可以提供一個日誌文件。 – Javanator