Problem description
I am trying to build a video chat implementation on Android with the WebRTC SDK, using Firebase Firestore as the signaling mechanism. I followed a few tutorials and was able to create a peer connection successfully.
Initially I only sent a video stream from one peer to the other, and that worked. Then, when I tried to also add an audio track to the stream, the initiating peer could no longer create an offer (I found this out while debugging). As a result, the WebRTC connection could no longer be established, and I am confused about how this can happen.
Later I also found that if I add only a single track (either the audio track or the video track) my app works fine, but if I try to add both the audio and the video track, the initiating peer can no longer create an offer.
Here is the full code of my CallActivity.java, which contains the whole WebRTC implementation. I have removed a few statements from it that I believe are not relevant.
public class CallActivity extends AppCompatActivity {
private String userUid,friendUid;
private FirebaseFirestore db = FirebaseFirestore.getInstance();
private boolean isInitiator = false;
//request codes
private int CAMERA_PERMISSION_CODE = 0;
//views
SurfaceViewRenderer localVideoView,friendVideoView;
//webrtc
private EglBase rootEglBase;
private PeerConnectionFactory factory;
private PeerConnection peerConnection;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_call);
userUid = getIntent().getStringExtra("userUid");
friendUid = getIntent().getStringExtra("friendUid");
isInitiator = getIntent().getBooleanExtra("initiator",false);
checkPermissions();
initialize();
setupFireStoreListeners();
}
@Override
public void onBackPressed() {
hangup();
}
private void initialize()
{
//initialize views
rootEglBase = EglBase.create();
localVideoView = findViewById(R.id.localVideo);
localVideoView.init(rootEglBase.getEglBaseContext(),null);
localVideoView.setEnableHardwareScaler(true);
localVideoView.setMirror(true);
friendVideoView = findViewById(R.id.friendVideo);
friendVideoView.init(rootEglBase.getEglBaseContext(),null);
friendVideoView.setEnableHardwareScaler(true);
friendVideoView.setMirror(true);
//initialize peer connection factory
PeerConnectionFactory.InitializationOptions initializationOptions = PeerConnectionFactory.InitializationOptions.builder(this)
.setEnableInternalTracer(true)
.setFieldTrials("WebRTC-H264HighProfile/Enabled/")
.createInitializationOptions();
PeerConnectionFactory.initialize(initializationOptions);
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
options.disableEncryption = true;
options.disableNetworkMonitor = true;
factory = PeerConnectionFactory.builder()
.setOptions(options)
.setVideoDecoderFactory(new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext()))
.setVideoEncoderFactory(new DefaultVideoEncoderFactory(rootEglBase.getEglBaseContext(),true,true))
.createPeerConnectionFactory();
//create video track from camera and show it
VideoCapturer videoCapturer = createVideoCapturer();
if(videoCapturer == null){
finish();
return;
}
VideoSource videoSource = factory.createVideoSource(false);
SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(Thread.currentThread().getName(),rootEglBase.getEglBaseContext());
videoCapturer.initialize(surfaceTextureHelper,localVideoView.getContext(),videoSource.getCapturerObserver());
videoCapturer.startCapture(1240,720,30);
VideoTrack localVideoTrack = factory.createVideoTrack("local",videoSource);
localVideoTrack.addSink(localVideoView);
//set ice candidates to null
db.document("users/" + userUid).update("ice",null);
db.document("users/" + friendUid).update("ice",null);
//create peer connection
ArrayList<PeerConnection.IceServer> iceServers = new ArrayList<>();
iceServers.add(PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer());
PeerConnection.Observer pcObserver = new SimplePeerConnectionObserver() {
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
Log.d("WEBRTCD","Ice");
db.runTransaction(new Transaction.Function<Void>() {
@Nullable
@Override
public Void apply(@NonNull Transaction transaction) throws FirebaseFirestoreException {
List<Map> iceList = (List<Map>) transaction.get(db.document("users/" + friendUid)).get("ice");
if(iceList == null) iceList = new ArrayList<>();
Map<String,Object> ice = new HashMap<>();
ice.put("label",iceCandidate.sdpMLineIndex);
ice.put("id",iceCandidate.sdpMid);
ice.put("sdp",iceCandidate.sdp);
iceList.add(0,ice);
transaction.update(db.document("users/" + friendUid),"ice",iceList);
return null;
}
});
}
@Override
public void onAddStream(MediaStream mediaStream) {
VideoTrack remoteVideoTrack = mediaStream.videoTracks.get(0);
if(mediaStream.audioTracks.size() > 0) {
AudioTrack remoteAudioTrack = mediaStream.audioTracks.get(0);
remoteAudioTrack.setEnabled(true);
}
remoteVideoTrack.setEnabled(true);
remoteVideoTrack.addSink(friendVideoView);
}
};
peerConnection = factory.createPeerConnection(iceServers,pcObserver);
//create audio track
MediaConstraints audioConstraints = new MediaConstraints();
AudioSource audioSource = factory.createAudioSource(audioConstraints);
AudioTrack localAudioTrack = factory.createAudioTrack("local",audioSource);
peerConnection.setAudioRecording(true);
peerConnection.setAudioPlayout(true);
//add stream to peer connection
MediaStream mediaStream = factory.createLocalMediaStream("local");
//mediaStream.addTrack(localAudioTrack);
mediaStream.addTrack(localVideoTrack);
peerConnection.addStream(mediaStream);
if(isInitiator) doCall();
else doAnswer();
}
private void doCall()
{
db.document("users/" + friendUid).update("call",userUid);
MediaConstraints mediaConstraints = new MediaConstraints();
mediaConstraints.mandatory.add(new MediaConstraints.keyvaluePair("OfferToReceiveAudio","true"));
mediaConstraints.mandatory.add(new MediaConstraints.keyvaluePair("OfferToReceiveVideo","true"));
peerConnection.createOffer(new SimpleSdpObserver() {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
peerConnection.setLocalDescription(new SimpleSdpObserver(),sessionDescription);
Map<String,String> sdp = new HashMap<>();
sdp.put("type","offer");
sdp.put("desc",sessionDescription.description);
db.document("users/" + friendUid).update("sdp",sdp);
}
},mediaConstraints);
}
private void doAnswer()
{
db.document("users/" + friendUid).update("call",userUid);
db.document("users/" + userUid).get().addOnCompleteListener(new OnCompleteListener<DocumentSnapshot>() {
@Override
public void onComplete(@NonNull Task<DocumentSnapshot> task) {
if(task.isSuccessful() && task.getResult() != null)
{
Map sdpData = (Map) task.getResult().get("sdp");
MediaConstraints mediaConstraints = new MediaConstraints();
mediaConstraints.mandatory.add(new MediaConstraints.keyvaluePair("OfferToReceiveAudio","true"));
mediaConstraints.mandatory.add(new MediaConstraints.keyvaluePair("OfferToReceiveVideo","true"));
peerConnection.setRemoteDescription(new SimpleSdpObserver(),new SessionDescription(SessionDescription.Type.OFFER,(String) sdpData.get("desc")));
peerConnection.createAnswer(new SimpleSdpObserver(){
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
peerConnection.setLocalDescription(new SimpleSdpObserver(),sessionDescription);
Map<String,String> sdp = new HashMap<>();
sdp.put("type","answer");
sdp.put("desc",sessionDescription.description);
db.document("users/" + friendUid).update("sdp",sdp);
}
},mediaConstraints);
}
}
});
}
private void hangup()
{
db.document("users/" + friendUid).update("call","hangup",null,"sdp",null);
db.document("users/" + userUid).update("call",null);
}
private void setupFireStoreListeners()
{
//listen for ice candidates
db.document("users/" + userUid).addSnapshotListener(this,new EventListener<DocumentSnapshot>() {
@Override
public void onEvent(@Nullable DocumentSnapshot value,@Nullable FirebaseFirestoreException error) {
if(value != null && value.get("ice") != null)
{
List<Map> iceList = (List<Map>) value.get("ice");
if(iceList == null) iceList = new ArrayList<>();
for(Map iceCandidate : iceList) {
Log.d("WEBRTCD","Ice added");
peerConnection.addIceCandidate(new IceCandidate((String) iceCandidate.get("id"),Integer.parseInt(iceCandidate.get("label") + ""),(String) iceCandidate.get("sdp")));
}
//db.document("users/" + userUid).update("ice",null);
}
}
});
//listen for hangup
db.document("users/" + userUid).addSnapshotListener(this,@Nullable FirebaseFirestoreException error) {
if(value != null && value.get("call") != null && value.get("call").equals("hangup"))
{
db.document("users/" + userUid).update("call",null);
endCall();
}
}
});
//listen for answer if initiator
if(!isInitiator) return;
db.document("users/" + userUid).addSnapshotListener(this,@Nullable FirebaseFirestoreException error) {
if(value != null && value.get("sdp") != null) {
peerConnection.setRemoteDescription(new SimpleSdpObserver(),new SessionDescription(SessionDescription.Type.ANSWER,(String) ((Map) value.get("sdp")).get("desc")));
db.document("users/" + userUid).update("sdp",null);
}
}
});
}
private void endCall(){
peerConnection.close();
super.onBackPressed();
}
private VideoCapturer createVideoCapturer() {
VideoCapturer videoCapturer;
CameraEnumerator enumerator;
if(Camera2Enumerator.isSupported(this))
enumerator = new Camera2Enumerator(this);
else
enumerator = new Camera1Enumerator(true);
for (String device : enumerator.getDeviceNames()) {
if(enumerator.isFrontFacing(device)) {
videoCapturer = enumerator.createCapturer(device,null);
if(videoCapturer != null)
return videoCapturer;
}
}
for (String device : enumerator.getDeviceNames()) {
if(!enumerator.isFrontFacing(device)) {
videoCapturer = enumerator.createCapturer(device,null);
if(videoCapturer != null)
return videoCapturer;
}
}
return null;
}
}
Please note this line
//mediaStream.addTrack(localAudioTrack);
at the end of the initialize() function. With this line commented out everything works fine, but if I uncomment it, WebRTC cannot establish the connection because an offer is never created successfully, and I don't know how to fix that. I tried googling, but only found a few unrelated issues about the WebRTC browser API. I don't understand how this single line can stop the whole WebRTC connection from working. (I know WebRTC fails to create the offer because, when this line is uncommented, the onCreateSuccess() callback passed to peerConnection.createOffer() is never called.) Since everything works fine with this line commented out, I don't think the Firestore code that does the signaling is the problem.
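One way to see why the offer fails is to also override onCreateFailure() in the SdpObserver passed to createOffer(), so the error string that WebRTC reports ends up in Logcat. A minimal sketch, assuming SimpleSdpObserver is a no-op implementation of org.webrtc.SdpObserver whose callbacks can be overridden:
peerConnection.createOffer(new SimpleSdpObserver() {
    @Override
    public void onCreateSuccess(SessionDescription sessionDescription) {
        Log.d("WEBRTCD", "Offer created");
        peerConnection.setLocalDescription(new SimpleSdpObserver(), sessionDescription);
    }

    @Override
    public void onCreateFailure(String error) {
        // WebRTC passes a human-readable reason here when the offer cannot be created.
        Log.e("WEBRTCD", "createOffer failed: " + error);
    }
}, mediaConstraints);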
Here is the build.gradle file:
apply plugin: 'com.android.application'
apply plugin: 'com.google.gms.google-services'
android {
compileSdkVersion 29
buildToolsVersion "29.0.3"
defaultConfig {
applicationId "com.example.myapplication"
minSdkVersion 19
targetSdkVersion 29
versionCode 1
versionName "1.0"
multiDexEnabled true
vectorDrawables.useSupportLibrary = true
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'),'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation fileTree(dir: 'libs',include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
implementation 'com.google.firebase:firebase-firestore:21.5.0'
implementation 'com.android.support:multidex:1.0.3'
implementation 'com.google.firebase:firebase-auth:19.3.2'
implementation 'com.google.firebase:firebase-storage:19.1.1'
implementation 'androidx.navigation:navigation-fragment:2.3.0'
implementation 'androidx.navigation:navigation-ui:2.3.0'
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0'
implementation 'com.google.firebase:firebase-messaging:20.2.4'
testImplementation 'junit:junit:4.13'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
implementation 'com.google.android.material:material:1.2.0'
implementation 'androidx.documentfile:documentfile:1.0.1'
implementation "com.mikepenz:materialdrawer:6.1.2"
implementation 'org.webrtc:google-webrtc:1.0.30039'
}
I have followed several online tutorials on WebRTC for Android, and all of them only stream video (which works in my app). Since there is no official WebRTC documentation for this, I have no idea how to solve the problem. Please help!
Solution
OK, so I figured it out: the audio and video tracks must have unique IDs. In my case both the audio track and the video track had the ID "local", which confused WebRTC; they need different IDs. In addition, the two clients in a call should use different IDs for their streams.
In my case I changed the audio and video track IDs to _audio and _video respectively, and now it works.
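For reference, a minimal sketch of what the track and stream creation in initialize() looks like with unique IDs. Prefixing with userUid is just one possible choice (an assumption here); any value that differs per track, and per peer for the stream, would do:
// Give the audio track, the video track and the local stream distinct IDs
// instead of reusing "local" for all three. Prefixing with userUid (assumed
// unique per peer) also keeps the two clients' stream IDs different.
VideoTrack localVideoTrack = factory.createVideoTrack(userUid + "_video", videoSource);
localVideoTrack.addSink(localVideoView);

AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
AudioTrack localAudioTrack = factory.createAudioTrack(userUid + "_audio", audioSource);

MediaStream mediaStream = factory.createLocalMediaStream(userUid + "_stream");
mediaStream.addTrack(localAudioTrack); // adding both tracks is now fine
mediaStream.addTrack(localVideoTrack);
peerConnection.addStream(mediaStream);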