大家好。在下面的代码中，如何结束移动端两个对等方之间的视频通话？如何在 Android 中实现相应的事件处理函数？我已经自定义了布局，其中有一个用于挂断的 ImageButton：点击“结束通话”按钮时应挂断通话。此外，还需要支持切换前/后摄像头，以及对音频进行静音和取消静音。
有人能在这方面帮帮我吗？
/**
 * Fragment hosting a one-to-one WebRTC video call.
 *
 * <p>Signalling is done over a socket.io server (see {@link #connectToSignallingServer()});
 * SDP offers/answers and ICE candidates are exchanged as JSON on the {@code "message"} event.
 * The local camera preview renders into {@code surfaceView2}, the remote stream into
 * {@code surfaceView}.
 *
 * <p>Call-control entry points added for the UI:
 * <ul>
 *   <li>{@link #hangup()} — ends the call (wired to {@code button_call_disconnect})</li>
 *   <li>{@link #toggleMicrophone()} — mutes/unmutes the local audio track</li>
 *   <li>{@link #switchCamera()} — flips between front and back camera</li>
 * </ul>
 */
public class VideocallFragment extends Fragment {
    private static final String TAG = "VideocallFragment";
    private static final int RC_CALL = 111;

    public static final int VIDEO_RESOLUTION_WIDTH = 1280;
    public static final int VIDEO_RESOLUTION_HEIGHT = 720;
    public static final int FPS = 30;
    public static final String VIDEO_TRACK_ID = "ARDAMSv0";

    private Socket socket;
    private boolean isInitiator;
    private boolean isStarted;
    private boolean isChannelReady;

    private ImageButton button_call_disconnect;
    private PeerConnectionFactory factory;
    private PeerConnection peerConnection;
    private EglBase rootEglBase;
    private SurfaceViewRenderer surfaceView, surfaceView2;

    // Kept as a field (the original discarded it) so switchCamera() can flip lenses
    // and onDestroy() can stop capture cleanly.
    private VideoCapturer videoCapturer;
    private VideoTrack videoTrackFromCamera;

    AudioSource audioSource;
    AudioTrack localAudioTrack;
    MediaConstraints audioConstraints;

    public VideocallFragment() {
        // Required empty public constructor.
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.video_main, container, false);
        surfaceView = rootView.findViewById(R.id.surfaceView);
        surfaceView2 = rootView.findViewById(R.id.surfaceView2);
        button_call_disconnect = rootView.findViewById(R.id.button_call_disconnect);
        start();
        // End-call button: notify the peer and tear everything down.
        button_call_disconnect.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                hangup();
            }
        });
        return rootView;
    }

    /**
     * Ends the call: tells the remote peer {@code "bye"}, disconnects from the
     * signalling server, releases all WebRTC resources and removes this fragment.
     * Safe to call more than once.
     */
    public void hangup() {
        if (socket != null) {
            sendMessage("bye");
            socket.disconnect();
            socket = null;
        }
        releaseWebRtcResources();
        // Pop this fragment from the back stack if we are still attached.
        if (isAdded() && getActivity() != null) {
            getActivity().getSupportFragmentManager().popBackStack();
        }
    }

    /**
     * Toggles the local microphone.
     *
     * @return {@code true} if the microphone is now muted, {@code false} if unmuted
     *         (or if no local audio track exists yet).
     */
    public boolean toggleMicrophone() {
        if (localAudioTrack == null) {
            return false;
        }
        boolean nowEnabled = !localAudioTrack.enabled();
        localAudioTrack.setEnabled(nowEnabled);
        return !nowEnabled;
    }

    /**
     * Switches between front and back camera. No-op if the capturer does not
     * support switching (e.g. a single-camera device).
     */
    public void switchCamera() {
        if (videoCapturer instanceof CameraVideoCapturer) {
            ((CameraVideoCapturer) videoCapturer).switchCamera(null);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        // EasyPermissions re-invokes start() via @AfterPermissionGranted once granted.
        EasyPermissions.onRequestPermissionsResult(requestCode, permissions, grantResults, this);
    }

    @Override
    public void onDestroy() {
        if (socket != null) {
            sendMessage("bye");
            socket.disconnect();
            socket = null;
        }
        releaseWebRtcResources();
        super.onDestroy();
    }

    /** Releases capturer, tracks, peer connection, renderers and EGL context. Idempotent. */
    private void releaseWebRtcResources() {
        if (videoCapturer != null) {
            try {
                videoCapturer.stopCapture();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
            videoCapturer.dispose();
            videoCapturer = null;
        }
        if (peerConnection != null) {
            peerConnection.close();
            peerConnection = null;
        }
        if (audioSource != null) {
            audioSource.dispose();
            audioSource = null;
        }
        if (surfaceView != null) {
            surfaceView.release();
        }
        if (surfaceView2 != null) {
            surfaceView2.release();
        }
        if (rootEglBase != null) {
            rootEglBase.release();
            rootEglBase = null;
        }
        isStarted = false;
    }

    /** Requests camera/mic permissions, then builds the full WebRTC pipeline. */
    @AfterPermissionGranted(RC_CALL)
    private void start() {
        String[] perms = {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
        if (EasyPermissions.hasPermissions(getContext(), perms)) {
            connectToSignallingServer();
            initializeSurfaceViews();
            initializePeerConnectionFactory();
            createVideoTrackFromCameraAndShowIt();
            initializePeerConnections();
            startStreamingVideo();
        } else {
            EasyPermissions.requestPermissions(this, "Need some permissions", RC_CALL, perms);
        }
    }

    /**
     * Connects to the socket.io signalling server and wires all signalling events.
     * NOTE(review): the server address is hard-coded to a LAN IP — move to config.
     */
    private void connectToSignallingServer() {
        try {
            socket = IO.socket("http://192.168.0.102:3000");
            socket.on(EVENT_CONNECT, args -> {
                Log.d(TAG, "connectToSignallingServer: connect");
                socket.emit("create or join", "chandra");
            }).on("ipaddr", args -> {
                Log.d(TAG, "connectToSignallingServer: ipaddr");
            }).on("created", args -> {
                // We created the room, so we will be the one sending the offer.
                Log.d(TAG, "connectToSignallingServer: created");
                isInitiator = true;
            }).on("full", args -> {
                Log.d(TAG, "connectToSignallingServer: full");
            }).on("join", args -> {
                Log.d(TAG, "connectToSignallingServer: join");
                Log.d(TAG, "connectToSignallingServer: Another peer made a request to join room");
                Log.d(TAG, "connectToSignallingServer: This peer is the initiator of room");
                isChannelReady = true;
            }).on("joined", args -> {
                Log.d(TAG, "connectToSignallingServer: joined");
                isChannelReady = true;
            }).on("log", args -> {
                for (Object arg : args) {
                    Log.d(TAG, "connectToSignallingServer: " + String.valueOf(arg));
                }
            }).on("message", args -> {
                // Single consolidated handler (the original registered "message" twice).
                Log.d(TAG, "connectToSignallingServer: got a message");
                try {
                    if (args[0] instanceof String) {
                        String message = (String) args[0];
                        if (message.equals("got user media")) {
                            maybeStart();
                        } else if (message.equals("bye") && isStarted) {
                            handleRemoteHangup();
                        }
                    } else {
                        JSONObject message = (JSONObject) args[0];
                        Log.d(TAG, "connectToSignallingServer: got message " + message);
                        if (message.getString("type").equals("offer")) {
                            Log.d(TAG, "connectToSignallingServer: received an offer " + isInitiator + " " + isStarted);
                            if (!isInitiator && !isStarted) {
                                maybeStart();
                            }
                            peerConnection.setRemoteDescription(new SimpleSdpObserver(), new SessionDescription(OFFER, message.getString("sdp")));
                            doAnswer();
                        } else if (message.getString("type").equals("answer") && isStarted) {
                            peerConnection.setRemoteDescription(new SimpleSdpObserver(), new SessionDescription(ANSWER, message.getString("sdp")));
                        } else if (message.getString("type").equals("candidate") && isStarted) {
                            Log.d(TAG, "connectToSignallingServer: receiving candidates");
                            IceCandidate candidate = new IceCandidate(message.getString("id"), message.getInt("label"), message.getString("candidate"));
                            peerConnection.addIceCandidate(candidate);
                        }
                    }
                } catch (JSONException e) {
                    Log.e(TAG, "connectToSignallingServer: bad signalling message", e);
                }
            }).on(EVENT_DISCONNECT, args -> {
                Log.d(TAG, "connectToSignallingServer: disconnect");
            });
            socket.connect();
        } catch (URISyntaxException e) {
            Log.e(TAG, "connectToSignallingServer: invalid server URI", e);
        }
    }

    /** The remote peer hung up: release everything and leave the call screen. */
    private void handleRemoteHangup() {
        Log.d(TAG, "handleRemoteHangup: remote peer ended the call");
        releaseWebRtcResources();
        if (isAdded() && getActivity() != null) {
            getActivity().runOnUiThread(() ->
                    getActivity().getSupportFragmentManager().popBackStack());
        }
    }

    /** Creates an SDP answer to a received offer and sends it to the peer. */
    private void doAnswer() {
        peerConnection.createAnswer(new SimpleSdpObserver() {
            @Override
            public void onCreateSuccess(SessionDescription sessionDescription) {
                peerConnection.setLocalDescription(new SimpleSdpObserver(), sessionDescription);
                JSONObject message = new JSONObject();
                try {
                    message.put("type", "answer");
                    message.put("sdp", sessionDescription.description);
                    sendMessage(message);
                } catch (JSONException e) {
                    Log.e(TAG, "doAnswer: could not build answer message", e);
                }
            }
        }, new MediaConstraints());
    }

    /** Starts the call once both the channel is ready and media is available. */
    private void maybeStart() {
        Log.d(TAG, "maybeStart: " + isStarted + " " + isChannelReady);
        if (!isStarted && isChannelReady) {
            isStarted = true;
            if (isInitiator) {
                doCall();
            }
        }
    }

    /** Creates an SDP offer (requesting audio+video) and sends it to the peer. */
    private void doCall() {
        MediaConstraints sdpMediaConstraints = new MediaConstraints();
        sdpMediaConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
        sdpMediaConstraints.mandatory.add(
                new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
        peerConnection.createOffer(new SimpleSdpObserver() {
            @Override
            public void onCreateSuccess(SessionDescription sessionDescription) {
                Log.d(TAG, "onCreateSuccess: ");
                peerConnection.setLocalDescription(new SimpleSdpObserver(), sessionDescription);
                JSONObject message = new JSONObject();
                try {
                    message.put("type", "offer");
                    message.put("sdp", sessionDescription.description);
                    sendMessage(message);
                } catch (JSONException e) {
                    Log.e(TAG, "doCall: could not build offer message", e);
                }
            }
        }, sdpMediaConstraints);
    }

    /** Emits a signalling message; silently skipped if the socket is gone (e.g. mid-teardown). */
    private void sendMessage(Object message) {
        if (socket != null) {
            socket.emit("message", message);
        }
    }

    /** Initializes both renderers on a shared EGL context. */
    private void initializeSurfaceViews() {
        rootEglBase = EglBase.create();
        surfaceView.init(rootEglBase.getEglBaseContext(), null);
        surfaceView.setEnableHardwareScaler(true);
        surfaceView.setMirror(true);
        surfaceView2.init(rootEglBase.getEglBaseContext(), null);
        surfaceView2.setEnableHardwareScaler(true);
        surfaceView2.setMirror(true);
    }

    /** Builds the PeerConnectionFactory with hardware video acceleration. */
    private void initializePeerConnectionFactory() {
        PeerConnectionFactory.initializeAndroidGlobals(getActivity(), true, true, true);
        factory = new PeerConnectionFactory(null);
        factory.setVideoHwAccelerationOptions(rootEglBase.getEglBaseContext(), rootEglBase.getEglBaseContext());
    }

    /** Opens the camera, starts capture and renders the local preview into surfaceView2. */
    private void createVideoTrackFromCameraAndShowIt() {
        audioConstraints = new MediaConstraints();
        videoCapturer = createVideoCapturer();
        VideoSource videoSource = factory.createVideoSource(videoCapturer);
        videoCapturer.startCapture(VIDEO_RESOLUTION_WIDTH, VIDEO_RESOLUTION_HEIGHT, FPS);
        videoTrackFromCamera = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
        videoTrackFromCamera.setEnabled(true);
        videoTrackFromCamera.addRenderer(new VideoRenderer(surfaceView2));
        // Local audio: toggled by toggleMicrophone().
        audioSource = factory.createAudioSource(audioConstraints);
        localAudioTrack = factory.createAudioTrack("101", audioSource);
    }

    private void initializePeerConnections() {
        peerConnection = createPeerConnection(factory);
    }

    /** Bundles the local audio+video tracks into a stream, attaches it, and signals readiness. */
    private void startStreamingVideo() {
        MediaStream mediaStream = factory.createLocalMediaStream("ARDAMS");
        mediaStream.addTrack(videoTrackFromCamera);
        mediaStream.addTrack(localAudioTrack);
        peerConnection.addStream(mediaStream);
        sendMessage("got user media");
    }

    /**
     * Builds the PeerConnection with a Google STUN server; the observer forwards
     * local ICE candidates to the signalling channel and renders the remote stream.
     */
    private PeerConnection createPeerConnection(PeerConnectionFactory factory) {
        ArrayList<PeerConnection.IceServer> iceServers = new ArrayList<>();
        iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));
        PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
        MediaConstraints pcConstraints = new MediaConstraints();
        PeerConnection.Observer pcObserver = new PeerConnection.Observer() {
            @Override
            public void onSignalingChange(PeerConnection.SignalingState signalingState) {
                Log.d(TAG, "onSignalingChange: ");
            }

            @Override
            public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
                Log.d(TAG, "onIceConnectionChange: ");
            }

            @Override
            public void onIceConnectionReceivingChange(boolean b) {
                Log.d(TAG, "onIceConnectionReceivingChange: ");
            }

            @Override
            public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
                Log.d(TAG, "onIceGatheringChange: ");
            }

            @Override
            public void onIceCandidate(IceCandidate iceCandidate) {
                Log.d(TAG, "onIceCandidate: ");
                JSONObject message = new JSONObject();
                try {
                    message.put("type", "candidate");
                    message.put("label", iceCandidate.sdpMLineIndex);
                    message.put("id", iceCandidate.sdpMid);
                    message.put("candidate", iceCandidate.sdp);
                    Log.d(TAG, "onIceCandidate: sending candidate " + message);
                    sendMessage(message);
                } catch (JSONException e) {
                    Log.e(TAG, "onIceCandidate: could not build candidate message", e);
                }
            }

            @Override
            public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {
                Log.d(TAG, "onIceCandidatesRemoved: ");
            }

            @Override
            public void onAddStream(MediaStream mediaStream) {
                Log.d(TAG, "onAddStream: " + mediaStream.videoTracks.size());
                // Guard against audio/video-only remote streams (original assumed both present).
                if (!mediaStream.videoTracks.isEmpty()) {
                    VideoTrack remoteVideoTrack = mediaStream.videoTracks.get(0);
                    remoteVideoTrack.setEnabled(true);
                    remoteVideoTrack.addRenderer(new VideoRenderer(surfaceView));
                }
                if (!mediaStream.audioTracks.isEmpty()) {
                    mediaStream.audioTracks.get(0).setEnabled(true);
                }
            }

            @Override
            public void onRemoveStream(MediaStream mediaStream) {
                Log.d(TAG, "onRemoveStream: ");
            }

            @Override
            public void onDataChannel(DataChannel dataChannel) {
                Log.d(TAG, "onDataChannel: ");
            }

            @Override
            public void onRenegotiationNeeded() {
                Log.d(TAG, "onRenegotiationNeeded: ");
            }
        };
        return factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
    }

    /** Picks Camera2 when the device supports it, Camera1 otherwise. */
    private VideoCapturer createVideoCapturer() {
        if (useCamera2()) {
            return createCameraCapturer(new Camera2Enumerator(getContext()));
        }
        return createCameraCapturer(new Camera1Enumerator(true));
    }

    /**
     * Prefers a front-facing camera; falls back to any other camera.
     *
     * @return a capturer, or {@code null} if no camera could be opened.
     */
    private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
        final String[] deviceNames = enumerator.getDeviceNames();
        for (String deviceName : deviceNames) {
            if (enumerator.isFrontFacing(deviceName)) {
                VideoCapturer capturer = enumerator.createCapturer(deviceName, null);
                if (capturer != null) {
                    return capturer;
                }
            }
        }
        for (String deviceName : deviceNames) {
            if (!enumerator.isFrontFacing(deviceName)) {
                VideoCapturer capturer = enumerator.createCapturer(deviceName, null);
                if (capturer != null) {
                    return capturer;
                }
            }
        }
        return null;
    }

    private boolean useCamera2() {
        return Camera2Enumerator.isSupported(getContext());
    }
}
暂无答案!
目前还没有任何答案,快来回答吧!