enable video on audio call

commit 120068d09a
parent baff661e96
Author: Avently
Date: 2023-11-04 04:22:34 +08:00

7 changed files with 129 additions and 23 deletions
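In short: the call webview now tells the native host when remote tracks appear or disappear, via a new "media" response, and both the Android and desktop UIs flip Call.remoteVideoEnabled / Call.remoteAudioEnabled from it, so an audio call can switch to the video layout mid-call. A minimal sketch of that notification, using the types this diff adds to call.ts (the full ontrack wiring is in the diff below):

// Minimal sketch, not part of the diff: report that remote video was enabled.
// WCEnableMedia and WVApiMessage are the types defined in call.ts below;
// sendMessageToNative posts the message to the platform's webview bridge.
const resp: WCEnableMedia = {type: "media", media: CallMediaType.Video, enable: true}
const apiResp: WVApiMessage = {corrId: undefined, resp, command: undefined}
sendMessageToNative(apiResp)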

View File

@@ -132,6 +132,12 @@ actual fun ActiveCallView() {
       is WCallResponse.Ice -> withBGApi {
         chatModel.controller.apiSendCallExtraInfo(call.contact, r.iceCandidates)
       }
+      is WCallResponse.Media -> {
+        when (r.media) {
+          CallMediaType.Video -> call.remoteVideoEnabled.value = r.enable
+          CallMediaType.Audio -> call.remoteAudioEnabled.value = r.enable
+        }
+      }
       is WCallResponse.Connection ->
         try {
           val callStatus = json.decodeFromString<WebRTCCallStatus>("\"${r.state.connectionState}\"")
@@ -275,8 +281,8 @@ private fun ActiveCallOverlayLayout(
   flipCamera: () -> Unit
 ) {
   Column(Modifier.padding(DEFAULT_PADDING)) {
-    when (call.peerMedia ?: call.localMedia) {
-      CallMediaType.Video -> {
+    when {
+      remember { call.remoteVideoEnabled }.value || (call.peerMedia ?: call.localMedia) == CallMediaType.Video -> {
         CallInfoView(call, alignment = Alignment.Start)
         Box(Modifier.fillMaxWidth().fillMaxHeight().weight(1f), contentAlignment = Alignment.BottomCenter) {
           DisabledBackgroundCallsButton()
@@ -296,7 +302,7 @@ private fun ActiveCallOverlayLayout(
           }
         }
       }
-      CallMediaType.Audio -> {
+      else -> {
         Spacer(Modifier.fillMaxHeight().weight(1f))
         Column(
           Modifier.fillMaxWidth(),

View File

@@ -1,5 +1,7 @@
 package chat.simplex.common.views.call

+import androidx.compose.runtime.MutableState
+import androidx.compose.runtime.mutableStateOf
 import chat.simplex.common.views.helpers.generalGetString
 import chat.simplex.common.model.*
 import chat.simplex.res.MR
@@ -35,6 +37,9 @@ data class Call(
   }
   val hasMedia: Boolean get() = callState == CallState.OfferSent || callState == CallState.Negotiated || callState == CallState.Connected
+
+  val remoteAudioEnabled: MutableState<Boolean> = mutableStateOf(true)
+  val remoteVideoEnabled: MutableState<Boolean> = mutableStateOf(localMedia == CallMediaType.Video)
 }

 enum class CallState {
@@ -83,6 +88,7 @@ sealed class WCallResponse {
   @Serializable @SerialName("offer") data class Offer(val offer: String, val iceCandidates: String, val capabilities: CallCapabilities): WCallResponse()
   @Serializable @SerialName("answer") data class Answer(val answer: String, val iceCandidates: String): WCallResponse()
   @Serializable @SerialName("ice") data class Ice(val iceCandidates: String): WCallResponse()
+  @Serializable @SerialName("media") data class Media(val media: CallMediaType, val enable: Boolean): WCallResponse()
   @Serializable @SerialName("connection") data class Connection(val state: ConnectionState): WCallResponse()
   @Serializable @SerialName("connected") data class Connected(val connectionInfo: ConnectionInfo): WCallResponse()
   @Serializable @SerialName("end") object End: WCallResponse()

View File

@@ -396,11 +396,37 @@ const processCommand = (function () {
                 console.log("set up decryption for receiving");
                 setupPeerTransform(TransformOperation.Decrypt, event.receiver, call.worker, call.aesKey, call.key);
             }
+            const hadAudio = call.remoteStream.getTracks().some((elem) => elem.kind == "audio" && elem.enabled);
+            const hadVideo = call.remoteStream.getTracks().some((elem) => elem.kind == "video" && elem.enabled);
             for (const stream of event.streams) {
+                stream.onaddtrack = (event) => {
+                    console.log("LALAL ADDED TRACK " + event.track.kind);
+                };
                 for (const track of stream.getTracks()) {
                     call.remoteStream.addTrack(track);
                 }
             }
+            const hasAudio = call.remoteStream.getTracks().some((elem) => elem.kind == "audio" && elem.enabled);
+            const hasVideo = call.remoteStream.getTracks().some((elem) => elem.kind == "video" && elem.enabled);
+            console.log(`LALAL HAS AUDIO ${hasAudio} ${hasVideo} ${JSON.stringify(call.remoteStream.getTracks())}`);
+            if (hadAudio != hasAudio) {
+                const resp = {
+                    type: "media",
+                    media: CallMediaType.Audio,
+                    enable: hasAudio,
+                };
+                const apiResp = { corrId: undefined, resp, command: undefined };
+                sendMessageToNative(apiResp);
+            }
+            if (hadVideo != hasVideo) {
+                const resp = {
+                    type: "media",
+                    media: CallMediaType.Video,
+                    enable: hasVideo,
+                };
+                const apiResp = { corrId: undefined, resp, command: undefined };
+                sendMessageToNative(apiResp);
+            }
             console.log(`ontrack success`);
         }
         catch (e) {
@@ -441,8 +467,6 @@ const processCommand = (function () {
         if (!videos)
             throw Error("no video elements");
         const pc = call.connection;
-        const oldAudioTracks = call.localStream.getAudioTracks();
-        const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled);
         let localStream;
         try {
             localStream = call.screenShareEnabled ? await getLocalScreenCaptureStream() : await getLocalMediaStream(call.localMedia, camera);
@@ -458,24 +482,39 @@ const processCommand = (function () {
         call.localCamera = camera;
         const audioTracks = localStream.getAudioTracks();
         const videoTracks = localStream.getVideoTracks();
-        if (!audioWasEnabled && oldAudioTracks.length > 0) {
-            audioTracks.forEach((elem) => (elem.enabled = false));
+        const audioWasEnabled = call.localStream.getAudioTracks().some((elem) => elem.enabled);
+        if (!audioWasEnabled && call.localStream.getAudioTracks().length > 0) {
+            enableMedia(localStream, CallMediaType.Audio, false);
         }
         if (!call.cameraEnabled && !call.screenShareEnabled) {
-            videoTracks.forEach((elem) => (elem.enabled = false));
+            enableMedia(localStream, CallMediaType.Video, false);
         }
-        replaceTracks(pc, audioTracks);
-        replaceTracks(pc, videoTracks);
+        replaceTracks(pc, audioTracks, false);
+        replaceTracks(pc, videoTracks, call.screenShareEnabled);
         call.localStream = localStream;
         videos.local.srcObject = localStream;
     }
-    function replaceTracks(pc, tracks) {
+    function replaceTracks(pc, tracks, addIfNeeded) {
+        var _a;
         if (!tracks.length)
             return;
         const sender = pc.getSenders().find((s) => { var _a; return ((_a = s.track) === null || _a === void 0 ? void 0 : _a.kind) === tracks[0].kind; });
         if (sender)
             for (const t of tracks)
                 sender.replaceTrack(t);
+        else if (addIfNeeded) {
+            for (const track of tracks)
+                pc.addTrack(track, activeCall.localStream);
+            const call = activeCall;
+            if (call.aesKey && call.key) {
+                console.log("set up encryption for sending");
+                for (const sender of pc.getSenders()) {
+                    if (((_a = sender.track) === null || _a === void 0 ? void 0 : _a.kind) == "video") {
+                        setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key);
+                    }
+                }
+            }
+        }
     }
     function setupPeerTransform(operation, peer, worker, aesKey, key) {
         if (worker && "RTCRtpScriptTransform" in window) {
function setupPeerTransform(operation, peer, worker, aesKey, key) {
if (worker && "RTCRtpScriptTransform" in window) {

View File

@@ -74,9 +74,9 @@ function reactOnMessageFromServer(msg) {
         case "start":
             document.getElementById("toggle-audio").style.display = "inline-block";
             document.getElementById("toggle-speaker").style.display = "inline-block";
+            document.getElementById("toggle-screen").style.display = "inline-block";
             if (msg.command.media == CallMediaType.Video) {
                 document.getElementById("toggle-video").style.display = "inline-block";
-                document.getElementById("toggle-screen").style.display = "inline-block";
             }
             document.getElementById("info-block").className = msg.command.media;
             break;

View File

@@ -67,6 +67,12 @@ actual fun ActiveCallView() {
       is WCallResponse.Ice -> withBGApi {
         chatModel.controller.apiSendCallExtraInfo(call.contact, r.iceCandidates)
       }
+      is WCallResponse.Media -> {
+        when (r.media) {
+          CallMediaType.Video -> call.remoteVideoEnabled.value = r.enable
+          CallMediaType.Audio -> call.remoteAudioEnabled.value = r.enable
+        }
+      }
       is WCallResponse.Connection ->
         try {
           val callStatus = json.decodeFromString<WebRTCCallStatus>("\"${r.state.connectionState}\"")

View File

@@ -23,6 +23,7 @@ type WCallResponse =
   | WCallOffer
   | WCallAnswer
   | WCallIceCandidates
+  | WCEnableMedia
   | WRConnection
   | WRCallConnected
   | WRCallEnd
@@ -33,7 +34,18 @@ type WCallResponse =
 type WCallCommandTag = "capabilities" | "start" | "offer" | "answer" | "ice" | "media" | "camera" | "description" | "end"

-type WCallResponseTag = "capabilities" | "offer" | "answer" | "ice" | "connection" | "connected" | "end" | "ended" | "ok" | "error"
+type WCallResponseTag =
+  | "capabilities"
+  | "offer"
+  | "answer"
+  | "ice"
+  | "media"
+  | "connection"
+  | "connected"
+  | "end"
+  | "ended"
+  | "ok"
+  | "error"

 enum CallMediaType {
   Audio = "audio",
@@ -98,7 +110,7 @@ interface WCallIceCandidates extends IWCallCommand, IWCallResponse {
   iceCandidates: string // JSON strings for RTCIceCandidateInit[]
 }

-interface WCEnableMedia extends IWCallCommand {
+interface WCEnableMedia extends IWCallCommand, IWCallResponse {
   type: "media"
   media: CallMediaType
   enable: boolean
@@ -594,11 +606,37 @@ const processCommand = (function () {
           console.log("set up decryption for receiving")
           setupPeerTransform(TransformOperation.Decrypt, event.receiver as RTCRtpReceiverWithEncryption, call.worker, call.aesKey, call.key)
         }
+        const hadAudio = call.remoteStream.getTracks().some((elem) => elem.kind == "audio" && elem.enabled)
+        const hadVideo = call.remoteStream.getTracks().some((elem) => elem.kind == "video" && elem.enabled)
         for (const stream of event.streams) {
+          stream.onaddtrack = (event) => {
+            console.log("LALAL ADDED TRACK " + event.track.kind)
+          }
           for (const track of stream.getTracks()) {
             call.remoteStream.addTrack(track)
           }
         }
+        const hasAudio = call.remoteStream.getTracks().some((elem) => elem.kind == "audio" && elem.enabled)
+        const hasVideo = call.remoteStream.getTracks().some((elem) => elem.kind == "video" && elem.enabled)
+        console.log(`LALAL HAS AUDIO ${hasAudio} ${hasVideo} ${JSON.stringify(call.remoteStream.getTracks())}`)
+        if (hadAudio != hasAudio) {
+          const resp: WCEnableMedia = {
+            type: "media",
+            media: CallMediaType.Audio,
+            enable: hasAudio,
+          }
+          const apiResp: WVApiMessage = {corrId: undefined, resp, command: undefined}
+          sendMessageToNative(apiResp)
+        }
+        if (hadVideo != hasVideo) {
+          const resp: WCEnableMedia = {
+            type: "media",
+            media: CallMediaType.Video,
+            enable: hasVideo,
+          }
+          const apiResp: WVApiMessage = {corrId: undefined, resp, command: undefined}
+          sendMessageToNative(apiResp)
+        }
         console.log(`ontrack success`)
       } catch (e) {
         console.log(`ontrack error: ${(e as Error).message}`)
@@ -639,8 +677,6 @@ const processCommand = (function () {
     const videos = getVideoElements()
     if (!videos) throw Error("no video elements")
     const pc = call.connection
-    const oldAudioTracks = call.localStream.getAudioTracks()
-    const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled)
     let localStream: MediaStream
     try {
       localStream = call.screenShareEnabled ? await getLocalScreenCaptureStream() : await getLocalMediaStream(call.localMedia, camera)
@@ -655,23 +691,36 @@ const processCommand = (function () {
     const audioTracks = localStream.getAudioTracks()
     const videoTracks = localStream.getVideoTracks()
-    if (!audioWasEnabled && oldAudioTracks.length > 0) {
-      audioTracks.forEach((elem) => (elem.enabled = false))
+    const audioWasEnabled = call.localStream.getAudioTracks().some((elem) => elem.enabled)
+    if (!audioWasEnabled && call.localStream.getAudioTracks().length > 0) {
+      enableMedia(localStream, CallMediaType.Audio, false)
     }
     if (!call.cameraEnabled && !call.screenShareEnabled) {
-      videoTracks.forEach((elem) => (elem.enabled = false))
+      enableMedia(localStream, CallMediaType.Video, false)
     }
-    replaceTracks(pc, audioTracks)
-    replaceTracks(pc, videoTracks)
+    replaceTracks(pc, audioTracks, false)
+    replaceTracks(pc, videoTracks, call.screenShareEnabled)
     call.localStream = localStream
     videos.local.srcObject = localStream
   }

-  function replaceTracks(pc: RTCPeerConnection, tracks: MediaStreamTrack[]) {
+  function replaceTracks(pc: RTCPeerConnection, tracks: MediaStreamTrack[], addIfNeeded: boolean) {
     if (!tracks.length) return
     const sender = pc.getSenders().find((s) => s.track?.kind === tracks[0].kind)
     if (sender) for (const t of tracks) sender.replaceTrack(t)
+    else if (addIfNeeded) {
+      for (const track of tracks) pc.addTrack(track, activeCall!.localStream)
+      const call = activeCall!
+      if (call.aesKey && call.key) {
+        console.log("set up encryption for sending")
+        for (const sender of pc.getSenders() as RTCRtpSenderWithEncryption[]) {
+          if (sender.track?.kind == "video") {
+            setupPeerTransform(TransformOperation.Encrypt, sender, call.worker, call.aesKey, call.key)
+          }
+        }
+      }
+    }
   }
function setupPeerTransform(

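The behavioural crux above is the new addIfNeeded parameter of replaceTracks: if the connection has no sender for the track's kind yet (an audio-only call gaining video or screen share), the tracks are attached with pc.addTrack, and for e2e-encrypted calls an encryption transform is installed on each new video sender. A minimal usage sketch, assuming pc and camera are in scope as in the calling code above (note the diff itself passes call.screenShareEnabled rather than a literal true):

// Minimal sketch, not part of the diff: attach local video to a connection
// that was negotiated audio-only, creating the missing video sender.
const stream = await getLocalMediaStream(CallMediaType.Video, camera)
replaceTracks(pc, stream.getVideoTracks(), true) // no video sender yet, so addTrack is used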
View File

@@ -81,9 +81,9 @@ function reactOnMessageFromServer(msg: WVApiMessage) {
     case "start":
       document.getElementById("toggle-audio")!!.style.display = "inline-block"
       document.getElementById("toggle-speaker")!!.style.display = "inline-block"
+      document.getElementById("toggle-screen")!!.style.display = "inline-block"
       if (msg.command.media == CallMediaType.Video) {
         document.getElementById("toggle-video")!!.style.display = "inline-block"
-        document.getElementById("toggle-screen")!!.style.display = "inline-block"
       }
       document.getElementById("info-block")!!.className = msg.command.media
       break