android, desktop: support calls on desktop and moved www dir to different root (#3219)

* android, desktop: support calls on desktop and moved www dir to different root

* add page title, fix links on Android, change timeouts

* using worker in desktop Chrome and Safari

* ui changes

* end call button in app bar

* fix android

* a lot of enhancements

* fix after merge master

* layout

* sound play on call

---------

Co-authored-by: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
This commit is contained in:
Stanislav Dmitrenko 2023-10-23 01:47:27 +08:00 committed by GitHub
parent 1401f56288
commit 530ec70171
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
54 changed files with 1262 additions and 159 deletions

View File

@ -77,6 +77,7 @@ android {
}
jniLibs.useLegacyPackaging = rootProject.extra["compression.level"] as Int != 0
}
android.sourceSets["main"].assets.setSrcDirs(listOf("../common/src/commonMain/resources/assets"))
val isRelease = gradle.startParameter.taskNames.find { it.toLowerCase().contains("release") } != null
val isBundle = gradle.startParameter.taskNames.find { it.toLowerCase().contains("bundle") } != null
// if (isRelease) {

View File

@ -98,6 +98,8 @@ kotlin {
implementation("com.sshtools:two-slices:0.9.0-SNAPSHOT")
implementation("org.slf4j:slf4j-simple:2.0.7")
implementation("uk.co.caprica:vlcj:4.7.3")
implementation("com.github.NanoHttpd.nanohttpd:nanohttpd:efb2ebf85a")
implementation("com.github.NanoHttpd.nanohttpd:nanohttpd-websocket:efb2ebf85a")
}
}
val desktopTest by getting

View File

@ -18,6 +18,7 @@ import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.*
import androidx.compose.runtime.*
import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.runtime.snapshots.SnapshotStateList
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
@ -43,6 +44,9 @@ import chat.simplex.res.MR
import com.google.accompanist.permissions.rememberMultiplePermissionsState
import dev.icerock.moko.resources.StringResource
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.distinctUntilChanged
import kotlinx.coroutines.flow.filterNotNull
import kotlinx.datetime.Clock
import kotlinx.serialization.decodeFromString
import kotlinx.serialization.encodeToString
@ -52,7 +56,7 @@ actual fun ActiveCallView() {
val chatModel = ChatModel
BackHandler(onBack = {
val call = chatModel.activeCall.value
if (call != null) withApi { chatModel.callManager.endCall(call) }
if (call != null) withBGApi { chatModel.callManager.endCall(call) }
})
val audioViaBluetooth = rememberSaveable { mutableStateOf(false) }
val ntfModeService = remember { chatModel.controller.appPrefs.notificationsMode.get() == NotificationsMode.SERVICE }
@ -112,30 +116,30 @@ actual fun ActiveCallView() {
if (call != null) {
Log.d(TAG, "has active call $call")
when (val r = apiMsg.resp) {
is WCallResponse.Capabilities -> withApi {
is WCallResponse.Capabilities -> withBGApi {
val callType = CallType(call.localMedia, r.capabilities)
chatModel.controller.apiSendCallInvitation(call.contact, callType)
chatModel.activeCall.value = call.copy(callState = CallState.InvitationSent, localCapabilities = r.capabilities)
}
is WCallResponse.Offer -> withApi {
is WCallResponse.Offer -> withBGApi {
chatModel.controller.apiSendCallOffer(call.contact, r.offer, r.iceCandidates, call.localMedia, r.capabilities)
chatModel.activeCall.value = call.copy(callState = CallState.OfferSent, localCapabilities = r.capabilities)
}
is WCallResponse.Answer -> withApi {
is WCallResponse.Answer -> withBGApi {
chatModel.controller.apiSendCallAnswer(call.contact, r.answer, r.iceCandidates)
chatModel.activeCall.value = call.copy(callState = CallState.Negotiated)
}
is WCallResponse.Ice -> withApi {
is WCallResponse.Ice -> withBGApi {
chatModel.controller.apiSendCallExtraInfo(call.contact, r.iceCandidates)
}
is WCallResponse.Connection ->
try {
val callStatus = json.decodeFromString<WebRTCCallStatus>("\"${r.state.connectionState}\"")
if (callStatus == WebRTCCallStatus.Connected) {
chatModel.activeCall.value = call.copy(callState = CallState.Connected)
chatModel.activeCall.value = call.copy(callState = CallState.Connected, connectedAt = Clock.System.now())
setCallSound(call.soundSpeaker, audioViaBluetooth)
}
withApi { chatModel.controller.apiCallStatus(call.contact, callStatus) }
withBGApi { chatModel.controller.apiCallStatus(call.contact, callStatus) }
} catch (e: Error) {
Log.d(TAG,"call status ${r.state.connectionState} not used")
}
@ -145,9 +149,12 @@ actual fun ActiveCallView() {
setCallSound(call.soundSpeaker, audioViaBluetooth)
}
}
is WCallResponse.End -> {
withBGApi { chatModel.callManager.endCall(call) }
}
is WCallResponse.Ended -> {
chatModel.activeCall.value = call.copy(callState = CallState.Ended)
withApi { chatModel.callManager.endCall(call) }
withBGApi { chatModel.callManager.endCall(call) }
chatModel.showCallView.value = false
}
is WCallResponse.Ok -> when (val cmd = apiMsg.command) {
@ -162,7 +169,7 @@ actual fun ActiveCallView() {
is WCallCommand.Camera -> {
chatModel.activeCall.value = call.copy(localCamera = cmd.camera)
if (!call.audioEnabled) {
chatModel.callCommand.value = WCallCommand.Media(CallMediaType.Audio, enable = false)
chatModel.callCommand.add(WCallCommand.Media(CallMediaType.Audio, enable = false))
}
}
is WCallCommand.End ->
@ -187,11 +194,14 @@ actual fun ActiveCallView() {
// Lock orientation to portrait in order to have good experience with calls
activity.requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
chatModel.activeCallViewIsVisible.value = true
// After the first call, the End command gets added to the list, which prevents making more calls
chatModel.callCommand.removeAll { it is WCallCommand.End }
onDispose {
activity.volumeControlStream = prevVolumeControlStream
// Unlock orientation
activity.requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED
chatModel.activeCallViewIsVisible.value = false
chatModel.callCommand.clear()
}
}
}
@ -201,9 +211,9 @@ private fun ActiveCallOverlay(call: Call, chatModel: ChatModel, audioViaBluetoot
ActiveCallOverlayLayout(
call = call,
speakerCanBeEnabled = !audioViaBluetooth.value,
dismiss = { withApi { chatModel.callManager.endCall(call) } },
toggleAudio = { chatModel.callCommand.value = WCallCommand.Media(CallMediaType.Audio, enable = !call.audioEnabled) },
toggleVideo = { chatModel.callCommand.value = WCallCommand.Media(CallMediaType.Video, enable = !call.videoEnabled) },
dismiss = { withBGApi { chatModel.callManager.endCall(call) } },
toggleAudio = { chatModel.callCommand.add(WCallCommand.Media(CallMediaType.Audio, enable = !call.audioEnabled)) },
toggleVideo = { chatModel.callCommand.add(WCallCommand.Media(CallMediaType.Video, enable = !call.videoEnabled)) },
toggleSound = {
var call = chatModel.activeCall.value
if (call != null) {
@ -212,7 +222,7 @@ private fun ActiveCallOverlay(call: Call, chatModel: ChatModel, audioViaBluetoot
setCallSound(call.soundSpeaker, audioViaBluetooth)
}
},
flipCamera = { chatModel.callCommand.value = WCallCommand.Camera(call.localCamera.flipped) }
flipCamera = { chatModel.callCommand.add(WCallCommand.Camera(call.localCamera.flipped)) }
)
}
@ -439,7 +449,7 @@ private fun DisabledBackgroundCallsButton() {
//}
@Composable
fun WebRTCView(callCommand: MutableState<WCallCommand?>, onResponse: (WVAPIMessage) -> Unit) {
fun WebRTCView(callCommand: SnapshotStateList<WCallCommand>, onResponse: (WVAPIMessage) -> Unit) {
val scope = rememberCoroutineScope()
val webView = remember { mutableStateOf<WebView?>(null) }
val permissionsState = rememberMultiplePermissionsState(
@ -470,13 +480,19 @@ fun WebRTCView(callCommand: MutableState<WCallCommand?>, onResponse: (WVAPIMessa
webView.value = null
}
}
LaunchedEffect(callCommand.value, webView.value) {
val cmd = callCommand.value
val wv = webView.value
if (cmd != null && wv != null) {
Log.d(TAG, "WebRTCView LaunchedEffect executing $cmd")
processCommand(wv, cmd)
callCommand.value = null
val wv = webView.value
if (wv != null) {
LaunchedEffect(Unit) {
snapshotFlow { callCommand.firstOrNull() }
.distinctUntilChanged()
.filterNotNull()
.collect {
while (callCommand.isNotEmpty()) {
val cmd = callCommand.removeFirst()
Log.d(TAG, "WebRTCView LaunchedEffect executing $cmd")
processCommand(wv, cmd)
}
}
}
}
val assetLoader = WebViewAssetLoader.Builder()
@ -502,7 +518,7 @@ fun WebRTCView(callCommand: MutableState<WCallCommand?>, onResponse: (WVAPIMessa
}
}
}
this.webViewClient = LocalContentWebViewClient(assetLoader)
this.webViewClient = LocalContentWebViewClient(webView, assetLoader)
this.clearHistory()
this.clearCache(true)
this.addJavascriptInterface(WebRTCInterface(onResponse), "WebRTCInterface")
@ -512,19 +528,10 @@ fun WebRTCView(callCommand: MutableState<WCallCommand?>, onResponse: (WVAPIMessa
webViewSettings.javaScriptEnabled = true
webViewSettings.mediaPlaybackRequiresUserGesture = false
webViewSettings.cacheMode = WebSettings.LOAD_NO_CACHE
this.loadUrl("file:android_asset/www/call.html")
this.loadUrl("file:android_asset/www/android/call.html")
}
}
) { wv ->
Log.d(TAG, "WebRTCView: webview ready")
// for debugging
// wv.evaluateJavascript("sendMessageToNative = ({resp}) => WebRTCInterface.postMessage(JSON.stringify({command: resp}))", null)
scope.launch {
delay(2000L)
wv.evaluateJavascript("sendMessageToNative = (msg) => WebRTCInterface.postMessage(JSON.stringify(msg))", null)
webView.value = wv
}
}
) { /* WebView */ }
}
}
}
@ -539,19 +546,28 @@ class WebRTCInterface(private val onResponse: (WVAPIMessage) -> Unit) {
// for debugging
// onResponse(message)
onResponse(json.decodeFromString(message))
} catch (e: Error) {
} catch (e: Exception) {
Log.e(TAG, "failed parsing WebView message: $message")
}
}
}
private class LocalContentWebViewClient(private val assetLoader: WebViewAssetLoader) : WebViewClientCompat() {
private class LocalContentWebViewClient(val webView: MutableState<WebView?>, private val assetLoader: WebViewAssetLoader) : WebViewClientCompat() {
override fun shouldInterceptRequest(
view: WebView,
request: WebResourceRequest
): WebResourceResponse? {
return assetLoader.shouldInterceptRequest(request.url)
}
override fun onPageFinished(view: WebView, url: String) {
super.onPageFinished(view, url)
view.evaluateJavascript("sendMessageToNative = (msg) => WebRTCInterface.postMessage(JSON.stringify(msg))", null)
webView.value = view
Log.d(TAG, "WebRTCView: webview ready")
// for debugging
// view.evaluateJavascript("sendMessageToNative = ({resp}) => WebRTCInterface.postMessage(JSON.stringify({command: resp}))", null)
}
}
@Preview

View File

@ -0,0 +1,8 @@
package chat.simplex.common.views.chatlist
import androidx.compose.runtime.*
import chat.simplex.common.views.helpers.*
import kotlinx.coroutines.flow.MutableStateFlow
@Composable
actual fun DesktopActiveCallOverlayLayout(newChatSheetState: MutableStateFlow<AnimatedViewState>) {}

View File

@ -88,7 +88,7 @@ object ChatModel {
val activeCallInvitation = mutableStateOf<RcvCallInvitation?>(null)
val activeCall = mutableStateOf<Call?>(null)
val activeCallViewIsVisible = mutableStateOf<Boolean>(false)
val callCommand = mutableStateOf<WCallCommand?>(null)
val callCommand = mutableStateListOf<WCallCommand>()
val showCallView = mutableStateOf(false)
val switchingCall = mutableStateOf(false)

View File

@ -1647,25 +1647,25 @@ object ChatController {
val useRelay = appPrefs.webrtcPolicyRelay.get()
val iceServers = getIceServers()
Log.d(TAG, ".callOffer iceServers $iceServers")
chatModel.callCommand.value = WCallCommand.Offer(
chatModel.callCommand.add(WCallCommand.Offer(
offer = r.offer.rtcSession,
iceCandidates = r.offer.rtcIceCandidates,
media = r.callType.media,
aesKey = r.sharedKey,
iceServers = iceServers,
relay = useRelay
)
))
}
}
is CR.CallAnswer -> {
withCall(r, r.contact) { call ->
chatModel.activeCall.value = call.copy(callState = CallState.AnswerReceived)
chatModel.callCommand.value = WCallCommand.Answer(answer = r.answer.rtcSession, iceCandidates = r.answer.rtcIceCandidates)
chatModel.callCommand.add(WCallCommand.Answer(answer = r.answer.rtcSession, iceCandidates = r.answer.rtcIceCandidates))
}
}
is CR.CallExtraInfo -> {
withCall(r, r.contact) { _ ->
chatModel.callCommand.value = WCallCommand.Ice(iceCandidates = r.extraInfo.rtcIceCandidates)
chatModel.callCommand.add(WCallCommand.Ice(iceCandidates = r.extraInfo.rtcIceCandidates))
}
}
is CR.CallEnded -> {
@ -1674,7 +1674,7 @@ object ChatController {
chatModel.callManager.reportCallRemoteEnded(invitation = invitation)
}
withCall(r, r.contact) { _ ->
chatModel.callCommand.value = WCallCommand.End
chatModel.callCommand.add(WCallCommand.End)
withApi {
chatModel.activeCall.value = null
chatModel.showCallView.value = false

View File

@ -3,8 +3,6 @@ package chat.simplex.common.views.call
import chat.simplex.common.model.ChatModel
import chat.simplex.common.platform.*
import chat.simplex.common.views.helpers.withApi
import chat.simplex.common.views.helpers.withBGApi
import chat.simplex.common.views.usersettings.showInDevelopingAlert
import kotlinx.datetime.Clock
import kotlin.time.Duration.Companion.minutes
@ -26,10 +24,6 @@ class CallManager(val chatModel: ChatModel) {
}
fun acceptIncomingCall(invitation: RcvCallInvitation) {
if (appPlatform.isDesktop) {
return showInDevelopingAlert()
}
val call = chatModel.activeCall.value
if (call == null) {
justAcceptIncomingCall(invitation = invitation)
@ -58,12 +52,12 @@ class CallManager(val chatModel: ChatModel) {
val useRelay = controller.appPrefs.webrtcPolicyRelay.get()
val iceServers = getIceServers()
Log.d(TAG, "answerIncomingCall iceServers: $iceServers")
callCommand.value = WCallCommand.Start(
callCommand.add(WCallCommand.Start(
media = invitation.callType.media,
aesKey = invitation.sharedKey,
iceServers = iceServers,
relay = useRelay
)
))
callInvitations.remove(invitation.contact.id)
if (invitation.contact.id == activeCallInvitation.value?.contact?.id) {
activeCallInvitation.value = null
@ -80,7 +74,7 @@ class CallManager(val chatModel: ChatModel) {
showCallView.value = false
} else {
Log.d(TAG, "CallManager.endCall: ending call...")
callCommand.value = WCallCommand.End
callCommand.add(WCallCommand.End)
showCallView.value = false
controller.apiEndCall(call.contact)
activeCall.value = null

View File

@ -1,7 +1,5 @@
package chat.simplex.common.views.call
import androidx.compose.runtime.Composable
import dev.icerock.moko.resources.compose.stringResource
import chat.simplex.common.views.helpers.generalGetString
import chat.simplex.common.model.*
import chat.simplex.res.MR
@ -23,16 +21,17 @@ data class Call(
val videoEnabled: Boolean = localMedia == CallMediaType.Video,
val soundSpeaker: Boolean = localMedia == CallMediaType.Video,
var localCamera: VideoCamera = VideoCamera.User,
val connectionInfo: ConnectionInfo? = null
val connectionInfo: ConnectionInfo? = null,
var connectedAt: Instant? = null
) {
val encrypted: Boolean get() = localEncrypted && sharedKey != null
val localEncrypted: Boolean get() = localCapabilities?.encryption ?: false
val encryptionStatus: String @Composable get() = when(callState) {
val encryptionStatus: String get() = when(callState) {
CallState.WaitCapabilities -> ""
CallState.InvitationSent -> stringResource(if (localEncrypted) MR.strings.status_e2e_encrypted else MR.strings.status_no_e2e_encryption)
CallState.InvitationAccepted -> stringResource(if (sharedKey == null) MR.strings.status_contact_has_no_e2e_encryption else MR.strings.status_contact_has_e2e_encryption)
else -> stringResource(if (!localEncrypted) MR.strings.status_no_e2e_encryption else if (sharedKey == null) MR.strings.status_contact_has_no_e2e_encryption else MR.strings.status_e2e_encrypted)
CallState.InvitationSent -> generalGetString(if (localEncrypted) MR.strings.status_e2e_encrypted else MR.strings.status_no_e2e_encryption)
CallState.InvitationAccepted -> generalGetString(if (sharedKey == null) MR.strings.status_contact_has_no_e2e_encryption else MR.strings.status_contact_has_e2e_encryption)
else -> generalGetString(if (!localEncrypted) MR.strings.status_no_e2e_encryption else if (sharedKey == null) MR.strings.status_contact_has_no_e2e_encryption else MR.strings.status_e2e_encrypted)
}
val hasMedia: Boolean get() = callState == CallState.OfferSent || callState == CallState.Negotiated || callState == CallState.Connected
@ -49,16 +48,16 @@ enum class CallState {
Connected,
Ended;
val text: String @Composable get() = when(this) {
WaitCapabilities -> stringResource(MR.strings.callstate_starting)
InvitationSent -> stringResource(MR.strings.callstate_waiting_for_answer)
InvitationAccepted -> stringResource(MR.strings.callstate_starting)
OfferSent -> stringResource(MR.strings.callstate_waiting_for_confirmation)
OfferReceived -> stringResource(MR.strings.callstate_received_answer)
AnswerReceived -> stringResource(MR.strings.callstate_received_confirmation)
Negotiated -> stringResource(MR.strings.callstate_connecting)
Connected -> stringResource(MR.strings.callstate_connected)
Ended -> stringResource(MR.strings.callstate_ended)
val text: String get() = when(this) {
WaitCapabilities -> generalGetString(MR.strings.callstate_starting)
InvitationSent -> generalGetString(MR.strings.callstate_waiting_for_answer)
InvitationAccepted -> generalGetString(MR.strings.callstate_starting)
OfferSent -> generalGetString(MR.strings.callstate_waiting_for_confirmation)
OfferReceived -> generalGetString(MR.strings.callstate_received_answer)
AnswerReceived -> generalGetString(MR.strings.callstate_received_confirmation)
Negotiated -> generalGetString(MR.strings.callstate_connecting)
Connected -> generalGetString(MR.strings.callstate_connected)
Ended -> generalGetString(MR.strings.callstate_ended)
}
}
@ -67,13 +66,14 @@ enum class CallState {
@Serializable
sealed class WCallCommand {
@Serializable @SerialName("capabilities") object Capabilities: WCallCommand()
@Serializable @SerialName("capabilities") data class Capabilities(val media: CallMediaType): WCallCommand()
@Serializable @SerialName("start") data class Start(val media: CallMediaType, val aesKey: String? = null, val iceServers: List<RTCIceServer>? = null, val relay: Boolean? = null): WCallCommand()
@Serializable @SerialName("offer") data class Offer(val offer: String, val iceCandidates: String, val media: CallMediaType, val aesKey: String? = null, val iceServers: List<RTCIceServer>? = null, val relay: Boolean? = null): WCallCommand()
@Serializable @SerialName("answer") data class Answer (val answer: String, val iceCandidates: String): WCallCommand()
@Serializable @SerialName("ice") data class Ice(val iceCandidates: String): WCallCommand()
@Serializable @SerialName("media") data class Media(val media: CallMediaType, val enable: Boolean): WCallCommand()
@Serializable @SerialName("camera") data class Camera(val camera: VideoCamera): WCallCommand()
@Serializable @SerialName("description") data class Description(val state: String, val description: String): WCallCommand()
@Serializable @SerialName("end") object End: WCallCommand()
}
@ -85,6 +85,7 @@ sealed class WCallResponse {
@Serializable @SerialName("ice") data class Ice(val iceCandidates: String): WCallResponse()
@Serializable @SerialName("connection") data class Connection(val state: ConnectionState): WCallResponse()
@Serializable @SerialName("connected") data class Connected(val connectionInfo: ConnectionInfo): WCallResponse()
@Serializable @SerialName("end") object End: WCallResponse()
@Serializable @SerialName("ended") object Ended: WCallResponse()
@Serializable @SerialName("ok") object Ok: WCallResponse()
@Serializable @SerialName("error") data class Error(val message: String): WCallResponse()
@ -106,14 +107,14 @@ sealed class WCallResponse {
}
@Serializable data class CallCapabilities(val encryption: Boolean)
@Serializable data class ConnectionInfo(private val localCandidate: RTCIceCandidate?, private val remoteCandidate: RTCIceCandidate?) {
val text: String @Composable get() {
val text: String get() {
val local = localCandidate?.candidateType
val remote = remoteCandidate?.candidateType
return when {
local == RTCIceCandidateType.Host && remote == RTCIceCandidateType.Host ->
stringResource(MR.strings.call_connection_peer_to_peer)
generalGetString(MR.strings.call_connection_peer_to_peer)
local == RTCIceCandidateType.Relay && remote == RTCIceCandidateType.Relay ->
stringResource(MR.strings.call_connection_via_relay)
generalGetString(MR.strings.call_connection_via_relay)
else ->
"${local?.value ?: "unknown"} / ${remote?.value ?: "unknown"}"
}

View File

@ -33,7 +33,6 @@ import chat.simplex.common.views.helpers.*
import chat.simplex.common.model.GroupInfo
import chat.simplex.common.platform.*
import chat.simplex.common.platform.AudioPlayer
import chat.simplex.common.views.usersettings.showInDevelopingAlert
import chat.simplex.res.MR
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.*
@ -274,23 +273,24 @@ fun ChatView(chatId: String, chatModel: ChatModel, onComposed: suspend (chatId:
withApi { chatModel.controller.apiJoinGroup(groupId) }
},
startCall = out@ { media ->
if (appPlatform.isDesktop) {
return@out showInDevelopingAlert()
}
withBGApi {
val cInfo = chat.chatInfo
if (cInfo is ChatInfo.Direct) {
chatModel.activeCall.value = Call(contact = cInfo.contact, callState = CallState.WaitCapabilities, localMedia = media)
chatModel.showCallView.value = true
chatModel.callCommand.value = WCallCommand.Capabilities
chatModel.callCommand.add(WCallCommand.Capabilities(media))
}
}
},
endCall = {
val call = chatModel.activeCall.value
if (call != null) withApi { chatModel.callManager.endCall(call) }
},
acceptCall = { contact ->
hideKeyboard(view)
val invitation = chatModel.callInvitations.remove(contact.id)
if (invitation == null) {
AlertManager.shared.showAlertMsg("Call already ended!")
AlertManager.shared.showAlertMsg(generalGetString(MR.strings.call_already_ended))
} else {
chatModel.callManager.acceptIncomingCall(invitation = invitation)
}
@ -433,6 +433,7 @@ fun ChatLayout(
cancelFile: (Long) -> Unit,
joinGroup: (Long) -> Unit,
startCall: (CallMediaType) -> Unit,
endCall: () -> Unit,
acceptCall: (Contact) -> Unit,
acceptFeature: (Contact, ChatFeature, Int?) -> Unit,
openDirectChat: (Long) -> Unit,
@ -491,7 +492,7 @@ fun ChatLayout(
}
Scaffold(
topBar = { ChatInfoToolbar(chat, back, info, startCall, addMembers, changeNtfsState, onSearchValueChanged) },
topBar = { ChatInfoToolbar(chat, back, info, startCall, endCall, addMembers, changeNtfsState, onSearchValueChanged) },
bottomBar = composeView,
modifier = Modifier.navigationBarsWithImePadding(),
floatingActionButton = { floatingButton.value() },
@ -520,6 +521,7 @@ fun ChatInfoToolbar(
back: () -> Unit,
info: () -> Unit,
startCall: (CallMediaType) -> Unit,
endCall: () -> Unit,
addMembers: (GroupInfo) -> Unit,
changeNtfsState: (Boolean, currentValue: MutableState<Boolean>) -> Unit,
onSearchValueChanged: (String) -> Unit,
@ -540,6 +542,7 @@ fun ChatInfoToolbar(
}
val barButtons = arrayListOf<@Composable RowScope.() -> Unit>()
val menuItems = arrayListOf<@Composable () -> Unit>()
val activeCall by remember { chatModel.activeCall }
menuItems.add {
ItemAction(stringResource(MR.strings.search_verb), painterResource(MR.images.ic_search), onClick = {
showMenu.value = false
@ -548,20 +551,52 @@ fun ChatInfoToolbar(
}
if (chat.chatInfo is ChatInfo.Direct && chat.chatInfo.contact.allowsFeature(ChatFeature.Calls)) {
barButtons.add {
IconButton({
showMenu.value = false
startCall(CallMediaType.Audio)
},
enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active) {
Icon(
painterResource(MR.images.ic_call_500),
stringResource(MR.strings.icon_descr_more_button),
tint = if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) MaterialTheme.colors.primary else MaterialTheme.colors.secondary
)
if (activeCall == null) {
barButtons.add {
IconButton(
{
showMenu.value = false
startCall(CallMediaType.Audio)
},
enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active
) {
Icon(
painterResource(MR.images.ic_call_500),
stringResource(MR.strings.icon_descr_more_button),
tint = if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) MaterialTheme.colors.primary else MaterialTheme.colors.secondary
)
}
}
} else if (activeCall?.contact?.id == chat.id) {
barButtons.add {
val call = remember { chatModel.activeCall }.value
val connectedAt = call?.connectedAt
if (connectedAt != null) {
val time = remember { mutableStateOf(durationText(0)) }
LaunchedEffect(Unit) {
while (true) {
time.value = durationText((Clock.System.now() - connectedAt).inWholeSeconds.toInt())
delay(250)
}
}
val sp50 = with(LocalDensity.current) { 50.sp.toDp() }
Text(time.value, Modifier.widthIn(min = sp50))
}
}
barButtons.add {
IconButton({
showMenu.value = false
endCall()
}) {
Icon(
painterResource(MR.images.ic_call_end_filled),
null,
tint = MaterialTheme.colors.error
)
}
}
}
if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) {
if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active && activeCall == null) {
menuItems.add {
ItemAction(stringResource(MR.strings.icon_descr_video_call).capitalize(Locale.current), painterResource(MR.images.ic_videocam), onClick = {
showMenu.value = false
@ -1290,6 +1325,7 @@ fun PreviewChatLayout() {
cancelFile = {},
joinGroup = {},
startCall = {},
endCall = {},
acceptCall = { _ -> },
acceptFeature = { _, _, _ -> },
openDirectChat = { _ -> },
@ -1359,6 +1395,7 @@ fun PreviewGroupChatLayout() {
cancelFile = {},
joinGroup = {},
startCall = {},
endCall = {},
acceptCall = { _ -> },
acceptFeature = { _, _, _ -> },
openDirectChat = { _ -> },

View File

@ -3,7 +3,6 @@ package chat.simplex.common.views.chat.group
import InfoRow
import SectionBottomSpacer
import SectionDividerSpaced
import SectionItemView
import SectionSpacer
import SectionTextFooter
import SectionView
@ -35,7 +34,7 @@ import chat.simplex.common.views.newchat.*
import chat.simplex.common.views.usersettings.SettingsActionItem
import chat.simplex.common.model.GroupInfo
import chat.simplex.common.platform.*
import chat.simplex.common.views.chatlist.openChat
import chat.simplex.common.views.chatlist.openLoadedChat
import chat.simplex.res.MR
import kotlinx.datetime.Clock
@ -87,7 +86,7 @@ fun GroupMemberInfoView(
if (memberContact != null) {
val memberChat = Chat(ChatInfo.Direct(memberContact), chatItems = arrayListOf())
chatModel.addChat(memberChat)
openChat(memberChat, chatModel)
openLoadedChat(memberChat, chatModel)
closeAll()
chatModel.setContactNetworkStatus(memberContact, NetworkStatus.Connected())
}

View File

@ -68,7 +68,7 @@ fun AcceptCallButton(cInfo: ChatInfo, acceptCall: (Contact) -> Unit) {
// sharedKey: invitation.sharedKey
// )
// m.showCallView = true
// m.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey, useWorker: true)
// m.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey: true)
// } else {
// AlertManager.shared.showAlertMsg(title: "Call already ended!")
// }
@ -141,7 +141,7 @@ fun AcceptCallButton(cInfo: ChatInfo, acceptCall: (Contact) -> Unit) {
// sharedKey: invitation.sharedKey
// )
// m.showCallView = true
// m.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey, useWorker: true)
// m.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey: true)
// } else {
// AlertManager.shared.showAlertMsg(title: "Call already ended!")
// }

View File

@ -4,7 +4,6 @@ import SectionItemView
import androidx.compose.foundation.layout.*
import androidx.compose.material.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.platform.LocalDensity
@ -13,10 +12,6 @@ import dev.icerock.moko.resources.compose.stringResource
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.desktop.ui.tooling.preview.Preview
import androidx.compose.foundation.*
import androidx.compose.foundation.interaction.InteractionSource
import androidx.compose.ui.graphics.drawscope.ContentDrawScope
import androidx.compose.ui.platform.LocalViewConfiguration
import androidx.compose.ui.text.AnnotatedString
import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.unit.dp
@ -126,14 +121,14 @@ fun groupChatAction(groupInfo: GroupInfo, chatModel: ChatModel) {
suspend fun openDirectChat(contactId: Long, chatModel: ChatModel) {
val chat = chatModel.controller.apiGetChat(ChatType.Direct, contactId)
if (chat != null) {
openChat(chat, chatModel)
openLoadedChat(chat, chatModel)
}
}
suspend fun openGroupChat(groupId: Long, chatModel: ChatModel) {
val chat = chatModel.controller.apiGetChat(ChatType.Group, groupId)
if (chat != null) {
openChat(chat, chatModel)
openLoadedChat(chat, chatModel)
}
}
@ -141,12 +136,12 @@ suspend fun openChat(chatInfo: ChatInfo, chatModel: ChatModel) {
Log.d(TAG, "TODOCHAT: openChat: opening ${chatInfo.id}, current chatId ${ChatModel.chatId.value}, size ${ChatModel.chatItems.size}")
val chat = chatModel.controller.apiGetChat(chatInfo.chatType, chatInfo.apiId)
if (chat != null) {
openChat(chat, chatModel)
openLoadedChat(chat, chatModel)
Log.d(TAG, "TODOCHAT: openChat: opened ${chatInfo.id}, current chatId ${ChatModel.chatId.value}, size ${ChatModel.chatItems.size}")
}
}
suspend fun openChat(chat: Chat, chatModel: ChatModel) {
fun openLoadedChat(chat: Chat, chatModel: ChatModel) {
chatModel.chatItems.clear()
chatModel.chatItemStatuses.clear()
chatModel.chatItems.addAll(chat.chatItems)

View File

@ -12,6 +12,7 @@ import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.runtime.snapshots.SnapshotStateList
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.*
import androidx.compose.ui.platform.LocalUriHandler
import androidx.compose.ui.text.AnnotatedString
@ -29,6 +30,9 @@ import chat.simplex.common.views.onboarding.shouldShowWhatsNew
import chat.simplex.common.views.usersettings.SettingsView
import chat.simplex.common.views.usersettings.simplexTeamUri
import chat.simplex.common.platform.*
import chat.simplex.common.views.call.Call
import chat.simplex.common.views.call.CallMediaType
import chat.simplex.common.views.chat.item.ItemAction
import chat.simplex.common.views.newchat.*
import chat.simplex.res.MR
import kotlinx.coroutines.*
@ -121,6 +125,7 @@ fun ChatListView(chatModel: ChatModel, settingsState: SettingsViewState, setPerf
}
}
if (searchInList.isEmpty()) {
DesktopActiveCallOverlayLayout(newChatSheetState)
NewChatSheet(chatModel, newChatSheetState, stopped, hideNewChatSheet)
}
if (appPlatform.isAndroid) {
@ -311,6 +316,9 @@ private fun ProgressIndicator() {
)
}
@Composable
expect fun DesktopActiveCallOverlayLayout(newChatSheetState: MutableStateFlow<AnimatedViewState>)
fun connectIfOpenedViaUri(uri: URI, chatModel: ChatModel) {
Log.d(TAG, "connectIfOpenedViaUri: opened via link")
if (chatModel.currentUser.value == null) {

View File

@ -50,11 +50,12 @@ class AlertManager {
fun showAlertDialogButtonsColumn(
title: String,
text: AnnotatedString? = null,
onDismissRequest: (() -> Unit)? = null,
buttons: @Composable () -> Unit,
) {
showAlert {
AlertDialog(
onDismissRequest = ::hideAlert,
onDismissRequest = { onDismissRequest?.invoke(); hideAlert() },
title = {
Text(
title,

View File

@ -81,6 +81,35 @@ fun ProfileImage(
}
}
/** [AccountCircleFilled] has its own inner padding, which leads to a visible border if there is a background underneath.
* This is a workaround.
* */
@Composable
fun ProfileImageForActiveCall(
size: Dp,
image: String? = null,
color: Color = MaterialTheme.colors.secondaryVariant,
) {
if (image == null) {
Box(Modifier.requiredSize(size).clip(CircleShape)) {
Icon(
AccountCircleFilled,
contentDescription = stringResource(MR.strings.icon_descr_profile_image_placeholder),
tint = color,
modifier = Modifier.requiredSize(size + 14.dp)
)
}
} else {
val imageBitmap = base64ToBitmap(image)
Image(
imageBitmap,
stringResource(MR.strings.image_descr_profile_image),
contentScale = ContentScale.Crop,
modifier = Modifier.size(size).clip(CircleShape)
)
}
}
@Preview
@Composable

View File

@ -3,7 +3,7 @@
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0, viewport-fit=cover" />
<link href="./style.css" rel="stylesheet" />
<script src="./lz-string.min.js"></script>
<script src="../lz-string.min.js"></script>
</head>
<body>
<video
@ -21,6 +21,6 @@
></video>
</body>
<footer>
<script src="./call.js"></script>
<script src="../call.js"></script>
</footer>
</html>

View File

@ -23,6 +23,9 @@ var TransformOperation;
})(TransformOperation || (TransformOperation = {}));
let activeCall;
let answerTimeout = 30000;
var useWorker = false;
var localizedState = "";
var localizedDescription = "";
const processCommand = (function () {
const defaultIceServers = [
{ urls: ["stun:stun.simplex.im:443"] },
@ -38,9 +41,9 @@ const processCommand = (function () {
iceTransportPolicy: relay ? "relay" : "all",
},
iceCandidates: {
delay: 3000,
extrasInterval: 2000,
extrasTimeout: 8000,
delay: 750,
extrasInterval: 1500,
extrasTimeout: 12000,
},
};
}
@ -81,6 +84,10 @@ const processCommand = (function () {
if (delay)
clearTimeout(delay);
resolved = true;
console.log("LALAL resolveIceCandidates", JSON.stringify(candidates));
//const ipv6Elem = candidates.find((item) => item.candidate.includes("raddr ::"))
//candidates = ipv6Elem != undefined ? candidates.filter((elem) => elem == ipv6Elem) : candidates
//console.log("LALAL resolveIceCandidates2", JSON.stringify(candidates))
const iceCandidates = serialize(candidates);
candidates = [];
resolve(iceCandidates);
@ -88,19 +95,20 @@ const processCommand = (function () {
function sendIceCandidates() {
if (candidates.length === 0)
return;
console.log("LALAL sendIceCandidates", JSON.stringify(candidates));
const iceCandidates = serialize(candidates);
candidates = [];
sendMessageToNative({ resp: { type: "ice", iceCandidates } });
}
});
}
async function initializeCall(config, mediaType, aesKey, useWorker) {
async function initializeCall(config, mediaType, aesKey) {
const pc = new RTCPeerConnection(config.peerConnectionConfig);
const remoteStream = new MediaStream();
const localCamera = VideoCamera.User;
const localStream = await getLocalMediaStream(mediaType, localCamera);
const iceCandidates = getIceCandidates(pc, config);
const call = { connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey, useWorker };
const call = { connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey };
await setupMediaStreams(call);
let connectionTimeout = setTimeout(connectionHandler, answerTimeout);
pc.addEventListener("connectionstatechange", connectionStateChange);
@ -178,17 +186,17 @@ const processCommand = (function () {
// This request for local media stream is made to prompt for camera/mic permissions on call start
if (command.media)
await getLocalMediaStream(command.media, VideoCamera.User);
const encryption = supportsInsertableStreams(command.useWorker);
const encryption = supportsInsertableStreams(useWorker);
resp = { type: "capabilities", capabilities: { encryption } };
break;
case "start": {
console.log("starting incoming call - create webrtc session");
if (activeCall)
endCall();
const { media, useWorker, iceServers, relay } = command;
const { media, iceServers, relay } = command;
const encryption = supportsInsertableStreams(useWorker);
const aesKey = encryption ? command.aesKey : undefined;
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey, useWorker);
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey);
const pc = activeCall.connection;
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
@ -202,7 +210,6 @@ const processCommand = (function () {
// iceServers,
// relay,
// aesKey,
// useWorker,
// }
resp = {
type: "offer",
@ -210,21 +217,23 @@ const processCommand = (function () {
iceCandidates: await activeCall.iceCandidates,
capabilities: { encryption },
};
console.log("LALALs", JSON.stringify(resp));
break;
}
case "offer":
if (activeCall) {
resp = { type: "error", message: "accept: call already started" };
}
else if (!supportsInsertableStreams(command.useWorker) && command.aesKey) {
else if (!supportsInsertableStreams(useWorker) && command.aesKey) {
resp = { type: "error", message: "accept: encryption is not supported" };
}
else {
const offer = parse(command.offer);
const remoteIceCandidates = parse(command.iceCandidates);
const { media, aesKey, useWorker, iceServers, relay } = command;
activeCall = await initializeCall(getCallConfig(!!aesKey, iceServers, relay), media, aesKey, useWorker);
const { media, aesKey, iceServers, relay } = command;
activeCall = await initializeCall(getCallConfig(!!aesKey, iceServers, relay), media, aesKey);
const pc = activeCall.connection;
console.log("LALALo", JSON.stringify(remoteIceCandidates));
await pc.setRemoteDescription(new RTCSessionDescription(offer));
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
@ -236,6 +245,7 @@ const processCommand = (function () {
iceCandidates: await activeCall.iceCandidates,
};
}
console.log("LALALo", JSON.stringify(resp));
break;
case "answer":
if (!pc) {
@ -250,6 +260,7 @@ const processCommand = (function () {
else {
const answer = parse(command.answer);
const remoteIceCandidates = parse(command.iceCandidates);
console.log("LALALa", JSON.stringify(remoteIceCandidates));
await pc.setRemoteDescription(new RTCSessionDescription(answer));
addIceCandidates(pc, remoteIceCandidates);
resp = { type: "ok" };
@ -286,6 +297,11 @@ const processCommand = (function () {
resp = { type: "ok" };
}
break;
case "description":
localizedState = command.state;
localizedDescription = command.description;
resp = { type: "ok" };
break;
case "end":
endCall();
resp = { type: "ok" };
@ -310,12 +326,14 @@ const processCommand = (function () {
catch (e) {
console.log(e);
}
shutdownCameraAndMic();
activeCall = undefined;
resetVideoElements();
}
function addIceCandidates(conn, iceCandidates) {
for (const c of iceCandidates) {
conn.addIceCandidate(new RTCIceCandidate(c));
console.log("LALAL addIceCandidates", JSON.stringify(c));
}
}
async function setupMediaStreams(call) {
@ -335,7 +353,7 @@ const processCommand = (function () {
if (call.aesKey) {
if (!call.key)
call.key = await callCrypto.decodeAesKey(call.aesKey);
if (call.useWorker && !call.worker) {
if (useWorker && !call.worker) {
const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`;
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], { type: "text/javascript" })));
call.worker.onerror = ({ error, filename, lineno, message }) => console.log(JSON.stringify({ error, filename, lineno, message }));
@ -479,6 +497,11 @@ const processCommand = (function () {
return (("createEncodedStreams" in RTCRtpSender.prototype && "createEncodedStreams" in RTCRtpReceiver.prototype) ||
(!!useWorker && "RTCRtpScriptTransform" in window));
}
function shutdownCameraAndMic() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localStream) {
activeCall.localStream.getTracks().forEach((track) => track.stop());
}
}
function resetVideoElements() {
const videos = getVideoElements();
if (!videos)
@ -507,6 +530,15 @@ const processCommand = (function () {
}
return processCommand;
})();
function toggleMedia(s, media) {
let res = false;
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
for (const t of tracks) {
t.enabled = !t.enabled;
res = t.enabled;
}
return res;
}
// Cryptography function - it is loaded both in the main window and in worker context (if the worker is used)
function callCryptoFunction() {
const initialPlainTextRequired = {

View File

@ -0,0 +1,50 @@
<!DOCTYPE html>
<html>
<head>
<title>SimpleX Chat WebRTC call</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, viewport-fit=cover" />
<link href="/desktop/style.css" rel="stylesheet" />
<script src="/lz-string.min.js"></script>
</head>
<body>
<video
id="remote-video-stream"
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-video-stream"
muted
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<div id="progress"></div>
<div id="info-block">
<p id="state"></p>
<p id="description"></p>
</div>
<div id="audio-call-icon">
<img src="/desktop/images/ic_phone_in_talk.svg" />
</div>
<p id="manage-call">
<button id="toggle-audio" style="display: none" onclick="javascript:toggleAudioManually()">
<img src="/desktop/images/ic_mic.svg" />
</button>
<button id="end-call" onclick="javascript:endCallManually()">
<img src="/desktop/images/ic_call_end_filled.svg" />
</button>
<button id="toggle-speaker" style="display: none" onclick="javascript:toggleSpeakerManually()">
<img src="/desktop/images/ic_volume_up.svg" />
</button>
<button id="toggle-video" style="display: none" onclick="javascript:toggleVideoManually()">
<img src="/desktop/images/ic_videocam_filled.svg" />
</button>
</p>
</body>
<footer>
<script src="/call.js"></script>
<script src="/desktop/ui.js"></script>
</footer>
</html>

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="red" d="M480 418q125 0 238.75 50.25T914 613.5q8 9.5 8.25 21t-8.25 20L821 748q-8 8-22.5 8.75t-23-5.75l-113-84.5q-6-4.5-8.75-10.25T651 643.5v-139q-42-16-85.5-22.5t-85-6.5q-42 0-85.5 6.5t-85.5 22.5v139q0 6.5-2.75 12.5T298 666.5L184.5 751q-11.5 8.5-23.5 7.5T139.5 748L46 654.5q-8.5-8.5-8.25-20t8.25-21q81.5-95 195.25-145.25T480 418Z"/></svg>

After

Width:  |  Height:  |  Size: 435 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M480 630.5q-41.75 0-69.875-30.167Q382 570.167 382 527V278q0-40.417 28.566-68.708Q439.132 181 479.941 181t69.434 28.292Q578 237.583 578 278v249q0 43.167-28.125 73.333Q521.75 630.5 480 630.5Zm0-224.5Zm-.175 526q-12.325 0-20.325-8.375t-8-20.625V795.865Q354 786 285.25 719T206 557.5q-1.5-12.593 7.295-21.547Q222.091 527 235.5 527q9.917 0 18.148 7.542 8.232 7.541 9.852 18.458 10.5 80.5 72.044 134 61.543 53.5 144.347 53.5 82.805 0 144.457-53.5Q686 633.5 696.5 553q1.853-11.167 10.121-18.583Q714.89 527 725.543 527q12.91 0 21.434 8.953Q755.5 544.907 754 557.5 743.5 652 674.75 719T509 795.865V903q0 12.25-8.425 20.625-8.426 8.375-20.75 8.375ZM480 573q18.075 0 29.288-13.5Q520.5 546 520.5 527V278.335q0-16.835-11.629-28.335-11.628-11.5-28.818-11.5t-28.872 11.356Q439.5 261.212 439.5 278v248.868q0 19.132 11.212 32.632Q461.925 573 480 573Z"/></svg>

After

Width:  |  Height:  |  Size: 949 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M681.5 693 640 651.5q16.5-20.5 26-45.75T679 553q1.814-11.167 10.182-18.583Q697.551 527 707.847 527q13.153 0 21.653 8.953 8.5 8.954 7 21.547-4.5 37-18.5 71.75T681.5 693ZM554 566l-51-50V279.038q0-17.463-11.489-29.001-11.49-11.537-29.213-11.537t-29.01 11.431Q422 261.362 422 279v155l-57.5-57.5V279q0-40.833 28.515-69.417Q421.529 181 462.265 181q40.735 0 69.485 28.583Q560.5 238.167 560.5 279v248.23q0 7.103-1.5 19.186-1.5 12.084-5 19.584Zm-94.5-94.5Zm350.5 505L58.5 225q-8-7.444-8-18.222Q50.5 196 58.25 188q7.75-8 18.006-8 10.255 0 18.244 8L847 940.5q8 7.989 8 17.994 0 10.006-8 17.756-8 8.25-18.961 8.25-10.961 0-18.039-8ZM433.5 903V795.865Q336 786 267.5 719t-79-161.5q-2-12.5 7.045-21.5 9.046-9 22.455-9 9.5 0 17.75 7.5T246 553q10.053 80.713 71.588 134.107Q379.124 740.5 462.289 740.5q37.711 0 73.071-12.588Q570.721 715.325 599.5 693l41.5 41.5q-31 26-69.014 41.568Q533.972 791.635 491 796v107q0 12.25-8.463 20.625T462.325 932q-12.325 0-20.575-8.375T433.5 903Z"/></svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="100" viewBox="0 -960 960 960" width="100"><path fill="white" d="M774.5-488.5q-5.5-119.5-89-203.25t-203-88.75V-838q71 2.5 133.5 30.5t109.75 75.25q47.25 47.25 75.5 110T832-488.5h-57.5Zm-168 0q-6-49.5-40.5-83.75t-83.5-39.25V-669q73 5 124.25 56T664-488.5h-57.5Zm184 363.5Q677-125 558-180.5T338-338Q236-439 180.5-557.75T125-790.692q0-18.808 12.714-31.558Q150.429-835 169.5-835H306q14 0 23.75 9.75t13.75 24.75l26.929 123.641Q372-663.5 369.5-652q-2.5 11.5-10.229 19.226L261-533q26 44 54.688 81.658Q344.375-413.683 379-380q36.5 38 77.25 69.323Q497-279.353 542-255l95.544-98q9.456-10.5 21.357-14.25T682.5-369l117.362 25.438Q815-340 825-327.801q10 12.198 10 27.301v131q0 19.071-12.714 31.786Q809.571-125 790.5-125ZM232-585.5l81-82-23.5-110H183q1.5 41.5 13 88.25t36 103.75Zm364 358q40 19 88.166 31t93.334 14v-107l-102-21.5-79.5 83.5Zm-364-358Zm364 358Z"/></svg>

After

Width:  |  Height:  |  Size: 898 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M143.5 891.5q-23.031 0-40.266-17.234Q86 857.031 86 834V318q0-23.031 17.234-40.266Q120.469 260.5 143.5 260.5h516.211q22.289 0 39.789 17.234Q717 294.969 717 318v215.5L849 401q8-7.5 16.75-3.75t8.75 13.063V741q0 10-8.75 13.75t-16.85-4.35L717 618.5V834q0 23.031-17.5 40.266Q682 891.5 659.711 891.5H143.5Z"/></svg>

After

Width:  |  Height:  |  Size: 416 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M849.5 750.5 717 618.5v114L659.5 675V318H302l-57.5-57.5h415q22.969 0 40.234 17.266Q717 295.031 717 318v215l132.5-132.5q6.5-6.5 15.75-3.167 9.25 3.334 9.25 12.667v331q0 9.625-9.25 13.062Q856 757.5 849.5 750.5Zm-26.5 250-758-758q-8-7.547-8-19.069 0-11.522 9-20.431 8.5-8.5 20-8.5t20.5 8.5l758 758q7.5 7.93 7.5 19.465t-8.5 20.035q-9 9-20.5 9t-20-9Zm-340-502Zm-319.5-238L221 318h-77.5v516h516v-77.5L716 813v21q0 22.969-17.266 40.234Q681.469 891.5 658.5 891.5h-515q-22.969 0-40.234-17.266Q86 856.969 86 834V318q0-22.969 17.266-40.234Q120.531 260.5 143.5 260.5h20Zm236 316.5Z"/></svg>

After

Width:  |  Height:  |  Size: 686 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M232.5 693q-12.5 0-20.5-8t-8-20.5v-177q0-12.5 8-20.5t20.5-8h129L509 311.5q13.5-13.5 31-6.5t17.5 26v489.5q0 19.5-17.5 26.5t-31-6.5L361.5 693h-129ZM615 742V409.5q55 17 88 63.25T736 576q0 58-33 103.25T615 742ZM500 408.5l-112.5 108h-126v119h126L500 744V408.5ZM379 576Z"/></svg>

After

Width:  |  Height:  |  Size: 381 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M600.5 904.5q-15.5 5.5-28.25-4T559.5 874q0-7.5 4.25-13.5t11.75-8q89-31.5 143.75-107T774 575q0-94.5-54.5-170.5T575.5 298q-7-2-11.5-8.5t-4.5-14.5q0-16 13.25-25.25t27.75-4.25Q704.5 283 768 373t63.5 202q0 112.5-63.5 202.5t-167.5 127ZM157 693q-12.5 0-20.5-8t-8-20.5v-177q0-12.5 8-20.5t20.5-8h129l147.5-147.5q13.5-13.5 31-6.25T482 331v489.5q0 19-17.5 26.25t-31-6.25L286 693H157Zm382.5 49V409.5q55 17 88 63.25t33 103.25q0 58-33 103.25t-88 62.75Zm-115-333.5L312 516.5H186v119h126L424.5 744V408.5Zm-93 167.5Z"/></svg>

After

Width:  |  Height:  |  Size: 616 B

View File

@ -0,0 +1,127 @@
html,
body {
padding: 0;
margin: 0;
background-color: black;
}
#remote-video-stream {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream {
position: absolute;
width: 20%;
max-width: 20%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
top: 0;
right: 0;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-panel {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-play-button {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-start-playback-button {
display: none !important;
-webkit-appearance: none !important;
}
#manage-call {
position: absolute;
width: fit-content;
top: 90%;
left: 50%;
transform: translate(-50%, 0);
display: grid;
grid-auto-flow: column;
grid-column-gap: 30px;
}
#manage-call button {
border: none;
cursor: pointer;
appearance: none;
background-color: inherit;
}
#progress {
position: absolute;
left: 50%;
top: 50%;
margin-left: -52px;
margin-top: -52px;
border-radius: 50%;
border-top: 5px solid white;
border-right: 5px solid white;
border-bottom: 5px solid white;
border-left: 5px solid black;
width: 100px;
height: 100px;
-webkit-animation: spin 2s linear infinite;
animation: spin 2s linear infinite;
}
@-webkit-keyframes spin {
0% {
-webkit-transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
}
}
@keyframes spin {
0% {
transform: rotate(0deg);
}
100% {
transform: rotate(360deg);
}
}
#info-block {
position: absolute;
color: white;
line-height: 10px;
opacity: 0.8;
width: 200px;
font-family: Arial, Helvetica, sans-serif;
}
#info-block.audio {
text-align: center;
left: 50%;
top: 50%;
margin-left: -100px;
margin-top: 100px;
}
#info-block.video {
left: 16px;
top: 2px;
}
#audio-call-icon {
position: absolute;
display: none;
left: 50%;
top: 50%;
margin-left: -50px;
margin-top: -44px;
width: 100px;
height: 100px;
}

View File

@ -0,0 +1,80 @@
"use strict";
// Override defaults to enable worker on Chrome and Safari
useWorker = window.safari !== undefined || navigator.userAgent.indexOf("Chrome") != -1;
// Create WebSocket connection.
const socket = new WebSocket(`ws://${location.host}`);
socket.addEventListener("open", (_event) => {
console.log("Opened socket");
sendMessageToNative = (msg) => {
console.log("Message to server: ", msg);
socket.send(JSON.stringify(msg));
};
});
socket.addEventListener("message", (event) => {
const parsed = JSON.parse(event.data);
reactOnMessageFromServer(parsed);
processCommand(parsed);
console.log("Message from server: ", event.data);
});
socket.addEventListener("close", (_event) => {
console.log("Closed socket");
sendMessageToNative = (_msg) => {
console.log("Tried to send message to native but the socket was closed already");
};
window.close();
});
function endCallManually() {
sendMessageToNative({ resp: { type: "end" } });
}
function toggleAudioManually() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMedia) {
document.getElementById("toggle-audio").innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Audio)
? '<img src="/desktop/images/ic_mic.svg" />'
: '<img src="/desktop/images/ic_mic_off.svg" />';
}
}
function toggleSpeakerManually() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.remoteStream) {
document.getElementById("toggle-speaker").innerHTML = toggleMedia(activeCall.remoteStream, CallMediaType.Audio)
? '<img src="/desktop/images/ic_volume_up.svg" />'
: '<img src="/desktop/images/ic_volume_down.svg" />';
}
}
function toggleVideoManually() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMedia) {
document.getElementById("toggle-video").innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Video)
? '<img src="/desktop/images/ic_videocam_filled.svg" />'
: '<img src="/desktop/images/ic_videocam_off.svg" />';
}
}
function reactOnMessageFromServer(msg) {
var _a;
switch ((_a = msg.command) === null || _a === void 0 ? void 0 : _a.type) {
case "capabilities":
document.getElementById("info-block").className = msg.command.media;
break;
case "offer":
case "start":
document.getElementById("toggle-audio").style.display = "inline-block";
document.getElementById("toggle-speaker").style.display = "inline-block";
if (msg.command.media == "video") {
document.getElementById("toggle-video").style.display = "inline-block";
}
document.getElementById("info-block").className = msg.command.media;
break;
case "description":
updateCallInfoView(msg.command.state, msg.command.description);
if ((activeCall === null || activeCall === void 0 ? void 0 : activeCall.connection.connectionState) == "connected") {
document.getElementById("progress").style.display = "none";
if (document.getElementById("info-block").className == CallMediaType.Audio) {
document.getElementById("audio-call-icon").style.display = "block";
}
}
break;
}
}
function updateCallInfoView(state, description) {
document.getElementById("state").innerText = state;
document.getElementById("description").innerText = description;
}
//# sourceMappingURL=ui.js.map

View File

@ -1,16 +1,15 @@
package chat.simplex.common.platform
import androidx.compose.runtime.MutableState
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.*
import chat.simplex.common.model.*
import chat.simplex.common.views.helpers.AlertManager
import chat.simplex.common.views.helpers.generalGetString
import chat.simplex.common.views.helpers.*
import chat.simplex.res.MR
import kotlinx.coroutines.*
import uk.co.caprica.vlcj.player.base.MediaPlayer
import uk.co.caprica.vlcj.player.base.State
import uk.co.caprica.vlcj.player.component.AudioPlayerComponent
import java.io.File
import java.util.*
import kotlin.math.max
actual class RecorderNative: RecorderInterface {
@ -38,7 +37,7 @@ actual object AudioPlayer: AudioPlayerInterface {
// Returns real duration of the track
private fun start(fileSource: CryptoFile, seek: Int? = null, onProgressUpdate: (position: Int?, state: TrackState) -> Unit): Int? {
val absoluteFilePath = getAppFilePath(fileSource.filePath)
val absoluteFilePath = if (fileSource.isAbsolutePath) fileSource.filePath else getAppFilePath(fileSource.filePath)
if (!File(absoluteFilePath).exists()) {
Log.e(TAG, "No such file: ${fileSource.filePath}")
return null
@ -208,6 +207,25 @@ val MediaPlayer.duration: Int
get() = media().info().duration().toInt()
actual object SoundPlayer: SoundPlayerInterface {
override fun start(scope: CoroutineScope, sound: Boolean) { /*LALAL*/ }
override fun stop() { /*LALAL*/ }
var playing = false
override fun start(scope: CoroutineScope, sound: Boolean) {
withBGApi {
val tmpFile = File(tmpDir, UUID.randomUUID().toString())
tmpFile.deleteOnExit()
SoundPlayer::class.java.getResource("/media/ring_once.mp3").openStream()!!.use { it.copyTo(tmpFile.outputStream()) }
playing = true
while (playing) {
if (sound) {
AudioPlayer.play(CryptoFile.plain(tmpFile.absolutePath), mutableStateOf(true), mutableStateOf(0), mutableStateOf(0), true)
}
delay(3500)
}
}
}
override fun stop() {
playing = false
AudioPlayer.stop()
}
}

View File

@ -1,8 +1,243 @@
package chat.simplex.common.views.call
import androidx.compose.runtime.Composable
import androidx.compose.foundation.*
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.*
import androidx.compose.runtime.*
import androidx.compose.runtime.snapshots.SnapshotStateList
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.platform.LocalUriHandler
import androidx.compose.ui.text.AnnotatedString
import androidx.compose.ui.unit.dp
import chat.simplex.common.model.*
import chat.simplex.common.platform.*
import chat.simplex.common.ui.theme.*
import chat.simplex.common.views.chat.item.ItemAction
import chat.simplex.common.views.helpers.*
import chat.simplex.res.MR
import dev.icerock.moko.resources.compose.painterResource
import dev.icerock.moko.resources.compose.stringResource
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.*
import kotlinx.datetime.Clock
import kotlinx.serialization.decodeFromString
import kotlinx.serialization.encodeToString
import org.nanohttpd.protocols.http.IHTTPSession
import org.nanohttpd.protocols.http.response.Response
import org.nanohttpd.protocols.http.response.Response.newFixedLengthResponse
import org.nanohttpd.protocols.http.response.Status
import org.nanohttpd.protocols.websockets.*
import java.io.IOException
import java.net.URI
private const val SERVER_HOST = "localhost"
private const val SERVER_PORT = 50395
val connections = ArrayList<WebSocket>()
@Composable
actual fun ActiveCallView() {
// LALAL
val endCall = {
val call = chatModel.activeCall.value
if (call != null) withBGApi { chatModel.callManager.endCall(call) }
}
BackHandler(onBack = endCall)
WebRTCController(chatModel.callCommand) { apiMsg ->
Log.d(TAG, "received from WebRTCController: $apiMsg")
val call = chatModel.activeCall.value
if (call != null) {
Log.d(TAG, "has active call $call")
when (val r = apiMsg.resp) {
is WCallResponse.Capabilities -> withBGApi {
val callType = CallType(call.localMedia, r.capabilities)
chatModel.controller.apiSendCallInvitation(call.contact, callType)
chatModel.activeCall.value = call.copy(callState = CallState.InvitationSent, localCapabilities = r.capabilities)
}
is WCallResponse.Offer -> withBGApi {
chatModel.controller.apiSendCallOffer(call.contact, r.offer, r.iceCandidates, call.localMedia, r.capabilities)
chatModel.activeCall.value = call.copy(callState = CallState.OfferSent, localCapabilities = r.capabilities)
}
is WCallResponse.Answer -> withBGApi {
chatModel.controller.apiSendCallAnswer(call.contact, r.answer, r.iceCandidates)
chatModel.activeCall.value = call.copy(callState = CallState.Negotiated)
}
is WCallResponse.Ice -> withBGApi {
chatModel.controller.apiSendCallExtraInfo(call.contact, r.iceCandidates)
}
is WCallResponse.Connection ->
try {
val callStatus = json.decodeFromString<WebRTCCallStatus>("\"${r.state.connectionState}\"")
if (callStatus == WebRTCCallStatus.Connected) {
chatModel.activeCall.value = call.copy(callState = CallState.Connected, connectedAt = Clock.System.now())
}
withBGApi { chatModel.controller.apiCallStatus(call.contact, callStatus) }
} catch (e: Error) {
Log.d(TAG, "call status ${r.state.connectionState} not used")
}
is WCallResponse.Connected -> {
chatModel.activeCall.value = call.copy(callState = CallState.Connected, connectionInfo = r.connectionInfo)
}
is WCallResponse.End -> {
withBGApi { chatModel.callManager.endCall(call) }
}
is WCallResponse.Ended -> {
chatModel.activeCall.value = call.copy(callState = CallState.Ended)
withBGApi { chatModel.callManager.endCall(call) }
chatModel.showCallView.value = false
}
is WCallResponse.Ok -> when (val cmd = apiMsg.command) {
is WCallCommand.Answer ->
chatModel.activeCall.value = call.copy(callState = CallState.Negotiated)
is WCallCommand.Media -> {
when (cmd.media) {
CallMediaType.Video -> chatModel.activeCall.value = call.copy(videoEnabled = cmd.enable)
CallMediaType.Audio -> chatModel.activeCall.value = call.copy(audioEnabled = cmd.enable)
}
}
is WCallCommand.Camera -> {
chatModel.activeCall.value = call.copy(localCamera = cmd.camera)
if (!call.audioEnabled) {
chatModel.callCommand.add(WCallCommand.Media(CallMediaType.Audio, enable = false))
}
}
is WCallCommand.End ->
chatModel.showCallView.value = false
else -> {}
}
is WCallResponse.Error -> {
Log.e(TAG, "ActiveCallView: command error ${r.message}")
}
}
}
}
SendStateUpdates()
DisposableEffect(Unit) {
chatModel.activeCallViewIsVisible.value = true
// After the first call, End command gets added to the list which prevents making another calls
chatModel.callCommand.removeAll { it is WCallCommand.End }
onDispose {
chatModel.activeCallViewIsVisible.value = false
chatModel.callCommand.clear()
}
}
}
@Composable
private fun SendStateUpdates() {
LaunchedEffect(Unit) {
snapshotFlow { chatModel.activeCall.value }
.distinctUntilChanged()
.filterNotNull()
.collect { call ->
val state = call.callState.text
val connInfo = call.connectionInfo
// val connInfoText = if (connInfo == null) "" else " (${connInfo.text}, ${connInfo.protocolText})"
val connInfoText = if (connInfo == null) "" else " (${connInfo.text})"
val description = call.encryptionStatus + connInfoText
chatModel.callCommand.add(WCallCommand.Description(state, description))
}
}
}
@Composable
fun WebRTCController(callCommand: SnapshotStateList<WCallCommand>, onResponse: (WVAPIMessage) -> Unit) {
val uriHandler = LocalUriHandler.current
val server = remember {
uriHandler.openUri("http://${SERVER_HOST}:$SERVER_PORT/simplex/call/")
startServer(onResponse)
}
fun processCommand(cmd: WCallCommand) {
val apiCall = WVAPICall(command = cmd)
for (connection in connections.toList()) {
try {
connection.send(json.encodeToString(apiCall))
break
} catch (e: Exception) {
Log.e(TAG, "Failed to send message to browser: ${e.stackTraceToString()}")
}
}
}
DisposableEffect(Unit) {
onDispose {
processCommand(WCallCommand.End)
server.stop()
connections.clear()
}
}
LaunchedEffect(Unit) {
snapshotFlow { callCommand.firstOrNull() }
.distinctUntilChanged()
.filterNotNull()
.collect {
while (connections.isEmpty()) {
delay(100)
}
while (callCommand.isNotEmpty()) {
val cmd = callCommand.removeFirst()
Log.d(TAG, "WebRTCController LaunchedEffect executing $cmd")
processCommand(cmd)
}
}
}
}
fun startServer(onResponse: (WVAPIMessage) -> Unit): NanoWSD {
val server = object: NanoWSD(SERVER_HOST, SERVER_PORT) {
override fun openWebSocket(session: IHTTPSession): WebSocket = MyWebSocket(onResponse, session)
@Suppress("NewApi")
fun resourcesToResponse(path: String): Response {
val uri = Class.forName("chat.simplex.common.AppKt").getResource("/assets/www$path") ?: return resourceNotFound
val response = newFixedLengthResponse(
Status.OK, getMimeTypeForFile(uri.file),
uri.openStream().readAllBytes()
)
response.setKeepAlive(true)
response.setUseGzip(true)
return response
}
val resourceNotFound = newFixedLengthResponse(Status.NOT_FOUND, "text/plain", "This page couldn't be found")
override fun handle(session: IHTTPSession): Response {
return when {
session.headers["upgrade"] == "websocket" -> super.handle(session)
session.uri.contains("/simplex/call/") -> resourcesToResponse("/desktop/call.html")
else -> resourcesToResponse(URI.create(session.uri).path)
}
}
}
server.start(60_000_000)
return server
}
class MyWebSocket(val onResponse: (WVAPIMessage) -> Unit, handshakeRequest: IHTTPSession) : WebSocket(handshakeRequest) {
override fun onOpen() {
connections.add(this)
}
override fun onClose(closeCode: CloseCode?, reason: String?, initiatedByRemote: Boolean) {
onResponse(WVAPIMessage(null, WCallResponse.End))
}
override fun onMessage(message: WebSocketFrame) {
Log.d(TAG, "MyWebSocket.onMessage")
try {
// for debugging
// onResponse(message.textPayload)
onResponse(json.decodeFromString(message.textPayload))
} catch (e: Exception) {
Log.e(TAG, "failed parsing browser message: $message")
}
}
override fun onPong(pong: WebSocketFrame?) = Unit
override fun onException(exception: IOException) {
Log.e(TAG, "WebSocket exception: ${exception.stackTraceToString()}")
}
}

View File

@ -13,7 +13,7 @@ import androidx.compose.ui.graphics.drawscope.ContentDrawScope
import androidx.compose.ui.unit.dp
import chat.simplex.common.views.helpers.*
private object NoIndication : Indication {
object NoIndication : Indication {
private object NoIndicationInstance : IndicationInstance {
override fun ContentDrawScope.drawIndication() {
drawContent()

View File

@ -0,0 +1,75 @@
package chat.simplex.common.views.chatlist
import androidx.compose.foundation.*
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.Icon
import androidx.compose.material.MaterialTheme
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.unit.dp
import chat.simplex.common.platform.*
import chat.simplex.common.ui.theme.*
import chat.simplex.common.views.call.CallMediaType
import chat.simplex.common.views.chat.item.ItemAction
import chat.simplex.common.views.helpers.*
import chat.simplex.res.MR
import dev.icerock.moko.resources.compose.painterResource
import dev.icerock.moko.resources.compose.stringResource
import kotlinx.coroutines.flow.MutableStateFlow
// Floating overlay shown on desktop while a call is active: a clickable contact
// avatar with a media-type badge, anchored to the bottom-end of the screen.
// Hidden while the new-chat sheet is visible. Click opens the contact's chat;
// long-press / right-click opens a menu with a "hang up" action.
@Composable
actual fun DesktopActiveCallOverlayLayout(newChatSheetState: MutableStateFlow<AnimatedViewState>) {
val call = remember { chatModel.activeCall}.value
// if (call?.callState == CallState.Connected && !newChatSheetState.collectAsState().value.isVisible()) {
if (call != null && !newChatSheetState.collectAsState().value.isVisible()) {
val showMenu = remember { mutableStateOf(false) }
// Prefer the peer's media type when known; fall back to the local one.
val media = call.peerMedia ?: call.localMedia
// Suppress ripple/indication for the overlay's click surface.
CompositionLocalProvider(
LocalIndication provides NoIndication
) {
Box(
Modifier
.fillMaxSize(),
contentAlignment = Alignment.BottomEnd
) {
Box(
Modifier
.padding(end = 71.dp, bottom = 92.dp)
.size(67.dp)
.combinedClickable(onClick = {
val chat = chatModel.getChat(call.contact.id)
if (chat != null) {
withApi {
openChat(chat.chatInfo, chatModel)
}
}
},
onLongClick = { showMenu.value = true })
.onRightClick { showMenu.value = true },
contentAlignment = Alignment.Center
) {
// Contact avatar on an opaque circular background.
Box(Modifier.background(MaterialTheme.colors.background, CircleShape)) {
ProfileImageForActiveCall(size = 56.dp, image = call.contact.profile.image)
}
// Green badge in the top-end corner indicating audio vs video call.
Box(Modifier.padding().background(SimplexGreen, CircleShape).padding(4.dp).align(Alignment.TopEnd)) {
if (media == CallMediaType.Video) {
Icon(painterResource(MR.images.ic_videocam_filled), stringResource(MR.strings.icon_descr_video_call), Modifier.size(18.dp), tint = Color.White)
} else {
Icon(painterResource(MR.images.ic_call_filled), stringResource(MR.strings.icon_descr_audio_call), Modifier.size(18.dp), tint = Color.White)
}
}
// Context menu with a single destructive "hang up" action.
DefaultDropdownMenu(showMenu) {
ItemAction(stringResource(MR.strings.icon_descr_hang_up), painterResource(MR.images.ic_call_end_filled), color = MaterialTheme.colors.error, onClick = {
withBGApi { chatModel.callManager.endCall(call) }
showMenu.value = false
})
}
}
}
}
}
}

View File

@ -1,14 +1,24 @@
#!/bin/sh
# it can be tested in the browser from dist folder
cp ./src/call.html ./dist/call.html
cp ./src/style.css ./dist/style.css
mkdir -p dist/{android,desktop,desktop/images} 2>/dev/null
cp ./src/android/call.html ./dist/android/call.html
cp ./src/android/style.css ./dist/android/style.css
cp ./src/desktop/call.html ./dist/desktop/call.html
cp ./src/desktop/style.css ./dist/desktop/style.css
cp ./src/desktop/images/* ./dist/desktop/images/
cp ./node_modules/lz-string/libs/lz-string.min.js ./dist/lz-string.min.js
cp ./src/webcall.html ./dist/webcall.html
cp ./src/ui.js ./dist/ui.js
# copy to android app
cp ./src/call.html ../../apps/multiplatform/android/src/main/assets/www/call.html
cp ./src/style.css ../../apps/multiplatform/android/src/main/assets/www/style.css
cp ./dist/call.js ../../apps/multiplatform/android/src/main/assets/www/call.js
cp ./node_modules/lz-string/libs/lz-string.min.js ../../apps/multiplatform/android/src/main/assets/www/lz-string.min.js
# copy to android and desktop apps
mkdir -p ../../apps/multiplatform/common/src/commonMain/resources/assets/www/{android,desktop,desktop/images} 2>/dev/null
cp ./src/android/call.html ../../apps/multiplatform/common/src/commonMain/resources/assets/www/android/call.html
cp ./src/android/style.css ../../apps/multiplatform/common/src/commonMain/resources/assets/www/android/style.css
cp ./src/desktop/call.html ../../apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/call.html
cp ./src/desktop/style.css ../../apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/style.css
cp ./src/desktop/images/* ../../apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/images/
cp ./dist/desktop/ui.js ../../apps/multiplatform/common/src/commonMain/resources/assets/www/desktop/ui.js
cp ./dist/call.js ../../apps/multiplatform/common/src/commonMain/resources/assets/www/call.js
cp ./node_modules/lz-string/libs/lz-string.min.js ../../apps/multiplatform/common/src/commonMain/resources/assets/www/lz-string.min.js

View File

@ -40,4 +40,4 @@
"dependencies": {
"lz-string": "^1.4.4"
}
}
}

View File

@ -0,0 +1,26 @@
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0, viewport-fit=cover" />
<link href="./style.css" rel="stylesheet" />
<script src="../lz-string.min.js"></script>
</head>
<body>
<video
id="remote-video-stream"
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-video-stream"
muted
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
</body>
<footer>
<script src="../call.js"></script>
</footer>
</html>

View File

@ -0,0 +1,41 @@
html,
body {
padding: 0;
margin: 0;
background-color: black;
}
#remote-video-stream {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream {
position: absolute;
width: 30%;
max-width: 30%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
top: 0;
right: 0;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-panel {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-play-button {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-start-playback-button {
display: none !important;
-webkit-appearance: none !important;
}

View File

@ -15,6 +15,7 @@ type WCallCommand =
| WCallIceCandidates
| WCEnableMedia
| WCToggleCamera
| WCDescription
| WCEndCall
type WCallResponse =
@ -24,14 +25,15 @@ type WCallResponse =
| WCallIceCandidates
| WRConnection
| WRCallConnected
| WRCallEnd
| WRCallEnded
| WROk
| WRError
| WCAcceptOffer
type WCallCommandTag = "capabilities" | "start" | "offer" | "answer" | "ice" | "media" | "camera" | "end"
type WCallCommandTag = "capabilities" | "start" | "offer" | "answer" | "ice" | "media" | "camera" | "description" | "end"
type WCallResponseTag = "capabilities" | "offer" | "answer" | "ice" | "connection" | "connected" | "ended" | "ok" | "error"
type WCallResponseTag = "capabilities" | "offer" | "answer" | "ice" | "connection" | "connected" | "end" | "ended" | "ok" | "error"
enum CallMediaType {
Audio = "audio",
@ -53,15 +55,13 @@ interface IWCallResponse {
interface WCCapabilities extends IWCallCommand {
type: "capabilities"
media?: CallMediaType
useWorker?: boolean
media: CallMediaType
}
interface WCStartCall extends IWCallCommand {
type: "start"
media: CallMediaType
aesKey?: string
useWorker?: boolean
iceServers?: RTCIceServer[]
relay?: boolean
}
@ -76,7 +76,6 @@ interface WCAcceptOffer extends IWCallCommand {
iceCandidates: string // JSON strings for RTCIceCandidateInit
media: CallMediaType
aesKey?: string
useWorker?: boolean
iceServers?: RTCIceServer[]
relay?: boolean
}
@ -110,6 +109,12 @@ interface WCToggleCamera extends IWCallCommand {
camera: VideoCamera
}
interface WCDescription extends IWCallCommand {
type: "description"
state: string
description: string
}
interface WRCapabilities extends IWCallResponse {
type: "capabilities"
capabilities: CallCapabilities
@ -134,6 +139,10 @@ interface WRCallConnected extends IWCallResponse {
connectionInfo: ConnectionInfo
}
interface WRCallEnd extends IWCallResponse {
type: "end"
}
interface WRCallEnded extends IWCallResponse {
type: "ended"
}
@ -185,13 +194,15 @@ interface Call {
localStream: MediaStream
remoteStream: MediaStream
aesKey?: string
useWorker?: boolean
worker?: Worker
key?: CryptoKey
}
let activeCall: Call | undefined
let answerTimeout = 30_000
var useWorker = false
var localizedState = ""
var localizedDescription = ""
const processCommand = (function () {
type RTCRtpSenderWithEncryption = RTCRtpSender & {
@ -232,9 +243,9 @@ const processCommand = (function () {
iceTransportPolicy: relay ? "relay" : "all",
},
iceCandidates: {
delay: 3000,
extrasInterval: 2000,
extrasTimeout: 8000,
delay: 750,
extrasInterval: 1500,
extrasTimeout: 12000,
},
}
}
@ -274,6 +285,7 @@ const processCommand = (function () {
function resolveIceCandidates() {
if (delay) clearTimeout(delay)
resolved = true
console.log("LALAL resolveIceCandidates", JSON.stringify(candidates))
const iceCandidates = serialize(candidates)
candidates = []
resolve(iceCandidates)
@ -281,6 +293,7 @@ const processCommand = (function () {
function sendIceCandidates() {
if (candidates.length === 0) return
console.log("LALAL sendIceCandidates", JSON.stringify(candidates))
const iceCandidates = serialize(candidates)
candidates = []
sendMessageToNative({resp: {type: "ice", iceCandidates}})
@ -288,13 +301,13 @@ const processCommand = (function () {
})
}
async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesKey?: string, useWorker?: boolean): Promise<Call> {
async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesKey?: string): Promise<Call> {
const pc = new RTCPeerConnection(config.peerConnectionConfig)
const remoteStream = new MediaStream()
const localCamera = VideoCamera.User
const localStream = await getLocalMediaStream(mediaType, localCamera)
const iceCandidates = getIceCandidates(pc, config)
const call = {connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey, useWorker}
const call = {connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey}
await setupMediaStreams(call)
let connectionTimeout: number | undefined = setTimeout(connectionHandler, answerTimeout)
pc.addEventListener("connectionstatechange", connectionStateChange)
@ -374,16 +387,16 @@ const processCommand = (function () {
if (activeCall) endCall()
// This request for local media stream is made to prompt for camera/mic permissions on call start
if (command.media) await getLocalMediaStream(command.media, VideoCamera.User)
const encryption = supportsInsertableStreams(command.useWorker)
const encryption = supportsInsertableStreams(useWorker)
resp = {type: "capabilities", capabilities: {encryption}}
break
case "start": {
console.log("starting incoming call - create webrtc session")
if (activeCall) endCall()
const {media, useWorker, iceServers, relay} = command
const {media, iceServers, relay} = command
const encryption = supportsInsertableStreams(useWorker)
const aesKey = encryption ? command.aesKey : undefined
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey, useWorker)
activeCall = await initializeCall(getCallConfig(encryption && !!aesKey, iceServers, relay), media, aesKey)
const pc = activeCall.connection
const offer = await pc.createOffer()
await pc.setLocalDescription(offer)
@ -397,7 +410,6 @@ const processCommand = (function () {
// iceServers,
// relay,
// aesKey,
// useWorker,
// }
resp = {
type: "offer",
@ -405,19 +417,21 @@ const processCommand = (function () {
iceCandidates: await activeCall.iceCandidates,
capabilities: {encryption},
}
console.log("LALALs", JSON.stringify(resp))
break
}
case "offer":
if (activeCall) {
resp = {type: "error", message: "accept: call already started"}
} else if (!supportsInsertableStreams(command.useWorker) && command.aesKey) {
} else if (!supportsInsertableStreams(useWorker) && command.aesKey) {
resp = {type: "error", message: "accept: encryption is not supported"}
} else {
const offer: RTCSessionDescriptionInit = parse(command.offer)
const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
const {media, aesKey, useWorker, iceServers, relay} = command
activeCall = await initializeCall(getCallConfig(!!aesKey, iceServers, relay), media, aesKey, useWorker)
const {media, aesKey, iceServers, relay} = command
activeCall = await initializeCall(getCallConfig(!!aesKey, iceServers, relay), media, aesKey)
const pc = activeCall.connection
console.log("LALALo", JSON.stringify(remoteIceCandidates))
await pc.setRemoteDescription(new RTCSessionDescription(offer))
const answer = await pc.createAnswer()
await pc.setLocalDescription(answer)
@ -429,6 +443,7 @@ const processCommand = (function () {
iceCandidates: await activeCall.iceCandidates,
}
}
console.log("LALALo", JSON.stringify(resp))
break
case "answer":
if (!pc) {
@ -440,6 +455,7 @@ const processCommand = (function () {
} else {
const answer: RTCSessionDescriptionInit = parse(command.answer)
const remoteIceCandidates: RTCIceCandidateInit[] = parse(command.iceCandidates)
console.log("LALALa", JSON.stringify(remoteIceCandidates))
await pc.setRemoteDescription(new RTCSessionDescription(answer))
addIceCandidates(pc, remoteIceCandidates)
resp = {type: "ok"}
@ -472,6 +488,11 @@ const processCommand = (function () {
resp = {type: "ok"}
}
break
case "description":
localizedState = command.state
localizedDescription = command.description
resp = {type: "ok"}
break
case "end":
endCall()
resp = {type: "ok"}
@ -494,6 +515,7 @@ const processCommand = (function () {
} catch (e) {
console.log(e)
}
shutdownCameraAndMic()
activeCall = undefined
resetVideoElements()
}
@ -501,6 +523,7 @@ const processCommand = (function () {
function addIceCandidates(conn: RTCPeerConnection, iceCandidates: RTCIceCandidateInit[]) {
for (const c of iceCandidates) {
conn.addIceCandidate(new RTCIceCandidate(c))
console.log("LALAL addIceCandidates", JSON.stringify(c))
}
}
@ -520,7 +543,7 @@ const processCommand = (function () {
async function setupEncryptionWorker(call: Call) {
if (call.aesKey) {
if (!call.key) call.key = await callCrypto.decodeAesKey(call.aesKey)
if (call.useWorker && !call.worker) {
if (useWorker && !call.worker) {
const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], {type: "text/javascript"})))
call.worker.onerror = ({error, filename, lineno, message}: ErrorEvent) =>
@ -680,6 +703,12 @@ const processCommand = (function () {
remote: HTMLMediaElement
}
// Stops every local capture track (camera + microphone) of the active call,
// releasing the devices and turning off hardware indicators.
function shutdownCameraAndMic() {
  const localStream = activeCall?.localStream
  if (localStream) {
    for (const track of localStream.getTracks()) track.stop()
  }
}
function resetVideoElements() {
const videos = getVideoElements()
if (!videos) return
@ -706,10 +735,19 @@ const processCommand = (function () {
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
for (const t of tracks) t.enabled = enable
}
return processCommand
})()
// Flips the `enabled` flag on the stream's video or audio tracks and
// returns the resulting state (of the last toggled track; false when none).
function toggleMedia(s: MediaStream, media: CallMediaType): boolean {
  const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
  let enabled = false
  tracks.forEach((track) => {
    track.enabled = !track.enabled
    enabled = track.enabled
  })
  return enabled
}
type TransformFrameFunc = (key: CryptoKey) => (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => Promise<void>
interface CallCrypto {

View File

@ -0,0 +1,50 @@
<!DOCTYPE html>
<html>
<head>
<title>SimpleX Chat WebRTC call</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, viewport-fit=cover" />
<link href="/desktop/style.css" rel="stylesheet" />
<script src="/lz-string.min.js"></script>
</head>
<body>
<video
id="remote-video-stream"
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<video
id="local-video-stream"
muted
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
></video>
<div id="progress"></div>
<div id="info-block">
<p id="state"></p>
<p id="description"></p>
</div>
<div id="audio-call-icon">
<img src="/desktop/images/ic_phone_in_talk.svg" />
</div>
<p id="manage-call">
<button id="toggle-audio" style="display: none" onclick="javascript:toggleAudioManually()">
<img src="/desktop/images/ic_mic.svg" />
</button>
<button id="end-call" onclick="javascript:endCallManually()">
<img src="/desktop/images/ic_call_end_filled.svg" />
</button>
<button id="toggle-speaker" style="display: none" onclick="javascript:toggleSpeakerManually()">
<img src="/desktop/images/ic_volume_up.svg" />
</button>
<button id="toggle-video" style="display: none" onclick="javascript:toggleVideoManually()">
<img src="/desktop/images/ic_videocam_filled.svg" />
</button>
</p>
</body>
<footer>
<script src="/call.js"></script>
<script src="/desktop/ui.js"></script>
</footer>
</html>

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="red" d="M480 418q125 0 238.75 50.25T914 613.5q8 9.5 8.25 21t-8.25 20L821 748q-8 8-22.5 8.75t-23-5.75l-113-84.5q-6-4.5-8.75-10.25T651 643.5v-139q-42-16-85.5-22.5t-85-6.5q-42 0-85.5 6.5t-85.5 22.5v139q0 6.5-2.75 12.5T298 666.5L184.5 751q-11.5 8.5-23.5 7.5T139.5 748L46 654.5q-8.5-8.5-8.25-20t8.25-21q81.5-95 195.25-145.25T480 418Z"/></svg>

After

Width:  |  Height:  |  Size: 435 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M480 630.5q-41.75 0-69.875-30.167Q382 570.167 382 527V278q0-40.417 28.566-68.708Q439.132 181 479.941 181t69.434 28.292Q578 237.583 578 278v249q0 43.167-28.125 73.333Q521.75 630.5 480 630.5Zm0-224.5Zm-.175 526q-12.325 0-20.325-8.375t-8-20.625V795.865Q354 786 285.25 719T206 557.5q-1.5-12.593 7.295-21.547Q222.091 527 235.5 527q9.917 0 18.148 7.542 8.232 7.541 9.852 18.458 10.5 80.5 72.044 134 61.543 53.5 144.347 53.5 82.805 0 144.457-53.5Q686 633.5 696.5 553q1.853-11.167 10.121-18.583Q714.89 527 725.543 527q12.91 0 21.434 8.953Q755.5 544.907 754 557.5 743.5 652 674.75 719T509 795.865V903q0 12.25-8.425 20.625-8.426 8.375-20.75 8.375ZM480 573q18.075 0 29.288-13.5Q520.5 546 520.5 527V278.335q0-16.835-11.629-28.335-11.628-11.5-28.818-11.5t-28.872 11.356Q439.5 261.212 439.5 278v248.868q0 19.132 11.212 32.632Q461.925 573 480 573Z"/></svg>

After

Width:  |  Height:  |  Size: 949 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M681.5 693 640 651.5q16.5-20.5 26-45.75T679 553q1.814-11.167 10.182-18.583Q697.551 527 707.847 527q13.153 0 21.653 8.953 8.5 8.954 7 21.547-4.5 37-18.5 71.75T681.5 693ZM554 566l-51-50V279.038q0-17.463-11.489-29.001-11.49-11.537-29.213-11.537t-29.01 11.431Q422 261.362 422 279v155l-57.5-57.5V279q0-40.833 28.515-69.417Q421.529 181 462.265 181q40.735 0 69.485 28.583Q560.5 238.167 560.5 279v248.23q0 7.103-1.5 19.186-1.5 12.084-5 19.584Zm-94.5-94.5Zm350.5 505L58.5 225q-8-7.444-8-18.222Q50.5 196 58.25 188q7.75-8 18.006-8 10.255 0 18.244 8L847 940.5q8 7.989 8 17.994 0 10.006-8 17.756-8 8.25-18.961 8.25-10.961 0-18.039-8ZM433.5 903V795.865Q336 786 267.5 719t-79-161.5q-2-12.5 7.045-21.5 9.046-9 22.455-9 9.5 0 17.75 7.5T246 553q10.053 80.713 71.588 134.107Q379.124 740.5 462.289 740.5q37.711 0 73.071-12.588Q570.721 715.325 599.5 693l41.5 41.5q-31 26-69.014 41.568Q533.972 791.635 491 796v107q0 12.25-8.463 20.625T462.325 932q-12.325 0-20.575-8.375T433.5 903Z"/></svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="100" viewBox="0 -960 960 960" width="100"><path fill="white" d="M774.5-488.5q-5.5-119.5-89-203.25t-203-88.75V-838q71 2.5 133.5 30.5t109.75 75.25q47.25 47.25 75.5 110T832-488.5h-57.5Zm-168 0q-6-49.5-40.5-83.75t-83.5-39.25V-669q73 5 124.25 56T664-488.5h-57.5Zm184 363.5Q677-125 558-180.5T338-338Q236-439 180.5-557.75T125-790.692q0-18.808 12.714-31.558Q150.429-835 169.5-835H306q14 0 23.75 9.75t13.75 24.75l26.929 123.641Q372-663.5 369.5-652q-2.5 11.5-10.229 19.226L261-533q26 44 54.688 81.658Q344.375-413.683 379-380q36.5 38 77.25 69.323Q497-279.353 542-255l95.544-98q9.456-10.5 21.357-14.25T682.5-369l117.362 25.438Q815-340 825-327.801q10 12.198 10 27.301v131q0 19.071-12.714 31.786Q809.571-125 790.5-125ZM232-585.5l81-82-23.5-110H183q1.5 41.5 13 88.25t36 103.75Zm364 358q40 19 88.166 31t93.334 14v-107l-102-21.5-79.5 83.5Zm-364-358Zm364 358Z"/></svg>

After

Width:  |  Height:  |  Size: 898 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M143.5 891.5q-23.031 0-40.266-17.234Q86 857.031 86 834V318q0-23.031 17.234-40.266Q120.469 260.5 143.5 260.5h516.211q22.289 0 39.789 17.234Q717 294.969 717 318v215.5L849 401q8-7.5 16.75-3.75t8.75 13.063V741q0 10-8.75 13.75t-16.85-4.35L717 618.5V834q0 23.031-17.5 40.266Q682 891.5 659.711 891.5H143.5Z"/></svg>

After

Width:  |  Height:  |  Size: 416 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M849.5 750.5 717 618.5v114L659.5 675V318H302l-57.5-57.5h415q22.969 0 40.234 17.266Q717 295.031 717 318v215l132.5-132.5q6.5-6.5 15.75-3.167 9.25 3.334 9.25 12.667v331q0 9.625-9.25 13.062Q856 757.5 849.5 750.5Zm-26.5 250-758-758q-8-7.547-8-19.069 0-11.522 9-20.431 8.5-8.5 20-8.5t20.5 8.5l758 758q7.5 7.93 7.5 19.465t-8.5 20.035q-9 9-20.5 9t-20-9Zm-340-502Zm-319.5-238L221 318h-77.5v516h516v-77.5L716 813v21q0 22.969-17.266 40.234Q681.469 891.5 658.5 891.5h-515q-22.969 0-40.234-17.266Q86 856.969 86 834V318q0-22.969 17.266-40.234Q120.531 260.5 143.5 260.5h20Zm236 316.5Z"/></svg>

After

Width:  |  Height:  |  Size: 686 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M232.5 693q-12.5 0-20.5-8t-8-20.5v-177q0-12.5 8-20.5t20.5-8h129L509 311.5q13.5-13.5 31-6.5t17.5 26v489.5q0 19.5-17.5 26.5t-31-6.5L361.5 693h-129ZM615 742V409.5q55 17 88 63.25T736 576q0 58-33 103.25T615 742ZM500 408.5l-112.5 108h-126v119h126L500 744V408.5ZM379 576Z"/></svg>

After

Width:  |  Height:  |  Size: 381 B

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 96 960 960" width="44"><path fill="white" d="M600.5 904.5q-15.5 5.5-28.25-4T559.5 874q0-7.5 4.25-13.5t11.75-8q89-31.5 143.75-107T774 575q0-94.5-54.5-170.5T575.5 298q-7-2-11.5-8.5t-4.5-14.5q0-16 13.25-25.25t27.75-4.25Q704.5 283 768 373t63.5 202q0 112.5-63.5 202.5t-167.5 127ZM157 693q-12.5 0-20.5-8t-8-20.5v-177q0-12.5 8-20.5t20.5-8h129l147.5-147.5q13.5-13.5 31-6.25T482 331v489.5q0 19-17.5 26.25t-31-6.25L286 693H157Zm382.5 49V409.5q55 17 88 63.25t33 103.25q0 58-33 103.25t-88 62.75Zm-115-333.5L312 516.5H186v119h126L424.5 744V408.5Zm-93 167.5Z"/></svg>

After

Width:  |  Height:  |  Size: 616 B

View File

@ -0,0 +1,127 @@
html,
body {
padding: 0;
margin: 0;
background-color: black;
}
#remote-video-stream {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream {
position: absolute;
width: 20%;
max-width: 20%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
top: 0;
right: 0;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-panel {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-play-button {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-start-playback-button {
display: none !important;
-webkit-appearance: none !important;
}
#manage-call {
position: absolute;
width: fit-content;
top: 90%;
left: 50%;
transform: translate(-50%, 0);
display: grid;
grid-auto-flow: column;
grid-column-gap: 30px;
}
#manage-call button {
border: none;
cursor: pointer;
appearance: none;
background-color: inherit;
}
#progress {
position: absolute;
left: 50%;
top: 50%;
margin-left: -52px;
margin-top: -52px;
border-radius: 50%;
border-top: 5px solid white;
border-right: 5px solid white;
border-bottom: 5px solid white;
border-left: 5px solid black;
width: 100px;
height: 100px;
-webkit-animation: spin 2s linear infinite;
animation: spin 2s linear infinite;
}
@-webkit-keyframes spin {
0% {
-webkit-transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
}
}
@keyframes spin {
0% {
transform: rotate(0deg);
}
100% {
transform: rotate(360deg);
}
}
#info-block {
position: absolute;
color: white;
line-height: 10px;
opacity: 0.8;
width: 200px;
font-family: Arial, Helvetica, sans-serif;
}
#info-block.audio {
text-align: center;
left: 50%;
top: 50%;
margin-left: -100px;
margin-top: 100px;
}
#info-block.video {
left: 16px;
top: 2px;
}
#audio-call-icon {
position: absolute;
display: none;
left: 50%;
top: 50%;
margin-left: -50px;
margin-top: -44px;
width: 100px;
height: 100px;
}

View File

@ -0,0 +1,87 @@
// Override defaults to enable worker on Chrome and Safari
// (detected via the Safari-only `window.safari` object and the Chrome UA token).
useWorker = (window as any).safari !== undefined || navigator.userAgent.indexOf("Chrome") != -1
// Create WebSocket connection to the app's embedded server on the same host/port
// that served this page.
const socket = new WebSocket(`ws://${location.host}`)
socket.addEventListener("open", (_event) => {
  console.log("Opened socket")
  // Once connected, route outgoing messages through the socket as JSON.
  sendMessageToNative = (msg: WVApiMessage) => {
    console.log("Message to server: ", msg)
    socket.send(JSON.stringify(msg))
  }
})
socket.addEventListener("message", (event) => {
  const parsed = JSON.parse(event.data)
  // Update the page UI first, then let the WebRTC engine process the command.
  reactOnMessageFromServer(parsed)
  processCommand(parsed)
  console.log("Message from server: ", event.data)
})
socket.addEventListener("close", (_event) => {
  console.log("Closed socket")
  // Replace the sender with a no-op logger, then close the call window.
  sendMessageToNative = (_msg: WVApiMessage) => {
    console.log("Tried to send message to native but the socket was closed already")
  }
  window.close()
})
function endCallManually() {
sendMessageToNative({resp: {type: "end"}})
}
// Swaps a toggle button's icon to reflect the post-toggle state returned by
// toggleMedia (defined in call.ts). Produces the same markup as the previous
// hard-coded <img> literals; factored out of the three toggle handlers below.
function setToggleButtonIcon(buttonId: string, enabled: boolean, onIcon: string, offIcon: string) {
  const icon = enabled ? onIcon : offIcon
  document.getElementById(buttonId)!!.innerHTML = `<img src="/desktop/images/${icon}.svg" />`
}
// Mute/unmute the local microphone and update the button icon.
function toggleAudioManually() {
  if (activeCall?.localMedia) {
    setToggleButtonIcon("toggle-audio", toggleMedia(activeCall.localStream, CallMediaType.Audio), "ic_mic", "ic_mic_off")
  }
}
// Mute/unmute the remote audio (speaker) and update the button icon.
function toggleSpeakerManually() {
  if (activeCall?.remoteStream) {
    setToggleButtonIcon("toggle-speaker", toggleMedia(activeCall.remoteStream, CallMediaType.Audio), "ic_volume_up", "ic_volume_down")
  }
}
// Turn the local camera on/off and update the button icon.
function toggleVideoManually() {
  if (activeCall?.localMedia) {
    setToggleButtonIcon("toggle-video", toggleMedia(activeCall.localStream, CallMediaType.Video), "ic_videocam_filled", "ic_videocam_off")
  }
}
// Updates the call page UI in response to commands relayed from the app,
// before the command is handed to processCommand (see the socket message listener).
function reactOnMessageFromServer(msg: WVApiMessage) {
  switch (msg.command?.type) {
    case "capabilities":
      // Use the media type ("audio"/"video") as the CSS class to position the info block.
      document.getElementById("info-block")!!.className = msg.command.media
      break
    case "offer":
    case "start":
      // A call is being set up: reveal the mic and speaker controls,
      // plus the camera control for video calls.
      document.getElementById("toggle-audio")!!.style.display = "inline-block"
      document.getElementById("toggle-speaker")!!.style.display = "inline-block"
      if (msg.command.media == "video") {
        document.getElementById("toggle-video")!!.style.display = "inline-block"
      }
      document.getElementById("info-block")!!.className = msg.command.media
      break
    case "description":
      updateCallInfoView(msg.command.state, msg.command.description)
      // Once the peer connection is established, hide the progress spinner;
      // for audio-only calls show the static phone icon instead of video.
      if (activeCall?.connection.connectionState == "connected") {
        document.getElementById("progress")!.style.display = "none"
        if (document.getElementById("info-block")!!.className == CallMediaType.Audio) {
          document.getElementById("audio-call-icon")!.style.display = "block"
        }
      }
      break
  }
}
// Writes the localized call state and description into the on-screen info block.
function updateCallInfoView(state: string, description: string) {
  const fields: Array<[string, string]> = [
    ["state", state],
    ["description", description],
  ]
  for (const [id, text] of fields) {
    document.getElementById(id)!!.innerText = text
  }
}