id/video calls prototype (#570)
* initial UI framework * limited javascript interaction * run some js * try to resolve permissions issues * some initial RTC javascript * approaching a workable js file * js fixes * tidy up js * add some ui to web call * fixes * ready to test * typo * refactor for readability * tidy up before adding encryption * add transform to video streams * tidy a little, audio encoding works, video fails * minor changes * use variables consistently * e2ee video calls git push * include IV in outgoing message, decrypt fails when trying to read back * add different prefix retention for differing frame types * e2ee video calls with iv passed in band * enforce use of VP8 encoding * allow plaintext chunk only for video frames * tidy up kotlin. Android <> browser tested * minor ios changes * capture js logs in xcode * typo * linting Co-authored-by: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
This commit is contained in:
@@ -81,6 +81,7 @@ dependencies {
|
||||
implementation "androidx.compose.material:material-icons-extended:$compose_version"
|
||||
implementation "androidx.navigation:navigation-compose:2.4.1"
|
||||
implementation "com.google.accompanist:accompanist-insets:0.23.0"
|
||||
implementation 'androidx.webkit:webkit:1.4.0'
|
||||
|
||||
def work_version = "2.7.1"
|
||||
implementation "androidx.work:work-runtime-ktx:$work_version"
|
||||
|
||||
@@ -5,6 +5,9 @@
|
||||
<uses-feature android:name="android.hardware.camera" />
|
||||
|
||||
<uses-permission android:name="android.permission.CAMERA" />
|
||||
<uses-permission android:name="android.permission.VIDEO_CAPTURE" />
|
||||
<uses-permission android:name="android.permission.RECORD_AUDIO" />
|
||||
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
|
||||
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
|
||||
<uses-permission android:name="android.permission.WAKE_LOCK" />
|
||||
|
||||
13
apps/android/app/src/main/assets/www/call.html
Normal file
13
apps/android/app/src/main/assets/www/call.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!DOCTYPE html>
<html>
  <head>
    <link href="./style.css" rel="stylesheet" />
  </head>
  <body>
    <video id="incoming-video-stream" autoplay></video>
    <video id="outgoing-video-stream" muted autoplay></video>
    <!-- Load the call logic after the video elements exist in the DOM.
         (Previously this script sat in a <footer> after </body>, which is
         invalid HTML: no content may follow the closing body tag.) -->
    <script src="./call.js"></script>
  </body>
</html>
|
||||
299
apps/android/app/src/main/assets/www/call.js
Normal file
299
apps/android/app/src/main/assets/www/call.js
Normal file
@@ -0,0 +1,299 @@
|
||||
// Inspired by
// https://github.com/webrtc/samples/blob/gh-pages/src/content/insertable-streams/endtoend-encryption

// The two <video> elements declared in call.html.
let incomingVideo = document.getElementById("incoming-video-stream")
let outgoingVideo = document.getElementById("outgoing-video-stream")
// Keep both surfaces invisible until their streams actually start playing,
// then show each one on its first "playing" event.
incomingVideo.style.opacity = 0
outgoingVideo.style.opacity = 0
incomingVideo.onplaying = () => {
  incomingVideo.style.opacity = 1
}
outgoingVideo.onplaying = () => {
  outgoingVideo.style.opacity = 1
}
|
||||
|
||||
// STUN servers
const peerConnectionConfig = {
  iceServers: [{urls: ["stun:stun.l.google.com:19302"]}],
  iceCandidatePoolSize: 10,
  // Required for the insertable-streams encrypt/decrypt transforms below.
  encodedInsertableStreams: true,
}
// AES-GCM 256-bit key with 128-bit auth tag; shared by import, generate,
// encrypt and decrypt operations.
let keyGenConfig = {
  name: "AES-GCM",
  length: 256,
  tagLength: 128,
}
let keyUsages = ["encrypt", "decrypt"]

// Hardcode a key for development
// NOTE(review): development only — a release build must exchange keys out of band.
let keyData = {alg: "A256GCM", ext: true, k: "JCMDWkhxLmPDhua0BUdhgv6Ac6hOtB9frSxJlnkTAK8", key_ops: keyUsages, kty: "oct"}

// Module state: the single RTCPeerConnection and the imported CryptoKey.
let pc
let key
// AES-GCM IV size in bytes; one fresh IV is appended to every encrypted frame.
let IV_LENGTH = 12
// Bytes left unencrypted per frame type so decoders/routers can classify frames:
// VP8 key-frame header (10), VP8 delta-frame header (3), and for audio frames
// (frame.type is undefined) the Opus TOC byte (1).
const initialPlainTextRequired = {
  key: 10,
  delta: 3,
  undefined: 1,
}

// let encryptKeyRepresentation
// ICE candidates gathered locally; sent to the peer in one batch once complete.
let candidates = []
run()
|
||||
|
||||
// Entry point: builds the peer connection, wires ICE candidate collection,
// imports the development key, and starts the local camera/microphone.
async function run() {
  pc = new RTCPeerConnection(peerConnectionConfig)

  pc.onicecandidate = (event) => {
    // Batch candidates instead of trickling them one at a time.
    if (event.candidate) candidates.push(event.candidate)
  }
  pc.onicegatheringstatechange = (_) => {
    // When gathering finishes, print the command for the other caller to run.
    if (pc.iceGatheringState == "complete") {
      console.log(JSON.stringify({action: "processIceCandidates", content: candidates}))
    }
  }

  const incomingStream = new MediaStream()
  key = await crypto.subtle.importKey("jwk", keyData, keyGenConfig, true, keyUsages)
  const outgoingStream = await getLocalVideoStream()
  setUpVideos(pc, outgoingStream, incomingStream)
}
|
||||
|
||||
// Dispatches a signalling command object sent in from the host application.
// Returns the offer/answer for commands that produce one.
async function processCommand(data) {
  switch (data.action) {
    case "initiateCall": {
      console.log("initiating call")
      const offer = await makeOffer(pc)
      // Give command for callee to use
      console.log(
        JSON.stringify({
          action: "processAndAnswerOffer",
          content: offer,
        })
      )
      return offer
    }
    case "processAndAnswerOffer": {
      await processOffer(data.content)
      const answer = await answerOffer(pc)
      // Give command for the caller to use
      console.log(
        JSON.stringify({
          action: "processOffer",
          content: answer,
        })
      )
      return answer
    }
    case "processOffer":
      await processOffer(data.content)
      break
    case "processIceCandidates":
      processIceCandidates(data.content)
      break
    default:
      console.log("JS: Unknown Command")
  }
}
|
||||
|
||||
// For initiating a call: create and store the local SDP offer, returning a
// plain {sdp, type} object that is safe to serialise for the callee.
async function makeOffer(pc) {
  const description = await pc.createOffer()
  await pc.setLocalDescription(description)
  const {sdp, type} = description
  return {sdp, type}
}
|
||||
|
||||
// Answer an incoming offer: create and store the local SDP answer, returning
// a plain {sdp, type} object safe to serialise back to the caller.
async function answerOffer(pc) {
  const description = await pc.createAnswer()
  await pc.setLocalDescription(description)
  const {sdp, type} = description
  return {sdp, type}
}
|
||||
|
||||
// Add every remote candidate in the batch to the peer connection.
function processIceCandidates(iceCandidates) {
  for (const candidate of iceCandidates) {
    processIceCandidate(candidate)
  }
}
|
||||
|
||||
// Wrap one raw candidate object and hand it to the peer connection.
function processIceCandidate(iceCandidate) {
  pc.addIceCandidate(new RTCIceCandidate(iceCandidate))
}
|
||||
|
||||
// Negotiating the initial connection: apply the remote description only if
// one has not been set yet.
async function processOffer(offer) {
  if (pc.currentRemoteDescription) return
  await pc.setRemoteDescription(new RTCSessionDescription(offer))
}
|
||||
|
||||
// Publishes the local tracks (with encryption transforms), subscribes to
// remote tracks (with decryption transforms), forces VP8 as the preferred
// video codec, and attaches both streams to the page's video elements.
function setUpVideos(pc, localStream, remoteStream) {
  localStream.getTracks().forEach((track) => {
    pc.addTrack(track, localStream)
  })
  pc.getSenders().forEach(setupSenderTransform)
  // Pull tracks from remote stream as they arrive add them to remoteStream video
  pc.ontrack = (event) => {
    setupReceiverTransform(event.receiver)
    event.streams[0].getTracks().forEach((track) => {
      remoteStream.addTrack(track)
    })
  }
  // We assume VP8 encoding in the decode/encode stages to get the initial
  // bytes to pass as plaintext so we enforce that here.
  // VP8 is supported by all implementations of WebRTC.
  // Use of VP8 by default may also reduce depacketisation issues.
  // We do not encrypt the first couple of bytes of the payload so that the
  // video elements can work by determining video keyframes and the opus mode
  // being used. This appears to be necessary for any video feed at all.
  // For VP8 this is the content described in
  // https://tools.ietf.org/html/rfc6386#section-9.1
  // which is 10 bytes for key frames and 3 bytes for delta frames.
  // For opus (where encodedFrame.type is not set) this is the TOC byte from
  // https://tools.ietf.org/html/rfc6716#section-3.1

  const {codecs} = RTCRtpSender.getCapabilities("video")
  const selectedCodecIndex = codecs.findIndex((c) => c.mimeType === "video/VP8")
  // Guard against VP8 being unavailable: findIndex returns -1, and the old
  // unguarded code would then splice(-1, 1) (dropping the LAST codec) and
  // unshift(undefined), corrupting the preference list.
  if (selectedCodecIndex >= 0) {
    const selectedCodec = codecs[selectedCodecIndex]
    codecs.splice(selectedCodecIndex, 1)
    codecs.unshift(selectedCodec)
    const transceiver = pc.getTransceivers().find((t) => t.sender && t.sender.track.kind === "video")
    transceiver.setCodecPreferences(codecs)
  } else {
    // Our frame transforms assume VP8 byte layout; flag loudly in dev builds.
    console.log("VP8 codec not available; keeping default codec preferences")
  }

  outgoingVideo.srcObject = localStream
  incomingVideo.srcObject = remoteStream
}
|
||||
|
||||
// Captures the local camera + microphone with constraints tuned for mobile:
// 24 fps, 480-1280 px wide (720 ideal), 4:3 aspect ratio.
async function getLocalVideoStream() {
  const constraints = {
    audio: true,
    video: {
      frameRate: 24,
      width: {min: 480, ideal: 720, max: 1280},
      aspectRatio: 1.33,
    },
  }
  return navigator.mediaDevices.getUserMedia(constraints)
}
|
||||
|
||||
// Closes the peer connection; the crypto transforms also call this to fail
// closed when an encrypt/decrypt operation errors.
function endCall() {
  pc.close()
}
|
||||
|
||||
// Enables/disables the outgoing video track. b is the string "true"/"false"
// as passed in from the host via evaluateJavascript.
// Fix: the previous body referenced `localStream`, which is a local variable
// inside run() and not in scope here, so every call threw a ReferenceError.
// The local stream is reachable via the self-view element's srcObject
// (assigned in setUpVideos), so read it from there.
function toggleVideo(b) {
  const stream = outgoingVideo.srcObject
  if (stream) {
    stream.getVideoTracks().forEach((track) => {
      track.enabled = b == "true"
    })
  }
}
|
||||
|
||||
// Smoke-test hook used when exercising the JS bridge from the host app:
// logs a marker and returns a fixed string the caller can check.
function f() {
  const result = "Debugging Return"
  console.log("Debug Function")
  return result
}
|
||||
|
||||
/* Stream Transforms */
// Pipes every outgoing encoded frame through the encryption transform
// before it is packetised and sent.
function setupSenderTransform(sender) {
  const streams = sender.createEncodedStreams()
  const encrypting = new TransformStream({transform: encodeFunction})
  streams.readable.pipeThrough(encrypting).pipeTo(streams.writable)
}
|
||||
|
||||
// Pipes every incoming encoded frame through the decryption transform
// before it reaches the decoder.
function setupReceiverTransform(receiver) {
  const streams = receiver.createEncodedStreams()
  const decrypting = new TransformStream({transform: decodeFunction})
  streams.readable.pipeThrough(decrypting).pipeTo(streams.writable)
}
|
||||
|
||||
/* Cryptography */
// Encrypts one outgoing encoded frame in place.
// frame is an RTCEncodedAudioFrame or RTCEncodedVideoFrame
// frame.data is ArrayBuffer
function encodeFunction(frame, controller) {
  let data = new Uint8Array(frame.data)
  // Leave the first n bytes in the clear (VP8 frame header / Opus TOC byte)
  // so decoders and SFUs can still classify the frame; n depends on frame type
  // and is 0 for audio frames. See the comment block in setUpVideos.
  let n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0
  let iv = randomIV()
  let initial = data.subarray(0, n)
  let plaintext = data.subarray(n, data.byteLength)
  crypto.subtle
    .encrypt({name: "AES-GCM", iv: iv.buffer}, key, plaintext)
    .then((c) => {
      // Wire format: plaintext prefix || ciphertext (incl. GCM tag) || 12-byte IV.
      frame.data = concatN(initial, new Uint8Array(c), iv).buffer
      controller.enqueue(frame)
    })
    .catch((e) => {
      // Fail closed: never forward a frame that could not be encrypted.
      console.log("encrypt error")
      endCall()
      throw e
    })
}
|
||||
// Decrypts one incoming encoded frame in place (inverse of encodeFunction).
function decodeFunction(frame, controller) {
  let data = new Uint8Array(frame.data)
  // Same plaintext prefix length the sender left in the clear for this frame type.
  let n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0
  let initial = data.subarray(0, n)
  // Wire format: prefix || ciphertext (incl. GCM tag) || trailing 12-byte IV.
  let ciphertext = data.subarray(n, data.byteLength - IV_LENGTH)
  let iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength)
  crypto.subtle
    .decrypt({name: "AES-GCM", iv: iv}, key, ciphertext)
    .then((p) => {
      frame.data = concatN(initial, new Uint8Array(p)).buffer
      controller.enqueue(frame)
    })
    .catch((e) => {
      // Fail closed: a frame that fails GCM authentication ends the call.
      console.log("decrypt error")
      endCall()
      throw e
    })
}
|
||||
|
||||
// Fresh cryptographically-random 12-byte IV for one AES-GCM frame encryption.
function randomIV() {
  return crypto.getRandomValues(new Uint8Array(IV_LENGTH))
}
|
||||
// Replaces the module-level encryption key with one imported from a JWK object.
// NOTE(review): imports with extractable=false, while run() imports the dev
// key with extractable=true — confirm which is intended.
async function loadKey(keyData) {
  key = await crypto.subtle.importKey("jwk", keyData, keyGenConfig, false, keyUsages)
}
|
||||
|
||||
// Concatenates any number of Uint8Array chunks into one new Uint8Array.
function concatN(...chunks) {
  const total = chunks.reduce((n, chunk) => n + chunk.byteLength, 0)
  const out = new Uint8Array(total)
  let offset = 0
  for (const chunk of chunks) {
    out.set(chunk, offset)
    offset += chunk.byteLength
  }
  return out
}
|
||||
|
||||
// Generates a fresh AES-GCM key and prints a processDecryptionKey command
// carrying its JWK representation for the remote side. Returns the JWK so
// callers can await/inspect it.
// Fixes: the previous body assigned undeclared globals (encryptKey,
// encryptKeyRepresentation) and referenced an undefined `encryptIv`, which
// threw a ReferenceError when building the message; it also never awaited
// its promise chain despite being async.
async function generateKey() {
  const generated = await crypto.subtle.generateKey(keyGenConfig, true, keyUsages)
  const jwk = await crypto.subtle.exportKey("jwk", generated)
  console.log(
    JSON.stringify({
      action: "processDecryptionKey",
      // IVs travel in-band with each encrypted frame, so none is sent here.
      content: {
        key: jwk,
      },
    })
  )
  return jwk
}
|
||||
24
apps/android/app/src/main/assets/www/style.css
Normal file
24
apps/android/app/src/main/assets/www/style.css
Normal file
@@ -0,0 +1,24 @@
|
||||
/* Hide the built-in media controls on both video surfaces. */
video::-webkit-media-controls {
  display: none;
}
/* Full-bleed layout: the page is nothing but the call canvas. */
html, body {
  padding: 0;
  margin: 0;
}
/* Remote peer's video fills the whole viewport. */
#incoming-video-stream {
  position: absolute;
  width: 100%;
  height: 100%;
  object-fit: cover;
}

/* Local self-view: small rounded thumbnail pinned to the bottom-right corner. */
#outgoing-video-stream {
  position: absolute;
  width: 30%;
  max-width: 30%;
  object-fit: cover;
  margin: 16px;
  border-radius: 16px;
  bottom: 0;
  right: 0;
}
|
||||
@@ -0,0 +1,165 @@
|
||||
package chat.simplex.app.views.call
|
||||
|
||||
import android.Manifest
|
||||
import android.annotation.SuppressLint
|
||||
import android.content.ClipData
|
||||
import android.content.ClipboardManager
|
||||
import android.util.Log
|
||||
import android.view.ViewGroup
|
||||
import android.webkit.*
|
||||
import androidx.activity.compose.BackHandler
|
||||
import androidx.compose.foundation.background
|
||||
import androidx.compose.foundation.layout.*
|
||||
import androidx.compose.material.*
|
||||
import androidx.compose.runtime.*
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.platform.LocalLifecycleOwner
|
||||
import androidx.compose.ui.unit.dp
|
||||
import androidx.compose.ui.viewinterop.AndroidView
|
||||
import androidx.core.content.ContextCompat
|
||||
import androidx.lifecycle.Lifecycle
|
||||
import androidx.lifecycle.LifecycleEventObserver
|
||||
import androidx.webkit.WebViewAssetLoader
|
||||
import androidx.webkit.WebViewClientCompat
|
||||
import chat.simplex.app.TAG
|
||||
import chat.simplex.app.views.helpers.TextEditor
|
||||
import com.google.accompanist.permissions.rememberMultiplePermissionsState
|
||||
|
||||
//@SuppressLint("JavascriptInterface")
|
||||
/**
 * Prototype video-call screen. Hosts www/call.html in a WebView, requests the
 * camera/audio runtime permissions the call needs, and exposes a manual
 * command console for relaying WebRTC signalling between two devices: the JS
 * side emits commands via console.log, which are captured below and shown in
 * an editable text box for copy/paste to the other device.
 *
 * @param close invoked to dismiss the screen (also bound to the system back button).
 */
@Composable
fun VideoCallView(close: () -> Unit) {
  BackHandler(onBack = close)
  // Assigned in the AndroidView update callback below; only dereferenced
  // from button handlers and onDispose, after the WebView exists.
  lateinit var wv: WebView
  val context = LocalContext.current
  val clipboard = ContextCompat.getSystemService(context, ClipboardManager::class.java)
  val permissionsState = rememberMultiplePermissionsState(
    permissions = listOf(
      Manifest.permission.CAMERA,
      Manifest.permission.RECORD_AUDIO,
      Manifest.permission.MODIFY_AUDIO_SETTINGS,
      Manifest.permission.INTERNET
    )
  )
  val lifecycleOwner = LocalLifecycleOwner.current
  DisposableEffect(lifecycleOwner) {
    // Re-launch the permission request whenever the screen (re)starts or resumes.
    val observer = LifecycleEventObserver { _, event ->
      if (event == Lifecycle.Event.ON_RESUME || event == Lifecycle.Event.ON_START) {
        permissionsState.launchMultiplePermissionRequest()
      }
    }
    lifecycleOwner.lifecycle.addObserver(observer)

    onDispose {
      // Hang up on the JS side before tearing the screen down.
      wv.evaluateJavascript("endCall()", null)
      lifecycleOwner.lifecycle.removeObserver(observer)
    }
  }
  val localContext = LocalContext.current
  // Latest "processIceCandidates" command captured from the JS console log.
  val iceCandidateCommand = remember { mutableStateOf("") }
  // Command currently shown in the editor; seeded with the call-initiator command.
  val commandToShow = remember { mutableStateOf("processCommand({action: \"initiateCall\"})") }
  // Serves the bundled www/ assets to the WebView.
  val assetLoader = WebViewAssetLoader.Builder()
    .addPathHandler("/assets/www/", WebViewAssetLoader.AssetsPathHandler(localContext))
    .build()

  Column(
    horizontalAlignment = Alignment.CenterHorizontally,
    verticalArrangement = Arrangement.spacedBy(12.dp),
    modifier = Modifier
      .background(MaterialTheme.colors.background)
      .fillMaxSize()
  ) {
    if (permissionsState.allPermissionsGranted) {
      Box(
        Modifier
          .fillMaxWidth()
          .aspectRatio(ratio = 1F)
      ) {
        AndroidView(
          factory = { AndroidViewContext ->
            WebView(AndroidViewContext).apply {
              layoutParams = ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT,
              )
              this.webChromeClient = object: WebChromeClient() {
                // Auto-grant camera/mic only to our own bundled page.
                override fun onPermissionRequest(request: PermissionRequest) {
                  if (request.origin.toString().startsWith("file:/")) {
                    request.grant(request.resources)
                  } else {
                    Log.d(TAG, "Permission request from webview denied.")
                    request.deny()
                  }
                }

                // The JS side signals by logging JSON commands to the console;
                // capture those here and surface them in the command editor.
                override fun onConsoleMessage(consoleMessage: ConsoleMessage?): Boolean {
                  val rtnValue = super.onConsoleMessage(consoleMessage)
                  val msg = consoleMessage?.message() as String
                  if (msg.startsWith("{\"action\":\"processIceCandidates\"")) {
                    iceCandidateCommand.value = "processCommand($msg)"
                  } else if (msg.startsWith("{")) {
                    commandToShow.value = "processCommand($msg)"
                  }
                  return rtnValue
                }
              }
              this.webViewClient = LocalContentWebViewClient(assetLoader)
              this.clearHistory()
              this.clearCache(true)
              // this.addJavascriptInterface(JavascriptInterface(), "Android")
              val webViewSettings = this.settings
              webViewSettings.allowFileAccess = true
              webViewSettings.allowContentAccess = true
              webViewSettings.javaScriptEnabled = true
              // Let the call's media start playing without a user tap.
              webViewSettings.mediaPlaybackRequiresUserGesture = false
              webViewSettings.cacheMode = WebSettings.LOAD_NO_CACHE
              this.loadUrl("file:android_asset/www/call.html")
            }
          }
        ) {
          wv = it
        }
      }
    } else {
      Text("NEED PERMISSIONS")
    }

    // Manual signalling console: copy/paste commands between the two devices.
    TextEditor(Modifier.height(180.dp), text = commandToShow)

    Row(
      Modifier
        .fillMaxWidth()
        .padding(bottom = 6.dp),
      horizontalArrangement = Arrangement.SpaceBetween
    ) {
      Button( onClick = {
        val clip: ClipData = ClipData.newPlainText("js command", commandToShow.value)
        clipboard?.setPrimaryClip(clip)
      }) {Text("Copy")}
      Button( onClick = {
        println("sending: ${commandToShow.value}")
        wv.evaluateJavascript(commandToShow.value, null)
        commandToShow.value = ""
      }) {Text("Send")}
      Button( onClick = {
        commandToShow.value = iceCandidateCommand.value
      }) {Text("ICE")}
      Button( onClick = {
        commandToShow.value = ""
      }) {Text("Clear")}
      Button( onClick = {
        wv.evaluateJavascript("endCall()", null)
      }) {Text("End Call")}
    }
  }
}
|
||||
|
||||
/**
 * Serves the bundled www/ assets to the WebView through [assetLoader], so
 * call.html and its resources resolve without broad file-system access.
 */
private class LocalContentWebViewClient(private val assetLoader: WebViewAssetLoader) : WebViewClientCompat() {
  override fun shouldInterceptRequest(
    view: WebView,
    request: WebResourceRequest
  ): WebResourceResponse? = assetLoader.shouldInterceptRequest(request.url)
}
|
||||
@@ -24,6 +24,7 @@ import chat.simplex.app.model.Profile
|
||||
import chat.simplex.app.ui.theme.HighOrLowlight
|
||||
import chat.simplex.app.ui.theme.SimpleXTheme
|
||||
import chat.simplex.app.views.TerminalView
|
||||
import chat.simplex.app.views.call.VideoCallView
|
||||
import chat.simplex.app.views.helpers.*
|
||||
|
||||
@Composable
|
||||
@@ -39,7 +40,8 @@ fun SettingsView(chatModel: ChatModel) {
|
||||
},
|
||||
showModal = { modalView -> { ModalManager.shared.showModal { modalView(chatModel) } } },
|
||||
showCustomModal = { modalView -> { ModalManager.shared.showCustomModal { close -> modalView(chatModel, close) } } },
|
||||
showTerminal = { ModalManager.shared.showCustomModal { close -> TerminalView(chatModel, close) } }
|
||||
showTerminal = { ModalManager.shared.showCustomModal { close -> TerminalView(chatModel, close) } },
|
||||
showVideoChatPrototype = { ModalManager.shared.showCustomModal { close -> VideoCallView(close) } },
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -54,7 +56,8 @@ fun SettingsLayout(
|
||||
setRunServiceInBackground: (Boolean) -> Unit,
|
||||
showModal: (@Composable (ChatModel) -> Unit) -> (() -> Unit),
|
||||
showCustomModal: (@Composable (ChatModel, () -> Unit) -> Unit) -> (() -> Unit),
|
||||
showTerminal: () -> Unit
|
||||
showTerminal: () -> Unit,
|
||||
showVideoChatPrototype: () -> Unit
|
||||
) {
|
||||
val uriHandler = LocalUriHandler.current
|
||||
Surface(
|
||||
@@ -159,9 +162,9 @@ fun SettingsLayout(
|
||||
Spacer(Modifier.padding(horizontal = 4.dp))
|
||||
Text(
|
||||
stringResource(R.string.private_notifications), Modifier
|
||||
.padding(end = 24.dp)
|
||||
.fillMaxWidth()
|
||||
.weight(1F))
|
||||
.padding(end = 24.dp)
|
||||
.fillMaxWidth()
|
||||
.weight(1F))
|
||||
Switch(
|
||||
checked = runServiceInBackground.value,
|
||||
onCheckedChange = { setRunServiceInBackground(it) },
|
||||
@@ -191,7 +194,7 @@ fun SettingsLayout(
|
||||
Text(annotatedStringResource(R.string.install_simplex_chat_for_terminal))
|
||||
}
|
||||
Divider(Modifier.padding(horizontal = 8.dp))
|
||||
SettingsSectionView() {
|
||||
SettingsSectionView(showVideoChatPrototype) {
|
||||
Text("v${BuildConfig.VERSION_NAME} (${BuildConfig.VERSION_CODE})")
|
||||
}
|
||||
}
|
||||
@@ -227,7 +230,8 @@ fun PreviewSettingsLayout() {
|
||||
setRunServiceInBackground = {},
|
||||
showModal = {{}},
|
||||
showCustomModal = {{}},
|
||||
showTerminal = {}
|
||||
showTerminal = {},
|
||||
showVideoChatPrototype = {}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user