id/video calls prototype (#570)
* initial UI framework * limited javascript interaction * run some js * try to resolve permissions issues * some initial RTC javascript * approaching a workable js file * js fixes * tidy up js * add some ui to web call * fixes * ready to test * typo * refactor for readability * tidy up before adding encryption * add transform to video streams * tidy a little, audio encoding works, video fails * minor changes * use variables consistently * e2ee video calls git push * include IV in outgoing message, decrypt fails when trying to read back * add different prefix retention for differing frame types * e2ee video calls with iv passed in band * enforce use of VP8 encoding * allow plaintext chunk only for video frames * tidy up kotlin. Android <> browser tested * minor ios changes * capture js logs in xcode * typo * linting Co-authored-by: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
This commit is contained in:
@@ -81,6 +81,7 @@ dependencies {
|
||||
implementation "androidx.compose.material:material-icons-extended:$compose_version"
|
||||
implementation "androidx.navigation:navigation-compose:2.4.1"
|
||||
implementation "com.google.accompanist:accompanist-insets:0.23.0"
|
||||
implementation 'androidx.webkit:webkit:1.4.0'
|
||||
|
||||
def work_version = "2.7.1"
|
||||
implementation "androidx.work:work-runtime-ktx:$work_version"
|
||||
|
||||
@@ -5,6 +5,9 @@
|
||||
<uses-feature android:name="android.hardware.camera" />
|
||||
|
||||
<uses-permission android:name="android.permission.CAMERA" />
|
||||
<uses-permission android:name="android.permission.VIDEO_CAPTURE" />
|
||||
<uses-permission android:name="android.permission.RECORD_AUDIO" />
|
||||
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
|
||||
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
|
||||
<uses-permission android:name="android.permission.WAKE_LOCK" />
|
||||
|
||||
13
apps/android/app/src/main/assets/www/call.html
Normal file
13
apps/android/app/src/main/assets/www/call.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!DOCTYPE html>
<html>
  <head>
    <link href="./style.css" rel="stylesheet" />
  </head>
  <body>
    <video id="incoming-video-stream" autoplay></video>
    <video id="outgoing-video-stream" muted autoplay></video>
    <!-- Script loaded after the video elements so call.js can find them.
         The original placed a <footer> wrapping the script after </body>,
         which is invalid HTML; the script belongs inside <body>. -->
    <script src="./call.js"></script>
  </body>
</html>
|
||||
299
apps/android/app/src/main/assets/www/call.js
Normal file
299
apps/android/app/src/main/assets/www/call.js
Normal file
@@ -0,0 +1,299 @@
|
||||
// Inspired by
// https://github.com/webrtc/samples/blob/gh-pages/src/content/insertable-streams/endtoend-encryption

// Video elements the call is rendered into (defined in call.html).
let incomingVideo = document.getElementById("incoming-video-stream")
let outgoingVideo = document.getElementById("outgoing-video-stream")
// Keep both videos invisible until frames are actually flowing.
incomingVideo.style.opacity = 0
outgoingVideo.style.opacity = 0
incomingVideo.onplaying = () => {
  incomingVideo.style.opacity = 1
}
outgoingVideo.onplaying = () => {
  outgoingVideo.style.opacity = 1
}

// STUN servers
// encodedInsertableStreams enables the sender/receiver frame transforms
// used below for end-to-end encryption.
const peerConnectionConfig = {
  iceServers: [{urls: ["stun:stun.l.google.com:19302"]}],
  iceCandidatePoolSize: 10,
  encodedInsertableStreams: true,
}
// AES-GCM parameters shared by key import and key generation.
let keyGenConfig = {
  name: "AES-GCM",
  length: 256,
  tagLength: 128,
}
let keyUsages = ["encrypt", "decrypt"]

// Hardcode a key for development
let keyData = {alg: "A256GCM", ext: true, k: "JCMDWkhxLmPDhua0BUdhgv6Ac6hOtB9frSxJlnkTAK8", key_ops: keyUsages, kty: "oct"}

let pc // the single RTCPeerConnection for this call (created in run())
let key // imported CryptoKey used by the encode/decode transforms
let IV_LENGTH = 12 // AES-GCM IV size in bytes; the IV is appended to each frame
// Leading bytes per frame left unencrypted so decoders can recognise the
// frame; keyed by frame.type ("key"/"delta"/undefined for video frames).
// See the long comment in setUpVideos for the rationale.
const initialPlainTextRequired = {
  key: 10,
  delta: 3,
  undefined: 1,
}

// let encryptKeyRepresentation
// ICE candidates are buffered here and emitted in one batch once
// gathering completes (see run()).
let candidates = []
run()
|
||||
|
||||
// Creates the peer connection, imports the development key, acquires the
// local camera/microphone stream and wires everything to the page videos.
// JSON lines written via console.log are picked up by the host app
// (Android WebView / iOS WKWebView console hooks) and relayed manually.
async function run() {
  pc = new RTCPeerConnection(peerConnectionConfig)

  pc.onicecandidate = (event) => {
    // add candidate to maintained list to be sent all at once
    if (event.candidate) {
      candidates.push(event.candidate)
    }
  }
  pc.onicegatheringstatechange = (_) => {
    if (pc.iceGatheringState == "complete") {
      // Give command for other caller to use
      console.log(JSON.stringify({action: "processIceCandidates", content: candidates}))
    }
  }
  let remoteStream = new MediaStream()
  // NOTE(review): imports with extractable = true, while loadKey uses
  // false — confirm which is intended for production.
  key = await crypto.subtle.importKey("jwk", keyData, keyGenConfig, true, keyUsages)
  let localStream = await getLocalVideoStream()
  setUpVideos(pc, localStream, remoteStream)
}
|
||||
|
||||
// Dispatches a command object ({action, content}) coming from the host
// app. Returns the generated offer/answer for the actions that produce
// one, otherwise undefined.
async function processCommand(data) {
  const {action, content} = data
  if (action === "initiateCall") {
    console.log("initiating call")
    const offer = await makeOffer(pc)
    // Give command for callee to use
    console.log(JSON.stringify({action: "processAndAnswerOffer", content: offer}))
    return offer
  }
  if (action === "processAndAnswerOffer") {
    await processOffer(content)
    const answer = await answerOffer(pc)
    // Give command for callee to use
    console.log(JSON.stringify({action: "processOffer", content: answer}))
    return answer
  }
  if (action === "processOffer") {
    await processOffer(content)
    return
  }
  if (action === "processIceCandidates") {
    processIceCandidates(content)
    return
  }
  console.log("JS: Unknown Command")
}
|
||||
|
||||
// For initiating a call: creates the SDP offer, installs it as the local
// description and returns a plain {sdp, type} object for the callee.
async function makeOffer(pc) {
  const description = await pc.createOffer()
  await pc.setLocalDescription(description)
  const {sdp, type} = description
  return {sdp, type}
}
|
||||
|
||||
// Creates the SDP answer to a received offer, installs it as the local
// description and returns a plain {sdp, type} object for the caller.
async function answerOffer(pc) {
  const description = await pc.createAnswer()
  await pc.setLocalDescription(description)
  const {sdp, type} = description
  return {sdp, type}
}
|
||||
|
||||
// Adds a batch of remote ICE candidates to the peer connection.
function processIceCandidates(iceCandidates) {
  for (const candidate of iceCandidates) {
    processIceCandidate(candidate)
  }
}

// Wraps one plain candidate object and hands it to the peer connection.
function processIceCandidate(iceCandidate) {
  pc.addIceCandidate(new RTCIceCandidate(iceCandidate))
}
|
||||
|
||||
// Applies the remote session description (offer or answer), but only
// while the initial connection has not been negotiated yet.
async function processOffer(offer) {
  // Negotiating initial connection
  if (pc.currentRemoteDescription) {
    return
  }
  const remoteDescription = new RTCSessionDescription(offer)
  await pc.setRemoteDescription(remoteDescription)
}
|
||||
|
||||
// Attaches the local tracks to the peer connection, installs the
// per-frame encrypt/decrypt transforms, forces VP8 as preferred codec
// and binds both media streams to the page's <video> elements.
function setUpVideos(pc, localStream, remoteStream) {
  localStream.getTracks().forEach((track) => {
    pc.addTrack(track, localStream)
  })
  // Encrypt every outgoing encoded frame.
  pc.getSenders().forEach(setupSenderTransform)
  // Pull tracks from remote stream as they arrive add them to remoteStream video
  pc.ontrack = (event) => {
    // Decrypt every incoming encoded frame.
    setupReceiverTransform(event.receiver)
    event.streams[0].getTracks().forEach((track) => {
      remoteStream.addTrack(track)
    })
  }
  // We assume VP8 encoding in the decode/encode stages to get the initial
  // bytes to pass as plaintext so we enforce that here.
  // VP8 is supported by all WebRTC implementations.
  // Use of VP8 by default may also reduce depacketisation issues.
  // We do not encrypt the first couple of bytes of the payload so that the
  // video elements can work by determining video keyframes and the opus mode
  // being used. This appears to be necessary for any video feed at all.
  // For VP8 this is the content described in
  // https://tools.ietf.org/html/rfc6386#section-9.1
  // which is 10 bytes for key frames and 3 bytes for delta frames.
  // For opus (where encodedFrame.type is not set) this is the TOC byte from
  // https://tools.ietf.org/html/rfc6716#section-3.1

  // Move VP8 to the front of the codec capability list, then apply the
  // reordered preferences to the (single) video transceiver.
  const {codecs} = RTCRtpSender.getCapabilities("video")
  const selectedCodecIndex = codecs.findIndex((c) => c.mimeType === "video/VP8")
  const selectedCodec = codecs[selectedCodecIndex]
  codecs.splice(selectedCodecIndex, 1)
  codecs.unshift(selectedCodec)
  const transceiver = pc.getTransceivers().find((t) => t.sender && t.sender.track.kind === "video")
  transceiver.setCodecPreferences(codecs)

  outgoingVideo.srcObject = localStream
  incomingVideo.srcObject = remoteStream
}
|
||||
|
||||
// Requests camera + microphone access and resolves with the local
// MediaStream (24 fps, 480–1280 px wide, 4:3-ish aspect ratio).
async function getLocalVideoStream() {
  const videoConstraints = {
    frameRate: 24,
    width: {
      min: 480,
      ideal: 720,
      max: 1280,
    },
    aspectRatio: 1.33,
  }
  return navigator.mediaDevices.getUserMedia({audio: true, video: videoConstraints})
}
|
||||
|
||||
// Tears down the peer connection, stopping all of its transports.
function endCall() {
  pc.close()
}
|
||||
|
||||
// Enables/disables the outgoing video track(s).
// b is the string "true"/"false" because the host app invokes this via
// evaluateJavascript with a stringified boolean.
// Fixes the original, which referenced `localStream` — a variable local
// to run() and not in scope here, so every call threw a ReferenceError.
// The local video track is reached through the peer connection's senders
// instead (the tracks added in setUpVideos).
function toggleVideo(b) {
  const enable = b == "true"
  pc.getSenders().forEach((sender) => {
    if (sender.track && sender.track.kind === "video") {
      sender.track.enabled = enable
    }
  })
}
|
||||
|
||||
// Debug helper used to verify the host app's JS bridge round-trip.
function f() {
  const note = "Debug Function"
  console.log(note)
  return "Debugging Return"
}
|
||||
|
||||
/* Stream Transforms */
|
||||
/* Stream Transforms */
// Pipes the sender's encoded frames through encodeFunction (encryption)
// before they reach the network.
function setupSenderTransform(sender) {
  const {readable, writable} = sender.createEncodedStreams()
  const encryptStream = new TransformStream({transform: encodeFunction})
  readable.pipeThrough(encryptStream).pipeTo(writable)
}
|
||||
|
||||
// Pipes the receiver's encoded frames through decodeFunction
// (decryption) before they reach the decoder.
function setupReceiverTransform(receiver) {
  const {readable, writable} = receiver.createEncodedStreams()
  const decryptStream = new TransformStream({transform: decodeFunction})
  readable.pipeThrough(decryptStream).pipeTo(writable)
}
|
||||
|
||||
/* Cryptography */
|
||||
// Encrypts one outgoing encoded frame in place. Resulting payload layout:
//   [ n plaintext header bytes | AES-GCM ciphertext (incl. auth tag) | IV ]
// where n = initialPlainTextRequired[frame.type] for video frames and 0
// otherwise, and the IV (IV_LENGTH bytes) is sent in band so the receiver
// can decrypt. On encryption failure the call is ended and the error
// rethrown (surfacing in the unhandled-rejection log).
function encodeFunction(frame, controller) {
  // frame is an RTCEncodedAudioFrame
  // frame.data is ArrayBuffer
  let data = new Uint8Array(frame.data)
  // frame.type is "key"/"delta" for video frames; a video frame with no
  // type falls back to the "undefined" entry (1 byte).
  let n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0
  let iv = randomIV()
  let initial = data.subarray(0, n)
  let plaintext = data.subarray(n, data.byteLength)
  crypto.subtle
    .encrypt({name: "AES-GCM", iv: iv.buffer}, key, plaintext)
    .then((c) => {
      // Re-assemble: plaintext header + ciphertext + trailing IV.
      frame.data = concatN(initial, new Uint8Array(c), iv).buffer
      controller.enqueue(frame)
    })
    .catch((e) => {
      console.log("encrypt error")
      endCall()
      throw e
    })
}
|
||||
// Decrypts one incoming encoded frame produced by encodeFunction: splits
// [ n plaintext header bytes | ciphertext | IV_LENGTH-byte IV ], decrypts
// the middle with AES-GCM and re-assembles the original frame payload.
// On failure (e.g. auth-tag mismatch) the call is ended and the error
// rethrown.
function decodeFunction(frame, controller) {
  let data = new Uint8Array(frame.data)
  // Mirror of the sender side: same per-frame-type plaintext prefix.
  let n = frame instanceof RTCEncodedVideoFrame ? initialPlainTextRequired[frame.type] : 0
  let initial = data.subarray(0, n)
  let ciphertext = data.subarray(n, data.byteLength - IV_LENGTH)
  let iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength)
  crypto.subtle
    .decrypt({name: "AES-GCM", iv: iv}, key, ciphertext)
    .then((p) => {
      frame.data = concatN(initial, new Uint8Array(p)).buffer
      controller.enqueue(frame)
    })
    .catch((e) => {
      console.log("decrypt error")
      endCall()
      throw e
    })
}
|
||||
|
||||
function randomIV() {
|
||||
return crypto.getRandomValues(new Uint8Array(IV_LENGTH))
|
||||
}
|
||||
// Replaces the active call key with one supplied by the host app in JWK
// form. NOTE(review): imports with extractable = false, whereas run()
// imports the development key with true — confirm which is intended.
async function loadKey(keyData) {
  key = await crypto.subtle.importKey("jwk", keyData, keyGenConfig, false, keyUsages)
}
|
||||
|
||||
// Concatenates any number of byte arrays into one new Uint8Array.
function concatN(...chunks) {
  const total = chunks.reduce((size, chunk) => size + chunk.byteLength, 0)
  const out = new Uint8Array(total)
  let offset = 0
  for (const chunk of chunks) {
    out.set(chunk, offset)
    offset += chunk.byteLength
  }
  return out
}
|
||||
|
||||
async function generateKey() {
|
||||
crypto.subtle
|
||||
.generateKey(keyGenConfig, true, keyUsages)
|
||||
.then((k) => {
|
||||
encryptKey = k
|
||||
return crypto.subtle.exportKey("jwk", encryptKey)
|
||||
})
|
||||
.then((r) => {
|
||||
encryptKeyRepresentation = r
|
||||
console.log(
|
||||
JSON.stringify({
|
||||
action: "processDecryptionKey",
|
||||
content: {
|
||||
key: encryptKeyRepresentation,
|
||||
iv: encryptIv,
|
||||
},
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
24
apps/android/app/src/main/assets/www/style.css
Normal file
24
apps/android/app/src/main/assets/www/style.css
Normal file
@@ -0,0 +1,24 @@
|
||||
/* Hide the browser's built-in media controls on both video elements. */
video::-webkit-media-controls {
  display: none;
}
/* Let the call fill the page edge to edge. */
html, body {
  padding: 0;
  margin: 0;
}
/* Remote party: full-screen backdrop. */
#incoming-video-stream {
  position: absolute;
  width: 100%;
  height: 100%;
  object-fit: cover;
}

/* Local preview: small rounded overlay pinned to the bottom-right corner. */
#outgoing-video-stream {
  position: absolute;
  width: 30%;
  max-width: 30%;
  object-fit: cover;
  margin: 16px;
  border-radius: 16px;
  bottom: 0;
  right: 0;
}
|
||||
@@ -0,0 +1,165 @@
|
||||
package chat.simplex.app.views.call
|
||||
|
||||
import android.Manifest
|
||||
import android.annotation.SuppressLint
|
||||
import android.content.ClipData
|
||||
import android.content.ClipboardManager
|
||||
import android.util.Log
|
||||
import android.view.ViewGroup
|
||||
import android.webkit.*
|
||||
import androidx.activity.compose.BackHandler
|
||||
import androidx.compose.foundation.background
|
||||
import androidx.compose.foundation.layout.*
|
||||
import androidx.compose.material.*
|
||||
import androidx.compose.runtime.*
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.platform.LocalLifecycleOwner
|
||||
import androidx.compose.ui.unit.dp
|
||||
import androidx.compose.ui.viewinterop.AndroidView
|
||||
import androidx.core.content.ContextCompat
|
||||
import androidx.lifecycle.Lifecycle
|
||||
import androidx.lifecycle.LifecycleEventObserver
|
||||
import androidx.webkit.WebViewAssetLoader
|
||||
import androidx.webkit.WebViewClientCompat
|
||||
import chat.simplex.app.TAG
|
||||
import chat.simplex.app.views.helpers.TextEditor
|
||||
import com.google.accompanist.permissions.rememberMultiplePermissionsState
|
||||
|
||||
//@SuppressLint("JavascriptInterface")
|
||||
@Composable
fun VideoCallView(close: () -> Unit) {
  // Close the call screen on system back.
  BackHandler(onBack = close)
  // Captured from AndroidView's update callback so the buttons below can
  // run JS in the page.
  // NOTE(review): `wv` is only assigned once permissions are granted, but
  // onDispose reads it unconditionally — confirm disposal cannot happen
  // before initialization (UninitializedPropertyAccessException otherwise).
  lateinit var wv: WebView
  val context = LocalContext.current
  val clipboard = ContextCompat.getSystemService(context, ClipboardManager::class.java)
  // Runtime permissions required for a WebRTC call.
  val permissionsState = rememberMultiplePermissionsState(
    permissions = listOf(
      Manifest.permission.CAMERA,
      Manifest.permission.RECORD_AUDIO,
      Manifest.permission.MODIFY_AUDIO_SETTINGS,
      Manifest.permission.INTERNET
    )
  )
  val lifecycleOwner = LocalLifecycleOwner.current
  DisposableEffect(lifecycleOwner) {
    // Request the permissions whenever the screen starts/resumes.
    val observer = LifecycleEventObserver { _, event ->
      if (event == Lifecycle.Event.ON_RESUME || event == Lifecycle.Event.ON_START) {
        permissionsState.launchMultiplePermissionRequest()
      }
    }
    lifecycleOwner.lifecycle.addObserver(observer)

    onDispose {
      // End the WebRTC call when leaving the screen.
      wv.evaluateJavascript("endCall()", null)
      lifecycleOwner.lifecycle.removeObserver(observer)
    }
  }
  val localContext = LocalContext.current
  // Latest "processIceCandidates" command captured from the page console.
  val iceCandidateCommand = remember { mutableStateOf("") }
  // JS command shown in the editor; prefilled with call initiation.
  val commandToShow = remember { mutableStateOf("processCommand({action: \"initiateCall\"})") }
  // Serves the bundled www/ assets to the WebView.
  val assetLoader = WebViewAssetLoader.Builder()
    .addPathHandler("/assets/www/", WebViewAssetLoader.AssetsPathHandler(localContext))
    .build()

  Column(
    horizontalAlignment = Alignment.CenterHorizontally,
    verticalArrangement = Arrangement.spacedBy(12.dp),
    modifier = Modifier
      .background(MaterialTheme.colors.background)
      .fillMaxSize()
  ) {
    if (permissionsState.allPermissionsGranted) {
      Box(
        Modifier
          .fillMaxWidth()
          .aspectRatio(ratio = 1F)
      ) {
        AndroidView(
          factory = { AndroidViewContext ->
            WebView(AndroidViewContext).apply {
              layoutParams = ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT,
              )
              this.webChromeClient = object: WebChromeClient() {
                // Grant camera/microphone requests coming from our own
                // bundled page only; deny everything else.
                override fun onPermissionRequest(request: PermissionRequest) {
                  if (request.origin.toString().startsWith("file:/")) {
                    request.grant(request.resources)
                  } else {
                    Log.d(TAG, "Permission request from webview denied.")
                    request.deny()
                  }
                }

                // The JS side signals by logging JSON commands; capture
                // them here and surface them in the command editor.
                override fun onConsoleMessage(consoleMessage: ConsoleMessage?): Boolean {
                  val rtnValue = super.onConsoleMessage(consoleMessage)
                  val msg = consoleMessage?.message() as String
                  if (msg.startsWith("{\"action\":\"processIceCandidates\"")) {
                    iceCandidateCommand.value = "processCommand($msg)"
                  } else if (msg.startsWith("{")) {
                    commandToShow.value = "processCommand($msg)"
                  }
                  return rtnValue
                }
              }
              this.webViewClient = LocalContentWebViewClient(assetLoader)
              this.clearHistory()
              this.clearCache(true)
              // this.addJavascriptInterface(JavascriptInterface(), "Android")
              val webViewSettings = this.settings
              webViewSettings.allowFileAccess = true
              webViewSettings.allowContentAccess = true
              webViewSettings.javaScriptEnabled = true
              // Let the call page autoplay media without a user tap.
              webViewSettings.mediaPlaybackRequiresUserGesture = false
              webViewSettings.cacheMode = WebSettings.LOAD_NO_CACHE
              this.loadUrl("file:android_asset/www/call.html")
            }
          }
        ) {
          wv = it
        }
      }
    } else {
      Text("NEED PERMISSIONS")
    }

    // Editor showing the next JS command to send to the page.
    TextEditor(Modifier.height(180.dp), text = commandToShow)

    // Manual-signalling controls for the prototype: commands are copied
    // between devices by hand.
    Row(
      Modifier
        .fillMaxWidth()
        .padding(bottom = 6.dp),
      horizontalArrangement = Arrangement.SpaceBetween
    ) {
      Button( onClick = {
        val clip: ClipData = ClipData.newPlainText("js command", commandToShow.value)
        clipboard?.setPrimaryClip(clip)
      }) {Text("Copy")}
      Button( onClick = {
        println("sending: ${commandToShow.value}")
        wv.evaluateJavascript(commandToShow.value, null)
        commandToShow.value = ""
      }) {Text("Send")}
      Button( onClick = {
        commandToShow.value = iceCandidateCommand.value
      }) {Text("ICE")}
      Button( onClick = {
        commandToShow.value = ""
      }) {Text("Clear")}
      Button( onClick = {
        wv.evaluateJavascript("endCall()", null)
      }) {Text("End Call")}
    }
  }
}
|
||||
|
||||
// Routes WebView resource requests through the WebViewAssetLoader so the
// bundled www/ assets can be served to the page.
private class LocalContentWebViewClient(private val assetLoader: WebViewAssetLoader) : WebViewClientCompat() {
  override fun shouldInterceptRequest(
    view: WebView,
    request: WebResourceRequest
  ): WebResourceResponse? = assetLoader.shouldInterceptRequest(request.url)
}
|
||||
@@ -24,6 +24,7 @@ import chat.simplex.app.model.Profile
|
||||
import chat.simplex.app.ui.theme.HighOrLowlight
|
||||
import chat.simplex.app.ui.theme.SimpleXTheme
|
||||
import chat.simplex.app.views.TerminalView
|
||||
import chat.simplex.app.views.call.VideoCallView
|
||||
import chat.simplex.app.views.helpers.*
|
||||
|
||||
@Composable
|
||||
@@ -39,7 +40,8 @@ fun SettingsView(chatModel: ChatModel) {
|
||||
},
|
||||
showModal = { modalView -> { ModalManager.shared.showModal { modalView(chatModel) } } },
|
||||
showCustomModal = { modalView -> { ModalManager.shared.showCustomModal { close -> modalView(chatModel, close) } } },
|
||||
showTerminal = { ModalManager.shared.showCustomModal { close -> TerminalView(chatModel, close) } }
|
||||
showTerminal = { ModalManager.shared.showCustomModal { close -> TerminalView(chatModel, close) } },
|
||||
showVideoChatPrototype = { ModalManager.shared.showCustomModal { close -> VideoCallView(close) } },
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -54,7 +56,8 @@ fun SettingsLayout(
|
||||
setRunServiceInBackground: (Boolean) -> Unit,
|
||||
showModal: (@Composable (ChatModel) -> Unit) -> (() -> Unit),
|
||||
showCustomModal: (@Composable (ChatModel, () -> Unit) -> Unit) -> (() -> Unit),
|
||||
showTerminal: () -> Unit
|
||||
showTerminal: () -> Unit,
|
||||
showVideoChatPrototype: () -> Unit
|
||||
) {
|
||||
val uriHandler = LocalUriHandler.current
|
||||
Surface(
|
||||
@@ -159,9 +162,9 @@ fun SettingsLayout(
|
||||
Spacer(Modifier.padding(horizontal = 4.dp))
|
||||
Text(
|
||||
stringResource(R.string.private_notifications), Modifier
|
||||
.padding(end = 24.dp)
|
||||
.fillMaxWidth()
|
||||
.weight(1F))
|
||||
.padding(end = 24.dp)
|
||||
.fillMaxWidth()
|
||||
.weight(1F))
|
||||
Switch(
|
||||
checked = runServiceInBackground.value,
|
||||
onCheckedChange = { setRunServiceInBackground(it) },
|
||||
@@ -191,7 +194,7 @@ fun SettingsLayout(
|
||||
Text(annotatedStringResource(R.string.install_simplex_chat_for_terminal))
|
||||
}
|
||||
Divider(Modifier.padding(horizontal = 8.dp))
|
||||
SettingsSectionView() {
|
||||
SettingsSectionView(showVideoChatPrototype) {
|
||||
Text("v${BuildConfig.VERSION_NAME} (${BuildConfig.VERSION_CODE})")
|
||||
}
|
||||
}
|
||||
@@ -227,7 +230,8 @@ fun PreviewSettingsLayout() {
|
||||
setRunServiceInBackground = {},
|
||||
showModal = {{}},
|
||||
showCustomModal = {{}},
|
||||
showTerminal = {}
|
||||
showTerminal = {},
|
||||
showVideoChatPrototype = {}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
93
apps/ios/Shared/Views/Call/CallView.swift
Normal file
93
apps/ios/Shared/Views/Call/CallView.swift
Normal file
@@ -0,0 +1,93 @@
|
||||
//
|
||||
// CallView.swift
|
||||
// SimpleX (iOS)
|
||||
//
|
||||
// Created by Ian Davies on 29/04/2022.
|
||||
// Copyright © 2022 SimpleX Chat. All rights reserved.
|
||||
//
|
||||
|
||||
import SwiftUI
|
||||
import WebKit
|
||||
|
||||
// Hosts the bundled WebRTC call page (shared with the Android assets) in
// a WKWebView and forwards its console.log output to the Xcode console.
struct WebView: UIViewRepresentable {
    // Bridges WKWebView callbacks (navigation + script messages) to Swift.
    class Coordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler {
        var webView: WKWebView!


        func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
            webView.allowsBackForwardNavigationGestures = false
            // Keep a handle so messageToWebview can evaluate JS later.
            self.webView = webView
        }

        // receive message from wkwebview
        // Handles both "bridge" messages and console.log output forwarded
        // by the script injected in makeUIView.
        func userContentController(
            _ userContentController: WKUserContentController,
            didReceive message: WKScriptMessage
        ) {
            print(message.body)
            // let date = Date()
            // DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
            //     self.messageToWebview(msg: "hello, I got your message: \(message.body) at \(date)")
            // }
        }

        // Pushes a message into the page's JS bridge handler.
        func messageToWebview(msg: String) {
            self.webView?.evaluateJavaScript("webkit.messageHandlers.bridge.onMessage('\(msg)')")
        }
    }

    func makeCoordinator() -> Coordinator {
        return Coordinator()
    }

    // NOTE(review): this calls makeCoordinator() directly instead of using
    // context.coordinator, so SwiftUI's own coordinator instance is unused
    // — confirm this is intended.
    func makeUIView(context: Context) -> WKWebView {
        let coordinator = makeCoordinator()
        let userContentController = WKUserContentController()
        userContentController.add(coordinator, name: "bridge")

        let configuration = WKWebViewConfiguration()
        configuration.userContentController = userContentController
        // Allow the call page to autoplay media inline without a tap.
        configuration.mediaTypesRequiringUserActionForPlayback = []
        configuration.allowsInlineMediaPlayback = true

        // Enable us to capture calls to console.log in the xcode logs
        // Print actually happens on line 29
        let source = "console.log = (msg) => webkit.messageHandlers.logHandler.postMessage(msg)"
        let script = WKUserScript(source: source, injectionTime: .atDocumentStart, forMainFrameOnly: false)
        configuration.userContentController.addUserScript(script)
        configuration.userContentController.add(coordinator, name: "logHandler")

        let _wkwebview = WKWebView(frame: .zero, configuration: configuration)
        _wkwebview.navigationDelegate = coordinator

        return _wkwebview
    }

    // Loads the bundled call page from the app resources.
    func updateUIView(_ webView: WKWebView, context: Context) {
        guard let path: String = Bundle.main.path(forResource: "call", ofType: "html", inDirectory: "www") else {
            print("page not found")
            return
        }
        let localHTMLUrl = URL(fileURLWithPath: path, isDirectory: false)
        webView.loadFileURL(localHTMLUrl, allowingReadAccessTo: localHTMLUrl)
    }
}
|
||||
|
||||
|
||||
|
||||
/// Full-screen container for the WebRTC call web view.
struct CallView: View {
    var body: some View {
        VStack { WebView() }
    }
}
|
||||
|
||||
|
||||
|
||||
|
||||
struct CallView_Previews: PreviewProvider {
|
||||
static var previews: some View {
|
||||
CallView()
|
||||
}
|
||||
}
|
||||
@@ -143,7 +143,12 @@ struct SettingsView: View {
|
||||
notificationsToggle(token)
|
||||
}
|
||||
}
|
||||
Text("v\(appVersion ?? "?") (\(appBuild ?? "?"))")
|
||||
NavigationLink {
|
||||
CallView()
|
||||
.frame(maxHeight: .infinity, alignment: .top)
|
||||
} label: {
|
||||
Text("v\(appVersion ?? "?") (\(appBuild ?? "?"))")
|
||||
}
|
||||
}
|
||||
}
|
||||
.navigationTitle("Your settings")
|
||||
|
||||
@@ -7,6 +7,8 @@
|
||||
objects = {
|
||||
|
||||
/* Begin PBXBuildFile section */
|
||||
3C714777281C081000CB4D4B /* CallView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3C714776281C081000CB4D4B /* CallView.swift */; };
|
||||
3C71477A281C0F6800CB4D4B /* www in Resources */ = {isa = PBXBuildFile; fileRef = 3C714779281C0F6800CB4D4B /* www */; };
|
||||
3C8C548928133C84000A3EC7 /* PasteToConnectView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3C8C548828133C84000A3EC7 /* PasteToConnectView.swift */; };
|
||||
3CDBCF4227FAE51000354CDD /* ComposeLinkView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3CDBCF4127FAE51000354CDD /* ComposeLinkView.swift */; };
|
||||
3CDBCF4827FF621E00354CDD /* CILinkView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3CDBCF4727FF621E00354CDD /* CILinkView.swift */; };
|
||||
@@ -86,6 +88,8 @@
|
||||
/* End PBXContainerItemProxy section */
|
||||
|
||||
/* Begin PBXFileReference section */
|
||||
3C714776281C081000CB4D4B /* CallView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallView.swift; sourceTree = "<group>"; };
|
||||
3C714779281C0F6800CB4D4B /* www */ = {isa = PBXFileReference; lastKnownFileType = folder; name = www; path = ../android/app/src/main/assets/www; sourceTree = "<group>"; };
|
||||
3C8C548828133C84000A3EC7 /* PasteToConnectView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PasteToConnectView.swift; sourceTree = "<group>"; };
|
||||
3CDBCF4127FAE51000354CDD /* ComposeLinkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ComposeLinkView.swift; sourceTree = "<group>"; };
|
||||
3CDBCF4727FF621E00354CDD /* CILinkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CILinkView.swift; sourceTree = "<group>"; };
|
||||
@@ -185,9 +189,18 @@
|
||||
/* End PBXFrameworksBuildPhase section */
|
||||
|
||||
/* Begin PBXGroup section */
|
||||
3C714775281C080100CB4D4B /* Call */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
3C714776281C081000CB4D4B /* CallView.swift */,
|
||||
);
|
||||
path = Call;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
5C2E260D27A30E2400F70299 /* Views */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
3C714775281C080100CB4D4B /* Call */,
|
||||
5C971E1F27AEBF7000C8A3CE /* Helpers */,
|
||||
5C5F4AC227A5E9AF00B51EF1 /* Chat */,
|
||||
5CB9250B27A942F300ACCCDD /* ChatList */,
|
||||
@@ -266,6 +279,7 @@
|
||||
5CA059BD279559F40002BEB4 = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
3C714779281C0F6800CB4D4B /* www */,
|
||||
5CC2C0FD2809BF11000C35E3 /* SimpleX--iOS--InfoPlist.strings */,
|
||||
5CC2C0FA2809BF11000C35E3 /* Localizable.strings */,
|
||||
5C422A7C27A9A6FA0097A1E1 /* SimpleX (iOS).entitlements */,
|
||||
@@ -463,6 +477,7 @@
|
||||
isa = PBXResourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
3C71477A281C0F6800CB4D4B /* www in Resources */,
|
||||
5CA059EF279559F40002BEB4 /* Assets.xcassets in Resources */,
|
||||
5CC2C0FC2809BF11000C35E3 /* Localizable.strings in Resources */,
|
||||
5CC2C0FF2809BF11000C35E3 /* SimpleX--iOS--InfoPlist.strings in Resources */,
|
||||
@@ -531,6 +546,7 @@
|
||||
5CC1C99527A6CF7F000D9FF6 /* ShareSheet.swift in Sources */,
|
||||
5C2E260727A2941F00F70299 /* SimpleXAPI.swift in Sources */,
|
||||
5CB924D427A853F100ACCCDD /* SettingsButton.swift in Sources */,
|
||||
3C714777281C081000CB4D4B /* CallView.swift in Sources */,
|
||||
5C5F2B7027EBC704006A9D5F /* ProfileImage.swift in Sources */,
|
||||
64AA1C6C27F3537400AC7277 /* DeletedItemView.swift in Sources */,
|
||||
5CE4407227ADB1D0007B033A /* Emoji.swift in Sources */,
|
||||
@@ -706,7 +722,8 @@
|
||||
ENABLE_PREVIEWS = YES;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
INFOPLIST_FILE = "SimpleX--iOS--Info.plist";
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "SimpleX needs camera access to scan QR codes to connect to other app users";
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "SimpleX needs camera access to scan QR codes to connect other users and for video calls.";
|
||||
INFOPLIST_KEY_NSMicrophoneUsageDescription = "SimpleX needs microphone access for audio and video calls.";
|
||||
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
|
||||
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
|
||||
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
|
||||
@@ -750,7 +767,8 @@
|
||||
ENABLE_PREVIEWS = YES;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
INFOPLIST_FILE = "SimpleX--iOS--Info.plist";
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "SimpleX needs camera access to scan QR codes to connect to other app users";
|
||||
INFOPLIST_KEY_NSCameraUsageDescription = "SimpleX needs camera access to scan QR codes to connect other users and for video calls.";
|
||||
INFOPLIST_KEY_NSMicrophoneUsageDescription = "SimpleX needs microphone access for audio and video calls.";
|
||||
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
|
||||
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
|
||||
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
|
||||
|
||||
Reference in New Issue
Block a user