ios: integrating webrtc calls with callkit (#686)

* ios: integrating webrtc calls with callkit

* accept call via chat item (e.g. when DND is on, and callkit blocks the call); refactor

* fix remote video, support logging from ios

* use callkit depending on CallController setting

* call sound

* update incoming call view

* fixing audio encryption

* refactor encryption webrtc fix

* log ontrack success/error

* accept / ignore call via notification

* remove unused imports

* remove unused file

* remove comments
Evgeny Poberezkin 2022-05-24 19:34:27 +01:00 committed by GitHub
parent 0290a687af
commit 546ad01fcb
33 changed files with 1089 additions and 504 deletions
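
For orientation before the diffs: the core of this commit is reporting WebRTC call invitations to CallKit through CXProvider and answering them from CXAction callbacks. A minimal sketch of that pattern follows; the MinimalCallProvider name and the print logging are illustrative only and not part of the commit — the real implementation is CallController.swift further down, which also keeps the non-CallKit fallback.

import Foundation
import CallKit

// Sketch only: report an incoming WebRTC call to CallKit and handle the answer action.
final class MinimalCallProvider: NSObject, CXProviderDelegate {
    private let provider: CXProvider

    override init() {
        let config = CXProviderConfiguration()
        config.supportsVideo = true
        config.supportedHandleTypes = [.generic]
        config.maximumCallsPerCallGroup = 1
        provider = CXProvider(configuration: config)
        super.init()
        provider.setDelegate(self, queue: nil)
    }

    // Shows the native incoming-call UI for a call invitation.
    func reportIncomingCall(uuid: UUID, displayName: String, hasVideo: Bool) {
        let update = CXCallUpdate()
        update.remoteHandle = CXHandle(type: .generic, value: displayName)
        update.hasVideo = hasVideo
        provider.reportNewIncomingCall(with: uuid, update: update) { error in
            if let error = error { print("reportNewIncomingCall error: \(error.localizedDescription)") }
        }
    }

    func providerDidReset(_ provider: CXProvider) {}

    func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
        // Start the WebRTC session for this call, then confirm to CallKit.
        action.fulfill()
    }
}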

View File

@ -96,7 +96,8 @@ const processCommand = (function () {
const pc = new RTCPeerConnection(config.peerConnectionConfig);
const remoteStream = new MediaStream();
const localCamera = VideoCamera.User;
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType, localCamera));
const constraints = callMediaConstraints(mediaType, localCamera);
const localStream = await navigator.mediaDevices.getUserMedia(constraints);
const iceCandidates = getIceCandidates(pc, config);
const call = { connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey, useWorker };
await setupMediaStreams(call);
@ -116,8 +117,10 @@ const processCommand = (function () {
});
if (pc.connectionState == "disconnected" || pc.connectionState == "failed") {
pc.removeEventListener("connectionstatechange", connectionStateChange);
if (activeCall) {
setTimeout(() => sendMessageToNative({ resp: { type: "ended" } }), 0);
}
endCall();
setTimeout(() => sendMessageToNative({ resp: { type: "ended" } }), 0);
}
else if (pc.connectionState == "connected") {
const stats = (await pc.getStats());
@ -133,7 +136,7 @@ const processCommand = (function () {
remoteCandidate: stats.get(iceCandidatePair.remoteCandidateId),
},
};
setTimeout(() => sendMessageToNative({ resp }), 0);
setTimeout(() => sendMessageToNative({ resp }), 500);
break;
}
}
@ -256,19 +259,9 @@ const processCommand = (function () {
if (!activeCall || !pc) {
resp = { type: "error", message: "camera: call not started" };
}
else if (activeCall.localMedia == CallMediaType.Audio) {
resp = { type: "error", message: "camera: no video" };
}
else {
try {
if (command.camera != activeCall.localCamera) {
await replaceCamera(activeCall, command.camera);
}
resp = { type: "ok" };
}
catch (e) {
resp = { type: "error", message: `camera: ${e.message}` };
}
await replaceMedia(activeCall, command.camera);
resp = { type: "ok" };
}
break;
case "end":
@ -281,7 +274,7 @@ const processCommand = (function () {
}
}
catch (e) {
resp = { type: "error", message: e.message };
resp = { type: "error", message: `${command.type}: ${e.message}` };
}
const apiResp = { corrId, resp, command };
sendMessageToNative(apiResp);
@ -323,6 +316,8 @@ const processCommand = (function () {
if (call.useWorker && !call.worker) {
const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`;
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], { type: "text/javascript" })));
call.worker.onerror = ({ error, filename, lineno, message }) => console.log(JSON.stringify({ error, filename, lineno, message }));
call.worker.onmessage = ({ data }) => console.log(JSON.stringify({ message: data }));
}
}
}
@ -346,14 +341,20 @@ const processCommand = (function () {
// Pull tracks from the remote stream as they arrive and add them to the remoteStream video
const pc = call.connection;
pc.ontrack = (event) => {
if (call.aesKey && call.key) {
console.log("set up decryption for receiving");
setupPeerTransform(TransformOperation.Decrypt, event.receiver, call.worker, call.aesKey, call.key);
}
for (const stream of event.streams) {
for (const track of stream.getTracks()) {
call.remoteStream.addTrack(track);
try {
if (call.aesKey && call.key) {
console.log("set up decryption for receiving");
setupPeerTransform(TransformOperation.Decrypt, event.receiver, call.worker, call.aesKey, call.key);
}
for (const stream of event.streams) {
for (const track of stream.getTracks()) {
call.remoteStream.addTrack(track);
}
}
console.log(`ontrack success`);
}
catch (e) {
console.log(`ontrack error: ${e.message}`);
}
};
}
@ -385,7 +386,7 @@ const processCommand = (function () {
}
}
}
async function replaceCamera(call, camera) {
async function replaceMedia(call, camera) {
const videos = getVideoElements();
if (!videos)
throw Error("no video elements");
@ -401,6 +402,8 @@ const processCommand = (function () {
videos.local.srcObject = localStream;
}
function replaceTracks(pc, tracks) {
if (!tracks.length)
return;
const sender = pc.getSenders().find((s) => { var _a; return ((_a = s.track) === null || _a === void 0 ? void 0 : _a.kind) === tracks[0].kind; });
if (sender)
for (const t of tracks)
@ -494,8 +497,8 @@ function callCryptoFunction() {
const initial = data.subarray(0, n);
const plaintext = data.subarray(n, data.byteLength);
try {
const ciphertext = await crypto.subtle.encrypt({ name: "AES-GCM", iv: iv.buffer }, key, plaintext);
frame.data = concatN(initial, new Uint8Array(ciphertext), iv).buffer;
const ciphertext = new Uint8Array(plaintext.length ? await crypto.subtle.encrypt({ name: "AES-GCM", iv: iv.buffer }, key, plaintext) : 0);
frame.data = concatN(initial, ciphertext, iv).buffer;
controller.enqueue(frame);
}
catch (e) {
@ -512,8 +515,8 @@ function callCryptoFunction() {
const ciphertext = data.subarray(n, data.byteLength - IV_LENGTH);
const iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength);
try {
const plaintext = await crypto.subtle.decrypt({ name: "AES-GCM", iv }, key, ciphertext);
frame.data = concatN(initial, new Uint8Array(plaintext)).buffer;
const plaintext = new Uint8Array(ciphertext.length ? await crypto.subtle.decrypt({ name: "AES-GCM", iv }, key, ciphertext) : 0);
frame.data = concatN(initial, plaintext).buffer;
controller.enqueue(frame);
}
catch (e) {
@ -619,9 +622,15 @@ function workerFunction() {
// encryption using RTCRtpScriptTransform.
if ("RTCTransformEvent" in self) {
self.addEventListener("rtctransform", async ({ transformer }) => {
const { operation, aesKey } = transformer.options;
const { readable, writable } = transformer;
await setupTransform({ operation, aesKey, readable, writable });
try {
const { operation, aesKey } = transformer.options;
const { readable, writable } = transformer;
await setupTransform({ operation, aesKey, readable, writable });
self.postMessage({ result: "setupTransform success" });
}
catch (e) {
self.postMessage({ message: `setupTransform error: ${e.message}` });
}
});
}
async function setupTransform({ operation, aesKey, readable, writable }) {

View File

@ -1,12 +1,10 @@
video::-webkit-media-controls {
display: none;
}
html,
body {
padding: 0;
margin: 0;
background-color: black;
}
#remote-video-stream {
position: absolute;
width: 100%;
@ -24,3 +22,20 @@ body {
top: 0;
right: 0;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-panel {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-play-button {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-start-playback-button {
display: none !important;
-webkit-appearance: none !important;
}

View File

@ -209,18 +209,18 @@ fun ChatInfoToolbar(chat: Chat, back: () -> Unit, info: () -> Unit, startCall: (
) {
val cInfo = chat.chatInfo
toolbarButton(Icons.Outlined.ArrowBackIos, R.string.back, onClick = back)
// if (cInfo is ChatInfo.Direct) {
// Box(Modifier.fillMaxWidth(), contentAlignment = Alignment.CenterEnd) {
// Box(Modifier.width(85.dp), contentAlignment = Alignment.CenterStart) {
// toolbarButton(Icons.Outlined.Phone, R.string.icon_descr_audio_call) {
// startCall(CallMediaType.Audio)
// }
// }
// toolbarButton(Icons.Outlined.Videocam, R.string.icon_descr_video_call) {
// startCall(CallMediaType.Video)
// }
// }
// }
if (cInfo is ChatInfo.Direct) {
Box(Modifier.fillMaxWidth(), contentAlignment = Alignment.CenterEnd) {
Box(Modifier.width(85.dp), contentAlignment = Alignment.CenterStart) {
toolbarButton(Icons.Outlined.Phone, R.string.icon_descr_audio_call) {
startCall(CallMediaType.Audio)
}
}
toolbarButton(Icons.Outlined.Videocam, R.string.icon_descr_video_call) {
startCall(CallMediaType.Video)
}
}
}
Row(
Modifier
.padding(horizontal = 80.dp)

View File

@ -70,7 +70,7 @@ class AppDelegate: NSObject, UIApplicationDelegate {
// TODO check if app in background
logger.debug("AppDelegate: didReceiveRemoteNotification: checkMessages")
// TODO remove
NtfManager.shared.notifyCheckingMessages()
// NtfManager.shared.notifyCheckingMessages()
receiveMessages(completionHandler)
} else if let smpQueue = ntfData["checkMessage"] as? String {
// TODO check if app in background

View File

@ -10,6 +10,7 @@ import SwiftUI
struct ContentView: View {
@EnvironmentObject var chatModel: ChatModel
@ObservedObject var alertManager = AlertManager.shared
@ObservedObject var callController = CallController.shared
@State private var showNotificationAlert = false
var body: some View {
@ -17,11 +18,17 @@ struct ContentView: View {
if let step = chatModel.onboardingStage {
if case .onboardingComplete = step,
let user = chatModel.currentUser {
ChatListView(user: user)
.onAppear {
NtfManager.shared.requestAuthorization(onDeny: {
alertManager.showAlert(notificationAlert())
})
ZStack(alignment: .top) {
ChatListView(user: user)
.onAppear {
NtfManager.shared.requestAuthorization(onDeny: {
alertManager.showAlert(notificationAlert())
})
}
if chatModel.showCallView, let call = chatModel.activeCall {
ActiveCallView(call: call)
}
IncomingCallView()
}
} else {
OnboardingView(onboarding: step)

View File

@ -9,6 +9,7 @@
import Foundation
import Combine
import SwiftUI
import WebKit
final class ChatModel: ObservableObject {
@Published var onboardingStage: OnboardingStage?
@ -28,10 +29,10 @@ final class ChatModel: ObservableObject {
@Published var tokenStatus = NtfTknStatus.new
// current WebRTC call
@Published var callInvitations: Dictionary<ChatId, CallInvitation> = [:]
@Published var activeCallInvitation: ContactRef?
@Published var activeCall: Call?
@Published var callCommand: WCallCommand?
@Published var showCallView = false
var callWebView: WKWebView?
var messageDelivery: Dictionary<Int64, () -> Void> = [:]

View File

@ -37,25 +37,12 @@ class NtfManager: NSObject, UNUserNotificationCenterDelegate, ObservableObject {
Task { await acceptContactRequest(contactRequest) }
} else if content.categoryIdentifier == ntfCategoryCallInvitation && (action == ntfActionAcceptCall || action == ntfActionRejectCall),
let chatId = content.userInfo["chatId"] as? String,
case let .direct(contact) = chatModel.getChat(chatId)?.chatInfo,
let invitation = chatModel.callInvitations.removeValue(forKey: chatId) {
let cc = CallController.shared
if action == ntfActionAcceptCall {
chatModel.activeCallInvitation = nil
chatModel.activeCall = Call(contact: contact, callState: .invitationReceived, localMedia: invitation.peerMedia)
chatModel.showCallView = true
chatModel.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey)
cc.answerCall(invitation: invitation)
} else {
Task {
do {
try await apiRejectCall(contact)
if chatModel.activeCall?.contact.id == chatId {
DispatchQueue.main.async {
chatModel.callCommand = .end
chatModel.activeCall = nil
}
}
}
}
cc.endCall(invitation: invitation)
}
} else {
chatModel.chatId = content.targetContentIdentifier
@ -89,6 +76,7 @@ class NtfManager: NSObject, UNUserNotificationCenterDelegate, ObservableObject {
// this notification is delivered from the notifications server
// when the app is in foreground it does not need to be shown
case ntfCategoryCheckMessage: return []
case ntfCategoryCallInvitation: return []
default: return [.sound, .banner, .list]
}
} else {
@ -136,11 +124,12 @@ class NtfManager: NSObject, UNUserNotificationCenterDelegate, ObservableObject {
actions: [
UNNotificationAction(
identifier: ntfActionAcceptCall,
title: NSLocalizedString("Answer", comment: "accept incoming call via notification")
title: NSLocalizedString("Accept", comment: "accept incoming call via notification"),
options: .foreground
),
UNNotificationAction(
identifier: ntfActionRejectCall,
title: NSLocalizedString("Ignore", comment: "ignore incoming call via notification")
title: NSLocalizedString("Reject", comment: "reject incoming call via notification")
)
],
intentIdentifiers: [],
@ -194,9 +183,9 @@ class NtfManager: NSObject, UNUserNotificationCenterDelegate, ObservableObject {
addNotification(createMessageReceivedNtf(cInfo, cItem))
}
func notifyCallInvitation(_ contact: Contact, _ invitation: CallInvitation) {
func notifyCallInvitation(_ invitation: CallInvitation) {
logger.debug("NtfManager.notifyCallInvitation")
addNotification(createCallInvitationNtf(contact, invitation))
addNotification(createCallInvitationNtf(invitation))
}
// TODO remove

View File

@ -24,25 +24,23 @@ struct WebRTCExtraInfo: Codable {
}
struct CallInvitation {
var contact: Contact
var callkitUUID: UUID?
var peerMedia: CallMediaType
var sharedKey: String?
var callTypeText: LocalizedStringKey {
get {
switch peerMedia {
case .video: return sharedKey == nil ? "video call (not e2e encrypted)." : "**e2e encrypted** video call."
case .audio: return sharedKey == nil ? "audio call (not e2e encrypted)." : "**e2e encrypted** audio call."
case .video: return sharedKey == nil ? "video call (not e2e encrypted)" : "**e2e encrypted** video call"
case .audio: return sharedKey == nil ? "audio call (not e2e encrypted)" : "**e2e encrypted** audio call"
}
}
}
var callTitle: LocalizedStringKey {
get {
switch peerMedia {
case .video: return "Incoming video call"
case .audio: return "Incoming audio call"
}
}
}
var encryptionText: LocalizedStringKey { get { sharedKey == nil ? "no e2e encryption" : "with e2e encryption" } }
static let sampleData = CallInvitation(
contact: Contact.sampleData,
peerMedia: .audio
)
}
struct CallType: Codable {

View File

@ -879,13 +879,13 @@ enum CICallStatus: String, Decodable {
func text(_ sec: Int) -> String {
switch self {
case .pending: return NSLocalizedString("calling…", comment: "call status")
case .missed: return NSLocalizedString("missed", comment: "call status")
case .rejected: return NSLocalizedString("rejected", comment: "call status")
case .accepted: return NSLocalizedString("accepted", comment: "call status")
case .negotiated: return NSLocalizedString("connecting", comment: "call status")
case .progress: return NSLocalizedString("in progress", comment: "call status")
case .ended: return String.localizedStringWithFormat(NSLocalizedString("ended %@", comment: "call status"), CICallStatus.durationText(sec))
case .error: return NSLocalizedString("error", comment: "call status")
case .missed: return NSLocalizedString("missed call", comment: "call status")
case .rejected: return NSLocalizedString("rejected call", comment: "call status")
case .accepted: return NSLocalizedString("accepted call", comment: "call status")
case .negotiated: return NSLocalizedString("connecting call", comment: "call status")
case .progress: return NSLocalizedString("call in progress", comment: "call status")
case .ended: return String.localizedStringWithFormat(NSLocalizedString("ended call %@", comment: "call status"), CICallStatus.durationText(sec))
case .error: return NSLocalizedString("call error", comment: "call status")
}
}

View File

@ -50,16 +50,16 @@ func createMessageReceivedNtf(_ cInfo: ChatInfo, _ cItem: ChatItem) -> UNMutable
)
}
func createCallInvitationNtf(_ contact: Contact, _ invitation: CallInvitation) -> UNMutableNotificationContent {
func createCallInvitationNtf(_ invitation: CallInvitation) -> UNMutableNotificationContent {
let text = invitation.peerMedia == .video
? NSLocalizedString("Incoming video call", comment: "notification")
: NSLocalizedString("Incoming audio call", comment: "notification")
return createNotification(
categoryIdentifier: ntfCategoryCallInvitation,
title: "\(contact.chatViewName):",
title: "\(invitation.contact.chatViewName):",
body: text,
targetContentIdentifier: nil,
userInfo: ["chatId": contact.id]
userInfo: ["chatId": invitation.contact.id]
)
}

View File

@ -11,6 +11,7 @@ import UIKit
import Dispatch
import BackgroundTasks
import SwiftUI
import CallKit
private var chatController: chat_ctrl?
@ -629,22 +630,41 @@ func processReceivedMsg(_ res: ChatResponse) {
removeFile(fileName)
}
case let .callInvitation(contact, callType, sharedKey):
let invitation = CallInvitation(peerMedia: callType.media, sharedKey: sharedKey)
let uuid = UUID()
var invitation = CallInvitation(contact: contact, callkitUUID: uuid, peerMedia: callType.media, sharedKey: sharedKey)
m.callInvitations[contact.id] = invitation
if (m.activeCallInvitation == nil) {
m.activeCallInvitation = ContactRef(contactId: contact.apiId, localDisplayName: contact.localDisplayName)
CallController.shared.reportNewIncomingCall(invitation: invitation) { error in
if let error = error {
invitation.callkitUUID = nil
m.callInvitations[contact.id] = invitation
logger.error("reportNewIncomingCall error: \(error.localizedDescription)")
} else {
logger.debug("reportNewIncomingCall success")
}
}
NtfManager.shared.notifyCallInvitation(contact, invitation)
// This will be called from notification service extension
// CXProvider.reportNewIncomingVoIPPushPayload([
// "displayName": contact.displayName,
// "contactId": contact.id,
// "uuid": invitation.callkitUUID
// ]) { error in
// if let error = error {
// logger.error("reportNewIncomingVoIPPushPayload error \(error.localizedDescription)")
// } else {
// logger.debug("reportNewIncomingVoIPPushPayload success for \(contact.id)")
// }
// }
case let .callOffer(contact, callType, offer, sharedKey, _):
// TODO askConfirmation?
// TODO check encryption is compatible
withCall(contact) { call in
m.activeCall = call.copy(callState: .offerReceived, peerMedia: callType.media, sharedKey: sharedKey)
call.callState = .offerReceived
call.peerMedia = callType.media
call.sharedKey = sharedKey
m.callCommand = .offer(offer: offer.rtcSession, iceCandidates: offer.rtcIceCandidates, media: callType.media, aesKey: sharedKey, useWorker: true)
}
case let .callAnswer(contact, answer):
withCall(contact) { call in
m.activeCall = call.copy(callState: .negotiated)
call.callState = .answerReceived
m.callCommand = .answer(answer: answer.rtcSession, iceCandidates: answer.rtcIceCandidates)
}
case let .callExtraInfo(contact, extraInfo):
@ -652,9 +672,12 @@ func processReceivedMsg(_ res: ChatResponse) {
m.callCommand = .ice(iceCandidates: extraInfo.rtcIceCandidates)
}
case let .callEnded(contact):
m.activeCallInvitation = nil
withCall(contact) { _ in
if let invitation = m.callInvitations.removeValue(forKey: contact.id) {
CallController.shared.reportCallRemoteEnded(invitation: invitation)
}
withCall(contact) { call in
m.callCommand = .end
CallController.shared.reportCallRemoteEnded(call: call)
}
default:
logger.debug("unsupported event: \(res.responseType)")

View File

@ -7,160 +7,211 @@
//
import SwiftUI
import WebKit
struct ActiveCallView: View {
@EnvironmentObject var chatModel: ChatModel
@Environment(\.dismiss) private var dismiss
@State private var coordinator: WebRTCCoordinator? = nil
@State private var webViewReady: Bool = false
@EnvironmentObject var m: ChatModel
@ObservedObject var call: Call
@State private var rtcWebView: WKWebView? = nil
@State private var webViewMsg: WVAPIMessage? = nil
var body: some View {
ZStack(alignment: .bottom) {
WebRTCView(coordinator: $coordinator, webViewReady: $webViewReady, webViewMsg: $webViewMsg)
WebRTCView(rtcWebView: $rtcWebView, webViewMsg: $webViewMsg)
.onAppear() { sendCommandToWebView() }
.onChange(of: chatModel.callCommand) { _ in sendCommandToWebView() }
.onChange(of: webViewReady) { _ in sendCommandToWebView() }
.onChange(of: m.callCommand) { _ in sendCommandToWebView() }
.onChange(of: rtcWebView) { _ in sendCommandToWebView() }
.onChange(of: webViewMsg) { _ in processWebViewMessage() }
.background(.black)
ActiveCallOverlay(call: chatModel.activeCall, dismiss: { dismiss() })
if let call = m.activeCall, let webView = rtcWebView {
ActiveCallOverlay(call: call, webView: webView)
}
}
.preferredColorScheme(.dark)
}
private func sendCommandToWebView() {
if chatModel.activeCall != nil && webViewReady,
let cmd = chatModel.callCommand,
let c = coordinator {
chatModel.callCommand = nil
logger.debug("ActiveCallView: command \(cmd.cmdType)")
c.sendCommand(command: cmd)
if m.activeCall != nil,
let wv = rtcWebView,
let cmd = m.callCommand {
m.callCommand = nil
sendCallCommand(wv, cmd)
}
}
private func processWebViewMessage() {
let m = chatModel
if let msg = webViewMsg,
let call = chatModel.activeCall {
let call = m.activeCall,
let webView = rtcWebView {
logger.debug("ActiveCallView: response \(msg.resp.respType)")
Task {
switch msg.resp {
case let .capabilities(capabilities):
let callType = CallType(media: call.localMedia, capabilities: capabilities)
try await apiSendCallInvitation(call.contact, callType)
m.activeCall = call.copy(callState: .invitationSent, localCapabilities: capabilities)
case let .offer(offer, iceCandidates, capabilities):
try await apiSendCallOffer(call.contact, offer, iceCandidates,
media: call.localMedia, capabilities: capabilities)
m.activeCall = call.copy(callState: .offerSent, localCapabilities: capabilities)
case let .answer(answer, iceCandidates):
try await apiSendCallAnswer(call.contact, answer, iceCandidates)
m.activeCall = call.copy(callState: .negotiated)
case let .ice(iceCandidates):
try await apiSendCallExtraInfo(call.contact, iceCandidates)
case let .connection(state):
if let callStatus = WebRTCCallStatus.init(rawValue: state.connectionState),
case .connected = callStatus {
m.activeCall = call.copy(callState: .connected)
switch msg.resp {
case let .capabilities(capabilities):
let callType = CallType(media: call.localMedia, capabilities: capabilities)
Task {
do {
try await apiSendCallInvitation(call.contact, callType)
} catch {
logger.error("apiSendCallInvitation \(responseError(error))")
}
try await apiCallStatus(call.contact, state.connectionState)
case let .connected(connectionInfo):
m.activeCall = call.copy(callState: .connected, connectionInfo: connectionInfo)
case .ended:
m.activeCall = nil
m.activeCallInvitation = nil
m.callCommand = nil
m.showCallView = false
case .ok:
switch msg.command {
case let .media(media, enable):
switch media {
case .video: m.activeCall = call.copy(videoEnabled: enable)
case .audio: m.activeCall = call.copy(audioEnabled: enable)
}
case let .camera(camera):
m.activeCall = call.copy(localCamera: camera)
case .end:
m.activeCall = nil
m.activeCallInvitation = nil
m.callCommand = nil
m.showCallView = false
default: ()
DispatchQueue.main.async {
call.callState = .invitationSent
call.localCapabilities = capabilities
}
case let .error(message):
logger.debug("ActiveCallView: command error: \(message)")
case let .invalid(type):
logger.debug("ActiveCallView: invalid response: \(type)")
}
case let .offer(offer, iceCandidates, capabilities):
Task {
do {
try await apiSendCallOffer(call.contact, offer, iceCandidates,
media: call.localMedia, capabilities: capabilities)
} catch {
logger.error("apiSendCallOffer \(responseError(error))")
}
DispatchQueue.main.async {
call.callState = .offerSent
call.localCapabilities = capabilities
}
}
case let .answer(answer, iceCandidates):
Task {
do {
try await apiSendCallAnswer(call.contact, answer, iceCandidates)
} catch {
logger.error("apiSendCallAnswer \(responseError(error))")
}
DispatchQueue.main.async {
call.callState = .negotiated
}
}
case let .ice(iceCandidates):
Task {
do {
try await apiSendCallExtraInfo(call.contact, iceCandidates)
} catch {
logger.error("apiSendCallExtraInfo \(responseError(error))")
}
}
case let .connection(state):
if let callStatus = WebRTCCallStatus.init(rawValue: state.connectionState),
case .connected = callStatus {
if case .outgoing = call.direction {
CallController.shared.reportOutgoingCall(call: call, connectedAt: nil)
}
call.callState = .connected
// CallKit doesn't work well with WKWebView
// This is a hack to enable microphone in WKWebView after CallKit takes over it
if CallController.useCallKit {
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
m.callCommand = .camera(camera: call.localCamera)
}
}
}
Task {
do {
try await apiCallStatus(call.contact, state.connectionState)
} catch {
logger.error("apiCallStatus \(responseError(error))")
}
}
case let .connected(connectionInfo):
call.callState = .connected
call.connectionInfo = connectionInfo
case .ended:
closeCallView(webView)
call.callState = .ended
if let uuid = call.callkitUUID {
CallController.shared.endCall(callUUID: uuid)
}
case .ok:
switch msg.command {
case .answer:
call.callState = .negotiated
case let .camera(camera):
call.localCamera = camera
Task {
// This disables microphone if it was disabled before flipping the camera
await webView.setMicrophoneCaptureState(call.audioEnabled ? .active : .muted)
// This compensates for the bug on some devices when remote video does not appear
// await webView.setCameraCaptureState(.muted)
// await webView.setCameraCaptureState(call.videoEnabled ? .active : .muted)
}
case .end:
closeCallView(webView)
m.activeCall = nil
default: ()
}
case let .error(message):
logger.debug("ActiveCallView: command error: \(message)")
case let .invalid(type):
logger.debug("ActiveCallView: invalid response: \(type)")
}
}
}
private func closeCallView(_ webView: WKWebView) {
m.showCallView = false
Task {
await webView.setMicrophoneCaptureState(.muted)
await webView.setCameraCaptureState(.muted)
}
}
}
struct ActiveCallOverlay: View {
@EnvironmentObject var chatModel: ChatModel
var call: Call?
var dismiss: () -> Void
@ObservedObject var call: Call
var webView: WKWebView
var body: some View {
VStack {
if let call = call {
switch call.localMedia {
case .video:
callInfoView(call, .leading)
.foregroundColor(.white)
.opacity(0.8)
.padding()
switch call.localMedia {
case .video:
callInfoView(call, .leading)
.foregroundColor(.white)
.opacity(0.8)
.padding()
Spacer()
HStack {
toggleAudioButton()
Spacer()
HStack {
controlButton(call, call.audioEnabled ? "mic.fill" : "mic.slash") {
chatModel.callCommand = .media(media: .audio, enable: !call.audioEnabled)
}
Spacer()
Color.clear.frame(width: 40, height: 40)
Spacer()
endCallButton()
Spacer()
if call.videoEnabled {
flipCameraButton()
} else {
Color.clear.frame(width: 40, height: 40)
Spacer()
callButton("phone.down.fill", size: 60) { dismiss() }
.foregroundColor(.red)
Spacer()
controlButton(call, "arrow.triangle.2.circlepath") {
chatModel.callCommand = .camera(camera: call.localCamera == .user ? .environment : .user)
}
Spacer()
controlButton(call, call.videoEnabled ? "video.fill" : "video.slash") {
chatModel.callCommand = .media(media: .video, enable: !call.videoEnabled)
}
}
.padding(.horizontal, 20)
.padding(.bottom, 16)
.frame(maxWidth: .infinity, alignment: .center)
case .audio:
VStack {
ProfileImage(imageStr: call.contact.profile.image)
.scaledToFit()
.frame(width: 192, height: 192)
callInfoView(call, .center)
}
.foregroundColor(.white)
.opacity(0.8)
.padding()
.frame(maxHeight: .infinity)
Spacer()
ZStack(alignment: .bottom) {
controlButton(call, call.audioEnabled ? "mic.fill" : "mic.slash") {
chatModel.callCommand = .media(media: .audio, enable: !call.audioEnabled)
}
.frame(maxWidth: .infinity, alignment: .leading)
callButton("phone.down.fill", size: 60) { dismiss() }
.foregroundColor(.red)
}
.padding(.bottom, 60)
.padding(.horizontal, 48)
toggleVideoButton()
}
.padding(.horizontal, 20)
.padding(.bottom, 16)
.frame(maxWidth: .infinity, alignment: .center)
case .audio:
VStack {
ProfileImage(imageStr: call.contact.profile.image)
.scaledToFit()
.frame(width: 192, height: 192)
callInfoView(call, .center)
}
.foregroundColor(.white)
.opacity(0.8)
.padding()
.frame(maxHeight: .infinity)
Spacer()
ZStack(alignment: .bottom) {
toggleAudioButton()
.frame(maxWidth: .infinity, alignment: .leading)
endCallButton()
}
.padding(.bottom, 60)
.padding(.horizontal, 48)
}
}
.frame(maxWidth: .infinity)
@ -186,6 +237,57 @@ struct ActiveCallOverlay: View {
}
}
private func endCallButton() -> some View {
let cc = CallController.shared
return callButton("phone.down.fill", size: 60) {
if let uuid = call.callkitUUID {
cc.endCall(callUUID: uuid)
} else {
cc.endCall(call: call) {}
}
}
.foregroundColor(.red)
}
private func toggleAudioButton() -> some View {
controlButton(call, call.audioEnabled ? "mic.fill" : "mic.slash") {
Task {
await webView.setMicrophoneCaptureState(call.audioEnabled ? .muted : .active)
DispatchQueue.main.async {
call.audioEnabled = !call.audioEnabled
}
}
}
}
private func toggleVideoButton() -> some View {
controlButton(call, call.videoEnabled ? "video.fill" : "video.slash") {
Task {
await webView.setCameraCaptureState(call.videoEnabled ? .muted : .active)
DispatchQueue.main.async {
call.videoEnabled = !call.videoEnabled
}
}
}
}
@ViewBuilder private func flipCameraButton() -> some View {
let cmd = WCallCommand.camera(camera: call.localCamera == .user ? .environment : .user)
controlButton(call, "arrow.triangle.2.circlepath") {
if call.audioEnabled {
chatModel.callCommand = cmd
} else {
Task {
// Microphone has to be enabled before flipping the camera to avoid a user permission prompt when getUserMedia is called in the webview
await webView.setMicrophoneCaptureState(.active)
DispatchQueue.main.async {
chatModel.callCommand = cmd
}
}
}
}
}
@ViewBuilder private func controlButton(_ call: Call, _ imageName: String, _ perform: @escaping () -> Void) -> some View {
if call.hasMedia {
callButton(imageName, size: 40, perform)
@ -211,9 +313,9 @@ struct ActiveCallOverlay: View {
struct ActiveCallOverlay_Previews: PreviewProvider {
static var previews: some View {
Group{
ActiveCallOverlay(call: Call(contact: Contact.sampleData, callState: .offerSent, localMedia: .video), dismiss: {})
ActiveCallOverlay(call: Call(direction: .incoming, contact: Contact.sampleData, callkitUUID: UUID(), callState: .offerSent, localMedia: .video), webView: WKWebView())
.background(.black)
ActiveCallOverlay(call: Call(contact: Contact.sampleData, callState: .offerSent, localMedia: .audio), dismiss: {})
ActiveCallOverlay(call: Call(direction: .incoming, contact: Contact.sampleData, callkitUUID: UUID(), callState: .offerSent, localMedia: .audio), webView: WKWebView())
.background(.black)
}
}

View File

@ -0,0 +1,215 @@
//
// CallController.swift
// SimpleX (iOS)
//
// Created by Evgeny on 21/05/2022.
// Copyright © 2022 SimpleX Chat. All rights reserved.
//
import Foundation
import CallKit
import AVFoundation
class CallController: NSObject, CXProviderDelegate, ObservableObject {
static let useCallKit = false
static let shared = CallController()
private let provider = CXProvider(configuration: CallController.configuration)
private let controller = CXCallController()
private let callManager = CallManager()
@Published var activeCallInvitation: CallInvitation?
// PKPushRegistry will be used from notification service extension
// let registry = PKPushRegistry(queue: nil)
static let configuration: CXProviderConfiguration = {
let configuration = CXProviderConfiguration()
configuration.supportsVideo = true
configuration.supportedHandleTypes = [.generic]
configuration.includesCallsInRecents = true // TODO disable or add option
configuration.maximumCallsPerCallGroup = 1
return configuration
}()
override init() {
super.init()
self.provider.setDelegate(self, queue: nil)
// self.registry.delegate = self
// self.registry.desiredPushTypes = [.voIP]
}
func providerDidReset(_ provider: CXProvider) {
}
func provider(_ provider: CXProvider, perform action: CXStartCallAction) {
logger.debug("CallController.provider CXStartCallAction")
if callManager.startOutgoingCall(callUUID: action.callUUID) {
action.fulfill()
provider.reportOutgoingCall(with: action.callUUID, startedConnectingAt: nil)
} else {
action.fail()
}
}
func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
logger.debug("CallController.provider CXAnswerCallAction")
if callManager.answerIncomingCall(callUUID: action.callUUID) {
action.fulfill()
} else {
action.fail()
}
}
func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
logger.debug("CallController.provider CXEndCallAction")
callManager.endCall(callUUID: action.callUUID) { ok in
if ok {
action.fulfill()
} else {
action.fail()
}
}
}
func provider(_ provider: CXProvider, timedOutPerforming action: CXAction) {
print("timed out", #function)
action.fulfill()
}
func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
print("received", #function)
// do {
// try audioSession.setCategory(.playAndRecord, mode: .voiceChat, options: .mixWithOthers)
// logger.debug("audioSession category set")
// try audioSession.setActive(true)
// logger.debug("audioSession activated")
// } catch {
// print(error)
// logger.error("failed activating audio session")
// }
}
func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
print("received", #function)
}
// func pushRegistry(_ registry: PKPushRegistry, didUpdate pushCredentials: PKPushCredentials, for type: PKPushType) {
//
// }
// This will be needed when we have notification service extension
// func pushRegistry(_ registry: PKPushRegistry, didReceiveIncomingPushWith payload: PKPushPayload, for type: PKPushType, completion: @escaping () -> Void) {
// if type == .voIP {
// // Extract the call information from the push notification payload
// if let displayName = payload.dictionaryPayload["displayName"] as? String,
// let contactId = payload.dictionaryPayload["contactId"] as? String,
// let uuidStr = payload.dictionaryPayload["uuid"] as? String,
// let uuid = UUID(uuidString: uuidStr) {
// let callUpdate = CXCallUpdate()
// callUpdate.remoteHandle = CXHandle(type: .phoneNumber, value: displayName)
// provider.reportNewIncomingCall(with: uuid, update: callUpdate, completion: { error in
// if error != nil {
// let m = ChatModel.shared
// m.callInvitations.removeValue(forKey: contactId)
// }
// // Tell PushKit that the notification is handled.
// completion()
// })
// }
// }
// }
func reportNewIncomingCall(invitation: CallInvitation, completion: @escaping (Error?) -> Void) {
logger.debug("CallController.reportNewIncomingCall")
if CallController.useCallKit, let uuid = invitation.callkitUUID {
let update = CXCallUpdate()
update.remoteHandle = CXHandle(type: .generic, value: invitation.contact.displayName)
update.hasVideo = invitation.peerMedia == .video
provider.reportNewIncomingCall(with: uuid, update: update, completion: completion)
} else {
NtfManager.shared.notifyCallInvitation(invitation)
activeCallInvitation = invitation
}
}
func reportOutgoingCall(call: Call, connectedAt dateConnected: Date?) {
if CallController.useCallKit, let uuid = call.callkitUUID {
provider.reportOutgoingCall(with: uuid, connectedAt: dateConnected)
}
}
func reportCallRemoteEnded(invitation: CallInvitation) {
if CallController.useCallKit, let uuid = invitation.callkitUUID {
provider.reportCall(with: uuid, endedAt: nil, reason: .remoteEnded)
} else if invitation.contact.id == activeCallInvitation?.contact.id {
activeCallInvitation = nil
}
}
func reportCallRemoteEnded(call: Call) {
if CallController.useCallKit, let uuid = call.callkitUUID {
provider.reportCall(with: uuid, endedAt: nil, reason: .remoteEnded)
}
}
func startCall(_ contact: Contact, _ media: CallMediaType) {
logger.debug("CallController.startCall")
let uuid = callManager.newOutgoingCall(contact, media)
if CallController.useCallKit {
let handle = CXHandle(type: .generic, value: contact.displayName)
let action = CXStartCallAction(call: uuid, handle: handle)
action.isVideo = media == .video
requestTransaction(with: action)
} else if callManager.startOutgoingCall(callUUID: uuid) {
logger.debug("CallController.startCall: call started")
} else {
logger.error("CallController.startCall: no active call")
}
}
func answerCall(invitation: CallInvitation) {
callManager.answerIncomingCall(invitation: invitation)
if invitation.contact.id == self.activeCallInvitation?.contact.id {
self.activeCallInvitation = nil
}
}
func endCall(callUUID: UUID) {
if CallController.useCallKit {
requestTransaction(with: CXEndCallAction(call: callUUID))
} else {
callManager.endCall(callUUID: callUUID) { ok in
if ok {
logger.debug("CallController.endCall: call ended")
} else {
logger.error("CallController.endCall: no actove call pr call invitation to end")
}
}
}
}
func endCall(invitation: CallInvitation) {
callManager.endCall(invitation: invitation) {
if invitation.contact.id == self.activeCallInvitation?.contact.id {
DispatchQueue.main.async {
self.activeCallInvitation = nil
}
}
}
}
func endCall(call: Call, completed: @escaping () -> Void) {
callManager.endCall(call: call, completed: completed)
}
private func requestTransaction(with action: CXAction) {
let t = CXTransaction()
t.addAction(action)
controller.request(t) { error in
if let error = error {
logger.error("CallController.requestTransaction error requesting transaction: \(error.localizedDescription)")
} else {
logger.debug("CallController.requestTransaction requested transaction successfully")
}
}
}
}
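
For reference, the rest of this commit drives the controller through a handful of entry points (see ChatView and IncomingCallView below). A rough sketch of those call sites — contact, invitation and call are assumed to come from ChatModel state:

// Sketch of the call sites used elsewhere in this commit.
let cc = CallController.shared
cc.startCall(contact, .video)              // outgoing call from the chat toolbar
cc.answerCall(invitation: invitation)      // accept an incoming invitation
cc.endCall(invitation: invitation)         // reject an incoming invitation
if let uuid = call.callkitUUID {
    cc.endCall(callUUID: uuid)             // hang up the active call
}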

View File

@ -0,0 +1,104 @@
//
// CallManager.swift
// SimpleX (iOS)
//
// Created by Evgeny on 22/05/2022.
// Copyright © 2022 SimpleX Chat. All rights reserved.
//
import Foundation
class CallManager {
func newOutgoingCall(_ contact: Contact, _ media: CallMediaType) -> UUID {
let uuid = UUID()
ChatModel.shared.activeCall = Call(direction: .outgoing, contact: contact, callkitUUID: uuid, callState: .waitCapabilities, localMedia: media)
return uuid
}
func startOutgoingCall(callUUID: UUID) -> Bool {
let m = ChatModel.shared
if let call = m.activeCall, call.callkitUUID == callUUID {
m.showCallView = true
m.callCommand = .capabilities(useWorker: true)
return true
}
return false
}
func answerIncomingCall(callUUID: UUID) -> Bool {
if let invitation = getCallInvitation(callUUID) {
answerIncomingCall(invitation: invitation)
return true
}
return false
}
func answerIncomingCall(invitation: CallInvitation) {
let m = ChatModel.shared
m.callInvitations.removeValue(forKey: invitation.contact.id)
m.activeCall = Call(
direction: .incoming,
contact: invitation.contact,
callkitUUID: invitation.callkitUUID,
callState: .invitationAccepted,
localMedia: invitation.peerMedia,
sharedKey: invitation.sharedKey
)
m.showCallView = true
m.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey, useWorker: true)
}
func endCall(callUUID: UUID, completed: @escaping (Bool) -> Void) {
if let call = ChatModel.shared.activeCall, call.callkitUUID == callUUID {
endCall(call: call) { completed(true) }
} else if let invitation = getCallInvitation(callUUID) {
endCall(invitation: invitation) { completed(true) }
} else {
completed(false)
}
}
func endCall(call: Call, completed: @escaping () -> Void) {
let m = ChatModel.shared
if case .ended = call.callState {
logger.debug("CallController.provider CXEndCallAction: call ended")
m.activeCall = nil
m.showCallView = false
completed()
} else {
logger.debug("CallController.provider CXEndCallAction: ending call...")
m.callCommand = .end
m.showCallView = false
Task {
do {
try await apiEndCall(call.contact)
} catch {
logger.error("CallController.provider apiEndCall error: \(responseError(error))")
}
DispatchQueue.main.async {
m.activeCall = nil
completed()
}
}
}
}
func endCall(invitation: CallInvitation, completed: @escaping () -> Void) {
ChatModel.shared.callInvitations.removeValue(forKey: invitation.contact.id)
Task {
do {
try await apiRejectCall(invitation.contact)
} catch {
logger.error("CallController.provider apiRejectCall error: \(responseError(error))")
}
completed()
}
}
private func getCallInvitation(_ callUUID: UUID) -> CallInvitation? {
if let (_, invitation) = ChatModel.shared.callInvitations.first(where: { (_, inv) in inv.callkitUUID == callUUID }) {
return invitation
}
return nil
}
}
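
When CallController.useCallKit is false, these methods are called directly instead of from CXAction handlers. A sketch of the non-CallKit outgoing path, assuming contact comes from the open chat:

// Non-CallKit outgoing path, as CallController.startCall does above (sketch).
let manager = CallManager()
let uuid = manager.newOutgoingCall(contact, .audio)  // creates ChatModel.shared.activeCall
if manager.startOutgoingCall(callUUID: uuid) {
    // ChatModel.showCallView is set and .capabilities is sent to the webview
}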

View File

@ -0,0 +1,86 @@
//
// IncomingCallView.swift
// SimpleX (iOS)
//
// Created by Evgeny on 24/05/2022.
// Copyright © 2022 SimpleX Chat. All rights reserved.
//
import SwiftUI
struct IncomingCallView: View {
@EnvironmentObject var m: ChatModel
@ObservedObject var cc = CallController.shared
var body: some View {
let sp = SoundPlayer.shared
if let invitation = cc.activeCallInvitation {
if m.showCallView {
incomingCall(invitation)
} else {
incomingCall(invitation)
.onAppear { sp.startRingtone() }
.onDisappear { sp.stopRingtone() }
}
}
}
private func incomingCall(_ invitation: CallInvitation) -> some View {
VStack(alignment: .leading, spacing: 6) {
HStack {
Image(systemName: invitation.peerMedia == .video ? "video.fill" : "phone.fill").foregroundColor(.green)
Text(invitation.callTypeText)
}
HStack {
ProfilePreview(profileOf: invitation.contact, color: .white)
Spacer()
callButton("Reject", "phone.down.fill", .red) {
cc.endCall(invitation: invitation)
}
callButton("Ignore", "multiply", .accentColor) {
cc.activeCallInvitation = nil
}
callButton("Accept", "checkmark", .green) {
if let call = m.activeCall {
cc.endCall(call: call) {
DispatchQueue.main.async {
cc.answerCall(invitation: invitation)
}
}
} else {
cc.answerCall(invitation: invitation)
}
}
}
}
.padding(.horizontal, 16)
.padding(.vertical, 12)
.frame(maxWidth: .infinity)
.background(Color(uiColor: .tertiarySystemGroupedBackground))
}
private func callButton(_ text: LocalizedStringKey, _ image: String, _ color: Color, action: @escaping () -> Void) -> some View {
Button(action: action, label: {
VStack(spacing: 2) {
Image(systemName: image)
.scaleEffect(1.24)
.foregroundColor(color)
.frame(width: 24, height: 24)
Text(text)
.font(.caption)
.foregroundColor(.secondary)
}
.frame(minWidth: 44)
})
}
}
struct IncomingCallView_Previews: PreviewProvider {
static var previews: some View {
CallController.shared.activeCallInvitation = CallInvitation.sampleData
return IncomingCallView()
}
}

View File

@ -0,0 +1,45 @@
//
// SoundPlayer.swift
// SimpleX (iOS)
//
// Created by Evgeny on 24/05/2022.
// Copyright © 2022 SimpleX Chat. All rights reserved.
//
import Foundation
import AVFoundation
class SoundPlayer {
static let shared = SoundPlayer()
private var audioPlayer: AVAudioPlayer?
func startRingtone() {
audioPlayer?.stop()
logger.debug("startRingtone")
guard let path = Bundle.main.path(forResource: "ringtone2", ofType: "m4a", inDirectory: "sounds") else {
logger.debug("startRingtone: file not found")
return
}
do {
let player = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: path))
if player.prepareToPlay() {
audioPlayer = player
}
} catch {
logger.debug("startRingtone: AVAudioPlayer error \(error.localizedDescription)")
}
Task {
while let player = audioPlayer {
player.play()
AudioServicesPlayAlertSound(kSystemSoundID_Vibrate)
_ = try? await Task.sleep(nanoseconds: UInt64(player.duration * 1_000_000_000))
}
}
}
func stopRingtone() {
audioPlayer?.stop()
audioPlayer = nil
}
}

View File

@ -9,70 +9,39 @@
import Foundation
import SwiftUI
class Call: Equatable {
class Call: ObservableObject, Equatable {
static func == (lhs: Call, rhs: Call) -> Bool {
lhs.contact.apiId == rhs.contact.apiId
}
var direction: CallDirection
var contact: Contact
var callState: CallState
var callkitUUID: UUID?
var localMedia: CallMediaType
var localCapabilities: CallCapabilities?
var peerMedia: CallMediaType?
var sharedKey: String?
var audioEnabled: Bool
var videoEnabled: Bool
var localCamera: VideoCamera
var connectionInfo: ConnectionInfo?
@Published var callState: CallState
@Published var localCapabilities: CallCapabilities?
@Published var peerMedia: CallMediaType?
@Published var sharedKey: String?
@Published var audioEnabled = true
@Published var videoEnabled: Bool
@Published var localCamera = VideoCamera.user
@Published var connectionInfo: ConnectionInfo?
init(
direction: CallDirection,
contact: Contact,
callkitUUID: UUID?,
callState: CallState,
localMedia: CallMediaType,
localCapabilities: CallCapabilities? = nil,
peerMedia: CallMediaType? = nil,
sharedKey: String? = nil,
audioEnabled: Bool? = nil,
videoEnabled: Bool? = nil,
localCamera: VideoCamera = .user,
connectionInfo: ConnectionInfo? = nil
sharedKey: String? = nil
) {
self.direction = direction
self.contact = contact
self.callkitUUID = callkitUUID
self.callState = callState
self.localMedia = localMedia
self.localCapabilities = localCapabilities
self.peerMedia = peerMedia
self.sharedKey = sharedKey
self.audioEnabled = audioEnabled ?? true
self.videoEnabled = videoEnabled ?? (localMedia == .video)
self.localCamera = localCamera
self.connectionInfo = connectionInfo
}
func copy(
contact: Contact? = nil,
callState: CallState? = nil,
localMedia: CallMediaType? = nil,
localCapabilities: CallCapabilities? = nil,
peerMedia: CallMediaType? = nil,
sharedKey: String? = nil,
audioEnabled: Bool? = nil,
videoEnabled: Bool? = nil,
localCamera: VideoCamera? = nil,
connectionInfo: ConnectionInfo? = nil
) -> Call {
Call (
contact: contact ?? self.contact,
callState: callState ?? self.callState,
localMedia: localMedia ?? self.localMedia,
localCapabilities: localCapabilities ?? self.localCapabilities,
peerMedia: peerMedia ?? self.peerMedia,
sharedKey: sharedKey ?? self.sharedKey,
audioEnabled: audioEnabled ?? self.audioEnabled,
videoEnabled: videoEnabled ?? self.videoEnabled,
localCamera: localCamera ?? self.localCamera,
connectionInfo: connectionInfo ?? self.connectionInfo
)
self.videoEnabled = localMedia == .video
}
var encrypted: Bool { get { localEncrypted && sharedKey != nil } }
@ -82,7 +51,7 @@ class Call: Equatable {
switch callState {
case .waitCapabilities: return ""
case .invitationSent: return localEncrypted ? "e2e encrypted" : "no e2e encryption"
case .invitationReceived: return sharedKey == nil ? "contact has no e2e encryption" : "contact has e2e encryption"
case .invitationAccepted: return sharedKey == nil ? "contact has no e2e encryption" : "contact has e2e encryption"
default: return !localEncrypted ? "no e2e encryption" : sharedKey == nil ? "contact has no e2e encryption" : "e2e encrypted"
}
}
@ -90,24 +59,33 @@ class Call: Equatable {
var hasMedia: Bool { get { callState == .offerSent || callState == .negotiated || callState == .connected } }
}
enum CallDirection {
case incoming
case outgoing
}
enum CallState {
case waitCapabilities
case invitationSent
case invitationReceived
case offerSent
case offerReceived
case negotiated
case waitCapabilities // outgoing call started
case invitationSent // outgoing call - sent invitation
case invitationAccepted // incoming call started
case offerSent // incoming - webrtc started and offer sent
case offerReceived // outgoing - webrtc offer received via API
case answerReceived // incoming - webrtc answer received via API
case negotiated // outgoing - webrtc offer processed and answer sent, incoming - webrtc answer processed
case connected
case ended
var text: LocalizedStringKey {
switch self {
case .waitCapabilities: return "starting…"
case .invitationSent: return "waiting for answer…"
case .invitationReceived: return "starting…"
case .invitationAccepted: return "starting…"
case .offerSent: return "waiting for confirmation…"
case .offerReceived: return "received answer…"
case .answerReceived: return "received confirmation…"
case .negotiated: return "connecting…"
case .connected: return "connected"
case .ended: return "ended"
}
}
}
@ -275,16 +253,16 @@ enum WCallResponse: Equatable, Decodable {
var respType: String {
get {
switch self {
case .capabilities: return("capabilities")
case .offer: return("offer")
case .answer: return("answer")
case .ice: return("ice")
case .connection: return("connection")
case .connected: return("connected")
case .ended: return("ended")
case .ok: return("ok")
case .error: return("error")
case .invalid: return("invalid")
case .capabilities: return "capabilities"
case .offer: return "offer"
case .answer: return "answer"
case .ice: return "ice"
case .connection: return "connection"
case .connected: return "connected"
case .ended: return "ended"
case .ok: return "ok"
case .error: return "error"
case .invalid: return "invalid"
}
}
}

View File

@ -9,20 +9,24 @@
import SwiftUI
import WebKit
class WebRTCCoordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler {
var webViewReady: Binding<Bool>
class WebRTCCoordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler, WKUIDelegate {
var rtcWebView: Binding<WKWebView?>
var webViewMsg: Binding<WVAPIMessage?>
private var webView: WKWebView?
internal init(webViewReady: Binding<Bool>, webViewMsg: Binding<WVAPIMessage?>) {
self.webViewReady = webViewReady
internal init(rtcWebView: Binding<WKWebView?>, webViewMsg: Binding<WVAPIMessage?>) {
self.rtcWebView = rtcWebView
self.webViewMsg = webViewMsg
}
func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
webView.allowsBackForwardNavigationGestures = false
self.webView = webView
webViewReady.wrappedValue = true
self.rtcWebView.wrappedValue = webView
ChatModel.shared.callWebView = webView
}
func webView(_ webView: WKWebView, decideMediaCapturePermissionsFor origin : WKSecurityOrigin, initiatedBy frame: WKFrameInfo, type: WKMediaCaptureType) async -> WKPermissionDecision {
print("webView", #function)
return .grant
}
// receive message from WKWebView
@ -31,34 +35,37 @@ class WebRTCCoordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler
didReceive message: WKScriptMessage
) {
logger.debug("WebRTCCoordinator.userContentController")
if let msgStr = message.body as? String,
let msg: WVAPIMessage = decodeJSON(msgStr) {
webViewMsg.wrappedValue = msg
if case .invalid = msg.resp {
logger.error("WebRTCCoordinator.userContentController: invalid message \(String(describing: message.body))")
switch message.name {
case "webrtc":
if let msgStr = message.body as? String,
let msg: WVAPIMessage = decodeJSON(msgStr) {
// this is the binding that communicates messages from webview to swift view
webViewMsg.wrappedValue = msg
if case .invalid = msg.resp {
logger.error("WebRTCCoordinator.userContentController: invalid message \(String(describing: message.body))")
}
} else {
logger.error("WebRTCCoordinator.userContentController: message parsing error \(String(describing: message.body))")
}
} else {
logger.error("WebRTCCoordinator.userContentController: message parsing error \(String(describing: message.body))")
}
}
func sendCommand(command: WCallCommand) {
if let webView = webView {
logger.debug("WebRTCCoordinator.sendCommand")
let apiCmd = encodeJSON(WVAPICall(command: command))
let js = "processCommand(\(apiCmd))"
webView.evaluateJavaScript(js)
case "logger":
if let msgStr = message.body as? String {
logger.error("WebRTCCoordinator console.log: \(msgStr)")
} else {
logger.error("WebRTCCoordinator console.log: \(String(describing: message.body))")
}
default:
logger.error("WebRTCCoordinator.userContentController: invalid message.name \(message.name)")
}
}
}
struct WebRTCView: UIViewRepresentable {
@Binding var coordinator: WebRTCCoordinator?
@Binding var webViewReady: Bool
@State private var coordinator: WebRTCCoordinator?
@Binding var rtcWebView: WKWebView?
@Binding var webViewMsg: WVAPIMessage?
func makeCoordinator() -> WebRTCCoordinator {
WebRTCCoordinator(webViewReady: $webViewReady, webViewMsg: $webViewMsg)
WebRTCCoordinator(rtcWebView: $rtcWebView, webViewMsg: $webViewMsg)
}
func makeUIView(context: Context) -> WKWebView {
@ -72,10 +79,14 @@ struct WebRTCView: UIViewRepresentable {
cfg.mediaTypesRequiringUserActionForPlayback = []
cfg.allowsInlineMediaPlayback = true
let source = "sendMessageToNative = (msg) => webkit.messageHandlers.webrtc.postMessage(JSON.stringify(msg))"
let script = WKUserScript(source: source, injectionTime: .atDocumentEnd, forMainFrameOnly: false)
wkController.addUserScript(script)
wkController.add(wkCoordinator, name: "webrtc")
let addScript = { (handler: String, source: String) in
let script = WKUserScript(source: source, injectionTime: .atDocumentEnd, forMainFrameOnly: false)
wkController.addUserScript(script)
wkController.add(wkCoordinator, name: handler)
}
addScript("webrtc", "sendMessageToNative = (msg) => webkit.messageHandlers.webrtc.postMessage(JSON.stringify(msg))")
addScript("logger", "console.log = (arg) => webkit.messageHandlers.logger.postMessage(JSON.stringify(arg))")
let wkWebView = WKWebView(frame: .zero, configuration: cfg)
wkWebView.navigationDelegate = wkCoordinator
@ -93,16 +104,22 @@ struct WebRTCView: UIViewRepresentable {
}
}
func sendCallCommand(_ webView: WKWebView, _ command: WCallCommand) {
logger.debug("sendCallCommand: \(command.cmdType)")
let apiCmd = encodeJSON(WVAPICall(command: command))
let js = "processCommand(\(apiCmd))"
webView.evaluateJavaScript(js)
}
struct CallViewDebug: View {
@State private var coordinator: WebRTCCoordinator? = nil
@State private var commandStr = ""
@State private var webViewReady: Bool = false
@State private var rtcWebView: WKWebView? = nil
@State private var webViewMsg: WVAPIMessage? = nil
@FocusState private var keyboardVisible: Bool
var body: some View {
VStack(spacing: 30) {
WebRTCView(coordinator: $coordinator, webViewReady: $webViewReady, webViewMsg: $webViewMsg).frame(maxHeight: 260)
WebRTCView(rtcWebView: $rtcWebView, webViewMsg: $webViewMsg).frame(maxHeight: 260)
.onChange(of: webViewMsg) { _ in
if let resp = webViewMsg {
commandStr = encodeJSON(resp)
@ -130,21 +147,21 @@ struct CallViewDebug: View {
commandStr = ""
}
Button("Send") {
if let c = coordinator,
if let wv = rtcWebView,
let command: WCallCommand = decodeJSON(commandStr) {
c.sendCommand(command: command)
sendCallCommand(wv, command)
}
}
}
HStack(spacing: 20) {
Button("Capabilities") {
if let c = coordinator {
c.sendCommand(command: .capabilities(useWorker: true))
if let wv = rtcWebView {
sendCallCommand(wv, .capabilities(useWorker: true))
}
}
Button("Start") {
if let c = coordinator {
c.sendCommand(command: .start(media: .video))
if let wv = rtcWebView {
sendCallCommand(wv, .start(media: .video))
}
}
Button("Accept") {

View File

@ -26,7 +26,7 @@ struct CICallItemView: View {
acceptCallButton()
}
case .missed: missedCallIcon(sent).foregroundColor(.red)
case .rejected: Image(systemName: "phone.down").foregroundColor(.secondary)
case .rejected: Image(systemName: "phone.down").foregroundColor(.red)
case .accepted: connectingCallIcon()
case .negotiated: connectingCallIcon()
case .progress: Image(systemName: "phone.and.waveform.fill").foregroundColor(.green)
@ -61,16 +61,9 @@ struct CICallItemView: View {
@ViewBuilder private func acceptCallButton() -> some View {
if case let .direct(contact) = chatInfo {
Button {
if let invitation = m.callInvitations.removeValue(forKey: contact.id) {
m.activeCallInvitation = nil
m.activeCall = Call(
contact: contact,
callState: .invitationReceived,
localMedia: invitation.peerMedia,
sharedKey: invitation.sharedKey
)
m.showCallView = true
m.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey, useWorker: true)
if let invitation = m.callInvitations[contact.id] {
CallController.shared.answerCall(invitation: invitation)
logger.debug("acceptCallButton call answered")
} else {
AlertManager.shared.showAlertMsg(title: "Call already ended!")
}

View File

@ -105,27 +105,21 @@ struct ChatView: View {
ChatInfoView(chat: chat, showChatInfo: $showChatInfo)
}
}
// ToolbarItem(placement: .navigationBarTrailing) {
// if case let .direct(contact) = cInfo {
// HStack {
// callButton(contact, .audio, imageName: "phone")
// callButton(contact, .video, imageName: "video")
// }
// }
// }
ToolbarItem(placement: .navigationBarTrailing) {
if case let .direct(contact) = cInfo {
HStack {
callButton(contact, .audio, imageName: "phone")
callButton(contact, .video, imageName: "video")
}
}
}
}
.navigationBarBackButtonHidden(true)
}
private func callButton(_ contact: Contact, _ media: CallMediaType, imageName: String) -> some View {
Button {
chatModel.activeCall = Call(
contact: contact,
callState: .waitCapabilities,
localMedia: media
)
chatModel.showCallView = true
chatModel.callCommand = .capabilities(useWorker: true)
CallController.shared.startCall(contact, media)
} label: {
Image(systemName: imageName)
}

View File

@ -49,29 +49,6 @@ struct ChatListView: View {
NewChatButton()
}
}
.fullScreenCover(isPresented: $chatModel.showCallView) {
ActiveCallView()
}
.onChange(of: chatModel.showCallView) { _ in
if (chatModel.showCallView) { return }
if let call = chatModel.activeCall {
Task {
do {
try await apiEndCall(call.contact)
} catch {
logger.error("ChatListView apiEndCall error: \(error.localizedDescription)")
}
}
}
chatModel.callCommand = .end
}
.onChange(of: chatModel.activeCallInvitation) { _ in
if let contactRef = chatModel.activeCallInvitation,
case let .direct(contact) = chatModel.getChat(contactRef.id)?.chatInfo,
let invitation = chatModel.callInvitations[contactRef.id] {
answerCallAlert(contact, invitation)
}
}
}
.navigationViewStyle(.stack)
@ -95,34 +72,6 @@ struct ChatListView: View {
$0.chatInfo.chatViewName.localizedLowercase.contains(s)
}
}
private func answerCallAlert(_ contact: Contact, _ invitation: CallInvitation) {
return AlertManager.shared.showAlert(Alert(
title: Text(invitation.callTitle),
message: Text(contact.profile.displayName).bold() +
Text(" wants to connect with you via ") +
Text(invitation.callTypeText),
primaryButton: .default(Text("Answer")) {
if let activeCallInvitation = chatModel.activeCallInvitation {
chatModel.callInvitations.removeValue(forKey: activeCallInvitation.id)
chatModel.activeCallInvitation = nil
chatModel.activeCall = Call(
contact: contact,
callState: .invitationReceived,
localMedia: invitation.peerMedia,
sharedKey: invitation.sharedKey
)
chatModel.showCallView = true
chatModel.callCommand = .start(media: invitation.peerMedia, aesKey: invitation.sharedKey, useWorker: true)
} else {
DispatchQueue.main.async {
AlertManager.shared.showAlertMsg(title: "Call already ended!")
}
}
},
secondaryButton: .cancel()
))
}
}
struct ChatListView_Previews: PreviewProvider {

View File

@ -38,18 +38,7 @@ struct SettingsView: View {
UserProfile()
.navigationTitle("Your chat profile")
} label: {
HStack {
ProfileImage(imageStr: user.image)
.frame(width: 44, height: 44)
.padding(.trailing, 6)
.padding(.vertical, 6)
VStack(alignment: .leading) {
Text(user.displayName)
.fontWeight(.bold)
.font(.title2)
Text(user.fullName)
}
}
ProfilePreview(profileOf: user)
.padding(.leading, -8)
}
NavigationLink {
@ -242,6 +231,26 @@ struct SettingsView: View {
}
}
struct ProfilePreview: View {
var profileOf: NamedChat
var color = Color(uiColor: .tertiarySystemGroupedBackground)
var body: some View {
HStack {
ProfileImage(imageStr: profileOf.image, color: color)
.frame(width: 44, height: 44)
.padding(.trailing, 6)
.padding(.vertical, 6)
VStack(alignment: .leading) {
Text(profileOf.displayName)
.fontWeight(.bold)
.font(.title2)
Text(profileOf.fullName)
}
}
}
}
struct SettingsView_Previews: PreviewProvider {
static var previews: some View {
let chatModel = ChatModel()

View File

@ -90,14 +90,14 @@
<target>**Scan QR code**: to connect to your contact in person or via video call.</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="**e2e encrypted** audio call." xml:space="preserve">
<source>**e2e encrypted** audio call.</source>
<target>**e2e encrypted** audio call.</target>
<trans-unit id="**e2e encrypted** audio call" xml:space="preserve">
<source>**e2e encrypted** audio call</source>
<target>**e2e encrypted** audio call</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="**e2e encrypted** video call." xml:space="preserve">
<source>**e2e encrypted** video call.</source>
<target>**e2e encrypted** video call.</target>
<trans-unit id="**e2e encrypted** video call" xml:space="preserve">
<source>**e2e encrypted** video call</source>
<target>**e2e encrypted** video call</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="*bold*" xml:space="preserve">
@ -1118,9 +1118,9 @@ SimpleX servers cannot see your profile.</target>
<target>accepted</target>
<note>call status</note>
</trans-unit>
<trans-unit id="audio call (not e2e encrypted)." xml:space="preserve">
<source>audio call (not e2e encrypted).</source>
<target>audio call (not e2e encrypted).</target>
<trans-unit id="audio call (not e2e encrypted)" xml:space="preserve">
<source>audio call (not e2e encrypted)</source>
<target>audio call (not e2e encrypted)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="bold" xml:space="preserve">
@ -1279,9 +1279,9 @@ SimpleX servers cannot see your profile.</target>
<target>via relay</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="video call (not e2e encrypted)." xml:space="preserve">
<source>video call (not e2e encrypted).</source>
<target>video call (not e2e encrypted).</target>
<trans-unit id="video call (not e2e encrypted)" xml:space="preserve">
<source>video call (not e2e encrypted)</source>
<target>video call (not e2e encrypted)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="waiting for answer…" xml:space="preserve">
@ -1380,14 +1380,14 @@ SimpleX servers cannot see your profile.</target>
<target>%@ wants to connect!</target>
<note>notification title</note>
</trans-unit>
<trans-unit id="**e2e encrypted** audio call." xml:space="preserve">
<source>**e2e encrypted** audio call.</source>
<target>**e2e encrypted** audio call.</target>
<trans-unit id="**e2e encrypted** audio call" xml:space="preserve">
<source>**e2e encrypted** audio call</source>
<target>**e2e encrypted** audio call</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="**e2e encrypted** video call." xml:space="preserve">
<source>**e2e encrypted** video call.</source>
<target>**e2e encrypted** video call.</target>
<trans-unit id="**e2e encrypted** video call" xml:space="preserve">
<source>**e2e encrypted** video call</source>
<target>**e2e encrypted** video call</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Accept contact request from %@?" xml:space="preserve">
@ -1415,9 +1415,9 @@ SimpleX servers cannot see your profile.</target>
<target>accepted</target>
<note>call status</note>
</trans-unit>
<trans-unit id="audio call (not e2e encrypted)." xml:space="preserve">
<source>audio call (not e2e encrypted).</source>
<target>audio call (not e2e encrypted).</target>
<trans-unit id="audio call (not e2e encrypted)" xml:space="preserve">
<source>audio call (not e2e encrypted)</source>
<target>audio call (not e2e encrypted)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="calling…" xml:space="preserve">
@ -1491,9 +1491,9 @@ SimpleX servers cannot see your profile.</target>
<target>via one-time link</target>
<note>chat list item description</note>
</trans-unit>
<trans-unit id="video call (not e2e encrypted)." xml:space="preserve">
<source>video call (not e2e encrypted).</source>
<target>video call (not e2e encrypted).</target>
<trans-unit id="video call (not e2e encrypted)" xml:space="preserve">
<source>video call (not e2e encrypted)</source>
<target>video call (not e2e encrypted)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="with e2e encryption" xml:space="preserve">

View File

@ -90,14 +90,14 @@
<target>**Сканировать QR код**: соединиться с вашим контактом при встрече или во время видеозвонка.</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="**e2e encrypted** audio call." xml:space="preserve">
<source>**e2e encrypted** audio call.</source>
<target>**e2e зашифрованный** аудиозвонок.</target>
<trans-unit id="**e2e encrypted** audio call" xml:space="preserve">
<source>**e2e encrypted** audio call</source>
<target>**e2e зашифрованный** аудиозвонок</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="**e2e encrypted** video call." xml:space="preserve">
<source>**e2e encrypted** video call.</source>
<target>**e2e зашифрованный** видеозвонок.</target>
<trans-unit id="**e2e encrypted** video call" xml:space="preserve">
<source>**e2e encrypted** video call</source>
<target>**e2e зашифрованный** видеозвонок</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="*bold*" xml:space="preserve">
@ -1118,9 +1118,9 @@ SimpleX серверы не могут получить доступ к ваше
<target>принятый звонок</target>
<note>call status</note>
</trans-unit>
<trans-unit id="audio call (not e2e encrypted)." xml:space="preserve">
<source>audio call (not e2e encrypted).</source>
<target>аудиозвонок (не e2e зашифрованный).</target>
<trans-unit id="audio call (not e2e encrypted)" xml:space="preserve">
<source>audio call (not e2e encrypted)</source>
<target>аудиозвонок (не e2e зашифрованный)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="bold" xml:space="preserve">
@ -1279,9 +1279,9 @@ SimpleX серверы не могут получить доступ к ваше
<target>через relay сервер</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="video call (not e2e encrypted)." xml:space="preserve">
<source>video call (not e2e encrypted).</source>
<target>видеозвонок (не e2e зашифрованный).</target>
<trans-unit id="video call (not e2e encrypted)" xml:space="preserve">
<source>video call (not e2e encrypted)</source>
<target>видеозвонок (не e2e зашифрованный)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="waiting for answer…" xml:space="preserve">
@ -1380,14 +1380,14 @@ SimpleX серверы не могут получить доступ к ваше
<target>%@ хочет соединиться!</target>
<note>notification title</note>
</trans-unit>
<trans-unit id="**e2e encrypted** audio call." xml:space="preserve">
<source>**e2e encrypted** audio call.</source>
<target>**e2e зашифрованный** аудиозвонок.</target>
<trans-unit id="**e2e encrypted** audio call" xml:space="preserve">
<source>**e2e encrypted** audio call</source>
<target>**e2e зашифрованный** аудиозвонок</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="**e2e encrypted** video call." xml:space="preserve">
<source>**e2e encrypted** video call.</source>
<target>**e2e зашифрованный** видеозвонок.</target>
<trans-unit id="**e2e encrypted** video call" xml:space="preserve">
<source>**e2e encrypted** video call</source>
<target>**e2e зашифрованный** видеозвонок</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Accept contact request from %@?" xml:space="preserve">
@ -1415,9 +1415,9 @@ SimpleX серверы не могут получить доступ к ваше
<target>принятый звонок</target>
<note>call status</note>
</trans-unit>
<trans-unit id="audio call (not e2e encrypted)." xml:space="preserve">
<source>audio call (not e2e encrypted).</source>
<target>аудиозвонок (не e2e зашифрованный).</target>
<trans-unit id="audio call (not e2e encrypted)" xml:space="preserve">
<source>audio call (not e2e encrypted)</source>
<target>аудиозвонок (не e2e зашифрованный)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="calling…" xml:space="preserve">
@ -1491,9 +1491,9 @@ SimpleX серверы не могут получить доступ к ваше
<target>через одноразовую ссылку</target>
<note>chat list item description</note>
</trans-unit>
<trans-unit id="video call (not e2e encrypted)." xml:space="preserve">
<source>video call (not e2e encrypted).</source>
<target>видеозвонок (не e2e зашифрованный).</target>
<trans-unit id="video call (not e2e encrypted)" xml:space="preserve">
<source>video call (not e2e encrypted)</source>
<target>видеозвонок (не e2e зашифрованный)</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="with e2e encryption" xml:space="preserve">

View File

@ -1,8 +1,8 @@
/* No comment provided by engineer. */
"**e2e encrypted** audio call." = "**e2e зашифрованный** аудиозвонок.";
"**e2e encrypted** audio call" = "**e2e зашифрованный** аудиозвонок";
/* No comment provided by engineer. */
"**e2e encrypted** video call." = "**e2e зашифрованный** видеозвонок.";
"**e2e encrypted** video call" = "**e2e зашифрованный** видеозвонок";
/* notification title */
"%@ is connected!" = "соединение с %@ установлено!";
@ -17,7 +17,7 @@
"accepted" = "принятый звонок";
/* No comment provided by engineer. */
"audio call (not e2e encrypted)." = "аудиозвонок (не e2e зашифрованный).";
"audio call (not e2e encrypted)" = "аудиозвонок (не e2e зашифрованный)";
/* call status */
"calling…" = "входящий звонок…";
@ -69,7 +69,7 @@
"via one-time link" = "через одноразовую ссылку";
/* No comment provided by engineer. */
"video call (not e2e encrypted)." = "видеозвонок (не e2e зашифрованный).";
"video call (not e2e encrypted)" = "видеозвонок (не e2e зашифрованный)";
/* No comment provided by engineer. */
"with e2e encryption" = "e2e зашифровано";

View File

@ -23,8 +23,10 @@
<false/>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
<string>fetch</string>
<string>remote-notification</string>
<string>voip</string>
</array>
</dict>
</plist>

View File

@ -13,6 +13,7 @@
3CDBCF4227FAE51000354CDD /* ComposeLinkView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3CDBCF4127FAE51000354CDD /* ComposeLinkView.swift */; };
3CDBCF4827FF621E00354CDD /* CILinkView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3CDBCF4727FF621E00354CDD /* CILinkView.swift */; };
5C029EA82837DBB3004A9677 /* CICallItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C029EA72837DBB3004A9677 /* CICallItemView.swift */; };
5C029EAA283942EA004A9677 /* CallController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C029EA9283942EA004A9677 /* CallController.swift */; };
5C063D2727A4564100AEC577 /* ChatPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C063D2627A4564100AEC577 /* ChatPreviewView.swift */; };
5C116CDC27AABE0400E66D01 /* ContactRequestView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C116CDB27AABE0400E66D01 /* ContactRequestView.swift */; };
5C13730B28156D2700F43030 /* ContactConnectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C13730A28156D2700F43030 /* ContactConnectionView.swift */; };
@ -27,6 +28,10 @@
5C3A88CE27DF50170060F1C2 /* DetermineWidth.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C3A88CD27DF50170060F1C2 /* DetermineWidth.swift */; };
5C3A88D127DF57800060F1C2 /* FramedItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C3A88D027DF57800060F1C2 /* FramedItemView.swift */; };
5C5346A827B59A6A004DF848 /* ChatHelp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C5346A727B59A6A004DF848 /* ChatHelp.swift */; };
5C55A91F283AD0E400C4E99E /* CallManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C55A91E283AD0E400C4E99E /* CallManager.swift */; };
5C55A921283CCCB700C4E99E /* IncomingCallView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C55A920283CCCB700C4E99E /* IncomingCallView.swift */; };
5C55A923283CEDE600C4E99E /* SoundPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C55A922283CEDE600C4E99E /* SoundPlayer.swift */; };
5C55A92E283D0FDE00C4E99E /* sounds in Resources */ = {isa = PBXBuildFile; fileRef = 5C55A92D283D0FDE00C4E99E /* sounds */; };
5C577F7D27C83AA10006112D /* MarkdownHelp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C577F7C27C83AA10006112D /* MarkdownHelp.swift */; };
5C5E5D3B2824468B00B0488A /* ActiveCallView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C5E5D3A2824468B00B0488A /* ActiveCallView.swift */; };
5C5E5D3D282447AB00B0488A /* CallTypes.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C5E5D3C282447AB00B0488A /* CallTypes.swift */; };
@ -130,6 +135,7 @@
3CDBCF4127FAE51000354CDD /* ComposeLinkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ComposeLinkView.swift; sourceTree = "<group>"; };
3CDBCF4727FF621E00354CDD /* CILinkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CILinkView.swift; sourceTree = "<group>"; };
5C029EA72837DBB3004A9677 /* CICallItemView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CICallItemView.swift; sourceTree = "<group>"; };
5C029EA9283942EA004A9677 /* CallController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallController.swift; sourceTree = "<group>"; };
5C063D2627A4564100AEC577 /* ChatPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatPreviewView.swift; sourceTree = "<group>"; };
5C116CDB27AABE0400E66D01 /* ContactRequestView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactRequestView.swift; sourceTree = "<group>"; };
5C13730A28156D2700F43030 /* ContactConnectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactConnectionView.swift; sourceTree = "<group>"; };
@ -146,6 +152,10 @@
5C3A88D027DF57800060F1C2 /* FramedItemView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FramedItemView.swift; sourceTree = "<group>"; };
5C422A7C27A9A6FA0097A1E1 /* SimpleX (iOS).entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "SimpleX (iOS).entitlements"; sourceTree = "<group>"; };
5C5346A727B59A6A004DF848 /* ChatHelp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatHelp.swift; sourceTree = "<group>"; };
5C55A91E283AD0E400C4E99E /* CallManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallManager.swift; sourceTree = "<group>"; };
5C55A920283CCCB700C4E99E /* IncomingCallView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IncomingCallView.swift; sourceTree = "<group>"; };
5C55A922283CEDE600C4E99E /* SoundPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SoundPlayer.swift; sourceTree = "<group>"; };
5C55A92D283D0FDE00C4E99E /* sounds */ = {isa = PBXFileReference; lastKnownFileType = folder; path = sounds; sourceTree = "<group>"; };
5C577F7C27C83AA10006112D /* MarkdownHelp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MarkdownHelp.swift; sourceTree = "<group>"; };
5C5E5D3A2824468B00B0488A /* ActiveCallView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ActiveCallView.swift; sourceTree = "<group>"; };
5C5E5D3C282447AB00B0488A /* CallTypes.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallTypes.swift; sourceTree = "<group>"; };
@ -271,6 +281,10 @@
3C714776281C081000CB4D4B /* WebRTCView.swift */,
5C9D13A2282187BB00AB8B43 /* WebRTC.swift */,
5C5E5D3A2824468B00B0488A /* ActiveCallView.swift */,
5C029EA9283942EA004A9677 /* CallController.swift */,
5C55A91E283AD0E400C4E99E /* CallManager.swift */,
5C55A920283CCCB700C4E99E /* IncomingCallView.swift */,
5C55A922283CEDE600C4E99E /* SoundPlayer.swift */,
);
path = Call;
sourceTree = "<group>";
@ -362,6 +376,7 @@
5CA059BD279559F40002BEB4 = {
isa = PBXGroup;
children = (
5C55A92D283D0FDE00C4E99E /* sounds */,
3C714779281C0F6800CB4D4B /* www */,
5CC2C0FD2809BF11000C35E3 /* SimpleX--iOS--InfoPlist.strings */,
5CC2C0FA2809BF11000C35E3 /* Localizable.strings */,
@ -625,6 +640,7 @@
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
5C55A92E283D0FDE00C4E99E /* sounds in Resources */,
3C71477A281C0F6800CB4D4B /* www in Resources */,
5CA059EF279559F40002BEB4 /* Assets.xcassets in Resources */,
5CC2C0FC2809BF11000C35E3 /* Localizable.strings in Resources */,
@ -657,6 +673,7 @@
files = (
5C6AD81327A834E300348BD7 /* NewChatButton.swift in Sources */,
5CDCAD7F281894FB00503DA2 /* API.swift in Sources */,
5C55A923283CEDE600C4E99E /* SoundPlayer.swift in Sources */,
5CDCAD81281A7E2700503DA2 /* Notifications.swift in Sources */,
5CB924D727A8563F00ACCCDD /* SettingsView.swift in Sources */,
5CEACCE327DE9246000BD591 /* ComposeView.swift in Sources */,
@ -667,6 +684,7 @@
5CDCAD5328186F9500503DA2 /* GroupDefaults.swift in Sources */,
5C13730B28156D2700F43030 /* ContactConnectionView.swift in Sources */,
5CE4407927ADB701007B033A /* EmojiItemView.swift in Sources */,
5C029EAA283942EA004A9677 /* CallController.swift in Sources */,
5C5346A827B59A6A004DF848 /* ChatHelp.swift in Sources */,
648010AB281ADD15009009B9 /* CIFileView.swift in Sources */,
3CDBCF4227FAE51000354CDD /* ComposeLinkView.swift in Sources */,
@ -701,10 +719,12 @@
5C2E260F27A30FDC00F70299 /* ChatView.swift in Sources */,
5C2E260B27A30CFA00F70299 /* ChatListView.swift in Sources */,
5C971E2127AEBF8300C8A3CE /* ChatInfoImage.swift in Sources */,
5C55A921283CCCB700C4E99E /* IncomingCallView.swift in Sources */,
6454036F2822A9750090DDFF /* ComposeFileView.swift in Sources */,
5C5F2B6D27EBC3FE006A9D5F /* ImagePicker.swift in Sources */,
5C577F7D27C83AA10006112D /* MarkdownHelp.swift in Sources */,
5CA059EB279559F40002BEB4 /* SimpleXApp.swift in Sources */,
5C55A91F283AD0E400C4E99E /* CallManager.swift in Sources */,
5CCD403727A5F9A200368C90 /* ScanToConnectView.swift in Sources */,
649BCDA22805D6EF00C3A862 /* CIImageView.swift in Sources */,
5CCD403A27A5F9BE00368C90 /* CreateGroupView.swift in Sources */,

View File

@ -38,10 +38,10 @@
"**Create link / QR code** for your contact to use." = "**Создать ссылку / QR код** для вашего контакта.";
/* No comment provided by engineer. */
"**e2e encrypted** audio call." = "**e2e зашифрованный** аудиозвонок.";
"**e2e encrypted** audio call" = "**e2e зашифрованный** аудиозвонок";
/* No comment provided by engineer. */
"**e2e encrypted** video call." = "**e2e зашифрованный** видеозвонок.";
"**e2e encrypted** video call" = "**e2e зашифрованный** видеозвонок";
/* No comment provided by engineer. */
"**Paste received link** or open it in the browser and tap **Open in mobile app**." = "**Вставить полученную ссылку**, или откройте её в браузере и нажмите **Open in mobile app**.";
@ -122,7 +122,7 @@
"Attach" = "Прикрепить";
/* No comment provided by engineer. */
"audio call (not e2e encrypted)." = "аудиозвонок (не e2e зашифрованный).";
"audio call (not e2e encrypted)" = "аудиозвонок (не e2e зашифрованный)";
/* No comment provided by engineer. */
"bold" = "жирный";
@ -678,7 +678,7 @@
"via relay" = "через relay сервер";
/* No comment provided by engineer. */
"video call (not e2e encrypted)." = "видеозвонок (не e2e зашифрованный).";
"video call (not e2e encrypted)" = "видеозвонок (не e2e зашифрованный)";
/* No comment provided by engineer. */
"waiting for answer…" = "ожидается ответ…";

Binary file not shown.

View File

@ -289,7 +289,8 @@ const processCommand = (function () {
const pc = new RTCPeerConnection(config.peerConnectionConfig)
const remoteStream = new MediaStream()
const localCamera = VideoCamera.User
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType, localCamera))
const constraints = callMediaConstraints(mediaType, localCamera)
const localStream = await navigator.mediaDevices.getUserMedia(constraints)
const iceCandidates = getIceCandidates(pc, config)
const call = {connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey, useWorker}
await setupMediaStreams(call)
@ -310,8 +311,10 @@ const processCommand = (function () {
})
if (pc.connectionState == "disconnected" || pc.connectionState == "failed") {
pc.removeEventListener("connectionstatechange", connectionStateChange)
if (activeCall) {
setTimeout(() => sendMessageToNative({resp: {type: "ended"}}), 0)
}
endCall()
setTimeout(() => sendMessageToNative({resp: {type: "ended"}}), 0)
} else if (pc.connectionState == "connected") {
const stats = (await pc.getStats()) as Map<string, any>
for (const stat of stats.values()) {
@ -326,7 +329,7 @@ const processCommand = (function () {
remoteCandidate: stats.get(iceCandidatePair.remoteCandidateId),
},
}
setTimeout(() => sendMessageToNative({resp}), 0)
setTimeout(() => sendMessageToNative({resp}), 500)
break
}
}
@ -442,17 +445,9 @@ const processCommand = (function () {
case "camera":
if (!activeCall || !pc) {
resp = {type: "error", message: "camera: call not started"}
} else if (activeCall.localMedia == CallMediaType.Audio) {
resp = {type: "error", message: "camera: no video"}
} else {
try {
if (command.camera != activeCall.localCamera) {
await replaceCamera(activeCall, command.camera)
}
resp = {type: "ok"}
} catch (e) {
resp = {type: "error", message: `camera: ${(e as Error).message}`}
}
await replaceMedia(activeCall, command.camera)
resp = {type: "ok"}
}
break
case "end":
@ -464,7 +459,7 @@ const processCommand = (function () {
break
}
} catch (e) {
resp = {type: "error", message: (e as Error).message}
resp = {type: "error", message: `${command.type}: ${(e as Error).message}`}
}
const apiResp = {corrId, resp, command}
sendMessageToNative(apiResp)
@ -506,6 +501,9 @@ const processCommand = (function () {
if (call.useWorker && !call.worker) {
const workerCode = `const callCrypto = (${callCryptoFunction.toString()})(); (${workerFunction.toString()})()`
call.worker = new Worker(URL.createObjectURL(new Blob([workerCode], {type: "text/javascript"})))
call.worker.onerror = ({error, filename, lineno, message}: ErrorEvent) =>
console.log(JSON.stringify({error, filename, lineno, message}))
call.worker.onmessage = ({data}) => console.log(JSON.stringify({message: data}))
}
}
}
@ -532,14 +530,19 @@ const processCommand = (function () {
// Pull tracks from the remote stream as they arrive and add them to remoteStream video
const pc = call.connection
pc.ontrack = (event) => {
if (call.aesKey && call.key) {
console.log("set up decryption for receiving")
setupPeerTransform(TransformOperation.Decrypt, event.receiver as RTCRtpReceiverWithEncryption, call.worker, call.aesKey, call.key)
}
for (const stream of event.streams) {
for (const track of stream.getTracks()) {
call.remoteStream.addTrack(track)
try {
if (call.aesKey && call.key) {
console.log("set up decryption for receiving")
setupPeerTransform(TransformOperation.Decrypt, event.receiver as RTCRtpReceiverWithEncryption, call.worker, call.aesKey, call.key)
}
for (const stream of event.streams) {
for (const track of stream.getTracks()) {
call.remoteStream.addTrack(track)
}
}
console.log(`ontrack success`)
} catch (e) {
console.log(`ontrack error: ${(e as Error).message}`)
}
}
}
@ -573,7 +576,7 @@ const processCommand = (function () {
}
}
async function replaceCamera(call: Call, camera: VideoCamera): Promise<void> {
async function replaceMedia(call: Call, camera: VideoCamera): Promise<void> {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
const pc = call.connection
@ -588,6 +591,7 @@ const processCommand = (function () {
}
function replaceTracks(pc: RTCPeerConnection, tracks: MediaStreamTrack[]) {
if (!tracks.length) return
const sender = pc.getSenders().find((s) => s.track?.kind === tracks[0].kind)
if (sender) for (const t of tracks) sender.replaceTrack(t)
}
@ -713,8 +717,10 @@ function callCryptoFunction(): CallCrypto {
const initial = data.subarray(0, n)
const plaintext = data.subarray(n, data.byteLength)
try {
const ciphertext = await crypto.subtle.encrypt({name: "AES-GCM", iv: iv.buffer}, key, plaintext)
frame.data = concatN(initial, new Uint8Array(ciphertext), iv).buffer
const ciphertext = new Uint8Array(
plaintext.length ? await crypto.subtle.encrypt({name: "AES-GCM", iv: iv.buffer}, key, plaintext) : 0
)
frame.data = concatN(initial, ciphertext, iv).buffer
controller.enqueue(frame)
} catch (e) {
console.log(`encryption error ${e}`)
@ -731,8 +737,8 @@ function callCryptoFunction(): CallCrypto {
const ciphertext = data.subarray(n, data.byteLength - IV_LENGTH)
const iv = data.subarray(data.byteLength - IV_LENGTH, data.byteLength)
try {
const plaintext = await crypto.subtle.decrypt({name: "AES-GCM", iv}, key, ciphertext)
frame.data = concatN(initial, new Uint8Array(plaintext)).buffer
const plaintext = new Uint8Array(ciphertext.length ? await crypto.subtle.decrypt({name: "AES-GCM", iv}, key, ciphertext) : 0)
frame.data = concatN(initial, plaintext).buffer
controller.enqueue(frame)
} catch (e) {
console.log(`decryption error ${e}`)
@ -864,9 +870,14 @@ function workerFunction() {
// encryption using RTCRtpScriptTransform.
if ("RTCTransformEvent" in self) {
self.addEventListener("rtctransform", async ({transformer}: any) => {
const {operation, aesKey} = transformer.options
const {readable, writable} = transformer
await setupTransform({operation, aesKey, readable, writable})
try {
const {operation, aesKey} = transformer.options
const {readable, writable} = transformer
await setupTransform({operation, aesKey, readable, writable})
self.postMessage({result: "setupTransform success"})
} catch (e) {
self.postMessage({message: `setupTransform error: ${(e as Error).message}`})
}
})
}

View File

@ -1,12 +1,10 @@
video::-webkit-media-controls {
display: none;
}
html,
body {
padding: 0;
margin: 0;
background-color: black;
}
#remote-video-stream {
position: absolute;
width: 100%;
@ -24,3 +22,20 @@ body {
top: 0;
right: 0;
}
*::-webkit-media-controls {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-panel {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-play-button {
display: none !important;
-webkit-appearance: none !important;
}
*::-webkit-media-controls-start-playback-button {
display: none !important;
-webkit-appearance: none !important;
}

View File

@ -880,6 +880,7 @@ callStatusItemContent userId Contact {contactId} chatItemId receivedStatus = do
(Just CISCallPending, WCSDisconnected) -> Just (CISCallMissed, 0)
(Just CISCallEnded, _) -> Nothing -- if call already ended or failed -> no change
(Just CISCallError, _) -> Nothing
(Just _, WCSConnecting) -> Just (CISCallNegotiated, 0)
(Just _, WCSConnected) -> Just (CISCallProgress, 0) -- if call ended that was never connected, duration = 0
(Just _, WCSDisconnected) -> Just (CISCallEnded, 0)
(Just _, WCSFailed) -> Just (CISCallError, 0)

View File

@ -202,16 +202,18 @@ instance ToJSON WebRTCExtraInfo where
toJSON = J.genericToJSON J.defaultOptions
toEncoding = J.genericToEncoding J.defaultOptions
data WebRTCCallStatus = WCSConnected | WCSDisconnected | WCSFailed
data WebRTCCallStatus = WCSConnecting | WCSConnected | WCSDisconnected | WCSFailed
deriving (Show)
instance StrEncoding WebRTCCallStatus where
strEncode = \case
WCSConnecting -> "connecting"
WCSConnected -> "connected"
WCSDisconnected -> "disconnected"
WCSFailed -> "failed"
strP =
A.takeTill (== ' ') >>= \case
"connecting" -> pure WCSConnecting
"connected" -> pure WCSConnected
"disconnected" -> pure WCSDisconnected
"failed" -> pure WCSFailed