From edc5a4c31b238fa2c9e97b01fc6b16c795e4876e Mon Sep 17 00:00:00 2001
From: Stanislav Dmitrenko <7953703+avently@users.noreply.github.com>
Date: Tue, 13 Feb 2024 22:04:42 +0700
Subject: [PATCH] ios: Picture-in-picture while in calls (#3792)

* ios: Picture-in-picture while in calls

* simplify

* improvements

* back button and lots of small issues

* layout

* padding

* back button

* animation, padding, fullscreen

* end active call button

* removed unused code

* unused line

* transition

* better

* better

* deinit PiP controller

* stop camera after call end

* formatting

* stop capture if active

---------

Co-authored-by: Avently
Co-authored-by: Evgeny Poberezkin
---
 apps/ios/Shared/ContentView.swift             |  56 +++++++-
 apps/ios/Shared/Model/ChatModel.swift         |   1 +
 .../Shared/Views/Call/ActiveCallView.swift    | 126 ++++++++++++-----
 apps/ios/Shared/Views/Call/CallManager.swift  |   2 +
 .../Shared/Views/Call/CallViewRenderers.swift | 127 +++++++++++++++++-
 apps/ios/Shared/Views/Call/WebRTC.swift       |   2 +
 apps/ios/Shared/Views/Call/WebRTCClient.swift |   5 +
 .../Views/Chat/ChatItem/CIImageView.swift     |   3 +
 .../Views/Chat/ChatItem/CIVideoView.swift     |   6 +
 apps/ios/Shared/Views/Chat/ChatView.swift     |  24 +++-
 10 files changed, 309 insertions(+), 43 deletions(-)

diff --git a/apps/ios/Shared/ContentView.swift b/apps/ios/Shared/ContentView.swift
index 45e0332da..acea38e69 100644
--- a/apps/ios/Shared/ContentView.swift
+++ b/apps/ios/Shared/ContentView.swift
@@ -34,6 +34,8 @@ struct ContentView: View {
     @State private var waitingForOrPassedAuth = true
     @State private var chatListActionSheet: ChatListActionSheet? = nil
 
+    private let callTopPadding: CGFloat = 50
+
     private enum ChatListActionSheet: Identifiable {
         case planAndConnectSheet(sheet: PlanAndConnectActionSheet)
 
@@ -50,16 +52,28 @@ struct ContentView: View {
 
     var body: some View {
         ZStack {
+            let showCallArea = chatModel.activeCall != nil && chatModel.activeCall?.callState != .waitCapabilities && chatModel.activeCall?.callState != .invitationAccepted
             // contentView() has to be in a single branch, so that enabling authentication doesn't trigger re-rendering and close settings.
             // i.e. with separate branches like this settings are closed: `if prefPerformLA { ... contentView() ... } else { contentView() }
             if !prefPerformLA || accessAuthenticated {
                 contentView()
+                    .padding(.top, showCallArea ? callTopPadding : 0)
             } else {
                 lockButton()
+                    .padding(.top, showCallArea ? callTopPadding : 0)
             }
+
+            if showCallArea, let call = chatModel.activeCall {
+                VStack {
+                    activeCallInteractiveArea(call)
+                    Spacer()
+                }
+            }
+
             if chatModel.showCallView, let call = chatModel.activeCall {
                 callView(call)
             }
+
             if !showSettings, let la = chatModel.laRequest {
                 LocalAuthView(authRequest: la)
                     .onDisappear {
@@ -135,11 +149,11 @@ struct ContentView: View {
         if case .onboardingComplete = step,
            chatModel.currentUser != nil {
             mainView()
-            .actionSheet(item: $chatListActionSheet) { sheet in
-                switch sheet {
-                case let .planAndConnectSheet(sheet): return planAndConnectActionSheet(sheet, dismiss: false)
+                .actionSheet(item: $chatListActionSheet) { sheet in
+                    switch sheet {
+                    case let .planAndConnectSheet(sheet): return planAndConnectActionSheet(sheet, dismiss: false)
+                    }
                 }
-            }
         } else {
             OnboardingView(onboarding: step)
         }
@@ -163,6 +177,40 @@ struct ContentView: View {
         }
     }
 
+    @ViewBuilder private func activeCallInteractiveArea(_ call: Call) -> some View {
+        HStack {
+            Text(call.contact.displayName).font(.body).foregroundColor(.white)
+            Spacer()
+            CallDuration(call: call)
+        }
+        .padding(.horizontal)
+        .frame(height: callTopPadding - 10)
+        .background(Color(uiColor: UIColor(red: 47/255, green: 208/255, blue: 88/255, alpha: 1)))
+        .onTapGesture {
+            chatModel.activeCallViewIsCollapsed = false
+        }
+    }
+
+    struct CallDuration: View {
+        let call: Call
+        @State var text: String = ""
+        @State var timer: Timer? = nil
+
+        var body: some View {
+            Text(text).frame(minWidth: text.count <= 5 ? 52 : 77, alignment: .leading).offset(x: 4).font(.body).foregroundColor(.white)
+                .onAppear {
+                    timer = Timer.scheduledTimer(withTimeInterval: 0.3, repeats: true) { timer in
+                        if let connectedAt = call.connectedAt {
+                            text = durationText(Int(Date.now.timeIntervalSince1970 - connectedAt.timeIntervalSince1970))
+                        }
+                    }
+                }
+                .onDisappear {
+                    _ = timer?.invalidate()
+                }
+        }
+    }
+
     private func lockButton() -> some View {
         Button(action: authenticateContentViewAccess) { Label("Unlock", systemImage: "lock") }
     }
diff --git a/apps/ios/Shared/Model/ChatModel.swift b/apps/ios/Shared/Model/ChatModel.swift
index 0fff53a06..c54e11eb7 100644
--- a/apps/ios/Shared/Model/ChatModel.swift
+++ b/apps/ios/Shared/Model/ChatModel.swift
@@ -90,6 +90,7 @@ final class ChatModel: ObservableObject {
     @Published var activeCall: Call?
     let callCommand: WebRTCCommandProcessor = WebRTCCommandProcessor()
     @Published var showCallView = false
+    @Published var activeCallViewIsCollapsed = false
     // remote desktop
     @Published var remoteCtrlSession: RemoteCtrlSession?
     // currently showing invitation
diff --git a/apps/ios/Shared/Views/Call/ActiveCallView.swift b/apps/ios/Shared/Views/Call/ActiveCallView.swift
index a3be2e900..9f246f63f 100644
--- a/apps/ios/Shared/Views/Call/ActiveCallView.swift
+++ b/apps/ios/Shared/Views/Call/ActiveCallView.swift
@@ -12,49 +12,67 @@ import SimpleXChat
 
 struct ActiveCallView: View {
     @EnvironmentObject var m: ChatModel
+    @Environment(\.colorScheme) var colorScheme
     @ObservedObject var call: Call
     @Environment(\.scenePhase) var scenePhase
     @State private var client: WebRTCClient? = nil
     @State private var activeCall: WebRTCClient.Call? = nil
     @State private var localRendererAspectRatio: CGFloat? = nil
     @Binding var canConnectCall: Bool
+    @State var prevColorScheme: ColorScheme = .dark
+    @State var pipShown = false
 
     var body: some View {
-        ZStack(alignment: .bottom) {
-            if let client = client, [call.peerMedia, call.localMedia].contains(.video), activeCall != nil {
-                GeometryReader { g in
-                    let width = g.size.width * 0.3
-                    ZStack(alignment: .topTrailing) {
-                        CallViewRemote(client: client, activeCall: $activeCall)
-                        CallViewLocal(client: client, activeCall: $activeCall, localRendererAspectRatio: $localRendererAspectRatio)
-                            .cornerRadius(10)
-                            .frame(width: width, height: width / (localRendererAspectRatio ?? 1))
-                            .padding([.top, .trailing], 17)
+        ZStack(alignment: .topLeading) {
+            ZStack(alignment: .bottom) {
+                if let client = client, [call.peerMedia, call.localMedia].contains(.video), activeCall != nil {
+                    GeometryReader { g in
+                        let width = g.size.width * 0.3
+                        ZStack(alignment: .topTrailing) {
+                            CallViewRemote(client: client, activeCall: $activeCall, activeCallViewIsCollapsed: $m.activeCallViewIsCollapsed, pipShown: $pipShown)
+                            CallViewLocal(client: client, activeCall: $activeCall, localRendererAspectRatio: $localRendererAspectRatio, pipShown: $pipShown)
+                                .cornerRadius(10)
+                                .frame(width: width, height: width / (localRendererAspectRatio ?? 1))
+                                .padding([.top, .trailing], 17)
+                            ZStack(alignment: .center) {
+                                // For some reason, when the view in GeometryReader and ZStack is visible, it steals clicks on a back button, so showing something on top like this with background color helps (.clear color doesn't work)
+                            }
+                            .frame(maxWidth: .infinity, maxHeight: .infinity)
+                            .background(Color.primary.opacity(0.000001))
+                        }
                     }
                 }
-            }
-            if let call = m.activeCall, let client = client {
-                ActiveCallOverlay(call: call, client: client)
+                if let call = m.activeCall, let client = client, (!pipShown || !call.supportsVideo) {
+                    ActiveCallOverlay(call: call, client: client)
+                }
             }
         }
+        .allowsHitTesting(!m.activeCallViewIsCollapsed)
+        .opacity(m.activeCallViewIsCollapsed ? 0 : 1)
         .onAppear {
             logger.debug("ActiveCallView: appear client is nil \(client == nil), scenePhase \(String(describing: scenePhase)), canConnectCall \(canConnectCall)")
             AppDelegate.keepScreenOn(true)
             createWebRTCClient()
             dismissAllSheets()
+            hideKeyboard()
+            prevColorScheme = colorScheme
         }
         .onChange(of: canConnectCall) { _ in
            logger.debug("ActiveCallView: canConnectCall changed to \(canConnectCall)")
            createWebRTCClient()
         }
+        .onChange(of: m.activeCallViewIsCollapsed) { _ in
+            hideKeyboard()
+        }
         .onDisappear {
             logger.debug("ActiveCallView: disappear")
             Task { await m.callCommand.setClient(nil) }
             AppDelegate.keepScreenOn(false)
             client?.endCall()
         }
-        .background(.black)
-        .preferredColorScheme(.dark)
+        .background(m.activeCallViewIsCollapsed ? .clear : .black)
+        // Quite a big delay when opening/closing the view when a scheme changes (globally) this way. It's not needed when CallKit is used since status bar is green with white text on it
+        .preferredColorScheme(m.activeCallViewIsCollapsed || CallController.useCallKit() ? prevColorScheme : .dark)
     }
 
     private func createWebRTCClient() {
@@ -69,8 +87,8 @@ struct ActiveCallView: View {
     @MainActor
     private func processRtcMessage(msg: WVAPIMessage) {
         if call == m.activeCall,
-            let call = m.activeCall,
-            let client = client {
+           let call = m.activeCall,
+           let client = client {
             logger.debug("ActiveCallView: response \(msg.resp.respType)")
             switch msg.resp {
             case let .capabilities(capabilities):
@@ -90,7 +108,7 @@ struct ActiveCallView: View {
                 Task {
                     do {
                         try await apiSendCallOffer(call.contact, offer, iceCandidates,
-                                                    media: call.localMedia, capabilities: capabilities)
+                                                   media: call.localMedia, capabilities: capabilities)
                     } catch {
                         logger.error("apiSendCallOffer \(responseError(error))")
                     }
@@ -122,13 +140,15 @@ struct ActiveCallView: View {
                 if let callStatus = WebRTCCallStatus.init(rawValue: state.connectionState),
                    case .connected = callStatus {
                     call.direction == .outgoing
-                        ? CallController.shared.reportOutgoingCall(call: call, connectedAt: nil)
-                        : CallController.shared.reportIncomingCall(call: call, connectedAt: nil)
+                    ? CallController.shared.reportOutgoingCall(call: call, connectedAt: nil)
+                    : CallController.shared.reportIncomingCall(call: call, connectedAt: nil)
                     call.callState = .connected
+                    call.connectedAt = .now
                 }
                 if state.connectionState == "closed" {
                     closeCallView(client)
                     m.activeCall = nil
+                    m.activeCallViewIsCollapsed = false
                 }
                 Task {
                     do {
@@ -140,6 +160,7 @@ struct ActiveCallView: View {
             case let .connected(connectionInfo):
                 call.callState = .connected
                 call.connectionInfo = connectionInfo
+                call.connectedAt = .now
             case .ended:
                 closeCallView(client)
                 call.callState = .ended
@@ -153,6 +174,7 @@ struct ActiveCallView: View {
             case .end:
                 closeCallView(client)
                 m.activeCall = nil
+                m.activeCallViewIsCollapsed = false
             default: ()
             }
         case let .error(message):
@@ -181,7 +203,7 @@ struct ActiveCallOverlay: View {
         VStack {
             switch call.localMedia {
             case .video:
-                callInfoView(call, .leading)
+                videoCallInfoView(call)
                     .foregroundColor(.white)
                     .opacity(0.8)
                     .padding()
@@ -208,16 +230,25 @@ struct ActiveCallOverlay: View {
                 .frame(maxWidth: .infinity, alignment: .center)
 
             case .audio:
-                VStack {
-                    ProfileImage(imageStr: call.contact.profile.image)
-                        .scaledToFit()
-                        .frame(width: 192, height: 192)
-                    callInfoView(call, .center)
+                ZStack(alignment: .topLeading) {
+                    Button {
+                        chatModel.activeCallViewIsCollapsed = true
+                    } label: {
+                        Label("Back", systemImage: "chevron.left")
+                            .padding()
+                            .foregroundColor(.white.opacity(0.8))
+                    }
+                    VStack {
+                        ProfileImage(imageStr: call.contact.profile.image)
+                            .scaledToFit()
+                            .frame(width: 192, height: 192)
+                        audioCallInfoView(call)
+                    }
+                    .foregroundColor(.white)
+                    .opacity(0.8)
+                    .padding()
+                    .frame(maxHeight: .infinity)
                 }
-                .foregroundColor(.white)
-                .opacity(0.8)
-                .padding()
-                .frame(maxHeight: .infinity)
 
                 Spacer()
 
@@ -235,12 +266,12 @@ struct ActiveCallOverlay: View {
         .frame(maxWidth: .infinity)
     }
 
-    private func callInfoView(_ call: Call, _ alignment: Alignment) -> some View {
+    private func audioCallInfoView(_ call: Call) -> some View {
         VStack {
             Text(call.contact.chatViewName)
                 .lineLimit(1)
                 .font(.title)
-                .frame(maxWidth: .infinity, alignment: alignment)
+                .frame(maxWidth: .infinity, alignment: .center)
             Group {
                 Text(call.callState.text)
                 HStack {
@@ -251,7 +282,36 @@ struct ActiveCallOverlay: View {
                 }
             }
             .font(.subheadline)
-            .frame(maxWidth: .infinity, alignment: alignment)
+            .frame(maxWidth: .infinity, alignment: .center)
+        }
+    }
+
+    private func videoCallInfoView(_ call: Call) -> some View {
+        VStack {
+            Button {
+                chatModel.activeCallViewIsCollapsed = true
+            } label: {
+                HStack(alignment: .center, spacing: 16) {
+                    Image(systemName: "chevron.left")
+                        .resizable()
+                        .frame(width: 10, height: 18)
+                    Text(call.contact.chatViewName)
+                        .lineLimit(1)
+                        .font(.title)
+                        .frame(maxWidth: .infinity, alignment: .leading)
+                }
+            }
+            Group {
+                Text(call.callState.text)
+                HStack {
+                    Text(call.encryptionStatus)
+                    if let connInfo = call.connectionInfo {
+                        Text("(") + Text(connInfo.text) + Text(")")
+                    }
+                }
+            }
+            .font(.subheadline)
+            .frame(maxWidth: .infinity, alignment: .leading)
         }
     }
 
diff --git a/apps/ios/Shared/Views/Call/CallManager.swift b/apps/ios/Shared/Views/Call/CallManager.swift
index 194af3ab0..a6d5ea17c 100644
--- a/apps/ios/Shared/Views/Call/CallManager.swift
+++ b/apps/ios/Shared/Views/Call/CallManager.swift
@@ -92,6 +92,7 @@ class CallManager {
         if case .ended = call.callState {
             logger.debug("CallManager.endCall: call ended")
             m.activeCall = nil
+            m.activeCallViewIsCollapsed = false
             m.showCallView = false
             completed()
         } else {
@@ -100,6 +101,7 @@ class CallManager {
             await m.callCommand.processCommand(.end)
             await MainActor.run {
                 m.activeCall = nil
+                m.activeCallViewIsCollapsed = false
                 m.showCallView = false
                 completed()
             }
diff --git a/apps/ios/Shared/Views/Call/CallViewRenderers.swift b/apps/ios/Shared/Views/Call/CallViewRenderers.swift
index 93766ced1..a3201d935 100644
--- a/apps/ios/Shared/Views/Call/CallViewRenderers.swift
+++ b/apps/ios/Shared/Views/Call/CallViewRenderers.swift
@@ -6,14 +6,20 @@
 import SwiftUI
 import WebRTC
 import SimpleXChat
+import AVKit
 
 struct CallViewRemote: UIViewRepresentable {
     var client: WebRTCClient
     var activeCall: Binding<WebRTCClient.Call?>
+    @State var enablePip: (Bool) -> Void = {_ in }
+    @Binding var activeCallViewIsCollapsed: Bool
+    @Binding var pipShown: Bool
 
-    init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>) {
+    init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>, activeCallViewIsCollapsed: Binding<Bool>, pipShown: Binding<Bool>) {
         self.client = client
         self.activeCall = activeCall
+        self._activeCallViewIsCollapsed = activeCallViewIsCollapsed
+        self._pipShown = pipShown
     }
 
     func makeUIView(context: Context) -> UIView {
@@ -23,12 +29,120 @@ struct CallViewRemote: UIViewRepresentable {
             remoteRenderer.videoContentMode = .scaleAspectFill
             client.addRemoteRenderer(call, remoteRenderer)
             addSubviewAndResize(remoteRenderer, into: view)
+
+            if AVPictureInPictureController.isPictureInPictureSupported() {
+                makeViewWithRTCRenderer(call, remoteRenderer, view, context)
+            }
         }
         return view
     }
+
+    func makeViewWithRTCRenderer(_ call: WebRTCClient.Call, _ remoteRenderer: RTCMTLVideoView, _ view: UIView, _ context: Context) {
+        let pipRemoteRenderer = RTCMTLVideoView(frame: view.frame)
+        pipRemoteRenderer.videoContentMode = .scaleAspectFill
+
+        let pipVideoCallViewController = AVPictureInPictureVideoCallViewController()
+        pipVideoCallViewController.preferredContentSize = CGSize(width: 1080, height: 1920)
+        addSubviewAndResize(pipRemoteRenderer, into: pipVideoCallViewController.view)
+        let pipContentSource = AVPictureInPictureController.ContentSource(
+            activeVideoCallSourceView: view,
+            contentViewController: pipVideoCallViewController
+        )
+
+        let pipController = AVPictureInPictureController(contentSource: pipContentSource)
+        pipController.canStartPictureInPictureAutomaticallyFromInline = true
+        pipController.delegate = context.coordinator
+        context.coordinator.pipController = pipController
+        context.coordinator.willShowHide = { show in
+            if show {
+                client.addRemoteRenderer(call, pipRemoteRenderer)
+                DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
+                    activeCallViewIsCollapsed = true
+                }
+            } else {
+                DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
+                    activeCallViewIsCollapsed = false
+                }
+            }
+        }
+        context.coordinator.didShowHide = { show in
+            if show {
+                remoteRenderer.isHidden = true
+            } else {
+                client.removeRemoteRenderer(call, pipRemoteRenderer)
+                remoteRenderer.isHidden = false
+            }
+            pipShown = show
+        }
+        DispatchQueue.main.async {
+            enablePip = { enable in
+                if enable != pipShown /* pipController.isPictureInPictureActive */ {
+                    if enable {
+                        pipController.startPictureInPicture()
+                    } else {
+                        pipController.stopPictureInPicture()
+                    }
+                }
+            }
+        }
+    }
+
+    func makeCoordinator() -> Coordinator {
+        Coordinator()
+    }
 
     func updateUIView(_ view: UIView, context: Context) {
         logger.debug("CallView.updateUIView remote")
+        DispatchQueue.main.async {
+            if activeCallViewIsCollapsed != pipShown {
+                enablePip(activeCallViewIsCollapsed)
+            }
+        }
+    }
+
+    // MARK: - Coordinator
+    class Coordinator: NSObject, AVPictureInPictureControllerDelegate {
+        var pipController: AVPictureInPictureController? = nil
+        var willShowHide: (Bool) -> Void = { _ in }
+        var didShowHide: (Bool) -> Void = { _ in }
+
+        func pictureInPictureControllerWillStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
+            willShowHide(true)
+        }
+
+        func pictureInPictureControllerDidStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
+            didShowHide(true)
+        }
+
+        func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
+            logger.error("PiP failed to start: \(error.localizedDescription)")
+        }
+
+        func pictureInPictureControllerWillStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
+            willShowHide(false)
+        }
+
+        func pictureInPictureControllerDidStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
+            didShowHide(false)
+        }
+
+        deinit {
+            pipController?.stopPictureInPicture()
+            pipController?.canStartPictureInPictureAutomaticallyFromInline = false
+            pipController?.contentSource = nil
+            pipController?.delegate = nil
+            pipController = nil
+        }
+    }
+
+    class SampleBufferVideoCallView: UIView {
+        override class var layerClass: AnyClass {
+            get { return AVSampleBufferDisplayLayer.self }
+        }
+
+        var sampleBufferDisplayLayer: AVSampleBufferDisplayLayer {
+            return layer as! AVSampleBufferDisplayLayer
+        }
     }
 }
 
@@ -36,11 +150,14 @@ struct CallViewLocal: UIViewRepresentable {
     var client: WebRTCClient
     var activeCall: Binding<WebRTCClient.Call?>
     var localRendererAspectRatio: Binding<CGFloat?>
+    @State var pipStateChanged: (Bool) -> Void = {_ in }
+    @Binding var pipShown: Bool
 
-    init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>, localRendererAspectRatio: Binding<CGFloat?>) {
+    init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>, localRendererAspectRatio: Binding<CGFloat?>, pipShown: Binding<Bool>) {
         self.client = client
         self.activeCall = activeCall
         self.localRendererAspectRatio = localRendererAspectRatio
+        self._pipShown = pipShown
     }
 
     func makeUIView(context: Context) -> UIView {
@@ -50,12 +167,18 @@ struct CallViewLocal: UIViewRepresentable {
             client.addLocalRenderer(call, localRenderer)
             client.startCaptureLocalVideo(call)
             addSubviewAndResize(localRenderer, into: view)
+            DispatchQueue.main.async {
+                pipStateChanged = { shown in
+                    localRenderer.isHidden = shown
+                }
+            }
         }
         return view
     }
 
     func updateUIView(_ view: UIView, context: Context) {
         logger.debug("CallView.updateUIView local")
+        pipStateChanged(pipShown)
     }
 }
 
diff --git a/apps/ios/Shared/Views/Call/WebRTC.swift b/apps/ios/Shared/Views/Call/WebRTC.swift
index c21ef5019..919b1e14e 100644
--- a/apps/ios/Shared/Views/Call/WebRTC.swift
+++ b/apps/ios/Shared/Views/Call/WebRTC.swift
@@ -28,6 +28,7 @@ class Call: ObservableObject, Equatable {
     @Published var speakerEnabled = false
     @Published var videoEnabled: Bool
     @Published var connectionInfo: ConnectionInfo?
+    @Published var connectedAt: Date? = nil
 
     init(
         direction: CallDirection,
@@ -59,6 +60,7 @@ class Call: ObservableObject, Equatable {
         }
     }
     var hasMedia: Bool { get { callState == .offerSent || callState == .negotiated || callState == .connected } }
+    var supportsVideo: Bool { get { peerMedia == .video || localMedia == .video } }
 }
 
 enum CallDirection {
diff --git a/apps/ios/Shared/Views/Call/WebRTCClient.swift b/apps/ios/Shared/Views/Call/WebRTCClient.swift
index 933a3c745..1806984d6 100644
--- a/apps/ios/Shared/Views/Call/WebRTCClient.swift
+++ b/apps/ios/Shared/Views/Call/WebRTCClient.swift
@@ -331,6 +331,10 @@ final class WebRTCClient: NSObject, RTCVideoViewDelegate, RTCFrameEncryptorDeleg
         activeCall.remoteStream?.add(renderer)
     }
 
+    func removeRemoteRenderer(_ activeCall: Call, _ renderer: RTCVideoRenderer) {
+        activeCall.remoteStream?.remove(renderer)
+    }
+
     func startCaptureLocalVideo(_ activeCall: Call) {
         #if targetEnvironment(simulator)
         guard
@@ -410,6 +414,7 @@ final class WebRTCClient: NSObject, RTCVideoViewDelegate, RTCFrameEncryptorDeleg
         guard let call = activeCall.wrappedValue else { return }
         logger.debug("WebRTCClient: ending the call")
         activeCall.wrappedValue = nil
+        (call.localCamera as? RTCCameraVideoCapturer)?.stopCapture()
         call.connection.close()
         call.connection.delegate = nil
         call.frameEncryptor?.delegate = nil
diff --git a/apps/ios/Shared/Views/Chat/ChatItem/CIImageView.swift b/apps/ios/Shared/Views/Chat/ChatItem/CIImageView.swift
index c7e89fc5e..c3e4805bf 100644
--- a/apps/ios/Shared/Views/Chat/ChatItem/CIImageView.swift
+++ b/apps/ios/Shared/Views/Chat/ChatItem/CIImageView.swift
@@ -29,6 +29,9 @@ struct CIImageView: View {
                     FullScreenMediaView(chatItem: chatItem, image: uiImage, showView: $showFullScreenImage, scrollProxy: scrollProxy)
                 }
                 .onTapGesture { showFullScreenImage = true }
+                .onChange(of: m.activeCallViewIsCollapsed) { _ in
+                    showFullScreenImage = false
+                }
             } else if let data = Data(base64Encoded: dropImagePrefix(image)),
                       let uiImage = UIImage(data: data) {
                 imageView(uiImage)
diff --git a/apps/ios/Shared/Views/Chat/ChatItem/CIVideoView.swift b/apps/ios/Shared/Views/Chat/ChatItem/CIVideoView.swift
index a824ddc49..ff208fe58 100644
--- a/apps/ios/Shared/Views/Chat/ChatItem/CIVideoView.swift
+++ b/apps/ios/Shared/Views/Chat/ChatItem/CIVideoView.swift
@@ -120,6 +120,9 @@ struct CIVideoView: View {
                     showFullScreenPlayer = urlDecrypted != nil
                 }
             }
+            .onChange(of: m.activeCallViewIsCollapsed) { _ in
+                showFullScreenPlayer = false
+            }
             if !decryptionInProgress {
                 Button {
                     decrypt(file: file) {
@@ -168,6 +171,9 @@ struct CIVideoView: View {
                 default: ()
                 }
             }
+            .onChange(of: m.activeCallViewIsCollapsed) { _ in
+                showFullScreenPlayer = false
+            }
             if !videoPlaying {
                 Button {
                     m.stopPreviousRecPlay = url
diff --git a/apps/ios/Shared/Views/Chat/ChatView.swift b/apps/ios/Shared/Views/Chat/ChatView.swift
index 35caf655e..550a9a45b 100644
--- a/apps/ios/Shared/Views/Chat/ChatView.swift
+++ b/apps/ios/Shared/Views/Chat/ChatView.swift
@@ -161,11 +161,15 @@ struct ChatView: View {
                 HStack {
                     let callsPrefEnabled = contact.mergedPreferences.calls.enabled.forUser
                     if callsPrefEnabled {
-                        callButton(contact, .audio, imageName: "phone")
-                            .disabled(!contact.ready || !contact.active)
+                        if chatModel.activeCall == nil {
+                            callButton(contact, .audio, imageName: "phone")
+                                .disabled(!contact.ready || !contact.active)
+                        } else if let call = chatModel.activeCall, call.contact.id == cInfo.id {
+                            endCallButton(call)
+                        }
                     }
                     Menu {
-                        if callsPrefEnabled {
+                        if callsPrefEnabled && chatModel.activeCall == nil {
                             Button {
                                 CallController.shared.startCall(contact, .video)
                             } label: {
@@ -422,7 +426,19 @@ struct ChatView: View {
             Image(systemName: imageName)
         }
     }
-    
+
+    private func endCallButton(_ call: Call) -> some View {
+        Button {
+            if let uuid = call.callkitUUID {
+                CallController.shared.endCall(callUUID: uuid)
+            } else {
+                CallController.shared.endCall(call: call) {}
+            }
+        } label: {
+            Image(systemName: "phone.down.fill").tint(.red)
+        }
+    }
+
     private func searchButton() -> some View {
         Button {
             searchMode = true