ios: Picture-in-picture while in calls (#3792)

* ios: Picture-in-picture while in calls

* simplify

* improvements

* back button and lots of small issues

* layout

* padding

* back button

* animation, padding, fullscreen

* end active call button

* removed unused code

* unused line

* transition

* better

* better

* deinit PiP controller

* stop camera after call end

* formatting

* stop capture if active

---------

Co-authored-by: Avently <avently@local>
Co-authored-by: Evgeny Poberezkin <evgeny@poberezkin.com>
This commit is contained in:
Stanislav Dmitrenko 2024-02-13 22:04:42 +07:00 committed by GitHub
parent 4260c20012
commit edc5a4c31b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 309 additions and 43 deletions

View File

@ -34,6 +34,8 @@ struct ContentView: View {
@State private var waitingForOrPassedAuth = true @State private var waitingForOrPassedAuth = true
@State private var chatListActionSheet: ChatListActionSheet? = nil @State private var chatListActionSheet: ChatListActionSheet? = nil
private let callTopPadding: CGFloat = 50
private enum ChatListActionSheet: Identifiable { private enum ChatListActionSheet: Identifiable {
case planAndConnectSheet(sheet: PlanAndConnectActionSheet) case planAndConnectSheet(sheet: PlanAndConnectActionSheet)
@ -50,16 +52,28 @@ struct ContentView: View {
var body: some View { var body: some View {
ZStack { ZStack {
let showCallArea = chatModel.activeCall != nil && chatModel.activeCall?.callState != .waitCapabilities && chatModel.activeCall?.callState != .invitationAccepted
// contentView() has to be in a single branch, so that enabling authentication doesn't trigger re-rendering and close settings. // contentView() has to be in a single branch, so that enabling authentication doesn't trigger re-rendering and close settings.
// i.e. with separate branches like this settings are closed: `if prefPerformLA { ... contentView() ... } else { contentView() } // i.e. with separate branches like this settings are closed: `if prefPerformLA { ... contentView() ... } else { contentView() }
if !prefPerformLA || accessAuthenticated { if !prefPerformLA || accessAuthenticated {
contentView() contentView()
.padding(.top, showCallArea ? callTopPadding : 0)
} else { } else {
lockButton() lockButton()
.padding(.top, showCallArea ? callTopPadding : 0)
} }
if showCallArea, let call = chatModel.activeCall {
VStack {
activeCallInteractiveArea(call)
Spacer()
}
}
if chatModel.showCallView, let call = chatModel.activeCall { if chatModel.showCallView, let call = chatModel.activeCall {
callView(call) callView(call)
} }
if !showSettings, let la = chatModel.laRequest { if !showSettings, let la = chatModel.laRequest {
LocalAuthView(authRequest: la) LocalAuthView(authRequest: la)
.onDisappear { .onDisappear {
@ -135,11 +149,11 @@ struct ContentView: View {
if case .onboardingComplete = step, if case .onboardingComplete = step,
chatModel.currentUser != nil { chatModel.currentUser != nil {
mainView() mainView()
.actionSheet(item: $chatListActionSheet) { sheet in .actionSheet(item: $chatListActionSheet) { sheet in
switch sheet { switch sheet {
case let .planAndConnectSheet(sheet): return planAndConnectActionSheet(sheet, dismiss: false) case let .planAndConnectSheet(sheet): return planAndConnectActionSheet(sheet, dismiss: false)
}
} }
}
} else { } else {
OnboardingView(onboarding: step) OnboardingView(onboarding: step)
} }
@ -163,6 +177,40 @@ struct ContentView: View {
} }
} }
// Green banner shown at the top of the screen while a call is active but its
// full-screen view is collapsed: contact name on the left, live duration on
// the right. Tapping it re-expands the call view.
@ViewBuilder private func activeCallInteractiveArea(_ call: Call) -> some View {
HStack {
Text(call.contact.displayName).font(.body).foregroundColor(.white)
Spacer()
CallDuration(call: call)
}
.padding(.horizontal)
// 10pt shorter than the top padding reserved for it, leaving a visual gap.
.frame(height: callTopPadding - 10)
// Hard-coded green — presumably mimics the system in-call indicator color.
.background(Color(uiColor: UIColor(red: 47/255, green: 208/255, blue: 88/255, alpha: 1)))
.onTapGesture {
// Expand the collapsed call view back to full screen.
chatModel.activeCallViewIsCollapsed = false
}
}
// Live call-duration label that refreshes on a repeating timer while visible.
struct CallDuration: View {
let call: Call
// Formatted elapsed time (e.g. "1:23"); empty until the call reports connectedAt.
@State var text: String = ""
// Repeating UI-refresh timer; active only while the view is on screen.
@State var timer: Timer? = nil
var body: some View {
// Fixed min width (wider once the text outgrows "mm:ss") keeps the
// label from jittering as digits change.
Text(text).frame(minWidth: text.count <= 5 ? 52 : 77, alignment: .leading).offset(x: 4).font(.body).foregroundColor(.white)
.onAppear {
// Invalidate any previous timer first: onAppear can fire again
// without a matching onDisappear, which would leak a duplicate
// repeating timer.
timer?.invalidate()
// 0.3s tick keeps the seconds display fresh without visible drift.
timer = Timer.scheduledTimer(withTimeInterval: 0.3, repeats: true) { _ in
if let connectedAt = call.connectedAt {
text = durationText(Int(Date.now.timeIntervalSince1970 - connectedAt.timeIntervalSince1970))
}
}
}
.onDisappear {
// Stop ticking when off screen and drop the reference so a stale
// (invalidated) timer is never reused.
timer?.invalidate()
timer = nil
}
}
}
private func lockButton() -> some View { private func lockButton() -> some View {
Button(action: authenticateContentViewAccess) { Label("Unlock", systemImage: "lock") } Button(action: authenticateContentViewAccess) { Label("Unlock", systemImage: "lock") }
} }

View File

@ -90,6 +90,7 @@ final class ChatModel: ObservableObject {
@Published var activeCall: Call? @Published var activeCall: Call?
let callCommand: WebRTCCommandProcessor = WebRTCCommandProcessor() let callCommand: WebRTCCommandProcessor = WebRTCCommandProcessor()
@Published var showCallView = false @Published var showCallView = false
@Published var activeCallViewIsCollapsed = false
// remote desktop // remote desktop
@Published var remoteCtrlSession: RemoteCtrlSession? @Published var remoteCtrlSession: RemoteCtrlSession?
// currently showing invitation // currently showing invitation

View File

@ -12,49 +12,67 @@ import SimpleXChat
struct ActiveCallView: View { struct ActiveCallView: View {
@EnvironmentObject var m: ChatModel @EnvironmentObject var m: ChatModel
@Environment(\.colorScheme) var colorScheme
@ObservedObject var call: Call @ObservedObject var call: Call
@Environment(\.scenePhase) var scenePhase @Environment(\.scenePhase) var scenePhase
@State private var client: WebRTCClient? = nil @State private var client: WebRTCClient? = nil
@State private var activeCall: WebRTCClient.Call? = nil @State private var activeCall: WebRTCClient.Call? = nil
@State private var localRendererAspectRatio: CGFloat? = nil @State private var localRendererAspectRatio: CGFloat? = nil
@Binding var canConnectCall: Bool @Binding var canConnectCall: Bool
@State var prevColorScheme: ColorScheme = .dark
@State var pipShown = false
var body: some View { var body: some View {
ZStack(alignment: .bottom) { ZStack(alignment: .topLeading) {
if let client = client, [call.peerMedia, call.localMedia].contains(.video), activeCall != nil { ZStack(alignment: .bottom) {
GeometryReader { g in if let client = client, [call.peerMedia, call.localMedia].contains(.video), activeCall != nil {
let width = g.size.width * 0.3 GeometryReader { g in
ZStack(alignment: .topTrailing) { let width = g.size.width * 0.3
CallViewRemote(client: client, activeCall: $activeCall) ZStack(alignment: .topTrailing) {
CallViewLocal(client: client, activeCall: $activeCall, localRendererAspectRatio: $localRendererAspectRatio) CallViewRemote(client: client, activeCall: $activeCall, activeCallViewIsCollapsed: $m.activeCallViewIsCollapsed, pipShown: $pipShown)
.cornerRadius(10) CallViewLocal(client: client, activeCall: $activeCall, localRendererAspectRatio: $localRendererAspectRatio, pipShown: $pipShown)
.frame(width: width, height: width / (localRendererAspectRatio ?? 1)) .cornerRadius(10)
.padding([.top, .trailing], 17) .frame(width: width, height: width / (localRendererAspectRatio ?? 1))
.padding([.top, .trailing], 17)
ZStack(alignment: .center) {
// For some reason, when the view in GeometryReader and ZStack is visible, it steals clicks on a back button, so showing something on top like this with background color helps (.clear color doesn't work)
}
.frame(maxWidth: .infinity, maxHeight: .infinity)
.background(Color.primary.opacity(0.000001))
}
} }
} }
} if let call = m.activeCall, let client = client, (!pipShown || !call.supportsVideo) {
if let call = m.activeCall, let client = client { ActiveCallOverlay(call: call, client: client)
ActiveCallOverlay(call: call, client: client) }
} }
} }
.allowsHitTesting(!m.activeCallViewIsCollapsed)
.opacity(m.activeCallViewIsCollapsed ? 0 : 1)
.onAppear { .onAppear {
logger.debug("ActiveCallView: appear client is nil \(client == nil), scenePhase \(String(describing: scenePhase)), canConnectCall \(canConnectCall)") logger.debug("ActiveCallView: appear client is nil \(client == nil), scenePhase \(String(describing: scenePhase)), canConnectCall \(canConnectCall)")
AppDelegate.keepScreenOn(true) AppDelegate.keepScreenOn(true)
createWebRTCClient() createWebRTCClient()
dismissAllSheets() dismissAllSheets()
hideKeyboard()
prevColorScheme = colorScheme
} }
.onChange(of: canConnectCall) { _ in .onChange(of: canConnectCall) { _ in
logger.debug("ActiveCallView: canConnectCall changed to \(canConnectCall)") logger.debug("ActiveCallView: canConnectCall changed to \(canConnectCall)")
createWebRTCClient() createWebRTCClient()
} }
.onChange(of: m.activeCallViewIsCollapsed) { _ in
hideKeyboard()
}
.onDisappear { .onDisappear {
logger.debug("ActiveCallView: disappear") logger.debug("ActiveCallView: disappear")
Task { await m.callCommand.setClient(nil) } Task { await m.callCommand.setClient(nil) }
AppDelegate.keepScreenOn(false) AppDelegate.keepScreenOn(false)
client?.endCall() client?.endCall()
} }
.background(.black) .background(m.activeCallViewIsCollapsed ? .clear : .black)
.preferredColorScheme(.dark) // Quite a big delay when opening/closing the view when a scheme changes (globally) this way. It's not needed when CallKit is used since status bar is green with white text on it
.preferredColorScheme(m.activeCallViewIsCollapsed || CallController.useCallKit() ? prevColorScheme : .dark)
} }
private func createWebRTCClient() { private func createWebRTCClient() {
@ -69,8 +87,8 @@ struct ActiveCallView: View {
@MainActor @MainActor
private func processRtcMessage(msg: WVAPIMessage) { private func processRtcMessage(msg: WVAPIMessage) {
if call == m.activeCall, if call == m.activeCall,
let call = m.activeCall, let call = m.activeCall,
let client = client { let client = client {
logger.debug("ActiveCallView: response \(msg.resp.respType)") logger.debug("ActiveCallView: response \(msg.resp.respType)")
switch msg.resp { switch msg.resp {
case let .capabilities(capabilities): case let .capabilities(capabilities):
@ -90,7 +108,7 @@ struct ActiveCallView: View {
Task { Task {
do { do {
try await apiSendCallOffer(call.contact, offer, iceCandidates, try await apiSendCallOffer(call.contact, offer, iceCandidates,
media: call.localMedia, capabilities: capabilities) media: call.localMedia, capabilities: capabilities)
} catch { } catch {
logger.error("apiSendCallOffer \(responseError(error))") logger.error("apiSendCallOffer \(responseError(error))")
} }
@ -122,13 +140,15 @@ struct ActiveCallView: View {
if let callStatus = WebRTCCallStatus.init(rawValue: state.connectionState), if let callStatus = WebRTCCallStatus.init(rawValue: state.connectionState),
case .connected = callStatus { case .connected = callStatus {
call.direction == .outgoing call.direction == .outgoing
? CallController.shared.reportOutgoingCall(call: call, connectedAt: nil) ? CallController.shared.reportOutgoingCall(call: call, connectedAt: nil)
: CallController.shared.reportIncomingCall(call: call, connectedAt: nil) : CallController.shared.reportIncomingCall(call: call, connectedAt: nil)
call.callState = .connected call.callState = .connected
call.connectedAt = .now
} }
if state.connectionState == "closed" { if state.connectionState == "closed" {
closeCallView(client) closeCallView(client)
m.activeCall = nil m.activeCall = nil
m.activeCallViewIsCollapsed = false
} }
Task { Task {
do { do {
@ -140,6 +160,7 @@ struct ActiveCallView: View {
case let .connected(connectionInfo): case let .connected(connectionInfo):
call.callState = .connected call.callState = .connected
call.connectionInfo = connectionInfo call.connectionInfo = connectionInfo
call.connectedAt = .now
case .ended: case .ended:
closeCallView(client) closeCallView(client)
call.callState = .ended call.callState = .ended
@ -153,6 +174,7 @@ struct ActiveCallView: View {
case .end: case .end:
closeCallView(client) closeCallView(client)
m.activeCall = nil m.activeCall = nil
m.activeCallViewIsCollapsed = false
default: () default: ()
} }
case let .error(message): case let .error(message):
@ -181,7 +203,7 @@ struct ActiveCallOverlay: View {
VStack { VStack {
switch call.localMedia { switch call.localMedia {
case .video: case .video:
callInfoView(call, .leading) videoCallInfoView(call)
.foregroundColor(.white) .foregroundColor(.white)
.opacity(0.8) .opacity(0.8)
.padding() .padding()
@ -208,16 +230,25 @@ struct ActiveCallOverlay: View {
.frame(maxWidth: .infinity, alignment: .center) .frame(maxWidth: .infinity, alignment: .center)
case .audio: case .audio:
VStack { ZStack(alignment: .topLeading) {
ProfileImage(imageStr: call.contact.profile.image) Button {
.scaledToFit() chatModel.activeCallViewIsCollapsed = true
.frame(width: 192, height: 192) } label: {
callInfoView(call, .center) Label("Back", systemImage: "chevron.left")
.padding()
.foregroundColor(.white.opacity(0.8))
}
VStack {
ProfileImage(imageStr: call.contact.profile.image)
.scaledToFit()
.frame(width: 192, height: 192)
audioCallInfoView(call)
}
.foregroundColor(.white)
.opacity(0.8)
.padding()
.frame(maxHeight: .infinity)
} }
.foregroundColor(.white)
.opacity(0.8)
.padding()
.frame(maxHeight: .infinity)
Spacer() Spacer()
@ -235,12 +266,12 @@ struct ActiveCallOverlay: View {
.frame(maxWidth: .infinity) .frame(maxWidth: .infinity)
} }
private func callInfoView(_ call: Call, _ alignment: Alignment) -> some View { private func audioCallInfoView(_ call: Call) -> some View {
VStack { VStack {
Text(call.contact.chatViewName) Text(call.contact.chatViewName)
.lineLimit(1) .lineLimit(1)
.font(.title) .font(.title)
.frame(maxWidth: .infinity, alignment: alignment) .frame(maxWidth: .infinity, alignment: .center)
Group { Group {
Text(call.callState.text) Text(call.callState.text)
HStack { HStack {
@ -251,7 +282,36 @@ struct ActiveCallOverlay: View {
} }
} }
.font(.subheadline) .font(.subheadline)
.frame(maxWidth: .infinity, alignment: alignment) .frame(maxWidth: .infinity, alignment: .center)
}
}
private func videoCallInfoView(_ call: Call) -> some View {
VStack {
Button {
chatModel.activeCallViewIsCollapsed = true
} label: {
HStack(alignment: .center, spacing: 16) {
Image(systemName: "chevron.left")
.resizable()
.frame(width: 10, height: 18)
Text(call.contact.chatViewName)
.lineLimit(1)
.font(.title)
.frame(maxWidth: .infinity, alignment: .leading)
}
}
Group {
Text(call.callState.text)
HStack {
Text(call.encryptionStatus)
if let connInfo = call.connectionInfo {
Text("(") + Text(connInfo.text) + Text(")")
}
}
}
.font(.subheadline)
.frame(maxWidth: .infinity, alignment: .leading)
} }
} }

View File

@ -92,6 +92,7 @@ class CallManager {
if case .ended = call.callState { if case .ended = call.callState {
logger.debug("CallManager.endCall: call ended") logger.debug("CallManager.endCall: call ended")
m.activeCall = nil m.activeCall = nil
m.activeCallViewIsCollapsed = false
m.showCallView = false m.showCallView = false
completed() completed()
} else { } else {
@ -100,6 +101,7 @@ class CallManager {
await m.callCommand.processCommand(.end) await m.callCommand.processCommand(.end)
await MainActor.run { await MainActor.run {
m.activeCall = nil m.activeCall = nil
m.activeCallViewIsCollapsed = false
m.showCallView = false m.showCallView = false
completed() completed()
} }

View File

@ -6,14 +6,20 @@
import SwiftUI import SwiftUI
import WebRTC import WebRTC
import SimpleXChat import SimpleXChat
import AVKit
struct CallViewRemote: UIViewRepresentable { struct CallViewRemote: UIViewRepresentable {
var client: WebRTCClient var client: WebRTCClient
var activeCall: Binding<WebRTCClient.Call?> var activeCall: Binding<WebRTCClient.Call?>
@State var enablePip: (Bool) -> Void = {_ in }
@Binding var activeCallViewIsCollapsed: Bool
@Binding var pipShown: Bool
init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>) { init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>, activeCallViewIsCollapsed: Binding<Bool>, pipShown: Binding<Bool>) {
self.client = client self.client = client
self.activeCall = activeCall self.activeCall = activeCall
self._activeCallViewIsCollapsed = activeCallViewIsCollapsed
self._pipShown = pipShown
} }
func makeUIView(context: Context) -> UIView { func makeUIView(context: Context) -> UIView {
@ -23,12 +29,120 @@ struct CallViewRemote: UIViewRepresentable {
remoteRenderer.videoContentMode = .scaleAspectFill remoteRenderer.videoContentMode = .scaleAspectFill
client.addRemoteRenderer(call, remoteRenderer) client.addRemoteRenderer(call, remoteRenderer)
addSubviewAndResize(remoteRenderer, into: view) addSubviewAndResize(remoteRenderer, into: view)
if AVPictureInPictureController.isPictureInPictureSupported() {
makeViewWithRTCRenderer(call, remoteRenderer, view, context)
}
} }
return view return view
} }
// Wires up system Picture-in-Picture for the remote video: builds a second
// RTC renderer hosted in an AVPictureInPictureVideoCallViewController and
// installs the closures the Coordinator invokes around PiP start/stop.
func makeViewWithRTCRenderer(_ call: WebRTCClient.Call, _ remoteRenderer: RTCMTLVideoView, _ view: UIView, _ context: Context) {
let pipRemoteRenderer = RTCMTLVideoView(frame: view.frame)
pipRemoteRenderer.videoContentMode = .scaleAspectFill
let pipVideoCallViewController = AVPictureInPictureVideoCallViewController()
// Portrait size hint for the PiP window — presumably matches typical
// portrait camera output; confirm against actual capture settings.
pipVideoCallViewController.preferredContentSize = CGSize(width: 1080, height: 1920)
addSubviewAndResize(pipRemoteRenderer, into: pipVideoCallViewController.view)
let pipContentSource = AVPictureInPictureController.ContentSource(
activeVideoCallSourceView: view,
contentViewController: pipVideoCallViewController
)
let pipController = AVPictureInPictureController(contentSource: pipContentSource)
// Allow the system to enter PiP automatically when the app backgrounds.
pipController.canStartPictureInPictureAutomaticallyFromInline = true
pipController.delegate = context.coordinator
context.coordinator.pipController = pipController
context.coordinator.willShowHide = { show in
if show {
// Also feed remote frames to the PiP renderer; the in-app renderer
// stays attached until didShowHide hides it.
client.addRemoteRenderer(call, pipRemoteRenderer)
// NOTE(review): delays look empirical — presumably to let the PiP
// transition begin before toggling the collapsed state; confirm.
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
activeCallViewIsCollapsed = true
}
} else {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
activeCallViewIsCollapsed = false
}
}
}
context.coordinator.didShowHide = { show in
if show {
// PiP now owns rendering; hide the in-app renderer.
remoteRenderer.isHidden = true
} else {
// PiP ended: stop feeding the PiP renderer and restore the in-app one.
client.removeRemoteRenderer(call, pipRemoteRenderer)
remoteRenderer.isHidden = false
}
pipShown = show
}
// Deferred so the @State `enablePip` assignment happens outside the current
// view update pass.
DispatchQueue.main.async {
enablePip = { enable in
if enable != pipShown /* pipController.isPictureInPictureActive */ {
if enable {
pipController.startPictureInPicture()
} else {
pipController.stopPictureInPicture()
}
}
}
}
}
// Creates the delegate object that receives AVKit PiP lifecycle callbacks.
func makeCoordinator() -> Coordinator {
Coordinator()
}
func updateUIView(_ view: UIView, context: Context) { func updateUIView(_ view: UIView, context: Context) {
logger.debug("CallView.updateUIView remote") logger.debug("CallView.updateUIView remote")
DispatchQueue.main.async {
if activeCallViewIsCollapsed != pipShown {
enablePip(activeCallViewIsCollapsed)
}
}
}
// MARK: - Coordinator
// Bridges AVPictureInPictureControllerDelegate callbacks to the SwiftUI view:
// the view injects `willShowHide`/`didShowHide` closures that swap renderers
// and collapse/expand the call UI around PiP transitions.
class Coordinator: NSObject, AVPictureInPictureControllerDelegate {
// Retained so deinit can tear PiP down when the view goes away.
var pipController: AVPictureInPictureController? = nil
// Invoked just before PiP starts (true) or stops (false).
var willShowHide: (Bool) -> Void = { _ in }
// Invoked after PiP has started (true) or stopped (false).
var didShowHide: (Bool) -> Void = { _ in }
func pictureInPictureControllerWillStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
willShowHide(true)
}
func pictureInPictureControllerDidStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
didShowHide(true)
}
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
// Log only; the call view simply stays full-screen if PiP cannot start.
logger.error("PiP failed to start: \(error.localizedDescription)")
}
func pictureInPictureControllerWillStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
willShowHide(false)
}
func pictureInPictureControllerDidStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
didShowHide(false)
}
deinit {
// Stop PiP and break the retain/delegate links so the controller cannot
// fire callbacks into a deallocated coordinator.
pipController?.stopPictureInPicture()
pipController?.canStartPictureInPictureAutomaticallyFromInline = false
pipController?.contentSource = nil
pipController?.delegate = nil
pipController = nil
}
}
// UIView whose backing layer is an AVSampleBufferDisplayLayer, for rendering
// raw video sample buffers. NOTE(review): not referenced anywhere in the
// visible code — confirm it is still needed.
class SampleBufferVideoCallView: UIView {
override class var layerClass: AnyClass {
get { return AVSampleBufferDisplayLayer.self }
}
// Typed accessor for the backing layer; the force-cast is safe because
// layerClass guarantees the layer's concrete type.
var sampleBufferDisplayLayer: AVSampleBufferDisplayLayer {
return layer as! AVSampleBufferDisplayLayer
}
} }
} }
@ -36,11 +150,14 @@ struct CallViewLocal: UIViewRepresentable {
var client: WebRTCClient var client: WebRTCClient
var activeCall: Binding<WebRTCClient.Call?> var activeCall: Binding<WebRTCClient.Call?>
var localRendererAspectRatio: Binding<CGFloat?> var localRendererAspectRatio: Binding<CGFloat?>
@State var pipStateChanged: (Bool) -> Void = {_ in }
@Binding var pipShown: Bool
init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>, localRendererAspectRatio: Binding<CGFloat?>) { init(client: WebRTCClient, activeCall: Binding<WebRTCClient.Call?>, localRendererAspectRatio: Binding<CGFloat?>, pipShown: Binding<Bool>) {
self.client = client self.client = client
self.activeCall = activeCall self.activeCall = activeCall
self.localRendererAspectRatio = localRendererAspectRatio self.localRendererAspectRatio = localRendererAspectRatio
self._pipShown = pipShown
} }
func makeUIView(context: Context) -> UIView { func makeUIView(context: Context) -> UIView {
@ -50,12 +167,18 @@ struct CallViewLocal: UIViewRepresentable {
client.addLocalRenderer(call, localRenderer) client.addLocalRenderer(call, localRenderer)
client.startCaptureLocalVideo(call) client.startCaptureLocalVideo(call)
addSubviewAndResize(localRenderer, into: view) addSubviewAndResize(localRenderer, into: view)
DispatchQueue.main.async {
pipStateChanged = { shown in
localRenderer.isHidden = shown
}
}
} }
return view return view
} }
func updateUIView(_ view: UIView, context: Context) { func updateUIView(_ view: UIView, context: Context) {
logger.debug("CallView.updateUIView local") logger.debug("CallView.updateUIView local")
pipStateChanged(pipShown)
} }
} }

View File

@ -28,6 +28,7 @@ class Call: ObservableObject, Equatable {
@Published var speakerEnabled = false @Published var speakerEnabled = false
@Published var videoEnabled: Bool @Published var videoEnabled: Bool
@Published var connectionInfo: ConnectionInfo? @Published var connectionInfo: ConnectionInfo?
@Published var connectedAt: Date? = nil
init( init(
direction: CallDirection, direction: CallDirection,
@ -59,6 +60,7 @@ class Call: ObservableObject, Equatable {
} }
} }
var hasMedia: Bool { get { callState == .offerSent || callState == .negotiated || callState == .connected } } var hasMedia: Bool { get { callState == .offerSent || callState == .negotiated || callState == .connected } }
var supportsVideo: Bool { get { peerMedia == .video || localMedia == .video } }
} }
enum CallDirection { enum CallDirection {

View File

@ -331,6 +331,10 @@ final class WebRTCClient: NSObject, RTCVideoViewDelegate, RTCFrameEncryptorDeleg
activeCall.remoteStream?.add(renderer) activeCall.remoteStream?.add(renderer)
} }
// Detaches a renderer previously attached with addRemoteRenderer, so the
// remote video stream stops delivering frames to it (used when PiP ends).
func removeRemoteRenderer(_ activeCall: Call, _ renderer: RTCVideoRenderer) {
activeCall.remoteStream?.remove(renderer)
}
func startCaptureLocalVideo(_ activeCall: Call) { func startCaptureLocalVideo(_ activeCall: Call) {
#if targetEnvironment(simulator) #if targetEnvironment(simulator)
guard guard
@ -410,6 +414,7 @@ final class WebRTCClient: NSObject, RTCVideoViewDelegate, RTCFrameEncryptorDeleg
guard let call = activeCall.wrappedValue else { return } guard let call = activeCall.wrappedValue else { return }
logger.debug("WebRTCClient: ending the call") logger.debug("WebRTCClient: ending the call")
activeCall.wrappedValue = nil activeCall.wrappedValue = nil
(call.localCamera as? RTCCameraVideoCapturer)?.stopCapture()
call.connection.close() call.connection.close()
call.connection.delegate = nil call.connection.delegate = nil
call.frameEncryptor?.delegate = nil call.frameEncryptor?.delegate = nil

View File

@ -29,6 +29,9 @@ struct CIImageView: View {
FullScreenMediaView(chatItem: chatItem, image: uiImage, showView: $showFullScreenImage, scrollProxy: scrollProxy) FullScreenMediaView(chatItem: chatItem, image: uiImage, showView: $showFullScreenImage, scrollProxy: scrollProxy)
} }
.onTapGesture { showFullScreenImage = true } .onTapGesture { showFullScreenImage = true }
.onChange(of: m.activeCallViewIsCollapsed) { _ in
showFullScreenImage = false
}
} else if let data = Data(base64Encoded: dropImagePrefix(image)), } else if let data = Data(base64Encoded: dropImagePrefix(image)),
let uiImage = UIImage(data: data) { let uiImage = UIImage(data: data) {
imageView(uiImage) imageView(uiImage)

View File

@ -120,6 +120,9 @@ struct CIVideoView: View {
showFullScreenPlayer = urlDecrypted != nil showFullScreenPlayer = urlDecrypted != nil
} }
} }
.onChange(of: m.activeCallViewIsCollapsed) { _ in
showFullScreenPlayer = false
}
if !decryptionInProgress { if !decryptionInProgress {
Button { Button {
decrypt(file: file) { decrypt(file: file) {
@ -168,6 +171,9 @@ struct CIVideoView: View {
default: () default: ()
} }
} }
.onChange(of: m.activeCallViewIsCollapsed) { _ in
showFullScreenPlayer = false
}
if !videoPlaying { if !videoPlaying {
Button { Button {
m.stopPreviousRecPlay = url m.stopPreviousRecPlay = url

View File

@ -161,11 +161,15 @@ struct ChatView: View {
HStack { HStack {
let callsPrefEnabled = contact.mergedPreferences.calls.enabled.forUser let callsPrefEnabled = contact.mergedPreferences.calls.enabled.forUser
if callsPrefEnabled { if callsPrefEnabled {
callButton(contact, .audio, imageName: "phone") if chatModel.activeCall == nil {
.disabled(!contact.ready || !contact.active) callButton(contact, .audio, imageName: "phone")
.disabled(!contact.ready || !contact.active)
} else if let call = chatModel.activeCall, call.contact.id == cInfo.id {
endCallButton(call)
}
} }
Menu { Menu {
if callsPrefEnabled { if callsPrefEnabled && chatModel.activeCall == nil {
Button { Button {
CallController.shared.startCall(contact, .video) CallController.shared.startCall(contact, .video)
} label: { } label: {
@ -422,7 +426,19 @@ struct ChatView: View {
Image(systemName: imageName) Image(systemName: imageName)
} }
} }
// Red hang-up button shown in the chat toolbar while a call with this contact
// is active (replaces the start-call button).
private func endCallButton(_ call: Call) -> some View {
Button {
// Prefer ending via CallKit when the call has a CallKit UUID so the
// system call UI stays in sync; otherwise end it directly.
if let uuid = call.callkitUUID {
CallController.shared.endCall(callUUID: uuid)
} else {
CallController.shared.endCall(call: call) {}
}
} label: {
Image(systemName: "phone.down.fill").tint(.red)
}
}
private func searchButton() -> some View { private func searchButton() -> some View {
Button { Button {
searchMode = true searchMode = true