ios: video support (#2115)

* ios: video support

* made video experience prettier

* line reordering

* fix warning

* remove playback speed

* fullscreen player

* removed unused code

* fix conflict

* setting playing status better

* thumbnail dimensions and loading indicator

* fill under video

---------

Co-authored-by: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
This commit is contained in:
Stanislav Dmitrenko
2023-04-06 20:26:48 +03:00
committed by GitHub
parent 1a3f0bed47
commit afb0ae3d03
30 changed files with 760 additions and 118 deletions

View File

@@ -40,7 +40,8 @@ class AudioRecorder {
AVEncoderBitRateKey: 12000,
AVNumberOfChannelsKey: 1
]
audioRecorder = try AVAudioRecorder(url: getAppFilePath(fileName), settings: settings)
let url = getAppFilePath(fileName)
audioRecorder = try AVAudioRecorder(url: url, settings: settings)
audioRecorder?.record(forDuration: MAX_VOICE_MESSAGE_LENGTH)
await MainActor.run {
@@ -102,7 +103,8 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate {
}
func start(fileName: String) {
audioPlayer = try? AVAudioPlayer(contentsOf: getAppFilePath(fileName))
let url = getAppFilePath(fileName)
audioPlayer = try? AVAudioPlayer(contentsOf: url)
audioPlayer?.delegate = self
audioPlayer?.prepareToPlay()
audioPlayer?.play()

View File

@@ -53,13 +53,13 @@ final class ChatModel: ObservableObject {
// currently showing QR code
@Published var connReqInv: String?
// audio recording and playback
@Published var stopPreviousRecPlay: Bool = false // value is not taken into account, only the fact it switches
@Published var stopPreviousRecPlay: URL? = nil // coordinates currently playing source
@Published var draft: ComposeState?
@Published var draftChatId: String?
var messageDelivery: Dictionary<Int64, () -> Void> = [:]
var filesToDelete: [String] = []
var filesToDelete: Set<URL> = []
static let shared = ChatModel()

View File

@@ -9,6 +9,7 @@
import Foundation
import SimpleXChat
import SwiftUI
import AVKit
func getLoadedFilePath(_ file: CIFile?) -> String? {
if let fileName = getLoadedFileName(file) {
@@ -42,6 +43,17 @@ func getLoadedImage(_ file: CIFile?) -> UIImage? {
return nil
}
/// Returns the local URL of a chat item's video file, or nil when the file
/// is not fully loaded or is missing from app storage on disk.
func getLoadedVideo(_ file: CIFile?) -> URL? {
    // The file must both be reported as loaded and actually exist on disk.
    guard getLoadedFilePath(file) != nil, let fileName = file?.filePath else { return nil }
    let url = getAppFilePath(fileName)
    return FileManager.default.fileExists(atPath: url.path) ? url : nil
}
func saveAnimImage(_ image: UIImage) -> String? {
let fileName = generateNewFileName("IMG", "gif")
guard let imageData = image.imageData else { return nil }
@@ -164,6 +176,20 @@ func saveFileFromURL(_ url: URL) -> String? {
return savedFile
}
/// Moves an already-materialized file at `url` into app storage without loading
/// its contents into memory. Returns the stored file name, or nil on failure.
func saveFileFromURLWithoutLoad(_ url: URL) -> String? {
    do {
        let fileName = uniqueCombine(url.lastPathComponent)
        try FileManager.default.moveItem(at: url, to: getAppFilePath(fileName))
        // the file no longer exists at its temporary URL, so drop any pending deletion for it
        ChatModel.shared.filesToDelete.remove(url)
        return fileName
    } catch {
        logger.error("FileUtils.saveFileFromURLWithoutLoad error: \(error.localizedDescription)")
        return nil
    }
}
/// Generates a unique file name of the form "<prefix>_<timestamp>.<ext>",
/// e.g. "IMG_<timestamp>.gif"; uniqueness is delegated to `uniqueCombine`.
func generateNewFileName(_ prefix: String, _ ext: String) -> String {
    uniqueCombine("\(prefix)_\(getTimestamp()).\(ext)")
}
@@ -204,6 +230,18 @@ private func dropPrefix(_ s: String, _ prefix: String) -> String {
s.hasPrefix(prefix) ? String(s.dropFirst(prefix.count)) : s
}
extension AVAsset {
    /// Extracts a thumbnail frame from the start of the asset together with the
    /// asset duration in whole seconds; nil when no frame can be copied.
    func generatePreview() -> (UIImage, Int)? {
        let generator = AVAssetImageGenerator(asset: self)
        // respect the video track's rotation transform so previews are upright
        generator.appliesPreferredTrackTransform = true
        var actualTime = CMTimeMake(value: 0, timescale: 0)
        guard let cgImage = try? generator.copyCGImage(at: CMTimeMakeWithSeconds(0.0, preferredTimescale: 1), actualTime: &actualTime) else {
            return nil
        }
        return (UIImage(cgImage: cgImage), Int(duration.seconds))
    }
}
extension UIImage {
func replaceColor(_ from: UIColor, _ to: UIColor) -> UIImage {
if let cgImage = cgImage {

View File

@@ -24,7 +24,7 @@ struct CIImageView: View {
if let uiImage = getLoadedImage(file) {
imageView(uiImage)
.fullScreenCover(isPresented: $showFullScreenImage) {
FullScreenImageView(chatItem: chatItem, image: uiImage, showView: $showFullScreenImage, scrollProxy: scrollProxy)
FullScreenMediaView(chatItem: chatItem, image: uiImage, showView: $showFullScreenImage, scrollProxy: scrollProxy)
}
.onTapGesture { showFullScreenImage = true }
} else if let data = Data(base64Encoded: dropImagePrefix(image)),

View File

@@ -0,0 +1,334 @@
//
// CIVideoView.swift
// SimpleX
//
// Created by Avently on 30/03/2023.
// Copyright © 2023 SimpleX Chat. All rights reserved.
//
import SwiftUI
import AVKit
import SimpleXChat
/// Chat-item view for a video message: shows the base64 preview image until the
/// video file is loaded, then an inline tap-to-play player with a duration/size
/// badge, a transfer progress indicator and a full-screen playback mode.
struct CIVideoView: View {
    @Environment(\.colorScheme) var colorScheme
    private let chatItem: ChatItem
    // base64-encoded preview image string taken from the message content
    private let image: String
    // duration in seconds; initialized from the message, refined from the asset while playing
    @State private var duration: Int
    // current playback position in seconds, updated by the periodic time observer
    @State private var progress: Int = 0
    @State private var videoPlaying: Bool = false
    private let maxWidth: CGFloat
    // reported back to the parent so the message frame can match the media width
    @Binding private var videoWidth: CGFloat?
    @State private var scrollProxy: ScrollViewProxy?
    @State private var preview: UIImage? = nil
    // inline (non-fullscreen) player; present only when the video file is loaded
    @State private var player: AVPlayer?
    @State private var url: URL?
    @State private var showFullScreenPlayer = false
    // token from AVPlayer.addPeriodicTimeObserver; removed in removeObserver()
    @State private var timeObserver: Any? = nil
    // NotificationCenter token used to loop the full-screen player; removed on dismiss
    @State private var fullScreenTimeObserver: Any? = nil

    init(chatItem: ChatItem, image: String, duration: Int, maxWidth: CGFloat, videoWidth: Binding<CGFloat?>, scrollProxy: ScrollViewProxy?) {
        self.chatItem = chatItem
        self.image = image
        self._duration = State(initialValue: duration)
        self.maxWidth = maxWidth
        self._videoWidth = videoWidth
        self.scrollProxy = scrollProxy
        // if the video is already on disk, reuse/create the shared player for it
        if let url = getLoadedVideo(chatItem.file) {
            self._player = State(initialValue: VideoPlayerView.getOrCreatePlayer(url, false))
            self._url = State(initialValue: url)
        }
        // decode the inline preview so something renders before the file arrives
        if let data = Data(base64Encoded: dropImagePrefix(image)),
           let uiImage = UIImage(data: data) {
            self._preview = State(initialValue: uiImage)
        }
    }

    var body: some View {
        let file = chatItem.file
        ZStack {
            ZStack(alignment: .topLeading) {
                // loaded video: show the inline player
                if let file = file, let preview = preview, let player = player, let url = url {
                    videoView(player, url, file, preview, duration)
                } else if let data = Data(base64Encoded: dropImagePrefix(image)),
                          let uiImage = UIImage(data: data) {
                    // not loaded yet: show the preview image; tapping accepts the file
                    // or explains why it is not available yet
                    imageView(uiImage)
                        .onTapGesture {
                            if let file = file {
                                switch file.fileStatus {
                                case .rcvInvitation:
                                    receiveFileIfValidSize(file: file, receiveFile: receiveFile)
                                case .rcvAccepted:
                                    switch file.fileProtocol {
                                    case .xftp:
                                        AlertManager.shared.showAlertMsg(
                                            title: "Waiting for video",
                                            message: "Video will be received when your contact completes uploading it."
                                        )
                                    case .smp:
                                        AlertManager.shared.showAlertMsg(
                                            title: "Waiting for video",
                                            message: "Video will be received when your contact is online, please wait or check later!"
                                        )
                                    }
                                case .rcvTransfer: () // ? no action while transferring
                                case .rcvComplete: () // ? complete but not yet loaded locally
                                case .rcvCancelled: () // TODO
                                default: ()
                                }
                            }
                        }
                }
                durationProgress()
            }
            // overlay a play button that accepts the file when it is still an invitation
            if let file = file, case .rcvInvitation = file.fileStatus {
                Button {
                    receiveFileIfValidSize(file: file, receiveFile: receiveFile)
                } label: {
                    playPauseIcon("play.fill")
                }
            }
        }
    }

    /// Inline player sized from the preview's aspect ratio: portrait media is
    /// narrowed to 75% of maxWidth. Tap pauses when playing, or opens full screen
    /// when paused; a centered play button starts playback.
    private func videoView(_ player: AVPlayer, _ url: URL, _ file: CIFile, _ preview: UIImage, _ duration: Int) -> some View {
        let w = preview.size.width <= preview.size.height ? maxWidth * 0.75 : maxWidth
        // defer the binding write out of view evaluation
        DispatchQueue.main.async { videoWidth = w }
        return ZStack(alignment: .topTrailing) {
            ZStack(alignment: .center) {
                VideoPlayerView(player: player, url: url, showControls: false)
                    .frame(width: w, height: w * preview.size.height / preview.size.width)
                    // pause this player when another media source starts playing
                    .onChange(of: ChatModel.shared.stopPreviousRecPlay) { playingUrl in
                        if playingUrl != url {
                            player.pause()
                            videoPlaying = false
                        }
                    }
                    .fullScreenCover(isPresented: $showFullScreenPlayer) {
                        fullScreenPlayer(url)
                    }
                    .onTapGesture {
                        switch player.timeControlStatus {
                        case .playing:
                            player.pause()
                            videoPlaying = false
                        case .paused:
                            showFullScreenPlayer = true
                        default: ()
                        }
                    }
                if !videoPlaying {
                    Button {
                        // mark this URL as the active media so other players stop
                        ChatModel.shared.stopPreviousRecPlay = url
                        player.play()
                    } label: {
                        playPauseIcon("play.fill")
                    }
                }
            }
            loadingIndicator()
        }
        .onAppear {
            addObserver(player, url)
        }
        .onDisappear {
            // release the time observer and stop playback when scrolled away
            removeObserver()
            player.pause()
            videoPlaying = false
        }
    }

    /// Circular translucent badge with the given SF Symbol (play/pause glyph).
    private func playPauseIcon(_ image: String, _ color: Color = .white) -> some View {
        Image(systemName: image)
            .resizable()
            .aspectRatio(contentMode: .fit)
            .frame(width: 12, height: 12)
            .foregroundColor(color)
            .padding(.leading, 4)
            .frame(width: 40, height: 40)
            .background(Color.black.opacity(0.35))
            .clipShape(Circle())
    }

    /// Top-left overlay: elapsed time while playing (total duration otherwise),
    /// plus the file size while not playing.
    private func durationProgress() -> some View {
        HStack {
            Text("\(durationText(videoPlaying ? progress : duration))")
                .foregroundColor(.white)
                .font(.caption)
                .padding(.vertical, 3)
                .padding(.horizontal, 6)
                .background(Color.black.opacity(0.35))
                .cornerRadius(10)
                .padding([.top, .leading], 6)
            if let file = chatItem.file, !videoPlaying {
                Text("\(ByteCountFormatter.string(fromByteCount: file.fileSize, countStyle: .binary))")
                    .foregroundColor(.white)
                    .font(.caption)
                    .padding(.vertical, 3)
                    .padding(.horizontal, 6)
                    .background(Color.black.opacity(0.35))
                    .cornerRadius(10)
                    .padding(.top, 6)
            }
        }
    }

    /// Preview image shown before the video file is available; portrait previews
    /// are narrowed to 75% of maxWidth (landscape uses .infinity — TODO confirm
    /// this is intended rather than maxWidth, unlike videoView above).
    private func imageView(_ img: UIImage) -> some View {
        let w = img.size.width <= img.size.height ? maxWidth * 0.75 : .infinity
        DispatchQueue.main.async { videoWidth = w }
        return ZStack(alignment: .topTrailing) {
            Image(uiImage: img)
                .resizable()
                .scaledToFit()
                .frame(maxWidth: w)
            loadingIndicator()
        }
    }

    /// Top-right overlay reflecting the file transfer state: spinner, progress
    /// circle, checkmark (sent), down-arrow (invitation) or ellipsis (accepted).
    @ViewBuilder private func loadingIndicator() -> some View {
        if let file = chatItem.file {
            switch file.fileStatus {
            case .sndStored:
                switch file.fileProtocol {
                case .xftp: progressView()
                case .smp: EmptyView()
                }
            case let .sndTransfer(sndProgress, sndTotal):
                switch file.fileProtocol {
                case .xftp: progressCircle(sndProgress, sndTotal)
                case .smp: progressView()
                }
            case .sndComplete:
                Image(systemName: "checkmark")
                    .resizable()
                    .aspectRatio(contentMode: .fit)
                    .frame(width: 10, height: 10)
                    .foregroundColor(.white)
                    .padding(13)
            case .rcvInvitation:
                Image(systemName: "arrow.down")
                    .resizable()
                    .aspectRatio(contentMode: .fit)
                    .frame(width: 14, height: 14)
                    .foregroundColor(.white)
                    .padding(11)
            case .rcvAccepted:
                Image(systemName: "ellipsis")
                    .resizable()
                    .aspectRatio(contentMode: .fit)
                    .frame(width: 14, height: 14)
                    .foregroundColor(.white)
                    .padding(11)
            case let .rcvTransfer(rcvProgress, rcvTotal):
                // xftp reports byte progress; smp only shows an indeterminate spinner
                if file.fileProtocol == .xftp && rcvProgress < rcvTotal {
                    progressCircle(rcvProgress, rcvTotal)
                } else {
                    progressView()
                }
            default: EmptyView()
            }
        }
    }

    /// Small indeterminate spinner used while a transfer has no byte progress.
    private func progressView() -> some View {
        ProgressView()
            .progressViewStyle(.circular)
            .frame(width: 16, height: 16)
            .tint(.white)
            .padding(11)
    }

    /// Determinate ring showing `progress`/`total` of the transfer.
    private func progressCircle(_ progress: Int64, _ total: Int64) -> some View {
        Circle()
            .trim(from: 0, to: Double(progress) / Double(total))
            .stroke(
                Color(uiColor: .white),
                style: StrokeStyle(lineWidth: 2)
            )
            .rotationEffect(.degrees(-90))
            .frame(width: 16, height: 16)
            .padding([.trailing, .top], 11)
    }

    /// Accepts the incoming file for the current user.
    /// NOTE(review): despite the name, no size validation is visible here — confirm
    /// whether size checking happens inside `receiveFile` or is still a TODO.
    private func receiveFileIfValidSize(file: CIFile, receiveFile: @escaping (User, Int64) async -> Void) {
        Task {
            if let user = ChatModel.shared.currentUser {
                await receiveFile(user, file.fileId)
            }
            // TODO image accepted alert?
        }
    }

    /// Full-screen looping player over a black background. Dismissed by the close
    /// button or a predominantly-downward drag (> 60pt down, > 2x horizontal travel).
    private func fullScreenPlayer(_ url: URL) -> some View {
        ZStack {
            Color.black.edgesIgnoringSafeArea(.all)
            VideoPlayer(player: createFullScreenPlayerAndPlay(url)) {
            }
            .overlay(alignment: .topLeading, content: {
                Button(action: { showFullScreenPlayer = false },
                    label: {
                        Image(systemName: "multiply")
                            .resizable()
                            .tint(.white)
                            .frame(width: 15, height: 15)
                            .padding(.leading, 15)
                            .padding(.top, 13)
                    }
                )
            })
            .gesture(
                DragGesture(minimumDistance: 80)
                    .onChanged { gesture in
                        let t = gesture.translation
                        let w = abs(t.width)
                        if t.height > 60 && t.height > w * 2 {
                            showFullScreenPlayer = false
                        }
                    }
            )
            .onDisappear {
                // stop observing end-of-playback once full screen is dismissed
                if let fullScreenTimeObserver = fullScreenTimeObserver {
                    NotificationCenter.default.removeObserver(fullScreenTimeObserver)
                }
                fullScreenTimeObserver = nil
            }
        }
    }

    /// Creates a dedicated full-screen player, starts playback on the next main-queue
    /// turn, and installs a notification observer that loops the video on completion.
    private func createFullScreenPlayerAndPlay(_ url: URL) -> AVPlayer {
        let player = AVPlayer(url: url)
        // deferred so playback/observer setup runs after the cover is presented
        DispatchQueue.main.asyncAfter(deadline: .now()) {
            ChatModel.shared.stopPreviousRecPlay = url
            player.play()
            fullScreenTimeObserver = NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: .main) { _ in
                // loop: rewind and replay
                player.seek(to: CMTime.zero)
                player.play()
            }
        }
        return player
    }

    /// Installs a periodic time observer on the inline player to keep `duration`,
    /// `progress` and `videoPlaying` in sync with actual playback.
    private func addObserver(_ player: AVPlayer, _ url: URL) {
        timeObserver = player.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.01, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), queue: .main) { time in
            if let item = player.currentItem {
                // item.duration is indefinite until the asset is loaded
                let dur = CMTimeGetSeconds(item.duration)
                if !dur.isInfinite && !dur.isNaN {
                    duration = Int(dur)
                }
                progress = Int(CMTimeGetSeconds(player.currentTime()))
                // `if` prevents showing Play button while the playback seeks to start and then plays
                if player.currentTime() != player.currentItem?.duration && player.currentTime() != .zero {
                    videoPlaying = player.timeControlStatus == .playing || player.timeControlStatus == .waitingToPlayAtSpecifiedRate
                }
            }
        }
    }

    /// Detaches the periodic time observer installed by `addObserver`.
    private func removeObserver() {
        if let timeObserver = timeObserver {
            player?.removeTimeObserver(timeObserver)
        }
        timeObserver = nil
    }
}

View File

@@ -203,7 +203,7 @@ struct VoiceMessagePlayer: View {
private func startPlayback(_ recordingFileName: String) {
startingPlayback = true
chatModel.stopPreviousRecPlay.toggle()
chatModel.stopPreviousRecPlay = getAppFilePath(recordingFileName)
audioPlayer = AudioPlayer(
onTimer: { playbackTime = $0 },
onFinishPlayback: {

View File

@@ -23,6 +23,7 @@ struct FramedItemView: View {
@State var scrollProxy: ScrollViewProxy? = nil
@State var msgWidth: CGFloat = 0
@State var imgWidth: CGFloat? = nil
@State var videoWidth: CGFloat? = nil
@State var metaColor = Color.secondary
@State var showFullScreenImage = false
@@ -64,7 +65,7 @@ struct FramedItemView: View {
.overlay(DetermineWidth())
}
}
.background(chatItemFrameColorMaybeImage(chatItem, colorScheme))
.background(chatItemFrameColorMaybeImageOrVideo(chatItem, colorScheme))
.cornerRadius(18)
.onPreferenceChange(DetermineWidth.Key.self) { msgWidth = $0 }
@@ -103,6 +104,19 @@ struct FramedItemView: View {
} else {
ciMsgContentView (chatItem, showMember)
}
case let .video(text, image, duration):
CIVideoView(chatItem: chatItem, image: image, duration: duration, maxWidth: maxWidth, videoWidth: $videoWidth, scrollProxy: scrollProxy)
.overlay(DetermineWidth())
if text == "" && !chatItem.meta.isLive {
Color.clear
.frame(width: 0, height: 0)
.preference(
key: MetaColorPreferenceKey.self,
value: .white
)
} else {
ciMsgContentView (chatItem, showMember)
}
case let .voice(text, duration):
FramedCIVoiceView(chatItem: chatItem, recordingFile: chatItem.file, duration: duration)
.overlay(DetermineWidth())
@@ -152,8 +166,8 @@ struct FramedItemView: View {
.overlay(DetermineWidth())
.frame(minWidth: msgWidth, alignment: .leading)
.background(chatItemFrameContextColor(chatItem, colorScheme))
if let imgWidth = imgWidth, imgWidth < maxWidth {
v.frame(maxWidth: imgWidth, alignment: .leading)
if let mediaWidth = maxMediaWidth(), mediaWidth < maxWidth {
v.frame(maxWidth: mediaWidth, alignment: .leading)
} else {
v
}
@@ -175,6 +189,19 @@ struct FramedItemView: View {
} else {
ciQuotedMsgView(qi)
}
case let .video(_, image, _):
if let data = Data(base64Encoded: dropImagePrefix(image)),
let uiImage = UIImage(data: data) {
ciQuotedMsgView(qi)
.padding(.trailing, 70).frame(minWidth: msgWidth, alignment: .leading)
Image(uiImage: uiImage)
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: 68, height: 68)
.clipped()
} else {
ciQuotedMsgView(qi)
}
case .file:
ciQuotedMsgView(qi)
.padding(.trailing, 20).frame(minWidth: msgWidth, alignment: .leading)
@@ -190,9 +217,9 @@ struct FramedItemView: View {
.overlay(DetermineWidth())
.frame(minWidth: msgWidth, alignment: .leading)
.background(chatItemFrameContextColor(chatItem, colorScheme))
if let imgWidth = imgWidth, imgWidth < maxWidth {
v.frame(maxWidth: imgWidth, alignment: .leading)
if let mediaWidth = maxMediaWidth(), mediaWidth < maxWidth {
v.frame(maxWidth: mediaWidth, alignment: .leading)
} else {
v
}
@@ -243,9 +270,9 @@ struct FramedItemView: View {
.overlay(DetermineWidth())
.frame(minWidth: 0, alignment: .leading)
.textSelection(.enabled)
if let imgWidth = imgWidth, imgWidth < maxWidth {
v.frame(maxWidth: imgWidth, alignment: .leading)
if let mediaWidth = maxMediaWidth(), mediaWidth < maxWidth {
v.frame(maxWidth: mediaWidth, alignment: .leading)
} else {
v
}
@@ -258,6 +285,16 @@ struct FramedItemView: View {
ciMsgContentView (chatItem, showMember)
}
}
// Widest media preview (image or video) rendered in this item, if any —
// used to cap the width of the message frame around the media.
private func maxMediaWidth() -> CGFloat? {
    guard let iw = imgWidth else { return videoWidth }
    guard let vw = videoWidth else { return iw }
    return iw > vw ? iw : vw
}
}
func isRightToLeft(_ s: String) -> Bool {
@@ -274,15 +311,17 @@ private struct MetaColorPreferenceKey: PreferenceKey {
}
}
func onlyImage(_ ci: ChatItem) -> Bool {
/// True when the item renders as bare media — an image or video with no caption
/// text, not deleted, not live, and not quoting another item — so the message
/// frame can be made transparent.
func onlyImageOrVideo(_ ci: ChatItem) -> Bool {
    switch ci.content.msgContent {
    case let .image(text, _), let .video(text, _, _):
        return ci.meta.itemDeleted == nil && !ci.meta.isLive && ci.quotedItem == nil && text == ""
    default:
        return false
    }
}
func chatItemFrameColorMaybeImage(_ ci: ChatItem, _ colorScheme: ColorScheme) -> Color {
onlyImage(ci)
/// Frame color for a chat item: transparent for bare image/video items,
/// otherwise the regular frame color for the color scheme.
func chatItemFrameColorMaybeImageOrVideo(_ ci: ChatItem, _ colorScheme: ColorScheme) -> Color {
    if onlyImageOrVideo(ci) {
        return Color.clear
    }
    return chatItemFrameColor(ci, colorScheme)
}

View File

@@ -9,36 +9,61 @@
import SwiftUI
import SimpleXChat
import SwiftyGif
import AVKit
struct FullScreenImageView: View {
struct FullScreenMediaView: View {
@EnvironmentObject var m: ChatModel
@State var chatItem: ChatItem
@State var image: UIImage
@State var image: UIImage?
@State var player: AVPlayer? = nil
@State var url: URL? = nil
@Binding var showView: Bool
@State var scrollProxy: ScrollViewProxy?
@State private var showNext = false
@State private var nextImage: UIImage?
@State private var nextPlayer: AVPlayer?
@State private var nextURL: URL?
@State private var scrolling = false
@State private var offset: CGFloat = 0
@State private var nextOffset: CGFloat = 0
var body: some View {
GeometryReader(content: imageScrollView)
GeometryReader(content: mediaScrollView)
}
func imageScrollView(_ g: GeometryProxy) -> some View {
func mediaScrollView(_ g: GeometryProxy) -> some View {
ZStack {
Color.black.edgesIgnoringSafeArea(.all)
if showNext, let nextImage = nextImage {
imageView(image).offset(x: offset)
if let image = image {
imageView(image).offset(x: offset)
} else if let player = player, let url = url {
videoView(player, url).offset(x: offset)
}
imageView(nextImage).offset(x: offset + nextOffset)
} else if showNext, let nextPlayer = nextPlayer, let nextURL = nextURL {
if let image = image {
imageView(image).offset(x: offset)
} else if let player = player, let url = url {
videoView(player, url).offset(x: offset)
}
videoView(nextPlayer, nextURL).offset(x: offset + nextOffset)
} else {
ZoomableScrollView {
imageView(image)
if let image = image {
imageView(image)
} else if let player = player, let url = url {
videoView(player, url)
}
}
}
}
.onTapGesture { showView = false }
.onAppear {
startPlayerAndNotify()
}
.onDisappear {
player?.pause()
}
.gesture(
DragGesture(minimumDistance: 80)
.onChanged { gesture in
@@ -53,9 +78,17 @@ struct FullScreenImageView: View {
let previous = t.width > 0
scrolling = true
if let item = m.nextChatItemData(chatItem.id, previous: previous, map: chatItemImage) {
var img: UIImage
(chatItem, img) = item
var img: UIImage?
var url: URL?
(chatItem, img, url) = item
nextImage = img
nextPlayer?.pause()
if let url = url {
nextPlayer = VideoPlayerView.getOrCreatePlayer(url, true)
} else {
nextPlayer = nil
}
nextURL = url
let s = g.size.width
var toOffset: CGFloat
(toOffset, nextOffset) = previous ? (s, -s) : (-s, s)
@@ -65,6 +98,14 @@ struct FullScreenImageView: View {
}
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
image = img
player?.pause()
self.url = url
if let url = url {
player = VideoPlayerView.getOrCreatePlayer(url, true)
startPlayerAndNotify()
} else {
player = nil
}
showNext = false
offset = 0
}
@@ -87,13 +128,30 @@ struct FullScreenImageView: View {
.scaledToFit()
}
}
.onTapGesture { showView = false }
}
private func chatItemImage(_ ci: ChatItem) -> (ChatItem, UIImage)? {
// Full-screen gallery video: plays `url` via the shared player with native controls.
private func videoView( _ player: AVPlayer, _ url: URL) -> some View {
    VideoPlayerView(player: player, url: url, showControls: true)
}
private func chatItemImage(_ ci: ChatItem) -> (ChatItem, UIImage?, URL?)? {
if case .image = ci.content.msgContent,
let img = getLoadedImage(ci.file) {
return (ci, img)
return (ci, img, nil)
}
// Currently, video support in gallery is not enabled
/*else if case .video = ci.content.msgContent,
let url = getLoadedVideo(ci.file) {
return (ci, nil, url)
}*/
return nil
}
// Starts playback (if a player exists) and records this URL as the currently
// playing media so any other players observing stopPreviousRecPlay pause.
private func startPlayerAndNotify() {
    guard let player = player else { return }
    ChatModel.shared.stopPreviousRecPlay = url
    player.play()
}
}

View File

@@ -78,6 +78,7 @@ struct ChatView: View {
if chatModel.chatId == nil { dismiss() }
}
.onDisappear {
VideoPlayerView.players.removeAll()
if chatModel.chatId == cInfo.id && !presentationMode.wrappedValue.isPresented {
chatModel.chatId = nil
DispatchQueue.main.asyncAfter(deadline: .now() + 0.35) {

View File

@@ -14,7 +14,7 @@ import PhotosUI
enum ComposePreview {
case noPreview
case linkPreview(linkPreview: LinkPreview?)
case imagePreviews(imagePreviews: [(String, UploadContent?)])
case mediaPreviews(mediaPreviews: [(String, UploadContent?)])
case voicePreview(recordingFileName: String, duration: Int)
case filePreview(fileName: String, file: URL)
}
@@ -105,7 +105,7 @@ struct ComposeState {
var sendEnabled: Bool {
switch preview {
case .imagePreviews: return true
case .mediaPreviews: return true
case .voicePreview: return voiceMessageRecordingState == .finished
case .filePreview: return true
default: return !message.isEmpty || liveMessage != nil
@@ -118,7 +118,7 @@ struct ComposeState {
var linkPreviewAllowed: Bool {
switch preview {
case .imagePreviews: return false
case .mediaPreviews: return false
case .voicePreview: return false
case .filePreview: return false
default: return useLinkPreviews
@@ -175,7 +175,9 @@ func chatItemPreview(chatItem: ChatItem) -> ComposePreview {
case let .link(_, preview: preview):
chatItemPreview = .linkPreview(linkPreview: preview)
case let .image(_, image):
chatItemPreview = .imagePreviews(imagePreviews: [(image, nil)])
chatItemPreview = .mediaPreviews(mediaPreviews: [(image, nil)])
case let .video(_, image, _):
chatItemPreview = .mediaPreviews(mediaPreviews: [(image, nil)])
case let .voice(_, duration):
chatItemPreview = .voicePreview(recordingFileName: chatItem.file?.fileName ?? "", duration: duration)
case .file:
@@ -190,11 +192,13 @@ func chatItemPreview(chatItem: ChatItem) -> ComposePreview {
enum UploadContent: Equatable {
case simpleImage(image: UIImage)
case animatedImage(image: UIImage)
case video(image: UIImage, url: URL, duration: Int)
var uiImage: UIImage {
switch self {
case let .simpleImage(image): return image
case let .animatedImage(image): return image
case let .video(image, _, _): return image
}
}
@@ -216,6 +220,14 @@ enum UploadContent: Equatable {
}
return nil
}
/// Builds video upload content for a local file URL: a preview frame plus the
/// duration in seconds. Returns nil when no preview frame can be extracted.
static func loadVideoFromURL(url: URL) -> UploadContent? {
    guard let (image, duration) = AVAsset(url: url).generatePreview() else { return nil }
    return .video(image: image, url: url, duration: duration)
}
}
struct ComposeView: View {
@@ -232,9 +244,9 @@ struct ComposeView: View {
@AppStorage(GROUP_DEFAULT_XFTP_SEND_ENABLED, store: groupDefaults) private var xftpSendEnabled = false
@State private var showChooseSource = false
@State private var showImagePicker = false
@State private var showMediaPicker = false
@State private var showTakePhoto = false
@State var chosenImages: [UploadContent] = []
@State var chosenMedia: [UploadContent] = []
@State private var showFileImporter = false
@State private var audioRecorder: AudioRecorder?
@@ -286,7 +298,7 @@ struct ComposeView: View {
},
finishVoiceMessageRecording: finishVoiceMessageRecording,
allowVoiceMessagesToContact: allowVoiceMessagesToContact,
onImagesAdded: { images in if !images.isEmpty { chosenImages = images }},
onMediaAdded: { media in if !media.isEmpty { chosenMedia = media }},
keyboardVisible: $keyboardVisible
)
.padding(.trailing, 12)
@@ -329,7 +341,7 @@ struct ComposeView: View {
showTakePhoto = true
}
Button("Choose from library") {
showImagePicker = true
showMediaPicker = true
}
if UIPasteboard.general.hasImages {
Button("Paste image") {
@@ -337,7 +349,7 @@ struct ComposeView: View {
if p.hasItemConformingToTypeIdentifier(UTType.data.identifier) {
p.loadFileRepresentation(forTypeIdentifier: UTType.data.identifier) { url, error in
if let url = url, let image = UploadContent.loadFromURL(url: url) {
chosenImages.append(image)
chosenMedia.append(image)
}
}
}
@@ -351,31 +363,31 @@ struct ComposeView: View {
.fullScreenCover(isPresented: $showTakePhoto) {
ZStack {
Color.black.edgesIgnoringSafeArea(.all)
CameraImageListPicker(images: $chosenImages)
CameraImageListPicker(images: $chosenMedia)
}
}
.sheet(isPresented: $showImagePicker) {
LibraryImageListPicker(images: $chosenImages, selectionLimit: 10) { itemsSelected in
showImagePicker = false
.sheet(isPresented: $showMediaPicker) {
LibraryMediaListPicker(media: $chosenMedia, selectionLimit: 10) { itemsSelected in
showMediaPicker = false
if itemsSelected {
DispatchQueue.main.async {
composeState = composeState.copy(preview: .imagePreviews(imagePreviews: []))
composeState = composeState.copy(preview: .mediaPreviews(mediaPreviews: []))
}
}
}
}
.onChange(of: chosenImages) { images in
.onChange(of: chosenMedia) { selected in
Task {
var imgs: [(String, UploadContent)] = []
for image in images {
if let img = resizeImageToStrSize(image.uiImage, maxDataSize: 14000) {
imgs.append((img, image))
var media: [(String, UploadContent)] = []
for content in selected {
if let img = resizeImageToStrSize(content.uiImage, maxDataSize: 14000) {
media.append((img, content))
await MainActor.run {
composeState = composeState.copy(preview: .imagePreviews(imagePreviews: imgs))
composeState = composeState.copy(preview: .mediaPreviews(mediaPreviews: media))
}
}
}
if imgs.count == 0 {
if media.count == 0 {
await MainActor.run {
composeState = composeState.copy(preview: .noPreview)
}
@@ -514,12 +526,12 @@ struct ComposeView: View {
EmptyView()
case let .linkPreview(linkPreview: preview):
ComposeLinkView(linkPreview: preview, cancelPreview: cancelLinkPreview)
case let .imagePreviews(imagePreviews: images):
case let .mediaPreviews(mediaPreviews: media):
ComposeImageView(
images: images.map { (img, _) in img },
images: media.map { (img, _) in img },
cancelImage: {
composeState = composeState.copy(preview: .noPreview)
chosenImages = []
chosenMedia = []
},
cancelEnabled: !composeState.editing)
case let .voicePreview(recordingFileName, _):
@@ -594,21 +606,29 @@ struct ComposeView: View {
sent = await send(.text(msgText), quoted: quoted, live: live)
case .linkPreview:
sent = await send(checkLinkPreview(), quoted: quoted, live: live)
case let .imagePreviews(imagePreviews: images):
let last = images.count - 1
case let .mediaPreviews(mediaPreviews: media):
let last = media.count - 1
if last >= 0 {
for i in 0..<last {
sent = await sendImage(images[i])
if case (_, .video(_, _, _)) = media[i] {
sent = await sendVideo(media[i])
} else {
sent = await sendImage(media[i])
}
_ = try? await Task.sleep(nanoseconds: 100_000000)
}
sent = await sendImage(images[last], text: msgText, quoted: quoted, live: live)
if case (_, .video(_, _, _)) = media[last] {
sent = await sendVideo(media[last], text: msgText, quoted: quoted, live: live)
} else {
sent = await sendImage(media[last], text: msgText, quoted: quoted, live: live)
}
}
if sent == nil {
sent = await send(.text(msgText), quoted: quoted, live: live)
}
case let .voicePreview(recordingFileName, duration):
stopPlayback.toggle()
chatModel.filesToDelete.removeAll { $0 == recordingFileName }
chatModel.filesToDelete.remove(getAppFilePath(recordingFileName))
sent = await send(.voice(text: msgText, duration: duration), quoted: quoted, file: recordingFileName)
case let .filePreview(_, file):
if let savedFile = saveFileFromURL(file) {
@@ -657,6 +677,8 @@ struct ComposeView: View {
return checkLinkPreview()
case .image(_, let image):
return .image(text: msgText, image: image)
case .video(_, let image, let duration):
return .video(text: msgText, image: image, duration: duration)
case .voice(_, let duration):
return .voice(text: msgText, duration: duration)
case .file:
@@ -674,6 +696,14 @@ struct ComposeView: View {
return nil
}
/// Moves a picked video into app storage (without loading it into memory) and
/// sends it as a video message with the given preview image string.
/// Returns the created chat item, or nil if the content is not a video or the
/// file cannot be saved.
func sendVideo(_ imageData: (String, UploadContent?), text: String = "", quoted: Int64? = nil, live: Bool = false) async -> ChatItem? {
    let (image, data) = imageData
    guard case let .video(_, url, duration) = data,
          let savedFile = saveFileFromURLWithoutLoad(url)
    else { return nil }
    return await send(.video(text: text, image: image, duration: duration), quoted: quoted, file: savedFile, live: live)
}
func send(_ mc: MsgContent, quoted: Int64?, file: String? = nil, live: Bool = false) async -> ChatItem? {
if let chatItem = await apiSendMessage(
type: chat.chatInfo.chatType,
@@ -711,14 +741,15 @@ struct ComposeView: View {
switch img {
case let .simpleImage(image): return saveImage(image)
case let .animatedImage(image): return saveAnimImage(image)
default: return nil
}
}
}
private func startVoiceMessageRecording() async {
startingRecording = true
chatModel.stopPreviousRecPlay.toggle()
let fileName = generateNewFileName("voice", "m4a")
chatModel.stopPreviousRecPlay = getAppFilePath(fileName)
audioRecorder = AudioRecorder(
onTimer: { voiceMessageRecordingTime = $0 },
onFinishRecording: {
@@ -804,7 +835,7 @@ struct ComposeView: View {
composeState = ComposeState()
resetLinkPreview()
}
chosenImages = []
chosenMedia = []
audioRecorder = nil
voiceMessageRecordingTime = nil
startingRecording = false
@@ -814,7 +845,7 @@ struct ComposeView: View {
if case .recording = composeState.voiceMessageRecordingState {
finishVoiceMessageRecording()
if let fileName = composeState.voiceMessageRecordingFileName {
chatModel.filesToDelete.append(fileName)
chatModel.filesToDelete.insert(getAppFilePath(fileName))
}
}
chatModel.draft = composeState

View File

@@ -164,7 +164,7 @@ struct ComposeVoiceView: View {
private func startPlayback() {
startingPlayback = true
chatModel.stopPreviousRecPlay.toggle()
chatModel.stopPreviousRecPlay = getAppFilePath(recordingFileName)
audioPlayer = AudioPlayer(
onTimer: { playbackTime = $0 },
onFinishPlayback: {

View File

@@ -23,7 +23,7 @@ struct SendMessageView: View {
var startVoiceMessageRecording: (() -> Void)? = nil
var finishVoiceMessageRecording: (() -> Void)? = nil
var allowVoiceMessagesToContact: (() -> Void)? = nil
var onImagesAdded: ([UploadContent]) -> Void
var onMediaAdded: ([UploadContent]) -> Void
@State private var holdingVMR = false
@Namespace var namespace
@FocusState.Binding var keyboardVisible: Bool
@@ -69,7 +69,7 @@ struct SendMessageView: View {
font: teUiFont,
focused: $keyboardVisible,
alignment: alignment,
onImagesAdded: onImagesAdded
onImagesAdded: onMediaAdded
)
.allowsTightening(false)
.frame(height: teHeight)
@@ -365,7 +365,7 @@ struct SendMessageView_Previews: PreviewProvider {
SendMessageView(
composeState: $composeStateNew,
sendMessage: {},
onImagesAdded: { _ in },
onMediaAdded: { _ in },
keyboardVisible: $keyboardVisible
)
}
@@ -375,7 +375,7 @@ struct SendMessageView_Previews: PreviewProvider {
SendMessageView(
composeState: $composeStateEditing,
sendMessage: {},
onImagesAdded: { _ in },
onMediaAdded: { _ in },
keyboardVisible: $keyboardVisible
)
}

View File

@@ -143,7 +143,7 @@ struct ChatPreviewView: View {
func attachment() -> Text {
switch draft.preview {
case let .filePreview(fileName, _): return image("doc.fill") + Text(fileName) + Text(" ")
case .imagePreviews: return image("photo")
case .mediaPreviews: return image("photo")
case let .voicePreview(_, duration): return image("play.fill") + Text(durationText(duration))
default: return Text("")
}
@@ -159,6 +159,7 @@ struct ChatPreviewView: View {
switch cItem.content.msgContent {
case .file: return "doc.fill"
case .image: return "photo"
case .video: return "video"
case .voice: return "play.fill"
default: return nil
}

View File

@@ -17,7 +17,7 @@ struct LibraryImagePicker: View {
@State var images: [UploadContent] = []
var body: some View {
LibraryImageListPicker(images: $images, selectionLimit: 1, didFinishPicking: didFinishPicking)
LibraryMediaListPicker(media: $images, selectionLimit: 1, didFinishPicking: didFinishPicking)
.onChange(of: images) { _ in
if let img = images.first {
image = img.uiImage
@@ -26,19 +26,20 @@ struct LibraryImagePicker: View {
}
}
struct LibraryImageListPicker: UIViewControllerRepresentable {
struct LibraryMediaListPicker: UIViewControllerRepresentable {
typealias UIViewControllerType = PHPickerViewController
@Binding var images: [UploadContent]
@AppStorage(GROUP_DEFAULT_XFTP_SEND_ENABLED, store: groupDefaults) var xftpSendEnabled = false
@Binding var media: [UploadContent]
var selectionLimit: Int
var didFinishPicking: (_ didSelectItems: Bool) -> Void
class Coordinator: PHPickerViewControllerDelegate {
let parent: LibraryImageListPicker
let dispatchQueue = DispatchQueue(label: "chat.simplex.app.LibraryImageListPicker")
var images: [UploadContent] = []
var imageCount: Int = 0
let parent: LibraryMediaListPicker
let dispatchQueue = DispatchQueue(label: "chat.simplex.app.LibraryMediaListPicker")
var media: [UploadContent] = []
var mediaCount: Int = 0
init(_ parent: LibraryImageListPicker) {
init(_ parent: LibraryMediaListPicker) {
self.parent = parent
}
@@ -48,13 +49,23 @@ struct LibraryImageListPicker: UIViewControllerRepresentable {
return
}
parent.images = []
images = []
imageCount = results.count
parent.media = []
media = []
mediaCount = results.count
for result in results {
logger.log("LibraryImageListPicker result")
logger.log("LibraryMediaListPicker result")
let p = result.itemProvider
if p.hasItemConformingToTypeIdentifier(UTType.data.identifier) {
if p.hasItemConformingToTypeIdentifier(UTType.movie.identifier) {
p.loadFileRepresentation(forTypeIdentifier: UTType.movie.identifier) { url, error in
if let url = url {
let tempUrl = URL(fileURLWithPath: getTempFilesDirectory().path + "/" + generateNewFileName("video", url.pathExtension))
if ((try? FileManager.default.copyItem(at: url, to: tempUrl)) != nil) {
ChatModel.shared.filesToDelete.insert(tempUrl)
self.loadVideo(url: tempUrl, error: error)
}
}
}
} else if p.hasItemConformingToTypeIdentifier(UTType.data.identifier) {
p.loadFileRepresentation(forTypeIdentifier: UTType.data.identifier) { url, error in
self.loadImage(object: url, error: error)
}
@@ -65,14 +76,14 @@ struct LibraryImageListPicker: UIViewControllerRepresentable {
}
}
} else {
dispatchQueue.sync { self.imageCount -= 1}
dispatchQueue.sync { self.mediaCount -= 1}
}
}
DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
self.dispatchQueue.sync {
if self.parent.images.count == 0 {
logger.log("LibraryImageListPicker: added \(self.images.count) images out of \(results.count)")
self.parent.images = self.images
if self.parent.media.count == 0 {
logger.log("LibraryMediaListPicker: added \(self.media.count) images out of \(results.count)")
self.parent.media = self.media
}
}
}
@@ -80,19 +91,35 @@ struct LibraryImageListPicker: UIViewControllerRepresentable {
func loadImage(object: Any?, error: Error? = nil) {
if let error = error {
logger.error("LibraryImageListPicker: couldn't load image with error: \(error.localizedDescription)")
logger.error("LibraryMediaListPicker: couldn't load image with error: \(error.localizedDescription)")
} else if let image = object as? UIImage {
images.append(.simpleImage(image: image))
logger.log("LibraryImageListPicker: added image")
media.append(.simpleImage(image: image))
logger.log("LibraryMediaListPicker: added image")
} else if let url = object as? URL, let image = UploadContent.loadFromURL(url: url) {
images.append(image)
media.append(image)
}
dispatchQueue.sync {
self.imageCount -= 1
if self.imageCount == 0 && self.parent.images.count == 0 {
logger.log("LibraryImageListPicker: added all images")
self.parent.images = self.images
self.images = []
self.mediaCount -= 1
if self.mediaCount == 0 && self.parent.media.count == 0 {
logger.log("LibraryMediaListPicker: added all media")
self.parent.media = self.media
self.media = []
}
}
}
func loadVideo(url: URL?, error: Error? = nil) {
if let error = error {
logger.error("LibraryMediaListPicker: couldn't load video with error: \(error.localizedDescription)")
} else if let url = url as URL?, let video = UploadContent.loadVideoFromURL(url: url) {
media.append(video)
}
dispatchQueue.sync {
self.mediaCount -= 1
if self.mediaCount == 0 && self.parent.media.count == 0 {
logger.log("LibraryMediaListPicker: added all media")
self.parent.media = self.media
self.media = []
}
}
}
@@ -104,8 +131,15 @@ struct LibraryImageListPicker: UIViewControllerRepresentable {
func makeUIViewController(context: Context) -> PHPickerViewController {
var config = PHPickerConfiguration()
config.filter = .images
let allowVideoAttachment = xftpSendEnabled
if allowVideoAttachment {
config.filter = .any(of: [.images, .videos])
} else {
config.filter = .images
}
config.selectionLimit = selectionLimit
config.selection = .ordered
//config.preferredAssetRepresentationMode = .current
let controller = PHPickerViewController(configuration: config)
controller.delegate = context.coordinator
return controller

View File

@@ -0,0 +1,61 @@
//
// Created by Avently on 30.03.2023.
// Copyright (c) 2023 SimpleX Chat. All rights reserved.
//
import Foundation
import SwiftUI
import AVKit
struct VideoPlayerView: UIViewRepresentable {
static var players: [String: AVPlayer] = [:]
static func getOrCreatePlayer(_ url: URL, _ gallery: Bool) -> AVPlayer {
if let player = players[url.absoluteString + gallery.description] {
return player
} else {
let player = AVPlayer(url: url)
players[url.absoluteString + gallery.description] = player
return player
}
}
typealias UIViewType = UIView
let player: AVPlayer
let url: URL
let showControls: Bool
func makeUIView(context: UIViewRepresentableContext<VideoPlayerView>) -> UIView {
let controller = AVPlayerViewController()
controller.showsPlaybackControls = showControls
if #available(iOS 16.0, *) {
controller.speeds = []
}
controller.player = player
context.coordinator.controller = controller
context.coordinator.timeObserver = NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: .main) { _ in
player.seek(to: CMTime.zero)
player.play()
}
return controller.view
}
func updateUIView(_ uiView: UIView, context: UIViewRepresentableContext<VideoPlayerView>) {
}
func makeCoordinator() -> VideoPlayerView.Coordinator {
Coordinator()
}
class Coordinator: NSObject {
var controller: AVPlayerViewController?
var timeObserver: Any? = nil
deinit {
print("deinit coordinator of VideoPlayer")
if let timeObserver = timeObserver {
NotificationCenter.default.removeObserver(timeObserver)
}
}
}
}

View File

@@ -74,7 +74,7 @@ struct TerminalView: View {
composeState: $composeState,
sendMessage: sendMessage,
showVoiceMessageButton: false,
onImagesAdded: { _ in },
onMediaAdded: { _ in },
keyboardVisible: $keyboardVisible
)
.padding(.horizontal, 12)

View File

@@ -44,7 +44,7 @@ struct DeveloperView: View {
Section {
settingsRow("arrow.up.doc") {
Toggle("Send files via XFTP", isOn: $xftpSendEnabled)
Toggle("Send videos and files via XFTP", isOn: $xftpSendEnabled)
.onChange(of: xftpSendEnabled) { _ in
do {
try setXFTPConfig(getXFTPCfg())

View File

@@ -16,7 +16,7 @@ struct ExperimentalFeaturesView: View {
List {
Section("") {
settingsRow("arrow.up.doc") {
Toggle("Send files via XFTP", isOn: $xftpSendEnabled)
Toggle("Send videos and files via XFTP", isOn: $xftpSendEnabled)
.onChange(of: xftpSendEnabled) { _ in
do {
try setXFTPConfig(getXFTPCfg())

View File

@@ -3081,8 +3081,8 @@ Wir werden Serverredundanzen hinzufügen, um verloren gegangene Nachrichten zu v
<target>Direktnachricht senden</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send files via XFTP" xml:space="preserve">
<source>Send files via XFTP</source>
<trans-unit id="Send videos and files via XFTP" xml:space="preserve">
<source>Send videos and files via XFTP</source>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send link previews" xml:space="preserve">

View File

@@ -3084,9 +3084,9 @@ We will be adding server redundancy to prevent lost messages.</target>
<target>Send direct message</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send files via XFTP" xml:space="preserve">
<source>Send files via XFTP</source>
<target>Send files via XFTP</target>
<trans-unit id="Send videos and files via XFTP" xml:space="preserve">
<source>Send videos and files via XFTP</source>
<target>Send videos and files via XFTP</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send link previews" xml:space="preserve">

View File

@@ -3082,8 +3082,8 @@ Añadiremos redundancia de servidores para evitar la pérdida de mensajes.</targ
<target>Enviar mensaje directo</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send files via XFTP" xml:space="preserve">
<source>Send files via XFTP</source>
<trans-unit id="Send videos and files via XFTP" xml:space="preserve">
<source>Send videos and files via XFTP</source>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send link previews" xml:space="preserve">

View File

@@ -3081,8 +3081,8 @@ Nous allons ajouter une redondance des serveurs pour éviter la perte de message
<target>Envoi de message direct</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send files via XFTP" xml:space="preserve">
<source>Send files via XFTP</source>
<trans-unit id="Send videos and files via XFTP" xml:space="preserve">
<source>Send videos and files via XFTP</source>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send link previews" xml:space="preserve">

View File

@@ -3081,8 +3081,8 @@ Aggiungeremo la ridondanza del server per prevenire la perdita di messaggi.</tar
<target>Invia messaggio diretto</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send files via XFTP" xml:space="preserve">
<source>Send files via XFTP</source>
<trans-unit id="Send videos and files via XFTP" xml:space="preserve">
<source>Send videos and files via XFTP</source>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send link previews" xml:space="preserve">

View File

@@ -3081,8 +3081,8 @@ We zullen serverredundantie toevoegen om verloren berichten te voorkomen.</targe
<target>Direct bericht sturen</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send files via XFTP" xml:space="preserve">
<source>Send files via XFTP</source>
<trans-unit id="Send videos and files via XFTP" xml:space="preserve">
<source>Send videos and files via XFTP</source>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send link previews" xml:space="preserve">

View File

@@ -3084,9 +3084,9 @@ We will be adding server redundancy to prevent lost messages.</source>
<target>Отправить сообщение</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send files via XFTP" xml:space="preserve">
<source>Send files via XFTP</source>
<target>Отправлять файлы через XFTP</target>
<trans-unit id="Send videos and files via XFTP" xml:space="preserve">
<source>Send videos and files via XFTP</source>
<target>Отправлять видео и файлы через XFTP</target>
<note>No comment provided by engineer.</note>
</trans-unit>
<trans-unit id="Send link previews" xml:space="preserve">

View File

@@ -277,6 +277,12 @@ func receivedMsgNtf(_ res: ChatResponse) async -> (String, NSENotification)? {
privacyAcceptImagesGroupDefault.get() {
cItem = apiReceiveFile(fileId: file.fileId)?.chatItem ?? cItem
}
} else if case .video = cItem.content.msgContent {
if let file = cItem.file,
file.fileSize <= MAX_VIDEO_SIZE_AUTO_RCV,
privacyAcceptImagesGroupDefault.get() {
cItem = apiReceiveFile(fileId: file.fileId)?.chatItem ?? cItem
}
} else if case .voice = cItem.content.msgContent { // TODO check inlineFileMode != IFMSent
if let file = cItem.file,
file.fileSize <= MAX_IMAGE_SIZE,

View File

@@ -10,9 +10,11 @@
184152CEF68D2336FC2EBCB0 /* CallViewRenderers.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415B08031E8FB0F7FC27F9 /* CallViewRenderers.swift */; };
1841538E296606C74533367C /* UserPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415835CBD939A9ABDC108A /* UserPicker.swift */; };
1841560FD1CD447955474C1D /* UserProfilesView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415845648CA4F5A8BCA272 /* UserProfilesView.swift */; };
184158C131FDB829D8A117EA /* VideoPlayerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415DAAAD1ADBEDB0EDA852 /* VideoPlayerView.swift */; };
1841594C978674A7B42EF0C0 /* AnimatedImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1841511920742C6E152E469F /* AnimatedImageView.swift */; };
18415B0585EB5A9A0A7CA8CD /* PressedButtonStyle.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415A7F0F189D87DEFEABCA /* PressedButtonStyle.swift */; };
18415C6C56DBCEC2CBBD2F11 /* WebRTCClient.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415323A4082FC92887F906 /* WebRTCClient.swift */; };
18415F9A2D551F9757DA4654 /* CIVideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 18415FD2E36F13F596A45BB4 /* CIVideoView.swift */; };
18415FEFE153C5920BFB7828 /* GroupWelcomeView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1841516F0CE5992B0EDFB377 /* GroupWelcomeView.swift */; };
3C71477A281C0F6800CB4D4B /* www in Resources */ = {isa = PBXBuildFile; fileRef = 3C714779281C0F6800CB4D4B /* www */; };
3C8C548928133C84000A3EC7 /* PasteToConnectView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3C8C548828133C84000A3EC7 /* PasteToConnectView.swift */; };
@@ -25,7 +27,7 @@
5C05DF532840AA1D00C683F9 /* CallSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C05DF522840AA1D00C683F9 /* CallSettings.swift */; };
5C063D2727A4564100AEC577 /* ChatPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C063D2627A4564100AEC577 /* ChatPreviewView.swift */; };
5C10D88828EED12E00E58BF0 /* ContactConnectionInfo.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C10D88728EED12E00E58BF0 /* ContactConnectionInfo.swift */; };
5C10D88A28F187F300E58BF0 /* FullScreenImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C10D88928F187F300E58BF0 /* FullScreenImageView.swift */; };
5C10D88A28F187F300E58BF0 /* FullScreenMediaView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C10D88928F187F300E58BF0 /* FullScreenMediaView.swift */; };
5C116CDC27AABE0400E66D01 /* ContactRequestView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C116CDB27AABE0400E66D01 /* ContactRequestView.swift */; };
5C13730B28156D2700F43030 /* ContactConnectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C13730A28156D2700F43030 /* ContactConnectionView.swift */; };
5C1A4C1E27A715B700EAD5AD /* ChatItemView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C1A4C1D27A715B700EAD5AD /* ChatItemView.swift */; };
@@ -240,6 +242,8 @@
18415845648CA4F5A8BCA272 /* UserProfilesView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = UserProfilesView.swift; sourceTree = "<group>"; };
18415A7F0F189D87DEFEABCA /* PressedButtonStyle.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PressedButtonStyle.swift; sourceTree = "<group>"; };
18415B08031E8FB0F7FC27F9 /* CallViewRenderers.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallViewRenderers.swift; sourceTree = "<group>"; };
18415DAAAD1ADBEDB0EDA852 /* VideoPlayerView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoPlayerView.swift; sourceTree = "<group>"; };
18415FD2E36F13F596A45BB4 /* CIVideoView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CIVideoView.swift; sourceTree = "<group>"; };
3C714779281C0F6800CB4D4B /* www */ = {isa = PBXFileReference; lastKnownFileType = folder; name = www; path = ../android/app/src/main/assets/www; sourceTree = "<group>"; };
3C8C548828133C84000A3EC7 /* PasteToConnectView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PasteToConnectView.swift; sourceTree = "<group>"; };
3CDBCF4127FAE51000354CDD /* ComposeLinkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ComposeLinkView.swift; sourceTree = "<group>"; };
@@ -251,7 +255,7 @@
5C05DF522840AA1D00C683F9 /* CallSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallSettings.swift; sourceTree = "<group>"; };
5C063D2627A4564100AEC577 /* ChatPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatPreviewView.swift; sourceTree = "<group>"; };
5C10D88728EED12E00E58BF0 /* ContactConnectionInfo.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactConnectionInfo.swift; sourceTree = "<group>"; };
5C10D88928F187F300E58BF0 /* FullScreenImageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FullScreenImageView.swift; sourceTree = "<group>"; };
5C10D88928F187F300E58BF0 /* FullScreenMediaView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FullScreenMediaView.swift; sourceTree = "<group>"; };
5C116CDB27AABE0400E66D01 /* ContactRequestView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactRequestView.swift; sourceTree = "<group>"; };
5C13730A28156D2700F43030 /* ContactConnectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactConnectionView.swift; sourceTree = "<group>"; };
5C13730C2815740A00F43030 /* DebugJSON.playground */ = {isa = PBXFileReference; lastKnownFileType = file.playground; path = DebugJSON.playground; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.swift; };
@@ -584,6 +588,7 @@
5CA7DFC229302AF000F7FDDE /* AppSheet.swift */,
18415A7F0F189D87DEFEABCA /* PressedButtonStyle.swift */,
5CCB939B297EFCB100399E78 /* NavStackCompat.swift */,
18415DAAAD1ADBEDB0EDA852 /* VideoPlayerView.swift */,
);
path = Helpers;
sourceTree = "<group>";
@@ -753,7 +758,7 @@
5CEACCEC27DEA495000BD591 /* MsgContentView.swift */,
5C3A88D027DF57800060F1C2 /* FramedItemView.swift */,
649BCDA12805D6EF00C3A862 /* CIImageView.swift */,
5C10D88928F187F300E58BF0 /* FullScreenImageView.swift */,
5C10D88928F187F300E58BF0 /* FullScreenMediaView.swift */,
648010AA281ADD15009009B9 /* CIFileView.swift */,
644EFFDF292CFD7F00525D5B /* CIVoiceView.swift */,
3CDBCF4727FF621E00354CDD /* CILinkView.swift */,
@@ -768,6 +773,7 @@
644EFFE32937BE9700525D5B /* MarkedDeletedItemView.swift */,
1841511920742C6E152E469F /* AnimatedImageView.swift */,
6407BA82295DA85D0082BA18 /* CIInvalidJSONView.swift */,
18415FD2E36F13F596A45BB4 /* CIVideoView.swift */,
);
path = ChatItem;
sourceTree = "<group>";
@@ -1052,7 +1058,7 @@
3CDBCF4227FAE51000354CDD /* ComposeLinkView.swift in Sources */,
3CDBCF4827FF621E00354CDD /* CILinkView.swift in Sources */,
5C7505A827B6D34800BE3227 /* ChatInfoToolbar.swift in Sources */,
5C10D88A28F187F300E58BF0 /* FullScreenImageView.swift in Sources */,
5C10D88A28F187F300E58BF0 /* FullScreenMediaView.swift in Sources */,
D72A9088294BD7A70047C86D /* NativeTextEditor.swift in Sources */,
5C00164428A26FBC0094D739 /* ContextMenu.swift in Sources */,
5C3A88D127DF57800060F1C2 /* FramedItemView.swift in Sources */,
@@ -1143,6 +1149,8 @@
18415C6C56DBCEC2CBBD2F11 /* WebRTCClient.swift in Sources */,
184152CEF68D2336FC2EBCB0 /* CallViewRenderers.swift in Sources */,
18415FEFE153C5920BFB7828 /* GroupWelcomeView.swift in Sources */,
18415F9A2D551F9757DA4654 /* CIVideoView.swift in Sources */,
184158C131FDB829D8A117EA /* VideoPlayerView.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

View File

@@ -2289,6 +2289,7 @@ public enum MsgContent {
case text(String)
case link(text: String, preview: LinkPreview)
case image(text: String, image: String)
case video(text: String, image: String, duration: Int)
case voice(text: String, duration: Int)
case file(String)
// TODO include original JSON, possibly using https://github.com/zoul/generic-json-swift
@@ -2299,6 +2300,7 @@ public enum MsgContent {
case let .text(text): return text
case let .link(text, _): return text
case let .image(text, _): return text
case let .video(text, _, _): return text
case let .voice(text, _): return text
case let .file(text): return text
case let .unknown(_, text): return text
@@ -2326,6 +2328,13 @@ public enum MsgContent {
}
}
public var isVideo: Bool {
switch self {
case .video: return true
default: return false
}
}
var cmdString: String {
"json \(encodeJSON(self))"
}
@@ -2356,6 +2365,11 @@ extension MsgContent: Decodable {
let text = try container.decode(String.self, forKey: CodingKeys.text)
let image = try container.decode(String.self, forKey: CodingKeys.image)
self = .image(text: text, image: image)
case "video":
let text = try container.decode(String.self, forKey: CodingKeys.text)
let image = try container.decode(String.self, forKey: CodingKeys.image)
let duration = try container.decode(Int.self, forKey: CodingKeys.duration)
self = .video(text: text, image: image, duration: duration)
case "voice":
let text = try container.decode(String.self, forKey: CodingKeys.text)
let duration = try container.decode(Int.self, forKey: CodingKeys.duration)
@@ -2388,6 +2402,11 @@ extension MsgContent: Encodable {
try container.encode("image", forKey: .type)
try container.encode(text, forKey: .text)
try container.encode(image, forKey: .image)
case let .video(text, image, duration):
try container.encode("video", forKey: .type)
try container.encode(text, forKey: .text)
try container.encode(image, forKey: .image)
try container.encode(duration, forKey: .duration)
case let .voice(text, duration):
try container.encode("voice", forKey: .type)
try container.encode(text, forKey: .text)

View File

@@ -16,6 +16,8 @@ public let MAX_IMAGE_SIZE: Int64 = 236700
public let MAX_IMAGE_SIZE_AUTO_RCV: Int64 = MAX_IMAGE_SIZE * 2
public let MAX_VIDEO_SIZE_AUTO_RCV: Int64 = 8000000
public let MAX_FILE_SIZE_XFTP: Int64 = 1_073_741_824
public let MAX_FILE_SIZE_SMP: Int64 = 8000000
@@ -182,6 +184,14 @@ public func saveFile(_ data: Data, _ fileName: String) -> String? {
}
}
public func removeFile(_ url: URL) {
do {
try FileManager.default.removeItem(atPath: url.path)
} catch {
logger.error("FileUtils.removeFile error: \(error.localizedDescription)")
}
}
public func removeFile(_ fileName: String) {
do {
try FileManager.default.removeItem(atPath: getAppFilePath(fileName).path)

View File

@@ -2125,7 +2125,7 @@
"Send direct message" = "Отправить сообщение";
/* No comment provided by engineer. */
"Send files via XFTP" = "Отправлять файлы через XFTP";
"Send videos and files via XFTP" = "Отправлять видео и файлы через XFTP";
/* No comment provided by engineer. */
"Send link previews" = "Отправлять картинки ссылок";