Merge branch 'master' into master-ghc8107

spaced4ndy 2023-11-06 11:41:55 +04:00
commit 4253cd7fb9
21 changed files with 343 additions and 89 deletions

View File

@@ -16,7 +16,6 @@ import chat.simplex.common.views.chatlist.*
import chat.simplex.common.views.helpers.*
import chat.simplex.common.views.onboarding.*
import chat.simplex.common.platform.*
import chat.simplex.res.MR
import kotlinx.coroutines.*
import java.lang.ref.WeakReference
@@ -143,7 +142,7 @@ fun processExternalIntent(intent: Intent?) {
val text = intent.getStringExtra(Intent.EXTRA_TEXT)
val uri = intent.getParcelableExtra<Parcelable>(Intent.EXTRA_STREAM) as? Uri
if (uri != null) {
if (uri.scheme != "content") return showNonContentUriAlert()
if (uri.scheme != "content") return showWrongUriAlert()
// Shared file that contains plain text, e.g. a `*.log` file
chatModel.sharedContent.value = SharedContent.File(text ?: "", uri.toURI())
} else if (text != null) {
@@ -154,14 +153,14 @@ fun processExternalIntent(intent: Intent?) {
isMediaIntent(intent) -> {
val uri = intent.getParcelableExtra<Parcelable>(Intent.EXTRA_STREAM) as? Uri
if (uri != null) {
if (uri.scheme != "content") return showNonContentUriAlert()
if (uri.scheme != "content") return showWrongUriAlert()
chatModel.sharedContent.value = SharedContent.Media(intent.getStringExtra(Intent.EXTRA_TEXT) ?: "", listOf(uri.toURI()))
} // All other mime types
}
else -> {
val uri = intent.getParcelableExtra<Parcelable>(Intent.EXTRA_STREAM) as? Uri
if (uri != null) {
if (uri.scheme != "content") return showNonContentUriAlert()
if (uri.scheme != "content") return showWrongUriAlert()
chatModel.sharedContent.value = SharedContent.File(intent.getStringExtra(Intent.EXTRA_TEXT) ?: "", uri.toURI())
}
}
@@ -176,7 +175,7 @@ fun processExternalIntent(intent: Intent?) {
isMediaIntent(intent) -> {
val uris = intent.getParcelableArrayListExtra<Parcelable>(Intent.EXTRA_STREAM) as? List<Uri>
if (uris != null) {
if (uris.any { it.scheme != "content" }) return showNonContentUriAlert()
if (uris.any { it.scheme != "content" }) return showWrongUriAlert()
chatModel.sharedContent.value = SharedContent.Media(intent.getStringExtra(Intent.EXTRA_TEXT) ?: "", uris.map { it.toURI() })
} // All other mime types
}
@@ -189,13 +188,6 @@ fun processExternalIntent(intent: Intent?) {
fun isMediaIntent(intent: Intent): Boolean =
intent.type?.startsWith("image/") == true || intent.type?.startsWith("video/") == true
private fun showNonContentUriAlert() {
AlertManager.shared.showAlertMsg(
title = generalGetString(MR.strings.non_content_uri_alert_title),
text = generalGetString(MR.strings.non_content_uri_alert_text)
)
}
//fun testJson() {
// val str: String = """
// """.trimIndent()

View File

@@ -200,7 +200,7 @@ actual class VideoPlayer actual constructor(
private fun setPreviewAndDuration() {
// It freezes the main thread, so do it on the IO thread
CoroutineScope(Dispatchers.IO).launch {
val previewAndDuration = VideoPlayerHolder.previewsAndDurations.getOrPut(uri) { getBitmapFromVideo(uri) }
val previewAndDuration = VideoPlayerHolder.previewsAndDurations.getOrPut(uri) { getBitmapFromVideo(uri, withAlertOnException = false) }
withContext(Dispatchers.Main) {
preview.value = previewAndDuration.preview ?: defaultPreview
duration.value = (previewAndDuration.duration ?: 0)
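
Not part of the commit — a sketch of the caching pattern this call site relies on and why it passes withAlertOnException = false: the preview is computed once per URI on the IO dispatcher (frame extraction blocks), and a failure while prefilling the cache should log quietly instead of raising an alert for every broken video in a list. PreviewCache and decodePreview are hypothetical names; Dispatchers.Main assumes an Android/Compose main dispatcher is installed.

import kotlinx.coroutines.*
import java.net.URI
import java.util.concurrent.ConcurrentHashMap

object PreviewCache {
    private val cache = ConcurrentHashMap<URI, ByteArray>()

    fun load(scope: CoroutineScope, uri: URI, show: (ByteArray) -> Unit) {
        scope.launch(Dispatchers.IO) {
            // compute once, reuse on every later bind of the same URI
            val preview = cache.getOrPut(uri) { decodePreview(uri) }
            withContext(Dispatchers.Main) { show(preview) }
        }
    }

    private fun decodePreview(uri: URI): ByteArray = ByteArray(0) // stand-in for real frame extraction
}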

View File

@@ -233,17 +233,13 @@ actual fun getFileSize(uri: URI): Long? {
actual fun getBitmapFromUri(uri: URI, withAlertOnException: Boolean): ImageBitmap? {
return if (Build.VERSION.SDK_INT >= 28) {
val source = ImageDecoder.createSource(androidAppContext.contentResolver, uri.toUri())
try {
val source = ImageDecoder.createSource(androidAppContext.contentResolver, uri.toUri())
ImageDecoder.decodeBitmap(source)
} catch (e: android.graphics.ImageDecoder.DecodeException) {
} catch (e: Exception) {
Log.e(TAG, "Unable to decode the image: ${e.stackTraceToString()}")
if (withAlertOnException) {
AlertManager.shared.showAlertMsg(
title = generalGetString(MR.strings.image_decoding_exception_title),
text = generalGetString(MR.strings.image_decoding_exception_desc)
)
}
if (withAlertOnException) showImageDecodingException()
null
}
} else {
@@ -253,17 +249,13 @@ actual fun getBitmapFromUri(uri: URI, withAlertOnException: Boolean): ImageBitma
actual fun getBitmapFromByteArray(data: ByteArray, withAlertOnException: Boolean): ImageBitmap? {
return if (Build.VERSION.SDK_INT >= 31) {
val source = ImageDecoder.createSource(data)
try {
val source = ImageDecoder.createSource(data)
ImageDecoder.decodeBitmap(source)
} catch (e: android.graphics.ImageDecoder.DecodeException) {
Log.e(TAG, "Unable to decode the image: ${e.stackTraceToString()}")
if (withAlertOnException) {
AlertManager.shared.showAlertMsg(
title = generalGetString(MR.strings.image_decoding_exception_title),
text = generalGetString(MR.strings.image_decoding_exception_desc)
)
}
if (withAlertOnException) showImageDecodingException()
null
}
} else {
@@ -273,17 +265,13 @@ actual fun getBitmapFromByteArray(data: ByteArray, withAlertOnException: Boolean
actual fun getDrawableFromUri(uri: URI, withAlertOnException: Boolean): Any? {
return if (Build.VERSION.SDK_INT >= 28) {
val source = ImageDecoder.createSource(androidAppContext.contentResolver, uri.toUri())
try {
val source = ImageDecoder.createSource(androidAppContext.contentResolver, uri.toUri())
ImageDecoder.decodeDrawable(source)
} catch (e: android.graphics.ImageDecoder.DecodeException) {
if (withAlertOnException) {
AlertManager.shared.showAlertMsg(
title = generalGetString(MR.strings.image_decoding_exception_title),
text = generalGetString(MR.strings.image_decoding_exception_desc)
)
}
} catch (e: Exception) {
Log.e(TAG, "Error while decoding drawable: ${e.stackTraceToString()}")
if (withAlertOnException) showImageDecodingException()
null
}
} else {
@@ -304,12 +292,13 @@ actual suspend fun saveTempImageUncompressed(image: ImageBitmap, asPng: Boolean)
ChatModel.filesToDelete.add(this)
}
} catch (e: Exception) {
Log.e(TAG, "Util.kt saveTempImageUncompressed error: ${e.message}")
Log.e(TAG, "Utils.android saveTempImageUncompressed error: ${e.message}")
null
}
}
actual suspend fun getBitmapFromVideo(uri: URI, timestamp: Long?, random: Boolean): VideoPlayerInterface.PreviewAndDuration {
actual suspend fun getBitmapFromVideo(uri: URI, timestamp: Long?, random: Boolean, withAlertOnException: Boolean): VideoPlayerInterface.PreviewAndDuration =
try {
val mmr = MediaMetadataRetriever()
mmr.setDataSource(androidAppContext, uri.toUri())
val durationMs = mmr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)?.toLong()
@@ -319,7 +308,12 @@ actual suspend fun getBitmapFromVideo(uri: URI, timestamp: Long?, random: Boolea
else -> mmr.getFrameAtTime(0)
}
mmr.release()
return VideoPlayerInterface.PreviewAndDuration(image?.asImageBitmap(), durationMs, timestamp ?: 0)
VideoPlayerInterface.PreviewAndDuration(image?.asImageBitmap(), durationMs, timestamp ?: 0)
} catch (e: Exception) {
Log.e(TAG, "Utils.android getBitmapFromVideo error: ${e.message}")
if (withAlertOnException) showVideoDecodingException()
VideoPlayerInterface.PreviewAndDuration(null, 0, 0)
}
actual fun ByteArray.toBase64StringForPassphrase(): String = Base64.encodeToString(this, Base64.DEFAULT)
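
Not part of the commit — the error-handling shape all three decoders above converge on, written once as a generic helper. decodeOrNull is a hypothetical name; Log and TAG are assumed to come from chat.simplex.common.platform as in the file above. Callers choose via withAlertOnException whether a failure is user-visible or silent.

inline fun <T> decodeOrNull(
    label: String,
    withAlertOnException: Boolean,
    showAlert: () -> Unit,
    decode: () -> T
): T? =
    try {
        decode()
    } catch (e: Exception) {
        Log.e(TAG, "$label error: ${e.stackTraceToString()}")
        if (withAlertOnException) showAlert()
        null
    }

// e.g., mirroring getBitmapFromUri above:
// decodeOrNull("getBitmapFromUri", withAlertOnException, ::showImageDecodingException) {
//     ImageDecoder.decodeBitmap(ImageDecoder.createSource(androidAppContext.contentResolver, uri.toUri()))
// }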

View File

@@ -589,19 +589,31 @@ fun ChatInfoToolbar(
if (chat.chatInfo is ChatInfo.Direct && chat.chatInfo.contact.allowsFeature(ChatFeature.Calls)) {
if (activeCall == null) {
barButtons.add {
IconButton(
{
if (appPlatform.isAndroid) {
IconButton({
showMenu.value = false
startCall(CallMediaType.Audio)
},
enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active
}, enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active
) {
Icon(
painterResource(MR.images.ic_call_500),
stringResource(MR.strings.icon_descr_more_button),
stringResource(MR.strings.icon_descr_audio_call).capitalize(Locale.current),
tint = if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) MaterialTheme.colors.primary else MaterialTheme.colors.secondary
)
}
} else {
IconButton({
showMenu.value = false
startCall(CallMediaType.Video)
}, enabled = chat.chatInfo.contact.ready && chat.chatInfo.contact.active
) {
Icon(
painterResource(MR.images.ic_videocam),
stringResource(MR.strings.icon_descr_video_call).capitalize(Locale.current),
tint = if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active) MaterialTheme.colors.primary else MaterialTheme.colors.secondary
)
}
}
}
} else if (activeCall?.contact?.id == chat.id) {
barButtons.add {
@@ -634,10 +646,17 @@ fun ChatInfoToolbar(
}
if (chat.chatInfo.contact.ready && chat.chatInfo.contact.active && activeCall == null) {
menuItems.add {
if (appPlatform.isAndroid) {
ItemAction(stringResource(MR.strings.icon_descr_video_call).capitalize(Locale.current), painterResource(MR.images.ic_videocam), onClick = {
showMenu.value = false
startCall(CallMediaType.Video)
})
} else {
ItemAction(stringResource(MR.strings.icon_descr_audio_call).capitalize(Locale.current), painterResource(MR.images.ic_call_500), onClick = {
showMenu.value = false
startCall(CallMediaType.Audio)
})
}
}
}
} else if (chat.chatInfo is ChatInfo.Group && chat.chatInfo.groupInfo.canAddMembers) {
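
Not part of the commit — the swap the two hunks above implement, condensed (appPlatform and CallMediaType are the diff's own names; the vals are illustrative): on Android the toolbar button starts an audio call and the dropdown menu offers the video call, while on desktop the toolbar button starts the video call and the menu offers audio.

val toolbarCall = if (appPlatform.isAndroid) CallMediaType.Audio else CallMediaType.Video
val menuCall = if (appPlatform.isAndroid) CallMediaType.Video else CallMediaType.Audio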

View File

@@ -178,11 +178,13 @@ fun MutableState<ComposeState>.processPickedFile(uri: URI?, text: String?) {
if (fileName != null) {
value = value.copy(message = text ?: value.message, preview = ComposePreview.FilePreview(fileName, uri))
}
} else {
} else if (fileSize != null) {
AlertManager.shared.showAlertMsg(
generalGetString(MR.strings.large_file),
String.format(generalGetString(MR.strings.maximum_supported_file_size), formatBytes(maxFileSize))
)
} else {
showWrongUriAlert()
}
}
}
@@ -196,7 +198,8 @@ suspend fun MutableState<ComposeState>.processPickedMedia(uris: List<URI>, text:
isImage(uri) -> {
// Image
val drawable = getDrawableFromUri(uri)
bitmap = getBitmapFromUri(uri)
// Do not show the alert if one is already shown by the function above
bitmap = getBitmapFromUri(uri, withAlertOnException = AlertManager.shared.alertViews.isEmpty())
if (isAnimImage(uri, drawable)) {
// It's a gif or webp
val fileSize = getFileSize(uri)
@@ -209,13 +212,13 @@ suspend fun MutableState<ComposeState>.processPickedMedia(uris: List<URI>, text:
String.format(generalGetString(MR.strings.maximum_supported_file_size), formatBytes(maxFileSize))
)
}
} else {
} else if (bitmap != null) {
content.add(UploadContent.SimpleImage(uri))
}
}
else -> {
// Video
val res = getBitmapFromVideo(uri)
val res = getBitmapFromVideo(uri, withAlertOnException = true)
bitmap = res.preview
val durationMs = res.duration
content.add(UploadContent.Video(uri, durationMs?.div(1000)?.toInt() ?: 0))
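
Not part of the commit — the decision order the first hunk above encodes, as a standalone sketch: getFileSize(uri) returning null means the URI could not be opened at all (wrong-URI alert), while a non-null size over the limit is a real file that is merely too large (large-file alert). validatePickedFile and showLargeFileAlert are hypothetical names. The second hunk applies the same "alert once" idea to decoding by passing withAlertOnException = AlertManager.shared.alertViews.isEmpty().

fun validatePickedFile(fileSize: Long?, maxFileSize: Long): Boolean = when {
    fileSize == null -> { showWrongUriAlert(); false }        // could not stat the file at all
    fileSize > maxFileSize -> { showLargeFileAlert(); false } // hypothetical large-file alert
    else -> true
}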

View File

@@ -151,7 +151,7 @@ fun saveAnimImage(uri: URI, encrypted: Boolean): CryptoFile? {
expect suspend fun saveTempImageUncompressed(image: ImageBitmap, asPng: Boolean): File?
fun saveFileFromUri(uri: URI, encrypted: Boolean): CryptoFile? {
fun saveFileFromUri(uri: URI, encrypted: Boolean, withAlertOnException: Boolean = true): CryptoFile? {
return try {
val inputStream = uri.inputStream()
val fileToSave = getFileName(uri)
@@ -170,10 +170,14 @@ fun saveFileFromUri(uri: URI, encrypted: Boolean): CryptoFile? {
}
} else {
Log.e(TAG, "Util.kt saveFileFromUri null inputStream")
if (withAlertOnException) showWrongUriAlert()
null
}
} catch (e: Exception) {
Log.e(TAG, "Util.kt saveFileFromUri error: ${e.stackTraceToString()}")
if (withAlertOnException) showWrongUriAlert()
null
}
}
@@ -267,7 +271,28 @@ fun getMaxFileSize(fileProtocol: FileProtocol): Long {
}
}
expect suspend fun getBitmapFromVideo(uri: URI, timestamp: Long? = null, random: Boolean = true): VideoPlayerInterface.PreviewAndDuration
expect suspend fun getBitmapFromVideo(uri: URI, timestamp: Long? = null, random: Boolean = true, withAlertOnException: Boolean = true): VideoPlayerInterface.PreviewAndDuration
fun showWrongUriAlert() {
AlertManager.shared.showAlertMsg(
title = generalGetString(MR.strings.non_content_uri_alert_title),
text = generalGetString(MR.strings.non_content_uri_alert_text)
)
}
fun showImageDecodingException() {
AlertManager.shared.showAlertMsg(
title = generalGetString(MR.strings.image_decoding_exception_title),
text = generalGetString(MR.strings.image_decoding_exception_desc)
)
}
fun showVideoDecodingException() {
AlertManager.shared.showAlertMsg(
title = generalGetString(MR.strings.image_decoding_exception_title),
text = generalGetString(MR.strings.video_decoding_exception_desc)
)
}
fun Color.darker(factor: Float = 0.1f): Color =
Color(max(red * (1 - factor), 0f), max(green * (1 - factor), 0f), max(blue * (1 - factor), 0f), alpha)
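
Not part of the commit — the Kotlin multiplatform rule the getBitmapFromVideo change above follows: default parameter values may only appear on the expect declaration, so the Android and desktop actuals in this commit repeat the full signature with no "= true". A minimal sketch:

// commonMain
expect suspend fun getBitmapFromVideo(
    uri: URI, timestamp: Long? = null, random: Boolean = true, withAlertOnException: Boolean = true
): VideoPlayerInterface.PreviewAndDuration

// androidMain / desktopMain — defaults are not allowed here, they are inherited from the expect:
// actual suspend fun getBitmapFromVideo(uri: URI, timestamp: Long?, random: Boolean, withAlertOnException: Boolean): VideoPlayerInterface.PreviewAndDuration = ...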

View File

@@ -309,6 +309,7 @@
<string name="videos_limit_desc">Only 10 videos can be sent at the same time</string>
<string name="image_decoding_exception_title">Decoding error</string>
<string name="image_decoding_exception_desc">The image cannot be decoded. Please, try a different image or contact developers.</string>
<string name="video_decoding_exception_desc">The video cannot be decoded. Please, try a different video or contact developers.</string>
<string name="you_are_observer">you are observer</string>
<string name="observer_cant_send_message_title">You can\'t send messages!</string>
<string name="observer_cant_send_message_desc">Please contact group admin.</string>

View File

@@ -11,6 +11,7 @@
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"

View File

@@ -14,6 +14,7 @@ var VideoCamera;
// for debugging
// var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
var sendMessageToNative = (msg) => console.log(JSON.stringify(msg));
var toggleScreenShare = async () => { };
// Global object with cryptographic/encoding functions
const callCrypto = callCryptoFunction();
var TransformOperation;
@@ -24,6 +25,7 @@ var TransformOperation;
let activeCall;
let answerTimeout = 30000;
var useWorker = false;
var isDesktop = false;
var localizedState = "";
var localizedDescription = "";
const processCommand = (function () {
@@ -106,8 +108,24 @@ const processCommand = (function () {
const remoteStream = new MediaStream();
const localCamera = VideoCamera.User;
const localStream = await getLocalMediaStream(mediaType, localCamera);
if (isDesktop) {
localStream
.getTracks()
.filter((elem) => elem.kind == "video")
.forEach((elem) => (elem.enabled = false));
}
const iceCandidates = getIceCandidates(pc, config);
const call = { connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey };
const call = {
connection: pc,
iceCandidates,
localMedia: mediaType,
localCamera,
localStream,
remoteStream,
aesKey,
screenShareEnabled: false,
cameraEnabled: true,
};
await setupMediaStreams(call);
let connectionTimeout = setTimeout(connectionHandler, answerTimeout);
pc.addEventListener("connectionstatechange", connectionStateChange);
@@ -430,12 +448,31 @@ const processCommand = (function () {
if (!videos)
throw Error("no video elements");
const pc = call.connection;
const oldAudioTracks = call.localStream.getAudioTracks();
const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled);
let localStream;
try {
localStream = call.screenShareEnabled ? await getLocalScreenCaptureStream() : await getLocalMediaStream(call.localMedia, camera);
}
catch (e) {
if (call.screenShareEnabled) {
call.screenShareEnabled = false;
}
return;
}
for (const t of call.localStream.getTracks())
t.stop();
call.localCamera = camera;
const localStream = await getLocalMediaStream(call.localMedia, camera);
replaceTracks(pc, localStream.getVideoTracks());
replaceTracks(pc, localStream.getAudioTracks());
const audioTracks = localStream.getAudioTracks();
const videoTracks = localStream.getVideoTracks();
if (!audioWasEnabled && oldAudioTracks.length > 0) {
audioTracks.forEach((elem) => (elem.enabled = false));
}
if (!call.cameraEnabled && !call.screenShareEnabled) {
videoTracks.forEach((elem) => (elem.enabled = false));
}
replaceTracks(pc, audioTracks);
replaceTracks(pc, videoTracks);
call.localStream = localStream;
videos.local.srcObject = localStream;
}
@@ -472,6 +509,21 @@ const processCommand = (function () {
const constraints = callMediaConstraints(mediaType, facingMode);
return navigator.mediaDevices.getUserMedia(constraints);
}
function getLocalScreenCaptureStream() {
const constraints /* DisplayMediaStreamConstraints */ = {
video: {
frameRate: 24,
//width: {
//min: 480,
//ideal: 720,
//max: 1280,
//},
//aspectRatio: 1.33,
},
audio: true,
};
return navigator.mediaDevices.getDisplayMedia(constraints);
}
function callMediaConstraints(mediaType, facingMode) {
switch (mediaType) {
case CallMediaType.Audio:
@@ -526,9 +578,23 @@ const processCommand = (function () {
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
for (const t of tracks)
t.enabled = enable;
if (media == CallMediaType.Video && activeCall) {
activeCall.cameraEnabled = enable;
}
}
toggleScreenShare = async function () {
const call = activeCall;
if (!call)
return;
call.screenShareEnabled = !call.screenShareEnabled;
await replaceMedia(call, call.localCamera);
};
return processCommand;
})();
function toggleRemoteVideoFitFill() {
const remote = document.getElementById("remote-video-stream");
remote.style.objectFit = remote.style.objectFit != "contain" ? "contain" : "cover";
}
function toggleMedia(s, media) {
let res = false;
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks();
@@ -536,6 +602,9 @@ function toggleMedia(s, media) {
t.enabled = !t.enabled;
res = t.enabled;
}
if (media == CallMediaType.Video && activeCall) {
activeCall.cameraEnabled = res;
}
return res;
}
// Cryptography function - it is loaded both in the main window and in worker context (if the worker is used)

View File

@@ -12,6 +12,7 @@
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"
@@ -29,6 +30,9 @@
<img src="/desktop/images/ic_phone_in_talk.svg" />
</div>
<p id="manage-call">
<button id="toggle-screen" style="display: none" onclick="javascript:toggleScreenManually()">
<img src="/desktop/images/ic_screen_share.svg" />
</button>
<button id="toggle-audio" style="display: none" onclick="javascript:toggleAudioManually()">
<img src="/desktop/images/ic_mic.svg" />
</button>
@@ -39,7 +43,7 @@
<img src="/desktop/images/ic_volume_up.svg" />
</button>
<button id="toggle-video" style="display: none" onclick="javascript:toggleVideoManually()">
<img src="/desktop/images/ic_videocam_filled.svg" />
<img src="/desktop/images/ic_videocam_off.svg" />
</button>
</p>
</body>

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="M335.5-388H393v-89q0-23.875 16.35-40.438Q425.7-534 450.175-534H530v68l97-96.5-97-97v68h-80.077q-47.756 0-81.09 33.396Q335.5-524.708 335.5-477v89ZM74-126q-12.25 0-20.625-8.425Q45-142.851 45-154.925 45-167 53.375-175.25T74-183.5h812.5q11.675 0 20.088 8.463Q915-166.574 915-154.825q0 12.325-8.412 20.575Q898.175-126 886.5-126H74Zm68.5-117q-22.969 0-40.234-17.266Q85-277.531 85-300.5V-777q0-22.969 17.266-40.234Q119.531-834.5 142.5-834.5h675q22.969 0 40.234 17.266Q875-799.969 875-777v476.5q0 22.969-17.266 40.234Q840.469-243 817.5-243h-675Zm0-57.5h675V-777h-675v476.5Zm0 0V-777v476.5Z"/></svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="m549-484.5-107-107h88v-68l97 97-78 78ZM799.5-233 741-291.5h70.5V-771h-550L204-828.5h607.5q22.969 0 40.234 17.266Q869-793.969 869-771v479.53q0 26.088-20.25 43.779Q828.5-230 799.5-233Zm36 200.5L737-131H45v-57.5h635L634.5-234h-485q-22.969 0-40.234-17.266Q92-268.531 92-291.5v-481.25q0-2.75.5-3.75l-56-55L78-873 877-74l-41.5 41.5ZM393-475.5v88h-57.5V-477q0-10 2.25-22.5t7.25-23.534L149.5-719v427.5H577l-184-184ZM502-532Zm-138 26.5Z"/></svg>


View File

@@ -1,6 +1,7 @@
"use strict";
// Override defaults to enable worker on Chrome and Safari
useWorker = typeof window.Worker !== "undefined";
isDesktop = true;
// Create WebSocket connection.
const socket = new WebSocket(`ws://${location.host}`);
socket.addEventListener("open", (_event) => {
@@ -42,11 +43,28 @@ function toggleSpeakerManually() {
}
function toggleVideoManually() {
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.localMedia) {
document.getElementById("toggle-video").innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Video)
let res;
if (activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled) {
activeCall.cameraEnabled = !activeCall.cameraEnabled;
res = activeCall.cameraEnabled;
}
else {
res = toggleMedia(activeCall.localStream, CallMediaType.Video);
}
document.getElementById("toggle-video").innerHTML = res
? '<img src="/desktop/images/ic_videocam_filled.svg" />'
: '<img src="/desktop/images/ic_videocam_off.svg" />';
}
}
async function toggleScreenManually() {
const was = activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled;
await toggleScreenShare();
if (was != (activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled)) {
document.getElementById("toggle-screen").innerHTML = (activeCall === null || activeCall === void 0 ? void 0 : activeCall.screenShareEnabled)
? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
: '<img src="/desktop/images/ic_screen_share.svg" />';
}
}
function reactOnMessageFromServer(msg) {
var _a;
switch ((_a = msg.command) === null || _a === void 0 ? void 0 : _a.type) {
@@ -57,8 +75,9 @@ function reactOnMessageFromServer(msg) {
case "start":
document.getElementById("toggle-audio").style.display = "inline-block";
document.getElementById("toggle-speaker").style.display = "inline-block";
if (msg.command.media == "video") {
if (msg.command.media == CallMediaType.Video) {
document.getElementById("toggle-video").style.display = "inline-block";
document.getElementById("toggle-screen").style.display = "inline-block";
}
document.getElementById("info-block").className = msg.command.media;
break;

View File

@@ -189,7 +189,7 @@ actual class VideoPlayer actual constructor(
private fun setPreviewAndDuration() {
// It freezes the main thread, so do it on the IO thread
CoroutineScope(Dispatchers.IO).launch {
val previewAndDuration = VideoPlayerHolder.previewsAndDurations.getOrPut(uri) { getBitmapFromVideo(defaultPreview, uri) }
val previewAndDuration = VideoPlayerHolder.previewsAndDurations.getOrPut(uri) { getBitmapFromVideo(defaultPreview, uri, withAlertOnException = false) }
withContext(Dispatchers.Main) {
preview.value = previewAndDuration.preview ?: defaultPreview
duration.value = (previewAndDuration.duration ?: 0)
@@ -214,10 +214,12 @@
}
}
suspend fun getBitmapFromVideo(defaultPreview: ImageBitmap?, uri: URI?): VideoPlayerInterface.PreviewAndDuration = withContext(playerThread.asCoroutineDispatcher()) {
suspend fun getBitmapFromVideo(defaultPreview: ImageBitmap?, uri: URI?, withAlertOnException: Boolean = true): VideoPlayerInterface.PreviewAndDuration = withContext(playerThread.asCoroutineDispatcher()) {
val mediaComponent = getOrCreateHelperPlayer()
val player = mediaComponent.mediaPlayer()
if (uri == null || !File(uri.rawPath).exists()) {
if (withAlertOnException) showVideoDecodingException()
return@withContext VideoPlayerInterface.PreviewAndDuration(preview = defaultPreview, timestamp = 0L, duration = 0L)
}
player.media().startPaused(uri.toString().replaceFirst("file:", "file://"))
@@ -227,7 +229,14 @@
snap = player.snapshots()?.get()
delay(10)
}
val orientation = player.media().info().videoTracks().first().orientation()
val orientation = player.media().info().videoTracks().firstOrNull()?.orientation()
if (orientation == null) {
player.stop()
putHelperPlayer(mediaComponent)
if (withAlertOnException) showVideoDecodingException()
return@withContext VideoPlayerInterface.PreviewAndDuration(preview = defaultPreview, timestamp = 0L, duration = 0L)
}
val preview: ImageBitmap? = when (orientation) {
VideoOrientation.TOP_LEFT -> snap
VideoOrientation.TOP_RIGHT -> snap?.flip(false, true)
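
Not part of the commit — why the .first() -> .firstOrNull() change above matters: List.first() throws NoSuchElementException on an empty list, so a file with no video tracks (audio-only or corrupt) would have crashed the preview coroutine, while firstOrNull() returns null and lets the code release the helper player and fall back to the default preview. A minimal illustration:

val videoTracks = emptyList<String>()        // e.g. VLC reports no video tracks
// val bad = videoTracks.first()             // would throw NoSuchElementException
val orientation = videoTracks.firstOrNull()  // null -> clean fallback path
if (orientation == null) {
    // stop the player, return the default preview, optionally alert
}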

View File

@@ -9,6 +9,7 @@ import chat.simplex.common.model.CIFile
import chat.simplex.common.model.readCryptoFile
import chat.simplex.common.platform.*
import chat.simplex.common.simplexWindowState
import chat.simplex.res.MR
import java.io.ByteArrayInputStream
import java.io.File
import java.net.URI
@@ -108,10 +109,24 @@ actual fun getAppFilePath(uri: URI): String? = uri.path
actual fun getFileSize(uri: URI): Long? = uri.toPath().toFile().length()
actual fun getBitmapFromUri(uri: URI, withAlertOnException: Boolean): ImageBitmap? =
try {
ImageIO.read(uri.inputStream()).toComposeImageBitmap()
} catch (e: Exception) {
Log.e(TAG, "Error while decoding drawable: ${e.stackTraceToString()}")
if (withAlertOnException) showImageDecodingException()
null
}
actual fun getBitmapFromByteArray(data: ByteArray, withAlertOnException: Boolean): ImageBitmap? =
try {
ImageIO.read(ByteArrayInputStream(data)).toComposeImageBitmap()
} catch (e: Exception) {
Log.e(TAG, "Error while encoding bitmap from byte array: ${e.stackTraceToString()}")
if (withAlertOnException) showImageDecodingException()
null
}
// LALAL implement to support animated drawable
actual fun getDrawableFromUri(uri: URI, withAlertOnException: Boolean): Any? = null
@@ -132,8 +147,8 @@ actual suspend fun saveTempImageUncompressed(image: ImageBitmap, asPng: Boolean)
} else null
}
actual suspend fun getBitmapFromVideo(uri: URI, timestamp: Long?, random: Boolean): VideoPlayerInterface.PreviewAndDuration {
return VideoPlayer.getBitmapFromVideo(null, uri)
actual suspend fun getBitmapFromVideo(uri: URI, timestamp: Long?, random: Boolean, withAlertOnException: Boolean): VideoPlayerInterface.PreviewAndDuration {
return VideoPlayer.getBitmapFromVideo(null, uri, withAlertOnException)
}
@OptIn(ExperimentalEncodingApi::class)
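
Not part of the commit — one subtlety in the desktop decoders above: javax.imageio.ImageIO.read returns null (rather than throwing) when no registered reader understands the bytes; that null then fails on .toComposeImageBitmap() inside the same try, so it is still reported as a decoding error. A sketch that makes the null path explicit (decodeImageOrNull is a hypothetical name):

import java.io.File
import java.awt.image.BufferedImage
import javax.imageio.ImageIO

fun decodeImageOrNull(file: File): BufferedImage? =
    try {
        ImageIO.read(file) // null if no ImageReader claims the format
    } catch (e: Exception) {
        null // unreadable stream or IO error
    }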

View File

@@ -11,6 +11,7 @@
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"

View File

@@ -165,6 +165,7 @@ interface ConnectionInfo {
// for debugging
// var sendMessageToNative = ({resp}: WVApiMessage) => console.log(JSON.stringify({command: resp}))
var sendMessageToNative = (msg: WVApiMessage) => console.log(JSON.stringify(msg))
var toggleScreenShare = async () => {}
// Global object with cryptographic/encoding functions
const callCrypto = callCryptoFunction()
@@ -193,6 +194,8 @@ interface Call {
localCamera: VideoCamera
localStream: MediaStream
remoteStream: MediaStream
screenShareEnabled: boolean
cameraEnabled: boolean
aesKey?: string
worker?: Worker
key?: CryptoKey
@@ -201,6 +204,7 @@
let activeCall: Call | undefined
let answerTimeout = 30_000
var useWorker = false
var isDesktop = false
var localizedState = ""
var localizedDescription = ""
@@ -308,8 +312,24 @@ const processCommand = (function () {
const remoteStream = new MediaStream()
const localCamera = VideoCamera.User
const localStream = await getLocalMediaStream(mediaType, localCamera)
if (isDesktop) {
localStream
.getTracks()
.filter((elem) => elem.kind == "video")
.forEach((elem) => (elem.enabled = false))
}
const iceCandidates = getIceCandidates(pc, config)
const call = {connection: pc, iceCandidates, localMedia: mediaType, localCamera, localStream, remoteStream, aesKey}
const call = {
connection: pc,
iceCandidates,
localMedia: mediaType,
localCamera,
localStream,
remoteStream,
aesKey,
screenShareEnabled: false,
cameraEnabled: true,
}
await setupMediaStreams(call)
let connectionTimeout: number | undefined = setTimeout(connectionHandler, answerTimeout)
pc.addEventListener("connectionstatechange", connectionStateChange)
@@ -626,11 +646,31 @@ const processCommand = (function () {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
const pc = call.connection
const oldAudioTracks = call.localStream.getAudioTracks()
const audioWasEnabled = oldAudioTracks.some((elem) => elem.enabled)
let localStream: MediaStream
try {
localStream = call.screenShareEnabled ? await getLocalScreenCaptureStream() : await getLocalMediaStream(call.localMedia, camera)
} catch (e: any) {
if (call.screenShareEnabled) {
call.screenShareEnabled = false
}
return
}
for (const t of call.localStream.getTracks()) t.stop()
call.localCamera = camera
const localStream = await getLocalMediaStream(call.localMedia, camera)
replaceTracks(pc, localStream.getVideoTracks())
replaceTracks(pc, localStream.getAudioTracks())
const audioTracks = localStream.getAudioTracks()
const videoTracks = localStream.getVideoTracks()
if (!audioWasEnabled && oldAudioTracks.length > 0) {
audioTracks.forEach((elem) => (elem.enabled = false))
}
if (!call.cameraEnabled && !call.screenShareEnabled) {
videoTracks.forEach((elem) => (elem.enabled = false))
}
replaceTracks(pc, audioTracks)
replaceTracks(pc, videoTracks)
call.localStream = localStream
videos.local.srcObject = localStream
}
@@ -671,6 +711,22 @@ const processCommand = (function () {
return navigator.mediaDevices.getUserMedia(constraints)
}
function getLocalScreenCaptureStream(): Promise<MediaStream> {
const constraints: any /* DisplayMediaStreamConstraints */ = {
video: {
frameRate: 24,
//width: {
//min: 480,
//ideal: 720,
//max: 1280,
//},
//aspectRatio: 1.33,
},
audio: true,
}
return navigator.mediaDevices.getDisplayMedia(constraints)
}
function callMediaConstraints(mediaType: CallMediaType, facingMode: VideoCamera): MediaStreamConstraints {
switch (mediaType) {
case CallMediaType.Audio:
@@ -735,10 +791,26 @@ const processCommand = (function () {
function enableMedia(s: MediaStream, media: CallMediaType, enable: boolean) {
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
for (const t of tracks) t.enabled = enable
if (media == CallMediaType.Video && activeCall) {
activeCall.cameraEnabled = enable
}
}
toggleScreenShare = async function () {
const call = activeCall
if (!call) return
call.screenShareEnabled = !call.screenShareEnabled
await replaceMedia(call, call.localCamera)
}
return processCommand
})()
function toggleRemoteVideoFitFill() {
const remote = document.getElementById("remote-video-stream")!
remote.style.objectFit = remote.style.objectFit != "contain" ? "contain" : "cover"
}
function toggleMedia(s: MediaStream, media: CallMediaType): boolean {
let res = false
const tracks = media == CallMediaType.Video ? s.getVideoTracks() : s.getAudioTracks()
@@ -746,6 +818,9 @@ function toggleMedia(s: MediaStream, media: CallMediaType): boolean {
t.enabled = !t.enabled
res = t.enabled
}
if (media == CallMediaType.Video && activeCall) {
activeCall.cameraEnabled = res
}
return res
}

View File

@@ -12,6 +12,7 @@
autoplay
playsinline
poster="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAQAAAAnOwc2AAAAEUlEQVR42mNk+M+AARiHsiAAcCIKAYwFoQ8AAAAASUVORK5CYII="
onclick="javascript:toggleRemoteVideoFitFill()"
></video>
<video
id="local-video-stream"
@@ -29,6 +30,9 @@
<img src="/desktop/images/ic_phone_in_talk.svg" />
</div>
<p id="manage-call">
<button id="toggle-screen" style="display: none" onclick="javascript:toggleScreenManually()">
<img src="/desktop/images/ic_screen_share.svg" />
</button>
<button id="toggle-audio" style="display: none" onclick="javascript:toggleAudioManually()">
<img src="/desktop/images/ic_mic.svg" />
</button>
@@ -39,7 +43,7 @@
<img src="/desktop/images/ic_volume_up.svg" />
</button>
<button id="toggle-video" style="display: none" onclick="javascript:toggleVideoManually()">
<img src="/desktop/images/ic_videocam_filled.svg" />
<img src="/desktop/images/ic_videocam_off.svg" />
</button>
</p>
</body>

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="M335.5-388H393v-89q0-23.875 16.35-40.438Q425.7-534 450.175-534H530v68l97-96.5-97-97v68h-80.077q-47.756 0-81.09 33.396Q335.5-524.708 335.5-477v89ZM74-126q-12.25 0-20.625-8.425Q45-142.851 45-154.925 45-167 53.375-175.25T74-183.5h812.5q11.675 0 20.088 8.463Q915-166.574 915-154.825q0 12.325-8.412 20.575Q898.175-126 886.5-126H74Zm68.5-117q-22.969 0-40.234-17.266Q85-277.531 85-300.5V-777q0-22.969 17.266-40.234Q119.531-834.5 142.5-834.5h675q22.969 0 40.234 17.266Q875-799.969 875-777v476.5q0 22.969-17.266 40.234Q840.469-243 817.5-243h-675Zm0-57.5h675V-777h-675v476.5Zm0 0V-777v476.5Z"/></svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" height="44" viewBox="0 -960 960 960" width="44"><path fill="white" d="m549-484.5-107-107h88v-68l97 97-78 78ZM799.5-233 741-291.5h70.5V-771h-550L204-828.5h607.5q22.969 0 40.234 17.266Q869-793.969 869-771v479.53q0 26.088-20.25 43.779Q828.5-230 799.5-233Zm36 200.5L737-131H45v-57.5h635L634.5-234h-485q-22.969 0-40.234-17.266Q92-268.531 92-291.5v-481.25q0-2.75.5-3.75l-56-55L78-873 877-74l-41.5 41.5ZM393-475.5v88h-57.5V-477q0-10 2.25-22.5t7.25-23.534L149.5-719v427.5H577l-184-184ZM502-532Zm-138 26.5Z"/></svg>


View File

@@ -1,5 +1,6 @@
// Override defaults to enable worker on Chrome and Safari
useWorker = typeof window.Worker !== "undefined"
isDesktop = true
// Create WebSocket connection.
const socket = new WebSocket(`ws://${location.host}`)
@@ -49,12 +50,29 @@ function toggleSpeakerManually() {
function toggleVideoManually() {
if (activeCall?.localMedia) {
document.getElementById("toggle-video")!!.innerHTML = toggleMedia(activeCall.localStream, CallMediaType.Video)
let res: boolean
if (activeCall?.screenShareEnabled) {
activeCall.cameraEnabled = !activeCall.cameraEnabled
res = activeCall.cameraEnabled
} else {
res = toggleMedia(activeCall.localStream, CallMediaType.Video)
}
document.getElementById("toggle-video")!!.innerHTML = res
? '<img src="/desktop/images/ic_videocam_filled.svg" />'
: '<img src="/desktop/images/ic_videocam_off.svg" />'
}
}
async function toggleScreenManually() {
const was = activeCall?.screenShareEnabled
await toggleScreenShare()
if (was != activeCall?.screenShareEnabled) {
document.getElementById("toggle-screen")!!.innerHTML = activeCall?.screenShareEnabled
? '<img src="/desktop/images/ic_stop_screen_share.svg" />'
: '<img src="/desktop/images/ic_screen_share.svg" />'
}
}
function reactOnMessageFromServer(msg: WVApiMessage) {
switch (msg.command?.type) {
case "capabilities":
@@ -64,8 +82,9 @@ function reactOnMessageFromServer(msg: WVApiMessage) {
case "start":
document.getElementById("toggle-audio")!!.style.display = "inline-block"
document.getElementById("toggle-speaker")!!.style.display = "inline-block"
if (msg.command.media == "video") {
if (msg.command.media == CallMediaType.Video) {
document.getElementById("toggle-video")!!.style.display = "inline-block"
document.getElementById("toggle-screen")!!.style.display = "inline-block"
}
document.getElementById("info-block")!!.className = msg.command.media
break