Compare commits

..

92 Commits

Author SHA1 Message Date
John Preston
887b6629f6 Version 2.3: Fix emoji fingerprint on Retina. 2020-08-15 00:02:26 +04:00
John Preston
b6a66bbf1b Version 2.3.
- Video Calls (alpha version).
2020-08-14 20:20:59 +04:00
John Preston
a0eb073728 Close call panel instead of hiding. 2020-08-14 19:59:24 +04:00
John Preston
34840766b2 Move fingerprint / signal bars to a separate widget. 2020-08-14 16:53:06 +04:00
John Preston
1aaf7df54a Add labels to call buttons. 2020-08-14 16:53:06 +04:00
John Preston
1b60829da8 Add top window controls shadow. 2020-08-14 16:53:06 +04:00
John Preston
fbe02dbb8d Add bottom shadow to the incoming video. 2020-08-14 16:53:06 +04:00
John Preston
cb18c9a9b3 Show remote audio muted tooltip. 2020-08-14 16:53:06 +04:00
John Preston
a106d80a41 Fix accept call button outer ripple. 2020-08-14 16:53:06 +04:00
John Preston
465c661c45 Show peer's microphone mute state on userpic. 2020-08-14 16:53:06 +04:00
John Preston
8af40c22a4 Improve mute button styles. 2020-08-14 16:53:06 +04:00
John Preston
f1b6d1fdae Improve top controls design on Windows. 2020-08-14 16:53:06 +04:00
John Preston
476b9c44c1 Closed alpha version 2.2.0.2. 2020-08-13 15:42:23 +04:00
John Preston
377ff2f421 Use expanding incoming frame scale if aspect is good. 2020-08-13 15:37:00 +04:00
John Preston
cdc87086f3 Add some paddings. 2020-08-13 15:16:39 +04:00
John Preston
99f3173ae6 Use smaller emoji. 2020-08-13 14:39:55 +04:00
John Preston
7de5cabd79 Add window controls to call panel on Windows. 2020-08-13 14:32:11 +04:00
John Preston
925f6df06a Fix call panel behaviour on macOS. 2020-08-13 13:02:15 +04:00
John Preston
c93d3ae924 Toggle fullscreen by double click. 2020-08-12 20:58:24 +04:00
John Preston
537645c282 Hide window title for calls panel. 2020-08-12 20:58:05 +04:00
John Preston
38b9111bf5 Use Ui::Window for the Calls::Panel. 2020-08-12 17:35:31 +04:00
John Preston
79feb0c6d9 Show video icon in call bubble in history. 2020-08-12 12:47:19 +04:00
John Preston
97fe05c7ed Mirror outgoing video horizontally. 2020-08-12 12:47:07 +04:00
John Preston
e01bf8e1cd Closed alpha version 2.2.0.1. 2020-08-11 21:33:15 +04:00
John Preston
369ec46064 Allow building without WebRTC. 2020-08-11 21:33:15 +04:00
John Preston
13f2ceaf47 Update building docs. 2020-08-11 18:21:40 +04:00
John Preston
eda22b925f Fix build on Linux 64 bit. 2020-08-11 18:21:40 +04:00
John Preston
bd16708781 Update tgcalls library. 2020-08-11 18:21:40 +04:00
John Preston
38546c701a Round outgoing video preview. 2020-08-11 18:21:40 +04:00
John Preston
4971e281fa Improve design for all controls states. 2020-08-11 18:21:40 +04:00
John Preston
d4b8fa70a7 Extract Calls::Userpic and Calls::VideoBubble. 2020-08-11 18:21:40 +04:00
John Preston
95de762529 Support three-value VideoState. 2020-08-11 18:21:40 +04:00
John Preston
a89634b767 Start call panel redesign. 2020-08-11 18:21:40 +04:00
John Preston
4bf6550e24 Update API scheme to layer 117. 2020-08-11 18:21:40 +04:00
John Preston
83759adb5f Start video call from video call service message. 2020-08-11 18:21:40 +04:00
John Preston
6f90e57523 Support special video calls service messages. 2020-08-11 18:21:40 +04:00
John Preston
aba8f72c36 Fix camera start. 2020-08-11 18:21:40 +04:00
John Preston
35dfb9fab3 Show outgoing video stream. 2020-08-11 18:21:40 +04:00
John Preston
b692242012 Allow reference tgcalls implementation. 2020-08-11 18:21:40 +04:00
John Preston
c1f727bde9 Update tgcalls to use new protocol. 2020-08-11 18:21:40 +04:00
John Preston
f3808bdc24 Start using webrtc::VideoTrack. 2020-08-11 18:21:40 +04:00
John Preston
16177eae2b Simplify frame presentation. 2020-08-11 18:21:40 +04:00
John Preston
da3bbba497 Allow enable/disable video in a call. 2020-08-11 18:21:40 +04:00
John Preston
a4ee90e8c6 Remove webrtc test code usage on Windows. 2020-08-11 18:21:40 +04:00
John Preston
7fec49a752 Remove webrtc test code usage on macOS. 2020-08-11 18:21:40 +04:00
John Preston
78c9c1e7f8 Update tgcalls library. 2020-08-11 18:21:40 +04:00
John Preston
662424319c Fix build on Windows. 2020-08-11 18:21:40 +04:00
John Preston
5c55f31972 Use test capture from webrtc on macOS. 2020-08-11 18:21:40 +04:00
John Preston
29d42a6936 Fix runtime errors in macOS. 2020-08-11 18:21:40 +04:00
John Preston
46550381a4 Fix build on macOS. 2020-08-11 18:21:40 +04:00
John Preston
6c272e38ad Update tgcalls to support codecs negotiation. 2020-08-11 18:21:40 +04:00
John Preston
0a019411ee Use typed signaling messages. 2020-08-11 18:21:40 +04:00
John Preston
815300bffc Fix video between tdesktop instances. 2020-08-11 18:21:40 +04:00
John Preston
12e0399cf4 Add tgcalls library. 2020-08-11 18:21:40 +04:00
John Preston
b703f4e555 Fix build on Linux. 2020-08-11 18:21:40 +04:00
John Preston
71040464c5 Request camera permissions on macOS. 2020-08-11 18:21:40 +04:00
John Preston
6d36176a8d Add test implementation of webrtc calls. 2020-08-11 18:21:40 +04:00
John Preston
438a560a79 Link tdesktop with external_webrtc. 2020-08-11 18:21:40 +04:00
John Preston
145ace2fa0 Use external_webrtc in libtgvoip. 2020-08-11 18:21:40 +04:00
John Preston
4ba7de8df1 Fix build on Windows. 2020-08-11 18:21:40 +04:00
23rd
b6c70572a9 Fixed Github CI macOS build. 2020-08-11 18:20:43 +04:00
23rd
b12256f1ee Added ability to schedule polls from section of scheduled messages.
Fixed #7433.
2020-08-11 18:13:40 +04:00
23rd
14cda49db2 Moved code for send context menu to namespace. 2020-08-11 18:13:40 +04:00
23rd
af9440db38 Slightly refactored InlineBots::Layout::Widget. 2020-08-11 18:13:40 +04:00
23rd
1eea07d88a Added ability to schedule and send silently stickers from autocomplete. 2020-08-11 18:13:40 +04:00
23rd
cf56658664 Slightly refactored FieldAutocomplete. 2020-08-11 18:13:40 +04:00
23rd
2ac425f350 Moved default send callbacks to separate functions. 2020-08-11 18:13:40 +04:00
23rd
edceed28d7 Moved code for send context menu to separate file. 2020-08-11 18:13:40 +04:00
23rd
1fee7d1a41 Added ability to schedule and send silently media from inline bots. 2020-08-11 18:13:40 +04:00
23rd
d1d153e886 Added Api::SendOptions passing from TabbedSelector for inline results. 2020-08-11 18:13:40 +04:00
23rd
5b95585725 Added other items to TabbedPanel's context menu. 2020-08-11 18:13:40 +04:00
23rd
43056107fd Added ability to pass SendMenuType to TabbedSelector. 2020-08-11 18:13:40 +04:00
23rd
7db9843543 Added initial context menu to TabbedPanel.
Added ability to schedule and send silently stickers and GIFs.
2020-08-11 18:13:40 +04:00
23rd
a95a324401 Separated send menu filling to another method. 2020-08-11 18:13:40 +04:00
23rd
759e802eba Added ability to pass Api::SendOptions from TabbedSelector. 2020-08-11 18:13:40 +04:00
23rd
b71062561a Added ability to drag'n'drop videos in SendFilesBox. 2020-08-11 18:13:09 +04:00
23rd
2576312cd4 Fixed autodownloading of dictionaries at logout. 2020-08-11 18:13:09 +04:00
23rd
972bbbce6a Fixed online status stuck when switching between accounts one more time.
The first attempt to fix this bug is here: 8171ed6c12.
It caused a crash, so it was reverted here: 2ef47222f4.
2020-08-11 12:28:41 +03:00
Ilya Fedin
65e2bbee3e Override cursor only if no buttons pressed 2020-08-11 12:58:07 +04:00
ilya-fedin
bbe2adc44f Fix AppImage overwrite by updater 2020-08-11 10:58:17 +04:00
Ilya Fedin
449fa0ef2c Don't focus parent window after opening file dialog 2020-08-11 10:57:11 +04:00
Ilya Fedin
dc9d0aae54 Add support for showing window menu on Wayland 2020-08-11 10:56:29 +04:00
Ilya Fedin
08b67e383e Add -D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF to docs & actions 2020-08-11 10:55:50 +04:00
Ilya Fedin
4e5e30a8dd Update fcitx5-qt 2020-08-11 10:55:50 +04:00
Ilya Fedin
cbeaee24c9 Update submodules & patches commit 2020-08-11 10:55:50 +04:00
Ilya Fedin
26bedd95d7 DESKTOP_APP_USE_PACKAGED_FONTS respects DESKTOP_APP_USE_PACKAGED_LAZY now 2020-08-11 10:55:50 +04:00
Ilya Fedin
252bdd2353 TDESKTOP_DISABLE_DBUS_INTEGRATION -> DESKTOP_APP_DISABLE_DBUS_INTEGRATION 2020-08-11 10:55:50 +04:00
John Preston
361b99b0c9 Use threaded decoding in ffmpeg streaming. 2020-07-28 20:14:45 +04:00
John Preston
e293a26029 Fix crash in incorrect passcode input. 2020-07-28 20:14:45 +04:00
Magnus Groß
4f3f1e18a2 Export chats with ISO 8601 dates
Fixes #6020
2020-07-27 09:29:25 +04:00
John Preston
188b98b4d8 Update cmake_helpers submodule. 2020-07-27 09:28:12 +04:00
John Preston
fe639078a6 Version 2.2: Fix default-night mode. 2020-07-26 16:55:04 +04:00
186 changed files with 4258 additions and 1569 deletions

View File

@@ -423,8 +423,8 @@ jobs:
git clone -b v5.12.8 --depth=1 git://code.qt.io/qt/qt5.git qt_${QT}
cd qt_${QT}
perl init-repository --module-subset=qtbase,qtwayland,qtimageformats,qtsvg,qtx11extras
git submodule update qtbase qtwayland qtimageformats qtsvg qtx11extras
perl init-repository --module-subset=qtbase,qtwayland,qtimageformats,qtsvg
git submodule update qtbase qtwayland qtimageformats qtsvg
cd qtbase
find ../../patches/qtbase_${QT} -type f -print0 | sort -z | xargs -r0 git apply
cd ..
@@ -530,6 +530,7 @@ jobs:
-D CMAKE_CXX_FLAGS="-s" \
-D TDESKTOP_API_TEST=ON \
-D DESKTOP_APP_USE_PACKAGED=OFF \
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF \
$DEFINE
cd ../out/Debug

View File

@@ -79,6 +79,7 @@ jobs:
- name: First set up.
run: |
sudo chown -R `whoami`:admin /usr/local/share
brew install automake fdk-aac lame libass libtool libvorbis libvpx \
ninja opus sdl shtool texi2html theora x264 xvid yasm pkg-config
@@ -462,7 +463,11 @@ jobs:
echo ::set-env name=ARTIFACT_NAME::Telegram
fi
./configure.sh -D TDESKTOP_API_TEST=ON -D DESKTOP_APP_USE_PACKAGED=OFF $DEFINE
./configure.sh \
-D TDESKTOP_API_TEST=ON \
-D DESKTOP_APP_USE_PACKAGED=OFF \
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF \
$DEFINE
cd ../out

View File

@@ -365,6 +365,7 @@ jobs:
call configure.bat ^
-D TDESKTOP_API_TEST=ON ^
-D DESKTOP_APP_USE_PACKAGED=OFF ^
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF ^
%TDESKTOP_BUILD_DEFINE% ^
-DCMAKE_SYSTEM_VERSION=%SDK%

.gitmodules (vendored), 6 lines changed
View File

@@ -94,3 +94,9 @@
[submodule "Telegram/ThirdParty/fcitx5-qt"]
path = Telegram/ThirdParty/fcitx5-qt
url = https://github.com/fcitx/fcitx5-qt.git
[submodule "Telegram/lib_webrtc"]
path = Telegram/lib_webrtc
url = https://github.com/desktop-app/lib_webrtc.git
[submodule "Telegram/ThirdParty/tgcalls"]
path = Telegram/ThirdParty/tgcalls
url = https://github.com/TelegramMessenger/tgcalls.git

View File

@@ -18,6 +18,7 @@ endif()
add_subdirectory(lib_storage)
add_subdirectory(lib_lottie)
add_subdirectory(lib_qr)
add_subdirectory(lib_webrtc)
add_subdirectory(codegen)
include(CheckCXXSourceCompiles)
@@ -34,6 +35,7 @@ include(cmake/lib_ffmpeg.cmake)
include(cmake/lib_mtproto.cmake)
include(cmake/lib_scheme.cmake)
include(cmake/lib_tgvoip.cmake)
include(cmake/lib_tgcalls.cmake)
set(style_files
boxes/boxes.style
@@ -101,10 +103,13 @@ endif()
target_link_libraries(Telegram
PRIVATE
tdesktop::lib_tgcalls_legacy
tdesktop::lib_tgcalls
tdesktop::lib_tgvoip
tdesktop::lib_mtproto
tdesktop::lib_scheme
tdesktop::lib_export
tdesktop::lib_tgvoip
desktop-app::lib_webrtc
desktop-app::lib_base
desktop-app::lib_crl
desktop-app::lib_ui
@@ -313,17 +318,20 @@ PRIVATE
calls/calls_box_controller.h
calls/calls_call.cpp
calls/calls_call.h
calls/calls_controller.cpp
calls/calls_controller.h
calls/calls_controller_tgvoip.h
calls/calls_emoji_fingerprint.cpp
calls/calls_emoji_fingerprint.h
calls/calls_instance.cpp
calls/calls_instance.h
calls/calls_panel.cpp
calls/calls_panel.h
calls/calls_signal_bars.cpp
calls/calls_signal_bars.h
calls/calls_top_bar.cpp
calls/calls_top_bar.h
calls/calls_userpic.cpp
calls/calls_userpic.h
calls/calls_video_bubble.cpp
calls/calls_video_bubble.h
chat_helpers/bot_keyboard.cpp
chat_helpers/bot_keyboard.h
chat_helpers/emoji_keywords.cpp
@@ -340,6 +348,8 @@ PRIVATE
chat_helpers/gifs_list_widget.h
chat_helpers/message_field.cpp
chat_helpers/message_field.h
chat_helpers/send_context_menu.cpp
chat_helpers/send_context_menu.h
chat_helpers/spellchecker_common.cpp
chat_helpers/spellchecker_common.h
chat_helpers/stickers_emoji_image_loader.cpp
@@ -1220,6 +1230,18 @@ elseif (APPLE)
)
endif()
endif()
elseif (LINUX)
if (NOT TDESKTOP_DISABLE_GTK_INTEGRATION)
find_package(PkgConfig REQUIRED)
pkg_search_module(GTK REQUIRED gtk+-2.0 gtk+-3.0)
target_include_directories(Telegram PRIVATE ${GTK_INCLUDE_DIRS})
if (DESKTOP_APP_USE_PACKAGED)
find_library(X11_LIBRARY X11)
target_link_libraries(Telegram PRIVATE ${X11_LIBRARY})
endif()
endif()
endif()
if (build_macstore)

View File

@@ -13,6 +13,8 @@ pacman --noconfirm -S pkg-config
PKG_CONFIG_PATH="/mingw64/lib/pkgconfig:$PKG_CONFIG_PATH"
./configure --toolchain=msvc \
--extra-cflags="-DCONFIG_SAFE_BITSTREAM_READER=1" \
--extra-cxxflags="-DCONFIG_SAFE_BITSTREAM_READER=1" \
--extra-ldflags="-libpath:$FullExecPath/../opus/win32/VS2015/Win32/Release" \
--disable-programs \
--disable-doc \

(Binary image diffs omitted: several dozen small icon assets were added or replaced; individual file sizes range from roughly 170 B to 9.3 KiB.)

View File

@@ -1468,6 +1468,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_context_copy_text" = "Copy Text";
"lng_context_open_gif" = "Open GIF";
"lng_context_save_gif" = "Save GIF";
"lng_context_delete_gif" = "Delete GIF";
"lng_context_attached_stickers" = "Attached Stickers";
"lng_context_to_msg" = "Go To Message";
"lng_context_reply_msg" = "Reply";
@@ -1735,16 +1736,33 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_call_box_status_group" = "({amount}) {status}";
"lng_call_outgoing" = "Outgoing call";
"lng_call_video_outgoing" = "Outgoing video call";
"lng_call_incoming" = "Incoming call";
"lng_call_video_incoming" = "Incoming video call";
"lng_call_missed" = "Missed call";
"lng_call_video_missed" = "Missed video call";
"lng_call_cancelled" = "Cancelled call";
"lng_call_video_cancelled" = "Cancelled video call";
"lng_call_declined" = "Declined call";
"lng_call_video_declined" = "Declined video call";
"lng_call_duration_info" = "{time}, {duration}";
"lng_call_type_and_duration" = "{type} ({duration})";
"lng_call_rate_label" = "Please rate the quality of your call";
"lng_call_rate_comment" = "Comment (optional)";
"lng_call_start_video" = "Start Video";
"lng_call_stop_video" = "Stop Video";
"lng_call_end_call" = "End Call";
"lng_call_mute_audio" = "Mute";
"lng_call_unmute_audio" = "Unmute";
"lng_call_accept" = "Accept";
"lng_call_decline" = "Decline";
"lng_call_redial" = "Redial";
"lng_call_cancel" = "Cancel";
"lng_call_microphone_off" = "{user}'s microphone is off";
"lng_no_mic_permission" = "Telegram needs access to your microphone so that you can make calls and record voice messages.";
"lng_player_message_today" = "Today at {time}";

View File

@@ -109,7 +109,7 @@ storage.fileMp4#b3cea0e4 = storage.FileType;
storage.fileWebp#1081464c = storage.FileType;
userEmpty#200250ba id:int = User;
user#938458c1 flags:# self:flags.10?true contact:flags.11?true mutual_contact:flags.12?true deleted:flags.13?true bot:flags.14?true bot_chat_history:flags.15?true bot_nochats:flags.16?true verified:flags.17?true restricted:flags.18?true min:flags.20?true bot_inline_geo:flags.21?true support:flags.23?true scam:flags.24?true id:int access_hash:flags.0?long first_name:flags.1?string last_name:flags.2?string username:flags.3?string phone:flags.4?string photo:flags.5?UserProfilePhoto status:flags.6?UserStatus bot_info_version:flags.14?int restriction_reason:flags.18?Vector<RestrictionReason> bot_inline_placeholder:flags.19?string lang_code:flags.22?string = User;
user#938458c1 flags:# self:flags.10?true contact:flags.11?true mutual_contact:flags.12?true deleted:flags.13?true bot:flags.14?true bot_chat_history:flags.15?true bot_nochats:flags.16?true verified:flags.17?true restricted:flags.18?true min:flags.20?true bot_inline_geo:flags.21?true support:flags.23?true scam:flags.24?true apply_min_photo:flags.25?true id:int access_hash:flags.0?long first_name:flags.1?string last_name:flags.2?string username:flags.3?string phone:flags.4?string photo:flags.5?UserProfilePhoto status:flags.6?UserStatus bot_info_version:flags.14?int restriction_reason:flags.18?Vector<RestrictionReason> bot_inline_placeholder:flags.19?string lang_code:flags.22?string = User;
userProfilePhotoEmpty#4f11bae1 = UserProfilePhoto;
userProfilePhoto#69d3ab26 flags:# has_video:flags.0?true photo_id:long photo_small:FileLocation photo_big:FileLocation dc_id:int = UserProfilePhoto;
@@ -128,7 +128,7 @@ channel#d31a961e flags:# creator:flags.0?true left:flags.2?true broadcast:flags.
channelForbidden#289da732 flags:# broadcast:flags.5?true megagroup:flags.8?true id:int access_hash:long title:string until_date:flags.16?int = Chat;
chatFull#1b7c9db3 flags:# can_set_username:flags.7?true has_scheduled:flags.8?true id:int about:string participants:ChatParticipants chat_photo:flags.2?Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:flags.3?Vector<BotInfo> pinned_msg_id:flags.6?int folder_id:flags.11?int = ChatFull;
channelFull#f0e6672a flags:# can_view_participants:flags.3?true can_set_username:flags.6?true can_set_stickers:flags.7?true hidden_prehistory:flags.10?true can_view_stats:flags.12?true can_set_location:flags.16?true has_scheduled:flags.19?true id:int about:string participants_count:flags.0?int admins_count:flags.1?int kicked_count:flags.2?int banned_count:flags.2?int online_count:flags.13?int read_inbox_max_id:int read_outbox_max_id:int unread_count:int chat_photo:Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:Vector<BotInfo> migrated_from_chat_id:flags.4?int migrated_from_max_id:flags.4?int pinned_msg_id:flags.5?int stickerset:flags.8?StickerSet available_min_id:flags.9?int folder_id:flags.11?int linked_chat_id:flags.14?int location:flags.15?ChannelLocation slowmode_seconds:flags.17?int slowmode_next_send_date:flags.18?int stats_dc:flags.12?int pts:int = ChatFull;
channelFull#f0e6672a flags:# can_view_participants:flags.3?true can_set_username:flags.6?true can_set_stickers:flags.7?true hidden_prehistory:flags.10?true can_set_location:flags.16?true has_scheduled:flags.19?true can_view_stats:flags.20?true id:int about:string participants_count:flags.0?int admins_count:flags.1?int kicked_count:flags.2?int banned_count:flags.2?int online_count:flags.13?int read_inbox_max_id:int read_outbox_max_id:int unread_count:int chat_photo:Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:Vector<BotInfo> migrated_from_chat_id:flags.4?int migrated_from_max_id:flags.4?int pinned_msg_id:flags.5?int stickerset:flags.8?StickerSet available_min_id:flags.9?int folder_id:flags.11?int linked_chat_id:flags.14?int location:flags.15?ChannelLocation slowmode_seconds:flags.17?int slowmode_next_send_date:flags.18?int stats_dc:flags.12?int pts:int = ChatFull;
chatParticipant#c8d7493e user_id:int inviter_id:int date:int = ChatParticipant;
chatParticipantCreator#da13538a user_id:int = ChatParticipant;
@@ -225,7 +225,7 @@ inputReportReasonOther#e1746d0a text:string = ReportReason;
inputReportReasonCopyright#9b89f93a = ReportReason;
inputReportReasonGeoIrrelevant#dbd4feed = ReportReason;
userFull#edf17c12 flags:# blocked:flags.0?true phone_calls_available:flags.4?true phone_calls_private:flags.5?true can_pin_message:flags.7?true has_scheduled:flags.12?true user:User about:flags.1?string settings:PeerSettings profile_photo:flags.2?Photo notify_settings:PeerNotifySettings bot_info:flags.3?BotInfo pinned_msg_id:flags.6?int common_chats_count:int folder_id:flags.11?int = UserFull;
userFull#edf17c12 flags:# blocked:flags.0?true phone_calls_available:flags.4?true phone_calls_private:flags.5?true can_pin_message:flags.7?true has_scheduled:flags.12?true video_calls_available:flags.13?true user:User about:flags.1?string settings:PeerSettings profile_photo:flags.2?Photo notify_settings:PeerNotifySettings bot_info:flags.3?BotInfo pinned_msg_id:flags.6?int common_chats_count:int folder_id:flags.11?int = UserFull;
contact#f911c994 user_id:int mutual:Bool = Contact;
@@ -818,13 +818,14 @@ inputStickerSetItem#ffa0a496 flags:# document:InputDocument emoji:string mask_co
inputPhoneCall#1e36fded id:long access_hash:long = InputPhoneCall;
phoneCallEmpty#5366c915 id:long = PhoneCall;
phoneCallWaiting#1b8f4ad1 flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int protocol:PhoneCallProtocol receive_date:flags.0?int = PhoneCall;
phoneCallRequested#87eabb53 flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_hash:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCallAccepted#997c454a flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_b:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCall#8742ae7f flags:# p2p_allowed:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_or_b:bytes key_fingerprint:long protocol:PhoneCallProtocol connections:Vector<PhoneConnection> start_date:int = PhoneCall;
phoneCallDiscarded#50ca4de1 flags:# need_rating:flags.2?true need_debug:flags.3?true video:flags.5?true id:long reason:flags.0?PhoneCallDiscardReason duration:flags.1?int = PhoneCall;
phoneCallWaiting#1b8f4ad1 flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int protocol:PhoneCallProtocol receive_date:flags.0?int = PhoneCall;
phoneCallRequested#87eabb53 flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_hash:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCallAccepted#997c454a flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_b:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCall#8742ae7f flags:# p2p_allowed:flags.5?true video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_or_b:bytes key_fingerprint:long protocol:PhoneCallProtocol connections:Vector<PhoneConnection> start_date:int = PhoneCall;
phoneCallDiscarded#50ca4de1 flags:# need_rating:flags.2?true need_debug:flags.3?true video:flags.6?true id:long reason:flags.0?PhoneCallDiscardReason duration:flags.1?int = PhoneCall;
phoneConnection#9d4c17c0 id:long ip:string ipv6:string port:int peer_tag:bytes = PhoneConnection;
phoneConnectionWebrtc#635fe375 flags:# turn:flags.0?true stun:flags.1?true id:long ip:string ipv6:string port:int username:string password:string = PhoneConnection;
phoneCallProtocol#fc878fc8 flags:# udp_p2p:flags.0?true udp_reflector:flags.1?true min_layer:int max_layer:int library_versions:Vector<string> = PhoneCallProtocol;
@@ -1398,7 +1399,7 @@ updates.getState#edd4882a = updates.State;
updates.getDifference#25939651 flags:# pts:int pts_total_limit:flags.0?int date:int qts:int = updates.Difference;
updates.getChannelDifference#3173d78 flags:# force:flags.0?true channel:InputChannel filter:ChannelMessagesFilter pts:int limit:int = updates.ChannelDifference;
photos.updateProfilePhoto#f0bb5152 id:InputPhoto = UserProfilePhoto;
photos.updateProfilePhoto#72d4742c id:InputPhoto = photos.Photo;
photos.uploadProfilePhoto#89f30f69 flags:# file:flags.0?InputFile video:flags.1?InputFile video_start_ts:flags.2?double = photos.Photo;
photos.deletePhotos#87cf7f2f id:Vector<InputPhoto> = Vector<long>;
photos.getUserPhotos#91cd32a8 user_id:InputUser offset:int max_id:long limit:int = photos.Photos;
@@ -1511,4 +1512,4 @@ stats.getBroadcastStats#ab42441a flags:# dark:flags.0?true channel:InputChannel
stats.loadAsyncGraph#621d5fa0 flags:# token:string x:flags.0?long = StatsGraph;
stats.getMegagroupStats#dcdf8607 flags:# dark:flags.0?true channel:InputChannel = stats.MegagroupStats;
// LAYER 116
// LAYER 117
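A note on the TL changes above: in the scheme, a field declared as name:flags.N?Type is present only when bit N of the flags:# value is set, so moving video from flags.5 to flags.6 changes the bit a layer-117 client must test. A minimal standalone illustration (the constant names are invented for this sketch):

#include <cstdint>
#include <cstdio>

// flags.N in a TL constructor refers to bit N of the flags:# integer.
constexpr uint32_t kVideoBitLayer116 = 1u << 5; // video:flags.5?true (old scheme)
constexpr uint32_t kVideoBitLayer117 = 1u << 6; // video:flags.6?true (layer 117)

int main() {
    const uint32_t flags = kVideoBitLayer117; // e.g. from a phoneCallRequested
    std::printf("video call (layer 117): %s\n",
                (flags & kVideoBitLayer117) ? "yes" : "no");
    return 0;
}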

View File

@@ -9,7 +9,7 @@
<Identity Name="TelegramMessengerLLP.TelegramDesktop"
ProcessorArchitecture="ARCHITECTURE"
Publisher="CN=536BC709-8EE1-4478-AF22-F0F0F26FF64A"
Version="2.2.0.0" />
Version="2.3.0.0" />
<Properties>
<DisplayName>Telegram Desktop</DisplayName>
<PublisherDisplayName>Telegram FZ-LLC</PublisherDisplayName>

View File

@@ -44,8 +44,8 @@ IDI_ICON1 ICON "..\\art\\icon256.ico"
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 2,2,0,0
PRODUCTVERSION 2,2,0,0
FILEVERSION 2,3,0,0
PRODUCTVERSION 2,3,0,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -62,10 +62,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop"
VALUE "FileVersion", "2.2.0.0"
VALUE "FileVersion", "2.3.0.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2020"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "2.2.0.0"
VALUE "ProductVersion", "2.3.0.0"
END
END
BLOCK "VarFileInfo"

View File

@@ -35,8 +35,8 @@ LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 2,2,0,0
PRODUCTVERSION 2,2,0,0
FILEVERSION 2,3,0,0
PRODUCTVERSION 2,3,0,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -53,10 +53,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop Updater"
VALUE "FileVersion", "2.2.0.0"
VALUE "FileVersion", "2.3.0.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2020"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "2.2.0.0"
VALUE "ProductVersion", "2.3.0.0"
END
END
BLOCK "VarFileInfo"

View File

@@ -298,6 +298,28 @@ bool update() {
for (size_t i = 0; i < from.size(); ++i) {
string fname = from[i], tofname = to[i];
// it is necessary to remove the old file to not to get an error if appimage file is used by fuse
struct stat statbuf;
writeLog("Trying to get stat() for '%s'", tofname.c_str());
if (!stat(tofname.c_str(), &statbuf)) {
if (S_ISDIR(statbuf.st_mode)) {
writeLog("Fully clearing path '%s'..", tofname.c_str());
if (!remove_directory(tofname.c_str())) {
writeLog("Error: failed to clear path '%s'", tofname.c_str());
delFolder();
return false;
}
} else {
writeLog("Unlinking file '%s'", tofname.c_str());
if (unlink(tofname.c_str())) {
writeLog("Error: failed to unlink '%s'", tofname.c_str());
delFolder();
return false;
}
}
}
writeLog("Copying file '%s' to '%s'..", fname.c_str(), tofname.c_str());
int copyTries = 0, triesLimit = 30;
do {
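The hunk above calls remove_directory() before copying, but that helper lies outside the shown context. For illustration only, a hypothetical POSIX sketch of a recursive removal helper with a similar contract (returns false on failure); this is an assumption, not the updater's actual implementation:

#include <dirent.h>
#include <string.h>
#include <sys/stat.h>
#include <unistd.h>
#include <string>

// Hypothetical recursive delete: removes files and subfolders, then the folder.
bool remove_directory_sketch(const std::string &path) {
    DIR *dir = opendir(path.c_str());
    if (!dir) {
        return false;
    }
    bool ok = true;
    while (struct dirent *entry = readdir(dir)) {
        if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) {
            continue;
        }
        const std::string full = path + "/" + entry->d_name;
        struct stat st;
        if (stat(full.c_str(), &st)) {
            ok = false;                               // could not stat the entry
        } else if (S_ISDIR(st.st_mode)) {
            ok = remove_directory_sketch(full) && ok; // recurse into subfolder
        } else {
            ok = !unlink(full.c_str()) && ok;         // unlink regular file
        }
    }
    closedir(dir);
    return !rmdir(path.c_str()) && ok;                // finally remove the folder itself
}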

View File

@@ -1726,8 +1726,9 @@ void Updates::feedUpdate(const MTPUpdate &update) {
auto &d = update.c_updateEncryptedMessagesRead();
} break;
case mtpc_updatePhoneCall: {
Core::App().calls().handleUpdate(&session(), update.c_updatePhoneCall());
case mtpc_updatePhoneCall:
case mtpc_updatePhoneCallSignalingData: {
Core::App().calls().handleUpdate(&session(), update);
} break;
case mtpc_updateUserBlocked: {

View File

@@ -4918,8 +4918,8 @@ void ApiWrap::clearPeerPhoto(not_null<PhotoData*> photo) {
if (self->userpicPhotoId() == photo->id) {
request(MTPphotos_UpdateProfilePhoto(
MTP_inputPhotoEmpty()
)).done([=](const MTPUserProfilePhoto &result) {
self->setPhoto(result);
)).done([=](const MTPphotos_Photo &result) {
self->setPhoto(MTP_userProfilePhotoEmpty());
}).send();
} else if (photo->peer && photo->peer->userpicPhotoId() == photo->id) {
const auto applier = [=](const MTPUpdates &result) {

View File

@@ -24,6 +24,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/core_settings.h"
#include "chat_helpers/emoji_suggestions_widget.h"
#include "chat_helpers/message_field.h"
#include "chat_helpers/send_context_menu.h"
#include "history/view/history_view_schedule_box.h"
#include "settings/settings_common.h"
#include "base/unique_qptr.h"
@@ -1058,19 +1059,19 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
*error &= ~Error::Solution;
}
};
const auto showError = [=](const QString &text) {
Ui::Toast::Show(text);
const auto showError = [](tr::phrase<> text) {
Ui::Toast::Show(text(tr::now));
};
const auto send = [=](Api::SendOptions sendOptions) {
collectError();
if (*error & Error::Question) {
showError(tr::lng_polls_choose_question(tr::now));
showError(tr::lng_polls_choose_question);
question->setFocus();
} else if (*error & Error::Options) {
showError(tr::lng_polls_choose_answers(tr::now));
showError(tr::lng_polls_choose_answers);
options->focusFirst();
} else if (*error & Error::Correct) {
showError(tr::lng_polls_choose_correct(tr::now));
showError(tr::lng_polls_choose_correct);
} else if (*error & Error::Solution) {
solution->showError();
} else if (!*error) {
@@ -1078,15 +1079,13 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
}
};
const auto sendSilent = [=] {
auto options = Api::SendOptions();
options.silent = true;
send(options);
send({ .silent = true });
};
const auto sendScheduled = [=] {
Ui::show(
HistoryView::PrepareScheduleBox(
this,
SendMenuType::Scheduled,
SendMenu::Type::Scheduled,
send),
Ui::LayerOption::KeepOther);
};
@@ -1101,15 +1100,22 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
FocusAtEnd(question);
}, lifetime());
const auto isNormal = (_sendType == Api::SendType::Normal);
const auto isScheduled = (_sendType == Api::SendType::Scheduled);
const auto submit = addButton(
tr::lng_polls_create_button(),
[=] { send({}); });
if (_sendType == Api::SendType::Normal) {
isNormal
? tr::lng_polls_create_button()
: tr::lng_schedule_button(),
[=] { isNormal ? send({}) : sendScheduled(); });
if (isNormal || isScheduled) {
const auto sendMenuType = [=] {
collectError();
return *error ? SendMenuType::Disabled : SendMenuType::Scheduled;
return (*error || isScheduled)
? SendMenu::Type::Disabled
: SendMenu::Type::Scheduled;
};
SetupSendMenuAndShortcuts(
SendMenu::SetupMenuAndShortcuts(
submit.data(),
sendMenuType,
sendSilent,
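A side note on the send({ .silent = true }) call introduced above: it relies on C++20 designated initializers for aggregates, replacing the old three-line Api::SendOptions setup. A standalone sketch with an assumed struct shape (for illustration only; the real Api::SendOptions lives elsewhere in tdesktop):

#include <iostream>

// Assumed minimal shape of the options aggregate for this sketch.
struct SendOptions {
    bool silent = false;
    int scheduleDate = 0; // 0 means "send now"
};

void send(const SendOptions &options) {
    std::cout << "silent=" << options.silent
              << " scheduleDate=" << options.scheduleDate << '\n';
}

int main() {
    send({});                 // all defaults
    send({ .silent = true }); // same pattern as in the diff above
    return 0;
}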

View File

@@ -91,25 +91,6 @@ auto ListFromMimeData(not_null<const QMimeData*> data) {
return result;
}
auto CheckMimeData(not_null<const QMimeData*> data, bool isAlbum) {
if (data->urls().size() > 1) {
return false;
} else if (data->hasImage()) {
return true;
}
if (isAlbum && data->hasUrls()) {
const auto url = data->urls().front();
if (url.isLocalFile()) {
using namespace Core;
const auto info = QFileInfo(Platform::File::UrlToLocal(url));
return IsMimeAcceptedForAlbum(MimeTypeForFile(info).name());
}
}
return true;
}
} // namespace
EditCaptionBox::EditCaptionBox(
@@ -663,7 +644,7 @@ void EditCaptionBox::prepare() {
if (action == Ui::InputField::MimeAction::Check) {
if (!data->hasText() && !_isAllowedEditMedia) {
return false;
} else if (CheckMimeData(data, _isAlbum)) {
} else if (Storage::ValidateDragData(data, _isAlbum)) {
return true;
}
return data->hasText();
@@ -766,7 +747,9 @@ void EditCaptionBox::setupEmojiPanel() {
void EditCaptionBox::setupDragArea() {
auto enterFilter = [=](not_null<const QMimeData*> data) {
return !_isAllowedEditMedia ? false : CheckMimeData(data, _isAlbum);
return !_isAllowedEditMedia
? false
: Storage::ValidateDragData(data, _isAlbum);
};
// Avoid both drag areas appearing at one time.
auto computeState = [=](const QMimeData *data) {

View File

@@ -15,6 +15,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "main/main_session.h"
#include "mtproto/mtproto_config.h"
#include "chat_helpers/message_field.h"
#include "chat_helpers/send_context_menu.h"
#include "chat_helpers/emoji_suggestions_widget.h"
#include "chat_helpers/tabbed_panel.h"
#include "chat_helpers/tabbed_selector.h"
@@ -1666,7 +1667,7 @@ SendFilesBox::SendFilesBox(
CompressConfirm compressed,
SendLimit limit,
Api::SendType sendType,
SendMenuType sendMenuType)
SendMenu::Type sendMenuType)
: _controller(controller)
, _sendType(sendType)
, _list(std::move(list))
@@ -1836,7 +1837,7 @@ void SendFilesBox::setupShadows(
void SendFilesBox::prepare() {
_send = addButton(tr::lng_send_button(), [=] { send({}); });
if (_sendType == Api::SendType::Normal) {
SetupSendMenuAndShortcuts(
SendMenu::SetupMenuAndShortcuts(
_send,
[=] { return _sendMenuType; },
[=] { sendSilent(); },
@@ -1865,12 +1866,13 @@ void SendFilesBox::prepare() {
void SendFilesBox::setupDragArea() {
// Avoid both drag areas appearing at one time.
auto computeState = [=](const QMimeData *data) {
using DragState = Storage::MimeDataState;
const auto state = Storage::ComputeMimeDataState(data);
return (state == Storage::MimeDataState::PhotoFiles)
? Storage::MimeDataState::Image
: (state == Storage::MimeDataState::Files)
// Temporary enable drag'n'drop only for images. TODO.
? Storage::MimeDataState::None
return (state == DragState::PhotoFiles)
? DragState::Image
: (state == DragState::Files
&& !Storage::ValidateDragData(data, true))
? DragState::None
: state;
};
const auto areas = DragArea::SetupDragAreaToContainer(
@@ -2416,7 +2418,7 @@ void SendFilesBox::sendSilent() {
void SendFilesBox::sendScheduled() {
const auto type = (_sendType == Api::SendType::ScheduledToUser)
? SendMenuType::ScheduledToUser
? SendMenu::Type::ScheduledToUser
: _sendMenuType;
const auto callback = [=](Api::SendOptions options) { send(options); };
Ui::show(

View File

@@ -40,7 +40,9 @@ namespace Window {
class SessionController;
} // namespace Window
enum class SendMenuType;
namespace SendMenu {
enum class Type;
} // namespace SendMenu
enum class SendFilesWay {
Album,
@@ -62,7 +64,7 @@ public:
CompressConfirm compressed,
SendLimit limit,
Api::SendType sendType,
SendMenuType sendMenuType);
SendMenu::Type sendMenuType);
void setConfirmedCallback(
Fn<void(
@@ -142,7 +144,7 @@ private:
CompressConfirm _compressConfirmInitial = CompressConfirm::None;
CompressConfirm _compressConfirm = CompressConfirm::None;
SendLimit _sendLimit = SendLimit::Many;
SendMenuType _sendMenuType = SendMenuType();
SendMenu::Type _sendMenuType = SendMenu::Type();
Fn<void(
Storage::PreparedList &&list,

View File

@@ -23,6 +23,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/wrap/slide_wrap.h"
#include "ui/text_options.h"
#include "chat_helpers/message_field.h"
#include "chat_helpers/send_context_menu.h"
#include "history/history.h"
#include "history/history_message.h"
#include "history/view/history_view_schedule_box.h"
@@ -408,13 +409,13 @@ void ShareBox::keyPressEvent(QKeyEvent *e) {
}
}
SendMenuType ShareBox::sendMenuType() const {
SendMenu::Type ShareBox::sendMenuType() const {
const auto selected = _inner->selected();
return ranges::all_of(selected, HistoryView::CanScheduleUntilOnline)
? SendMenuType::ScheduledToUser
? SendMenu::Type::ScheduledToUser
: (selected.size() == 1 && selected.front()->isSelf())
? SendMenuType::Reminder
: SendMenuType::Scheduled;
? SendMenu::Type::Reminder
: SendMenu::Type::Scheduled;
}
void ShareBox::createButtons() {
@@ -423,7 +424,7 @@ void ShareBox::createButtons() {
const auto send = addButton(tr::lng_share_confirm(), [=] {
submit({});
});
SetupSendMenuAndShortcuts(
SendMenu::SetupMenuAndShortcuts(
send,
[=] { return sendMenuType(); },
[=] { submitSilent(); },

View File

@@ -14,7 +14,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/effects/round_checkbox.h"
#include "mtproto/sender.h"
enum class SendMenuType;
namespace SendMenu {
enum class Type;
} // namespace SendMenu
namespace Window {
class SessionNavigation;
@@ -82,7 +84,7 @@ private:
void copyLink();
bool searchByUsername(bool useCache = false);
SendMenuType sendMenuType() const;
SendMenu::Type sendMenuType() const;
void scrollTo(Ui::ScrollToRequest request);
void needSearchByUsername();

View File

@@ -14,12 +14,12 @@ CallSignalBars {
width: pixels;
radius: pixels;
skip: pixels;
min: pixels;
max: pixels;
color: color;
inactiveOpacity: double;
}
callWidth: 300px;
callHeight: 470px;
callRadius: 6px;
callShadow: Shadow {
left: icon {{ "call_shadow_left", windowShadowFg }};
@@ -34,16 +34,83 @@ callShadow: Shadow {
fallback: windowShadowFgFallback;
}
callButton: IconButton {
width: 72px;
height: 72px;
callWidthMin: 300px;
callHeightMin: 440px;
callWidth: 720px;
callHeight: 540px;
iconPosition: point(-1px, -1px);
callBottomControlsHeight: 87px;
CallBodyLayout {
height: pixels;
photoTop: pixels;
photoSize: pixels;
nameTop: pixels;
statusTop: pixels;
muteStroke: pixels;
muteSize: pixels;
mutePosition: point;
}
callBodyLayout: CallBodyLayout {
height: 284px;
photoTop: 21px;
photoSize: 160px;
nameTop: 221px;
statusTop: 254px;
muteStroke: 3px;
muteSize: 36px;
mutePosition: point(142px, 135px);
}
callBodyWithPreview: CallBodyLayout {
height: 185px;
photoTop: 21px;
photoSize: 100px;
nameTop: 132px;
statusTop: 163px;
muteStroke: 3px;
muteSize: 0px;
mutePosition: point(90px, 84px);
}
callMutedPeerIcon: icon {{ "calls_mute_userpic", callIconFg }};
callOutgoingPreviewMin: size(360px, 120px);
callOutgoingPreview: size(540px, 180px); // default, for height == callHeight.
callOutgoingPreviewMax: size(1620px, 540px);
callOutgoingDefaultSize: size(160px, 110px);
callInnerPadding: 12px;
callFingerprintPadding: margins(10px, 4px, 8px, 5px);
callFingerprintSkip: 4px;
callFingerprintSignalBarsSkip: 2px;
callSignalBarsPadding: margins(8px, 9px, 11px, 5px);
callFingerprintTop: 8px;
callFingerprintBottom: -16px;
callTooltipMutedIcon: icon{{ "calls_mute_tooltip", videoPlayIconFg }};
callTooltipMutedIconPosition: point(10px, 5px);
callTooltipPadding: margins(41px, 7px, 15px, 8px);
callButton: IconButton {
width: 68px;
height: 79px;
iconPosition: point(-1px, 16px);
rippleAreaPosition: point(12px, 12px);
rippleAreaSize: 48px;
rippleAreaSize: 44px;
ripple: defaultRippleAnimation;
}
callButtonLabel: FlatLabel(defaultFlatLabel) {
textFg: callNameFg;
style: TextStyle(defaultTextStyle) {
font: font(11px);
linkFont: font(11px);
linkFontOver: font(11px underline);
}
}
callAnswer: CallButton {
button: IconButton(callButton) {
@@ -56,6 +123,7 @@ callAnswer: CallButton {
angle: 135.;
outerRadius: 12px;
outerBg: callAnswerBgOuter;
label: callButtonLabel;
}
callHangup: CallButton {
button: IconButton(callButton) {
@@ -66,30 +134,57 @@ callHangup: CallButton {
}
bg: callHangupBg;
outerBg: callHangupBg;
label: callButtonLabel;
}
callCancel: CallButton {
button: IconButton(callButton) {
icon: icon {{ "box_button_close", callCancelFg }};
icon: icon {{ "call_cancel", callIconFgActive }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callCancelRipple;
color: callIconActiveRipple;
}
}
bg: callCancelBg;
outerBg: callCancelBg;
bg: callIconBgActive;
outerBg: callIconBgActive;
label: callButtonLabel;
}
callMuteToggle: IconButton(callButton) {
icon: icon {{ "call_record_active", callIconFg }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callMuteRipple;
callMicrophoneMute: CallButton {
button: IconButton(callButton) {
icon: icon {{ "call_record_active", callIconFg }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callMuteRipple;
}
}
bg: callIconBg;
outerBg: callMuteRipple;
label: callButtonLabel;
}
callMicrophoneUnmute: CallButton(callMicrophoneMute) {
button: IconButton(callButton) {
icon: icon {{ "call_record_muted", callIconFgActive }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callIconActiveRipple;
}
}
bg: callIconBgActive;
}
callCameraMute: CallButton(callMicrophoneMute) {
button: IconButton(callButton) {
icon: icon {{ "call_camera_active", callIconFg }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callMuteRipple;
}
}
}
callUnmuteIcon: icon {{ "call_record_muted", callIconFg }};
callCameraUnmute: CallButton(callMicrophoneUnmute) {
button: IconButton(callButton) {
icon: icon {{ "call_camera_muted", callIconFgActive }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callIconActiveRipple;
}
}
}
callBottomShadowSize: 124px;
callControlsTop: 80px;
callControlsSkip: 0px;
callMuteRight: 8px;
callNameTop: 15px;
callName: FlatLabel(defaultFlatLabel) {
minWidth: 260px;
maxHeight: 30px;
@@ -101,7 +196,6 @@ callName: FlatLabel(defaultFlatLabel) {
linkFontOver: font(21px semibold underline);
}
}
callStatusTop: 46px;
callStatus: FlatLabel(defaultFlatLabel) {
minWidth: 260px;
maxHeight: 20px;
@@ -113,10 +207,16 @@ callStatus: FlatLabel(defaultFlatLabel) {
linkFontOver: font(14px underline);
}
}
callFingerprintPadding: margins(9px, 4px, 9px, 5px);
callFingerprintSkip: 3px;
callFingerprintBottom: 8px;
callRemoteAudioMute: FlatLabel(callStatus) {
minWidth: 0px;
textFg: videoPlayIconFg;
style: TextStyle(defaultTextStyle) {
font: font(12px);
linkFont: font(12px);
linkFontOver: font(12px underline);
}
}
callRemoteAudioMuteSkip: 12px;
callBarHeight: 38px;
callBarMuteToggle: IconButton {
@@ -124,7 +224,7 @@ callBarMuteToggle: IconButton {
height: 38px;
icon: icon {{ "call_record_active", callBarFg }};
iconPosition: point(9px, 8px);
iconPosition: point(3px, 2px);
ripple: RippleAnimation(defaultRippleAnimation) {
color: callBarMuteRipple;
@@ -137,7 +237,7 @@ callBarRightSkip: 12px;
callBarSkip: 10px;
callBarHangup: IconButton(callBarMuteToggle) {
icon: icon {{ "call_discard", callBarFg }};
iconPosition: point(9px, 11px);
iconPosition: point(3px, 1px);
}
callBarLabel: LabelSimple(defaultLabelSimple) {
font: semiboldFont;
@@ -200,14 +300,93 @@ callDebugLabel: FlatLabel(defaultFlatLabel) {
callPanelDuration: 150;
callPanelSignalBars: CallSignalBars {
width: 3px;
width: 2px;
radius: 1px;
skip: 1px;
skip: 2px;
min: 4px;
max: 10px;
color: callNameFg;
inactiveOpacity: 0.5;
}
callBarSignalBars: CallSignalBars(callPanelSignalBars) {
width: 3px;
skip: 1px;
min: 3px;
max: 12px;
color: callBarFg;
}
callSignalMargin: 8px;
callSignalPadding: 4px;
callTitleButton: IconButton {
width: 34px;
height: 30px;
iconPosition: point(0px, 0px);
}
callTitleMinimizeIcon: icon {
{ "calls_minimize_shadow", windowShadowFg },
{ "calls_minimize_main", callNameFg },
};
callTitleMinimizeIconOver: icon {
{ size(34px, 30px), callBgButton },
{ size(34px, 30px), callMuteRipple },
{ "calls_minimize_shadow", windowShadowFg },
{ "calls_minimize_main", callNameFg },
};
callTitleMaximizeIcon: icon {
{ "calls_maximize_shadow", windowShadowFg },
{ "calls_maximize_main", callNameFg },
};
callTitleMaximizeIconOver: icon {
{ size(34px, 30px), callBgButton },
{ size(34px, 30px), callMuteRipple },
{ "calls_maximize_shadow", windowShadowFg },
{ "calls_maximize_main", callNameFg },
};
callTitleRestoreIcon: icon {
{ "calls_restore_shadow", windowShadowFg },
{ "calls_restore_main", callNameFg },
};
callTitleRestoreIconOver: icon {
{ size(34px, 30px), callBgButton },
{ size(34px, 30px), callMuteRipple },
{ "calls_restore_shadow", windowShadowFg },
{ "calls_restore_main", callNameFg },
};
callTitleCloseIcon: icon {
{ "calls_close_shadow", windowShadowFg },
{ "calls_close_main", callNameFg },
};
callTitleCloseIconOver: icon {
{ size(34px, 30px), titleButtonCloseBgOver },
{ "calls_close_shadow", windowShadowFg },
{ "calls_close_main", titleButtonCloseFgOver },
};
callTitle: WindowTitle(defaultWindowTitle) {
height: 0px;
bg: callBgOpaque;
bgActive: callBgOpaque;
fg: transparent;
fgActive: transparent;
minimize: IconButton(callTitleButton) {
icon: callTitleMinimizeIcon;
iconOver: callTitleMinimizeIconOver;
}
minimizeIconActive: callTitleMinimizeIcon;
minimizeIconActiveOver: callTitleMinimizeIconOver;
maximize: IconButton(callTitleButton) {
icon: callTitleMaximizeIcon;
iconOver: callTitleMaximizeIconOver;
}
maximizeIconActive: callTitleMaximizeIcon;
maximizeIconActiveOver: callTitleMaximizeIconOver;
restoreIcon: callTitleRestoreIcon;
restoreIconOver: callTitleRestoreIconOver;
restoreIconActive: callTitleRestoreIcon;
restoreIconActiveOver: callTitleRestoreIconOver;
close: IconButton(callTitleButton) {
icon: callTitleCloseIcon;
iconOver: callTitleCloseIconOver;
}
closeIconActive: callTitleCloseIcon;
closeIconActiveOver: callTitleCloseIconOver;
}
callTitleShadow: icon {{ "calls_shadow_controls", windowShadowFg }};

View File

@@ -317,7 +317,7 @@ void BoxController::rowActionClicked(not_null<PeerListRow*> row) {
auto user = row->peer()->asUser();
Assert(user != nullptr);
Core::App().calls().startOutgoingCall(user);
Core::App().calls().startOutgoingCall(user, false);
}
void BoxController::receivedCalls(const QVector<MTPMessage> &result) {

View File

@@ -8,6 +8,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "calls/calls_call.h"
#include "main/main_session.h"
#include "main/main_account.h"
#include "main/main_app_config.h"
#include "apiwrap.h"
#include "lang/lang_keys.h"
#include "boxes/confirm_box.h"
@@ -21,38 +23,96 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio_track.h"
#include "base/platform/base_platform_info.h"
#include "calls/calls_panel.h"
#include "calls/calls_controller.h"
#include "webrtc/webrtc_video_track.h"
#include "data/data_user.h"
#include "data/data_session.h"
#include "facades.h"
#include <tgcalls/Instance.h>
#include <tgcalls/VideoCaptureInterface.h>
namespace tgcalls {
class InstanceImpl;
class InstanceImplLegacy;
class InstanceImplReference;
void SetLegacyGlobalServerConfig(const std::string &serverConfig);
} // namespace tgcalls
namespace Calls {
namespace {
constexpr auto kMinLayer = 65;
constexpr auto kHangupTimeoutMs = 5000;
constexpr auto kSha256Size = 32;
const auto kDefaultVersion = "2.4.4"_q;
#ifndef DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
const auto RegisterTag = tgcalls::Register<tgcalls::InstanceImpl>();
//const auto RegisterTagReference = tgcalls::Register<tgcalls::InstanceImplReference>();
#endif // DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
const auto RegisterTagLegacy = tgcalls::Register<tgcalls::InstanceImplLegacy>();
void AppendEndpoint(
std::vector<TgVoipEndpoint> &list,
std::vector<tgcalls::Endpoint> &list,
const MTPPhoneConnection &connection) {
connection.match([&](const MTPDphoneConnection &data) {
if (data.vpeer_tag().v.length() != 16) {
return;
}
auto endpoint = TgVoipEndpoint{
tgcalls::Endpoint endpoint = {
.endpointId = (int64_t)data.vid().v,
.host = TgVoipEdpointHost{
.host = tgcalls::EndpointHost{
.ipv4 = data.vip().v.toStdString(),
.ipv6 = data.vipv6().v.toStdString() },
.port = (uint16_t)data.vport().v,
.type = TgVoipEndpointType::UdpRelay
.type = tgcalls::EndpointType::UdpRelay,
};
const auto tag = data.vpeer_tag().v;
if (tag.size() >= 16) {
memcpy(endpoint.peerTag, tag.data(), 16);
}
list.push_back(std::move(endpoint));
}, [&](const MTPDphoneConnectionWebrtc &data) {
});
}
void AppendServer(
std::vector<tgcalls::RtcServer> &list,
const MTPPhoneConnection &connection) {
connection.match([&](const MTPDphoneConnection &data) {
}, [&](const MTPDphoneConnectionWebrtc &data) {
const auto host = qs(data.vip());
const auto hostv6 = qs(data.vipv6());
const auto port = uint16_t(data.vport().v);
if (data.is_stun()) {
const auto pushStun = [&](const QString &host) {
if (host.isEmpty()) {
return;
}
list.push_back(tgcalls::RtcServer{
.host = host.toStdString(),
.port = port,
.isTurn = false
});
};
pushStun(host);
pushStun(hostv6);
}
const auto username = qs(data.vusername());
const auto password = qs(data.vpassword());
if (data.is_turn() && !username.isEmpty() && !password.isEmpty()) {
const auto pushTurn = [&](const QString &host) {
list.push_back(tgcalls::RtcServer{
.host = host.toStdString(),
.port = port,
.login = username.toStdString(),
.password = password.toStdString(),
.isTurn = true,
});
};
pushTurn(host);
pushTurn(hostv6);
}
});
}
@@ -71,10 +131,6 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
| (gsl::to_integer<uint64>(hash[12]));
}
[[nodiscard]] std::vector<std::string> CollectVersions() {
return { TgVoip::getVersion() };
}
[[nodiscard]] QVector<MTPstring> WrapVersions(
const std::vector<std::string> &data) {
auto result = QVector<MTPstring>();
@@ -86,28 +142,35 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
}
[[nodiscard]] QVector<MTPstring> CollectVersionsForApi() {
return WrapVersions(CollectVersions());
return WrapVersions(tgcalls::Meta::Versions() | ranges::action::reverse);
}
[[nodiscard]] Webrtc::VideoState StartVideoState(bool enabled) {
using State = Webrtc::VideoState;
return enabled ? State::Active : State::Inactive;
}
} // namespace
Call::Delegate::~Delegate() = default;
Call::Call(
not_null<Delegate*> delegate,
not_null<UserData*> user,
Type type)
Type type,
bool video)
: _delegate(delegate)
, _user(user)
, _api(&_user->session().mtp())
, _type(type) {
_discardByTimeoutTimer.setCallback([this] { hangup(); });
, _type(type)
, _videoIncoming(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video)))
, _videoOutgoing(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video))) {
_discardByTimeoutTimer.setCallback([=] { hangup(); });
if (_type == Type::Outgoing) {
setState(State::Requesting);
} else {
startWaitingTrack();
}
setupOutgoingVideo();
}
void Call::generateModExpFirst(bytes::const_span randomSeed) {
@@ -161,8 +224,11 @@ void Call::startOutgoing() {
Expects(_state.current() == State::Requesting);
Expects(_gaHash.size() == kSha256Size);
const auto flags = _videoCapture
? MTPphone_RequestCall::Flag::f_video
: MTPphone_RequestCall::Flag(0);
_api.request(MTPphone_RequestCall(
MTP_flags(0),
MTP_flags(flags),
_user->inputUser,
MTP_int(rand_value<int32>()),
MTP_bytes(_gaHash),
@@ -170,7 +236,7 @@ void Call::startOutgoing() {
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
MTP_int(kMinLayer),
MTP_int(TgVoip::getConnectionMaxLayer()),
MTP_int(tgcalls::Meta::MaxLayer()),
MTP_vector(CollectVersionsForApi()))
)).done([=](const MTPphone_PhoneCall &result) {
Expects(result.type() == mtpc_phone_phoneCall);
@@ -222,7 +288,7 @@ void Call::startIncoming() {
}
void Call::answer() {
_delegate->requestMicrophonePermissionOrFail(crl::guard(this, [=] {
_delegate->requestPermissionsOrFail(crl::guard(this, [=] {
actuallyAnswer();
}));
}
@@ -251,10 +317,11 @@ void Call::actuallyAnswer() {
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
MTP_int(kMinLayer),
MTP_int(TgVoip::getConnectionMaxLayer()),
MTP_int(tgcalls::Meta::MaxLayer()),
MTP_vector(CollectVersionsForApi()))
)).done([=](const MTPphone_PhoneCall &result) {
Expects(result.type() == mtpc_phone_phoneCall);
auto &call = result.c_phone_phoneCall();
_user->session().data().processUsers(call.vusers());
if (call.vphone_call().type() != mtpc_phoneCallWaiting) {
@@ -270,12 +337,46 @@ void Call::actuallyAnswer() {
}).send();
}
void Call::setMute(bool mute) {
_mute = mute;
if (_controller) {
_controller->setMuteMicrophone(_mute);
void Call::setMuted(bool mute) {
_muted = mute;
if (_instance) {
_instance->setMuteMicrophone(mute);
}
_muteChanged.notify(_mute);
}
void Call::setupOutgoingVideo() {
const auto started = _videoOutgoing->state();
_videoOutgoing->stateValue(
) | rpl::start_with_next([=](Webrtc::VideoState state) {
if (_state.current() != State::Established
&& state != started
&& !_videoCapture) {
_videoOutgoing->setState(started);
} else if (state != Webrtc::VideoState::Inactive) {
// Paused not supported right now.
#ifndef DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
Assert(state == Webrtc::VideoState::Active);
if (!_videoCapture) {
_videoCapture = tgcalls::VideoCaptureInterface::Create();
_videoCapture->setOutput(_videoOutgoing->sink());
}
if (_instance) {
_instance->setVideoCapture(_videoCapture);
}
_videoCapture->setState(tgcalls::VideoState::Active);
#endif // DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
} else if (_videoCapture) {
_videoCapture->setState(tgcalls::VideoState::Inactive);
}
}, _lifetime);
}
not_null<Webrtc::VideoTrack*> Call::videoIncoming() const {
return _videoIncoming.get();
}
not_null<Webrtc::VideoTrack*> Call::videoOutgoing() const {
return _videoOutgoing.get();
}
crl::time Call::getDurationMs() const {
@@ -299,7 +400,7 @@ void Call::redial() {
if (_state.current() != State::Busy) {
return;
}
Assert(_controller == nullptr);
Assert(_instance == nullptr);
_type = Type::Outgoing;
setState(State::Requesting);
_answerAfterDhConfigReceived = false;
@@ -308,7 +409,7 @@ void Call::redial() {
}
QString Call::getDebugLog() const {
return QString::fromStdString(_controller->getDebugInfo());
return QString::fromStdString(_instance->getDebugInfo());
}
void Call::startWaitingTrack() {
@@ -322,6 +423,21 @@ void Call::startWaitingTrack() {
_waitingTrack->playInLoop();
}
void Call::sendSignalingData(const QByteArray &data) {
_api.request(MTPphone_SendSignalingData(
MTP_inputPhoneCall(
MTP_long(_id),
MTP_long(_accessHash)),
MTP_bytes(data)
)).done([=](const MTPBool &result) {
if (!mtpIsTrue(result)) {
finish(FinishType::Failed);
}
}).fail([=](const RPCError &error) {
handleRequestError(error);
}).send();
}
float64 Call::getWaitingSoundPeakValue() const {
if (_waitingTrack) {
auto when = crl::now() + kSoundSampleMs / 4;
@@ -360,6 +476,7 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
finish(FinishType::Failed);
return true;
}
_id = data.vid().v;
_accessHash = data.vaccess_hash().v;
auto gaHashBytes = bytes::make_span(data.vg_a_hash().v);
@@ -404,7 +521,7 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
}
if (_type == Type::Incoming
&& _state.current() == State::ExchangingKeys
&& !_controller) {
&& !_instance) {
startConfirmedCall(data);
}
} return true;
@@ -415,8 +532,8 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
return false;
}
if (data.is_need_debug()) {
auto debugLog = _controller
? _controller->getDebugInfo()
auto debugLog = _instance
? _instance->getDebugInfo()
: std::string();
if (!debugLog.empty()) {
user()->session().api().request(MTPphone_SaveCallDebug(
@@ -462,11 +579,49 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
Unexpected("phoneCall type inside an existing call handleUpdate()");
}
void Call::updateRemoteMediaState(
tgcalls::AudioState audio,
tgcalls::VideoState video) {
_remoteAudioState = [&] {
using From = tgcalls::AudioState;
using To = RemoteAudioState;
switch (audio) {
case From::Active: return To::Active;
case From::Muted: return To::Muted;
}
Unexpected("Audio state in remoteMediaStateUpdated.");
}();
_videoIncoming->setState([&] {
using From = tgcalls::VideoState;
using To = Webrtc::VideoState;
switch (video) {
case From::Inactive: return To::Inactive;
case From::Paused: return To::Paused;
case From::Active: return To::Active;
}
Unexpected("Video state in remoteMediaStateUpdated.");
}());
}
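Note: the remote media states land in rpl variables that the header below exposes as remoteAudioStateValue() / remoteVideoStateValue(). A hypothetical consumption sketch, assuming `call` and a widget `lifetime()`:

	call->remoteAudioStateValue(
	) | rpl::start_with_next([=](Call::RemoteAudioState state) {
		const auto muted = (state == Call::RemoteAudioState::Muted);
		// E.g. toggle the "microphone muted" badge on the userpic.
	}, lifetime());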
bool Call::handleSignalingData(
const MTPDupdatePhoneCallSignalingData &data) {
if (data.vphone_call_id().v != _id || !_instance) {
return false;
}
auto prepared = ranges::view::all(
data.vdata().v
) | ranges::view::transform([](char byte) {
return static_cast<uint8_t>(byte);
}) | ranges::to_vector;
_instance->receiveSignalingData(std::move(prepared));
return true;
}
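Note: signaling data crosses the MTProto / tgcalls boundary as raw bytes, so each direction needs the small QByteArray <-> std::vector<uint8_t> conversion seen above and in signalingDataEmitted below. A standalone sketch of both conversions (the helper names are illustrative only, not part of the change):

	[[nodiscard]] std::vector<uint8_t> ToBytesVector(const QByteArray &data) {
		return ranges::view::all(
			data
		) | ranges::view::transform([](char byte) {
			return static_cast<uint8_t>(byte);
		}) | ranges::to_vector;
	}

	[[nodiscard]] QByteArray ToByteArray(const std::vector<uint8_t> &data) {
		return QByteArray(
			reinterpret_cast<const char*>(data.data()),
			data.size());
	}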
void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
Expects(_type == Type::Outgoing);
if (_state.current() == State::ExchangingKeys
|| _controller) {
|| _instance) {
LOG(("Call Warning: Unexpected confirmAcceptedCall."));
return;
}
@@ -494,9 +649,9 @@ void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
MTP_int(kMinLayer),
MTP_int(TgVoip::getConnectionMaxLayer()),
MTP_int(tgcalls::Meta::MaxLayer()),
MTP_vector(CollectVersionsForApi()))
)).done([this](const MTPphone_PhoneCall &result) {
)).done([=](const MTPphone_PhoneCall &result) {
Expects(result.type() == mtpc_phone_phoneCall);
auto &call = result.c_phone_phoneCall();
@@ -508,7 +663,7 @@ void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
}
createAndStartController(call.vphone_call().c_phoneCall());
}).fail([this](const RPCError &error) {
}).fail([=](const RPCError &error) {
handleRequestError(error);
}).send();
}
@@ -539,83 +694,117 @@ void Call::startConfirmedCall(const MTPDphoneCall &call) {
void Call::createAndStartController(const MTPDphoneCall &call) {
_discardByTimeoutTimer.cancel();
if (!checkCallFields(call)) {
if (!checkCallFields(call) || _authKey.size() != 256) {
return;
}
const auto &protocol = call.vprotocol().c_phoneCallProtocol();
const auto &serverConfig = _user->session().serverConfig();
TgVoipConfig config;
config.dataSaving = TgVoipDataSaving::Never;
config.enableAEC = !Platform::IsMac10_7OrGreater();
config.enableNS = true;
config.enableAGC = true;
config.enableVolumeControl = true;
config.initializationTimeout = serverConfig.callConnectTimeoutMs / 1000.;
config.receiveTimeout = serverConfig.callPacketTimeoutMs / 1000.;
config.enableP2P = call.is_p2p_allowed();
config.maxApiLayer = protocol.vmax_layer().v;
auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
memcpy(encryptionKeyValue->data(), _authKey.data(), 256);
const auto weak = base::make_weak(this);
tgcalls::Descriptor descriptor = {
.config = tgcalls::Config{
.initializationTimeout = serverConfig.callConnectTimeoutMs / 1000.,
.receiveTimeout = serverConfig.callPacketTimeoutMs / 1000.,
.dataSaving = tgcalls::DataSaving::Never,
.enableP2P = call.is_p2p_allowed(),
.enableAEC = !Platform::IsMac10_7OrGreater(),
.enableNS = true,
.enableAGC = true,
.enableVolumeControl = true,
.maxApiLayer = protocol.vmax_layer().v,
},
.encryptionKey = tgcalls::EncryptionKey(
std::move(encryptionKeyValue),
(_type == Type::Outgoing)),
.videoCapture = _videoCapture,
.stateUpdated = [=](tgcalls::State state) {
crl::on_main(weak, [=] {
handleControllerStateChange(state);
});
},
.signalBarsUpdated = [=](int count) {
crl::on_main(weak, [=] {
handleControllerBarCountChange(count);
});
},
.remoteMediaStateUpdated = [=](tgcalls::AudioState audio, tgcalls::VideoState video) {
crl::on_main(weak, [=] {
updateRemoteMediaState(audio, video);
});
},
.signalingDataEmitted = [=](const std::vector<uint8_t> &data) {
const auto bytes = QByteArray(
reinterpret_cast<const char*>(data.data()),
data.size());
crl::on_main(weak, [=] {
sendSignalingData(bytes);
});
},
};
if (Logs::DebugEnabled()) {
auto callLogFolder = cWorkingDir() + qsl("DebugLogs");
auto callLogPath = callLogFolder + qsl("/last_call_log.txt");
auto callLogNative = QDir::toNativeSeparators(callLogPath);
#ifdef Q_OS_WIN
config.logPath = callLogNative.toStdWString();
descriptor.config.logPath = callLogNative.toStdWString();
#else // Q_OS_WIN
const auto callLogUtf = QFile::encodeName(callLogNative);
config.logPath.resize(callLogUtf.size());
ranges::copy(callLogUtf, config.logPath.begin());
descriptor.config.logPath.resize(callLogUtf.size());
ranges::copy(callLogUtf, descriptor.config.logPath.begin());
#endif // Q_OS_WIN
QFile(callLogPath).remove();
QDir().mkpath(callLogFolder);
}
auto endpoints = std::vector<TgVoipEndpoint>();
for (const auto &connection : call.vconnections().v) {
AppendEndpoint(endpoints, connection);
AppendEndpoint(descriptor.endpoints, connection);
}
for (const auto &connection : call.vconnections().v) {
AppendServer(descriptor.rtcServers, connection);
}
auto proxy = TgVoipProxy();
if (Global::UseProxyForCalls()
&& (Global::ProxySettings() == MTP::ProxyData::Settings::Enabled)) {
const auto &selected = Global::SelectedProxy();
if (selected.supportsCalls()) {
if (selected.supportsCalls() && !selected.host.isEmpty()) {
Assert(selected.type == MTP::ProxyData::Type::Socks5);
proxy.host = selected.host.toStdString();
proxy.port = selected.port;
proxy.login = selected.user.toStdString();
proxy.password = selected.password.toStdString();
descriptor.proxy = std::make_unique<tgcalls::Proxy>();
descriptor.proxy->host = selected.host.toStdString();
descriptor.proxy->port = selected.port;
descriptor.proxy->login = selected.user.toStdString();
descriptor.proxy->password = selected.password.toStdString();
}
}
auto encryptionKey = TgVoipEncryptionKey();
encryptionKey.isOutgoing = (_type == Type::Outgoing);
encryptionKey.value = ranges::view::all(
_authKey
) | ranges::view::transform([](bytes::type byte) {
return static_cast<uint8_t>(byte);
}) | ranges::to_vector;
const auto version = call.vprotocol().match([&](
const MTPDphoneCallProtocol &data) {
return data.vlibrary_versions().v;
}).value(0, MTP_bytes(kDefaultVersion)).v;
_controller = MakeController(
"2.4.4",
config,
TgVoipPersistentState(),
endpoints,
proxy.host.empty() ? nullptr : &proxy,
TgVoipNetworkType::Unknown,
encryptionKey);
const auto raw = _controller.get();
raw->setOnStateUpdated([=](TgVoipState state) {
handleControllerStateChange(raw, state);
});
raw->setOnSignalBarsUpdated([=](int count) {
handleControllerBarCountChange(count);
});
if (_mute) {
raw->setMuteMicrophone(_mute);
LOG(("Call Info: Creating instance with version '%1', allowP2P: %2"
).arg(QString::fromUtf8(version)
).arg(Logs::b(descriptor.config.enableP2P)));
_instance = tgcalls::Meta::Create(
version.toStdString(),
std::move(descriptor));
if (!_instance) {
LOG(("Call Error: Wrong library version: %1."
).arg(QString::fromUtf8(version)));
finish(FinishType::Failed);
return;
}
const auto raw = _instance.get();
if (_muted.current()) {
raw->setMuteMicrophone(_muted.current());
}
raw->setIncomingVideoOutput(_videoIncoming->sink());
const auto &settings = Core::App().settings();
raw->setAudioOutputDevice(
settings.callOutputDeviceID().toStdString());
@@ -626,32 +815,27 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled());
}
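Note: every callback registered in the tgcalls::Descriptor (stateUpdated, signalBarsUpdated, remoteMediaStateUpdated, signalingDataEmitted) is invoked on tgcalls' own threads, so each one re-posts to the main thread through crl::on_main with a weak pointer before touching Call state; that is why the old setStateQueued / "arbitrary thread" handling below can be dropped. A minimal sketch of the pattern, assuming `descriptor` is the tgcalls::Descriptor being filled above and `this` is the Call:

	const auto weak = base::make_weak(this);
	descriptor.stateUpdated = [=](tgcalls::State state) {
		crl::on_main(weak, [=] {
			handleControllerStateChange(state); // runs on the main thread only
		});
	};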
void Call::handleControllerStateChange(
not_null<Controller*> controller,
TgVoipState state) {
// NB! Can be called from an arbitrary thread!
// This can be called from ~VoIPController()!
void Call::handleControllerStateChange(tgcalls::State state) {
switch (state) {
case TgVoipState::WaitInit: {
case tgcalls::State::WaitInit: {
DEBUG_LOG(("Call Info: State changed to WaitingInit."));
setStateQueued(State::WaitingInit);
setState(State::WaitingInit);
} break;
case TgVoipState::WaitInitAck: {
case tgcalls::State::WaitInitAck: {
DEBUG_LOG(("Call Info: State changed to WaitingInitAck."));
setStateQueued(State::WaitingInitAck);
setState(State::WaitingInitAck);
} break;
case TgVoipState::Established: {
case tgcalls::State::Established: {
DEBUG_LOG(("Call Info: State changed to Established."));
setStateQueued(State::Established);
setState(State::Established);
} break;
case TgVoipState::Failed: {
auto error = QString::fromStdString(controller->getLastError());
case tgcalls::State::Failed: {
auto error = QString::fromStdString(_instance->getLastError());
LOG(("Call Info: State changed to Failed, error: %1.").arg(error));
setFailedQueued(error);
handleControllerError(error);
} break;
default: LOG(("Call Error: Unexpected state in handleStateChange: %1"
@@ -660,19 +844,11 @@ void Call::handleControllerStateChange(
}
void Call::handleControllerBarCountChange(int count) {
// NB! Can be called from an arbitrary thread!
// This can be called from ~VoIPController()!
crl::on_main(this, [=] {
setSignalBarCount(count);
});
setSignalBarCount(count);
}
void Call::setSignalBarCount(int count) {
if (_signalBarCount != count) {
_signalBarCount = count;
_signalBarCountChanged.notify(count);
}
_signalBarCount = count;
}
template <typename T>
@@ -766,28 +942,28 @@ void Call::setState(State state) {
}
void Call::setCurrentAudioDevice(bool input, std::string deviceID) {
if (_controller) {
if (_instance) {
if (input) {
_controller->setAudioInputDevice(deviceID);
_instance->setAudioInputDevice(deviceID);
} else {
_controller->setAudioOutputDevice(deviceID);
_instance->setAudioOutputDevice(deviceID);
}
}
}
void Call::setAudioVolume(bool input, float level) {
if (_controller) {
if (_instance) {
if (input) {
_controller->setInputVolume(level);
_instance->setInputVolume(level);
} else {
_controller->setOutputVolume(level);
_instance->setOutputVolume(level);
}
}
}
void Call::setAudioDuckingEnabled(bool enabled) {
if (_controller) {
_controller->setAudioOutputDuckingEnabled(enabled);
if (_instance) {
_instance->setAudioOutputDuckingEnabled(enabled);
}
}
@@ -818,10 +994,14 @@ void Call::finish(FinishType type, const MTPPhoneCallDiscardReason &reason) {
setState(hangupState);
auto duration = getDurationMs() / 1000;
auto connectionId = _controller ? _controller->getPreferredRelayId() : 0;
auto connectionId = _instance ? _instance->getPreferredRelayId() : 0;
_finishByTimeoutTimer.call(kHangupTimeoutMs, [this, finalState] { setState(finalState); });
const auto flags = ((_videoIncoming->state() != Webrtc::VideoState::Inactive)
|| (_videoOutgoing->state() != Webrtc::VideoState::Inactive))
? MTPphone_DiscardCall::Flag::f_video
: MTPphone_DiscardCall::Flag(0);
_api.request(MTPphone_DiscardCall(
MTP_flags(0),
MTP_flags(flags),
MTP_inputPhoneCall(
MTP_long(_id),
MTP_long(_accessHash)),
@@ -874,9 +1054,11 @@ void Call::handleControllerError(const QString &error) {
}
void Call::destroyController() {
if (_controller) {
if (_instance) {
const auto state = _instance->stop();
DEBUG_LOG(("Call Info: Destroying call controller.."));
_controller.reset();
_instance.reset();
DEBUG_LOG(("Call Info: Call controller destroyed."));
}
setSignalBarCount(kSignalBarFinished);
@@ -887,7 +1069,7 @@ Call::~Call() {
}
void UpdateConfig(const std::string &data) {
TgVoip::setGlobalServerConfig(data);
tgcalls::SetLegacyGlobalServerConfig(data);
}
} // namespace Calls

View File

@@ -19,12 +19,21 @@ class Track;
} // namespace Audio
} // namespace Media
enum class TgVoipState;
namespace tgcalls {
class Instance;
class VideoCaptureInterface;
enum class State;
enum class VideoState;
enum class AudioState;
} // namespace tgcalls
namespace Webrtc {
enum class VideoState;
class VideoTrack;
} // namespace Webrtc
namespace Calls {
class Controller;
struct DhConfig {
int32 version = 0;
int32 g = 0;
@@ -46,9 +55,9 @@ public:
Ended,
};
virtual void playSound(Sound sound) = 0;
virtual void requestMicrophonePermissionOrFail(Fn<void()> result) = 0;
virtual void requestPermissionsOrFail(Fn<void()> result) = 0;
virtual ~Delegate();
virtual ~Delegate() = default;
};
@@ -58,7 +67,7 @@ public:
Incoming,
Outgoing,
};
Call(not_null<Delegate*> delegate, not_null<UserData*> user, Type type);
Call(not_null<Delegate*> delegate, not_null<UserData*> user, Type type, bool video);
[[nodiscard]] Type type() const {
return _type;
@@ -70,6 +79,7 @@ public:
void start(bytes::const_span random);
bool handleUpdate(const MTPPhoneCall &call);
bool handleSignalingData(const MTPDupdatePhoneCallSignalingData &data);
enum State {
Starting,
@@ -88,28 +98,51 @@ public:
Ringing,
Busy,
};
State state() const {
[[nodiscard]] State state() const {
return _state.current();
}
rpl::producer<State> stateValue() const {
[[nodiscard]] rpl::producer<State> stateValue() const {
return _state.value();
}
enum class RemoteAudioState {
Muted,
Active,
};
[[nodiscard]] RemoteAudioState remoteAudioState() const {
return _remoteAudioState.current();
}
[[nodiscard]] auto remoteAudioStateValue() const
-> rpl::producer<RemoteAudioState> {
return _remoteAudioState.value();
}
[[nodiscard]] Webrtc::VideoState remoteVideoState() const {
return _remoteVideoState.current();
}
[[nodiscard]] auto remoteVideoStateValue() const
-> rpl::producer<Webrtc::VideoState> {
return _remoteVideoState.value();
}
static constexpr auto kSignalBarStarting = -1;
static constexpr auto kSignalBarFinished = -2;
static constexpr auto kSignalBarCount = 4;
base::Observable<int> &signalBarCountChanged() {
return _signalBarCountChanged;
[[nodiscard]] rpl::producer<int> signalBarCountValue() const {
return _signalBarCount.value();
}
void setMute(bool mute);
bool isMute() const {
return _mute;
void setMuted(bool mute);
[[nodiscard]] bool muted() const {
return _muted.current();
}
base::Observable<bool> &muteChanged() {
return _muteChanged;
[[nodiscard]] rpl::producer<bool> mutedValue() const {
return _muted.value();
}
[[nodiscard]] not_null<Webrtc::VideoTrack*> videoIncoming() const;
[[nodiscard]] not_null<Webrtc::VideoTrack*> videoOutgoing() const;
crl::time getDurationMs() const;
float64 getWaitingSoundPeakValue() const;
@@ -140,15 +173,17 @@ private:
};
void handleRequestError(const RPCError &error);
void handleControllerError(const QString &error);
void finish(FinishType type, const MTPPhoneCallDiscardReason &reason = MTP_phoneCallDiscardReasonDisconnect());
void finish(
FinishType type,
const MTPPhoneCallDiscardReason &reason
= MTP_phoneCallDiscardReasonDisconnect());
void startOutgoing();
void startIncoming();
void startWaitingTrack();
void sendSignalingData(const QByteArray &data);
void generateModExpFirst(bytes::const_span randomSeed);
void handleControllerStateChange(
not_null<Controller*> controller,
TgVoipState state);
void handleControllerStateChange(tgcalls::State state);
void handleControllerBarCountChange(int count);
void createAndStartController(const MTPDphoneCall &call);
@@ -166,21 +201,26 @@ private:
void setSignalBarCount(int count);
void destroyController();
not_null<Delegate*> _delegate;
not_null<UserData*> _user;
void setupOutgoingVideo();
void updateRemoteMediaState(
tgcalls::AudioState audio,
tgcalls::VideoState video);
const not_null<Delegate*> _delegate;
const not_null<UserData*> _user;
MTP::Sender _api;
Type _type = Type::Outgoing;
rpl::variable<State> _state = State::Starting;
rpl::variable<RemoteAudioState> _remoteAudioState = RemoteAudioState::Active;
rpl::variable<Webrtc::VideoState> _remoteVideoState;
FinishType _finishAfterRequestingCall = FinishType::None;
bool _answerAfterDhConfigReceived = false;
int _signalBarCount = kSignalBarStarting;
base::Observable<int> _signalBarCountChanged;
rpl::variable<int> _signalBarCount = kSignalBarStarting;
crl::time _startTime = 0;
base::DelayedCallTimer _finishByTimeoutTimer;
base::Timer _discardByTimeoutTimer;
bool _mute = false;
base::Observable<bool> _muteChanged;
rpl::variable<bool> _muted = false;
DhConfig _dhConfig;
bytes::vector _ga;
@@ -194,7 +234,10 @@ private:
uint64 _accessHash = 0;
uint64 _keyFingerprint = 0;
std::unique_ptr<Controller> _controller;
std::unique_ptr<tgcalls::Instance> _instance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
const std::unique_ptr<Webrtc::VideoTrack> _videoIncoming;
const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
std::unique_ptr<Media::Audio::Track> _waitingTrack;

View File

@@ -8,6 +8,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "calls/calls_controller.h"
#include "calls/calls_controller_tgvoip.h"
#include "calls/calls_controller_webrtc.h"
namespace Calls {
@@ -18,7 +19,20 @@ namespace Calls {
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey) {
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame) {
if (version == WebrtcController::Version()) {
return std::make_unique<WebrtcController>(
config,
persistentState,
endpoints,
proxy,
initialNetworkType,
encryptionKey,
std::move(sendSignalingData),
std::move(displayNextFrame));
}
return std::make_unique<TgVoipController>(
config,
persistentState,
@@ -28,4 +42,12 @@ namespace Calls {
encryptionKey);
}
std::vector<std::string> CollectControllerVersions() {
return { WebrtcController::Version(), TgVoipController::Version() };
}
int ControllerMaxLayer() {
return TgVoip::getConnectionMaxLayer();
}
} // namespace Calls
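Note: MakeController() now also receives the signaling and frame callbacks and dispatches on the negotiated version, picking WebrtcController when it matches WebrtcController::Version() and falling back to TgVoipController otherwise. A hypothetical call-site sketch, mirroring the argument names used by the older Call code above:

	auto controller = MakeController(
		version, // e.g. one of CollectControllerVersions()
		config,
		TgVoipPersistentState(),
		endpoints,
		proxy.host.empty() ? nullptr : &proxy,
		TgVoipNetworkType::Unknown,
		encryptionKey,
		[=](QByteArray data) { sendSignalingData(data); },
		[=](QImage frame) { /* hand the frame to the incoming video widget */ });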

View File

@@ -26,6 +26,7 @@ public:
virtual void setInputVolume(float level) = 0;
virtual void setOutputVolume(float level) = 0;
virtual void setAudioOutputDuckingEnabled(bool enabled) = 0;
virtual bool receiveSignalingData(const QByteArray &data) = 0;
virtual std::string getLastError() = 0;
virtual std::string getDebugInfo() = 0;
@@ -48,6 +49,11 @@ public:
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey);
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame);
[[nodiscard]] std::vector<std::string> CollectControllerVersions();
[[nodiscard]] int ControllerMaxLayer();
} // namespace Calls

View File

@@ -33,7 +33,7 @@ public:
return TgVoip::getVersion();
}
[[nodiscard]] std::string version() override {
std::string version() override {
return Version();
}
void setNetworkType(TgVoipNetworkType networkType) override {
@@ -63,6 +63,9 @@ public:
void setAudioOutputDuckingEnabled(bool enabled) override {
_impl->setAudioOutputDuckingEnabled(enabled);
}
bool receiveSignalingData(const QByteArray &data) override {
return false;
}
std::string getLastError() override {
return _impl->getLastError();
}
@@ -81,8 +84,7 @@ public:
void setOnStateUpdated(Fn<void(TgVoipState)> onStateUpdated) override {
_impl->setOnStateUpdated(std::move(onStateUpdated));
}
void setOnSignalBarsUpdated(
Fn<void(int)> onSignalBarsUpdated) override {
void setOnSignalBarsUpdated(Fn<void(int)> onSignalBarsUpdated) override {
_impl->setOnSignalBarsUpdated(std::move(onSignalBarsUpdated));
}
TgVoipFinalState stop() override {

View File

@@ -0,0 +1,175 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/calls_controller_webrtc.h"
#include "webrtc/webrtc_call_context.h"
namespace Calls {
namespace {
using namespace Webrtc;
[[nodiscard]] CallConnectionDescription ConvertEndpoint(const TgVoipEndpoint &data) {
return CallConnectionDescription{
.ip = QString::fromStdString(data.host.ipv4),
.ipv6 = QString::fromStdString(data.host.ipv6),
.peerTag = QByteArray(
reinterpret_cast<const char*>(data.peerTag),
base::array_size(data.peerTag)),
.connectionId = data.endpointId,
.port = data.port,
};
}
[[nodiscard]] CallContext::Config MakeContextConfig(
const TgVoipConfig &config,
const TgVoipPersistentState &persistentState,
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame) {
Expects(!endpoints.empty());
auto result = CallContext::Config{
.proxy = (proxy
? ProxyServer{
.host = QString::fromStdString(proxy->host),
.username = QString::fromStdString(proxy->login),
.password = QString::fromStdString(proxy->password),
.port = proxy->port }
: ProxyServer()),
.dataSaving = (config.dataSaving != TgVoipDataSaving::Never),
.key = QByteArray(
reinterpret_cast<const char*>(encryptionKey.value.data()),
encryptionKey.value.size()),
.outgoing = encryptionKey.isOutgoing,
.primary = ConvertEndpoint(endpoints.front()),
.alternatives = endpoints | ranges::view::drop(
1
) | ranges::view::transform(ConvertEndpoint) | ranges::to_vector,
.maxLayer = config.maxApiLayer,
.allowP2P = config.enableP2P,
.sendSignalingData = std::move(sendSignalingData),
.displayNextFrame = std::move(displayNextFrame),
};
return result;
}
} // namespace
WebrtcController::WebrtcController(
const TgVoipConfig &config,
const TgVoipPersistentState &persistentState,
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame)
: _impl(std::make_unique<CallContext>(MakeContextConfig(
config,
persistentState,
endpoints,
proxy,
initialNetworkType,
encryptionKey,
std::move(sendSignalingData),
std::move(displayNextFrame)))) {
}
WebrtcController::~WebrtcController() = default;
std::string WebrtcController::Version() {
return CallContext::Version().toStdString();
}
std::string WebrtcController::version() {
return Version();
}
void WebrtcController::setNetworkType(TgVoipNetworkType networkType) {
}
void WebrtcController::setMuteMicrophone(bool muteMicrophone) {
_impl->setIsMuted(muteMicrophone);
}
void WebrtcController::setAudioOutputGainControlEnabled(bool enabled) {
}
void WebrtcController::setEchoCancellationStrength(int strength) {
}
void WebrtcController::setAudioInputDevice(std::string id) {
}
void WebrtcController::setAudioOutputDevice(std::string id) {
}
void WebrtcController::setInputVolume(float level) {
}
void WebrtcController::setOutputVolume(float level) {
}
void WebrtcController::setAudioOutputDuckingEnabled(bool enabled) {
}
bool WebrtcController::receiveSignalingData(const QByteArray &data) {
return _impl->receiveSignalingData(data);
}
std::string WebrtcController::getLastError() {
return {};
}
std::string WebrtcController::getDebugInfo() {
return _impl->getDebugInfo().toStdString();
}
int64_t WebrtcController::getPreferredRelayId() {
return 0;
}
TgVoipTrafficStats WebrtcController::getTrafficStats() {
return {};
}
TgVoipPersistentState WebrtcController::getPersistentState() {
return TgVoipPersistentState{};
}
void WebrtcController::setOnStateUpdated(
Fn<void(TgVoipState)> onStateUpdated) {
_stateUpdatedLifetime.destroy();
_impl->state().changes(
) | rpl::start_with_next([=](CallState state) {
onStateUpdated([&] {
switch (state) {
case CallState::Initializing: return TgVoipState::WaitInit;
case CallState::Reconnecting: return TgVoipState::Reconnecting;
case CallState::Connected: return TgVoipState::Established;
case CallState::Failed: return TgVoipState::Failed;
}
Unexpected("State value in Webrtc::CallContext::state.");
}());
}, _stateUpdatedLifetime);
}
void WebrtcController::setOnSignalBarsUpdated(
Fn<void(int)> onSignalBarsUpdated) {
}
TgVoipFinalState WebrtcController::stop() {
_impl->stop();
return TgVoipFinalState();
}
} // namespace Calls

View File

@@ -0,0 +1,60 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "calls/calls_controller.h"
namespace Webrtc {
class CallContext;
} // namespace Webrtc
namespace Calls {
class WebrtcController final : public Controller {
public:
WebrtcController(
const TgVoipConfig &config,
const TgVoipPersistentState &persistentState,
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame);
~WebrtcController();
[[nodiscard]] static std::string Version();
std::string version() override;
void setNetworkType(TgVoipNetworkType networkType) override;
void setMuteMicrophone(bool muteMicrophone) override;
void setAudioOutputGainControlEnabled(bool enabled) override;
void setEchoCancellationStrength(int strength) override;
void setAudioInputDevice(std::string id) override;
void setAudioOutputDevice(std::string id) override;
void setInputVolume(float level) override;
void setOutputVolume(float level) override;
void setAudioOutputDuckingEnabled(bool enabled) override;
bool receiveSignalingData(const QByteArray &data) override;
std::string getLastError() override;
std::string getDebugInfo() override;
int64_t getPreferredRelayId() override;
TgVoipTrafficStats getTrafficStats() override;
TgVoipPersistentState getPersistentState() override;
void setOnStateUpdated(Fn<void(TgVoipState)> onStateUpdated) override;
void setOnSignalBarsUpdated(Fn<void(int)> onSignalBarsUpdated) override;
TgVoipFinalState stop() override;
private:
const std::unique_ptr<Webrtc::CallContext> _impl;
rpl::lifetime _stateUpdatedLifetime;
};
} // namespace Calls

View File

@@ -8,11 +8,19 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "calls/calls_emoji_fingerprint.h"
#include "calls/calls_call.h"
#include "calls/calls_signal_bars.h"
#include "lang/lang_keys.h"
#include "data/data_user.h"
#include "ui/widgets/tooltip.h"
#include "ui/emoji_config.h"
#include "ui/rp_widget.h"
#include "styles/style_calls.h"
namespace Calls {
namespace {
constexpr auto kTooltipShowTimeoutMs = 1000;
const ushort Data[] = {
0xd83d, 0xde09, 0xd83d, 0xde0d, 0xd83d, 0xde1b, 0xd83d, 0xde2d, 0xd83d, 0xde31, 0xd83d, 0xde21,
0xd83d, 0xde0e, 0xd83d, 0xde34, 0xd83d, 0xde35, 0xd83d, 0xde08, 0xd83d, 0xde2c, 0xd83d, 0xde07,
@@ -143,7 +151,147 @@ std::vector<EmojiPtr> ComputeEmojiFingerprint(not_null<Call*> call) {
}
}
return result;
}
object_ptr<Ui::RpWidget> CreateFingerprintAndSignalBars(
not_null<QWidget*> parent,
not_null<Call*> call) {
class EmojiTooltipShower final : public Ui::AbstractTooltipShower {
public:
EmojiTooltipShower(not_null<QWidget*> window, const QString &text)
: _window(window)
, _text(text) {
}
QString tooltipText() const override {
return _text;
}
QPoint tooltipPos() const override {
return QCursor::pos();
}
bool tooltipWindowActive() const override {
return _window->isActiveWindow();
}
private:
const not_null<QWidget*> _window;
const QString _text;
};
auto result = object_ptr<Ui::RpWidget>(parent);
const auto raw = result.data();
// Emoji tooltip.
const auto shower = raw->lifetime().make_state<EmojiTooltipShower>(
parent->window(),
tr::lng_call_fingerprint_tooltip(
tr::now,
lt_user,
call->user()->name));
raw->setMouseTracking(true);
raw->events(
) | rpl::start_with_next([=](not_null<QEvent*> e) {
if (e->type() == QEvent::MouseMove) {
Ui::Tooltip::Show(kTooltipShowTimeoutMs, shower);
} else if (e->type() == QEvent::Leave) {
Ui::Tooltip::Hide();
}
}, raw->lifetime());
// Signal bars.
const auto bars = Ui::CreateChild<SignalBars>(
raw,
call,
st::callPanelSignalBars);
bars->setAttribute(Qt::WA_TransparentForMouseEvents);
// Geometry.
const auto print = ComputeEmojiFingerprint(call);
auto realSize = Ui::Emoji::GetSizeNormal();
auto size = realSize / cIntRetinaFactor();
auto count = print.size();
const auto printSize = QSize(
count * size + (count - 1) * st::callFingerprintSkip,
size);
const auto fullPrintSize = QRect(
QPoint(),
printSize
).marginsAdded(st::callFingerprintPadding).size();
const auto fullBarsSize = bars->rect().marginsAdded(
st::callSignalBarsPadding
).size();
const auto fullSize = QSize(
(fullPrintSize.width()
+ st::callFingerprintSignalBarsSkip
+ fullBarsSize.width()),
fullPrintSize.height());
raw->resize(fullSize);
bars->moveToRight(
st::callSignalBarsPadding.right(),
st::callSignalBarsPadding.top());
// Paint.
const auto background = raw->lifetime().make_state<QImage>(
fullSize * cIntRetinaFactor(),
QImage::Format_ARGB32_Premultiplied);
background->setDevicePixelRatio(cRetinaFactor());
rpl::merge(
rpl::single(rpl::empty_value()),
Ui::Emoji::Updated(),
style::PaletteChanged()
) | rpl::start_with_next([=] {
background->fill(Qt::transparent);
// Prepare.
auto p = QPainter(background);
const auto height = fullSize.height();
const auto fullPrintRect = QRect(QPoint(), fullPrintSize);
const auto fullBarsRect = QRect(
fullSize.width() - fullBarsSize.width(),
0,
fullBarsSize.width(),
height);
const auto bigRadius = height / 2;
const auto smallRadius = st::buttonRadius;
const auto hq = PainterHighQualityEnabler(p);
p.setPen(Qt::NoPen);
p.setBrush(st::callBgButton);
// Fingerprint part.
p.setClipRect(0, 0, fullPrintSize.width() / 2, height);
p.drawRoundedRect(fullPrintRect, bigRadius, bigRadius);
p.setClipRect(fullPrintSize.width() / 2, 0, fullSize.width(), height);
p.drawRoundedRect(fullPrintRect, smallRadius, smallRadius);
// Signal bars part.
const auto middle = fullBarsRect.center().x();
p.setClipRect(0, 0, middle, height);
p.drawRoundedRect(fullBarsRect, smallRadius, smallRadius);
p.setClipRect(middle, 0, fullBarsRect.width(), height);
p.drawRoundedRect(fullBarsRect, bigRadius, bigRadius);
// Emoji.
const auto realSize = Ui::Emoji::GetSizeNormal();
const auto size = realSize / cIntRetinaFactor();
auto left = st::callFingerprintPadding.left();
const auto top = st::callFingerprintPadding.top();
p.setClipping(false);
for (const auto emoji : print) {
Ui::Emoji::Draw(p, emoji, realSize, left, top);
left += st::callFingerprintSkip + size;
}
raw->update();
}, raw->lifetime());
raw->paintRequest(
) | rpl::start_with_next([=](QRect clip) {
QPainter(raw).drawImage(raw->rect(), *background);
}, raw->lifetime());
raw->show();
return result;
}
} // namespace Calls
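Note: CreateFingerprintAndSignalBars() bundles the emoji fingerprint, its tooltip and the SignalBars widget into one self-painting Ui::RpWidget. A usage sketch, assuming a panel widget `parent`, a valid `call`, and a hypothetical style constant for the vertical offset:

	auto fingerprint = CreateFingerprintAndSignalBars(parent, call);
	fingerprint->moveToLeft(
		(parent->width() - fingerprint->width()) / 2,
		st::callFingerprintTop); // illustrative constant, not from this change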

View File

@@ -7,10 +7,21 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "base/object_ptr.h"
namespace Ui {
class RpWidget;
} // namespace Ui
namespace Calls {
class Call;
std::vector<EmojiPtr> ComputeEmojiFingerprint(not_null<Call*> call);
[[nodiscard]] std::vector<EmojiPtr> ComputeEmojiFingerprint(
not_null<Call*> call);
[[nodiscard]] object_ptr<Ui::RpWidget> CreateFingerprintAndSignalBars(
not_null<QWidget*> parent,
not_null<Call*> call);
} // namespace Calls

View File

@@ -35,15 +35,9 @@ constexpr auto kServerConfigUpdateTimeoutMs = 24 * 3600 * crl::time(1000);
Instance::Instance() = default;
Instance::~Instance() {
for (const auto panel : _pendingPanels) {
if (panel) {
delete panel;
}
}
}
Instance::~Instance() = default;
void Instance::startOutgoingCall(not_null<UserData*> user) {
void Instance::startOutgoingCall(not_null<UserData*> user, bool video) {
if (alreadyInCall()) { // Already in a call.
_currentCallPanel->showAndActivate();
return;
@@ -55,17 +49,21 @@ void Instance::startOutgoingCall(not_null<UserData*> user) {
tr::lng_call_error_not_available(tr::now, lt_user, user->name)));
return;
}
requestMicrophonePermissionOrFail(crl::guard(this, [=] {
createCall(user, Call::Type::Outgoing);
requestPermissionsOrFail(crl::guard(this, [=] {
createCall(user, Call::Type::Outgoing, video);
}));
}
void Instance::callFinished(not_null<Call*> call) {
destroyCall(call);
crl::on_main(call, [=] {
destroyCall(call);
});
}
void Instance::callFailed(not_null<Call*> call) {
destroyCall(call);
crl::on_main(call, [=] {
destroyCall(call);
});
}
void Instance::callRedial(not_null<Call*> call) {
@@ -107,7 +105,9 @@ void Instance::playSound(Sound sound) {
void Instance::destroyCall(not_null<Call*> call) {
if (_currentCall.get() == call) {
destroyCurrentPanel();
_currentCallPanel->closeBeforeDestroy();
_currentCallPanel = nullptr;
auto taken = base::take(_currentCall);
_currentCallChanges.fire(nullptr);
taken.reset();
@@ -119,19 +119,8 @@ void Instance::destroyCall(not_null<Call*> call) {
}
}
void Instance::destroyCurrentPanel() {
_pendingPanels.erase(
std::remove_if(
_pendingPanels.begin(),
_pendingPanels.end(),
[](auto &&panel) { return !panel; }),
_pendingPanels.end());
_pendingPanels.emplace_back(_currentCallPanel.release());
_pendingPanels.back()->hideAndDestroy(); // Always queues the destruction.
}
void Instance::createCall(not_null<UserData*> user, Call::Type type) {
auto call = std::make_unique<Call>(getCallDelegate(), user, type);
void Instance::createCall(not_null<UserData*> user, Call::Type type, bool video) {
auto call = std::make_unique<Call>(getCallDelegate(), user, type, video);
const auto raw = call.get();
user->session().account().sessionChanges(
@@ -232,13 +221,19 @@ void Instance::refreshServerConfig(not_null<Main::Session*> session) {
UpdateConfig(std::string(json.data(), json.size()));
}).fail([=](const RPCError &error) {
_serverConfigRequestSession = nullptr;
}).send();
}).send();
}
void Instance::handleUpdate(
not_null<Main::Session*> session,
const MTPDupdatePhoneCall& update) {
handleCallUpdate(session, update.vphone_call());
const MTPUpdate &update) {
update.match([&](const MTPDupdatePhoneCall &data) {
handleCallUpdate(session, data.vphone_call());
}, [&](const MTPDupdatePhoneCallSignalingData &data) {
handleSignalingData(data);
}, [](const auto &) {
Unexpected("Update type in Calls::Instance::handleUpdate.");
});
}
void Instance::showInfoPanel(not_null<Call*> call) {
@@ -272,8 +267,11 @@ void Instance::handleCallUpdate(
}
const auto &config = session->serverConfig();
if (alreadyInCall() || !user || user->isSelf()) {
const auto flags = phoneCall.is_video()
? MTPphone_DiscardCall::Flag::f_video
: MTPphone_DiscardCall::Flag(0);
session->api().request(MTPphone_DiscardCall(
MTP_flags(0),
MTP_flags(flags),
MTP_inputPhoneCall(phoneCall.vid(), phoneCall.vaccess_hash()),
MTP_int(0),
MTP_phoneCallDiscardReasonBusy(),
@@ -283,7 +281,7 @@ void Instance::handleCallUpdate(
< base::unixtime::now()) {
LOG(("Ignoring too old call."));
} else {
createCall(user, Call::Type::Incoming);
createCall(user, Call::Type::Incoming, phoneCall.is_video());
_currentCall->handleUpdate(call);
}
} else if (!_currentCall || !_currentCall->handleUpdate(call)) {
@@ -291,6 +289,14 @@ void Instance::handleCallUpdate(
}
}
void Instance::handleSignalingData(
const MTPDupdatePhoneCallSignalingData &data) {
if (!_currentCall || !_currentCall->handleSignalingData(data)) {
DEBUG_LOG(("API Warning: unexpected call signaling data %1"
).arg(data.vphone_call_id().v));
}
}
bool Instance::alreadyInCall() {
return (_currentCall && _currentCall->state() != Call::State::Busy);
}
@@ -303,13 +309,23 @@ rpl::producer<Call*> Instance::currentCallValue() const {
return _currentCallChanges.events_starting_with(currentCall());
}
void Instance::requestMicrophonePermissionOrFail(Fn<void()> onSuccess) {
Platform::PermissionStatus status=Platform::GetPermissionStatus(Platform::PermissionType::Microphone);
if (status==Platform::PermissionStatus::Granted) {
void Instance::requestPermissionsOrFail(Fn<void()> onSuccess) {
using Type = Platform::PermissionType;
requestPermissionOrFail(Type::Microphone, [=] {
requestPermissionOrFail(Type::Camera, [=] {
crl::on_main(onSuccess);
});
});
}
void Instance::requestPermissionOrFail(Platform::PermissionType type, Fn<void()> onSuccess) {
using Status = Platform::PermissionStatus;
const auto status = Platform::GetPermissionStatus(type);
if (status == Status::Granted) {
onSuccess();
} else if(status==Platform::PermissionStatus::CanRequest) {
Platform::RequestPermission(Platform::PermissionType::Microphone, crl::guard(this, [=](Platform::PermissionStatus status) {
if (status==Platform::PermissionStatus::Granted) {
} else if (status == Status::CanRequest) {
Platform::RequestPermission(type, crl::guard(this, [=](Status status) {
if (status == Status::Granted) {
crl::on_main(onSuccess);
} else {
if (_currentCall) {
@@ -321,8 +337,8 @@ void Instance::requestMicrophonePermissionOrFail(Fn<void()> onSuccess) {
if (alreadyInCall()) {
_currentCall->hangup();
}
Ui::show(Box<ConfirmBox>(tr::lng_no_mic_permission(tr::now), tr::lng_menu_settings(tr::now), crl::guard(this, [] {
Platform::OpenSystemSettingsForPermission(Platform::PermissionType::Microphone);
Ui::show(Box<ConfirmBox>(tr::lng_no_mic_permission(tr::now), tr::lng_menu_settings(tr::now), crl::guard(this, [=] {
Platform::OpenSystemSettingsForPermission(type);
Ui::hideLayer();
})));
}

View File

@@ -10,6 +10,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "mtproto/sender.h"
#include "calls/calls_call.h"
namespace Platform {
enum class PermissionType;
} // namespace Platform
namespace Media {
namespace Audio {
class Track;
@@ -32,10 +36,10 @@ public:
Instance();
~Instance();
void startOutgoingCall(not_null<UserData*> user);
void startOutgoingCall(not_null<UserData*> user, bool video);
void handleUpdate(
not_null<Main::Session*> session,
const MTPDupdatePhoneCall &update);
const MTPUpdate &update);
void showInfoPanel(not_null<Call*> call);
[[nodiscard]] Call *currentCall() const;
[[nodiscard]] rpl::producer<Call*> currentCallValue() const;
@@ -54,10 +58,12 @@ private:
void callRedial(not_null<Call*> call) override;
using Sound = Call::Delegate::Sound;
void playSound(Sound sound) override;
void createCall(not_null<UserData*> user, Call::Type type);
void createCall(not_null<UserData*> user, Call::Type type, bool video);
void destroyCall(not_null<Call*> call);
void destroyCurrentPanel();
void requestMicrophonePermissionOrFail(Fn<void()> onSuccess) override;
void requestPermissionsOrFail(Fn<void()> onSuccess) override;
void requestPermissionOrFail(Platform::PermissionType type, Fn<void()> onSuccess);
void handleSignalingData(const MTPDupdatePhoneCallSignalingData &data);
void refreshDhConfig();
void refreshServerConfig(not_null<Main::Session*> session);
@@ -78,7 +84,6 @@ private:
std::unique_ptr<Panel> _currentCallPanel;
base::Observable<Call*> _currentCallChanged;
base::Observable<FullMsgId> _newServiceMessage;
std::vector<QPointer<Panel>> _pendingPanels;
std::unique_ptr<Media::Audio::Track> _callConnectingTrack;
std::unique_ptr<Media::Audio::Track> _callEndedTrack;

File diff suppressed because it is too large

View File

@@ -9,8 +9,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/weak_ptr.h"
#include "base/timer.h"
#include "base/object_ptr.h"
#include "calls/calls_call.h"
#include "ui/widgets/tooltip.h"
#include "ui/effects/animations.h"
#include "ui/rp_widget.h"
@@ -26,138 +26,118 @@ class IconButton;
class FlatLabel;
template <typename Widget>
class FadeWrap;
template <typename Widget>
class PaddingWrap;
class Window;
namespace Platform {
class TitleControls;
} // namespace Platform
} // namespace Ui
namespace style {
struct CallSignalBars;
struct CallBodyLayout;
} // namespace style
namespace Calls {
class SignalBars : public Ui::RpWidget, private base::Subscriber {
public:
SignalBars(
QWidget *parent,
not_null<Call*> call,
const style::CallSignalBars &st,
Fn<void()> displayedChangedCallback = nullptr);
bool isDisplayed() const;
protected:
void paintEvent(QPaintEvent *e) override;
private:
void changed(int count);
const style::CallSignalBars &_st;
int _count = Call::kSignalBarStarting;
Fn<void()> _displayedChangedCallback;
};
class Panel
: public Ui::RpWidget
, private base::Subscriber
, private Ui::AbstractTooltipShower {
class Userpic;
class SignalBars;
class VideoBubble;
class Panel final {
public:
Panel(not_null<Call*> call);
~Panel();
void showAndActivate();
void replaceCall(not_null<Call*> call);
void hideAndDestroy();
protected:
void paintEvent(QPaintEvent *e) override;
void closeEvent(QCloseEvent *e) override;
void resizeEvent(QResizeEvent *e) override;
void mousePressEvent(QMouseEvent *e) override;
void mouseReleaseEvent(QMouseEvent *e) override;
void mouseMoveEvent(QMouseEvent *e) override;
void leaveEventHook(QEvent *e) override;
void leaveToChildEvent(QEvent *e, QWidget *child) override;
bool eventHook(QEvent *e) override;
void closeBeforeDestroy();
private:
class Content;
class Button;
using State = Call::State;
using Type = Call::Type;
enum class AnswerHangupRedialState : uchar {
Answer,
Hangup,
Redial,
};
// AbstractTooltipShower interface
QString tooltipText() const override;
QPoint tooltipPos() const override;
bool tooltipWindowActive() const override;
[[nodiscard]] not_null<Ui::RpWidget*> widget() const;
void paint(QRect clip);
void initWindow();
void initWidget();
void initControls();
void reinitControls();
void reinitWithCall(Call *call);
void initLayout();
void initGeometry();
void hideDeactivated();
void createBottomImage();
void createDefaultCacheImage();
void refreshCacheImageUserPhoto();
void initBottomShadow();
void handleClose();
void processUserPhoto();
void refreshUserPhoto();
bool isGoodUserPhoto(PhotoData *photo);
void createUserpicCache(Image *image);
QRect signalBarsRect() const;
void paintSignalBarsBg(Painter &p);
void updateControlsGeometry();
void updateHangupGeometry();
void updateStatusGeometry();
void updateOutgoingVideoBubbleGeometry();
void stateChanged(State state);
void showControls();
void updateStatusText(State state);
void startDurationUpdateTimer(crl::time currentDuration);
void fillFingerprint();
void toggleOpacityAnimation(bool visible);
void finishAnimating();
void destroyDelayed();
void setIncomingSize(QSize size);
void fillTopShadow(QPainter &p, QRect incoming);
void fillBottomShadow(QPainter &p, QRect incoming);
void refreshOutgoingPreviewInBody(State state);
void toggleFullScreen(bool fullscreen);
void createRemoteAudioMute();
void refreshAnswerHangupRedialLabel();
[[nodiscard]] QRect incomingFrameGeometry() const;
[[nodiscard]] QRect outgoingFrameGeometry() const;
Call *_call = nullptr;
not_null<UserData*> _user;
std::shared_ptr<Data::CloudImageView> _userpic;
std::shared_ptr<Data::PhotoMedia> _photo;
bool _useTransparency = true;
style::margins _padding;
int _contentTop = 0;
const std::unique_ptr<Ui::Window> _window;
bool _dragging = false;
QPoint _dragStartMousePosition;
QPoint _dragStartMyPosition;
#ifdef Q_OS_WIN
std::unique_ptr<Ui::Platform::TitleControls> _controls;
#endif // Q_OS_WIN
rpl::lifetime _stateLifetime;
QSize _incomingFrameSize;
class Button;
rpl::lifetime _callLifetime;
not_null<const style::CallBodyLayout*> _bodySt;
object_ptr<Button> _answerHangupRedial;
object_ptr<Ui::FadeWrap<Button>> _decline;
object_ptr<Ui::FadeWrap<Button>> _cancel;
bool _hangupShown = false;
bool _outgoingPreviewInBody = false;
std::optional<AnswerHangupRedialState> _answerHangupRedialState;
Ui::Animations::Simple _hangupShownProgress;
object_ptr<Ui::IconButton> _mute;
object_ptr<Button> _camera;
object_ptr<Button> _mute;
object_ptr<Ui::FlatLabel> _name;
object_ptr<Ui::FlatLabel> _status;
object_ptr<SignalBars> _signalBars;
std::vector<EmojiPtr> _fingerprint;
QRect _fingerprintArea;
object_ptr<Ui::RpWidget> _fingerprint = { nullptr };
object_ptr<Ui::PaddingWrap<Ui::FlatLabel>> _remoteAudioMute = { nullptr };
std::unique_ptr<Userpic> _userpic;
std::unique_ptr<VideoBubble> _outgoingVideoBubble;
QPixmap _bottomShadow;
int _bodyTop = 0;
int _buttonsTop = 0;
base::Timer _updateDurationTimer;
base::Timer _updateOuterRippleTimer;
bool _visible = false;
QPixmap _userPhoto;
PhotoId _userPhotoId = 0;
bool _userPhotoFull = false;
Ui::Animations::Simple _opacityAnimation;
QPixmap _animationCache;
QPixmap _bottomCache;
QPixmap _cache;
};
} // namespace Calls

View File

@@ -0,0 +1,64 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/calls_signal_bars.h"
#include "calls/calls_call.h"
#include "styles/style_calls.h"
namespace Calls {
SignalBars::SignalBars(
QWidget *parent,
not_null<Call*> call,
const style::CallSignalBars &st)
: RpWidget(parent)
, _st(st)
, _count(Call::kSignalBarStarting) {
resize(
_st.width + (_st.width + _st.skip) * (Call::kSignalBarCount - 1),
_st.max);
call->signalBarCountValue(
) | rpl::start_with_next([=](int count) {
changed(count);
}, lifetime());
}
void SignalBars::paintEvent(QPaintEvent *e) {
Painter p(this);
PainterHighQualityEnabler hq(p);
p.setPen(Qt::NoPen);
p.setBrush(_st.color);
for (auto i = 0; i < Call::kSignalBarCount; ++i) {
p.setOpacity((i < _count) ? 1. : _st.inactiveOpacity);
const auto barHeight = _st.min
+ (_st.max - _st.min) * (i / float64(Call::kSignalBarCount - 1));
const auto barLeft = i * (_st.width + _st.skip);
const auto barTop = height() - barHeight;
p.drawRoundedRect(
QRectF(
barLeft,
barTop,
_st.width,
barHeight),
_st.radius,
_st.radius);
}
p.setOpacity(1.);
}
void SignalBars::changed(int count) {
if (_count == Call::kSignalBarFinished) {
return;
} else if (_count != count) {
_count = count;
update();
}
}
} // namespace Calls

View File

@@ -0,0 +1,37 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "ui/rp_widget.h"
namespace style {
struct CallSignalBars;
} // namespace style
namespace Calls {
class Call;
class SignalBars final : public Ui::RpWidget {
public:
SignalBars(
QWidget *parent,
not_null<Call*> call,
const style::CallSignalBars &st);
private:
void paintEvent(QPaintEvent *e) override;
void changed(int count);
const style::CallSignalBars &_st;
int _count = 0;
};
} // namespace Calls

View File

@@ -14,7 +14,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/application.h"
#include "calls/calls_call.h"
#include "calls/calls_instance.h"
#include "calls/calls_panel.h"
#include "calls/calls_signal_bars.h"
#include "data/data_user.h"
#include "data/data_changes.h"
#include "main/main_session.h"
@@ -94,14 +94,14 @@ TopBar::TopBar(
void TopBar::initControls() {
_mute->setClickedCallback([=] {
if (const auto call = _call.get()) {
call->setMute(!call->isMute());
call->setMuted(!call->muted());
}
});
setMuted(_call->isMute());
subscribe(_call->muteChanged(), [=](bool mute) {
setMuted(mute);
_call->mutedValue(
) | rpl::start_with_next([=](bool muted) {
setMuted(muted);
update();
});
}, lifetime());
_call->user()->session().changes().peerUpdates(
Data::PeerUpdate::Flag::Name

View File

@@ -28,7 +28,7 @@ namespace Calls {
class Call;
class SignalBars;
class TopBar : public Ui::RpWidget, private base::Subscriber {
class TopBar : public Ui::RpWidget {
public:
TopBar(QWidget *parent, const base::weak_ptr<Call> &call);

View File

@@ -0,0 +1,217 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/calls_userpic.h"
#include "data/data_peer.h"
#include "main/main_session.h"
#include "data/data_changes.h"
#include "data/data_peer.h"
#include "data/data_session.h"
#include "data/data_cloud_file.h"
#include "data/data_photo_media.h"
#include "data/data_file_origin.h"
#include "ui/empty_userpic.h"
#include "apiwrap.h" // requestFullPeer.
#include "styles/style_calls.h"
namespace Calls {
namespace {
} // namespace
Userpic::Userpic(
not_null<QWidget*> parent,
not_null<PeerData*> peer,
rpl::producer<bool> muted)
: _content(parent)
, _peer(peer) {
setGeometry(0, 0, 0);
setup(std::move(muted));
}
Userpic::~Userpic() = default;
void Userpic::setVisible(bool visible) {
_content.setVisible(visible);
}
void Userpic::setGeometry(int x, int y, int size) {
if (this->size() != size) {
_userPhoto = QPixmap();
_userPhotoFull = false;
}
_content.setGeometry(x, y, size, size);
_content.update();
if (_userPhoto.isNull()) {
refreshPhoto();
}
}
void Userpic::setup(rpl::producer<bool> muted) {
_content.show();
_content.setAttribute(Qt::WA_TransparentForMouseEvents);
_content.paintRequest(
) | rpl::start_with_next([=] {
paint();
}, lifetime());
std::move(
muted
) | rpl::start_with_next([=](bool muted) {
setMuted(muted);
}, lifetime());
_peer->session().changes().peerFlagsValue(
_peer,
Data::PeerUpdate::Flag::Photo
) | rpl::start_with_next([=] {
processPhoto();
}, lifetime());
_peer->session().downloaderTaskFinished(
) | rpl::start_with_next([=] {
refreshPhoto();
}, lifetime());
_mutedAnimation.stop();
}
void Userpic::setMuteLayout(QPoint position, int size, int stroke) {
_mutePosition = position;
_muteSize = size;
_muteStroke = stroke;
_content.update();
}
void Userpic::paint() {
Painter p(&_content);
p.drawPixmap(0, 0, _userPhoto);
if (_muted && _muteSize > 0) {
auto hq = PainterHighQualityEnabler(p);
auto pen = st::callBgOpaque->p;
pen.setWidth(_muteStroke);
p.setPen(pen);
p.setBrush(st::callHangupBg);
const auto rect = QRect(
_mutePosition.x() - _muteSize / 2,
_mutePosition.y() - _muteSize / 2,
_muteSize,
_muteSize);
p.drawEllipse(rect);
st::callMutedPeerIcon.paintInCenter(p, rect);
}
}
void Userpic::setMuted(bool muted) {
if (_muted == muted) {
return;
}
_muted = muted;
_content.update();
//_mutedAnimation.start(
// [=] { _content.update(); },
// _muted ? 0. : 1.,
// _muted ? 1. : 0.,
// st::fadeWrapDuration);
}
int Userpic::size() const {
return _content.width();
}
void Userpic::processPhoto() {
_userpic = _peer->createUserpicView();
_peer->loadUserpic();
const auto photo = _peer->userpicPhotoId()
? _peer->owner().photo(_peer->userpicPhotoId()).get()
: nullptr;
if (isGoodPhoto(photo)) {
_photo = photo->createMediaView();
_photo->wanted(Data::PhotoSize::Thumbnail, _peer->userpicPhotoOrigin());
} else {
_photo = nullptr;
if (_peer->userpicPhotoUnknown() || (photo && !photo->date)) {
_peer->session().api().requestFullPeer(_peer);
}
}
refreshPhoto();
}
void Userpic::refreshPhoto() {
if (!size()) {
return;
}
const auto isNewBigPhoto = [&] {
return _photo
&& (_photo->image(Data::PhotoSize::Thumbnail) != nullptr)
&& (_photo->owner()->id != _userPhotoId || !_userPhotoFull);
}();
if (isNewBigPhoto) {
_userPhotoId = _photo->owner()->id;
_userPhotoFull = true;
createCache(_photo->image(Data::PhotoSize::Thumbnail));
} else if (_userPhoto.isNull()) {
createCache(_userpic ? _userpic->image() : nullptr);
}
}
void Userpic::createCache(Image *image) {
const auto size = this->size();
const auto real = size * cIntRetinaFactor();
auto options = Images::Option::Smooth | Images::Option::Circled;
// _useTransparency ? (Images::Option::RoundedLarge | Images::Option::RoundedTopLeft | Images::Option::RoundedTopRight | Images::Option::Smooth) : Images::Option::None;
if (image) {
auto width = image->width();
auto height = image->height();
if (width > height) {
width = qMax((width * real) / height, 1);
height = real;
} else {
height = qMax((height * real) / width, 1);
width = real;
}
_userPhoto = image->pixNoCache(
width,
height,
options,
size,
size);
_userPhoto.setDevicePixelRatio(cRetinaFactor());
} else {
auto filled = QImage(QSize(real, real), QImage::Format_ARGB32_Premultiplied);
filled.setDevicePixelRatio(cRetinaFactor());
filled.fill(Qt::transparent);
{
Painter p(&filled);
Ui::EmptyUserpic(
Data::PeerUserpicColor(_peer->id),
_peer->name
).paint(p, 0, 0, size, size);
}
//Images::prepareRound(filled, ImageRoundRadius::Large, RectPart::TopLeft | RectPart::TopRight);
_userPhoto = Images::PixmapFast(std::move(filled));
}
_content.update();
}
bool Userpic::isGoodPhoto(PhotoData *photo) const {
if (!photo || photo->isNull()) {
return false;
}
const auto badAspect = [](int a, int b) {
return a > 10 * b;
};
const auto width = photo->width();
const auto height = photo->height();
return !badAspect(width, height) && !badAspect(height, width);
}
} // namespace Calls

View File

@@ -0,0 +1,67 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "ui/rp_widget.h"
#include "ui/effects/animations.h"
class PeerData;
class Image;
namespace Data {
class CloudImageView;
class PhotoMedia;
} // namespace Data
namespace Calls {
class Userpic final {
public:
Userpic(
not_null<QWidget*> parent,
not_null<PeerData*> peer,
rpl::producer<bool> muted);
~Userpic();
void setVisible(bool visible);
void setGeometry(int x, int y, int size);
void setMuteLayout(QPoint position, int size, int stroke);
[[nodiscard]] rpl::lifetime &lifetime() {
return _content.lifetime();
}
private:
void setup(rpl::producer<bool> muted);
void paint();
void setMuted(bool muted);
[[nodiscard]] int size() const;
void processPhoto();
void refreshPhoto();
[[nodiscard]] bool isGoodPhoto(PhotoData *photo) const;
void createCache(Image *image);
Ui::RpWidget _content;
not_null<PeerData*> _peer;
std::shared_ptr<Data::CloudImageView> _userpic;
std::shared_ptr<Data::PhotoMedia> _photo;
Ui::Animations::Simple _mutedAnimation;
QPixmap _userPhoto;
PhotoId _userPhotoId = 0;
QPoint _mutePosition;
int _muteSize = 0;
int _muteStroke = 0;
bool _userPhotoFull = false;
bool _muted = false;
};
} // namespace Calls

View File

@@ -0,0 +1,268 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/calls_video_bubble.h"
#include "webrtc/webrtc_video_track.h"
#include "ui/image/image_prepare.h"
#include "ui/widgets/shadow.h"
#include "styles/style_calls.h"
#include "styles/style_widgets.h"
#include "styles/style_layers.h"
namespace Calls {
VideoBubble::VideoBubble(
not_null<QWidget*> parent,
not_null<Webrtc::VideoTrack*> track)
: _content(parent)
, _track(track)
, _state(Webrtc::VideoState::Inactive) {
setup();
}
void VideoBubble::setup() {
_content.show();
applyDragMode(_dragMode);
_content.paintRequest(
) | rpl::start_with_next([=] {
paint();
}, lifetime());
_track->stateValue(
) | rpl::start_with_next([=](Webrtc::VideoState state) {
setState(state);
}, lifetime());
_track->renderNextFrame(
) | rpl::start_with_next([=] {
if (_track->frameSize().isEmpty()) {
_track->markFrameShown();
} else {
updateVisibility();
// We update the whole parent widget in this case.
// If we updated only the bubble, without the parent incoming
// video frame, the parent could keep showing the old frame with a
// rectangular piece of the new frame painted over it by that update().
//_content.update();
}
}, lifetime());
}
void VideoBubble::updateGeometry(
DragMode mode,
QRect boundingRect,
QSize sizeMin,
QSize sizeMax) {
Expects(!boundingRect.isEmpty());
Expects(sizeMax.isEmpty() || !sizeMin.isEmpty());
Expects(sizeMax.isEmpty() || sizeMin.width() <= sizeMax.width());
Expects(sizeMax.isEmpty() || sizeMin.height() <= sizeMax.height());
if (sizeMin.isEmpty()) {
sizeMin = boundingRect.size();
}
if (sizeMax.isEmpty()) {
sizeMax = sizeMin;
}
if (_dragMode != mode) {
applyDragMode(mode);
}
if (_boundingRect != boundingRect) {
applyBoundingRect(boundingRect);
}
if (_min != sizeMin || _max != sizeMax) {
applySizeConstraints(sizeMin, sizeMax);
}
if (_geometryDirty && !_lastFrameSize.isEmpty()) {
updateSizeToFrame(base::take(_lastFrameSize));
}
}
QRect VideoBubble::geometry() const {
return _content.isHidden() ? QRect() : _content.geometry();
}
void VideoBubble::applyBoundingRect(QRect rect) {
_boundingRect = rect;
_geometryDirty = true;
}
void VideoBubble::applyDragMode(DragMode mode) {
_dragMode = mode;
if (_dragMode == DragMode::None) {
_dragging = false;
_content.setCursor(style::cur_default);
}
_content.setAttribute(
Qt::WA_TransparentForMouseEvents,
true/*(_dragMode == DragMode::None)*/);
if (_dragMode == DragMode::SnapToCorners) {
_corner = RectPart::BottomRight;
} else {
_corner = RectPart::None;
_lastDraggableSize = _size;
}
_size = QSize();
_geometryDirty = true;
}
void VideoBubble::applySizeConstraints(QSize min, QSize max) {
_min = min;
_max = max;
_geometryDirty = true;
}
void VideoBubble::paint() {
Painter p(&_content);
prepareFrame();
if (!_frame.isNull()) {
const auto padding = st::boxRoundShadow.extend;
const auto inner = _content.rect().marginsRemoved(padding);
Ui::Shadow::paint(p, inner, _content.width(), st::boxRoundShadow);
const auto factor = cIntRetinaFactor();
p.drawImage(
inner,
_frame,
QRect(
QPoint(_frame.width() - (inner.width() * factor), 0),
inner.size() * factor));
}
_track->markFrameShown();
}
void VideoBubble::prepareFrame() {
const auto original = _track->frameSize();
if (original.isEmpty()) {
_frame = QImage();
return;
}
const auto padding = st::boxRoundShadow.extend;
const auto size = _content.rect().marginsRemoved(padding).size()
* cIntRetinaFactor();
// Should we check 'original' and 'size' aspect ratios?..
const auto request = Webrtc::FrameRequest{
.resize = size,
.outer = size,
};
const auto frame = _track->frame(request);
if (_frame.width() < size.width() || _frame.height() < size.height()) {
_frame = QImage(
size * cIntRetinaFactor(),
QImage::Format_ARGB32_Premultiplied);
}
Assert(_frame.width() >= frame.width()
&& _frame.height() >= frame.height());
const auto toPerLine = _frame.bytesPerLine();
const auto fromPerLine = frame.bytesPerLine();
const auto lineSize = frame.width() * 4;
auto to = _frame.bits();
auto from = frame.bits();
const auto till = from + frame.height() * fromPerLine;
for (; from != till; from += fromPerLine, to += toPerLine) {
memcpy(to, from, lineSize);
}
Images::prepareRound(
_frame,
ImageRoundRadius::Large,
RectPart::AllCorners,
QRect(QPoint(), size));
_frame = std::move(_frame).mirrored(true, false);
}
void VideoBubble::setState(Webrtc::VideoState state) {
if (state == Webrtc::VideoState::Paused) {
using namespace Images;
static constexpr auto kRadius = 24;
_pausedFrame = Images::BlurLargeImage(_track->frame({}), kRadius);
if (_pausedFrame.isNull()) {
state = Webrtc::VideoState::Inactive;
}
}
_state = state;
updateVisibility();
}
void VideoBubble::updateSizeToFrame(QSize frame) {
Expects(!frame.isEmpty());
if (_lastFrameSize == frame) {
return;
}
_lastFrameSize = frame;
auto size = !_size.isEmpty()
? QSize(
std::clamp(_size.width(), _min.width(), _max.width()),
std::clamp(_size.height(), _min.height(), _max.height()))
: (_dragMode == DragMode::None || _lastDraggableSize.isEmpty())
? QSize()
: _lastDraggableSize;
if (size.isEmpty()) {
size = frame.scaled((_min + _max) / 2, Qt::KeepAspectRatio);
} else {
const auto area = size.width() * size.height();
const auto w = int(std::round(std::max(
std::sqrt((frame.width() * float64(area)) / (frame.height() * 1.)),
1.)));
const auto h = area / w;
size = QSize(w, h);
if (w > _max.width() || h > _max.height()) {
size = size.scaled(_max, Qt::KeepAspectRatio);
}
}
size = QSize(std::max(1, size.width()), std::max(1, size.height()));
setInnerSize(size);
}
void VideoBubble::setInnerSize(QSize size) {
if (_size == size && !_geometryDirty) {
return;
}
_geometryDirty = false;
_size = size;
const auto topLeft = [&] {
switch (_corner) {
case RectPart::None:
return _boundingRect.topLeft() + QPoint(
(_boundingRect.width() - size.width()) / 2,
(_boundingRect.height() - size.height()) / 2);
case RectPart::TopLeft:
return _boundingRect.topLeft();
case RectPart::TopRight:
return QPoint(
_boundingRect.x() + _boundingRect.width() - size.width(),
_boundingRect.y());
case RectPart::BottomRight:
return QPoint(
_boundingRect.x() + _boundingRect.width() - size.width(),
_boundingRect.y() + _boundingRect.height() - size.height());
case RectPart::BottomLeft:
return QPoint(
_boundingRect.x(),
_boundingRect.y() + _boundingRect.height() - size.height());
}
Unexpected("Corner value in VideoBubble::setInnerSize.");
}();
const auto inner = QRect(topLeft, size);
_content.setGeometry(inner.marginsAdded(st::boxRoundShadow.extend));
}
void VideoBubble::updateVisibility() {
const auto size = _track->frameSize();
const auto visible = (_state != Webrtc::VideoState::Inactive)
&& !size.isEmpty();
if (visible) {
updateSizeToFrame(size);
}
_content.setVisible(visible);
}
} // namespace Calls
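
The sizing logic in updateSizeToFrame() above keeps the bubble's previously chosen area while adopting the aspect ratio of the incoming frame, then scales the result down if it exceeds the allowed maximum. A minimal standalone sketch of that computation (the Size type and function name here are illustrative, not part of tdesktop):

#include <algorithm>
#include <cmath>

struct Size {
    int width = 0;
    int height = 0;
};

// Illustrative re-implementation of the sizing step: keep the previously
// chosen area, match the frame's aspect ratio, then clamp to the maximum
// the same way QSize::scaled(max, Qt::KeepAspectRatio) would.
Size FitFrameKeepingArea(Size previous, Size frame, Size max) {
    const double area = double(previous.width) * previous.height;
    const int w = std::max(
        int(std::round(std::sqrt(area * frame.width / double(frame.height)))),
        1);
    const int h = std::max(int(area / w), 1);
    Size result{ w, h };
    if (result.width > max.width || result.height > max.height) {
        const double factor = std::min(
            max.width / double(result.width),
            max.height / double(result.height));
        result.width = std::max(int(result.width * factor), 1);
        result.height = std::max(int(result.height * factor), 1);
    }
    return result;
}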


@@ -0,0 +1,65 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "ui/rp_widget.h"
namespace Webrtc {
class VideoTrack;
enum class VideoState;
} // namespace Webrtc
namespace Calls {
class VideoBubble final {
public:
VideoBubble(
not_null<QWidget*> parent,
not_null<Webrtc::VideoTrack*> track);
enum class DragMode {
None,
SnapToCorners,
};
void updateGeometry(
DragMode mode,
QRect boundingRect,
QSize sizeMin = QSize(),
QSize sizeMax = QSize());
[[nodiscard]] QRect geometry() const;
[[nodiscard]] rpl::lifetime &lifetime() {
return _content.lifetime();
}
private:
void setup();
void paint();
void setState(Webrtc::VideoState state);
void applyDragMode(DragMode mode);
void applyBoundingRect(QRect rect);
void applySizeConstraints(QSize min, QSize max);
void updateSizeToFrame(QSize frame);
void updateVisibility();
void setInnerSize(QSize size);
void prepareFrame();
Ui::RpWidget _content;
const not_null<Webrtc::VideoTrack*> _track;
Webrtc::VideoState _state = Webrtc::VideoState();
QImage _frame, _pausedFrame;
QSize _min, _max, _size, _lastDraggableSize, _lastFrameSize;
QRect _boundingRect;
DragMode _dragMode = DragMode::None;
RectPart _corner = RectPart::None;
bool _dragging = false;
bool _geometryDirty = false;
};
} // namespace Calls
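
Judging by this interface, the call panel is expected to construct the bubble over its own widget and re-run updateGeometry() whenever its layout changes. A hypothetical owner sketch (the OutgoingPreview class and the quarter/third size bounds are illustrative, not taken from this diff):

// Hypothetical owner code; only the VideoBubble interface above is real.
#include "calls/calls_video_bubble.h"

class OutgoingPreview {
public:
    OutgoingPreview(
        not_null<QWidget*> widget,
        not_null<Webrtc::VideoTrack*> track)
    : _bubble(widget, track) {
    }

    void updateGeometry(QRect available) {
        // Snap the preview to a corner and let it occupy between roughly
        // a quarter and a third of the available area in each dimension.
        _bubble.updateGeometry(
            Calls::VideoBubble::DragMode::SnapToCorners,
            available,
            QSize(available.width() / 4, available.height() / 4),
            QSize(available.width() / 3, available.height() / 3));
    }

private:
    Calls::VideoBubble _bubble;
};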


@@ -16,6 +16,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_file_origin.h"
#include "data/data_session.h"
#include "data/stickers/data_stickers.h"
#include "chat_helpers/send_context_menu.h" // SendMenu::FillSendMenu
#include "chat_helpers/stickers_lottie.h"
#include "mainwindow.h"
#include "apiwrap.h"
@@ -24,6 +25,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/application.h"
#include "core/core_settings.h"
#include "lottie/lottie_single_player.h"
#include "ui/widgets/popup_menu.h"
#include "ui/widgets/scroll_area.h"
#include "ui/image/image.h"
#include "ui/ui_utility.h"
@@ -45,8 +47,10 @@ FieldAutocomplete::FieldAutocomplete(
, _scroll(this, st::mentionScroll) {
_scroll->setGeometry(rect());
using Inner = internal::FieldAutocompleteInner;
_inner = _scroll->setOwnedWidget(
object_ptr<internal::FieldAutocompleteInner>(
object_ptr<Inner>(
_controller,
this,
&_mrows,
@@ -55,18 +59,41 @@ FieldAutocomplete::FieldAutocomplete(
&_srows));
_inner->setGeometry(rect());
connect(_inner, SIGNAL(mentionChosen(not_null<UserData*>,FieldAutocomplete::ChooseMethod)), this, SIGNAL(mentionChosen(not_null<UserData*>,FieldAutocomplete::ChooseMethod)));
connect(_inner, SIGNAL(hashtagChosen(QString,FieldAutocomplete::ChooseMethod)), this, SIGNAL(hashtagChosen(QString,FieldAutocomplete::ChooseMethod)));
connect(_inner, SIGNAL(botCommandChosen(QString,FieldAutocomplete::ChooseMethod)), this, SIGNAL(botCommandChosen(QString,FieldAutocomplete::ChooseMethod)));
connect(_inner, SIGNAL(stickerChosen(not_null<DocumentData*>,FieldAutocomplete::ChooseMethod)), this, SIGNAL(stickerChosen(not_null<DocumentData*>,FieldAutocomplete::ChooseMethod)));
connect(_inner, SIGNAL(mustScrollTo(int, int)), _scroll, SLOT(scrollToY(int, int)));
_inner->scrollToRequested(
) | rpl::start_with_next([=](Inner::ScrollTo data) {
_scroll->scrollToY(data.top, data.bottom);
}, lifetime());
_scroll->show();
_inner->show();
hide();
connect(_scroll, SIGNAL(geometryChanged()), _inner, SLOT(onParentGeometryChanged()));
connect(
_scroll,
&Ui::ScrollArea::geometryChanged,
_inner,
&Inner::onParentGeometryChanged);
}
auto FieldAutocomplete::mentionChosen() const
-> rpl::producer<FieldAutocomplete::MentionChosen> {
return _inner->mentionChosen();
}
auto FieldAutocomplete::hashtagChosen() const
-> rpl::producer<FieldAutocomplete::HashtagChosen> {
return _inner->hashtagChosen();
}
auto FieldAutocomplete::botCommandChosen() const
-> rpl::producer<FieldAutocomplete::BotCommandChosen> {
return _inner->botCommandChosen();
}
auto FieldAutocomplete::stickerChosen() const
-> rpl::producer<FieldAutocomplete::StickerChosen> {
return _inner->stickerChosen();
}
FieldAutocomplete::~FieldAutocomplete() = default;
@@ -583,9 +610,10 @@ bool FieldAutocomplete::eventFilter(QObject *obj, QEvent *e) {
&& ((key >= Qt::Key_1 && key <= Qt::Key_9)
|| key == Qt::Key_Q
|| key == Qt::Key_W)) {
bool handled = false;
emit moderateKeyActivate(key, &handled);
return handled;
return _moderateKeyActivateCallback
? _moderateKeyActivateCallback(key)
: false;
}
}
}
@@ -875,32 +903,52 @@ bool FieldAutocompleteInner::moveSel(int key) {
return true;
}
bool FieldAutocompleteInner::chooseSelected(FieldAutocomplete::ChooseMethod method) const {
bool FieldAutocompleteInner::chooseSelected(
FieldAutocomplete::ChooseMethod method) const {
return chooseAtIndex(method, _sel);
}
bool FieldAutocompleteInner::chooseAtIndex(
FieldAutocomplete::ChooseMethod method,
int index,
Api::SendOptions options) const {
if (index < 0) {
return false;
}
if (!_srows->empty()) {
if (_sel >= 0 && _sel < _srows->size()) {
emit stickerChosen((*_srows)[_sel].document, method);
if (index < _srows->size()) {
const auto document = (*_srows)[index].document;
_stickerChosen.fire({ document, options, method });
return true;
}
} else if (!_mrows->empty()) {
if (_sel >= 0 && _sel < _mrows->size()) {
emit mentionChosen(_mrows->at(_sel).user, method);
if (index < _mrows->size()) {
_mentionChosen.fire({ _mrows->at(index).user, method });
return true;
}
} else if (!_hrows->empty()) {
if (_sel >= 0 && _sel < _hrows->size()) {
emit hashtagChosen('#' + _hrows->at(_sel), method);
if (index < _hrows->size()) {
_hashtagChosen.fire({ '#' + _hrows->at(index), method });
return true;
}
} else if (!_brows->empty()) {
if (_sel >= 0 && _sel < _brows->size()) {
const auto user = _brows->at(_sel).user;
const auto command = _brows->at(_sel).command;
int32 botStatus = _parent->chat() ? _parent->chat()->botStatus : ((_parent->channel() && _parent->channel()->isMegagroup()) ? _parent->channel()->mgInfo->botStatus : -1);
if (botStatus == 0 || botStatus == 2 || _parent->filter().indexOf('@') > 0) {
emit botCommandChosen('/' + command->command + '@' + user->username, method);
} else {
emit botCommandChosen('/' + command->command, method);
}
if (index < _brows->size()) {
const auto user = _brows->at(index).user;
const auto command = _brows->at(index).command;
const auto botStatus = _parent->chat()
? _parent->chat()->botStatus
: ((_parent->channel() && _parent->channel()->isMegagroup())
? _parent->channel()->mgInfo->botStatus
: -1);
const auto insertUsername = (botStatus == 0
|| botStatus == 2
|| _parent->filter().indexOf('@') > 0);
const auto commandString = QString("/%1%2")
.arg(command->command)
.arg(insertUsername ? ('@' + user->username) : QString());
_botCommandChosen.fire({ commandString, method });
return true;
}
}
@@ -969,6 +1017,29 @@ void FieldAutocompleteInner::mouseReleaseEvent(QMouseEvent *e) {
chooseSelected(FieldAutocomplete::ChooseMethod::ByClick);
}
void FieldAutocompleteInner::contextMenuEvent(QContextMenuEvent *e) {
if (_sel < 0 || _srows->empty() || _down >= 0) {
return;
}
const auto index = _sel;
const auto type = SendMenu::Type::Scheduled;
const auto method = FieldAutocomplete::ChooseMethod::ByClick;
_menu = base::make_unique_q<Ui::PopupMenu>(this);
const auto send = [=](Api::SendOptions options) {
chooseAtIndex(method, index, options);
};
SendMenu::FillSendMenu(
_menu,
[&] { return type; },
SendMenu::DefaultSilentCallback(send),
SendMenu::DefaultScheduleCallback(this, type, send));
if (!_menu->actions().empty()) {
_menu->popup(QCursor::pos());
}
}
void FieldAutocompleteInner::enterEventHook(QEvent *e) {
setMouseTracking(true);
}
@@ -1000,10 +1071,15 @@ void FieldAutocompleteInner::setSel(int sel, bool scroll) {
if (scroll && _sel >= 0) {
if (_srows->empty()) {
emit mustScrollTo(_sel * st::mentionHeight, (_sel + 1) * st::mentionHeight);
_scrollToRequested.fire({
_sel * st::mentionHeight,
(_sel + 1) * st::mentionHeight });
} else {
int32 row = _sel / _stickersPerRow;
emit mustScrollTo(st::stickerPanPadding + row * st::stickerPanSize.height(), st::stickerPanPadding + (row + 1) * st::stickerPanSize.height());
const auto padding = st::stickerPanPadding;
_scrollToRequested.fire({
padding + row * st::stickerPanSize.height(),
padding + (row + 1) * st::stickerPanSize.height() });
}
}
}
@@ -1131,4 +1207,29 @@ void FieldAutocompleteInner::showPreview() {
}
}
auto FieldAutocompleteInner::mentionChosen() const
-> rpl::producer<FieldAutocomplete::MentionChosen> {
return _mentionChosen.events();
}
auto FieldAutocompleteInner::hashtagChosen() const
-> rpl::producer<FieldAutocomplete::HashtagChosen> {
return _hashtagChosen.events();
}
auto FieldAutocompleteInner::botCommandChosen() const
-> rpl::producer<FieldAutocomplete::BotCommandChosen> {
return _botCommandChosen.events();
}
auto FieldAutocompleteInner::stickerChosen() const
-> rpl::producer<FieldAutocomplete::StickerChosen> {
return _stickerChosen.events();
}
auto FieldAutocompleteInner::scrollToRequested() const
-> rpl::producer<ScrollTo> {
return _scrollToRequested.events();
}
} // namespace internal
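
The changes above replace the Qt signal/slot pairs with tdesktop's rpl streams: the inner widget owns an rpl::event_stream<T>, fires structured values into it, and exposes it as an rpl::producer<T> that the outer widget consumes with rpl::start_with_next for the duration of a lifetime. A minimal sketch of that pattern (the Emitter/Chosen names are illustrative and the include path is assumed):

#include <rpl/rpl.h> // umbrella header of the in-tree rpl library (assumed path)

// Producer side: owns the stream and fires structured values into it,
// mirroring _mentionChosen / _stickerChosen above.
class Emitter {
public:
    struct Chosen {
        int id = 0;
    };

    [[nodiscard]] rpl::producer<Chosen> chosen() const {
        return _chosen.events();
    }
    void choose(int id) {
        _chosen.fire({ id });
    }

private:
    rpl::event_stream<Chosen> _chosen;
};

// Consumer side: subscribes for the duration of a lifetime, mirroring the
// start_with_next(..., lifetime()) calls in the FieldAutocomplete constructor.
void Subscribe(Emitter &emitter, rpl::lifetime &lifetime) {
    emitter.chosen(
    ) | rpl::start_with_next([](Emitter::Chosen data) {
        // Handle data.id here.
    }, lifetime);
}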


@@ -7,12 +7,14 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "api/api_common.h"
#include "ui/effects/animations.h"
#include "ui/rp_widget.h"
#include "base/timer.h"
#include "base/object_ptr.h"
namespace Ui {
class PopupMenu;
class ScrollArea;
} // namespace Ui
@@ -59,7 +61,6 @@ class FieldAutocompleteInner;
} // namespace internal
class FieldAutocomplete final : public Ui::RpWidget {
Q_OBJECT
public:
FieldAutocomplete(
@@ -90,6 +91,24 @@ public:
ByTab,
ByClick,
};
struct MentionChosen {
not_null<UserData*> user;
ChooseMethod method;
};
struct HashtagChosen {
QString hashtag;
ChooseMethod method;
};
struct BotCommandChosen {
QString command;
ChooseMethod method;
};
struct StickerChosen {
not_null<DocumentData*> sticker;
Api::SendOptions options;
ChooseMethod method;
};
bool chooseSelected(ChooseMethod method) const;
bool stickersShown() const {
@@ -102,15 +121,16 @@ public:
return rect().contains(QRect(mapFromGlobal(globalRect.topLeft()), globalRect.size()));
}
void setModerateKeyActivateCallback(Fn<bool(int)> callback) {
_moderateKeyActivateCallback = std::move(callback);
}
void hideFast();
signals:
void mentionChosen(not_null<UserData*> user, FieldAutocomplete::ChooseMethod method) const;
void hashtagChosen(QString hashtag, FieldAutocomplete::ChooseMethod method) const;
void botCommandChosen(QString command, FieldAutocomplete::ChooseMethod method) const;
void stickerChosen(not_null<DocumentData*> sticker, FieldAutocomplete::ChooseMethod method) const;
void moderateKeyActivate(int key, bool *outHandled) const;
rpl::producer<MentionChosen> mentionChosen() const;
rpl::producer<HashtagChosen> hashtagChosen() const;
rpl::producer<BotCommandChosen> botCommandChosen() const;
rpl::producer<StickerChosen> stickerChosen() const;
public slots:
void showAnimated();
@@ -160,11 +180,12 @@ private:
QRect _boundings;
bool _addInlineBots;
int32 _width, _height;
bool _hiding = false;
Ui::Animations::Simple _a_opacity;
Fn<bool(int)> _moderateKeyActivateCallback;
friend class internal::FieldAutocompleteInner;
};
@@ -174,9 +195,13 @@ namespace internal {
class FieldAutocompleteInner final
: public Ui::RpWidget
, private base::Subscriber {
Q_OBJECT
public:
struct ScrollTo {
int top;
int bottom;
};
FieldAutocompleteInner(
not_null<Window::SessionController*> controller,
not_null<FieldAutocomplete*> parent,
@@ -188,18 +213,21 @@ public:
void clearSel(bool hidden = false);
bool moveSel(int key);
bool chooseSelected(FieldAutocomplete::ChooseMethod method) const;
bool chooseAtIndex(
FieldAutocomplete::ChooseMethod method,
int index,
Api::SendOptions options = Api::SendOptions()) const;
void setRecentInlineBotsInRows(int32 bots);
void rowsUpdated();
signals:
void mentionChosen(not_null<UserData*> user, FieldAutocomplete::ChooseMethod method) const;
void hashtagChosen(QString hashtag, FieldAutocomplete::ChooseMethod method) const;
void botCommandChosen(QString command, FieldAutocomplete::ChooseMethod method) const;
void stickerChosen(not_null<DocumentData*> sticker, FieldAutocomplete::ChooseMethod method) const;
void mustScrollTo(int scrollToTop, int scrollToBottom);
rpl::producer<FieldAutocomplete::MentionChosen> mentionChosen() const;
rpl::producer<FieldAutocomplete::HashtagChosen> hashtagChosen() const;
rpl::producer<FieldAutocomplete::BotCommandChosen>
botCommandChosen() const;
rpl::producer<FieldAutocomplete::StickerChosen> stickerChosen() const;
rpl::producer<ScrollTo> scrollToRequested() const;
public slots:
void onParentGeometryChanged();
private:
@@ -212,6 +240,7 @@ private:
void mousePressEvent(QMouseEvent *e) override;
void mouseMoveEvent(QMouseEvent *e) override;
void mouseReleaseEvent(QMouseEvent *e) override;
void contextMenuEvent(QContextMenuEvent *e) override;
void updateSelectedRow();
void setSel(int sel, bool scroll = false);
@@ -231,6 +260,7 @@ private:
const not_null<StickerRows*> _srows;
rpl::lifetime _stickersLifetime;
std::weak_ptr<Lottie::FrameRenderer> _lottieRenderer;
base::unique_qptr<Ui::PopupMenu> _menu;
int _stickersPerRow = 1;
int _recentInlineBotsInRows = 0;
int _sel = -1;
@@ -242,6 +272,12 @@ private:
bool _previewShown = false;
rpl::event_stream<FieldAutocomplete::MentionChosen> _mentionChosen;
rpl::event_stream<FieldAutocomplete::HashtagChosen> _hashtagChosen;
rpl::event_stream<FieldAutocomplete::BotCommandChosen> _botCommandChosen;
rpl::event_stream<FieldAutocomplete::StickerChosen> _stickerChosen;
rpl::event_stream<ScrollTo> _scrollToRequested;
base::Timer _previewTimer;
};
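
The old moderateKeyActivate(int key, bool *outHandled) signal is gone; the widget now stores a Fn<bool(int)> callback and the event filter returns whatever that callback returns. A hypothetical caller sketch (SetupModerateKeys and the handler body are illustrative, and the include path is assumed):

#include "chat_helpers/field_autocomplete.h" // assumed path

// Hypothetical wiring in the widget that owns the FieldAutocomplete.
void SetupModerateKeys(not_null<FieldAutocomplete*> autocomplete) {
    autocomplete->setModerateKeyActivateCallback([](int key) {
        // Only keys 1..9, Q and W reach this callback (see eventFilter above).
        // Return true when the key was consumed as a moderation shortcut.
        return (key >= Qt::Key_1 && key <= Qt::Key_9);
    });
}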


@@ -7,6 +7,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "chat_helpers/gifs_list_widget.h"
#include "apiwrap.h" // ApiWrap::toggleSavedGif
#include "base/const_string.h"
#include "data/data_photo.h"
#include "data/data_document.h"
@@ -16,8 +17,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_photo_media.h"
#include "data/data_document_media.h"
#include "data/stickers/data_stickers.h"
#include "chat_helpers/send_context_menu.h" // SendMenu::FillSendMenu
#include "ui/widgets/buttons.h"
#include "ui/widgets/input_fields.h"
#include "ui/widgets/popup_menu.h"
#include "ui/effects/ripple_animation.h"
#include "ui/image/image.h"
#include "boxes/stickers_box.h"
@@ -29,6 +32,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "window/window_session_controller.h"
#include "history/view/history_view_cursor_state.h"
#include "app.h"
#include "storage/storage_account.h" // Account::writeSavedGifs
#include "styles/style_chat_helpers.h"
#include <QtWidgets/QApplication>
@@ -42,6 +46,21 @@ constexpr auto kSearchBotUsername = "gif"_cs;
} // namespace
void DeleteSavedGif(not_null<DocumentData*> document) {
auto &data = document->owner();
document->session().api().toggleSavedGif(
document,
Data::FileOriginSavedGifs(),
false);
const auto index = data.stickers().savedGifs().indexOf(document);
if (index >= 0) {
data.stickers().savedGifsRef().remove(index);
document->session().local().writeSavedGifs();
}
data.stickers().notifySavedGifsUpdated();
}
class GifsListWidget::Footer : public TabbedSelector::InnerFooter {
public:
Footer(not_null<GifsListWidget*> parent);
@@ -165,11 +184,12 @@ GifsListWidget::GifsListWidget(
});
}
rpl::producer<not_null<DocumentData*>> GifsListWidget::fileChosen() const {
rpl::producer<TabbedSelector::FileChosen> GifsListWidget::fileChosen() const {
return _fileChosen.events();
}
rpl::producer<not_null<PhotoData*>> GifsListWidget::photoChosen() const {
auto GifsListWidget::photoChosen() const
-> rpl::producer<TabbedSelector::PhotoChosen> {
return _photoChosen.events();
}
@@ -343,6 +363,43 @@ void GifsListWidget::mousePressEvent(QMouseEvent *e) {
_previewTimer.callOnce(QApplication::startDragTime());
}
void GifsListWidget::fillContextMenu(
not_null<Ui::PopupMenu*> menu,
SendMenu::Type type) {
if (_selected < 0 || _pressed >= 0) {
return;
}
const auto row = _selected / MatrixRowShift;
const auto column = _selected % MatrixRowShift;
const auto send = [=](Api::SendOptions options) {
selectInlineResult(row, column, options, true);
};
SendMenu::FillSendMenu(
menu,
[&] { return type; },
SendMenu::DefaultSilentCallback(send),
SendMenu::DefaultScheduleCallback(this, type, send));
[&] {
const auto row = _selected / MatrixRowShift;
const auto column = _selected % MatrixRowShift;
if (row >= _rows.size() || column >= _rows[row].items.size()) {
return;
}
const auto item = _rows[row].items[column];
if (const auto document = item->getDocument()) {
auto &data = document->owner();
if (data.stickers().savedGifs().indexOf(document) < 0) {
return;
}
menu->addAction(tr::lng_context_delete_gif(tr::now), [=] {
ChatHelpers::DeleteSavedGif(document);
});
}
}();
}
void GifsListWidget::mouseReleaseEvent(QMouseEvent *e) {
_previewTimer.cancel();
@@ -370,28 +427,40 @@ void GifsListWidget::mouseReleaseEvent(QMouseEvent *e) {
}
void GifsListWidget::selectInlineResult(int row, int column) {
selectInlineResult(row, column, Api::SendOptions());
}
void GifsListWidget::selectInlineResult(
int row,
int column,
Api::SendOptions options,
bool forceSend) {
if (row >= _rows.size() || column >= _rows[row].items.size()) {
return;
}
const auto ctrl = (QGuiApplication::keyboardModifiers()
forceSend |= (QGuiApplication::keyboardModifiers()
== Qt::ControlModifier);
auto item = _rows[row].items[column];
if (const auto photo = item->getPhoto()) {
using Data::PhotoSize;
const auto media = photo->activeMediaView();
if (ctrl
if (forceSend
|| (media && media->image(PhotoSize::Thumbnail))
|| (media && media->image(PhotoSize::Large))) {
_photoChosen.fire_copy(photo);
_photoChosen.fire_copy({
.photo = photo,
.options = options });
} else if (!photo->loading(PhotoSize::Thumbnail)) {
photo->load(PhotoSize::Thumbnail, Data::FileOrigin());
}
} else if (const auto document = item->getDocument()) {
const auto media = document->activeMediaView();
const auto preview = Data::VideoPreviewState(media.get());
if (ctrl || (media && preview.loaded())) {
_fileChosen.fire_copy(document);
if (forceSend || (media && preview.loaded())) {
_fileChosen.fire_copy({
.document = document,
.options = options });
} else if (!preview.usingThumbnail()) {
if (preview.loading()) {
document->cancel();
@@ -403,7 +472,7 @@ void GifsListWidget::selectInlineResult(int row, int column) {
}
} else if (const auto inlineResult = item->getResult()) {
if (inlineResult->onChoose(item)) {
_inlineResultChosen.fire({ inlineResult, _searchBot });
_inlineResultChosen.fire({ inlineResult, _searchBot, options });
}
}
}
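
Selection indices in the GIF grid pack a row/column pair into a single int via MatrixRowShift, which fillContextMenu() above unpacks with division and modulo before delegating to selectInlineResult(). A self-contained sketch of that encoding (the shift constant here is illustrative; tdesktop defines its own value elsewhere):

#include <cassert>

// Illustrative constant; tdesktop defines its own MatrixRowShift elsewhere.
constexpr int kMatrixRowShift = 0x10000;

constexpr int PackIndex(int row, int column) {
    return row * kMatrixRowShift + column;
}
constexpr int RowOf(int index) {
    return index / kMatrixRowShift;
}
constexpr int ColumnOf(int index) {
    return index % kMatrixRowShift;
}

int main() {
    const int index = PackIndex(3, 7);
    assert(RowOf(index) == 3);
    assert(ColumnOf(index) == 7);
    return 0;
}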

Some files were not shown because too many files have changed in this diff.