Compare commits


105 Commits

Author SHA1 Message Date
John Preston
887b6629f6 Version 2.3: Fix emoji fingerprint on Retina. 2020-08-15 00:02:26 +04:00
John Preston
b6a66bbf1b Version 2.3.
- Video Calls (alpha version).
2020-08-14 20:20:59 +04:00
John Preston
a0eb073728 Close call panel instead of hiding. 2020-08-14 19:59:24 +04:00
John Preston
34840766b2 Move fingerprint / signal bars to a separate widget. 2020-08-14 16:53:06 +04:00
John Preston
1aaf7df54a Add labels to call buttons. 2020-08-14 16:53:06 +04:00
John Preston
1b60829da8 Add top window controls shadow. 2020-08-14 16:53:06 +04:00
John Preston
fbe02dbb8d Add bottom shadow to the incoming video. 2020-08-14 16:53:06 +04:00
John Preston
cb18c9a9b3 Show remote audio muted tooltip. 2020-08-14 16:53:06 +04:00
John Preston
a106d80a41 Fix accept call button outer ripple. 2020-08-14 16:53:06 +04:00
John Preston
465c661c45 Show peer's microphone mute state on userpic. 2020-08-14 16:53:06 +04:00
John Preston
8af40c22a4 Improve mute button styles. 2020-08-14 16:53:06 +04:00
John Preston
f1b6d1fdae Improve top controls design on Windows. 2020-08-14 16:53:06 +04:00
John Preston
476b9c44c1 Closed alpha version 2.2.0.2. 2020-08-13 15:42:23 +04:00
John Preston
377ff2f421 Use expanding incoming frame scale if aspect is good. 2020-08-13 15:37:00 +04:00
John Preston
cdc87086f3 Add some paddings. 2020-08-13 15:16:39 +04:00
John Preston
99f3173ae6 Use smaller emoji. 2020-08-13 14:39:55 +04:00
John Preston
7de5cabd79 Add window controls to call panel on Windows. 2020-08-13 14:32:11 +04:00
John Preston
925f6df06a Fix call panel behaviour on macOS. 2020-08-13 13:02:15 +04:00
John Preston
c93d3ae924 Toggle fullscreen by double click. 2020-08-12 20:58:24 +04:00
John Preston
537645c282 Hide window title for calls panel. 2020-08-12 20:58:05 +04:00
John Preston
38b9111bf5 Use Ui::Window for the Calls::Panel. 2020-08-12 17:35:31 +04:00
John Preston
79feb0c6d9 Show video icon in call bubble in history. 2020-08-12 12:47:19 +04:00
John Preston
97fe05c7ed Mirror outgoing video horizontally. 2020-08-12 12:47:07 +04:00
John Preston
e01bf8e1cd Closed alpha version 2.2.0.1. 2020-08-11 21:33:15 +04:00
John Preston
369ec46064 Allow building without WebRTC. 2020-08-11 21:33:15 +04:00
John Preston
13f2ceaf47 Update building docs. 2020-08-11 18:21:40 +04:00
John Preston
eda22b925f Fix build on Linux 64 bit. 2020-08-11 18:21:40 +04:00
John Preston
bd16708781 Update tgcalls library. 2020-08-11 18:21:40 +04:00
John Preston
38546c701a Round outgoing video preview. 2020-08-11 18:21:40 +04:00
John Preston
4971e281fa Improve design for all controls states. 2020-08-11 18:21:40 +04:00
John Preston
d4b8fa70a7 Extract Calls::Userpic and Calls::VideoBubble. 2020-08-11 18:21:40 +04:00
John Preston
95de762529 Support three-value VideoState. 2020-08-11 18:21:40 +04:00
John Preston
a89634b767 Start call panel redesign. 2020-08-11 18:21:40 +04:00
John Preston
4bf6550e24 Update API scheme to layer 117. 2020-08-11 18:21:40 +04:00
John Preston
83759adb5f Start video call from video call service message. 2020-08-11 18:21:40 +04:00
John Preston
6f90e57523 Support special video calls service messages. 2020-08-11 18:21:40 +04:00
John Preston
aba8f72c36 Fix camera start. 2020-08-11 18:21:40 +04:00
John Preston
35dfb9fab3 Show outgoing video stream. 2020-08-11 18:21:40 +04:00
John Preston
b692242012 Allow reference tgcalls implementation. 2020-08-11 18:21:40 +04:00
John Preston
c1f727bde9 Update tgcalls to use new protocol. 2020-08-11 18:21:40 +04:00
John Preston
f3808bdc24 Start using webrtc::VideoTrack. 2020-08-11 18:21:40 +04:00
John Preston
16177eae2b Simplify frame presentation. 2020-08-11 18:21:40 +04:00
John Preston
da3bbba497 Allow enable/disable video in a call. 2020-08-11 18:21:40 +04:00
John Preston
a4ee90e8c6 Remove webrtc test code usage on Windows. 2020-08-11 18:21:40 +04:00
John Preston
7fec49a752 Remove webrtc test code usage on macOS. 2020-08-11 18:21:40 +04:00
John Preston
78c9c1e7f8 Update tgcalls library. 2020-08-11 18:21:40 +04:00
John Preston
662424319c Fix build on Windows. 2020-08-11 18:21:40 +04:00
John Preston
5c55f31972 Use test capture from webrtc on macOS. 2020-08-11 18:21:40 +04:00
John Preston
29d42a6936 Fix runtime errors in macOS. 2020-08-11 18:21:40 +04:00
John Preston
46550381a4 Fix build on macOS. 2020-08-11 18:21:40 +04:00
John Preston
6c272e38ad Update tgcalls to support codecs negotiation. 2020-08-11 18:21:40 +04:00
John Preston
0a019411ee Use typed signaling messages. 2020-08-11 18:21:40 +04:00
John Preston
815300bffc Fix video between tdesktop instances. 2020-08-11 18:21:40 +04:00
John Preston
12e0399cf4 Add tgcalls library. 2020-08-11 18:21:40 +04:00
John Preston
b703f4e555 Fix build on Linux. 2020-08-11 18:21:40 +04:00
John Preston
71040464c5 Request camera permissions on macOS. 2020-08-11 18:21:40 +04:00
John Preston
6d36176a8d Add test implementation of webrtc calls. 2020-08-11 18:21:40 +04:00
John Preston
438a560a79 Link tdesktop with external_webrtc. 2020-08-11 18:21:40 +04:00
John Preston
145ace2fa0 Use external_webrtc in libtgvoip. 2020-08-11 18:21:40 +04:00
John Preston
4ba7de8df1 Fix build on Windows. 2020-08-11 18:21:40 +04:00
23rd
b6c70572a9 Fixed Github CI macOS build. 2020-08-11 18:20:43 +04:00
23rd
b12256f1ee Added ability to schedule polls from section of scheduled messages.
Fixed #7433.
2020-08-11 18:13:40 +04:00
23rd
14cda49db2 Moved code for send context menu to namespace. 2020-08-11 18:13:40 +04:00
23rd
af9440db38 Slightly refactored InlineBots::Layout::Widget. 2020-08-11 18:13:40 +04:00
23rd
1eea07d88a Added ability to schedule and send silently stickers from autocomplete. 2020-08-11 18:13:40 +04:00
23rd
cf56658664 Slightly refactored FieldAutocomplete. 2020-08-11 18:13:40 +04:00
23rd
2ac425f350 Moved default send callbacks to separate functions. 2020-08-11 18:13:40 +04:00
23rd
edceed28d7 Moved code for send context menu to separate file. 2020-08-11 18:13:40 +04:00
23rd
1fee7d1a41 Added ability to schedule and send silently media from inline bots. 2020-08-11 18:13:40 +04:00
23rd
d1d153e886 Added Api::SendOptions passing from TabbedSelector for inline results. 2020-08-11 18:13:40 +04:00
23rd
5b95585725 Added other items to TabbedPanel's context menu. 2020-08-11 18:13:40 +04:00
23rd
43056107fd Added ability to pass SendMenuType to TabbedSelector. 2020-08-11 18:13:40 +04:00
23rd
7db9843543 Added initial context menu to TabbedPanel.
Added ability to schedule and send silently stickers and GIFs.
2020-08-11 18:13:40 +04:00
23rd
a95a324401 Separated send menu filling to another method. 2020-08-11 18:13:40 +04:00
23rd
759e802eba Added ability to pass Api::SendOptions from TabbedSelector. 2020-08-11 18:13:40 +04:00
23rd
b71062561a Added ability to drag'n'drop videos in SendFilesBox. 2020-08-11 18:13:09 +04:00
23rd
2576312cd4 Fixed autodownloading of dictionaries at logout. 2020-08-11 18:13:09 +04:00
23rd
972bbbce6a Fixed online status stuck when switching between accounts one more time.
The first attempt to fix this bug is here: 8171ed6c12.
It caused a crash, so it was reverted here: 2ef47222f4.
2020-08-11 12:28:41 +03:00
Ilya Fedin
65e2bbee3e Override cursor only if no buttons pressed 2020-08-11 12:58:07 +04:00
ilya-fedin
bbe2adc44f Fix AppImage overwrite by updater 2020-08-11 10:58:17 +04:00
Ilya Fedin
449fa0ef2c Don't focus parent window after opening file dialog 2020-08-11 10:57:11 +04:00
Ilya Fedin
dc9d0aae54 Add support for showing window menu on Wayland 2020-08-11 10:56:29 +04:00
Ilya Fedin
08b67e383e Add -D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF to docs & actions 2020-08-11 10:55:50 +04:00
Ilya Fedin
4e5e30a8dd Update fcitx5-qt 2020-08-11 10:55:50 +04:00
Ilya Fedin
cbeaee24c9 Update submodules & patches commit 2020-08-11 10:55:50 +04:00
Ilya Fedin
26bedd95d7 DESKTOP_APP_USE_PACKAGED_FONTS respects DESKTOP_APP_USE_PACKAGED_LAZY now 2020-08-11 10:55:50 +04:00
Ilya Fedin
252bdd2353 TDESKTOP_DISABLE_DBUS_INTEGRATION -> DESKTOP_APP_DISABLE_DBUS_INTEGRATION 2020-08-11 10:55:50 +04:00
John Preston
361b99b0c9 Use threaded decoding in ffmpeg streaming. 2020-07-28 20:14:45 +04:00
John Preston
e293a26029 Fix crash in incorrect passcode input. 2020-07-28 20:14:45 +04:00
Magnus Groß
4f3f1e18a2 Export chats with ISO 8601 dates
Fixes #6020
2020-07-27 09:29:25 +04:00
John Preston
188b98b4d8 Update cmake_helpers submodule. 2020-07-27 09:28:12 +04:00
John Preston
fe639078a6 Version 2.2: Fix default-night mode. 2020-07-26 16:55:04 +04:00
John Preston
e5434ea491 Version 2.2.
- Quickly switch between different Telegram accounts
if you use multiple phone numbers.
- Share and store unlimited files of any type, now up to 2'000 MB each.
- Edit your scheduled messages.
- Use Auto-Night Mode to make Telegram night mode match
the system Dark Mode settings.
- Also added an option to switch to system window frame
in Windows and Linux.
2020-07-26 13:48:13 +04:00
John Preston
ce4338fae4 Closed alpha version 2.1.22.1. 2020-07-25 11:36:02 +04:00
John Preston
5a1a8af222 Suggest enabling archive and mute in settings. 2020-07-25 11:25:51 +04:00
John Preston
152b49c65c Reset notify settings on unarchive. 2020-07-25 11:25:22 +04:00
John Preston
5c5414b680 Improve video userpics in chat history. 2020-07-25 11:25:22 +04:00
John Preston
f99960e1f6 Play video userpics in photo change messages. 2020-07-25 11:25:22 +04:00
John Preston
e363b254f6 Add Unarchive button to ContactStatus bar. 2020-07-25 11:25:19 +04:00
John Preston
3aea9cb3ca Add archive and mute settings editing. 2020-07-25 11:24:37 +04:00
John Preston
060fe6a928 Play video userpics in profiles and settings. 2020-07-25 11:24:11 +04:00
John Preston
8c45b5e0f8 Show video userpics in media viewer. 2020-07-25 11:24:11 +04:00
John Preston
0126578dbd Allow to load video components of photos. 2020-07-25 11:24:11 +04:00
John Preston
1a9c241b96 Update API scheme to layer 116. 2020-07-25 11:24:11 +04:00
John Preston
638d4d63c5 Update API scheme to layer 115. 2020-07-25 11:24:11 +04:00
232 changed files with 6090 additions and 1986 deletions


@@ -423,8 +423,8 @@ jobs:
git clone -b v5.12.8 --depth=1 git://code.qt.io/qt/qt5.git qt_${QT}
cd qt_${QT}
perl init-repository --module-subset=qtbase,qtwayland,qtimageformats,qtsvg,qtx11extras
git submodule update qtbase qtwayland qtimageformats qtsvg qtx11extras
perl init-repository --module-subset=qtbase,qtwayland,qtimageformats,qtsvg
git submodule update qtbase qtwayland qtimageformats qtsvg
cd qtbase
find ../../patches/qtbase_${QT} -type f -print0 | sort -z | xargs -r0 git apply
cd ..
@@ -530,6 +530,7 @@ jobs:
-D CMAKE_CXX_FLAGS="-s" \
-D TDESKTOP_API_TEST=ON \
-D DESKTOP_APP_USE_PACKAGED=OFF \
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF \
$DEFINE
cd ../out/Debug


@@ -79,6 +79,7 @@ jobs:
- name: First set up.
run: |
sudo chown -R `whoami`:admin /usr/local/share
brew install automake fdk-aac lame libass libtool libvorbis libvpx \
ninja opus sdl shtool texi2html theora x264 xvid yasm pkg-config
@@ -462,7 +463,11 @@ jobs:
echo ::set-env name=ARTIFACT_NAME::Telegram
fi
./configure.sh -D TDESKTOP_API_TEST=ON -D DESKTOP_APP_USE_PACKAGED=OFF $DEFINE
./configure.sh \
-D TDESKTOP_API_TEST=ON \
-D DESKTOP_APP_USE_PACKAGED=OFF \
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF \
$DEFINE
cd ../out


@@ -365,6 +365,7 @@ jobs:
call configure.bat ^
-D TDESKTOP_API_TEST=ON ^
-D DESKTOP_APP_USE_PACKAGED=OFF ^
-D DESKTOP_APP_DISABLE_CRASH_REPORTS=OFF ^
%TDESKTOP_BUILD_DEFINE% ^
-DCMAKE_SYSTEM_VERSION=%SDK%

.gitmodules (6 lines changed)

@@ -94,3 +94,9 @@
[submodule "Telegram/ThirdParty/fcitx5-qt"]
path = Telegram/ThirdParty/fcitx5-qt
url = https://github.com/fcitx/fcitx5-qt.git
[submodule "Telegram/lib_webrtc"]
path = Telegram/lib_webrtc
url = https://github.com/desktop-app/lib_webrtc.git
[submodule "Telegram/ThirdParty/tgcalls"]
path = Telegram/ThirdParty/tgcalls
url = https://github.com/TelegramMessenger/tgcalls.git


@@ -18,6 +18,7 @@ endif()
add_subdirectory(lib_storage)
add_subdirectory(lib_lottie)
add_subdirectory(lib_qr)
add_subdirectory(lib_webrtc)
add_subdirectory(codegen)
include(CheckCXXSourceCompiles)
@@ -34,6 +35,7 @@ include(cmake/lib_ffmpeg.cmake)
include(cmake/lib_mtproto.cmake)
include(cmake/lib_scheme.cmake)
include(cmake/lib_tgvoip.cmake)
include(cmake/lib_tgcalls.cmake)
set(style_files
boxes/boxes.style
@@ -101,10 +103,13 @@ endif()
target_link_libraries(Telegram
PRIVATE
tdesktop::lib_tgcalls_legacy
tdesktop::lib_tgcalls
tdesktop::lib_tgvoip
tdesktop::lib_mtproto
tdesktop::lib_scheme
tdesktop::lib_export
tdesktop::lib_tgvoip
desktop-app::lib_webrtc
desktop-app::lib_base
desktop-app::lib_crl
desktop-app::lib_ui
@@ -190,9 +195,13 @@ PRIVATE
api/api_bot.h
api/api_chat_filters.cpp
api/api_chat_filters.h
api/api_chat_invite.cpp
api/api_chat_invite.h
api/api_common.h
api/api_editing.cpp
api/api_editing.h
api/api_global_privacy.cpp
api/api_global_privacy.h
api/api_hash.cpp
api/api_hash.h
api/api_media.cpp
@@ -309,17 +318,20 @@ PRIVATE
calls/calls_box_controller.h
calls/calls_call.cpp
calls/calls_call.h
calls/calls_controller.cpp
calls/calls_controller.h
calls/calls_controller_tgvoip.h
calls/calls_emoji_fingerprint.cpp
calls/calls_emoji_fingerprint.h
calls/calls_instance.cpp
calls/calls_instance.h
calls/calls_panel.cpp
calls/calls_panel.h
calls/calls_signal_bars.cpp
calls/calls_signal_bars.h
calls/calls_top_bar.cpp
calls/calls_top_bar.h
calls/calls_userpic.cpp
calls/calls_userpic.h
calls/calls_video_bubble.cpp
calls/calls_video_bubble.h
chat_helpers/bot_keyboard.cpp
chat_helpers/bot_keyboard.h
chat_helpers/emoji_keywords.cpp
@@ -336,6 +348,8 @@ PRIVATE
chat_helpers/gifs_list_widget.h
chat_helpers/message_field.cpp
chat_helpers/message_field.h
chat_helpers/send_context_menu.cpp
chat_helpers/send_context_menu.h
chat_helpers/spellchecker_common.cpp
chat_helpers/spellchecker_common.h
chat_helpers/stickers_emoji_image_loader.cpp
@@ -1216,6 +1230,18 @@ elseif (APPLE)
)
endif()
endif()
elseif (LINUX)
if (NOT TDESKTOP_DISABLE_GTK_INTEGRATION)
find_package(PkgConfig REQUIRED)
pkg_search_module(GTK REQUIRED gtk+-2.0 gtk+-3.0)
target_include_directories(Telegram PRIVATE ${GTK_INCLUDE_DIRS})
if (DESKTOP_APP_USE_PACKAGED)
find_library(X11_LIBRARY X11)
target_link_libraries(Telegram PRIVATE ${X11_LIBRARY})
endif()
endif()
endif()
if (build_macstore)


@@ -13,6 +13,8 @@ pacman --noconfirm -S pkg-config
PKG_CONFIG_PATH="/mingw64/lib/pkgconfig:$PKG_CONFIG_PATH"
./configure --toolchain=msvc \
--extra-cflags="-DCONFIG_SAFE_BITSTREAM_READER=1" \
--extra-cxxflags="-DCONFIG_SAFE_BITSTREAM_READER=1" \
--extra-ldflags="-libpath:$FullExecPath/../opus/win32/VS2015/Win32/Release" \
--disable-programs \
--disable-doc \

Binary files not shown: a number of image assets were added or updated; the compare view listed only their before/after byte sizes, with no file names.


@@ -424,6 +424,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_settings_passcode_title" = "Local passcode";
"lng_settings_password_title" = "Two-step verification";
"lng_settings_sessions_title" = "Active sessions";
"lng_settings_new_unknown" = "New chats from unknown users";
"lng_settings_auto_archive" = "Archive and Mute";
"lng_settings_auto_archive_about" = "Automatically archive and mute new chats, groups and channels from non-contacts.";
"lng_settings_destroy_title" = "Delete my account";
"lng_settings_network_proxy" = "Network and proxy";
"lng_settings_version_info" = "Version and updates";
@@ -439,6 +442,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_settings_auto_night_warning" = "You have enabled auto-night mode. If you want to change the dark mode settings, you'll need to disable it first.";
"lng_settings_auto_night_disable" = "Disable";
"lng_suggest_hide_new_title" = "Hide new chats?";
"lng_suggest_hide_new_about" = "You are receiving lots of new chats from users who are not in your Contact List.\n\nDo you want to have such chats **automatically muted** and **archived**?";
"lng_suggest_hide_new_to_settings" = "Go to Settings";
"lng_settings_spellchecker" = "Spell checker";
"lng_settings_system_spellchecker" = "Use system spell checker";
"lng_settings_custom_spellchecker" = "Use spell checker";
@@ -1132,6 +1139,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_group_invite_members#one" = "{count} member, among them:";
"lng_group_invite_members#other" = "{count} members, among them:";
"lng_channel_invite_private" = "This channel is private.\nPlease join it to continue viewing its content.";
"lng_group_invite_create" = "Create an invite link";
"lng_group_invite_about" = "Telegram users will be able to join\nyour group by following this link.";
@@ -1286,6 +1294,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_new_contact_share_done" = "{user} can now see your phone number.";
"lng_new_contact_add_name" = "Add {user} to contacts";
"lng_new_contact_add_done" = "{user} is now in your contact list.";
"lng_new_contact_unarchive" = "Unarchive";
"lng_cant_send_to_not_contact" = "Sorry, you can only send messages to\nmutual contacts at the moment.\n{more_info}";
"lng_cant_invite_not_contact" = "Sorry, you can only add mutual contacts\nto groups at the moment.\n{more_info}";
"lng_cant_more_info" = "More info »";
@@ -1459,6 +1468,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_context_copy_text" = "Copy Text";
"lng_context_open_gif" = "Open GIF";
"lng_context_save_gif" = "Save GIF";
"lng_context_delete_gif" = "Delete GIF";
"lng_context_attached_stickers" = "Attached Stickers";
"lng_context_to_msg" = "Go To Message";
"lng_context_reply_msg" = "Reply";
@@ -1726,16 +1736,33 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_call_box_status_group" = "({amount}) {status}";
"lng_call_outgoing" = "Outgoing call";
"lng_call_video_outgoing" = "Outgoing video call";
"lng_call_incoming" = "Incoming call";
"lng_call_video_incoming" = "Incoming video call";
"lng_call_missed" = "Missed call";
"lng_call_video_missed" = "Missed video call";
"lng_call_cancelled" = "Cancelled call";
"lng_call_video_cancelled" = "Cancelled video call";
"lng_call_declined" = "Declined call";
"lng_call_video_declined" = "Declined video call";
"lng_call_duration_info" = "{time}, {duration}";
"lng_call_type_and_duration" = "{type} ({duration})";
"lng_call_rate_label" = "Please rate the quality of your call";
"lng_call_rate_comment" = "Comment (optional)";
"lng_call_start_video" = "Start Video";
"lng_call_stop_video" = "Stop Video";
"lng_call_end_call" = "End Call";
"lng_call_mute_audio" = "Mute";
"lng_call_unmute_audio" = "Unmute";
"lng_call_accept" = "Accept";
"lng_call_decline" = "Decline";
"lng_call_redial" = "Redial";
"lng_call_cancel" = "Cancel";
"lng_call_microphone_off" = "{user}'s microphone is off";
"lng_no_mic_permission" = "Telegram needs access to your microphone so that you can make calls and record voice messages.";
"lng_player_message_today" = "Today at {time}";


@@ -62,10 +62,9 @@ inputMediaUploadedPhoto#1e287d04 flags:# file:InputFile stickers:flags.0?Vector<
inputMediaPhoto#b3ba0635 flags:# id:InputPhoto ttl_seconds:flags.0?int = InputMedia;
inputMediaGeoPoint#f9c44144 geo_point:InputGeoPoint = InputMedia;
inputMediaContact#f8ab7dfb phone_number:string first_name:string last_name:string vcard:string = InputMedia;
inputMediaUploadedDocument#5b38c6c1 flags:# nosound_video:flags.3?true file:InputFile thumb:flags.2?InputFile mime_type:string attributes:Vector<DocumentAttribute> stickers:flags.0?Vector<InputDocument> ttl_seconds:flags.1?int = InputMedia;
inputMediaUploadedDocument#5b38c6c1 flags:# nosound_video:flags.3?true force_file:flags.4?true file:InputFile thumb:flags.2?InputFile mime_type:string attributes:Vector<DocumentAttribute> stickers:flags.0?Vector<InputDocument> ttl_seconds:flags.1?int = InputMedia;
inputMediaDocument#23ab23d2 flags:# id:InputDocument ttl_seconds:flags.0?int = InputMedia;
inputMediaVenue#c13d1c11 geo_point:InputGeoPoint title:string address:string provider:string venue_id:string venue_type:string = InputMedia;
inputMediaGifExternal#4843b0fd url:string q:string = InputMedia;
inputMediaPhotoExternal#e5bbfe1a flags:# url:string ttl_seconds:flags.0?int = InputMedia;
inputMediaDocumentExternal#fb52dc99 flags:# url:string ttl_seconds:flags.0?int = InputMedia;
inputMediaGame#d33f43f3 id:InputGame = InputMedia;
@@ -75,7 +74,7 @@ inputMediaPoll#f94e5f1 flags:# poll:Poll correct_answers:flags.0?Vector<bytes> s
inputMediaDice#e66fbf7b emoticon:string = InputMedia;
inputChatPhotoEmpty#1ca48f57 = InputChatPhoto;
inputChatUploadedPhoto#927c55b4 file:InputFile = InputChatPhoto;
inputChatUploadedPhoto#c642724e flags:# file:flags.0?InputFile video:flags.1?InputFile video_start_ts:flags.2?double = InputChatPhoto;
inputChatPhoto#8953ad37 id:InputPhoto = InputChatPhoto;
inputGeoPointEmpty#e4c123d6 = InputGeoPoint;
@@ -110,10 +109,10 @@ storage.fileMp4#b3cea0e4 = storage.FileType;
storage.fileWebp#1081464c = storage.FileType;
userEmpty#200250ba id:int = User;
user#938458c1 flags:# self:flags.10?true contact:flags.11?true mutual_contact:flags.12?true deleted:flags.13?true bot:flags.14?true bot_chat_history:flags.15?true bot_nochats:flags.16?true verified:flags.17?true restricted:flags.18?true min:flags.20?true bot_inline_geo:flags.21?true support:flags.23?true scam:flags.24?true id:int access_hash:flags.0?long first_name:flags.1?string last_name:flags.2?string username:flags.3?string phone:flags.4?string photo:flags.5?UserProfilePhoto status:flags.6?UserStatus bot_info_version:flags.14?int restriction_reason:flags.18?Vector<RestrictionReason> bot_inline_placeholder:flags.19?string lang_code:flags.22?string = User;
user#938458c1 flags:# self:flags.10?true contact:flags.11?true mutual_contact:flags.12?true deleted:flags.13?true bot:flags.14?true bot_chat_history:flags.15?true bot_nochats:flags.16?true verified:flags.17?true restricted:flags.18?true min:flags.20?true bot_inline_geo:flags.21?true support:flags.23?true scam:flags.24?true apply_min_photo:flags.25?true id:int access_hash:flags.0?long first_name:flags.1?string last_name:flags.2?string username:flags.3?string phone:flags.4?string photo:flags.5?UserProfilePhoto status:flags.6?UserStatus bot_info_version:flags.14?int restriction_reason:flags.18?Vector<RestrictionReason> bot_inline_placeholder:flags.19?string lang_code:flags.22?string = User;
userProfilePhotoEmpty#4f11bae1 = UserProfilePhoto;
userProfilePhoto#ecd75d8c photo_id:long photo_small:FileLocation photo_big:FileLocation dc_id:int = UserProfilePhoto;
userProfilePhoto#69d3ab26 flags:# has_video:flags.0?true photo_id:long photo_small:FileLocation photo_big:FileLocation dc_id:int = UserProfilePhoto;
userStatusEmpty#9d05049 = UserStatus;
userStatusOnline#edb93949 expires:int = UserStatus;
@@ -129,7 +128,7 @@ channel#d31a961e flags:# creator:flags.0?true left:flags.2?true broadcast:flags.
channelForbidden#289da732 flags:# broadcast:flags.5?true megagroup:flags.8?true id:int access_hash:long title:string until_date:flags.16?int = Chat;
chatFull#1b7c9db3 flags:# can_set_username:flags.7?true has_scheduled:flags.8?true id:int about:string participants:ChatParticipants chat_photo:flags.2?Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:flags.3?Vector<BotInfo> pinned_msg_id:flags.6?int folder_id:flags.11?int = ChatFull;
channelFull#f0e6672a flags:# can_view_participants:flags.3?true can_set_username:flags.6?true can_set_stickers:flags.7?true hidden_prehistory:flags.10?true can_view_stats:flags.12?true can_set_location:flags.16?true has_scheduled:flags.19?true id:int about:string participants_count:flags.0?int admins_count:flags.1?int kicked_count:flags.2?int banned_count:flags.2?int online_count:flags.13?int read_inbox_max_id:int read_outbox_max_id:int unread_count:int chat_photo:Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:Vector<BotInfo> migrated_from_chat_id:flags.4?int migrated_from_max_id:flags.4?int pinned_msg_id:flags.5?int stickerset:flags.8?StickerSet available_min_id:flags.9?int folder_id:flags.11?int linked_chat_id:flags.14?int location:flags.15?ChannelLocation slowmode_seconds:flags.17?int slowmode_next_send_date:flags.18?int stats_dc:flags.12?int pts:int = ChatFull;
channelFull#f0e6672a flags:# can_view_participants:flags.3?true can_set_username:flags.6?true can_set_stickers:flags.7?true hidden_prehistory:flags.10?true can_set_location:flags.16?true has_scheduled:flags.19?true can_view_stats:flags.20?true id:int about:string participants_count:flags.0?int admins_count:flags.1?int kicked_count:flags.2?int banned_count:flags.2?int online_count:flags.13?int read_inbox_max_id:int read_outbox_max_id:int unread_count:int chat_photo:Photo notify_settings:PeerNotifySettings exported_invite:ExportedChatInvite bot_info:Vector<BotInfo> migrated_from_chat_id:flags.4?int migrated_from_max_id:flags.4?int pinned_msg_id:flags.5?int stickerset:flags.8?StickerSet available_min_id:flags.9?int folder_id:flags.11?int linked_chat_id:flags.14?int location:flags.15?ChannelLocation slowmode_seconds:flags.17?int slowmode_next_send_date:flags.18?int stats_dc:flags.12?int pts:int = ChatFull;
chatParticipant#c8d7493e user_id:int inviter_id:int date:int = ChatParticipant;
chatParticipantCreator#da13538a user_id:int = ChatParticipant;
@@ -139,7 +138,7 @@ chatParticipantsForbidden#fc900c2b flags:# chat_id:int self_participant:flags.0?
chatParticipants#3f460fed chat_id:int participants:Vector<ChatParticipant> version:int = ChatParticipants;
chatPhotoEmpty#37c1011c = ChatPhoto;
chatPhoto#475cdbd5 photo_small:FileLocation photo_big:FileLocation dc_id:int = ChatPhoto;
chatPhoto#d20b9f3c flags:# has_video:flags.0?true photo_small:FileLocation photo_big:FileLocation dc_id:int = ChatPhoto;
messageEmpty#83e5de54 id:int = Message;
message#452c0e65 flags:# out:flags.1?true mentioned:flags.4?true media_unread:flags.5?true silent:flags.13?true post:flags.14?true from_scheduled:flags.18?true legacy:flags.19?true edit_hide:flags.21?true id:int from_id:flags.8?int to_id:Peer fwd_from:flags.2?MessageFwdHeader via_bot_id:flags.11?int reply_to_msg_id:flags.3?int date:int message:string media:flags.9?MessageMedia reply_markup:flags.6?ReplyMarkup entities:flags.7?Vector<MessageEntity> views:flags.10?int edit_date:flags.15?int post_author:flags.16?string grouped_id:flags.17?long restriction_reason:flags.22?Vector<RestrictionReason> = Message;
@@ -187,7 +186,7 @@ dialog#2c171f72 flags:# pinned:flags.2?true unread_mark:flags.3?true peer:Peer t
dialogFolder#71bd134c flags:# pinned:flags.2?true folder:Folder peer:Peer top_message:int unread_muted_peers_count:int unread_unmuted_peers_count:int unread_muted_messages_count:int unread_unmuted_messages_count:int = Dialog;
photoEmpty#2331b22d id:long = Photo;
photo#d07504a5 flags:# has_stickers:flags.0?true id:long access_hash:long file_reference:bytes date:int sizes:Vector<PhotoSize> dc_id:int = Photo;
photo#fb197a65 flags:# has_stickers:flags.0?true id:long access_hash:long file_reference:bytes date:int sizes:Vector<PhotoSize> video_sizes:flags.1?Vector<VideoSize> dc_id:int = Photo;
photoSizeEmpty#e17e23c type:string = PhotoSize;
photoSize#77bfb61b type:string location:FileLocation w:int h:int size:int = PhotoSize;
@@ -213,7 +212,7 @@ inputPeerNotifySettings#9c3d198e flags:# show_previews:flags.0?Bool silent:flags
peerNotifySettings#af509d20 flags:# show_previews:flags.0?Bool silent:flags.1?Bool mute_until:flags.2?int sound:flags.3?string = PeerNotifySettings;
peerSettings#818426cd flags:# report_spam:flags.0?true add_contact:flags.1?true block_contact:flags.2?true share_contact:flags.3?true need_contacts_exception:flags.4?true report_geo:flags.5?true = PeerSettings;
peerSettings#733f2961 flags:# report_spam:flags.0?true add_contact:flags.1?true block_contact:flags.2?true share_contact:flags.3?true need_contacts_exception:flags.4?true report_geo:flags.5?true autoarchived:flags.7?true geo_distance:flags.6?int = PeerSettings;
wallPaper#a437c3ed id:long flags:# creator:flags.0?true default:flags.1?true pattern:flags.3?true dark:flags.4?true access_hash:long slug:string document:Document settings:flags.2?WallPaperSettings = WallPaper;
wallPaperNoFile#8af40b25 flags:# default:flags.1?true dark:flags.4?true settings:flags.2?WallPaperSettings = WallPaper;
@@ -226,7 +225,7 @@ inputReportReasonOther#e1746d0a text:string = ReportReason;
inputReportReasonCopyright#9b89f93a = ReportReason;
inputReportReasonGeoIrrelevant#dbd4feed = ReportReason;
userFull#edf17c12 flags:# blocked:flags.0?true phone_calls_available:flags.4?true phone_calls_private:flags.5?true can_pin_message:flags.7?true has_scheduled:flags.12?true user:User about:flags.1?string settings:PeerSettings profile_photo:flags.2?Photo notify_settings:PeerNotifySettings bot_info:flags.3?BotInfo pinned_msg_id:flags.6?int common_chats_count:int folder_id:flags.11?int = UserFull;
userFull#edf17c12 flags:# blocked:flags.0?true phone_calls_available:flags.4?true phone_calls_private:flags.5?true can_pin_message:flags.7?true has_scheduled:flags.12?true video_calls_available:flags.13?true user:User about:flags.1?string settings:PeerSettings profile_photo:flags.2?Photo notify_settings:PeerNotifySettings bot_info:flags.3?BotInfo pinned_msg_id:flags.6?int common_chats_count:int folder_id:flags.11?int = UserFull;
contact#f911c994 user_id:int mutual:Bool = Contact;
@@ -358,6 +357,7 @@ updateDialogFilter#26ffde7d flags:# id:int filter:flags.0?DialogFilter = Update;
updateDialogFilterOrder#a5d72105 order:Vector<int> = Update;
updateDialogFilters#3504914f = Update;
updatePhoneCallSignalingData#2661bf09 phone_call_id:long data:bytes = Update;
updateChannelParticipant#65d2b464 flags:# channel_id:int date:int user_id:int prev_participant:flags.0?ChannelParticipant new_participant:flags.1?ChannelParticipant qts:int = Update;
updates.state#a56c2a3e pts:int qts:int date:int seq:int unread_count:int = updates.State;
@@ -395,7 +395,7 @@ help.inviteText#18cb9f78 message:string = help.InviteText;
encryptedChatEmpty#ab7ec0a0 id:int = EncryptedChat;
encryptedChatWaiting#3bf703dc id:int access_hash:long date:int admin_id:int participant_id:int = EncryptedChat;
encryptedChatRequested#c878527e id:int access_hash:long date:int admin_id:int participant_id:int g_a:bytes = EncryptedChat;
encryptedChatRequested#62718a82 flags:# folder_id:flags.0?int id:int access_hash:long date:int admin_id:int participant_id:int g_a:bytes = EncryptedChat;
encryptedChat#fa56ce36 id:int access_hash:long date:int admin_id:int participant_id:int g_a_or_b:bytes key_fingerprint:long = EncryptedChat;
encryptedChatDiscarded#13d6dd27 id:int = EncryptedChat;
@@ -529,6 +529,7 @@ chatInviteExported#fc2e05bc link:string = ExportedChatInvite;
chatInviteAlready#5a686d7c chat:Chat = ChatInvite;
chatInvite#dfc2f58e flags:# channel:flags.0?true broadcast:flags.1?true public:flags.2?true megagroup:flags.3?true title:string photo:Photo participants_count:int participants:flags.4?Vector<User> = ChatInvite;
chatInvitePeek#61695cb0 chat:Chat expires:int = ChatInvite;
inputStickerSetEmpty#ffb62b95 = InputStickerSet;
inputStickerSetID#9de7a269 id:long access_hash:long = InputStickerSet;
@@ -619,11 +620,6 @@ channels.channelParticipant#d0d9b163 participant:ChannelParticipant users:Vector
help.termsOfService#780a0310 flags:# popup:flags.0?true id:DataJSON text:string entities:Vector<MessageEntity> min_age_confirm:flags.1?int = help.TermsOfService;
foundGif#162ecc1f url:string thumb_url:string content_url:string content_type:string w:int h:int = FoundGif;
foundGifCached#9c750409 url:string photo:Photo document:Document = FoundGif;
messages.foundGifs#450a1c0a next_offset:int results:Vector<FoundGif> = messages.FoundGifs;
messages.savedGifsNotModified#e8025ca2 = messages.SavedGifs;
messages.savedGifs#2e0709a5 hash:int gifs:Vector<Document> = messages.SavedGifs;
@@ -822,13 +818,14 @@ inputStickerSetItem#ffa0a496 flags:# document:InputDocument emoji:string mask_co
inputPhoneCall#1e36fded id:long access_hash:long = InputPhoneCall;
phoneCallEmpty#5366c915 id:long = PhoneCall;
phoneCallWaiting#1b8f4ad1 flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int protocol:PhoneCallProtocol receive_date:flags.0?int = PhoneCall;
phoneCallRequested#87eabb53 flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_hash:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCallAccepted#997c454a flags:# video:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_b:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCall#8742ae7f flags:# p2p_allowed:flags.5?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_or_b:bytes key_fingerprint:long protocol:PhoneCallProtocol connections:Vector<PhoneConnection> start_date:int = PhoneCall;
phoneCallDiscarded#50ca4de1 flags:# need_rating:flags.2?true need_debug:flags.3?true video:flags.5?true id:long reason:flags.0?PhoneCallDiscardReason duration:flags.1?int = PhoneCall;
phoneCallWaiting#1b8f4ad1 flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int protocol:PhoneCallProtocol receive_date:flags.0?int = PhoneCall;
phoneCallRequested#87eabb53 flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_hash:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCallAccepted#997c454a flags:# video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_b:bytes protocol:PhoneCallProtocol = PhoneCall;
phoneCall#8742ae7f flags:# p2p_allowed:flags.5?true video:flags.6?true id:long access_hash:long date:int admin_id:int participant_id:int g_a_or_b:bytes key_fingerprint:long protocol:PhoneCallProtocol connections:Vector<PhoneConnection> start_date:int = PhoneCall;
phoneCallDiscarded#50ca4de1 flags:# need_rating:flags.2?true need_debug:flags.3?true video:flags.6?true id:long reason:flags.0?PhoneCallDiscardReason duration:flags.1?int = PhoneCall;
phoneConnection#9d4c17c0 id:long ip:string ipv6:string port:int peer_tag:bytes = PhoneConnection;
phoneConnectionWebrtc#635fe375 flags:# turn:flags.0?true stun:flags.1?true id:long ip:string ipv6:string port:int username:string password:string = PhoneConnection;
phoneCallProtocol#fc878fc8 flags:# udp_p2p:flags.0?true udp_reflector:flags.1?true min_layer:int max_layer:int library_versions:Vector<string> = PhoneCallProtocol;
@@ -1141,7 +1138,17 @@ stats.broadcastStats#bdf78394 period:StatsDateRangeDays followers:StatsAbsValueA
help.promoDataEmpty#98f6ac75 expires:int = help.PromoData;
help.promoData#8c39793f flags:# proxy:flags.0?true expires:int peer:Peer chats:Vector<Chat> users:Vector<User> psa_type:flags.1?string psa_message:flags.2?string = help.PromoData;
videoSize#435bb987 type:string location:FileLocation w:int h:int size:int = VideoSize;
videoSize#e831c556 flags:# type:string location:FileLocation w:int h:int size:int video_start_ts:flags.0?double = VideoSize;
statsGroupTopPoster#18f3d0f7 user_id:int messages:int avg_chars:int = StatsGroupTopPoster;
statsGroupTopAdmin#6014f412 user_id:int deleted:int kicked:int banned:int = StatsGroupTopAdmin;
statsGroupTopInviter#31962a4c user_id:int invitations:int = StatsGroupTopInviter;
stats.megagroupStats#ef7ff916 period:StatsDateRangeDays members:StatsAbsValueAndPrev messages:StatsAbsValueAndPrev viewers:StatsAbsValueAndPrev posters:StatsAbsValueAndPrev growth_graph:StatsGraph members_graph:StatsGraph new_members_by_source_graph:StatsGraph languages_graph:StatsGraph messages_graph:StatsGraph actions_graph:StatsGraph top_hours_graph:StatsGraph weekdays_graph:StatsGraph top_posters:Vector<StatsGroupTopPoster> top_admins:Vector<StatsGroupTopAdmin> top_inviters:Vector<StatsGroupTopInviter> users:Vector<User> = stats.MegagroupStats;
globalPrivacySettings#bea2f424 flags:# archive_and_mute_new_noncontact_peers:flags.0?Bool = GlobalPrivacySettings;
---functions---
@@ -1237,6 +1244,8 @@ account.getThemes#285946f8 format:string hash:int = account.Themes;
account.setContentSettings#b574b16b flags:# sensitive_enabled:flags.0?true = Bool;
account.getContentSettings#8b9b4dae = account.ContentSettings;
account.getMultiWallPapers#65ad71dc wallpapers:Vector<InputWallPaper> = Vector<WallPaper>;
account.getGlobalPrivacySettings#eb2b4cf6 = GlobalPrivacySettings;
account.setGlobalPrivacySettings#1edaaac2 settings:GlobalPrivacySettings = GlobalPrivacySettings;
users.getUsers#d91a548 id:Vector<InputUser> = Vector<User>;
users.getFullUser#ca30a5b1 id:InputUser = UserFull;
@@ -1312,7 +1321,6 @@ messages.migrateChat#15a3b8e3 chat_id:int = Updates;
messages.searchGlobal#bf7225a4 flags:# folder_id:flags.0?int q:string offset_rate:int offset_peer:InputPeer offset_id:int limit:int = messages.Messages;
messages.reorderStickerSets#78337739 flags:# masks:flags.0?true order:Vector<long> = Bool;
messages.getDocumentByHash#338e2464 sha256:bytes size:int mime_type:string = Document;
messages.searchGifs#bf9a776b q:string offset:int = messages.FoundGifs;
messages.getSavedGifs#83bf3d52 hash:int = messages.SavedGifs;
messages.saveGif#327a30cb id:InputDocument unsave:Bool = Bool;
messages.getInlineBotResults#514e999d flags:# bot:InputUser peer:InputPeer geo_point:flags.0?InputGeoPoint query:string offset:string = messages.BotResults;
@@ -1391,8 +1399,8 @@ updates.getState#edd4882a = updates.State;
updates.getDifference#25939651 flags:# pts:int pts_total_limit:flags.0?int date:int qts:int = updates.Difference;
updates.getChannelDifference#3173d78 flags:# force:flags.0?true channel:InputChannel filter:ChannelMessagesFilter pts:int limit:int = updates.ChannelDifference;
photos.updateProfilePhoto#f0bb5152 id:InputPhoto = UserProfilePhoto;
photos.uploadProfilePhoto#4f32c098 file:InputFile = photos.Photo;
photos.updateProfilePhoto#72d4742c id:InputPhoto = photos.Photo;
photos.uploadProfilePhoto#89f30f69 flags:# file:flags.0?InputFile video:flags.1?InputFile video_start_ts:flags.2?double = photos.Photo;
photos.deletePhotos#87cf7f2f id:Vector<InputPhoto> = Vector<long>;
photos.getUserPhotos#91cd32a8 user_id:InputUser offset:int max_id:long limit:int = photos.Photos;
@@ -1425,6 +1433,7 @@ help.getUserInfo#38a08d3 user_id:InputUser = help.UserInfo;
help.editUserInfo#66b91b70 user_id:InputUser message:string entities:Vector<MessageEntity> = help.UserInfo;
help.getPromoData#c0977421 = help.PromoData;
help.hidePromoData#1e251c95 peer:InputPeer = Bool;
help.dismissSuggestion#77fa99f suggestion:string = Bool;
channels.readHistory#cc104937 channel:InputChannel max_id:int = Bool;
channels.deleteMessages#84c1fd4e channel:InputChannel id:Vector<int> = messages.AffectedMessages;
@@ -1501,5 +1510,6 @@ folders.deleteFolder#1c295881 folder_id:int = Updates;
stats.getBroadcastStats#ab42441a flags:# dark:flags.0?true channel:InputChannel = stats.BroadcastStats;
stats.loadAsyncGraph#621d5fa0 flags:# token:string x:flags.0?long = StatsGraph;
stats.getMegagroupStats#dcdf8607 flags:# dark:flags.0?true channel:InputChannel = stats.MegagroupStats;
// LAYER 114
// LAYER 117
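
In the TL scheme a field written as video:flags.6?true carries no payload of its own; its presence is encoded as bit 6 of the constructor's flags integer. Moving the phoneCall* video flag from flags.5 to flags.6 therefore changes which bit both peers read and write, which is why the layer bump to 117 matters for call negotiation. A small self-contained C++ illustration of that flag convention (not generated tdesktop code):

	#include <cstdint>
	#include <cstdio>

	// TL conditional fields of type `true` are presence-only bits in `flags`.
	constexpr std::uint32_t kVideoBitOld = 1u << 5; // video:flags.5?true (layer 114)
	constexpr std::uint32_t kVideoBitNew = 1u << 6; // video:flags.6?true (layer 117)

	int main() {
		const std::uint32_t flags = kVideoBitNew; // e.g. flags of phoneCallRequested
		std::printf("read as layer 114 video: %d\n", (flags & kVideoBitOld) != 0);
		std::printf("read as layer 117 video: %d\n", (flags & kVideoBitNew) != 0);
		return 0;
	}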


@@ -9,7 +9,7 @@
<Identity Name="TelegramMessengerLLP.TelegramDesktop"
ProcessorArchitecture="ARCHITECTURE"
Publisher="CN=536BC709-8EE1-4478-AF22-F0F0F26FF64A"
Version="2.1.22.0" />
Version="2.3.0.0" />
<Properties>
<DisplayName>Telegram Desktop</DisplayName>
<PublisherDisplayName>Telegram FZ-LLC</PublisherDisplayName>


@@ -44,8 +44,8 @@ IDI_ICON1 ICON "..\\art\\icon256.ico"
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 2,1,22,0
PRODUCTVERSION 2,1,22,0
FILEVERSION 2,3,0,0
PRODUCTVERSION 2,3,0,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -62,10 +62,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop"
VALUE "FileVersion", "2.1.22.0"
VALUE "FileVersion", "2.3.0.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2020"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "2.1.22.0"
VALUE "ProductVersion", "2.3.0.0"
END
END
BLOCK "VarFileInfo"


@@ -35,8 +35,8 @@ LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 2,1,22,0
PRODUCTVERSION 2,1,22,0
FILEVERSION 2,3,0,0
PRODUCTVERSION 2,3,0,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -53,10 +53,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop Updater"
VALUE "FileVersion", "2.1.22.0"
VALUE "FileVersion", "2.3.0.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2020"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "2.1.22.0"
VALUE "ProductVersion", "2.3.0.0"
END
END
BLOCK "VarFileInfo"


@@ -298,6 +298,28 @@ bool update() {
for (size_t i = 0; i < from.size(); ++i) {
string fname = from[i], tofname = to[i];
// the old file must be removed first to avoid an error if the AppImage is currently mounted by FUSE
struct stat statbuf;
writeLog("Trying to get stat() for '%s'", tofname.c_str());
if (!stat(tofname.c_str(), &statbuf)) {
if (S_ISDIR(statbuf.st_mode)) {
writeLog("Fully clearing path '%s'..", tofname.c_str());
if (!remove_directory(tofname.c_str())) {
writeLog("Error: failed to clear path '%s'", tofname.c_str());
delFolder();
return false;
}
} else {
writeLog("Unlinking file '%s'", tofname.c_str());
if (unlink(tofname.c_str())) {
writeLog("Error: failed to unlink '%s'", tofname.c_str());
delFolder();
return false;
}
}
}
writeLog("Copying file '%s' to '%s'..", fname.c_str(), tofname.c_str());
int copyTries = 0, triesLimit = 30;
do {
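
The updater hunk above calls a remove_directory() helper that is outside this excerpt. A self-contained POSIX sketch of what such a recursive removal could look like, for illustration only (the actual helper in the updater may differ in logging and error handling):

	#include <dirent.h>
	#include <sys/stat.h>
	#include <unistd.h>
	#include <string>

	// Recursively delete a directory tree; returns true on success.
	bool remove_directory(const std::string &path) {
		DIR *dir = opendir(path.c_str());
		if (!dir) {
			return false;
		}
		bool ok = true;
		while (const dirent *entry = readdir(dir)) {
			const std::string name = entry->d_name;
			if (name == "." || name == "..") {
				continue;
			}
			const std::string full = path + "/" + name;
			struct stat st;
			if (lstat(full.c_str(), &st)) {
				ok = false;
			} else if (S_ISDIR(st.st_mode)) {
				ok = remove_directory(full) && ok;
			} else {
				ok = (unlink(full.c_str()) == 0) && ok;
			}
		}
		closedir(dir);
		return ok && (rmdir(path.c_str()) == 0);
	}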


@@ -0,0 +1,230 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "api/api_chat_invite.h"
#include "apiwrap.h"
#include "window/window_session_controller.h"
#include "main/main_session.h"
#include "ui/empty_userpic.h"
#include "core/application.h"
#include "data/data_session.h"
#include "data/data_photo.h"
#include "data/data_photo_media.h"
#include "data/data_channel.h"
#include "data/data_user.h"
#include "data/data_file_origin.h"
#include "boxes/confirm_box.h"
#include "boxes/abstract_box.h"
#include "styles/style_boxes.h"
#include "styles/style_layers.h"
namespace Api {
void CheckChatInvite(
not_null<Window::SessionController*> controller,
const QString &hash,
ChannelData *invitePeekChannel) {
const auto session = &controller->session();
const auto weak = base::make_weak(controller.get());
session->api().checkChatInvite(hash, [=](const MTPChatInvite &result) {
Core::App().hideMediaView();
result.match([=](const MTPDchatInvite &data) {
const auto box = Ui::show(Box<ConfirmInviteBox>(
session,
data,
invitePeekChannel,
[=] { session->api().importChatInvite(hash); }));
if (invitePeekChannel) {
box->boxClosing(
) | rpl::filter([=] {
return !invitePeekChannel->amIn();
}) | rpl::start_with_next([=] {
if (const auto strong = weak.get()) {
strong->clearSectionStack(Window::SectionShow(
Window::SectionShow::Way::ClearStack,
anim::type::normal,
anim::activation::background));
}
}, box->lifetime());
}
}, [=](const MTPDchatInviteAlready &data) {
if (const auto chat = session->data().processChat(data.vchat())) {
if (const auto channel = chat->asChannel()) {
channel->clearInvitePeek();
}
if (const auto strong = weak.get()) {
strong->showPeerHistory(
chat,
Window::SectionShow::Way::Forward);
}
}
}, [=](const MTPDchatInvitePeek &data) {
if (const auto chat = session->data().processChat(data.vchat())) {
if (const auto channel = chat->asChannel()) {
channel->setInvitePeek(hash, data.vexpires().v);
if (const auto strong = weak.get()) {
strong->showPeerHistory(
chat,
Window::SectionShow::Way::Forward);
}
}
}
});
}, [=](const RPCError &error) {
if (error.code() != 400) {
return;
}
Core::App().hideMediaView();
Ui::show(Box<InformBox>(tr::lng_group_invite_bad_link(tr::now)));
});
}
} // namespace Api
ConfirmInviteBox::ConfirmInviteBox(
QWidget*,
not_null<Main::Session*> session,
const MTPDchatInvite &data,
ChannelData *invitePeekChannel,
Fn<void()> submit)
: _session(session)
, _submit(std::move(submit))
, _title(this, st::confirmInviteTitle)
, _status(this, st::confirmInviteStatus)
, _participants(GetParticipants(_session, data))
, _isChannel(data.is_channel() && !data.is_megagroup()) {
const auto title = qs(data.vtitle());
const auto count = data.vparticipants_count().v;
const auto status = [&] {
return invitePeekChannel
? tr::lng_channel_invite_private(tr::now)
: (!_participants.empty() && _participants.size() < count)
? tr::lng_group_invite_members(tr::now, lt_count, count)
: (count > 0)
? tr::lng_chat_status_members(tr::now, lt_count_decimal, count)
: _isChannel
? tr::lng_channel_status(tr::now)
: tr::lng_group_status(tr::now);
}();
_title->setText(title);
_status->setText(status);
const auto photo = _session->data().processPhoto(data.vphoto());
if (!photo->isNull()) {
_photo = photo->createMediaView();
_photo->wanted(Data::PhotoSize::Small, Data::FileOrigin());
if (!_photo->image(Data::PhotoSize::Small)) {
_session->downloaderTaskFinished(
) | rpl::start_with_next([=] {
update();
}, lifetime());
}
} else {
_photoEmpty = std::make_unique<Ui::EmptyUserpic>(
Data::PeerUserpicColor(0),
title);
}
}
ConfirmInviteBox::~ConfirmInviteBox() = default;
auto ConfirmInviteBox::GetParticipants(
not_null<Main::Session*> session,
const MTPDchatInvite &data)
-> std::vector<Participant> {
const auto participants = data.vparticipants();
if (!participants) {
return {};
}
const auto &v = participants->v;
auto result = std::vector<Participant>();
result.reserve(v.size());
for (const auto &participant : v) {
if (const auto user = session->data().processUser(participant)) {
result.push_back(Participant{ user });
}
}
return result;
}
void ConfirmInviteBox::prepare() {
addButton(
(_isChannel
? tr::lng_profile_join_channel()
: tr::lng_profile_join_group()),
_submit);
addButton(tr::lng_cancel(), [=] { closeBox(); });
while (_participants.size() > 4) {
_participants.pop_back();
}
auto newHeight = st::confirmInviteStatusTop + _status->height() + st::boxPadding.bottom();
if (!_participants.empty()) {
int skip = (st::boxWideWidth - 4 * st::confirmInviteUserPhotoSize) / 5;
int padding = skip / 2;
_userWidth = (st::confirmInviteUserPhotoSize + 2 * padding);
int sumWidth = _participants.size() * _userWidth;
int left = (st::boxWideWidth - sumWidth) / 2;
for (const auto &participant : _participants) {
auto name = new Ui::FlatLabel(this, st::confirmInviteUserName);
name->resizeToWidth(st::confirmInviteUserPhotoSize + padding);
name->setText(participant.user->firstName.isEmpty()
? participant.user->name
: participant.user->firstName);
name->moveToLeft(left + (padding / 2), st::confirmInviteUserNameTop);
left += _userWidth;
}
newHeight += st::confirmInviteUserHeight;
}
setDimensions(st::boxWideWidth, newHeight);
}
void ConfirmInviteBox::resizeEvent(QResizeEvent *e) {
BoxContent::resizeEvent(e);
_title->move((width() - _title->width()) / 2, st::confirmInviteTitleTop);
_status->move((width() - _status->width()) / 2, st::confirmInviteStatusTop);
}
void ConfirmInviteBox::paintEvent(QPaintEvent *e) {
BoxContent::paintEvent(e);
Painter p(this);
if (_photo) {
if (const auto image = _photo->image(Data::PhotoSize::Small)) {
p.drawPixmap(
(width() - st::confirmInvitePhotoSize) / 2,
st::confirmInvitePhotoTop,
image->pixCircled(
st::confirmInvitePhotoSize,
st::confirmInvitePhotoSize));
}
} else if (_photoEmpty) {
_photoEmpty->paint(
p,
(width() - st::confirmInvitePhotoSize) / 2,
st::confirmInvitePhotoTop,
width(),
st::confirmInvitePhotoSize);
}
int sumWidth = _participants.size() * _userWidth;
int left = (width() - sumWidth) / 2;
for (auto &participant : _participants) {
participant.user->paintUserpicLeft(
p,
participant.userpic,
left + (_userWidth - st::confirmInviteUserPhotoSize) / 2,
st::confirmInviteUserPhotoTop,
width(),
st::confirmInviteUserPhotoSize);
left += _userWidth;
}
}


@@ -0,0 +1,74 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "ui/layers/box_content.h"
class UserData;
class ChannelData;
namespace Window {
class SessionController;
} // namespace Window
namespace Data {
class CloudImageView;
class PhotoMedia;
} // namespace Data
namespace Ui {
class EmptyUserpic;
} // namespace Ui
namespace Api {
void CheckChatInvite(
not_null<Window::SessionController*> controller,
const QString &hash,
ChannelData *invitePeekChannel = nullptr);
} // namespace Api
class ConfirmInviteBox final : public Ui::BoxContent {
public:
ConfirmInviteBox(
QWidget*,
not_null<Main::Session*> session,
const MTPDchatInvite &data,
ChannelData *invitePeekChannel,
Fn<void()> submit);
~ConfirmInviteBox();
protected:
void prepare() override;
void resizeEvent(QResizeEvent *e) override;
void paintEvent(QPaintEvent *e) override;
private:
struct Participant {
not_null<UserData*> user;
std::shared_ptr<Data::CloudImageView> userpic;
};
static std::vector<Participant> GetParticipants(
not_null<Main::Session*> session,
const MTPDchatInvite &data);
const not_null<Main::Session*> _session;
Fn<void()> _submit;
object_ptr<Ui::FlatLabel> _title;
object_ptr<Ui::FlatLabel> _status;
std::shared_ptr<Data::PhotoMedia> _photo;
std::unique_ptr<Ui::EmptyUserpic> _photoEmpty;
std::vector<Participant> _participants;
bool _isChannel = false;
int _userWidth = 0;
};
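
Api::CheckChatInvite, declared above, is the single entry point for invite-link resolution. A minimal sketch of a hypothetical caller handling a t.me/joinchat/<hash> link; the function name and surrounding plumbing are assumptions for this example, not the actual tdesktop URL-handling code.

	// Hypothetical caller: resolve an invite hash for the active window.
	void openChatInviteLink(
			not_null<Window::SessionController*> controller,
			const QString &hash) {
		// invitePeekChannel defaults to nullptr, so the plain join-confirmation
		// box is shown; passing a channel enables the peek flow instead.
		Api::CheckChatInvite(controller, hash);
	}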


@@ -0,0 +1,103 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "api/api_global_privacy.h"
#include "apiwrap.h"
#include "main/main_session.h"
#include "main/main_account.h"
#include "main/main_app_config.h"
namespace Api {
GlobalPrivacy::GlobalPrivacy(not_null<ApiWrap*> api)
: _session(&api->session())
, _api(&api->instance()) {
}
void GlobalPrivacy::reload(Fn<void()> callback) {
if (callback) {
_callbacks.push_back(std::move(callback));
}
if (_requestId) {
return;
}
_requestId = _api.request(MTPaccount_GetGlobalPrivacySettings(
)).done([=](const MTPGlobalPrivacySettings &result) {
_requestId = 0;
apply(result);
for (const auto &callback : base::take(_callbacks)) {
callback();
}
}).fail([=](const RPCError &error) {
_requestId = 0;
for (const auto &callback : base::take(_callbacks)) {
callback();
}
}).send();
_session->account().appConfig().value(
) | rpl::start_with_next([=] {
_showArchiveAndMute = _session->account().appConfig().get<bool>(
u"autoarchive_setting_available"_q,
false);
}, _session->lifetime());
}
bool GlobalPrivacy::archiveAndMuteCurrent() const {
return _archiveAndMute.current();
}
rpl::producer<bool> GlobalPrivacy::archiveAndMute() const {
return _archiveAndMute.value();
}
rpl::producer<bool> GlobalPrivacy::showArchiveAndMute() const {
using namespace rpl::mappers;
return rpl::combine(
archiveAndMute(),
_showArchiveAndMute.value(),
_1 || _2);
}
rpl::producer<> GlobalPrivacy::suggestArchiveAndMute() const {
return _session->account().appConfig().suggestionRequested(
u"AUTOARCHIVE_POPULAR"_q);
}
void GlobalPrivacy::dismissArchiveAndMuteSuggestion() {
_session->account().appConfig().dismissSuggestion(
u"AUTOARCHIVE_POPULAR"_q);
}
void GlobalPrivacy::update(bool archiveAndMute) {
using Flag = MTPDglobalPrivacySettings::Flag;
_api.request(_requestId).cancel();
_requestId = _api.request(MTPaccount_SetGlobalPrivacySettings(
MTP_globalPrivacySettings(
MTP_flags(Flag::f_archive_and_mute_new_noncontact_peers),
MTP_bool(archiveAndMute))
)).done([=](const MTPGlobalPrivacySettings &result) {
_requestId = 0;
apply(result);
}).fail([=](const RPCError &error) {
_requestId = 0;
}).send();
_archiveAndMute = archiveAndMute;
}
void GlobalPrivacy::apply(const MTPGlobalPrivacySettings &data) {
data.match([&](const MTPDglobalPrivacySettings &data) {
_archiveAndMute = data.varchive_and_mute_new_noncontact_peers()
? mtpIsTrue(*data.varchive_and_mute_new_noncontact_peers())
: false;
});
}
} // namespace Api


@@ -0,0 +1,45 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "mtproto/sender.h"
class ApiWrap;
namespace Main {
class Session;
} // namespace Main
namespace Api {
class GlobalPrivacy final {
public:
explicit GlobalPrivacy(not_null<ApiWrap*> api);
void reload(Fn<void()> callback = nullptr);
void update(bool archiveAndMute);
[[nodiscard]] bool archiveAndMuteCurrent() const;
[[nodiscard]] rpl::producer<bool> archiveAndMute() const;
[[nodiscard]] rpl::producer<bool> showArchiveAndMute() const;
[[nodiscard]] rpl::producer<> suggestArchiveAndMute() const;
void dismissArchiveAndMuteSuggestion();
private:
void apply(const MTPGlobalPrivacySettings &data);
const not_null<Main::Session*> _session;
MTP::Sender _api;
mtpRequestId _requestId = 0;
rpl::variable<bool> _archiveAndMute = false;
rpl::variable<bool> _showArchiveAndMute = false;
std::vector<Fn<void()>> _callbacks;
};
} // namespace Api

View File

@@ -1726,8 +1726,9 @@ void Updates::feedUpdate(const MTPUpdate &update) {
auto &d = update.c_updateEncryptedMessagesRead();
} break;
case mtpc_updatePhoneCall: {
Core::App().calls().handleUpdate(&session(), update.c_updatePhoneCall());
case mtpc_updatePhoneCall:
case mtpc_updatePhoneCallSignalingData: {
Core::App().calls().handleUpdate(&session(), update);
} break;
case mtpc_updateUserBlocked: {

View File
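For context, the single `handleUpdate(&session(), update)` call in the hunk above now has to cover both `updatePhoneCall` and the new `updatePhoneCallSignalingData`. A purely hypothetical sketch of the receiving side in `Calls::Instance`; the `handleCallUpdate` / `handleSignalingData` helper names are placeholders and are not confirmed by this diff:

// Hypothetical dispatcher sketch; helper names are placeholders only.
void Instance::handleUpdate(
        not_null<Main::Session*> session,
        const MTPUpdate &update) {
    update.match([&](const MTPDupdatePhoneCall &data) {
        handleCallUpdate(session, data.vphone_call());
    }, [&](const MTPDupdatePhoneCallSignalingData &data) {
        handleSignalingData(session, data);
    }, [](const auto &) {
        Unexpected("Update type in Calls::Instance::handleUpdate.");
    });
}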

@@ -13,6 +13,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "api/api_text_entities.h"
#include "api/api_self_destruct.h"
#include "api/api_sensitive_content.h"
#include "api/api_global_privacy.h"
#include "api/api_updates.h"
#include "data/stickers/data_stickers.h"
#include "data/data_drafts.h"
@@ -186,7 +187,8 @@ ApiWrap::ApiWrap(not_null<Main::Session*> session)
, _topPromotionTimer([=] { refreshTopPromotion(); })
, _updateNotifySettingsTimer([=] { sendNotifySettingsUpdates(); })
, _selfDestruct(std::make_unique<Api::SelfDestruct>(this))
, _sensitiveContent(std::make_unique<Api::SensitiveContent>(this)) {
, _sensitiveContent(std::make_unique<Api::SensitiveContent>(this))
, _globalPrivacy(std::make_unique<Api::GlobalPrivacy>(this)) {
crl::on_main(session, [=] {
// You can't use _session->lifetime() in the constructor,
// only queued, because it is not constructed yet.
@@ -1645,7 +1647,7 @@ void ApiWrap::requestSelfParticipant(not_null<ChannelData*> channel) {
}).fail([=](const RPCError &error) {
_selfParticipantRequests.erase(channel);
if (error.type() == qstr("CHANNEL_PRIVATE")) {
channel->markForbidden();
channel->privateErrorReceived();
}
finalize(-1, 0);
}).afterDelay(kSmallDelayMs).send();
@@ -1962,6 +1964,9 @@ void ApiWrap::joinChannel(not_null<ChannelData*> channel) {
applyUpdates(result);
}).fail([=](const RPCError &error) {
if (error.type() == qstr("CHANNEL_PRIVATE")
&& channel->invitePeekExpires()) {
channel->privateErrorReceived();
} else if (error.type() == qstr("CHANNEL_PRIVATE")
|| error.type() == qstr("CHANNEL_PUBLIC_GROUP_NA")
|| error.type() == qstr("USER_BANNED_IN_CHANNEL")) {
Ui::show(Box<InformBox>(channel->isMegagroup()
@@ -4874,7 +4879,10 @@ void ApiWrap::photoUploadReady(
};
if (peer->isSelf()) {
request(MTPphotos_UploadProfilePhoto(
file
MTP_flags(MTPphotos_UploadProfilePhoto::Flag::f_file),
file,
MTPInputFile(), // video
MTPdouble() // video_start_ts
)).done([=](const MTPphotos_Photo &result) {
result.match([&](const MTPDphotos_photo &data) {
_session->data().processPhoto(data.vphoto());
@@ -4885,13 +4893,21 @@ void ApiWrap::photoUploadReady(
const auto history = _session->data().history(chat);
history->sendRequestId = request(MTPmessages_EditChatPhoto(
chat->inputChat,
MTP_inputChatUploadedPhoto(file)
MTP_inputChatUploadedPhoto(
MTP_flags(MTPDinputChatUploadedPhoto::Flag::f_file),
file,
MTPInputFile(), // video
MTPdouble()) // video_start_ts
)).done(applier).afterRequest(history->sendRequestId).send();
} else if (const auto channel = peer->asChannel()) {
const auto history = _session->data().history(channel);
history->sendRequestId = request(MTPchannels_EditPhoto(
channel->inputChannel,
MTP_inputChatUploadedPhoto(file)
MTP_inputChatUploadedPhoto(
MTP_flags(MTPDinputChatUploadedPhoto::Flag::f_file),
file,
MTPInputFile(), // video
MTPdouble()) // video_start_ts
)).done(applier).afterRequest(history->sendRequestId).send();
}
}
@@ -4902,8 +4918,8 @@ void ApiWrap::clearPeerPhoto(not_null<PhotoData*> photo) {
if (self->userpicPhotoId() == photo->id) {
request(MTPphotos_UpdateProfilePhoto(
MTP_inputPhotoEmpty()
)).done([=](const MTPUserProfilePhoto &result) {
self->setPhoto(result);
)).done([=](const MTPphotos_Photo &result) {
self->setPhoto(MTP_userProfilePhotoEmpty());
}).send();
} else if (photo->peer && photo->peer->userpicPhotoId() == photo->id) {
const auto applier = [=](const MTPUpdates &result) {
@@ -5227,6 +5243,10 @@ Api::SensitiveContent &ApiWrap::sensitiveContent() {
return *_sensitiveContent;
}
Api::GlobalPrivacy &ApiWrap::globalPrivacy() {
return *_globalPrivacy;
}
void ApiWrap::createPoll(
const PollData &data,
const SendAction &action,

View File

@@ -50,6 +50,12 @@ struct CloudPasswordState;
} // namespace Core
namespace Api {
class Updates;
class SelfDestruct;
class SensitiveContent;
class GlobalPrivacy;
namespace details {
inline QString ToString(const QString &value) {
@@ -66,8 +72,6 @@ inline QString ToString(uint64 value) {
} // namespace details
class Updates;
template <
typename ...Types,
typename = std::enable_if_t<(sizeof...(Types) > 0)>>
@@ -86,9 +90,6 @@ QString RequestKey(Types &&...values) {
return result;
}
class SelfDestruct;
class SensitiveContent;
} // namespace Api
class ApiWrap : public MTP::Sender, private base::Subscriber {
@@ -460,6 +461,7 @@ public:
[[nodiscard]] Api::SelfDestruct &selfDestruct();
[[nodiscard]] Api::SensitiveContent &sensitiveContent();
[[nodiscard]] Api::GlobalPrivacy &globalPrivacy();
void createPoll(
const PollData &data,
@@ -821,6 +823,7 @@ private:
const std::unique_ptr<Api::SelfDestruct> _selfDestruct;
const std::unique_ptr<Api::SensitiveContent> _sensitiveContent;
const std::unique_ptr<Api::GlobalPrivacy> _globalPrivacy;
base::flat_map<FullMsgId, mtpRequestId> _pollVotesRequestIds;
base::flat_map<FullMsgId, mtpRequestId> _pollCloseRequestIds;

View File
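Taken together, the new `api/api_global_privacy.cpp` / `.h` pair and the `ApiWrap::globalPrivacy()` accessor above expose the auto-archive setting as rpl producers. A minimal usage sketch, not part of the diff: the `Ui::Checkbox` widget and its `checkedChanges()` producer are assumptions used only for illustration.

// Sketch only: wiring Api::GlobalPrivacy into a settings toggle.
void SetupArchiveAndMute(
        not_null<Main::Session*> session,
        not_null<Ui::Checkbox*> checkbox) {
    const auto privacy = &session->api().globalPrivacy();
    privacy->reload();

    // Keep the checkbox in sync with the value parsed in apply().
    privacy->archiveAndMute(
    ) | rpl::start_with_next([=](bool enabled) {
        checkbox->setChecked(enabled);
    }, checkbox->lifetime());

    // Push user changes back via MTPaccount_SetGlobalPrivacySettings.
    checkbox->checkedChanges(
    ) | rpl::start_with_next([=](bool checked) {
        privacy->update(checked);
    }, checkbox->lifetime());
}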

@@ -94,7 +94,6 @@ confirmInviteTitle: FlatLabel(defaultFlatLabel) {
confirmInviteStatus: FlatLabel(boxLabel) {
align: align(center);
minWidth: 320px;
maxHeight: 20px;
textFg: windowSubTextFg;
}
confirmInviteTitleTop: 106px;

View File

@@ -861,146 +861,6 @@ void DeleteMessagesBox::deleteAndClear() {
session->data().sendHistoryChangeNotifications();
}
ConfirmInviteBox::ConfirmInviteBox(
QWidget*,
not_null<Main::Session*> session,
const MTPDchatInvite &data,
Fn<void()> submit)
: _session(session)
, _submit(std::move(submit))
, _title(this, st::confirmInviteTitle)
, _status(this, st::confirmInviteStatus)
, _participants(GetParticipants(_session, data))
, _isChannel(data.is_channel() && !data.is_megagroup()) {
const auto title = qs(data.vtitle());
const auto count = data.vparticipants_count().v;
const auto status = [&] {
return (!_participants.empty() && _participants.size() < count)
? tr::lng_group_invite_members(tr::now, lt_count, count)
: (count > 0)
? tr::lng_chat_status_members(tr::now, lt_count_decimal, count)
: _isChannel
? tr::lng_channel_status(tr::now)
: tr::lng_group_status(tr::now);
}();
_title->setText(title);
_status->setText(status);
const auto photo = _session->data().processPhoto(data.vphoto());
if (!photo->isNull()) {
_photo = photo->createMediaView();
_photo->wanted(Data::PhotoSize::Small, Data::FileOrigin());
if (!_photo->image(Data::PhotoSize::Small)) {
_session->downloaderTaskFinished(
) | rpl::start_with_next([=] {
update();
}, lifetime());
}
} else {
_photoEmpty = std::make_unique<Ui::EmptyUserpic>(
Data::PeerUserpicColor(0),
title);
}
}
auto ConfirmInviteBox::GetParticipants(
not_null<Main::Session*> session,
const MTPDchatInvite &data)
-> std::vector<Participant> {
const auto participants = data.vparticipants();
if (!participants) {
return {};
}
const auto &v = participants->v;
auto result = std::vector<Participant>();
result.reserve(v.size());
for (const auto &participant : v) {
if (const auto user = session->data().processUser(participant)) {
result.push_back(Participant{ user });
}
}
return result;
}
void ConfirmInviteBox::prepare() {
addButton(
(_isChannel
? tr::lng_profile_join_channel()
: tr::lng_profile_join_group()),
_submit);
addButton(tr::lng_cancel(), [=] { closeBox(); });
while (_participants.size() > 4) {
_participants.pop_back();
}
auto newHeight = st::confirmInviteStatusTop + _status->height() + st::boxPadding.bottom();
if (!_participants.empty()) {
int skip = (st::boxWideWidth - 4 * st::confirmInviteUserPhotoSize) / 5;
int padding = skip / 2;
_userWidth = (st::confirmInviteUserPhotoSize + 2 * padding);
int sumWidth = _participants.size() * _userWidth;
int left = (st::boxWideWidth - sumWidth) / 2;
for (const auto &participant : _participants) {
auto name = new Ui::FlatLabel(this, st::confirmInviteUserName);
name->resizeToWidth(st::confirmInviteUserPhotoSize + padding);
name->setText(participant.user->firstName.isEmpty()
? participant.user->name
: participant.user->firstName);
name->moveToLeft(left + (padding / 2), st::confirmInviteUserNameTop);
left += _userWidth;
}
newHeight += st::confirmInviteUserHeight;
}
setDimensions(st::boxWideWidth, newHeight);
}
void ConfirmInviteBox::resizeEvent(QResizeEvent *e) {
BoxContent::resizeEvent(e);
_title->move((width() - _title->width()) / 2, st::confirmInviteTitleTop);
_status->move((width() - _status->width()) / 2, st::confirmInviteStatusTop);
}
void ConfirmInviteBox::paintEvent(QPaintEvent *e) {
BoxContent::paintEvent(e);
Painter p(this);
if (_photo) {
if (const auto image = _photo->image(Data::PhotoSize::Small)) {
p.drawPixmap(
(width() - st::confirmInvitePhotoSize) / 2,
st::confirmInvitePhotoTop,
image->pixCircled(
st::confirmInvitePhotoSize,
st::confirmInvitePhotoSize));
}
} else if (_photoEmpty) {
_photoEmpty->paint(
p,
(width() - st::confirmInvitePhotoSize) / 2,
st::confirmInvitePhotoTop,
width(),
st::confirmInvitePhotoSize);
}
int sumWidth = _participants.size() * _userWidth;
int left = (width() - sumWidth) / 2;
for (auto &participant : _participants) {
participant.user->paintUserpicLeft(
p,
participant.userpic,
left + (_userWidth - st::confirmInviteUserPhotoSize) / 2,
st::confirmInviteUserPhotoTop,
width(),
st::confirmInviteUserPhotoSize);
left += _userWidth;
}
}
ConfirmInviteBox::~ConfirmInviteBox() = default;
ConfirmDontWarnBox::ConfirmDontWarnBox(
QWidget*,
rpl::producer<TextWithEntities> text,

View File

@@ -206,46 +206,6 @@ private:
};
class ConfirmInviteBox final
: public Ui::BoxContent
, private base::Subscriber {
public:
ConfirmInviteBox(
QWidget*,
not_null<Main::Session*> session,
const MTPDchatInvite &data,
Fn<void()> submit);
~ConfirmInviteBox();
protected:
void prepare() override;
void resizeEvent(QResizeEvent *e) override;
void paintEvent(QPaintEvent *e) override;
private:
struct Participant {
not_null<UserData*> user;
std::shared_ptr<Data::CloudImageView> userpic;
};
static std::vector<Participant> GetParticipants(
not_null<Main::Session*> session,
const MTPDchatInvite &data);
const not_null<Main::Session*> _session;
Fn<void()> _submit;
object_ptr<Ui::FlatLabel> _title;
object_ptr<Ui::FlatLabel> _status;
std::shared_ptr<Data::PhotoMedia> _photo;
std::unique_ptr<Ui::EmptyUserpic> _photoEmpty;
std::vector<Participant> _participants;
bool _isChannel = false;
int _userWidth = 0;
};
class ConfirmDontWarnBox : public Ui::BoxContent {
public:
ConfirmDontWarnBox(

View File

@@ -24,6 +24,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/core_settings.h"
#include "chat_helpers/emoji_suggestions_widget.h"
#include "chat_helpers/message_field.h"
#include "chat_helpers/send_context_menu.h"
#include "history/view/history_view_schedule_box.h"
#include "settings/settings_common.h"
#include "base/unique_qptr.h"
@@ -1058,19 +1059,19 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
*error &= ~Error::Solution;
}
};
const auto showError = [=](const QString &text) {
Ui::Toast::Show(text);
const auto showError = [](tr::phrase<> text) {
Ui::Toast::Show(text(tr::now));
};
const auto send = [=](Api::SendOptions sendOptions) {
collectError();
if (*error & Error::Question) {
showError(tr::lng_polls_choose_question(tr::now));
showError(tr::lng_polls_choose_question);
question->setFocus();
} else if (*error & Error::Options) {
showError(tr::lng_polls_choose_answers(tr::now));
showError(tr::lng_polls_choose_answers);
options->focusFirst();
} else if (*error & Error::Correct) {
showError(tr::lng_polls_choose_correct(tr::now));
showError(tr::lng_polls_choose_correct);
} else if (*error & Error::Solution) {
solution->showError();
} else if (!*error) {
@@ -1078,15 +1079,13 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
}
};
const auto sendSilent = [=] {
auto options = Api::SendOptions();
options.silent = true;
send(options);
send({ .silent = true });
};
const auto sendScheduled = [=] {
Ui::show(
HistoryView::PrepareScheduleBox(
this,
SendMenuType::Scheduled,
SendMenu::Type::Scheduled,
send),
Ui::LayerOption::KeepOther);
};
@@ -1101,15 +1100,22 @@ object_ptr<Ui::RpWidget> CreatePollBox::setupContent() {
FocusAtEnd(question);
}, lifetime());
const auto isNormal = (_sendType == Api::SendType::Normal);
const auto isScheduled = (_sendType == Api::SendType::Scheduled);
const auto submit = addButton(
tr::lng_polls_create_button(),
[=] { send({}); });
if (_sendType == Api::SendType::Normal) {
isNormal
? tr::lng_polls_create_button()
: tr::lng_schedule_button(),
[=] { isNormal ? send({}) : sendScheduled(); });
if (isNormal || isScheduled) {
const auto sendMenuType = [=] {
collectError();
return *error ? SendMenuType::Disabled : SendMenuType::Scheduled;
return (*error || isScheduled)
? SendMenu::Type::Disabled
: SendMenu::Type::Scheduled;
};
SetupSendMenuAndShortcuts(
SendMenu::SetupMenuAndShortcuts(
submit.data(),
sendMenuType,
sendSilent,

View File

@@ -91,25 +91,6 @@ auto ListFromMimeData(not_null<const QMimeData*> data) {
return result;
}
auto CheckMimeData(not_null<const QMimeData*> data, bool isAlbum) {
if (data->urls().size() > 1) {
return false;
} else if (data->hasImage()) {
return true;
}
if (isAlbum && data->hasUrls()) {
const auto url = data->urls().front();
if (url.isLocalFile()) {
using namespace Core;
const auto info = QFileInfo(Platform::File::UrlToLocal(url));
return IsMimeAcceptedForAlbum(MimeTypeForFile(info).name());
}
}
return true;
}
} // namespace
EditCaptionBox::EditCaptionBox(
@@ -663,7 +644,7 @@ void EditCaptionBox::prepare() {
if (action == Ui::InputField::MimeAction::Check) {
if (!data->hasText() && !_isAllowedEditMedia) {
return false;
} else if (CheckMimeData(data, _isAlbum)) {
} else if (Storage::ValidateDragData(data, _isAlbum)) {
return true;
}
return data->hasText();
@@ -766,7 +747,9 @@ void EditCaptionBox::setupEmojiPanel() {
void EditCaptionBox::setupDragArea() {
auto enterFilter = [=](not_null<const QMimeData*> data) {
return !_isAllowedEditMedia ? false : CheckMimeData(data, _isAlbum);
return !_isAllowedEditMedia
? false
: Storage::ValidateDragData(data, _isAlbum);
};
// Avoid both drag areas appearing at one time.
auto computeState = [=](const QMimeData *data) {
@@ -816,8 +799,7 @@ int EditCaptionBox::errorTopSkip() const {
void EditCaptionBox::checkStreamedIsStarted() {
if (!_streamed) {
return;
}
if (_streamed->paused()) {
} else if (_streamed->paused()) {
_streamed->resume();
}
if (!_streamed->active() && !_streamed->failed()) {

View File
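The removed local `CheckMimeData()` helper above is replaced at both call sites by `Storage::ValidateDragData(data, isAlbum)`. The storage-side function itself is outside these hunks; one plausible implementation, simply mirroring the deleted helper:

namespace Storage {

// Sketch mirroring the removed CheckMimeData(); the real definition lives in
// the storage module and is not part of these hunks.
bool ValidateDragData(not_null<const QMimeData*> data, bool isAlbum) {
    if (data->urls().size() > 1) {
        return false;
    } else if (data->hasImage()) {
        return true;
    } else if (isAlbum && data->hasUrls()) {
        const auto url = data->urls().front();
        if (url.isLocalFile()) {
            using namespace Core;
            const auto info = QFileInfo(Platform::File::UrlToLocal(url));
            return IsMimeAcceptedForAlbum(MimeTypeForFile(info).name());
        }
    }
    return true;
}

} // namespace Storage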

@@ -15,6 +15,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "main/main_session.h"
#include "mtproto/mtproto_config.h"
#include "chat_helpers/message_field.h"
#include "chat_helpers/send_context_menu.h"
#include "chat_helpers/emoji_suggestions_widget.h"
#include "chat_helpers/tabbed_panel.h"
#include "chat_helpers/tabbed_selector.h"
@@ -1666,7 +1667,7 @@ SendFilesBox::SendFilesBox(
CompressConfirm compressed,
SendLimit limit,
Api::SendType sendType,
SendMenuType sendMenuType)
SendMenu::Type sendMenuType)
: _controller(controller)
, _sendType(sendType)
, _list(std::move(list))
@@ -1836,7 +1837,7 @@ void SendFilesBox::setupShadows(
void SendFilesBox::prepare() {
_send = addButton(tr::lng_send_button(), [=] { send({}); });
if (_sendType == Api::SendType::Normal) {
SetupSendMenuAndShortcuts(
SendMenu::SetupMenuAndShortcuts(
_send,
[=] { return _sendMenuType; },
[=] { sendSilent(); },
@@ -1865,12 +1866,13 @@ void SendFilesBox::prepare() {
void SendFilesBox::setupDragArea() {
// Avoid both drag areas appearing at one time.
auto computeState = [=](const QMimeData *data) {
using DragState = Storage::MimeDataState;
const auto state = Storage::ComputeMimeDataState(data);
return (state == Storage::MimeDataState::PhotoFiles)
? Storage::MimeDataState::Image
: (state == Storage::MimeDataState::Files)
// Temporary enable drag'n'drop only for images. TODO.
? Storage::MimeDataState::None
return (state == DragState::PhotoFiles)
? DragState::Image
: (state == DragState::Files
&& !Storage::ValidateDragData(data, true))
? DragState::None
: state;
};
const auto areas = DragArea::SetupDragAreaToContainer(
@@ -2416,7 +2418,7 @@ void SendFilesBox::sendSilent() {
void SendFilesBox::sendScheduled() {
const auto type = (_sendType == Api::SendType::ScheduledToUser)
? SendMenuType::ScheduledToUser
? SendMenu::Type::ScheduledToUser
: _sendMenuType;
const auto callback = [=](Api::SendOptions options) { send(options); };
Ui::show(

View File

@@ -40,7 +40,9 @@ namespace Window {
class SessionController;
} // namespace Window
enum class SendMenuType;
namespace SendMenu {
enum class Type;
} // namespace SendMenu
enum class SendFilesWay {
Album,
@@ -62,7 +64,7 @@ public:
CompressConfirm compressed,
SendLimit limit,
Api::SendType sendType,
SendMenuType sendMenuType);
SendMenu::Type sendMenuType);
void setConfirmedCallback(
Fn<void(
@@ -142,7 +144,7 @@ private:
CompressConfirm _compressConfirmInitial = CompressConfirm::None;
CompressConfirm _compressConfirm = CompressConfirm::None;
SendLimit _sendLimit = SendLimit::Many;
SendMenuType _sendMenuType = SendMenuType();
SendMenu::Type _sendMenuType = SendMenu::Type();
Fn<void(
Storage::PreparedList &&list,

View File

@@ -23,6 +23,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/wrap/slide_wrap.h"
#include "ui/text_options.h"
#include "chat_helpers/message_field.h"
#include "chat_helpers/send_context_menu.h"
#include "history/history.h"
#include "history/history_message.h"
#include "history/view/history_view_schedule_box.h"
@@ -408,13 +409,13 @@ void ShareBox::keyPressEvent(QKeyEvent *e) {
}
}
SendMenuType ShareBox::sendMenuType() const {
SendMenu::Type ShareBox::sendMenuType() const {
const auto selected = _inner->selected();
return ranges::all_of(selected, HistoryView::CanScheduleUntilOnline)
? SendMenuType::ScheduledToUser
? SendMenu::Type::ScheduledToUser
: (selected.size() == 1 && selected.front()->isSelf())
? SendMenuType::Reminder
: SendMenuType::Scheduled;
? SendMenu::Type::Reminder
: SendMenu::Type::Scheduled;
}
void ShareBox::createButtons() {
@@ -423,7 +424,7 @@ void ShareBox::createButtons() {
const auto send = addButton(tr::lng_share_confirm(), [=] {
submit({});
});
SetupSendMenuAndShortcuts(
SendMenu::SetupMenuAndShortcuts(
send,
[=] { return sendMenuType(); },
[=] { submitSilent(); },

View File

@@ -14,7 +14,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/effects/round_checkbox.h"
#include "mtproto/sender.h"
enum class SendMenuType;
namespace SendMenu {
enum class Type;
} // namespace SendMenu
namespace Window {
class SessionNavigation;
@@ -82,7 +84,7 @@ private:
void copyLink();
bool searchByUsername(bool useCache = false);
SendMenuType sendMenuType() const;
SendMenu::Type sendMenuType() const;
void scrollTo(Ui::ScrollToRequest request);
void needSearchByUsername();

View File
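Across the box files above, the free `enum class SendMenuType` is replaced by `SendMenu::Type` from the new `chat_helpers/send_context_menu.h`. From the values referenced in these hunks the enum looks roughly like this; a sketch only, the actual header may declare additional members:

// Reconstructed from the values used in the hunks above.
namespace SendMenu {

enum class Type {
    Disabled,        // menu hidden, e.g. while the poll still has errors
    Scheduled,       // "Schedule message"
    ScheduledToUser, // "Send when online" is available for this peer
    Reminder,        // scheduling into Saved Messages
};

} // namespace SendMenu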

@@ -14,12 +14,12 @@ CallSignalBars {
width: pixels;
radius: pixels;
skip: pixels;
min: pixels;
max: pixels;
color: color;
inactiveOpacity: double;
}
callWidth: 300px;
callHeight: 470px;
callRadius: 6px;
callShadow: Shadow {
left: icon {{ "call_shadow_left", windowShadowFg }};
@@ -34,16 +34,83 @@ callShadow: Shadow {
fallback: windowShadowFgFallback;
}
callButton: IconButton {
width: 72px;
height: 72px;
callWidthMin: 300px;
callHeightMin: 440px;
callWidth: 720px;
callHeight: 540px;
iconPosition: point(-1px, -1px);
callBottomControlsHeight: 87px;
CallBodyLayout {
height: pixels;
photoTop: pixels;
photoSize: pixels;
nameTop: pixels;
statusTop: pixels;
muteStroke: pixels;
muteSize: pixels;
mutePosition: point;
}
callBodyLayout: CallBodyLayout {
height: 284px;
photoTop: 21px;
photoSize: 160px;
nameTop: 221px;
statusTop: 254px;
muteStroke: 3px;
muteSize: 36px;
mutePosition: point(142px, 135px);
}
callBodyWithPreview: CallBodyLayout {
height: 185px;
photoTop: 21px;
photoSize: 100px;
nameTop: 132px;
statusTop: 163px;
muteStroke: 3px;
muteSize: 0px;
mutePosition: point(90px, 84px);
}
callMutedPeerIcon: icon {{ "calls_mute_userpic", callIconFg }};
callOutgoingPreviewMin: size(360px, 120px);
callOutgoingPreview: size(540px, 180px); // default, for height == callHeight.
callOutgoingPreviewMax: size(1620px, 540px);
callOutgoingDefaultSize: size(160px, 110px);
callInnerPadding: 12px;
callFingerprintPadding: margins(10px, 4px, 8px, 5px);
callFingerprintSkip: 4px;
callFingerprintSignalBarsSkip: 2px;
callSignalBarsPadding: margins(8px, 9px, 11px, 5px);
callFingerprintTop: 8px;
callFingerprintBottom: -16px;
callTooltipMutedIcon: icon{{ "calls_mute_tooltip", videoPlayIconFg }};
callTooltipMutedIconPosition: point(10px, 5px);
callTooltipPadding: margins(41px, 7px, 15px, 8px);
callButton: IconButton {
width: 68px;
height: 79px;
iconPosition: point(-1px, 16px);
rippleAreaPosition: point(12px, 12px);
rippleAreaSize: 48px;
rippleAreaSize: 44px;
ripple: defaultRippleAnimation;
}
callButtonLabel: FlatLabel(defaultFlatLabel) {
textFg: callNameFg;
style: TextStyle(defaultTextStyle) {
font: font(11px);
linkFont: font(11px);
linkFontOver: font(11px underline);
}
}
callAnswer: CallButton {
button: IconButton(callButton) {
@@ -56,6 +123,7 @@ callAnswer: CallButton {
angle: 135.;
outerRadius: 12px;
outerBg: callAnswerBgOuter;
label: callButtonLabel;
}
callHangup: CallButton {
button: IconButton(callButton) {
@@ -66,30 +134,57 @@ callHangup: CallButton {
}
bg: callHangupBg;
outerBg: callHangupBg;
label: callButtonLabel;
}
callCancel: CallButton {
button: IconButton(callButton) {
icon: icon {{ "box_button_close", callCancelFg }};
icon: icon {{ "call_cancel", callIconFgActive }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callCancelRipple;
color: callIconActiveRipple;
}
}
bg: callCancelBg;
outerBg: callCancelBg;
bg: callIconBgActive;
outerBg: callIconBgActive;
label: callButtonLabel;
}
callMuteToggle: IconButton(callButton) {
icon: icon {{ "call_record_active", callIconFg }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callMuteRipple;
callMicrophoneMute: CallButton {
button: IconButton(callButton) {
icon: icon {{ "call_record_active", callIconFg }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callMuteRipple;
}
}
bg: callIconBg;
outerBg: callMuteRipple;
label: callButtonLabel;
}
callMicrophoneUnmute: CallButton(callMicrophoneMute) {
button: IconButton(callButton) {
icon: icon {{ "call_record_muted", callIconFgActive }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callIconActiveRipple;
}
}
bg: callIconBgActive;
}
callCameraMute: CallButton(callMicrophoneMute) {
button: IconButton(callButton) {
icon: icon {{ "call_camera_active", callIconFg }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callMuteRipple;
}
}
}
callUnmuteIcon: icon {{ "call_record_muted", callIconFg }};
callCameraUnmute: CallButton(callMicrophoneUnmute) {
button: IconButton(callButton) {
icon: icon {{ "call_camera_muted", callIconFgActive }};
ripple: RippleAnimation(defaultRippleAnimation) {
color: callIconActiveRipple;
}
}
}
callBottomShadowSize: 124px;
callControlsTop: 80px;
callControlsSkip: 0px;
callMuteRight: 8px;
callNameTop: 15px;
callName: FlatLabel(defaultFlatLabel) {
minWidth: 260px;
maxHeight: 30px;
@@ -101,7 +196,6 @@ callName: FlatLabel(defaultFlatLabel) {
linkFontOver: font(21px semibold underline);
}
}
callStatusTop: 46px;
callStatus: FlatLabel(defaultFlatLabel) {
minWidth: 260px;
maxHeight: 20px;
@@ -113,10 +207,16 @@ callStatus: FlatLabel(defaultFlatLabel) {
linkFontOver: font(14px underline);
}
}
callFingerprintPadding: margins(9px, 4px, 9px, 5px);
callFingerprintSkip: 3px;
callFingerprintBottom: 8px;
callRemoteAudioMute: FlatLabel(callStatus) {
minWidth: 0px;
textFg: videoPlayIconFg;
style: TextStyle(defaultTextStyle) {
font: font(12px);
linkFont: font(12px);
linkFontOver: font(12px underline);
}
}
callRemoteAudioMuteSkip: 12px;
callBarHeight: 38px;
callBarMuteToggle: IconButton {
@@ -124,7 +224,7 @@ callBarMuteToggle: IconButton {
height: 38px;
icon: icon {{ "call_record_active", callBarFg }};
iconPosition: point(9px, 8px);
iconPosition: point(3px, 2px);
ripple: RippleAnimation(defaultRippleAnimation) {
color: callBarMuteRipple;
@@ -137,7 +237,7 @@ callBarRightSkip: 12px;
callBarSkip: 10px;
callBarHangup: IconButton(callBarMuteToggle) {
icon: icon {{ "call_discard", callBarFg }};
iconPosition: point(9px, 11px);
iconPosition: point(3px, 1px);
}
callBarLabel: LabelSimple(defaultLabelSimple) {
font: semiboldFont;
@@ -200,14 +300,93 @@ callDebugLabel: FlatLabel(defaultFlatLabel) {
callPanelDuration: 150;
callPanelSignalBars: CallSignalBars {
width: 3px;
width: 2px;
radius: 1px;
skip: 1px;
skip: 2px;
min: 4px;
max: 10px;
color: callNameFg;
inactiveOpacity: 0.5;
}
callBarSignalBars: CallSignalBars(callPanelSignalBars) {
width: 3px;
skip: 1px;
min: 3px;
max: 12px;
color: callBarFg;
}
callSignalMargin: 8px;
callSignalPadding: 4px;
callTitleButton: IconButton {
width: 34px;
height: 30px;
iconPosition: point(0px, 0px);
}
callTitleMinimizeIcon: icon {
{ "calls_minimize_shadow", windowShadowFg },
{ "calls_minimize_main", callNameFg },
};
callTitleMinimizeIconOver: icon {
{ size(34px, 30px), callBgButton },
{ size(34px, 30px), callMuteRipple },
{ "calls_minimize_shadow", windowShadowFg },
{ "calls_minimize_main", callNameFg },
};
callTitleMaximizeIcon: icon {
{ "calls_maximize_shadow", windowShadowFg },
{ "calls_maximize_main", callNameFg },
};
callTitleMaximizeIconOver: icon {
{ size(34px, 30px), callBgButton },
{ size(34px, 30px), callMuteRipple },
{ "calls_maximize_shadow", windowShadowFg },
{ "calls_maximize_main", callNameFg },
};
callTitleRestoreIcon: icon {
{ "calls_restore_shadow", windowShadowFg },
{ "calls_restore_main", callNameFg },
};
callTitleRestoreIconOver: icon {
{ size(34px, 30px), callBgButton },
{ size(34px, 30px), callMuteRipple },
{ "calls_restore_shadow", windowShadowFg },
{ "calls_restore_main", callNameFg },
};
callTitleCloseIcon: icon {
{ "calls_close_shadow", windowShadowFg },
{ "calls_close_main", callNameFg },
};
callTitleCloseIconOver: icon {
{ size(34px, 30px), titleButtonCloseBgOver },
{ "calls_close_shadow", windowShadowFg },
{ "calls_close_main", titleButtonCloseFgOver },
};
callTitle: WindowTitle(defaultWindowTitle) {
height: 0px;
bg: callBgOpaque;
bgActive: callBgOpaque;
fg: transparent;
fgActive: transparent;
minimize: IconButton(callTitleButton) {
icon: callTitleMinimizeIcon;
iconOver: callTitleMinimizeIconOver;
}
minimizeIconActive: callTitleMinimizeIcon;
minimizeIconActiveOver: callTitleMinimizeIconOver;
maximize: IconButton(callTitleButton) {
icon: callTitleMaximizeIcon;
iconOver: callTitleMaximizeIconOver;
}
maximizeIconActive: callTitleMaximizeIcon;
maximizeIconActiveOver: callTitleMaximizeIconOver;
restoreIcon: callTitleRestoreIcon;
restoreIconOver: callTitleRestoreIconOver;
restoreIconActive: callTitleRestoreIcon;
restoreIconActiveOver: callTitleRestoreIconOver;
close: IconButton(callTitleButton) {
icon: callTitleCloseIcon;
iconOver: callTitleCloseIconOver;
}
closeIconActive: callTitleCloseIcon;
closeIconActiveOver: callTitleCloseIconOver;
}
callTitleShadow: icon {{ "calls_shadow_controls", windowShadowFg }};

View File
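The new `CallBodyLayout` blocks above (`callBodyLayout` for a plain audio call, `callBodyWithPreview` when the outgoing video preview is shown) drive the redesigned call panel body. A hypothetical layout sketch; `updateBodyGeometry`, `_bodySt`, `_userpic`, `_name` and `_status` are placeholder names, not the actual panel code:

// Hypothetical sketch: positioning body widgets from a CallBodyLayout.
void Panel::updateBodyGeometry(int bodyTop, int innerWidth) {
    const auto &st = *_bodySt; // &st::callBodyLayout or &st::callBodyWithPreview
    _userpic->setGeometry(
        (innerWidth - st.photoSize) / 2,
        bodyTop + st.photoTop,
        st.photoSize,
        st.photoSize);
    _name->moveToLeft(
        (innerWidth - _name->width()) / 2,
        bodyTop + st.nameTop);
    _status->moveToLeft(
        (innerWidth - _status->width()) / 2,
        bodyTop + st.statusTop);
}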

@@ -317,7 +317,7 @@ void BoxController::rowActionClicked(not_null<PeerListRow*> row) {
auto user = row->peer()->asUser();
Assert(user != nullptr);
Core::App().calls().startOutgoingCall(user);
Core::App().calls().startOutgoingCall(user, false);
}
void BoxController::receivedCalls(const QVector<MTPMessage> &result) {

View File
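The extra `false` argument above lines up with the `bool video` parameter added to the `Call` constructor further down in this diff, keeping the Recent Calls action audio-only. Presumably a video call from the same place would be started as:

// Assumed counterpart of the call above, using the new video flag.
Core::App().calls().startOutgoingCall(user, /*video=*/true);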

@@ -8,6 +8,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "calls/calls_call.h"
#include "main/main_session.h"
#include "main/main_account.h"
#include "main/main_app_config.h"
#include "apiwrap.h"
#include "lang/lang_keys.h"
#include "boxes/confirm_box.h"
@@ -21,38 +23,96 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio_track.h"
#include "base/platform/base_platform_info.h"
#include "calls/calls_panel.h"
#include "calls/calls_controller.h"
#include "webrtc/webrtc_video_track.h"
#include "data/data_user.h"
#include "data/data_session.h"
#include "facades.h"
#include <tgcalls/Instance.h>
#include <tgcalls/VideoCaptureInterface.h>
namespace tgcalls {
class InstanceImpl;
class InstanceImplLegacy;
class InstanceImplReference;
void SetLegacyGlobalServerConfig(const std::string &serverConfig);
} // namespace tgcalls
namespace Calls {
namespace {
constexpr auto kMinLayer = 65;
constexpr auto kHangupTimeoutMs = 5000;
constexpr auto kSha256Size = 32;
const auto kDefaultVersion = "2.4.4"_q;
#ifndef DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
const auto RegisterTag = tgcalls::Register<tgcalls::InstanceImpl>();
//const auto RegisterTagReference = tgcalls::Register<tgcalls::InstanceImplReference>();
#endif // DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
const auto RegisterTagLegacy = tgcalls::Register<tgcalls::InstanceImplLegacy>();
void AppendEndpoint(
std::vector<TgVoipEndpoint> &list,
std::vector<tgcalls::Endpoint> &list,
const MTPPhoneConnection &connection) {
connection.match([&](const MTPDphoneConnection &data) {
if (data.vpeer_tag().v.length() != 16) {
return;
}
auto endpoint = TgVoipEndpoint{
tgcalls::Endpoint endpoint = {
.endpointId = (int64_t)data.vid().v,
.host = TgVoipEdpointHost{
.host = tgcalls::EndpointHost{
.ipv4 = data.vip().v.toStdString(),
.ipv6 = data.vipv6().v.toStdString() },
.port = (uint16_t)data.vport().v,
.type = TgVoipEndpointType::UdpRelay
.type = tgcalls::EndpointType::UdpRelay,
};
const auto tag = data.vpeer_tag().v;
if (tag.size() >= 16) {
memcpy(endpoint.peerTag, tag.data(), 16);
}
list.push_back(std::move(endpoint));
}, [&](const MTPDphoneConnectionWebrtc &data) {
});
}
void AppendServer(
std::vector<tgcalls::RtcServer> &list,
const MTPPhoneConnection &connection) {
connection.match([&](const MTPDphoneConnection &data) {
}, [&](const MTPDphoneConnectionWebrtc &data) {
const auto host = qs(data.vip());
const auto hostv6 = qs(data.vipv6());
const auto port = uint16_t(data.vport().v);
if (data.is_stun()) {
const auto pushStun = [&](const QString &host) {
if (host.isEmpty()) {
return;
}
list.push_back(tgcalls::RtcServer{
.host = host.toStdString(),
.port = port,
.isTurn = false
});
};
pushStun(host);
pushStun(hostv6);
}
const auto username = qs(data.vusername());
const auto password = qs(data.vpassword());
if (data.is_turn() && !username.isEmpty() && !password.isEmpty()) {
const auto pushTurn = [&](const QString &host) {
list.push_back(tgcalls::RtcServer{
.host = host.toStdString(),
.port = port,
.login = username.toStdString(),
.password = password.toStdString(),
.isTurn = true,
});
};
pushTurn(host);
pushTurn(hostv6);
}
});
}
@@ -71,10 +131,6 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
| (gsl::to_integer<uint64>(hash[12]));
}
[[nodiscard]] std::vector<std::string> CollectVersions() {
return { TgVoip::getVersion() };
}
[[nodiscard]] QVector<MTPstring> WrapVersions(
const std::vector<std::string> &data) {
auto result = QVector<MTPstring>();
@@ -86,28 +142,35 @@ uint64 ComputeFingerprint(bytes::const_span authKey) {
}
[[nodiscard]] QVector<MTPstring> CollectVersionsForApi() {
return WrapVersions(CollectVersions());
return WrapVersions(tgcalls::Meta::Versions() | ranges::action::reverse);
}
[[nodiscard]] Webrtc::VideoState StartVideoState(bool enabled) {
using State = Webrtc::VideoState;
return enabled ? State::Active : State::Inactive;
}
} // namespace
Call::Delegate::~Delegate() = default;
Call::Call(
not_null<Delegate*> delegate,
not_null<UserData*> user,
Type type)
Type type,
bool video)
: _delegate(delegate)
, _user(user)
, _api(&_user->session().mtp())
, _type(type) {
_discardByTimeoutTimer.setCallback([this] { hangup(); });
, _type(type)
, _videoIncoming(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video)))
, _videoOutgoing(std::make_unique<Webrtc::VideoTrack>(StartVideoState(video))) {
_discardByTimeoutTimer.setCallback([=] { hangup(); });
if (_type == Type::Outgoing) {
setState(State::Requesting);
} else {
startWaitingTrack();
}
setupOutgoingVideo();
}
void Call::generateModExpFirst(bytes::const_span randomSeed) {
@@ -161,8 +224,11 @@ void Call::startOutgoing() {
Expects(_state.current() == State::Requesting);
Expects(_gaHash.size() == kSha256Size);
const auto flags = _videoCapture
? MTPphone_RequestCall::Flag::f_video
: MTPphone_RequestCall::Flag(0);
_api.request(MTPphone_RequestCall(
MTP_flags(0),
MTP_flags(flags),
_user->inputUser,
MTP_int(rand_value<int32>()),
MTP_bytes(_gaHash),
@@ -170,7 +236,7 @@ void Call::startOutgoing() {
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
MTP_int(kMinLayer),
MTP_int(TgVoip::getConnectionMaxLayer()),
MTP_int(tgcalls::Meta::MaxLayer()),
MTP_vector(CollectVersionsForApi()))
)).done([=](const MTPphone_PhoneCall &result) {
Expects(result.type() == mtpc_phone_phoneCall);
@@ -222,7 +288,7 @@ void Call::startIncoming() {
}
void Call::answer() {
_delegate->requestMicrophonePermissionOrFail(crl::guard(this, [=] {
_delegate->requestPermissionsOrFail(crl::guard(this, [=] {
actuallyAnswer();
}));
}
@@ -251,10 +317,11 @@ void Call::actuallyAnswer() {
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
MTP_int(kMinLayer),
MTP_int(TgVoip::getConnectionMaxLayer()),
MTP_int(tgcalls::Meta::MaxLayer()),
MTP_vector(CollectVersionsForApi()))
)).done([=](const MTPphone_PhoneCall &result) {
Expects(result.type() == mtpc_phone_phoneCall);
auto &call = result.c_phone_phoneCall();
_user->session().data().processUsers(call.vusers());
if (call.vphone_call().type() != mtpc_phoneCallWaiting) {
@@ -270,12 +337,46 @@ void Call::actuallyAnswer() {
}).send();
}
void Call::setMute(bool mute) {
_mute = mute;
if (_controller) {
_controller->setMuteMicrophone(_mute);
void Call::setMuted(bool mute) {
_muted = mute;
if (_instance) {
_instance->setMuteMicrophone(mute);
}
_muteChanged.notify(_mute);
}
void Call::setupOutgoingVideo() {
const auto started = _videoOutgoing->state();
_videoOutgoing->stateValue(
) | rpl::start_with_next([=](Webrtc::VideoState state) {
if (_state.current() != State::Established
&& state != started
&& !_videoCapture) {
_videoOutgoing->setState(started);
} else if (state != Webrtc::VideoState::Inactive) {
// Paused not supported right now.
#ifndef DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
Assert(state == Webrtc::VideoState::Active);
if (!_videoCapture) {
_videoCapture = tgcalls::VideoCaptureInterface::Create();
_videoCapture->setOutput(_videoOutgoing->sink());
}
if (_instance) {
_instance->setVideoCapture(_videoCapture);
}
_videoCapture->setState(tgcalls::VideoState::Active);
#endif // DESKTOP_APP_DISABLE_WEBRTC_INTEGRATION
} else if (_videoCapture) {
_videoCapture->setState(tgcalls::VideoState::Inactive);
}
}, _lifetime);
}
not_null<Webrtc::VideoTrack*> Call::videoIncoming() const {
return _videoIncoming.get();
}
not_null<Webrtc::VideoTrack*> Call::videoOutgoing() const {
return _videoOutgoing.get();
}
crl::time Call::getDurationMs() const {
@@ -299,7 +400,7 @@ void Call::redial() {
if (_state.current() != State::Busy) {
return;
}
Assert(_controller == nullptr);
Assert(_instance == nullptr);
_type = Type::Outgoing;
setState(State::Requesting);
_answerAfterDhConfigReceived = false;
@@ -308,7 +409,7 @@ void Call::redial() {
}
QString Call::getDebugLog() const {
return QString::fromStdString(_controller->getDebugInfo());
return QString::fromStdString(_instance->getDebugInfo());
}
void Call::startWaitingTrack() {
@@ -322,6 +423,21 @@ void Call::startWaitingTrack() {
_waitingTrack->playInLoop();
}
void Call::sendSignalingData(const QByteArray &data) {
_api.request(MTPphone_SendSignalingData(
MTP_inputPhoneCall(
MTP_long(_id),
MTP_long(_accessHash)),
MTP_bytes(data)
)).done([=](const MTPBool &result) {
if (!mtpIsTrue(result)) {
finish(FinishType::Failed);
}
}).fail([=](const RPCError &error) {
handleRequestError(error);
}).send();
}
float64 Call::getWaitingSoundPeakValue() const {
if (_waitingTrack) {
auto when = crl::now() + kSoundSampleMs / 4;
@@ -360,6 +476,7 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
finish(FinishType::Failed);
return true;
}
_id = data.vid().v;
_accessHash = data.vaccess_hash().v;
auto gaHashBytes = bytes::make_span(data.vg_a_hash().v);
@@ -404,7 +521,7 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
}
if (_type == Type::Incoming
&& _state.current() == State::ExchangingKeys
&& !_controller) {
&& !_instance) {
startConfirmedCall(data);
}
} return true;
@@ -415,8 +532,8 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
return false;
}
if (data.is_need_debug()) {
auto debugLog = _controller
? _controller->getDebugInfo()
auto debugLog = _instance
? _instance->getDebugInfo()
: std::string();
if (!debugLog.empty()) {
user()->session().api().request(MTPphone_SaveCallDebug(
@@ -462,11 +579,49 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
Unexpected("phoneCall type inside an existing call handleUpdate()");
}
void Call::updateRemoteMediaState(
tgcalls::AudioState audio,
tgcalls::VideoState video) {
_remoteAudioState = [&] {
using From = tgcalls::AudioState;
using To = RemoteAudioState;
switch (audio) {
case From::Active: return To::Active;
case From::Muted: return To::Muted;
}
Unexpected("Audio state in remoteMediaStateUpdated.");
}();
_videoIncoming->setState([&] {
using From = tgcalls::VideoState;
using To = Webrtc::VideoState;
switch (video) {
case From::Inactive: return To::Inactive;
case From::Paused: return To::Paused;
case From::Active: return To::Active;
}
Unexpected("Video state in remoteMediaStateUpdated.");
}());
}
bool Call::handleSignalingData(
const MTPDupdatePhoneCallSignalingData &data) {
if (data.vphone_call_id().v != _id || !_instance) {
return false;
}
auto prepared = ranges::view::all(
data.vdata().v
) | ranges::view::transform([](char byte) {
return static_cast<uint8_t>(byte);
}) | ranges::to_vector;
_instance->receiveSignalingData(std::move(prepared));
return true;
}
void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
Expects(_type == Type::Outgoing);
if (_state.current() == State::ExchangingKeys
|| _controller) {
|| _instance) {
LOG(("Call Warning: Unexpected confirmAcceptedCall."));
return;
}
@@ -494,9 +649,9 @@ void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
MTP_flags(MTPDphoneCallProtocol::Flag::f_udp_p2p
| MTPDphoneCallProtocol::Flag::f_udp_reflector),
MTP_int(kMinLayer),
MTP_int(TgVoip::getConnectionMaxLayer()),
MTP_int(tgcalls::Meta::MaxLayer()),
MTP_vector(CollectVersionsForApi()))
)).done([this](const MTPphone_PhoneCall &result) {
)).done([=](const MTPphone_PhoneCall &result) {
Expects(result.type() == mtpc_phone_phoneCall);
auto &call = result.c_phone_phoneCall();
@@ -508,7 +663,7 @@ void Call::confirmAcceptedCall(const MTPDphoneCallAccepted &call) {
}
createAndStartController(call.vphone_call().c_phoneCall());
}).fail([this](const RPCError &error) {
}).fail([=](const RPCError &error) {
handleRequestError(error);
}).send();
}
@@ -539,83 +694,117 @@ void Call::startConfirmedCall(const MTPDphoneCall &call) {
void Call::createAndStartController(const MTPDphoneCall &call) {
_discardByTimeoutTimer.cancel();
if (!checkCallFields(call)) {
if (!checkCallFields(call) || _authKey.size() != 256) {
return;
}
const auto &protocol = call.vprotocol().c_phoneCallProtocol();
const auto &serverConfig = _user->session().serverConfig();
TgVoipConfig config;
config.dataSaving = TgVoipDataSaving::Never;
config.enableAEC = !Platform::IsMac10_7OrGreater();
config.enableNS = true;
config.enableAGC = true;
config.enableVolumeControl = true;
config.initializationTimeout = serverConfig.callConnectTimeoutMs / 1000.;
config.receiveTimeout = serverConfig.callPacketTimeoutMs / 1000.;
config.enableP2P = call.is_p2p_allowed();
config.maxApiLayer = protocol.vmax_layer().v;
auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
memcpy(encryptionKeyValue->data(), _authKey.data(), 256);
const auto weak = base::make_weak(this);
tgcalls::Descriptor descriptor = {
.config = tgcalls::Config{
.initializationTimeout = serverConfig.callConnectTimeoutMs / 1000.,
.receiveTimeout = serverConfig.callPacketTimeoutMs / 1000.,
.dataSaving = tgcalls::DataSaving::Never,
.enableP2P = call.is_p2p_allowed(),
.enableAEC = !Platform::IsMac10_7OrGreater(),
.enableNS = true,
.enableAGC = true,
.enableVolumeControl = true,
.maxApiLayer = protocol.vmax_layer().v,
},
.encryptionKey = tgcalls::EncryptionKey(
std::move(encryptionKeyValue),
(_type == Type::Outgoing)),
.videoCapture = _videoCapture,
.stateUpdated = [=](tgcalls::State state) {
crl::on_main(weak, [=] {
handleControllerStateChange(state);
});
},
.signalBarsUpdated = [=](int count) {
crl::on_main(weak, [=] {
handleControllerBarCountChange(count);
});
},
.remoteMediaStateUpdated = [=](tgcalls::AudioState audio, tgcalls::VideoState video) {
crl::on_main(weak, [=] {
updateRemoteMediaState(audio, video);
});
},
.signalingDataEmitted = [=](const std::vector<uint8_t> &data) {
const auto bytes = QByteArray(
reinterpret_cast<const char*>(data.data()),
data.size());
crl::on_main(weak, [=] {
sendSignalingData(bytes);
});
},
};
if (Logs::DebugEnabled()) {
auto callLogFolder = cWorkingDir() + qsl("DebugLogs");
auto callLogPath = callLogFolder + qsl("/last_call_log.txt");
auto callLogNative = QDir::toNativeSeparators(callLogPath);
#ifdef Q_OS_WIN
config.logPath = callLogNative.toStdWString();
descriptor.config.logPath = callLogNative.toStdWString();
#else // Q_OS_WIN
const auto callLogUtf = QFile::encodeName(callLogNative);
config.logPath.resize(callLogUtf.size());
ranges::copy(callLogUtf, config.logPath.begin());
descriptor.config.logPath.resize(callLogUtf.size());
ranges::copy(callLogUtf, descriptor.config.logPath.begin());
#endif // Q_OS_WIN
QFile(callLogPath).remove();
QDir().mkpath(callLogFolder);
}
auto endpoints = std::vector<TgVoipEndpoint>();
for (const auto &connection : call.vconnections().v) {
AppendEndpoint(endpoints, connection);
AppendEndpoint(descriptor.endpoints, connection);
}
for (const auto &connection : call.vconnections().v) {
AppendServer(descriptor.rtcServers, connection);
}
auto proxy = TgVoipProxy();
if (Global::UseProxyForCalls()
&& (Global::ProxySettings() == MTP::ProxyData::Settings::Enabled)) {
const auto &selected = Global::SelectedProxy();
if (selected.supportsCalls()) {
if (selected.supportsCalls() && !selected.host.isEmpty()) {
Assert(selected.type == MTP::ProxyData::Type::Socks5);
proxy.host = selected.host.toStdString();
proxy.port = selected.port;
proxy.login = selected.user.toStdString();
proxy.password = selected.password.toStdString();
descriptor.proxy = std::make_unique<tgcalls::Proxy>();
descriptor.proxy->host = selected.host.toStdString();
descriptor.proxy->port = selected.port;
descriptor.proxy->login = selected.user.toStdString();
descriptor.proxy->password = selected.password.toStdString();
}
}
auto encryptionKey = TgVoipEncryptionKey();
encryptionKey.isOutgoing = (_type == Type::Outgoing);
encryptionKey.value = ranges::view::all(
_authKey
) | ranges::view::transform([](bytes::type byte) {
return static_cast<uint8_t>(byte);
}) | ranges::to_vector;
const auto version = call.vprotocol().match([&](
const MTPDphoneCallProtocol &data) {
return data.vlibrary_versions().v;
}).value(0, MTP_bytes(kDefaultVersion)).v;
_controller = MakeController(
"2.4.4",
config,
TgVoipPersistentState(),
endpoints,
proxy.host.empty() ? nullptr : &proxy,
TgVoipNetworkType::Unknown,
encryptionKey);
const auto raw = _controller.get();
raw->setOnStateUpdated([=](TgVoipState state) {
handleControllerStateChange(raw, state);
});
raw->setOnSignalBarsUpdated([=](int count) {
handleControllerBarCountChange(count);
});
if (_mute) {
raw->setMuteMicrophone(_mute);
LOG(("Call Info: Creating instance with version '%1', allowP2P: %2"
).arg(QString::fromUtf8(version)
).arg(Logs::b(descriptor.config.enableP2P)));
_instance = tgcalls::Meta::Create(
version.toStdString(),
std::move(descriptor));
if (!_instance) {
LOG(("Call Error: Wrong library version: %1."
).arg(QString::fromUtf8(version)));
finish(FinishType::Failed);
return;
}
const auto raw = _instance.get();
if (_muted.current()) {
raw->setMuteMicrophone(_muted.current());
}
raw->setIncomingVideoOutput(_videoIncoming->sink());
const auto &settings = Core::App().settings();
raw->setAudioOutputDevice(
settings.callOutputDeviceID().toStdString());
@@ -626,32 +815,27 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
raw->setAudioOutputDuckingEnabled(settings.callAudioDuckingEnabled());
}
void Call::handleControllerStateChange(
not_null<Controller*> controller,
TgVoipState state) {
// NB! Can be called from an arbitrary thread!
// This can be called from ~VoIPController()!
void Call::handleControllerStateChange(tgcalls::State state) {
switch (state) {
case TgVoipState::WaitInit: {
case tgcalls::State::WaitInit: {
DEBUG_LOG(("Call Info: State changed to WaitingInit."));
setStateQueued(State::WaitingInit);
setState(State::WaitingInit);
} break;
case TgVoipState::WaitInitAck: {
case tgcalls::State::WaitInitAck: {
DEBUG_LOG(("Call Info: State changed to WaitingInitAck."));
setStateQueued(State::WaitingInitAck);
setState(State::WaitingInitAck);
} break;
case TgVoipState::Established: {
case tgcalls::State::Established: {
DEBUG_LOG(("Call Info: State changed to Established."));
setStateQueued(State::Established);
setState(State::Established);
} break;
case TgVoipState::Failed: {
auto error = QString::fromStdString(controller->getLastError());
case tgcalls::State::Failed: {
auto error = QString::fromStdString(_instance->getLastError());
LOG(("Call Info: State changed to Failed, error: %1.").arg(error));
setFailedQueued(error);
handleControllerError(error);
} break;
default: LOG(("Call Error: Unexpected state in handleStateChange: %1"
@@ -660,19 +844,11 @@ void Call::handleControllerStateChange(
}
void Call::handleControllerBarCountChange(int count) {
// NB! Can be called from an arbitrary thread!
// This can be called from ~VoIPController()!
crl::on_main(this, [=] {
setSignalBarCount(count);
});
setSignalBarCount(count);
}
void Call::setSignalBarCount(int count) {
if (_signalBarCount != count) {
_signalBarCount = count;
_signalBarCountChanged.notify(count);
}
_signalBarCount = count;
}
template <typename T>
@@ -766,28 +942,28 @@ void Call::setState(State state) {
}
void Call::setCurrentAudioDevice(bool input, std::string deviceID) {
if (_controller) {
if (_instance) {
if (input) {
_controller->setAudioInputDevice(deviceID);
_instance->setAudioInputDevice(deviceID);
} else {
_controller->setAudioOutputDevice(deviceID);
_instance->setAudioOutputDevice(deviceID);
}
}
}
void Call::setAudioVolume(bool input, float level) {
if (_controller) {
if (_instance) {
if (input) {
_controller->setInputVolume(level);
_instance->setInputVolume(level);
} else {
_controller->setOutputVolume(level);
_instance->setOutputVolume(level);
}
}
}
void Call::setAudioDuckingEnabled(bool enabled) {
if (_controller) {
_controller->setAudioOutputDuckingEnabled(enabled);
if (_instance) {
_instance->setAudioOutputDuckingEnabled(enabled);
}
}
@@ -818,10 +994,14 @@ void Call::finish(FinishType type, const MTPPhoneCallDiscardReason &reason) {
setState(hangupState);
auto duration = getDurationMs() / 1000;
auto connectionId = _controller ? _controller->getPreferredRelayId() : 0;
auto connectionId = _instance ? _instance->getPreferredRelayId() : 0;
_finishByTimeoutTimer.call(kHangupTimeoutMs, [this, finalState] { setState(finalState); });
const auto flags = ((_videoIncoming->state() != Webrtc::VideoState::Inactive)
|| (_videoOutgoing->state() != Webrtc::VideoState::Inactive))
? MTPphone_DiscardCall::Flag::f_video
: MTPphone_DiscardCall::Flag(0);
_api.request(MTPphone_DiscardCall(
MTP_flags(0),
MTP_flags(flags),
MTP_inputPhoneCall(
MTP_long(_id),
MTP_long(_accessHash)),
@@ -874,9 +1054,11 @@ void Call::handleControllerError(const QString &error) {
}
void Call::destroyController() {
if (_controller) {
if (_instance) {
const auto state = _instance->stop();
DEBUG_LOG(("Call Info: Destroying call controller.."));
_controller.reset();
_instance.reset();
DEBUG_LOG(("Call Info: Call controller destroyed."));
}
setSignalBarCount(kSignalBarFinished);
@@ -887,7 +1069,7 @@ Call::~Call() {
}
void UpdateConfig(const std::string &data) {
TgVoip::setGlobalServerConfig(data);
tgcalls::SetLegacyGlobalServerConfig(data);
}
} // namespace Calls

View File

@@ -19,12 +19,21 @@ class Track;
} // namespace Audio
} // namespace Media
enum class TgVoipState;
namespace tgcalls {
class Instance;
class VideoCaptureInterface;
enum class State;
enum class VideoState;
enum class AudioState;
} // namespace tgcalls
namespace Webrtc {
enum class VideoState;
class VideoTrack;
} // namespace Webrtc
namespace Calls {
class Controller;
struct DhConfig {
int32 version = 0;
int32 g = 0;
@@ -46,9 +55,9 @@ public:
Ended,
};
virtual void playSound(Sound sound) = 0;
virtual void requestMicrophonePermissionOrFail(Fn<void()> result) = 0;
virtual void requestPermissionsOrFail(Fn<void()> result) = 0;
virtual ~Delegate();
virtual ~Delegate() = default;
};
@@ -58,7 +67,7 @@ public:
Incoming,
Outgoing,
};
Call(not_null<Delegate*> delegate, not_null<UserData*> user, Type type);
Call(not_null<Delegate*> delegate, not_null<UserData*> user, Type type, bool video);
[[nodiscard]] Type type() const {
return _type;
@@ -70,6 +79,7 @@ public:
void start(bytes::const_span random);
bool handleUpdate(const MTPPhoneCall &call);
bool handleSignalingData(const MTPDupdatePhoneCallSignalingData &data);
enum State {
Starting,
@@ -88,28 +98,51 @@ public:
Ringing,
Busy,
};
State state() const {
[[nodiscard]] State state() const {
return _state.current();
}
rpl::producer<State> stateValue() const {
[[nodiscard]] rpl::producer<State> stateValue() const {
return _state.value();
}
enum class RemoteAudioState {
Muted,
Active,
};
[[nodiscard]] RemoteAudioState remoteAudioState() const {
return _remoteAudioState.current();
}
[[nodiscard]] auto remoteAudioStateValue() const
-> rpl::producer<RemoteAudioState> {
return _remoteAudioState.value();
}
[[nodiscard]] Webrtc::VideoState remoteVideoState() const {
return _remoteVideoState.current();
}
[[nodiscard]] auto remoteVideoStateValue() const
-> rpl::producer<Webrtc::VideoState> {
return _remoteVideoState.value();
}
static constexpr auto kSignalBarStarting = -1;
static constexpr auto kSignalBarFinished = -2;
static constexpr auto kSignalBarCount = 4;
base::Observable<int> &signalBarCountChanged() {
return _signalBarCountChanged;
[[nodiscard]] rpl::producer<int> signalBarCountValue() const {
return _signalBarCount.value();
}
void setMute(bool mute);
bool isMute() const {
return _mute;
void setMuted(bool mute);
[[nodiscard]] bool muted() const {
return _muted.current();
}
base::Observable<bool> &muteChanged() {
return _muteChanged;
[[nodiscard]] rpl::producer<bool> mutedValue() const {
return _muted.value();
}
[[nodiscard]] not_null<Webrtc::VideoTrack*> videoIncoming() const;
[[nodiscard]] not_null<Webrtc::VideoTrack*> videoOutgoing() const;
crl::time getDurationMs() const;
float64 getWaitingSoundPeakValue() const;
@@ -140,15 +173,17 @@ private:
};
void handleRequestError(const RPCError &error);
void handleControllerError(const QString &error);
void finish(FinishType type, const MTPPhoneCallDiscardReason &reason = MTP_phoneCallDiscardReasonDisconnect());
void finish(
FinishType type,
const MTPPhoneCallDiscardReason &reason
= MTP_phoneCallDiscardReasonDisconnect());
void startOutgoing();
void startIncoming();
void startWaitingTrack();
void sendSignalingData(const QByteArray &data);
void generateModExpFirst(bytes::const_span randomSeed);
void handleControllerStateChange(
not_null<Controller*> controller,
TgVoipState state);
void handleControllerStateChange(tgcalls::State state);
void handleControllerBarCountChange(int count);
void createAndStartController(const MTPDphoneCall &call);
@@ -166,21 +201,26 @@ private:
void setSignalBarCount(int count);
void destroyController();
not_null<Delegate*> _delegate;
not_null<UserData*> _user;
void setupOutgoingVideo();
void updateRemoteMediaState(
tgcalls::AudioState audio,
tgcalls::VideoState video);
const not_null<Delegate*> _delegate;
const not_null<UserData*> _user;
MTP::Sender _api;
Type _type = Type::Outgoing;
rpl::variable<State> _state = State::Starting;
rpl::variable<RemoteAudioState> _remoteAudioState = RemoteAudioState::Active;
rpl::variable<Webrtc::VideoState> _remoteVideoState;
FinishType _finishAfterRequestingCall = FinishType::None;
bool _answerAfterDhConfigReceived = false;
int _signalBarCount = kSignalBarStarting;
base::Observable<int> _signalBarCountChanged;
rpl::variable<int> _signalBarCount = kSignalBarStarting;
crl::time _startTime = 0;
base::DelayedCallTimer _finishByTimeoutTimer;
base::Timer _discardByTimeoutTimer;
bool _mute = false;
base::Observable<bool> _muteChanged;
rpl::variable<bool> _muted = false;
DhConfig _dhConfig;
bytes::vector _ga;
@@ -194,7 +234,10 @@ private:
uint64 _accessHash = 0;
uint64 _keyFingerprint = 0;
std::unique_ptr<Controller> _controller;
std::unique_ptr<tgcalls::Instance> _instance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
const std::unique_ptr<Webrtc::VideoTrack> _videoIncoming;
const std::unique_ptr<Webrtc::VideoTrack> _videoOutgoing;
std::unique_ptr<Media::Audio::Track> _waitingTrack;

View File
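The header above replaces the `base::Observable` members with rpl producers (`mutedValue()`, `signalBarCountValue()`, the new `remoteAudioStateValue()`). A minimal consumer sketch, assuming a panel-like `Ui::RpWidget`; the comments mark where real UI code would go:

// Sketch only: subscribing a widget to the new rpl-based Call state.
void ObserveCall(not_null<Call*> call, not_null<Ui::RpWidget*> widget) {
    call->mutedValue(
    ) | rpl::start_with_next([=](bool muted) {
        // Toggle between the mute / unmute button styles.
    }, widget->lifetime());

    call->signalBarCountValue(
    ) | rpl::start_with_next([=](int count) {
        // Repaint the CallSignalBars widget (count can also be
        // kSignalBarStarting or kSignalBarFinished).
    }, widget->lifetime());

    call->remoteAudioStateValue(
    ) | rpl::start_with_next([=](Call::RemoteAudioState state) {
        // Show the "remote audio muted" tooltip when state == Muted.
    }, widget->lifetime());
}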

@@ -8,6 +8,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "calls/calls_controller.h"
#include "calls/calls_controller_tgvoip.h"
#include "calls/calls_controller_webrtc.h"
namespace Calls {
@@ -18,7 +19,20 @@ namespace Calls {
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey) {
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame) {
if (version == WebrtcController::Version()) {
return std::make_unique<WebrtcController>(
config,
persistentState,
endpoints,
proxy,
initialNetworkType,
encryptionKey,
std::move(sendSignalingData),
std::move(displayNextFrame));
}
return std::make_unique<TgVoipController>(
config,
persistentState,
@@ -28,4 +42,12 @@ namespace Calls {
encryptionKey);
}
std::vector<std::string> CollectControllerVersions() {
return { WebrtcController::Version(), TgVoipController::Version() };
}
int ControllerMaxLayer() {
return TgVoip::getConnectionMaxLayer();
}
} // namespace Calls

View File

@@ -26,6 +26,7 @@ public:
virtual void setInputVolume(float level) = 0;
virtual void setOutputVolume(float level) = 0;
virtual void setAudioOutputDuckingEnabled(bool enabled) = 0;
virtual bool receiveSignalingData(const QByteArray &data) = 0;
virtual std::string getLastError() = 0;
virtual std::string getDebugInfo() = 0;
@@ -48,6 +49,11 @@ public:
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey);
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame);
[[nodiscard]] std::vector<std::string> CollectControllerVersions();
[[nodiscard]] int ControllerMaxLayer();
} // namespace Calls

View File

@@ -33,7 +33,7 @@ public:
return TgVoip::getVersion();
}
[[nodiscard]] std::string version() override {
std::string version() override {
return Version();
}
void setNetworkType(TgVoipNetworkType networkType) override {
@@ -63,6 +63,9 @@ public:
void setAudioOutputDuckingEnabled(bool enabled) override {
_impl->setAudioOutputDuckingEnabled(enabled);
}
bool receiveSignalingData(const QByteArray &data) override {
return false;
}
std::string getLastError() override {
return _impl->getLastError();
}
@@ -81,8 +84,7 @@ public:
void setOnStateUpdated(Fn<void(TgVoipState)> onStateUpdated) override {
_impl->setOnStateUpdated(std::move(onStateUpdated));
}
void setOnSignalBarsUpdated(
Fn<void(int)> onSignalBarsUpdated) override {
void setOnSignalBarsUpdated(Fn<void(int)> onSignalBarsUpdated) override {
_impl->setOnSignalBarsUpdated(std::move(onSignalBarsUpdated));
}
TgVoipFinalState stop() override {

View File

@@ -0,0 +1,175 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/calls_controller_webrtc.h"
#include "webrtc/webrtc_call_context.h"
namespace Calls {
namespace {
using namespace Webrtc;
[[nodiscard]] CallConnectionDescription ConvertEndpoint(const TgVoipEndpoint &data) {
return CallConnectionDescription{
.ip = QString::fromStdString(data.host.ipv4),
.ipv6 = QString::fromStdString(data.host.ipv6),
.peerTag = QByteArray(
reinterpret_cast<const char*>(data.peerTag),
base::array_size(data.peerTag)),
.connectionId = data.endpointId,
.port = data.port,
};
}
[[nodiscard]] CallContext::Config MakeContextConfig(
const TgVoipConfig &config,
const TgVoipPersistentState &persistentState,
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame) {
Expects(!endpoints.empty());
auto result = CallContext::Config{
.proxy = (proxy
? ProxyServer{
.host = QString::fromStdString(proxy->host),
.username = QString::fromStdString(proxy->login),
.password = QString::fromStdString(proxy->password),
.port = proxy->port }
: ProxyServer()),
.dataSaving = (config.dataSaving != TgVoipDataSaving::Never),
.key = QByteArray(
reinterpret_cast<const char*>(encryptionKey.value.data()),
encryptionKey.value.size()),
.outgoing = encryptionKey.isOutgoing,
.primary = ConvertEndpoint(endpoints.front()),
.alternatives = endpoints | ranges::view::drop(
1
) | ranges::view::transform(ConvertEndpoint) | ranges::to_vector,
.maxLayer = config.maxApiLayer,
.allowP2P = config.enableP2P,
.sendSignalingData = std::move(sendSignalingData),
.displayNextFrame = std::move(displayNextFrame),
};
return result;
}
} // namespace
WebrtcController::WebrtcController(
const TgVoipConfig &config,
const TgVoipPersistentState &persistentState,
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame)
: _impl(std::make_unique<CallContext>(MakeContextConfig(
config,
persistentState,
endpoints,
proxy,
initialNetworkType,
encryptionKey,
std::move(sendSignalingData),
std::move(displayNextFrame)))) {
}
WebrtcController::~WebrtcController() = default;
std::string WebrtcController::Version() {
return CallContext::Version().toStdString();
}
std::string WebrtcController::version() {
return Version();
}
void WebrtcController::setNetworkType(TgVoipNetworkType networkType) {
}
void WebrtcController::setMuteMicrophone(bool muteMicrophone) {
_impl->setIsMuted(muteMicrophone);
}
void WebrtcController::setAudioOutputGainControlEnabled(bool enabled) {
}
void WebrtcController::setEchoCancellationStrength(int strength) {
}
void WebrtcController::setAudioInputDevice(std::string id) {
}
void WebrtcController::setAudioOutputDevice(std::string id) {
}
void WebrtcController::setInputVolume(float level) {
}
void WebrtcController::setOutputVolume(float level) {
}
void WebrtcController::setAudioOutputDuckingEnabled(bool enabled) {
}
bool WebrtcController::receiveSignalingData(const QByteArray &data) {
return _impl->receiveSignalingData(data);
}
std::string WebrtcController::getLastError() {
return {};
}
std::string WebrtcController::getDebugInfo() {
return _impl->getDebugInfo().toStdString();
}
int64_t WebrtcController::getPreferredRelayId() {
return 0;
}
TgVoipTrafficStats WebrtcController::getTrafficStats() {
return {};
}
TgVoipPersistentState WebrtcController::getPersistentState() {
return TgVoipPersistentState{};
}
void WebrtcController::setOnStateUpdated(
Fn<void(TgVoipState)> onStateUpdated) {
_stateUpdatedLifetime.destroy();
_impl->state().changes(
) | rpl::start_with_next([=](CallState state) {
onStateUpdated([&] {
switch (state) {
case CallState::Initializing: return TgVoipState::WaitInit;
case CallState::Reconnecting: return TgVoipState::Reconnecting;
case CallState::Connected: return TgVoipState::Established;
case CallState::Failed: return TgVoipState::Failed;
}
Unexpected("State value in Webrtc::CallContext::state.");
}());
}, _stateUpdatedLifetime);
}
void WebrtcController::setOnSignalBarsUpdated(
Fn<void(int)> onSignalBarsUpdated) {
}
TgVoipFinalState WebrtcController::stop() {
_impl->stop();
return TgVoipFinalState();
}
} // namespace Calls
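Most of the adapter is forwarding or stubs; the one piece of real logic is the state translation in setOnStateUpdated, which maps Webrtc::CallState onto the TgVoipState values the rest of the call code expects. The same mapping as a standalone function (only the four states the adapter produces are modelled here, and the Unexpected() macro is replaced by a plain assert):

#include <cassert>

enum class CallState { Initializing, Reconnecting, Connected, Failed };
enum class TgVoipState { WaitInit, Reconnecting, Established, Failed };

TgVoipState MapState(CallState state) {
    switch (state) {
    case CallState::Initializing: return TgVoipState::WaitInit;
    case CallState::Reconnecting: return TgVoipState::Reconnecting;
    case CallState::Connected: return TgVoipState::Established;
    case CallState::Failed: return TgVoipState::Failed;
    }
    assert(false && "unexpected CallState value");
    return TgVoipState::Failed;
}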

View File

@@ -0,0 +1,60 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "calls/calls_controller.h"
namespace Webrtc {
class CallContext;
} // namespace Webrtc
namespace Calls {
class WebrtcController final : public Controller {
public:
WebrtcController(
const TgVoipConfig &config,
const TgVoipPersistentState &persistentState,
const std::vector<TgVoipEndpoint> &endpoints,
const TgVoipProxy *proxy,
TgVoipNetworkType initialNetworkType,
const TgVoipEncryptionKey &encryptionKey,
Fn<void(QByteArray)> sendSignalingData,
Fn<void(QImage)> displayNextFrame);
~WebrtcController();
[[nodiscard]] static std::string Version();
std::string version() override;
void setNetworkType(TgVoipNetworkType networkType) override;
void setMuteMicrophone(bool muteMicrophone) override;
void setAudioOutputGainControlEnabled(bool enabled) override;
void setEchoCancellationStrength(int strength) override;
void setAudioInputDevice(std::string id) override;
void setAudioOutputDevice(std::string id) override;
void setInputVolume(float level) override;
void setOutputVolume(float level) override;
void setAudioOutputDuckingEnabled(bool enabled) override;
bool receiveSignalingData(const QByteArray &data) override;
std::string getLastError() override;
std::string getDebugInfo() override;
int64_t getPreferredRelayId() override;
TgVoipTrafficStats getTrafficStats() override;
TgVoipPersistentState getPersistentState() override;
void setOnStateUpdated(Fn<void(TgVoipState)> onStateUpdated) override;
void setOnSignalBarsUpdated(Fn<void(int)> onSignalBarsUpdated) override;
TgVoipFinalState stop() override;
private:
const std::unique_ptr<Webrtc::CallContext> _impl;
rpl::lifetime _stateUpdatedLifetime;
};
} // namespace Calls

View File

@@ -8,11 +8,19 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "calls/calls_emoji_fingerprint.h"
#include "calls/calls_call.h"
#include "calls/calls_signal_bars.h"
#include "lang/lang_keys.h"
#include "data/data_user.h"
#include "ui/widgets/tooltip.h"
#include "ui/emoji_config.h"
#include "ui/rp_widget.h"
#include "styles/style_calls.h"
namespace Calls {
namespace {
constexpr auto kTooltipShowTimeoutMs = 1000;
const ushort Data[] = {
0xd83d, 0xde09, 0xd83d, 0xde0d, 0xd83d, 0xde1b, 0xd83d, 0xde2d, 0xd83d, 0xde31, 0xd83d, 0xde21,
0xd83d, 0xde0e, 0xd83d, 0xde34, 0xd83d, 0xde35, 0xd83d, 0xde08, 0xd83d, 0xde2c, 0xd83d, 0xde07,
@@ -143,7 +151,147 @@ std::vector<EmojiPtr> ComputeEmojiFingerprint(not_null<Call*> call) {
}
}
return result;
}
object_ptr<Ui::RpWidget> CreateFingerprintAndSignalBars(
not_null<QWidget*> parent,
not_null<Call*> call) {
class EmojiTooltipShower final : public Ui::AbstractTooltipShower {
public:
EmojiTooltipShower(not_null<QWidget*> window, const QString &text)
: _window(window)
, _text(text) {
}
QString tooltipText() const override {
return _text;
}
QPoint tooltipPos() const override {
return QCursor::pos();
}
bool tooltipWindowActive() const override {
return _window->isActiveWindow();
}
private:
const not_null<QWidget*> _window;
const QString _text;
};
auto result = object_ptr<Ui::RpWidget>(parent);
const auto raw = result.data();
// Emoji tooltip.
const auto shower = raw->lifetime().make_state<EmojiTooltipShower>(
parent->window(),
tr::lng_call_fingerprint_tooltip(
tr::now,
lt_user,
call->user()->name));
raw->setMouseTracking(true);
raw->events(
) | rpl::start_with_next([=](not_null<QEvent*> e) {
if (e->type() == QEvent::MouseMove) {
Ui::Tooltip::Show(kTooltipShowTimeoutMs, shower);
} else if (e->type() == QEvent::Leave) {
Ui::Tooltip::Hide();
}
}, raw->lifetime());
// Signal bars.
const auto bars = Ui::CreateChild<SignalBars>(
raw,
call,
st::callPanelSignalBars);
bars->setAttribute(Qt::WA_TransparentForMouseEvents);
// Geometry.
const auto print = ComputeEmojiFingerprint(call);
auto realSize = Ui::Emoji::GetSizeNormal();
auto size = realSize / cIntRetinaFactor();
auto count = print.size();
const auto printSize = QSize(
count * size + (count - 1) * st::callFingerprintSkip,
size);
const auto fullPrintSize = QRect(
QPoint(),
printSize
).marginsAdded(st::callFingerprintPadding).size();
const auto fullBarsSize = bars->rect().marginsAdded(
st::callSignalBarsPadding
).size();
const auto fullSize = QSize(
(fullPrintSize.width()
+ st::callFingerprintSignalBarsSkip
+ fullBarsSize.width()),
fullPrintSize.height());
raw->resize(fullSize);
bars->moveToRight(
st::callSignalBarsPadding.right(),
st::callSignalBarsPadding.top());
// Paint.
const auto background = raw->lifetime().make_state<QImage>(
fullSize * cIntRetinaFactor(),
QImage::Format_ARGB32_Premultiplied);
background->setDevicePixelRatio(cRetinaFactor());
rpl::merge(
rpl::single(rpl::empty_value()),
Ui::Emoji::Updated(),
style::PaletteChanged()
) | rpl::start_with_next([=] {
background->fill(Qt::transparent);
// Prepare.
auto p = QPainter(background);
const auto height = fullSize.height();
const auto fullPrintRect = QRect(QPoint(), fullPrintSize);
const auto fullBarsRect = QRect(
fullSize.width() - fullBarsSize.width(),
0,
fullBarsSize.width(),
height);
const auto bigRadius = height / 2;
const auto smallRadius = st::buttonRadius;
const auto hq = PainterHighQualityEnabler(p);
p.setPen(Qt::NoPen);
p.setBrush(st::callBgButton);
// Fingerprint part.
p.setClipRect(0, 0, fullPrintSize.width() / 2, height);
p.drawRoundedRect(fullPrintRect, bigRadius, bigRadius);
p.setClipRect(fullPrintSize.width() / 2, 0, fullSize.width(), height);
p.drawRoundedRect(fullPrintRect, smallRadius, smallRadius);
// Signal bars part.
const auto middle = fullBarsRect.center().x();
p.setClipRect(0, 0, middle, height);
p.drawRoundedRect(fullBarsRect, smallRadius, smallRadius);
p.setClipRect(middle, 0, fullBarsRect.width(), height);
p.drawRoundedRect(fullBarsRect, bigRadius, bigRadius);
// Emoji.
const auto realSize = Ui::Emoji::GetSizeNormal();
const auto size = realSize / cIntRetinaFactor();
auto left = st::callFingerprintPadding.left();
const auto top = st::callFingerprintPadding.top();
p.setClipping(false);
for (const auto emoji : print) {
Ui::Emoji::Draw(p, emoji, realSize, left, top);
left += st::callFingerprintSkip + size;
}
raw->update();
}, raw->lifetime());
raw->paintRequest(
) | rpl::start_with_next([=](QRect clip) {
QPainter(raw).drawImage(raw->rect(), *background);
}, raw->lifetime());
raw->show();
return result;
}
} // namespace Calls
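The background painting above gets a pill shape on the outer sides and small radii on the sides facing the gap between fingerprint and signal bars by drawing the same rounded rect twice under two clip rects: one half with the big (height / 2) radius, the other with st::buttonRadius. A reduced sketch of that trick for a single rect, using only QPainter calls (function name and sizes are illustrative):

#include <QPainter>

// Paints a rect whose left corners form a full pill and whose right corners
// use a small radius: the same rounded rect is drawn twice under two clips.
void paintHalfPill(QPainter &p, QRect rect, int smallRadius) {
    const auto bigRadius = rect.height() / 2;
    const auto middle = rect.center().x();
    p.setClipRect(rect.x(), rect.y(), middle - rect.x(), rect.height());
    p.drawRoundedRect(rect, bigRadius, bigRadius);
    p.setClipRect(middle, rect.y(), rect.x() + rect.width() - middle, rect.height());
    p.drawRoundedRect(rect, smallRadius, smallRadius);
    p.setClipping(false);
}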

View File

@@ -7,10 +7,21 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "base/object_ptr.h"
namespace Ui {
class RpWidget;
} // namespace Ui
namespace Calls {
class Call;
std::vector<EmojiPtr> ComputeEmojiFingerprint(not_null<Call*> call);
[[nodiscard]] std::vector<EmojiPtr> ComputeEmojiFingerprint(
not_null<Call*> call);
[[nodiscard]] object_ptr<Ui::RpWidget> CreateFingerprintAndSignalBars(
not_null<QWidget*> parent,
not_null<Call*> call);
} // namespace Calls

View File

@@ -35,15 +35,9 @@ constexpr auto kServerConfigUpdateTimeoutMs = 24 * 3600 * crl::time(1000);
Instance::Instance() = default;
Instance::~Instance() {
for (const auto panel : _pendingPanels) {
if (panel) {
delete panel;
}
}
}
Instance::~Instance() = default;
void Instance::startOutgoingCall(not_null<UserData*> user) {
void Instance::startOutgoingCall(not_null<UserData*> user, bool video) {
if (alreadyInCall()) { // Already in a call.
_currentCallPanel->showAndActivate();
return;
@@ -55,17 +49,21 @@ void Instance::startOutgoingCall(not_null<UserData*> user) {
tr::lng_call_error_not_available(tr::now, lt_user, user->name)));
return;
}
requestMicrophonePermissionOrFail(crl::guard(this, [=] {
createCall(user, Call::Type::Outgoing);
requestPermissionsOrFail(crl::guard(this, [=] {
createCall(user, Call::Type::Outgoing, video);
}));
}
void Instance::callFinished(not_null<Call*> call) {
destroyCall(call);
crl::on_main(call, [=] {
destroyCall(call);
});
}
void Instance::callFailed(not_null<Call*> call) {
destroyCall(call);
crl::on_main(call, [=] {
destroyCall(call);
});
}
void Instance::callRedial(not_null<Call*> call) {
@@ -107,7 +105,9 @@ void Instance::playSound(Sound sound) {
void Instance::destroyCall(not_null<Call*> call) {
if (_currentCall.get() == call) {
destroyCurrentPanel();
_currentCallPanel->closeBeforeDestroy();
_currentCallPanel = nullptr;
auto taken = base::take(_currentCall);
_currentCallChanges.fire(nullptr);
taken.reset();
@@ -119,19 +119,8 @@ void Instance::destroyCall(not_null<Call*> call) {
}
}
void Instance::destroyCurrentPanel() {
_pendingPanels.erase(
std::remove_if(
_pendingPanels.begin(),
_pendingPanels.end(),
[](auto &&panel) { return !panel; }),
_pendingPanels.end());
_pendingPanels.emplace_back(_currentCallPanel.release());
_pendingPanels.back()->hideAndDestroy(); // Always queues the destruction.
}
void Instance::createCall(not_null<UserData*> user, Call::Type type) {
auto call = std::make_unique<Call>(getCallDelegate(), user, type);
void Instance::createCall(not_null<UserData*> user, Call::Type type, bool video) {
auto call = std::make_unique<Call>(getCallDelegate(), user, type, video);
const auto raw = call.get();
user->session().account().sessionChanges(
@@ -232,13 +221,19 @@ void Instance::refreshServerConfig(not_null<Main::Session*> session) {
UpdateConfig(std::string(json.data(), json.size()));
}).fail([=](const RPCError &error) {
_serverConfigRequestSession = nullptr;
}).send();
}).send();
}
void Instance::handleUpdate(
not_null<Main::Session*> session,
const MTPDupdatePhoneCall& update) {
handleCallUpdate(session, update.vphone_call());
const MTPUpdate &update) {
update.match([&](const MTPDupdatePhoneCall &data) {
handleCallUpdate(session, data.vphone_call());
}, [&](const MTPDupdatePhoneCallSignalingData &data) {
handleSignalingData(data);
}, [](const auto &) {
Unexpected("Update type in Calls::Instance::handleUpdate.");
});
}
void Instance::showInfoPanel(not_null<Call*> call) {
@@ -272,8 +267,11 @@ void Instance::handleCallUpdate(
}
const auto &config = session->serverConfig();
if (alreadyInCall() || !user || user->isSelf()) {
const auto flags = phoneCall.is_video()
? MTPphone_DiscardCall::Flag::f_video
: MTPphone_DiscardCall::Flag(0);
session->api().request(MTPphone_DiscardCall(
MTP_flags(0),
MTP_flags(flags),
MTP_inputPhoneCall(phoneCall.vid(), phoneCall.vaccess_hash()),
MTP_int(0),
MTP_phoneCallDiscardReasonBusy(),
@@ -283,7 +281,7 @@ void Instance::handleCallUpdate(
< base::unixtime::now()) {
LOG(("Ignoring too old call."));
} else {
createCall(user, Call::Type::Incoming);
createCall(user, Call::Type::Incoming, phoneCall.is_video());
_currentCall->handleUpdate(call);
}
} else if (!_currentCall || !_currentCall->handleUpdate(call)) {
@@ -291,6 +289,14 @@ void Instance::handleCallUpdate(
}
}
void Instance::handleSignalingData(
const MTPDupdatePhoneCallSignalingData &data) {
if (!_currentCall || !_currentCall->handleSignalingData(data)) {
DEBUG_LOG(("API Warning: unexpected call signaling data %1"
).arg(data.vphone_call_id().v));
}
}
bool Instance::alreadyInCall() {
return (_currentCall && _currentCall->state() != Call::State::Busy);
}
@@ -303,13 +309,23 @@ rpl::producer<Call*> Instance::currentCallValue() const {
return _currentCallChanges.events_starting_with(currentCall());
}
void Instance::requestMicrophonePermissionOrFail(Fn<void()> onSuccess) {
Platform::PermissionStatus status=Platform::GetPermissionStatus(Platform::PermissionType::Microphone);
if (status==Platform::PermissionStatus::Granted) {
void Instance::requestPermissionsOrFail(Fn<void()> onSuccess) {
using Type = Platform::PermissionType;
requestPermissionOrFail(Type::Microphone, [=] {
requestPermissionOrFail(Type::Camera, [=] {
crl::on_main(onSuccess);
});
});
}
void Instance::requestPermissionOrFail(Platform::PermissionType type, Fn<void()> onSuccess) {
using Status = Platform::PermissionStatus;
const auto status = Platform::GetPermissionStatus(type);
if (status == Status::Granted) {
onSuccess();
} else if(status==Platform::PermissionStatus::CanRequest) {
Platform::RequestPermission(Platform::PermissionType::Microphone, crl::guard(this, [=](Platform::PermissionStatus status) {
if (status==Platform::PermissionStatus::Granted) {
} else if (status == Status::CanRequest) {
Platform::RequestPermission(type, crl::guard(this, [=](Status status) {
if (status == Status::Granted) {
crl::on_main(onSuccess);
} else {
if (_currentCall) {
@@ -321,8 +337,8 @@ void Instance::requestMicrophonePermissionOrFail(Fn<void()> onSuccess) {
if (alreadyInCall()) {
_currentCall->hangup();
}
Ui::show(Box<ConfirmBox>(tr::lng_no_mic_permission(tr::now), tr::lng_menu_settings(tr::now), crl::guard(this, [] {
Platform::OpenSystemSettingsForPermission(Platform::PermissionType::Microphone);
Ui::show(Box<ConfirmBox>(tr::lng_no_mic_permission(tr::now), tr::lng_menu_settings(tr::now), crl::guard(this, [=] {
Platform::OpenSystemSettingsForPermission(type);
Ui::hideLayer();
})));
}
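requestPermissionsOrFail chains the two platform checks by nesting continuations, so the camera is only requested after the microphone is granted and the final success callback is posted to the main thread; on failure the real code hangs up the current call and offers to open the system settings. A library-free sketch of the same composition, where PermissionType, the status handling and requestOne are simplified stand-ins for the Platform:: helpers:

#include <functional>

enum class PermissionType { Microphone, Camera };

// Stand-in for Platform::GetPermissionStatus / Platform::RequestPermission:
// this toy "platform" simply grants every request.
void requestOne(PermissionType, std::function<void(bool)> done) {
    done(true);
}

void requestPermissionsOrFail(std::function<void()> onSuccess,
                              std::function<void()> onFail) {
    requestOne(PermissionType::Microphone, [=](bool micOk) {
        if (!micOk) {
            onFail();
            return;
        }
        requestOne(PermissionType::Camera, [=](bool camOk) {
            if (camOk) {
                onSuccess(); // the real code wraps this in crl::on_main()
            } else {
                onFail();
            }
        });
    });
}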

View File

@@ -10,6 +10,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "mtproto/sender.h"
#include "calls/calls_call.h"
namespace Platform {
enum class PermissionType;
} // namespace Platform
namespace Media {
namespace Audio {
class Track;
@@ -32,10 +36,10 @@ public:
Instance();
~Instance();
void startOutgoingCall(not_null<UserData*> user);
void startOutgoingCall(not_null<UserData*> user, bool video);
void handleUpdate(
not_null<Main::Session*> session,
const MTPDupdatePhoneCall &update);
const MTPUpdate &update);
void showInfoPanel(not_null<Call*> call);
[[nodiscard]] Call *currentCall() const;
[[nodiscard]] rpl::producer<Call*> currentCallValue() const;
@@ -54,10 +58,12 @@ private:
void callRedial(not_null<Call*> call) override;
using Sound = Call::Delegate::Sound;
void playSound(Sound sound) override;
void createCall(not_null<UserData*> user, Call::Type type);
void createCall(not_null<UserData*> user, Call::Type type, bool video);
void destroyCall(not_null<Call*> call);
void destroyCurrentPanel();
void requestMicrophonePermissionOrFail(Fn<void()> onSuccess) override;
void requestPermissionsOrFail(Fn<void()> onSuccess) override;
void requestPermissionOrFail(Platform::PermissionType type, Fn<void()> onSuccess);
void handleSignalingData(const MTPDupdatePhoneCallSignalingData &data);
void refreshDhConfig();
void refreshServerConfig(not_null<Main::Session*> session);
@@ -78,7 +84,6 @@ private:
std::unique_ptr<Panel> _currentCallPanel;
base::Observable<Call*> _currentCallChanged;
base::Observable<FullMsgId> _newServiceMessage;
std::vector<QPointer<Panel>> _pendingPanels;
std::unique_ptr<Media::Audio::Track> _callConnectingTrack;
std::unique_ptr<Media::Audio::Track> _callEndedTrack;

File diff suppressed because it is too large.

View File

@@ -9,8 +9,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/weak_ptr.h"
#include "base/timer.h"
#include "base/object_ptr.h"
#include "calls/calls_call.h"
#include "ui/widgets/tooltip.h"
#include "ui/effects/animations.h"
#include "ui/rp_widget.h"
@@ -26,138 +26,118 @@ class IconButton;
class FlatLabel;
template <typename Widget>
class FadeWrap;
template <typename Widget>
class PaddingWrap;
class Window;
namespace Platform {
class TitleControls;
} // namespace Platform
} // namespace Ui
namespace style {
struct CallSignalBars;
struct CallBodyLayout;
} // namespace style
namespace Calls {
class SignalBars : public Ui::RpWidget, private base::Subscriber {
public:
SignalBars(
QWidget *parent,
not_null<Call*> call,
const style::CallSignalBars &st,
Fn<void()> displayedChangedCallback = nullptr);
bool isDisplayed() const;
protected:
void paintEvent(QPaintEvent *e) override;
private:
void changed(int count);
const style::CallSignalBars &_st;
int _count = Call::kSignalBarStarting;
Fn<void()> _displayedChangedCallback;
};
class Panel
: public Ui::RpWidget
, private base::Subscriber
, private Ui::AbstractTooltipShower {
class Userpic;
class SignalBars;
class VideoBubble;
class Panel final {
public:
Panel(not_null<Call*> call);
~Panel();
void showAndActivate();
void replaceCall(not_null<Call*> call);
void hideAndDestroy();
protected:
void paintEvent(QPaintEvent *e) override;
void closeEvent(QCloseEvent *e) override;
void resizeEvent(QResizeEvent *e) override;
void mousePressEvent(QMouseEvent *e) override;
void mouseReleaseEvent(QMouseEvent *e) override;
void mouseMoveEvent(QMouseEvent *e) override;
void leaveEventHook(QEvent *e) override;
void leaveToChildEvent(QEvent *e, QWidget *child) override;
bool eventHook(QEvent *e) override;
void closeBeforeDestroy();
private:
class Content;
class Button;
using State = Call::State;
using Type = Call::Type;
enum class AnswerHangupRedialState : uchar {
Answer,
Hangup,
Redial,
};
// AbstractTooltipShower interface
QString tooltipText() const override;
QPoint tooltipPos() const override;
bool tooltipWindowActive() const override;
[[nodiscard]] not_null<Ui::RpWidget*> widget() const;
void paint(QRect clip);
void initWindow();
void initWidget();
void initControls();
void reinitControls();
void reinitWithCall(Call *call);
void initLayout();
void initGeometry();
void hideDeactivated();
void createBottomImage();
void createDefaultCacheImage();
void refreshCacheImageUserPhoto();
void initBottomShadow();
void handleClose();
void processUserPhoto();
void refreshUserPhoto();
bool isGoodUserPhoto(PhotoData *photo);
void createUserpicCache(Image *image);
QRect signalBarsRect() const;
void paintSignalBarsBg(Painter &p);
void updateControlsGeometry();
void updateHangupGeometry();
void updateStatusGeometry();
void updateOutgoingVideoBubbleGeometry();
void stateChanged(State state);
void showControls();
void updateStatusText(State state);
void startDurationUpdateTimer(crl::time currentDuration);
void fillFingerprint();
void toggleOpacityAnimation(bool visible);
void finishAnimating();
void destroyDelayed();
void setIncomingSize(QSize size);
void fillTopShadow(QPainter &p, QRect incoming);
void fillBottomShadow(QPainter &p, QRect incoming);
void refreshOutgoingPreviewInBody(State state);
void toggleFullScreen(bool fullscreen);
void createRemoteAudioMute();
void refreshAnswerHangupRedialLabel();
[[nodiscard]] QRect incomingFrameGeometry() const;
[[nodiscard]] QRect outgoingFrameGeometry() const;
Call *_call = nullptr;
not_null<UserData*> _user;
std::shared_ptr<Data::CloudImageView> _userpic;
std::shared_ptr<Data::PhotoMedia> _photo;
bool _useTransparency = true;
style::margins _padding;
int _contentTop = 0;
const std::unique_ptr<Ui::Window> _window;
bool _dragging = false;
QPoint _dragStartMousePosition;
QPoint _dragStartMyPosition;
#ifdef Q_OS_WIN
std::unique_ptr<Ui::Platform::TitleControls> _controls;
#endif // Q_OS_WIN
rpl::lifetime _stateLifetime;
QSize _incomingFrameSize;
class Button;
rpl::lifetime _callLifetime;
not_null<const style::CallBodyLayout*> _bodySt;
object_ptr<Button> _answerHangupRedial;
object_ptr<Ui::FadeWrap<Button>> _decline;
object_ptr<Ui::FadeWrap<Button>> _cancel;
bool _hangupShown = false;
bool _outgoingPreviewInBody = false;
std::optional<AnswerHangupRedialState> _answerHangupRedialState;
Ui::Animations::Simple _hangupShownProgress;
object_ptr<Ui::IconButton> _mute;
object_ptr<Button> _camera;
object_ptr<Button> _mute;
object_ptr<Ui::FlatLabel> _name;
object_ptr<Ui::FlatLabel> _status;
object_ptr<SignalBars> _signalBars;
std::vector<EmojiPtr> _fingerprint;
QRect _fingerprintArea;
object_ptr<Ui::RpWidget> _fingerprint = { nullptr };
object_ptr<Ui::PaddingWrap<Ui::FlatLabel>> _remoteAudioMute = { nullptr };
std::unique_ptr<Userpic> _userpic;
std::unique_ptr<VideoBubble> _outgoingVideoBubble;
QPixmap _bottomShadow;
int _bodyTop = 0;
int _buttonsTop = 0;
base::Timer _updateDurationTimer;
base::Timer _updateOuterRippleTimer;
bool _visible = false;
QPixmap _userPhoto;
PhotoId _userPhotoId = 0;
bool _userPhotoFull = false;
Ui::Animations::Simple _opacityAnimation;
QPixmap _animationCache;
QPixmap _bottomCache;
QPixmap _cache;
};
} // namespace Calls
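The header diff above is the core of the panel redesign: Panel no longer inherits Ui::RpWidget (and the tooltip/subscriber mixins), it owns a Ui::Window and exposes the inner widget through widget(), with closeBeforeDestroy() replacing the old hideAndDestroy() self-deletion path. A toy illustration of that composition-over-inheritance shape, with minimal hypothetical Window/Widget stand-ins:

#include <memory>

struct Widget { /* contents, events, painting */ };

struct Window {
    Widget *body() { return &_body; }
    void close() { /* hide and release the native window */ }
private:
    Widget _body;
};

class Panel final {
public:
    Panel() : _window(std::make_unique<Window>()) {
    }
    Widget *widget() const { return _window->body(); } // what callers attach to
    void closeBeforeDestroy() { _window->close(); }    // no self-deletion anymore
private:
    const std::unique_ptr<Window> _window; // owned for the Panel's whole lifetime
};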

View File

@@ -0,0 +1,64 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "calls/calls_signal_bars.h"
#include "calls/calls_call.h"
#include "styles/style_calls.h"
namespace Calls {
SignalBars::SignalBars(
QWidget *parent,
not_null<Call*> call,
const style::CallSignalBars &st)
: RpWidget(parent)
, _st(st)
, _count(Call::kSignalBarStarting) {
resize(
_st.width + (_st.width + _st.skip) * (Call::kSignalBarCount - 1),
_st.max);
call->signalBarCountValue(
) | rpl::start_with_next([=](int count) {
changed(count);
}, lifetime());
}
void SignalBars::paintEvent(QPaintEvent *e) {
Painter p(this);
PainterHighQualityEnabler hq(p);
p.setPen(Qt::NoPen);
p.setBrush(_st.color);
for (auto i = 0; i < Call::kSignalBarCount; ++i) {
p.setOpacity((i < _count) ? 1. : _st.inactiveOpacity);
const auto barHeight = _st.min
+ (_st.max - _st.min) * (i / float64(Call::kSignalBarCount - 1));
const auto barLeft = i * (_st.width + _st.skip);
const auto barTop = height() - barHeight;
p.drawRoundedRect(
QRectF(
barLeft,
barTop,
_st.width,
barHeight),
_st.radius,
_st.radius);
}
p.setOpacity(1.);
}
void SignalBars::changed(int count) {
if (_count == Call::kSignalBarFinished) {
return;
} else if (_count != count) {
_count = count;
update();
}
}
} // namespace Calls
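The paint loop above sizes the i-th bar by linear interpolation between _st.min and _st.max, anchors it to the bottom edge, and dims bars beyond the current count with _st.inactiveOpacity. The geometry on its own, without Qt, looks like this (the style values and the bar count of 4 are illustrative, standing in for st::callPanelSignalBars and Call::kSignalBarCount):

#include <cstdio>

struct BarStyle {
    int width = 3;
    int skip = 2;
    int min = 3;
    int max = 11;
    double inactiveOpacity = 0.5;
};

int main() {
    const auto st = BarStyle();
    const auto barCount = 4;  // stands in for Call::kSignalBarCount
    const auto active = 2;    // current signal level reported by the call
    for (auto i = 0; i != barCount; ++i) {
        const auto height = st.min
            + (st.max - st.min) * (i / double(barCount - 1));
        const auto left = i * (st.width + st.skip);
        const auto opacity = (i < active) ? 1. : st.inactiveOpacity;
        // In the widget each bar is drawn at y = widget_height - height,
        // so all bars share the same baseline.
        std::printf("bar %d: left=%d height=%.1f opacity=%.1f\n",
            i, left, height, opacity);
    }
}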

View File

@@ -0,0 +1,37 @@
/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "ui/rp_widget.h"
namespace style {
struct CallSignalBars;
} // namespace style
namespace Calls {
class Call;
class SignalBars final : public Ui::RpWidget {
public:
SignalBars(
QWidget *parent,
not_null<Call*> call,
const style::CallSignalBars &st);
private:
void paintEvent(QPaintEvent *e) override;
void changed(int count);
const style::CallSignalBars &_st;
int _count = 0;
};
} // namespace Calls

View File

@@ -14,7 +14,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/application.h"
#include "calls/calls_call.h"
#include "calls/calls_instance.h"
#include "calls/calls_panel.h"
#include "calls/calls_signal_bars.h"
#include "data/data_user.h"
#include "data/data_changes.h"
#include "main/main_session.h"
@@ -94,14 +94,14 @@ TopBar::TopBar(
void TopBar::initControls() {
_mute->setClickedCallback([=] {
if (const auto call = _call.get()) {
call->setMute(!call->isMute());
call->setMuted(!call->muted());
}
});
setMuted(_call->isMute());
subscribe(_call->muteChanged(), [=](bool mute) {
setMuted(mute);
_call->mutedValue(
) | rpl::start_with_next([=](bool muted) {
setMuted(muted);
update();
});
}, lifetime());
_call->user()->session().changes().peerUpdates(
Data::PeerUpdate::Flag::Name

Some files were not shown because too many files have changed in this diff.