Compare commits

77 Commits

Author SHA1 Message Date
John Preston
a10d668131 Version 4.14.14.
- Fix webview regression on Linux X11.
2024-02-09 20:18:40 +04:00
John Preston
8a62bacaa6 Fix anti-aliasing in emoji categories search. 2024-02-09 20:15:02 +04:00
100backslash001
bacab01f7e Remove duplicates from configure.py 2024-02-09 10:49:18 +04:00
23rd
5d3400033a Fixed ability to copy whole transcribed text and copy album captions. 2024-02-08 17:35:29 +03:00
23rd
0e571ea679 Added ability to copy filename of named documents. 2024-02-08 17:32:25 +03:00
23rd
b959262140 Fixed legal link in some files. 2024-02-08 15:20:42 +03:00
Ilya Fedin
fd3ce905c0 Update lib_webview 2024-02-08 11:45:09 +04:00
Ilya Fedin
29debc07c4 Let specify arbitrary build configuration via Docker
Default to RelWithDebInfo for both optimizations and debug information
2024-02-06 20:31:44 +04:00
Ilya Fedin
5334096d68 Fix bot webview height with fractional scaling 2024-02-06 10:25:27 +04:00
Ilya Fedin
bb3f8fbbe8 Check whether webview is destructed after Webview::Window::init 2024-02-05 16:22:52 +04:00
John Preston
2f0b50cb37 Version 4.14.13: Always send webpages optionally.
If the link cannot be displayed everywhere
(channel blocked in some countries) the
send request without `optional` flag fails.
2024-02-02 21:41:19 +04:00
John Preston
6185fa980d Version 4.14.13.
- Fix display of statistics for single posts.
- Allow editing tag name from search tags.
- Fix a crash in tags removal.
2024-02-02 21:38:23 +04:00
John Preston
1cce1e8a90 Fix a crash in tags removal. 2024-02-02 21:33:58 +04:00
23rd
237baf11df Fixed display of statistics for single posts. 2024-02-02 21:33:46 +04:00
Ilya Fedin
46023f4260 Update lib_webrtc 2024-02-02 20:46:54 +04:00
John Preston
8459c29073 Allow editing tag name from search tags. 2024-02-02 11:25:04 +04:00
John Preston
f072173d7c Fix tags for just-sent Saved Messages. 2024-02-02 09:52:32 +04:00
John Preston
bec34c34b1 Version 4.14.12: Fix build with GCC. 2024-02-01 21:13:07 +04:00
John Preston
04ab148b2e Version 4.14.12.
- Tags in Saved Messages.
- Audio output device selection for music and videos.
- Audio input device selection for voice messages recording.
- Default device changes should be applied instantly. (Windows / macOS)
2024-02-01 17:52:37 +04:00
John Preston
e314abefb8 Fix build with Xcode. 2024-02-01 17:52:37 +04:00
John Preston
cdaa23363f Allow reaction reports in public groups. 2024-02-01 17:52:37 +04:00
John Preston
5401d00548 Allow reporting / banning from reactions in groups. 2024-02-01 17:30:40 +04:00
John Preston
c06699e8e7 Update API scheme on layer 173. 2024-02-01 12:47:52 +04:00
John Preston
2f40a44b5c Improve things for OpenAL devices management. 2024-02-01 12:37:34 +04:00
John Preston
0945e04f6b Allow filtering by tag on click in sublists. 2024-02-01 12:37:34 +04:00
John Preston
db7c16f82b Fix about message reaction text color. 2024-02-01 12:37:34 +04:00
John Preston
30548c2859 Support tags search in sublists. 2024-02-01 12:37:34 +04:00
John Preston
0163938e00 Add shadow below tags list widget. 2024-02-01 12:37:34 +04:00
John Preston
3421b656db Fix saved tag click in separate window messages. 2024-02-01 12:37:34 +04:00
John Preston
39b80c98c7 Support tags in ComposeSearch. 2024-02-01 12:37:34 +04:00
John Preston
323500f6dd Improve design of locked tags. 2024-02-01 12:37:34 +04:00
John Preston
11cf0486cb Implement required paywalls in tags. 2024-02-01 12:37:34 +04:00
John Preston
46579ac84d Support about text above reactions strip. 2024-02-01 12:37:34 +04:00
John Preston
f324c53440 New premium promo section about tags. 2024-02-01 12:37:34 +04:00
John Preston
87df90227e Remove legacy premium reactions code. 2024-02-01 12:37:34 +04:00
John Preston
6f57302562 Track tag counts in all sublists. 2024-02-01 12:37:34 +04:00
John Preston
d116c8fea0 Allow editing tag names in Saved Messages. 2024-02-01 12:37:34 +04:00
John Preston
32462fca9b Show tag names in Saved Messages. 2024-02-01 12:37:34 +04:00
John Preston
55a174190e Fix reactions expanding. 2024-02-01 12:37:34 +04:00
John Preston
9f738cded2 Fix saved messages sublists pinning updates. 2024-02-01 12:37:34 +04:00
John Preston
87c1329490 Re-enable tags in Saved Messages. 2024-02-01 12:37:34 +04:00
John Preston
ded2015dc2 Update API scheme to layer 173. 2024-02-01 12:37:34 +04:00
23rd
98a71cce89 Improved toast phrase for forwarded messages to self. 2024-02-01 12:37:34 +04:00
23rd
a3ba1ba2a1 Fixed ability to create group without ttl even with ttl in settings. 2024-02-01 12:37:33 +04:00
23rd
42842619b0 Added external link to username field when it is available for purchase. 2024-02-01 12:37:33 +04:00
23rd
32d3b90cdc Replaced behavior of statistics info to wait all charts. 2024-02-01 12:37:33 +04:00
GitHub Action
b182aeb51e Update User-Agent for DNS to Chrome 121.0.6167.85. 2024-02-01 12:37:25 +04:00
Ilya Fedin
7049929a59 Update libproxy 2024-01-31 23:27:41 +04:00
Ilya Fedin
aa5413da4e Ensure cant reproduce label is not automatically removed 2024-01-31 11:04:04 +04:00
Zephyr Lykos
32dfe0f65e Update cppgir to fix nullptr dereference in glib 2.79 2024-01-31 11:03:36 +04:00
Ilya Fedin
5d33290218 Shorten socket paths on Linux 2024-01-29 11:46:12 +04:00
Ilya Fedin
852196a013 Add cant reproduce label handling 2024-01-27 08:52:14 +04:00
Ilya Fedin
0ff3d4b2ed Add needs user action label handling 2024-01-27 08:52:14 +04:00
Ilya Fedin
b843dab87a Get error message from the right object when copying log file 2024-01-27 07:17:24 +04:00
23rd
20bd7db4d9 Slightly improved display of top label in Premium Settings for user. 2024-01-25 19:28:31 +04:00
23rd
92f0358800 Fixed text insertion from menu bar on macOS when input field is hidden. 2024-01-25 19:28:30 +04:00
John Preston
0372f2be9c Beta version 4.14.11.
- Fix crash when accepting calls.
- Fix possible crash in loopback audio on Windows.
2024-01-25 19:26:55 +04:00
John Preston
7dfb93f7c2 Add some assertions for debugging a crash. 2024-01-25 18:30:21 +04:00
John Preston
3c150d9742 Fix possible crash in loopback audio on Windows. 2024-01-25 18:30:21 +04:00
John Preston
3c5cace175 Fix chats list bottom buttons skip. 2024-01-25 17:55:10 +04:00
John Preston
7e7fd6f1b4 Fix crash in accepting calls. 2024-01-25 17:54:31 +04:00
John Preston
eebe7adbcb Beta version 4.14.10.
- Allow choosing audio device for music and video files.
- Allow choosing microphone device for voice recording.
- Allow pause and resume voice messages recording.
- Track audio device changes on Windows and macOS.
2024-01-25 11:57:58 +04:00
23rd
5561bdeb5e Fixed top of ttl button in voice record bar with float media player. 2024-01-25 11:57:58 +04:00
John Preston
a670095294 Update patched OpenAL on macOS / Windows. 2024-01-25 11:57:58 +04:00
John Preston
104ba4db7c Fully migrate to Webrtc::Environment. 2024-01-25 11:57:58 +04:00
John Preston
9a6ab3b0f2 Improve call device migration. 2024-01-25 11:04:27 +04:00
John Preston
e5b89b1572 Separate settings for calls. 2024-01-25 11:04:27 +04:00
John Preston
65a80766f4 Implement media device tracking on macOS. 2024-01-25 11:04:27 +04:00
John Preston
736b489eb7 Fix build of updated tgcalls on macOS. 2024-01-25 11:04:27 +04:00
John Preston
d3778f92d2 Implement media devices tracking on Windows. 2024-01-25 11:04:27 +04:00
23rd
30e694420a Added icon to button for resuming of voice recording. 2024-01-25 11:04:27 +04:00
23rd
091c13bc23 Added initial ability to pause and resume voice recording. 2024-01-25 11:04:27 +04:00
23rd
5130c5df80 Replaced display of stop icon in voice record bar with pause icon. 2024-01-25 11:04:27 +04:00
23rd
7542f04010 Added specific phrases to chats list for voice messages with ttl. 2024-01-25 11:04:27 +04:00
23rd
dff4191ac9 Added reward phrase to about label in gift box. 2024-01-25 11:04:27 +04:00
23rd
15e2874da8 Fixed display of loading progress in voice messages with ttl. 2024-01-25 11:04:27 +04:00
John Preston
8eb49d5efc Fix possible connection problem in calls. 2024-01-22 11:06:42 +04:00
190 changed files with 3709 additions and 1728 deletions

View File

@@ -85,7 +85,7 @@ jobs:
docker run --rm \
-v $PWD:/usr/src/tdesktop \
-e DEBUG=1 \
-e CONFIG=Debug \
tdesktop:centos_env \
/usr/src/tdesktop/Telegram/build/docker/centos_env/build.sh \
-D CMAKE_C_FLAGS_DEBUG="" \
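A local-invocation sketch of the same mechanism (not part of the diff): per the commit message, CONFIG selects the CMake build configuration and defaults to RelWithDebInfo when omitted, so the value below is only an example and the extra cmake defines from the CI job are dropped.
# CONFIG may be any CMake build type; RelWithDebInfo is the stated default (example value, not from the diff)
docker run --rm \
	-v $PWD:/usr/src/tdesktop \
	-e CONFIG=RelWithDebInfo \
	tdesktop:centos_env \
	/usr/src/tdesktop/Telegram/build/docker/centos_env/build.sh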

View File

@@ -9,11 +9,38 @@ on:
- cron: '0 0 * * *'
jobs:
noResponse:
waiting-for-answer:
runs-on: ubuntu-latest
steps:
- uses: lee-dohm/no-response@v0.5.0
with:
token: ${{ github.token }}
# Label requiring a response
responseRequiredLabel: waiting for answer
needs-user-action:
runs-on: ubuntu-latest
steps:
- uses: lee-dohm/no-response@v0.5.0
with:
token: ${{ github.token }}
responseRequiredLabel: needs user action
cant-reproduce:
if: github.event_name != 'issue_comment'
runs-on: ubuntu-latest
steps:
- uses: lee-dohm/no-response@v0.5.0
with:
token: ${{ github.token }}
responseRequiredLabel: cant reproduce
closeComment: >
This issue has been automatically closed because no developer succeeded to
reproduce the issue with the given reproduction steps. With only the
information that is currently in the issue, we don't have enough
information to take action. Please reach out if you find what's missing to
reproduce the issue so that we can investigate further.
Note that GitHub is a developer communication platform. If you're an ordinary
user seeking for help, get to support crew via `Settings -> Ask question` in
the application.

View File

@@ -1019,6 +1019,7 @@ PRIVATE
media/audio/media_audio.h
media/audio/media_audio_capture.cpp
media/audio/media_audio_capture.h
media/audio/media_audio_capture_common.h
media/audio/media_audio_ffmpeg_loader.cpp
media/audio/media_audio_ffmpeg_loader.h
media/audio/media_audio_loader.cpp
@@ -1234,8 +1235,6 @@ PRIVATE
platform/mac/touchbar/mac_touchbar_manager.mm
platform/mac/touchbar/mac_touchbar_media_view.h
platform/mac/touchbar/mac_touchbar_media_view.mm
platform/win/audio_win.cpp
platform/win/audio_win.h
platform/win/file_utilities_win.cpp
platform/win/file_utilities_win.h
platform/win/launcher_win.cpp
@@ -1259,7 +1258,6 @@ PRIVATE
platform/win/windows_autostart_task.h
platform/win/windows_toast_activator.cpp
platform/win/windows_toast_activator.h
platform/platform_audio.h
platform/platform_file_utilities.h
platform/platform_launcher.h
platform/platform_integration.cpp

21 binary image files added (contents not shown; sizes range from 212 B to 1.9 KiB).

View File

@@ -433,7 +433,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_username_invalid" = "This username is invalid.";
"lng_username_occupied" = "This username is already occupied.";
"lng_username_too_short" = "This username is too short.";
"lng_username_purchase_available" = "Sorry, this link is occupied by someone. But it's available for purchase through\nofficial {link}.";
"lng_username_purchase_available" = "**This username is already taken.** However, it is currently available for purchase. {link}";
"lng_username_purchase_available_link" = "Learn more...";
"lng_username_bad_symbols" = "Only a-z, 0-9, and underscores allowed.";
"lng_username_available" = "This username is available.";
"lng_username_not_found" = "User @{user} not found.";
@@ -630,6 +631,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_settings_call_accept_calls" = "Accept calls from this device";
"lng_settings_call_device_default" = "Same as the System";
"lng_settings_section_devices" = "Speakers and Camera";
"lng_settings_devices_calls" = "Calls and video chats";
"lng_settings_devices_calls_same" = "Use the same devices for calls";
"lng_settings_devices_inactive" = "Unavailable";
"lng_settings_language" = "Language";
"lng_settings_default_scale" = "Default interface scale";
"lng_settings_connection_type" = "Connection type";
@@ -1499,6 +1505,11 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_report_messages_none" = "Select Messages";
"lng_report_messages_count#one" = "Report {count} Message";
"lng_report_messages_count#other" = "Report {count} Messages";
"lng_report_reaction" = "Report reaction";
"lng_report_and_ban" = "Ban and report";
"lng_report_reaction_title" = "Report reaction";
"lng_report_reaction_about" = "Are you sure you want to report reactions from this user?";
"lng_report_and_ban_button" = "Ban user";
"lng_report_details_about" = "Please enter any additional details relevant to your report.";
"lng_report_details" = "Additional Details";
"lng_report_reason_spam" = "Spam";
@@ -2020,6 +2031,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_premium_summary_about_emoji_status" = "Add any of thousands emoji next to your name to display current activity.";
"lng_premium_summary_subtitle_infinite_reactions" = "Infinite Reactions";
"lng_premium_summary_about_infinite_reactions" = "React with thousands of emoji — with multiple reactions per message.";
"lng_premium_summary_subtitle_tags_for_messages" = "Tags for Messages";
"lng_premium_summary_about_tags_for_messages" = "Organize your Saved Messages with tags for quicker access.";
"lng_premium_summary_subtitle_premium_stickers" = "Premium Stickers";
"lng_premium_summary_about_premium_stickers" = "Exclusive enlarged stickers featuring additional effects, updated monthly.";
"lng_premium_summary_subtitle_animated_emoji" = "Animated Emoji";
@@ -2440,6 +2453,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_in_dlg_contact" = "Contact";
"lng_in_dlg_audio" = "Voice message";
"lng_in_dlg_video_message" = "Video message";
"lng_in_dlg_voice_message_ttl" = "One-time Voice Message";
"lng_in_dlg_video_message_ttl" = "One-time Video Message";
"lng_in_dlg_file" = "File";
"lng_in_dlg_sticker" = "Sticker";
"lng_in_dlg_sticker_emoji" = "{emoji} Sticker";
@@ -2737,6 +2752,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_context_copy_email" = "Copy Email Address";
"lng_context_copy_hashtag" = "Copy Hashtag";
"lng_context_copy_mention" = "Copy Username";
"lng_context_copy_filename" = "Copy Filename";
"lng_context_save_image" = "Save Image As...";
"lng_context_copy_image" = "Copy Image";
"lng_context_cancel_download" = "Cancel Download";
@@ -2793,6 +2809,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_context_seen_reacted_all" = "Show All Reactions";
"lng_context_set_as_quick" = "Set as Quick";
"lng_context_filter_by_tag" = "Filter by Tag";
"lng_context_tag_add_name" = "Add Name";
"lng_context_tag_edit_name" = "Edit Name";
"lng_context_remove_tag" = "Remove Tag";
"lng_context_delete_from_disk" = "Delete from disk";
"lng_context_delete_all_files" = "Delete all files";
@@ -2802,10 +2820,21 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_context_read_hidden" = "read";
"lng_context_read_show" = "show when";
"lng_add_tag_about" = "Tag this message with an emoji for quick search.";
"lng_subscribe_tag_about" = "Organize your Saved Messages with tags. {link}";
"lng_subscribe_tag_link" = "Learn More...";
"lng_edit_tag_about" = "You can label your emoji tag with a text name.";
"lng_edit_tag_name" = "Name";
"lng_add_tag_button" = "Add tags";
"lng_add_tag_phrase" = "to messages {arrow}";
"lng_add_tag_phrase_long" = "to your Saved Messages {arrow}";
"lng_unlock_tags" = "Unlock";
"lng_context_animated_emoji" = "This message contains emoji from **{name} pack**.";
"lng_context_animated_emoji_many#one" = "This message contains emoji from **{count} pack**.";
"lng_context_animated_emoji_many#other" = "This message contains emoji from **{count} packs**.";
"lng_context_animated_reaction" = "This reaction is from **{name} pack**.";
"lng_context_animated_tag" = "This tag is from **{name} pack**.";
"lng_context_animated_reactions" = "Reactions contain emoji from **{name} pack**.";
"lng_context_animated_reactions_many#one" = "Reactions contain emoji from **{count} pack**.";
"lng_context_animated_reactions_many#other" = "Reactions contain emoji from **{count} packs**.";
@@ -4456,6 +4485,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
"lng_stories_channel_archive_done_many#one" = "{count} story is hidden from the channel page.";
"lng_stories_channel_archive_done_many#other" = "{count} stories are hidden from the channel page.";
"lng_stories_save_promo" = "Subscribe to {link} to download other people's unprotected stories to disk.";
"lng_stories_reaction_as_message" = "Send reaction as a private message";
"lng_stealth_mode_menu_item" = "Stealth Mode";
"lng_stealth_mode_title" = "Stealth Mode";

View File

@@ -10,7 +10,7 @@
<Identity Name="TelegramMessengerLLP.TelegramDesktop"
ProcessorArchitecture="ARCHITECTURE"
Publisher="CN=536BC709-8EE1-4478-AF22-F0F0F26FF64A"
Version="4.14.9.0" />
Version="4.14.14.0" />
<Properties>
<DisplayName>Telegram Desktop</DisplayName>
<PublisherDisplayName>Telegram Messenger LLP</PublisherDisplayName>

View File

@@ -44,8 +44,8 @@ IDI_ICON1 ICON "..\\art\\icon256.ico"
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 4,14,9,0
PRODUCTVERSION 4,14,9,0
FILEVERSION 4,14,14,0
PRODUCTVERSION 4,14,14,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -62,10 +62,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop"
VALUE "FileVersion", "4.14.9.0"
VALUE "FileVersion", "4.14.14.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2024"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "4.14.9.0"
VALUE "ProductVersion", "4.14.14.0"
END
END
BLOCK "VarFileInfo"

View File

@@ -35,8 +35,8 @@ LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
//
VS_VERSION_INFO VERSIONINFO
FILEVERSION 4,14,9,0
PRODUCTVERSION 4,14,9,0
FILEVERSION 4,14,14,0
PRODUCTVERSION 4,14,14,0
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
@@ -53,10 +53,10 @@ BEGIN
BEGIN
VALUE "CompanyName", "Telegram FZ-LLC"
VALUE "FileDescription", "Telegram Desktop Updater"
VALUE "FileVersion", "4.14.9.0"
VALUE "FileVersion", "4.14.14.0"
VALUE "LegalCopyright", "Copyright (C) 2014-2024"
VALUE "ProductName", "Telegram Desktop"
VALUE "ProductVersion", "4.14.9.0"
VALUE "ProductVersion", "4.14.14.0"
END
END
BLOCK "VarFileInfo"

View File

@@ -10,6 +10,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "apiwrap.h"
#include "data/data_channel.h"
#include "data/data_histories.h"
#include "data/data_message_reaction_id.h"
#include "data/data_peer.h"
#include "data/data_session.h"
#include "history/history.h"
@@ -43,6 +44,23 @@ constexpr auto kSearchPerPage = 50;
return result;
}
[[nodiscard]] QString RequestToToken(
const MessagesSearch::Request &request) {
auto result = request.query;
if (request.from) {
result += '\n' + QString::number(request.from->id.value);
}
for (const auto &tag : request.tags) {
result += '\n';
if (const auto customId = tag.custom()) {
result += u"custom"_q + QString::number(customId);
} else {
result += u"emoji"_q + tag.emoji();
}
}
return result;
}
} // namespace
MessagesSearch::MessagesSearch(not_null<History*> history)
@@ -54,9 +72,8 @@ MessagesSearch::~MessagesSearch() {
base::take(_searchInHistoryRequest));
}
void MessagesSearch::searchMessages(const QString &query, PeerData *from) {
_query = query;
_from = from;
void MessagesSearch::searchMessages(Request request) {
_request = std::move(request);
_offsetId = {};
searchRequest();
}
@@ -69,8 +86,7 @@ void MessagesSearch::searchMore() {
}
void MessagesSearch::searchRequest() {
const auto nextToken = _query
+ QString::number(_from ? _from->id.value : 0);
const auto nextToken = RequestToToken(_request);
if (!_offsetId) {
const auto it = _cacheOfStartByToken.find(nextToken);
if (it != end(_cacheOfStartByToken)) {
@@ -80,18 +96,21 @@ void MessagesSearch::searchRequest() {
}
}
auto callback = [=](Fn<void()> finish) {
const auto flags = _from
? MTP_flags(MTPmessages_Search::Flag::f_from_id)
: MTP_flags(0);
using Flag = MTPmessages_Search::Flag;
const auto from = _request.from;
const auto fromPeer = _history->peer->isUser() ? nullptr : from;
const auto savedPeer = _history->peer->isSelf() ? from : nullptr;
_requestId = _history->session().api().request(MTPmessages_Search(
flags,
MTP_flags((fromPeer ? Flag::f_from_id : Flag())
| (savedPeer ? Flag::f_saved_peer_id : Flag())
| (_request.tags.empty() ? Flag() : Flag::f_saved_reaction)),
_history->peer->input,
MTP_string(_query),
(_from
? _from->input
: MTP_inputPeerEmpty()),
MTPInputPeer(), // saved_peer_id
MTPVector<MTPReaction>(), // saved_reaction
MTP_string(_request.query),
(fromPeer ? fromPeer->input : MTP_inputPeerEmpty()),
(savedPeer ? savedPeer->input : MTP_inputPeerEmpty()),
MTP_vector_from_range(_request.tags | ranges::views::transform(
Data::ReactionToMTP
)),
MTPint(), // top_msg_id
MTP_inputMessagesFilterEmpty(),
MTP_int(0), // min_date

View File

@@ -7,10 +7,17 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#pragma once
#include "base/qt/qt_compare.h"
#include "data/data_message_reaction_id.h"
class HistoryItem;
class History;
class PeerData;
namespace Data {
struct ReactionId;
} // namespace Data
namespace Api {
struct FoundMessages {
@@ -21,10 +28,23 @@ struct FoundMessages {
class MessagesSearch final {
public:
struct Request {
QString query;
PeerData *from = nullptr;
std::vector<Data::ReactionId> tags;
friend inline bool operator==(
const Request &,
const Request &) = default;
friend inline auto operator<=>(
const Request &,
const Request &) = default;
};
explicit MessagesSearch(not_null<History*> history);
~MessagesSearch();
void searchMessages(const QString &query, PeerData *from);
void searchMessages(Request request);
void searchMore();
[[nodiscard]] rpl::producer<FoundMessages> messagesFounds() const;
@@ -41,8 +61,7 @@ private:
base::flat_map<QString, TLMessages> _cacheOfStartByToken;
QString _query;
PeerData *_from = nullptr;
Request _request;
MsgId _offsetId;
int _searchInHistoryRequest = 0; // Not real mtpRequestId.
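A minimal caller-side sketch of the Request-based API introduced above (illustration only, not from the diff; the history pointer, custom emoji id, query text and lifetime are assumed to come from the caller):
// Sketch: search this history for "invoice", filtered by one saved-messages tag.
// 'history', 'customEmojiId' and 'lifetime' are assumptions supplied by the caller.
auto search = Api::MessagesSearch(history);
search.messagesFounds(
) | rpl::start_with_next([](const Api::FoundMessages &found) {
	// Consume the found messages; call searchMore() for the next page.
}, lifetime);
search.searchMessages({
	.query = u"invoice"_q,
	.from = nullptr,
	.tags = { Data::ReactionId{ customEmojiId } },
});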

View File

@@ -11,12 +11,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
namespace Api {
bool MessagesSearchMerged::RequestCompare::operator()(
const Request &a,
const Request &b) const {
return (a.query < b.query) && (a.from < b.from);
}
MessagesSearchMerged::MessagesSearchMerged(not_null<History*> history)
: _apiSearch(history) {
if (const auto migrated = history->migrateFrom()) {
@@ -88,9 +82,9 @@ void MessagesSearchMerged::clear() {
void MessagesSearchMerged::search(const Request &search) {
if (_migratedSearch) {
_waitingForTotal = true;
_migratedSearch->searchMessages(search.query, search.from);
_migratedSearch->searchMessages(search);
}
_apiSearch.searchMessages(search.query, search.from);
_apiSearch.searchMessages(search);
}
void MessagesSearchMerged::searchMore() {

View File

@@ -12,19 +12,17 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
class History;
class PeerData;
namespace Data {
struct ReactionId;
} // namespace Data
namespace Api {
// Search in both of history and migrated history, if it exists.
class MessagesSearchMerged final {
public:
struct Request {
QString query;
PeerData *from = nullptr;
};
struct RequestCompare {
bool operator()(const Request &a, const Request &b) const;
};
using CachedRequests = std::set<Request, RequestCompare>;
using Request = MessagesSearch::Request;
using CachedRequests = base::flat_set<Request>;
MessagesSearchMerged(not_null<History*> history);

View File

@@ -12,12 +12,10 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/random.h"
#include "ui/boxes/confirm_box.h"
#include "boxes/abstract_box.h"
#include "boxes/peer_list_controllers.h"
#include "boxes/premium_limits_box.h"
#include "boxes/peers/add_participants_box.h"
#include "boxes/peers/edit_peer_common.h"
#include "boxes/peers/edit_participant_box.h"
#include "boxes/peers/edit_participants_box.h"
#include "core/application.h"
#include "core/core_settings.h"
#include "chat_helpers/emoji_suggestions_widget.h"
@@ -27,34 +25,26 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "menu/menu_ttl.h"
#include "ui/controls/userpic_button.h"
#include "ui/widgets/checkbox.h"
#include "ui/widgets/buttons.h"
#include "ui/widgets/labels.h"
#include "ui/toast/toast.h"
#include "ui/widgets/fields/input_field.h"
#include "ui/widgets/fields/special_fields.h"
#include "ui/widgets/popup_menu.h"
#include "ui/text/format_values.h"
#include "ui/text/text_options.h"
#include "ui/text/text_utilities.h"
#include "ui/unread_badge.h"
#include "ui/ui_utility.h"
#include "ui/painter.h"
#include "data/data_channel.h"
#include "data/data_chat.h"
#include "data/data_user.h"
#include "data/data_session.h"
#include "data/data_changes.h"
#include "data/data_cloud_file.h"
#include "apiwrap.h"
#include "api/api_invite_links.h"
#include "api/api_peer_photo.h"
#include "api/api_self_destruct.h"
#include "main/main_session.h"
#include "styles/style_info.h"
#include "styles/style_layers.h"
#include "styles/style_menu_icons.h"
#include "styles/style_boxes.h"
#include "styles/style_dialogs.h"
#include "styles/style_widgets.h"
#include <QtGui/QGuiApplication>
#include <QtGui/QClipboard>
@@ -599,6 +589,8 @@ void GroupInfoBox::prepare() {
addButton(tr::lng_cancel(), [this] { closeBox(); });
if (_type == Type::Group) {
_navigation->session().api().selfDestruct().reload();
const auto top = addTopButton(st::infoTopBarMenu);
const auto menu =
top->lifetime().make_state<base::unique_qptr<Ui::PopupMenu>>();
@@ -607,21 +599,21 @@ void GroupInfoBox::prepare() {
top,
st::popupMenuWithIcons);
const auto ttl = ttlPeriod();
const auto text = tr::lng_manage_messages_ttl_menu(tr::now)
+ (_ttlPeriod
? ('\t' + Ui::FormatTTLTiny(_ttlPeriod))
: QString());
+ (ttl ? ('\t' + Ui::FormatTTLTiny(ttl)) : QString());
(*menu)->addAction(
text,
[=, show = uiShow()] {
show->showBox(Box(TTLMenu::TTLBox, TTLMenu::Args{
.show = show,
.startTtl = _ttlPeriod,
.startTtl = ttlPeriod(),
.about = nullptr,
.callback = crl::guard(this, [=](
TimeId t,
Fn<void()> close) {
_ttlPeriod = t;
_ttlPeriodOverridden = true;
close();
}),
}));
@@ -687,6 +679,13 @@ void GroupInfoBox::submitName() {
}
}
TimeId GroupInfoBox::ttlPeriod() const {
return _ttlPeriodOverridden
? _ttlPeriod
: _navigation->session().api().selfDestruct()
.periodDefaultHistoryTTLCurrent();
}
void GroupInfoBox::createGroup(
QPointer<Ui::BoxContent> selectUsersBox,
const QString &title,
@@ -705,15 +704,13 @@ void GroupInfoBox::createGroup(
}
}
_creationRequestId = _api.request(MTPmessages_CreateChat(
MTP_flags(_ttlPeriod
? MTPmessages_CreateChat::Flag::f_ttl_period
: MTPmessages_CreateChat::Flags(0)),
MTP_flags(MTPmessages_CreateChat::Flag::f_ttl_period),
MTP_vector<TLUsers>(inputs),
MTP_string(title),
MTP_int(_ttlPeriod)
MTP_int(ttlPeriod())
)).done([=](const MTPUpdates &result) {
auto image = _photo->takeResultImage();
const auto period = _ttlPeriod;
const auto period = ttlPeriod();
const auto navigation = _navigation;
const auto done = _done;
@@ -799,16 +796,17 @@ void GroupInfoBox::createChannel(
? Flag::f_megagroup
: Flag::f_broadcast)
| ((_type == Type::Forum) ? Flag::f_forum : Flag())
| ((_type == Type::Megagroup && _ttlPeriod)
| ((_type == Type::Megagroup)
? MTPchannels_CreateChannel::Flag::f_ttl_period
: MTPchannels_CreateChannel::Flags(0));
const auto ttl = ttlPeriod();
_creationRequestId = _api.request(MTPchannels_CreateChannel(
MTP_flags(flags),
MTP_string(title),
MTP_string(description),
MTPInputGeoPoint(), // geo_point
MTPstring(), // address
MTP_int((_type == Type::Megagroup) ? _ttlPeriod : 0)
MTP_int((_type == Type::Megagroup) ? ttl : 0)
)).done([=](const MTPUpdates &result) {
_navigation->session().api().applyUpdates(result);
@@ -841,8 +839,8 @@ void GroupInfoBox::createChannel(
channel,
{ std::move(image) });
}
if (_ttlPeriod && channel->isMegagroup()) {
channel->setMessagesTTL(_ttlPeriod);
if (ttl && channel->isMegagroup()) {
channel->setMessagesTTL(ttl);
}
channel->session().api().requestFullPeer(channel);
_createdChannel = channel;

View File

@@ -132,6 +132,8 @@ private:
void descriptionResized();
void updateMaxHeight();
[[nodiscard]] TimeId ttlPeriod() const;
const not_null<Window::SessionNavigation*> _navigation;
MTP::Sender _api;
@@ -150,6 +152,7 @@ private:
bool _creatingInviteLink = false;
ChannelData *_createdChannel = nullptr;
TimeId _ttlPeriod = 0;
bool _ttlPeriodOverridden = false;
};

View File

@@ -221,7 +221,7 @@ void DeleteMessagesBox::prepare() {
? QString()
: QString(" (%1)").arg(total));
});
search->searchMessages(QString(), _moderateFrom);
search->searchMessages({ .from = _moderateFrom });
}
} else {
details.text = (_ids.size() == 1)

View File

@@ -83,6 +83,31 @@ GiftOptions GiftOptionFromTL(const MTPDuserFull &data) {
return result;
}
[[nodiscard]] Fn<TextWithEntities(TextWithEntities)> BoostsForGiftText(
const std::vector<not_null<UserData*>> users) {
Expects(!users.empty());
const auto session = &users.front()->session();
const auto emoji = Ui::Text::SingleCustomEmoji(
session->data().customEmojiManager().registerInternalEmoji(
st::premiumGiftsBoostIcon,
QMargins(0, st::premiumGiftsUserpicBadgeInner, 0, 0),
false));
return [=, count = users.size()](TextWithEntities text) {
text.append('\n');
text.append('\n');
text.append(tr::lng_premium_gifts_about_reward(
tr::now,
lt_count,
count * BoostsForGift(session),
lt_emoji,
emoji,
Ui::Text::RichLangValue));
return text;
};
}
using TagUser1 = lngtag_user;
using TagUser2 = lngtag_second_user;
using TagUser3 = lngtag_name;
@@ -293,16 +318,21 @@ void GiftBox(
std::move(titleLabel)),
st::premiumGiftTitlePadding);
auto textLabel = object_ptr<Ui::FlatLabel>(
box,
tr::lng_premium_gift_about(
lt_user,
user->session().changes().peerFlagsValue(
user,
Data::PeerUpdate::Flag::Name
) | rpl::map([=] { return TextWithEntities{ user->firstName }; }),
Ui::Text::RichLangValue),
st::premiumPreviewAbout);
auto textLabel = object_ptr<Ui::FlatLabel>(box, st::premiumPreviewAbout);
tr::lng_premium_gift_about(
lt_user,
user->session().changes().peerFlagsValue(
user,
Data::PeerUpdate::Flag::Name
) | rpl::map([=] { return TextWithEntities{ user->firstName }; }),
Ui::Text::RichLangValue
) | rpl::map(
BoostsForGiftText({ user })
) | rpl::start_with_next([
raw = textLabel.data(),
session = &user->session()](const TextWithEntities &t) {
raw->setMarkedText(t, Core::MarkedTextContext{ .session = session });
}, textLabel->lifetime());
textLabel->setTextColorOverride(stTitle.textFg->c);
textLabel->resizeToWidth(available);
box->addRow(
@@ -476,11 +506,6 @@ void GiftsBox(
// About.
{
const auto emoji = Ui::Text::SingleCustomEmoji(
session->data().customEmojiManager().registerInternalEmoji(
st::premiumGiftsBoostIcon,
QMargins(0, st::premiumGiftsUserpicBadgeInner, 0, 0),
false));
auto text = rpl::conditional(
state->isPaymentComplete.value(),
ComplexAboutLabel(
@@ -505,18 +530,7 @@ void GiftsBox(
tr::lng_premium_gifts_about_user2,
tr::lng_premium_gifts_about_user3,
tr::lng_premium_gifts_about_user_more
) | rpl::map([=, count = users.size()](TextWithEntities text) {
text.append('\n');
text.append('\n');
text.append(tr::lng_premium_gifts_about_reward(
tr::now,
lt_count,
count * BoostsForGift(session),
lt_emoji,
emoji,
Ui::Text::RichLangValue));
return text;
})
) | rpl::map(BoostsForGiftText(users))
);
const auto label = box->addRow(
object_ptr<Ui::CenterWrap<Ui::FlatLabel>>(

View File

@@ -585,9 +585,8 @@ void Controller::checkUsernameAvailability() {
showUsernameError(tr::lng_create_channel_link_invalid());
} else if (type == u"USERNAME_PURCHASE_AVAILABLE"_q) {
_goodUsername = false;
_usernameCheckInfo.fire({
.type = UsernameCheckInfo::Type::PurchaseAvailable,
});
_usernameCheckInfo.fire(
UsernameCheckInfo::PurchaseAvailable(checking, _peer));
} else if (type == u"USERNAME_OCCUPIED"_q && checking != username) {
showUsernameError(tr::lng_create_channel_link_occupied());
}

View File

@@ -110,6 +110,8 @@ void PreloadSticker(const std::shared_ptr<Data::DocumentMedia> &media) {
return tr::lng_premium_summary_subtitle_emoji_status();
case PremiumPreview::InfiniteReactions:
return tr::lng_premium_summary_subtitle_infinite_reactions();
case PremiumPreview::TagsForMessages:
return tr::lng_premium_summary_subtitle_tags_for_messages();
case PremiumPreview::Stickers:
return tr::lng_premium_summary_subtitle_premium_stickers();
case PremiumPreview::AnimatedEmoji:
@@ -146,6 +148,8 @@ void PreloadSticker(const std::shared_ptr<Data::DocumentMedia> &media) {
return tr::lng_premium_summary_about_emoji_status();
case PremiumPreview::InfiniteReactions:
return tr::lng_premium_summary_about_infinite_reactions();
case PremiumPreview::TagsForMessages:
return tr::lng_premium_summary_about_tags_for_messages();
case PremiumPreview::Stickers:
return tr::lng_premium_summary_about_premium_stickers();
case PremiumPreview::AnimatedEmoji:
@@ -471,6 +475,7 @@ struct VideoPreviewDocument {
return "advanced_chat_management";
case PremiumPreview::EmojiStatus: return "emoji_status";
case PremiumPreview::InfiniteReactions: return "infinite_reactions";
case PremiumPreview::TagsForMessages: return "saved_tags";
case PremiumPreview::ProfileBadge: return "profile_badge";
case PremiumPreview::AnimatedUserpics: return "animated_userpics";
case PremiumPreview::RealTimeTranslation: return "translations";

View File

@@ -61,6 +61,7 @@ enum class PremiumPreview {
AnimatedUserpics,
RealTimeTranslation,
Wallpapers,
TagsForMessages,
kCount,
};

View File

@@ -507,7 +507,6 @@ void ReactionsSettingsBox(
};
auto firstCheckedButton = (Ui::RpWidget*)(nullptr);
const auto premiumPossible = controller->session().premiumPossible();
auto list = reactions.list(Data::Reactions::Type::Active);
if (const auto favorite = reactions.favorite()) {
if (favorite->id.custom()) {
@@ -520,11 +519,6 @@ void ReactionsSettingsBox(
rpl::single<QString>(base::duplicate(r.title)),
st::settingsButton));
const auto premium = r.premium;
if (premium && !premiumPossible) {
continue;
}
const auto iconSize = st::settingsReactionSize;
const auto left = button->st().iconLeft;
auto iconPositionValue = button->sizeValue(
@@ -556,12 +550,6 @@ void ReactionsSettingsBox(
&button->lifetime());
}
button->setClickedCallback([=, id = r.id] {
if (premium && !controller->session().premium()) {
ShowPremiumPreviewBox(
controller,
PremiumPreview::InfiniteReactions);
return;
}
checkButton(button);
state->selectedId = id;
});

View File

@@ -33,17 +33,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
namespace {
[[nodiscard]] TextWithEntities PurchaseAvailableText() {
constexpr auto kUsernameAuction = "auction";
return tr::lng_username_purchase_available(
tr::now,
lt_link,
Ui::Text::Link(
'@' + QString(kUsernameAuction),
u"https://t.me/"_q + kUsernameAuction),
Ui::Text::RichLangValue);
}
class UsernameEditor final : public Ui::RpWidget {
public:
UsernameEditor(not_null<Ui::RpWidget*>, not_null<PeerData*> peer);
@@ -268,9 +257,8 @@ void UsernameEditor::checkInfoPurchaseAvailable() {
_username->showError();
_errorText = u".bad."_q;
_checkInfoChanged.fire({
.type = UsernameCheckInfo::Type::PurchaseAvailable,
});
_checkInfoChanged.fire(
UsernameCheckInfo::PurchaseAvailable(_checkUsername, _peer));
}
void UsernameEditor::updateFail(const QString &error) {
@@ -424,9 +412,7 @@ void AddUsernameCheckLabel(
container->widthValue()
) | rpl::start_with_next([=](const UsernameCheckInfo &info, int w) {
using Type = UsernameCheckInfo::Type;
label->setMarkedText((info.type == Type::PurchaseAvailable)
? PurchaseAvailableText()
: info.text);
label->setMarkedText(info.text);
const auto &color = (info.type == Type::Good)
? st::boxTextFgGood
: (info.type == Type::Error)
@@ -437,3 +423,25 @@ void AddUsernameCheckLabel(
}, label->lifetime());
Ui::AddSkip(container, skip);
}
UsernameCheckInfo UsernameCheckInfo::PurchaseAvailable(
const QString &username,
not_null<PeerData*> peer) {
if (const auto fragmentLink = AppConfig::FragmentLink(&peer->session())) {
return {
.type = UsernameCheckInfo::Type::Default,
.text = tr::lng_username_purchase_available(
tr::now,
lt_link,
Ui::Text::Link(
tr::lng_username_purchase_available_link(tr::now),
(*fragmentLink) + u"/username/"_q + username),
Ui::Text::RichLangValue),
};
} else {
return {
.type = UsernameCheckInfo::Type::Error,
.text = { u"INTERNAL_SERVER_ERROR"_q },
};
}
}

View File

@@ -19,11 +19,14 @@ void UsernamesBox(
not_null<PeerData*> peer);
struct UsernameCheckInfo final {
[[nodiscard]] static UsernameCheckInfo PurchaseAvailable(
const QString &username,
not_null<PeerData*> peer);
enum class Type {
Good,
Error,
Default,
PurchaseAvailable,
};
Type type;
TextWithEntities text;

View File

@@ -25,8 +25,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/audio/media_audio_track.h"
#include "base/platform/base_platform_info.h"
#include "calls/calls_panel.h"
#include "webrtc/webrtc_environment.h"
#include "webrtc/webrtc_video_track.h"
#include "webrtc/webrtc_media_devices.h"
#include "webrtc/webrtc_create_adm.h"
#include "data/data_user.h"
#include "data/data_session.h"
@@ -215,6 +215,22 @@ Call::Call(
, _api(&_user->session().mtp())
, _type(type)
, _discardByTimeoutTimer([=] { hangup(); })
, _playbackDeviceId(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Playback,
Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callPlaybackDeviceIdValue(),
Core::App().settings().playbackDeviceIdValue()))
, _captureDeviceId(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Capture,
Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callCaptureDeviceIdValue(),
Core::App().settings().captureDeviceIdValue()))
, _cameraDeviceId(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Camera,
Core::App().settings().cameraDeviceIdValue())
, _videoIncoming(
std::make_unique<Webrtc::VideoTrack>(
StartVideoState(video)))
@@ -228,6 +244,7 @@ Call::Call(
_discardByTimeoutTimer.callOnce(config.callRingTimeoutMs);
startWaitingTrack();
}
setupMediaDevices();
setupOutgoingVideo();
}
@@ -410,18 +427,39 @@ void Call::setMuted(bool mute) {
}
}
void Call::setupMediaDevices() {
_playbackDeviceId.changes() | rpl::filter([=] {
return _instance && _setDeviceIdCallback;
}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
_setDeviceIdCallback(deviceId);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioOutputDevice(deviceId.value.toStdString());
}, _lifetime);
_captureDeviceId.changes() | rpl::filter([=] {
return _instance && _setDeviceIdCallback;
}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
_setDeviceIdCallback(deviceId);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioInputDevice(deviceId.value.toStdString());
}, _lifetime);
}
void Call::setupOutgoingVideo() {
static const auto hasDevices = [] {
return !Webrtc::GetVideoInputList().empty();
const auto cameraId = [] {
return Core::App().mediaDevices().defaultId(
Webrtc::DeviceType::Camera);
};
const auto started = _videoOutgoing->state();
if (!hasDevices()) {
if (cameraId().isEmpty()) {
_videoOutgoing->setState(Webrtc::VideoState::Inactive);
}
_videoOutgoing->stateValue(
) | rpl::start_with_next([=](Webrtc::VideoState state) {
if (state != Webrtc::VideoState::Inactive
&& !hasDevices()
&& cameraId().isEmpty()
&& !_videoCaptureIsScreencast) {
_errors.fire({ ErrorType::NoCamera });
_videoOutgoing->setState(Webrtc::VideoState::Inactive);
@@ -455,6 +493,20 @@ void Call::setupOutgoingVideo() {
}
}
}, _lifetime);
_cameraDeviceId.changes(
) | rpl::filter([=] {
return !_videoCaptureIsScreencast;
}) | rpl::start_with_next([=](Webrtc::DeviceResolvedId deviceId) {
const auto &id = deviceId.value;
_videoCaptureDeviceId = id;
if (_videoCapture) {
_videoCapture->switchToDevice(id.toStdString(), false);
if (_instance) {
_instance->sendVideoDeviceUpdated();
}
}
}, _lifetime);
}
not_null<Webrtc::VideoTrack*> Call::videoIncoming() const {
@@ -848,6 +900,34 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
const auto versionString = version.toStdString();
const auto &settings = Core::App().settings();
const auto weak = base::make_weak(this);
_setDeviceIdCallback = nullptr;
const auto playbackDeviceIdInitial = _playbackDeviceId.current();
const auto captureDeviceIdInitial = _captureDeviceId.current();
const auto saveSetDeviceIdCallback = [=](
Fn<void(Webrtc::DeviceResolvedId)> setDeviceIdCallback) {
setDeviceIdCallback(playbackDeviceIdInitial);
setDeviceIdCallback(captureDeviceIdInitial);
crl::on_main(weak, [=] {
_setDeviceIdCallback = std::move(setDeviceIdCallback);
const auto playback = _playbackDeviceId.current();
if (_instance && playback != playbackDeviceIdInitial) {
_setDeviceIdCallback(playback);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioOutputDevice(
playback.value.toStdString());
}
const auto capture = _captureDeviceId.current();
if (_instance && capture != captureDeviceIdInitial) {
_setDeviceIdCallback(capture);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioInputDevice(capture.value.toStdString());
}
});
};
tgcalls::Descriptor descriptor = {
.version = versionString,
.config = tgcalls::Config{
@@ -866,8 +946,8 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
std::move(encryptionKeyValue),
(_type == Type::Outgoing)),
.mediaDevicesConfig = tgcalls::MediaDevicesConfig{
.audioInputId = settings.callInputDeviceId().toStdString(),
.audioOutputId = settings.callOutputDeviceId().toStdString(),
.audioInputId = captureDeviceIdInitial.value.toStdString(),
.audioOutputId = playbackDeviceIdInitial.value.toStdString(),
.inputVolume = 1.f,//settings.callInputVolume() / 100.f,
.outputVolume = 1.f,//settings.callOutputVolume() / 100.f,
},
@@ -898,7 +978,7 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
});
},
.createAudioDeviceModule = Webrtc::AudioDeviceModuleCreator(
settings.callAudioBackend()),
saveSetDeviceIdCallback),
};
if (Logs::DebugEnabled()) {
const auto callLogFolder = cWorkingDir() + u"DebugLogs"_q;
@@ -1096,29 +1176,6 @@ void Call::setState(State state) {
}
}
void Call::setCurrentAudioDevice(bool input, const QString &deviceId) {
if (_instance) {
const auto id = deviceId.toStdString();
if (input) {
_instance->setAudioInputDevice(id);
} else {
_instance->setAudioOutputDevice(id);
}
}
}
void Call::setCurrentCameraDevice(const QString &deviceId) {
if (!_videoCaptureIsScreencast) {
_videoCaptureDeviceId = deviceId;
if (_videoCapture) {
_videoCapture->switchToDevice(deviceId.toStdString(), false);
if (_instance) {
_instance->sendVideoDeviceUpdated();
}
}
}
}
//void Call::setAudioVolume(bool input, float level) {
// if (_instance) {
// if (input) {
@@ -1168,10 +1225,11 @@ void Call::toggleCameraSharing(bool enabled) {
}
_delegate->callRequestPermissionsOrFail(crl::guard(this, [=] {
toggleScreenSharing(std::nullopt);
const auto deviceId = Core::App().settings().callVideoInputDeviceId();
_videoCaptureDeviceId = deviceId;
_videoCaptureDeviceId = _cameraDeviceId.current().value;
if (_videoCapture) {
_videoCapture->switchToDevice(deviceId.toStdString(), false);
_videoCapture->switchToDevice(
_videoCaptureDeviceId.toStdString(),
false);
if (_instance) {
_instance->sendVideoDeviceUpdated();
}

View File

@@ -12,6 +12,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/bytes.h"
#include "mtproto/sender.h"
#include "mtproto/mtproto_auth_key.h"
#include "webrtc/webrtc_device_resolver.h"
namespace Media {
namespace Audio {
@@ -190,11 +191,9 @@ public:
QString getDebugLog() const;
void setCurrentAudioDevice(bool input, const QString &deviceId);
//void setAudioVolume(bool input, float level);
void setAudioDuckingEnabled(bool enabled);
void setCurrentCameraDevice(const QString &deviceId);
[[nodiscard]] QString videoDeviceId() const {
return _videoCaptureDeviceId;
}
@@ -250,6 +249,7 @@ private:
void setSignalBarCount(int count);
void destroyController();
void setupMediaDevices();
void setupOutgoingVideo();
void updateRemoteMediaState(
tgcalls::AudioState audio,
@@ -271,6 +271,11 @@ private:
base::DelayedCallTimer _finishByTimeoutTimer;
base::Timer _discardByTimeoutTimer;
Fn<void(Webrtc::DeviceResolvedId)> _setDeviceIdCallback;
Webrtc::DeviceResolver _playbackDeviceId;
Webrtc::DeviceResolver _captureDeviceId;
Webrtc::DeviceResolver _cameraDeviceId;
rpl::variable<bool> _muted = false;
DhConfig _dhConfig;

View File

@@ -522,20 +522,6 @@ void Instance::showInfoPanel(not_null<GroupCall*> call) {
}
}
void Instance::setCurrentAudioDevice(bool input, const QString &deviceId) {
if (input) {
Core::App().settings().setCallInputDeviceId(deviceId);
} else {
Core::App().settings().setCallOutputDeviceId(deviceId);
}
Core::App().saveSettingsDelayed();
if (const auto call = currentCall()) {
call->setCurrentAudioDevice(input, deviceId);
} else if (const auto group = currentGroupCall()) {
group->setCurrentAudioDevice(input, deviceId);
}
}
FnMut<void()> Instance::addAsyncWaiter() {
auto semaphore = std::make_unique<crl::semaphore>();
const auto raw = semaphore.get();
@@ -846,7 +832,7 @@ std::shared_ptr<tgcalls::VideoCaptureInterface> Instance::getVideoCapture(
if (deviceId) {
result->switchToDevice(
(deviceId->isEmpty()
? Core::App().settings().callVideoInputDeviceId()
? Core::App().settings().cameraDeviceId()
: *deviceId).toStdString(),
isScreenCapture);
}
@@ -854,7 +840,7 @@ std::shared_ptr<tgcalls::VideoCaptureInterface> Instance::getVideoCapture(
}
const auto startDeviceId = (deviceId && !deviceId->isEmpty())
? *deviceId
: Core::App().settings().callVideoInputDeviceId();
: Core::App().settings().cameraDeviceId();
auto result = std::shared_ptr<tgcalls::VideoCaptureInterface>(
tgcalls::VideoCaptureInterface::Create(
tgcalls::StaticThreads::getThreads(),

View File

@@ -103,8 +103,6 @@ public:
-> std::shared_ptr<tgcalls::VideoCaptureInterface>;
void requestPermissionsOrFail(Fn<void()> onSuccess, bool video = true);
void setCurrentAudioDevice(bool input, const QString &deviceId);
[[nodiscard]] FnMut<void()> addAsyncWaiter();
[[nodiscard]] bool isSharingScreen() const;

View File

@@ -48,8 +48,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/power_save_blocker.h"
#include "media/streaming/media_streaming_utility.h"
#include "window/main_window.h"
#include "webrtc/webrtc_environment.h"
#include "webrtc/webrtc_video_track.h"
#include "webrtc/webrtc_media_devices.h"
#include "styles/style_calls.h"
#include "styles/style_chat.h"
@@ -238,13 +238,14 @@ void Panel::initControls() {
}
});
_screencast->entity()->setClickedCallback([=] {
const auto env = &Core::App().mediaDevices();
if (!_call) {
return;
} else if (!Webrtc::DesktopCaptureAllowed()) {
} else if (!env->desktopCaptureAllowed()) {
if (auto box = Group::ScreenSharingPrivacyRequestBox()) {
_layerBg->showBox(std::move(box));
}
} else if (const auto source = Webrtc::UniqueDesktopCaptureSource()) {
} else if (const auto source = env->uniqueDesktopCaptureSource()) {
if (_call->isSharingScreen()) {
_call->toggleScreenSharing(std::nullopt);
} else {

View File

@@ -29,7 +29,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/global_shortcuts.h"
#include "base/random.h"
#include "webrtc/webrtc_video_track.h"
#include "webrtc/webrtc_media_devices.h"
#include "webrtc/webrtc_create_adm.h"
#include <tgcalls/group/GroupInstanceCustomImpl.h>
@@ -52,14 +51,6 @@ constexpr auto kFixSpeakingLargeVideoDuration = 3 * crl::time(1000);
constexpr auto kFullAsMediumsCount = 4; // 1 Full is like 4 Mediums.
constexpr auto kMaxMediumQualities = 16; // 4 Fulls or 16 Mediums.
[[nodiscard]] std::unique_ptr<Webrtc::MediaDevices> CreateMediaDevices() {
const auto &settings = Core::App().settings();
return Webrtc::CreateMediaDevices(
settings.callInputDeviceId(),
settings.callOutputDeviceId(),
settings.callVideoInputDeviceId());
}
[[nodiscard]] const Data::GroupCallParticipant *LookupParticipant(
not_null<PeerData*> peer,
CallId id,
@@ -590,12 +581,27 @@ GroupCall::GroupCall(
, _scheduleDate(info.scheduleDate)
, _lastSpokeCheckTimer([=] { checkLastSpoke(); })
, _checkJoinedTimer([=] { checkJoined(); })
, _playbackDeviceId(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Playback,
Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callPlaybackDeviceIdValue(),
Core::App().settings().playbackDeviceIdValue()))
, _captureDeviceId(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Capture,
Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callCaptureDeviceIdValue(),
Core::App().settings().captureDeviceIdValue()))
, _cameraDeviceId(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Camera,
Webrtc::DeviceIdOrDefault(Core::App().settings().cameraDeviceIdValue()))
, _pushToTalkCancelTimer([=] { pushToTalkCancel(); })
, _connectingSoundTimer([=] { playConnectingSoundOnce(); })
, _listenersHidden(info.rtmp)
, _rtmp(info.rtmp)
, _rtmpVolume(Group::kDefaultVolume)
, _mediaDevices(CreateMediaDevices()) {
, _rtmpVolume(Group::kDefaultVolume) {
_muted.value(
) | rpl::combine_previous(
) | rpl::start_with_next([=](MuteState previous, MuteState state) {
@@ -2058,28 +2064,28 @@ void GroupCall::applyOtherParticipantUpdate(
}
void GroupCall::setupMediaDevices() {
_mediaDevices->audioInputId(
) | rpl::start_with_next([=](QString id) {
_audioInputId = id;
if (_instance) {
_instance->setAudioInputDevice(id.toStdString());
}
_playbackDeviceId.changes() | rpl::filter([=] {
return _instance && _setDeviceIdCallback;
}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
_setDeviceIdCallback(deviceId);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioOutputDevice(deviceId.value.toStdString());
}, _lifetime);
_mediaDevices->audioOutputId(
) | rpl::start_with_next([=](QString id) {
_audioOutputId = id;
if (_instance) {
_instance->setAudioOutputDevice(id.toStdString());
}
_captureDeviceId.changes() | rpl::filter([=] {
return _instance && _setDeviceIdCallback;
}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
_setDeviceIdCallback(deviceId);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioInputDevice(deviceId.value.toStdString());
}, _lifetime);
_mediaDevices->videoInputId(
) | rpl::start_with_next([=](QString id) {
_cameraInputId = id;
if (_cameraCapture) {
_cameraCapture->switchToDevice(id.toStdString(), false);
}
_cameraDeviceId.changes() | rpl::filter([=] {
return _cameraCapture != nullptr;
}) | rpl::start_with_next([=](const Webrtc::DeviceResolvedId &deviceId) {
_cameraCapture->switchToDevice(deviceId.value.toStdString(), false);
}, _lifetime);
}
@@ -2117,7 +2123,7 @@ bool GroupCall::emitShareCameraError() {
return emitError(Error::DisabledNoCamera);
} else if (mutedByAdmin()) {
return emitError(Error::MutedNoCamera);
} else if (Webrtc::GetVideoInputList().empty()) {
} else if (_cameraDeviceId.current().value.isEmpty()) {
return emitError(Error::NoCamera);
}
return false;
@@ -2126,7 +2132,7 @@ bool GroupCall::emitShareCameraError() {
void GroupCall::emitShareCameraError(Error error) {
_cameraState = Webrtc::VideoState::Inactive;
if (error == Error::CameraFailed
&& Webrtc::GetVideoInputList().empty()) {
&& _cameraDeviceId.current().value.isEmpty()) {
error = Error::NoCamera;
}
_errors.fire_copy(error);
@@ -2180,7 +2186,7 @@ void GroupCall::setupOutgoingVideo() {
return;
} else if (!_cameraCapture) {
_cameraCapture = _delegate->groupCallGetVideoCapture(
_cameraInputId);
_cameraDeviceId.current().value);
if (!_cameraCapture) {
return emitShareCameraError(Error::CameraFailed);
}
@@ -2192,7 +2198,7 @@ void GroupCall::setupOutgoingVideo() {
});
} else {
_cameraCapture->switchToDevice(
_cameraInputId.toStdString(),
_cameraDeviceId.current().value.toStdString(),
false);
}
if (_instance) {
@@ -2338,6 +2344,32 @@ bool GroupCall::tryCreateController() {
const auto weak = base::make_weak(&_instanceGuard);
const auto myLevel = std::make_shared<tgcalls::GroupLevelValue>();
const auto playbackDeviceIdInitial = _playbackDeviceId.current();
const auto captureDeviceIdInitial = _captureDeviceId.current();
const auto saveSetDeviceIdCallback = [=](
Fn<void(Webrtc::DeviceResolvedId)> setDeviceIdCallback) {
setDeviceIdCallback(playbackDeviceIdInitial);
setDeviceIdCallback(captureDeviceIdInitial);
crl::on_main(weak, [=] {
_setDeviceIdCallback = std::move(setDeviceIdCallback);
const auto playback = _playbackDeviceId.current();
if (_instance && playback != playbackDeviceIdInitial) {
_setDeviceIdCallback(playback);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioOutputDevice(
playback.value.toStdString());
}
const auto capture = _captureDeviceId.current();
if (_instance && capture != captureDeviceIdInitial) {
_setDeviceIdCallback(capture);
// Value doesn't matter here, just trigger reading of the new value.
_instance->setAudioInputDevice(capture.value.toStdString());
}
});
};
tgcalls::GroupInstanceDescriptor descriptor = {
.threads = tgcalls::StaticThreads::getThreads(),
.config = tgcalls::GroupConfig{
@@ -2360,10 +2392,10 @@ bool GroupCall::tryCreateController() {
}
crl::on_main(weak, [=] { audioLevelsUpdated(data); });
},
.initialInputDeviceId = _audioInputId.toStdString(),
.initialOutputDeviceId = _audioOutputId.toStdString(),
.initialInputDeviceId = captureDeviceIdInitial.value.toStdString(),
.initialOutputDeviceId = playbackDeviceIdInitial.value.toStdString(),
.createAudioDeviceModule = Webrtc::AudioDeviceModuleCreator(
settings.callAudioBackend()),
saveSetDeviceIdCallback),
.videoCapture = _cameraCapture,
.requestCurrentTime = [=, call = base::make_weak(this)](
std::function<void(int64_t)> done) {
@@ -3290,14 +3322,6 @@ void GroupCall::requestVideoQuality(
updateRequestedVideoChannelsDelayed();
}
void GroupCall::setCurrentAudioDevice(bool input, const QString &deviceId) {
if (input) {
_mediaDevices->switchToAudioInput(deviceId);
} else {
_mediaDevices->switchToAudioOutput(deviceId);
}
}
void GroupCall::toggleMute(const Group::MuteRequest &data) {
if (_rtmp) {
_rtmpVolume = data.mute ? 0 : Group::kDefaultVolume;

View File

@@ -12,6 +12,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/bytes.h"
#include "mtproto/sender.h"
#include "mtproto/mtproto_auth_key.h"
#include "webrtc/webrtc_device_resolver.h"
class History;
@@ -381,7 +382,6 @@ public:
return _videoIsWorking.value();
}
void setCurrentAudioDevice(bool input, const QString &deviceId);
[[nodiscard]] bool isSharingScreen() const;
[[nodiscard]] rpl::producer<bool> isSharingScreenValue() const;
[[nodiscard]] bool isScreenPaused() const;
@@ -667,6 +667,11 @@ private:
crl::time _lastSendProgressUpdate = 0;
Fn<void(Webrtc::DeviceResolvedId)> _setDeviceIdCallback;
Webrtc::DeviceResolver _playbackDeviceId;
Webrtc::DeviceResolver _captureDeviceId;
Webrtc::DeviceResolver _cameraDeviceId;
std::shared_ptr<GlobalShortcutManager> _shortcutManager;
std::shared_ptr<GlobalShortcutValue> _pushToTalk;
base::Timer _pushToTalkCancelTimer;
@@ -677,11 +682,6 @@ private:
bool _reloadedStaleCall = false;
int _rtmpVolume = 0;
std::unique_ptr<Webrtc::MediaDevices> _mediaDevices;
QString _audioInputId;
QString _audioOutputId;
QString _cameraInputId;
rpl::lifetime _lifetime;
};

View File

@@ -54,8 +54,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/power_save_blocker.h"
#include "apiwrap.h" // api().kick.
#include "api/api_chat_participants.h" // api().kick.
#include "webrtc/webrtc_environment.h"
#include "webrtc/webrtc_video_track.h"
#include "webrtc/webrtc_media_devices.h" // UniqueDesktopCaptureSource.
#include "webrtc/webrtc_audio_input_tester.h"
#include "styles/style_calls.h"
#include "styles/style_layers.h"
@@ -1374,9 +1374,10 @@ void Panel::chooseShareScreenSource() {
return;
}
const auto choose = [=] {
if (!Webrtc::DesktopCaptureAllowed()) {
const auto env = &Core::App().mediaDevices();
if (!env->desktopCaptureAllowed()) {
screenSharingPrivacyRequest();
} else if (const auto source = Webrtc::UniqueDesktopCaptureSource()) {
} else if (const auto source = env->uniqueDesktopCaptureSource()) {
if (_call->isSharingScreen()) {
_call->toggleScreenSharing(std::nullopt);
} else {
@@ -2003,7 +2004,8 @@ void Panel::trackControlOver(not_null<Ui::RpWidget*> control, bool over) {
}
void Panel::showStickedTooltip() {
static const auto kHasCamera = !Webrtc::GetVideoInputList().empty();
static const auto kHasCamera = !Core::App().mediaDevices().defaultId(
Webrtc::DeviceType::Camera).isEmpty();
const auto callReady = (_call->state() == State::Joined
|| _call->state() == State::Connecting);
if (!(_stickedTooltipsShown & StickedTooltip::Camera)

View File

@@ -42,7 +42,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "core/application.h"
#include "core/core_settings.h"
#include "webrtc/webrtc_audio_input_tester.h"
#include "webrtc/webrtc_media_devices.h"
#include "webrtc/webrtc_device_resolver.h"
#include "settings/settings_calls.h"
#include "main/main_session.h"
#include "apiwrap.h"
@@ -250,8 +250,7 @@ void SettingsBox(
const auto weakBox = Ui::MakeWeak(box);
struct State {
rpl::event_stream<QString> outputNameStream;
rpl::event_stream<QString> inputNameStream;
std::unique_ptr<Webrtc::DeviceResolver> deviceId;
std::unique_ptr<Webrtc::AudioInputTester> micTester;
Ui::LevelMeter *micTestLevel = nullptr;
float micLevel = 0.;
@@ -295,42 +294,43 @@ void SettingsBox(
Ui::AddSkip(layout);
}
auto playbackIdWithFallback = Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callPlaybackDeviceIdValue(),
Core::App().settings().playbackDeviceIdValue());
AddButtonWithLabel(
layout,
tr::lng_group_call_speakers(),
rpl::single(
CurrentAudioOutputName()
) | rpl::then(
state->outputNameStream.events()
),
PlaybackDeviceNameValue(rpl::duplicate(playbackIdWithFallback)),
st::groupCallSettingsButton
)->addClickHandler([=] {
box->getDelegate()->show(ChooseAudioOutputBox(crl::guard(box, [=](
const QString &id,
const QString &name) {
state->outputNameStream.fire_copy(name);
}), &st::groupCallCheckbox, &st::groupCallRadio));
box->getDelegate()->show(ChoosePlaybackDeviceBox(
rpl::duplicate(playbackIdWithFallback),
crl::guard(box, [=](const QString &id) {
Core::App().settings().setCallPlaybackDeviceId(id);
Core::App().saveSettingsDelayed();
}),
&st::groupCallCheckbox,
&st::groupCallRadio));
});
if (!rtmp) {
auto captureIdWithFallback = Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callCaptureDeviceIdValue(),
Core::App().settings().captureDeviceIdValue());
AddButtonWithLabel(
layout,
tr::lng_group_call_microphone(),
rpl::single(
CurrentAudioInputName()
) | rpl::then(
state->inputNameStream.events()
),
CaptureDeviceNameValue(rpl::duplicate(captureIdWithFallback)),
st::groupCallSettingsButton
)->addClickHandler([=] {
box->getDelegate()->show(ChooseAudioInputBox(crl::guard(box, [=](
const QString &id,
const QString &name) {
state->inputNameStream.fire_copy(name);
if (state->micTester) {
state->micTester->setDeviceId(id);
}
}), &st::groupCallCheckbox, &st::groupCallRadio));
box->getDelegate()->show(ChooseCaptureDeviceBox(
rpl::duplicate(captureIdWithFallback),
crl::guard(box, [=](const QString &id) {
Core::App().settings().setCallCaptureDeviceId(id);
Core::App().saveSettingsDelayed();
}),
&st::groupCallCheckbox,
&st::groupCallRadio));
});
state->micTestLevel = box->addRow(
@@ -771,9 +771,14 @@ void SettingsBox(
box->setShowFinishedCallback([=] {
// Means we finished showing the box.
crl::on_main(box, [=] {
state->deviceId = std::make_unique<Webrtc::DeviceResolver>(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Capture,
Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callCaptureDeviceIdValue(),
Core::App().settings().captureDeviceIdValue()));
state->micTester = std::make_unique<Webrtc::AudioInputTester>(
Core::App().settings().callAudioBackend(),
Core::App().settings().callInputDeviceId());
state->deviceId->value());
state->levelUpdateTimer.callEach(kMicTestUpdateInterval);
});
});
@@ -880,10 +885,13 @@ std::pair<Fn<void()>, rpl::lifetime> ShareInviteLinkAction(
MicLevelTester::MicLevelTester(Fn<void()> show)
: _show(std::move(show))
, _timer([=] { check(); })
, _tester(
std::make_unique<Webrtc::AudioInputTester>(
Core::App().settings().callAudioBackend(),
Core::App().settings().callInputDeviceId())) {
, _deviceId(std::make_unique<Webrtc::DeviceResolver>(
&Core::App().mediaDevices(),
Webrtc::DeviceType::Capture,
Webrtc::DeviceIdValueWithFallback(
Core::App().settings().callCaptureDeviceIdValue(),
Core::App().settings().captureDeviceIdValue())))
, _tester(std::make_unique<Webrtc::AudioInputTester>(_deviceId->value())) {
_timer.callEach(kMicrophoneTooltipCheckInterval);
}
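Reviewer note: both testers above follow the same recipe, so here it is condensed into one hedged sketch (constructor shapes taken from the hunks above; the local names are illustrative): compose the call-specific capture id with the global one, resolve it, and feed the resolved id stream to the tester.

    // Sketch: resolve the capture device with fallback and drive a level tester.
    auto resolver = std::make_unique<Webrtc::DeviceResolver>(
        &Core::App().mediaDevices(),
        Webrtc::DeviceType::Capture,
        Webrtc::DeviceIdValueWithFallback(
            Core::App().settings().callCaptureDeviceIdValue(),
            Core::App().settings().captureDeviceIdValue()));
    auto tester = std::make_unique<Webrtc::AudioInputTester>(resolver->value());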

View File

@@ -11,6 +11,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
namespace Webrtc {
class AudioInputTester;
class DeviceResolver;
} // namespace Webrtc
namespace Calls {
@@ -38,6 +39,7 @@ private:
Fn<void()> _show;
base::Timer _timer;
std::unique_ptr<Webrtc::DeviceResolver> _deviceId;
std::unique_ptr<Webrtc::AudioInputTester> _tester;
int _loudCount = 0;
int _quietCount = 0;

View File

@@ -71,7 +71,6 @@ ComposeIcons {
menuSpoilerOff: icon;
stripBubble: icon;
stripPremiumLocked: icon;
stripExpandPanel: icon;
stripExpandDropdown: icon;
}
@@ -123,6 +122,8 @@ EmojiPan {
removeSet: IconButton;
boxLabel: FlatLabel;
icons: ComposeIcons;
about: FlatLabel;
aboutPadding: margins;
autocompleteBottomSkip: pixels;
}
@@ -609,10 +610,6 @@ defaultComposeIcons: ComposeIcons {
{ "chat/reactions_bubble_shadow", windowShadowFg },
{ "chat/reactions_bubble", windowBg },
};
stripPremiumLocked: icon{
{ "chat/reactions_premium_bg", historyPeerArchiveUserpicBg },
{ "chat/reactions_premium_star", historyPeerUserpicFg },
};
stripExpandPanel: icon{
{ "chat/reactions_round_big", windowBgRipple },
{ "chat/reactions_expand_panel", windowSubTextFg },
@@ -622,6 +619,14 @@ defaultComposeIcons: ComposeIcons {
{ "chat/reactions_expand_panel", windowSubTextFg },
};
}
defaultEmojiPanAbout: FlatLabel(defaultFlatLabel) {
minWidth: 10px;
align: align(top);
textFg: windowSubTextFg;
style: TextStyle(defaultTextStyle) {
font: font(11px);
}
}
defaultEmojiPan: EmojiPan {
margin: margins(7px, 0px, 7px, 0px);
padding: margins(7px, 0px, 4px, 7px);
@@ -663,6 +668,8 @@ defaultEmojiPan: EmojiPan {
removeSet: stickerPanRemoveSet;
boxLabel: boxLabel;
icons: defaultComposeIcons;
about: defaultEmojiPanAbout;
aboutPadding: margins(12px, 2px, 12px, 2px);
autocompleteBottomSkip: 0px;
}
statusEmojiPan: EmojiPan(defaultEmojiPan) {
@@ -1122,6 +1129,7 @@ historyRecordLockBodyShadow: icon {{ "voice_lock/record_lock_body_shadow", histo
historyRecordLockBody: icon {{ "voice_lock/record_lock_body", historyToDownBg }};
historyRecordLockMargin: margins(4px, 4px, 4px, 4px);
historyRecordLockArrow: icon {{ "voice_lock/voice_arrow", historyToDownFg }};
historyRecordLockInput: icon {{ "voice_lock/input_mic_s", historyToDownFg }};
historyRecordLockRippleMargin: margins(6px, 6px, 6px, 6px);
historyRecordDelete: IconButton(historyAttach) {
@@ -1306,3 +1314,22 @@ ttlMediaButton: RoundButton(defaultActiveButton) {
textTop: 6px;
}
ttlMediaButtonBottomSkip: 14px;
editTagAbout: FlatLabel(defaultFlatLabel) {
minWidth: 256px;
}
editTagField: InputField(defaultInputField) {
textBg: transparent;
textMargins: margins(24px, 10px, 32px, 2px);
placeholderFg: placeholderFg;
placeholderFgActive: placeholderFgActive;
placeholderFgError: placeholderFgActive;
placeholderMargins: margins(2px, 0px, 2px, 0px);
placeholderScale: 0.;
heightMin: 36px;
}
editTagLimit: FlatLabel(defaultFlatLabel) {
textFg: windowSubTextFg;
}

View File

@@ -331,7 +331,7 @@ void FieldAutocomplete::showFiltered(
plainQuery = base::StringViewMid(query, 1);
break;
}
bool resetScroll = (_type != type || _filter != plainQuery);
const auto resetScroll = (_type != type || _filter != plainQuery);
if (resetScroll) {
_type = type;
_filter = TextUtilities::RemoveAccents(plainQuery.toString());
@@ -342,10 +342,11 @@ void FieldAutocomplete::showFiltered(
}
void FieldAutocomplete::showStickers(EmojiPtr emoji) {
bool resetScroll = (_emoji != emoji);
_emoji = emoji;
_type = Type::Stickers;
if (!emoji) {
const auto resetScroll = (_emoji != emoji);
if (resetScroll || emoji) {
_emoji = emoji;
_type = Type::Stickers;
} else if (!emoji) {
rowsUpdated(
base::take(_mrows),
base::take(_hrows),

View File

@@ -635,7 +635,7 @@ void StickersListFooter::paint(
if (context.expanding) {
const auto both = clip.intersected(
context.clip.marginsRemoved(
{ context.radius, 0, context.radius, 0 }));
{ 0/*context.radius*/, 0, context.radius, 0 }));
if (both.isEmpty()) {
return;
}

View File

@@ -88,6 +88,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "storage/localstorage.h"
#include "payments/payments_checkout_process.h"
#include "export/export_manager.h"
#include "webrtc/webrtc_environment.h"
#include "window/window_session_controller.h"
#include "window/window_controller.h"
#include "boxes/abstract_box.h"
@@ -150,6 +151,7 @@ Application::Application()
, _private(std::make_unique<Private>())
, _platformIntegration(Platform::Integration::Create())
, _batterySaving(std::make_unique<base::BatterySaving>())
, _mediaDevices(std::make_unique<Webrtc::Environment>())
, _databases(std::make_unique<Storage::Databases>())
, _animationsManager(std::make_unique<Ui::Animations::Manager>())
, _clearEmojiImageLoaderTimer([=] { clearEmojiSourceImages(); })

View File

@@ -101,6 +101,10 @@ namespace Calls {
class Instance;
} // namespace Calls
namespace Webrtc {
class Environment;
} // namespace Webrtc
namespace Core {
struct LocalUrlHandler;
@@ -238,6 +242,9 @@ public:
[[nodiscard]] Media::Audio::Instance &audio() {
return *_audio;
}
[[nodiscard]] Webrtc::Environment &mediaDevices() {
return *_mediaDevices;
}
// Langpack and emoji keywords.
[[nodiscard]] Lang::Instance &langpack() {
@@ -383,6 +390,7 @@ private:
const std::unique_ptr<Private> _private;
const std::unique_ptr<Platform::Integration> _platformIntegration;
const std::unique_ptr<base::BatterySaving> _batterySaving;
const std::unique_ptr<Webrtc::Environment> _mediaDevices;
const std::unique_ptr<Storage::Databases> _databases;
const std::unique_ptr<Ui::Animations::Manager> _animationsManager;
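Reviewer note: the new _mediaDevices member is exposed through Core::App().mediaDevices(); a one-line usage sketch matching the camera check in the panel hunk earlier:

    // Sketch: ask the shared Webrtc::Environment whether any camera exists.
    const auto hasCamera = !Core::App().mediaDevices().defaultId(
        Webrtc::DeviceType::Camera).isEmpty();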

View File

@@ -33,6 +33,12 @@ namespace {
void SearchByHashtag(ClickContext context, const QString &tag) {
const auto my = context.other.value<ClickHandlerContext>();
if (const auto delegate = my.elementDelegate
? my.elementDelegate()
: nullptr) {
delegate->elementSearchInList(tag, my.itemId);
return;
}
const auto controller = my.sessionWindow.get();
if (!controller) {
return;
@@ -287,7 +293,9 @@ void BotCommandClickHandler::onClick(ClickContext context) const {
return;
}
const auto my = context.other.value<ClickHandlerContext>();
if (const auto delegate = my.elementDelegate ? my.elementDelegate() : nullptr) {
if (const auto delegate = my.elementDelegate
? my.elementDelegate()
: nullptr) {
delegate->elementSendBotCommand(_cmd, my.itemId);
} else if (const auto controller = my.sessionWindow.get()) {
auto &data = controller->session().data();

View File

@@ -17,6 +17,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/gl/gl_detection.h"
#include "ui/widgets/fields/input_field.h"
#include "webrtc/webrtc_create_adm.h"
#include "webrtc/webrtc_device_common.h"
#include "window/section_widget.h"
namespace Core {
@@ -159,8 +160,8 @@ QByteArray Settings::serialize() const {
+ Serialize::stringSize(_downloadPath.current())
+ Serialize::bytearraySize(_downloadPathBookmark)
+ sizeof(qint32) * 9
+ Serialize::stringSize(_callOutputDeviceId)
+ Serialize::stringSize(_callInputDeviceId)
+ Serialize::stringSize(QString()) // legacy call output device id
+ Serialize::stringSize(QString()) // legacy call input device id
+ sizeof(qint32) * 5;
for (const auto &[key, value] : _soundOverrides) {
size += Serialize::stringSize(key) + Serialize::stringSize(value);
@@ -170,7 +171,7 @@ QByteArray Settings::serialize() const {
+ sizeof(qint32)
+ (_dictionariesEnabled.current().size() * sizeof(quint64))
+ sizeof(qint32) * 12
+ Serialize::stringSize(_callVideoInputDeviceId)
+ Serialize::stringSize(_cameraDeviceId.current())
+ sizeof(qint32) * 2
+ Serialize::bytearraySize(_groupCallPushToTalkShortcut)
+ sizeof(qint64)
@@ -194,7 +195,7 @@ QByteArray Settings::serialize() const {
+ (_accountsOrder.size() * sizeof(quint64))
+ sizeof(qint32) * 7
+ (skipLanguages.size() * sizeof(quint64))
+ sizeof(qint32)
+ sizeof(qint32) * 2
+ sizeof(quint64)
+ sizeof(qint32) * 3
+ Serialize::bytearraySize(mediaViewPosition)
@@ -204,6 +205,11 @@ QByteArray Settings::serialize() const {
for (const auto &id : _recentEmojiSkip) {
size += Serialize::stringSize(id);
}
size += sizeof(qint32) * 2
+ Serialize::stringSize(_playbackDeviceId.current())
+ Serialize::stringSize(_captureDeviceId.current())
+ Serialize::stringSize(_callPlaybackDeviceId.current())
+ Serialize::stringSize(_callCaptureDeviceId.current());
auto result = QByteArray();
result.reserve(size);
@@ -228,8 +234,8 @@ QByteArray Settings::serialize() const {
<< qint32(_notificationsCount)
<< static_cast<qint32>(_notificationsCorner)
<< qint32(_autoLock)
<< _callOutputDeviceId
<< _callInputDeviceId
<< QString() // legacy call output device id
<< QString() // legacy call input device id
<< qint32(_callOutputVolume)
<< qint32(_callInputVolume)
<< qint32(_callAudioDuckingEnabled ? 1 : 0)
@@ -273,7 +279,7 @@ QByteArray Settings::serialize() const {
<< qint32(_notifyFromAll ? 1 : 0)
<< qint32(_nativeWindowFrame.current() ? 1 : 0)
<< qint32(_systemDarkModeEnabled.current() ? 1 : 0)
<< _callVideoInputDeviceId
<< _cameraDeviceId.current()
<< qint32(_ipRevealWarning ? 1 : 0)
<< qint32(_groupCallPushToTalk ? 1 : 0)
<< _groupCallPushToTalkShortcut
@@ -327,9 +333,7 @@ QByteArray Settings::serialize() const {
}
stream
<< qint32(_rememberedDeleteMessageOnlyForYou ? 1 : 0);
stream
<< qint32(_rememberedDeleteMessageOnlyForYou ? 1 : 0)
<< qint32(_translateChatEnabled.current() ? 1 : 0)
<< quint64(QLocale::Language(_translateToRaw.current()))
<< qint32(_windowTitleContent.current().hideChatName ? 1 : 0)
@@ -339,14 +343,20 @@ QByteArray Settings::serialize() const {
<< qint32(_ignoreBatterySaving.current() ? 1 : 0)
<< quint64(_macRoundIconDigest.value_or(0))
<< qint32(_storiesClickTooltipHidden.current() ? 1 : 0)
<< qint32(_recentEmojiSkip.size())
<< qint32(_ttlVoiceClickTooltipHidden.current() ? 1 : 0);
<< qint32(_recentEmojiSkip.size());
for (const auto &id : _recentEmojiSkip) {
stream << id;
}
stream
<< qint32(_trayIconMonochrome.current() ? 1 : 0);
<< qint32(_trayIconMonochrome.current() ? 1 : 0)
<< qint32(_ttlVoiceClickTooltipHidden.current() ? 1 : 0)
<< _playbackDeviceId.current()
<< _captureDeviceId.current()
<< _callPlaybackDeviceId.current()
<< _callCaptureDeviceId.current();
}
Ensures(result.size() == size);
return result;
}
@@ -375,9 +385,13 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
qint32 notificationsCount = _notificationsCount;
qint32 notificationsCorner = static_cast<qint32>(_notificationsCorner);
qint32 autoLock = _autoLock;
QString callOutputDeviceId = _callOutputDeviceId;
QString callInputDeviceId = _callInputDeviceId;
QString callVideoInputDeviceId = _callVideoInputDeviceId;
QString playbackDeviceId = _playbackDeviceId.current();
QString captureDeviceId = _captureDeviceId.current();
QString cameraDeviceId = _cameraDeviceId.current();
QString legacyCallPlaybackDeviceId = _callPlaybackDeviceId.current();
QString legacyCallCaptureDeviceId = _callCaptureDeviceId.current();
QString callPlaybackDeviceId = _callPlaybackDeviceId.current();
QString callCaptureDeviceId = _callCaptureDeviceId.current();
qint32 callOutputVolume = _callOutputVolume;
qint32 callInputVolume = _callInputVolume;
qint32 callAudioDuckingEnabled = _callAudioDuckingEnabled ? 1 : 0;
@@ -417,7 +431,7 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
qint32 groupCallPushToTalk = _groupCallPushToTalk ? 1 : 0;
QByteArray groupCallPushToTalkShortcut = _groupCallPushToTalkShortcut;
qint64 groupCallPushToTalkDelay = _groupCallPushToTalkDelay;
qint32 callAudioBackend = 0;
qint32 legacyCallAudioBackend = 0;
qint32 disableCallsLegacy = 0;
QByteArray windowPosition;
std::vector<RecentEmojiPreload> recentEmojiPreload;
@@ -475,8 +489,8 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
>> notificationsCount
>> notificationsCorner
>> autoLock
>> callOutputDeviceId
>> callInputDeviceId
>> legacyCallPlaybackDeviceId
>> legacyCallCaptureDeviceId
>> callOutputVolume
>> callInputVolume
>> callAudioDuckingEnabled
@@ -539,7 +553,7 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
stream >> systemDarkModeEnabled;
}
if (!stream.atEnd()) {
stream >> callVideoInputDeviceId;
stream >> cameraDeviceId;
}
if (!stream.atEnd()) {
stream >> ipRevealWarning;
@@ -551,7 +565,7 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
>> groupCallPushToTalkDelay;
}
if (!stream.atEnd()) {
stream >> callAudioBackend;
stream >> legacyCallAudioBackend;
}
if (!stream.atEnd()) {
stream >> disableCallsLegacy;
@@ -666,7 +680,8 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
});
}
}
}
if (!stream.atEnd()) {
stream >> rememberedDeleteMessageOnlyForYou;
}
if (!stream.atEnd()) {
@@ -714,6 +729,24 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
if (!stream.atEnd()) {
stream >> ttlVoiceClickTooltipHidden;
}
if (!stream.atEnd()) {
stream
>> playbackDeviceId
>> captureDeviceId;
}
if (!stream.atEnd()) {
stream
>> callPlaybackDeviceId
>> callCaptureDeviceId;
} else {
const auto &defaultId = Webrtc::kDefaultDeviceId;
callPlaybackDeviceId = (legacyCallPlaybackDeviceId == defaultId)
? QString()
: legacyCallPlaybackDeviceId;
callCaptureDeviceId = (legacyCallCaptureDeviceId == defaultId)
? QString()
: legacyCallCaptureDeviceId;
}
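Reviewer note: the else-branch above is the legacy migration path. Older installs stored the placeholder id (u"default"_q, now Webrtc::kDefaultDeviceId) for the per-call devices, while the new fields use an empty id to mean "system default". The mapping applied when the new per-call ids are missing from the stream boils down to this sketch:

    // Sketch of the migration above (runs only when the new per-call ids are
    // absent from the serialized settings).
    const auto migrate = [](const QString &legacy) {
        return (legacy == Webrtc::kDefaultDeviceId) ? QString() : legacy;
    };
    callPlaybackDeviceId = migrate(legacyCallPlaybackDeviceId);
    callCaptureDeviceId = migrate(legacyCallCaptureDeviceId);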
if (stream.status() != QDataStream::Ok) {
LOG(("App Error: "
"Bad data for Core::Settings::constructFromSerialized()"));
@@ -757,9 +790,12 @@ void Settings::addFromSerialized(const QByteArray &serialized) {
_countUnreadMessages = (countUnreadMessages == 1);
_notifyAboutPinned = (notifyAboutPinned == 1);
_autoLock = autoLock;
_callOutputDeviceId = callOutputDeviceId;
_callInputDeviceId = callInputDeviceId;
_callVideoInputDeviceId = callVideoInputDeviceId;
_playbackDeviceId = playbackDeviceId;
_captureDeviceId = captureDeviceId;
const auto kOldDefault = u"default"_q;
_cameraDeviceId = cameraDeviceId;
_callPlaybackDeviceId = callPlaybackDeviceId;
_callCaptureDeviceId = callCaptureDeviceId;
_callOutputVolume = callOutputVolume;
_callInputVolume = callInputVolume;
_callAudioDuckingEnabled = (callAudioDuckingEnabled == 1);
@@ -955,10 +991,6 @@ void Settings::setTabbedReplacedWithInfo(bool enabled) {
}
}
Webrtc::Backend Settings::callAudioBackend() const {
return Webrtc::Backend::OpenAL;
}
void Settings::setDialogsWidthRatio(float64 ratio) {
_dialogsWidthRatio = ratio;
}
@@ -1216,9 +1248,11 @@ void Settings::resetOnLastLogout() {
_notifyAboutPinned = true;
//_autoLock = 3600;
//_callOutputDeviceId = u"default"_q;
//_callInputDeviceId = u"default"_q;
//_callVideoInputDeviceId = u"default"_q;
//_playbackDeviceId = QString();
//_captureDeviceId = QString();
//_cameraDeviceId = QString();
//_callPlaybackDeviceId = QString();
//_callCaptureDeviceId = QString();
//_callOutputVolume = 100;
//_callInputVolume = 100;
//_callAudioDuckingEnabled = true;

View File

@@ -29,10 +29,6 @@ namespace Window {
enum class Column;
} // namespace Window
namespace Webrtc {
enum class Backend;
} // namespace Webrtc
namespace Calls::Group {
enum class StickedTooltip;
} // namespace Calls::Group
@@ -263,30 +259,68 @@ public:
void setAutoLock(int value) {
_autoLock = value;
}
[[nodiscard]] QString callOutputDeviceId() const {
return _callOutputDeviceId.isEmpty()
? u"default"_q
: _callOutputDeviceId;
[[nodiscard]] QString playbackDeviceId() const {
return _playbackDeviceId.current();
}
void setCallOutputDeviceId(const QString &value) {
_callOutputDeviceId = value;
[[nodiscard]] rpl::producer<QString> playbackDeviceIdChanges() const {
return _playbackDeviceId.changes();
}
[[nodiscard]] QString callInputDeviceId() const {
return _callInputDeviceId.isEmpty()
? u"default"_q
: _callInputDeviceId;
[[nodiscard]] rpl::producer<QString> playbackDeviceIdValue() const {
return _playbackDeviceId.value();
}
void setCallInputDeviceId(const QString &value) {
_callInputDeviceId = value;
void setPlaybackDeviceId(const QString &value) {
_playbackDeviceId = value;
}
[[nodiscard]] QString callVideoInputDeviceId() const {
return _callVideoInputDeviceId.isEmpty()
? u"default"_q
: _callVideoInputDeviceId;
[[nodiscard]] QString captureDeviceId() const {
return _captureDeviceId.current();
}
void setCallVideoInputDeviceId(const QString &value) {
_callVideoInputDeviceId = value;
[[nodiscard]] rpl::producer<QString> captureDeviceIdChanges() const {
return _captureDeviceId.changes();
}
[[nodiscard]] rpl::producer<QString> captureDeviceIdValue() const {
return _captureDeviceId.value();
}
void setCaptureDeviceId(const QString &value) {
_captureDeviceId = value;
}
[[nodiscard]] QString cameraDeviceId() const {
return _cameraDeviceId.current();
}
[[nodiscard]] rpl::producer<QString> cameraDeviceIdChanges() const {
return _cameraDeviceId.changes();
}
[[nodiscard]] rpl::producer<QString> cameraDeviceIdValue() const {
return _cameraDeviceId.value();
}
void setCameraDeviceId(const QString &value) {
_cameraDeviceId = value;
}
[[nodiscard]] QString callPlaybackDeviceId() const {
return _callPlaybackDeviceId.current();
}
[[nodiscard]] rpl::producer<QString> callPlaybackDeviceIdChanges() const {
return _callPlaybackDeviceId.changes();
}
[[nodiscard]] rpl::producer<QString> callPlaybackDeviceIdValue() const {
return _callPlaybackDeviceId.value();
}
void setCallPlaybackDeviceId(const QString &value) {
_callPlaybackDeviceId = value;
}
[[nodiscard]] QString callCaptureDeviceId() const {
return _callCaptureDeviceId.current();
}
[[nodiscard]] rpl::producer<QString> callCaptureDeviceIdChanges() const {
return _callCaptureDeviceId.changes();
}
[[nodiscard]] rpl::producer<QString> callCaptureDeviceIdValue() const {
return _callCaptureDeviceId.value();
}
void setCallCaptureDeviceId(const QString &value) {
_callCaptureDeviceId = value;
}
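Reviewer note: every device id now comes as the same reactive trio — a current-value getter, a changes() stream, and a value() stream that replays the current id first. A hedged usage sketch (`applyCaptureDevice` and `lifetime` are illustrative, owned by whatever widget observes the setting):

    // Sketch: react to capture-device selection; an empty id means system default.
    Core::App().settings().captureDeviceIdValue(
    ) | rpl::start_with_next([=](const QString &id) {
        applyCaptureDevice(id); // hypothetical consumer
    }, lifetime);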
[[nodiscard]] int callOutputVolume() const {
return _callOutputVolume;
}
@@ -305,7 +339,6 @@ public:
void setCallAudioDuckingEnabled(bool value) {
_callAudioDuckingEnabled = value;
}
[[nodiscard]] Webrtc::Backend callAudioBackend() const;
[[nodiscard]] bool disableCallsLegacy() const {
return _disableCallsLegacy;
}
@@ -875,9 +908,11 @@ private:
bool _countUnreadMessages = true;
rpl::variable<bool> _notifyAboutPinned = true;
int _autoLock = 3600;
QString _callOutputDeviceId = u"default"_q;
QString _callInputDeviceId = u"default"_q;
QString _callVideoInputDeviceId = u"default"_q;
rpl::variable<QString> _playbackDeviceId;
rpl::variable<QString> _captureDeviceId;
rpl::variable<QString> _cameraDeviceId;
rpl::variable<QString> _callPlaybackDeviceId;
rpl::variable<QString> _callCaptureDeviceId;
int _callOutputVolume = 100;
int _callInputVolume = 100;
bool _callAudioDuckingEnabled = true;

View File

@@ -23,6 +23,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/boxes/confirm_box.h"
#include "boxes/share_box.h"
#include "boxes/connection_box.h"
#include "boxes/premium_preview_box.h"
#include "boxes/sticker_set_box.h"
#include "boxes/sessions_box.h"
#include "boxes/language_box.h"
@@ -657,6 +658,17 @@ bool CopyPeerId(
return true;
}
bool ShowSearchTagsPromo(
Window::SessionController *controller,
const Match &match,
const QVariant &context) {
if (!controller) {
return false;
}
ShowPremiumPreviewBox(controller, PremiumPreview::TagsForMessages);
return true;
}
void ExportTestChatTheme(
not_null<Window::SessionController*> controller,
not_null<const Data::CloudTheme*> theme) {
@@ -1020,6 +1032,10 @@ const std::vector<LocalUrlHandler> &InternalUrlHandlers() {
{
u"^copy:(.+)$"_q,
CopyPeerId
},
{
u"about_tags"_q,
ShowSearchTagsPromo
}
};
return Result;

View File

@@ -22,7 +22,7 @@ constexpr auto AppId = "{53F49750-6209-4FBF-9CA8-7A333C87D1ED}"_cs;
constexpr auto AppNameOld = "Telegram Win (Unofficial)"_cs;
constexpr auto AppName = "Telegram Desktop"_cs;
constexpr auto AppFile = "Telegram"_cs;
constexpr auto AppVersion = 4014009;
constexpr auto AppVersionStr = "4.14.9";
constexpr auto AppVersion = 4014014;
constexpr auto AppVersionStr = "4.14.14";
constexpr auto AppBetaVersion = false;
constexpr auto AppAlphaVersion = TDESKTOP_ALPHA_VERSION;

View File

@@ -93,7 +93,7 @@ MTPInputMedia WebPageForMTP(
bool required) {
using Flag = MTPDinputMediaWebPage::Flag;
return MTP_inputMediaWebPage(
MTP_flags((required ? Flag() : Flag::f_optional)
MTP_flags(((false && required) ? Flag() : Flag::f_optional)
| (draft.forceLargeMedia ? Flag::f_force_large_media : Flag())
| (draft.forceSmallMedia ? Flag::f_force_small_media : Flag())),
MTP_string(draft.url));

View File

@@ -302,20 +302,27 @@ bool UpdateExtendedMedia(
});
}
} // namespace
TextForMimeData WithCaptionClipboardText(
const QString &attachType,
TextForMimeData &&caption) {
auto result = TextForMimeData();
result.reserve(5 + attachType.size() + caption.expanded.size());
result.append(u"[ "_q).append(attachType).append(u" ]"_q);
if (!caption.empty()) {
result.append('\n').append(std::move(caption));
if (attachType.isEmpty()) {
result.reserve(1 + caption.expanded.size());
if (!caption.empty()) {
result.append(std::move(caption));
}
} else {
result.reserve(5 + attachType.size() + caption.expanded.size());
result.append(u"[ "_q).append(attachType).append(u" ]"_q);
if (!caption.empty()) {
result.append('\n').append(std::move(caption));
}
}
return result;
}
} // namespace
Invoice ComputeInvoiceData(
not_null<HistoryItem*> item,
const MTPDmessageMediaInvoice &data) {
@@ -767,9 +774,7 @@ QString MediaPhoto::pinnedTextSubstring() const {
}
TextForMimeData MediaPhoto::clipboardText() const {
return WithCaptionClipboardText(
tr::lng_in_dlg_photo(tr::now),
parent()->clipboardText());
return TextForMimeData();
}
bool MediaPhoto::allowsEditCaption() const {
@@ -983,13 +988,17 @@ ItemPreview MediaFile::toPreview(ToPreviewOptions options) const {
const auto type = [&] {
using namespace Ui::Text;
if (_document->isVideoMessage()) {
return tr::lng_in_dlg_video_message(tr::now);
return (item->media() && item->media()->ttlSeconds())
? tr::lng_in_dlg_video_message_ttl(tr::now)
: tr::lng_in_dlg_video_message(tr::now);
} else if (_document->isAnimation()) {
return u"GIF"_q;
} else if (_document->isVideoFile()) {
return tr::lng_in_dlg_video(tr::now);
} else if (_document->isVoiceMessage()) {
return tr::lng_in_dlg_audio(tr::now);
return (item->media() && item->media()->ttlSeconds())
? tr::lng_in_dlg_voice_message_ttl(tr::now)
: tr::lng_in_dlg_audio(tr::now);
} else if (const auto name = FormatSongNameFor(_document).string();
!name.isEmpty()) {
return name;
@@ -1020,13 +1029,19 @@ TextWithEntities MediaFile::notificationText() const {
}
const auto type = [&] {
if (_document->isVideoMessage()) {
return tr::lng_in_dlg_video_message(tr::now);
const auto media = parent()->media();
return (media && media->ttlSeconds())
? tr::lng_in_dlg_video_message_ttl(tr::now)
: tr::lng_in_dlg_video_message(tr::now);
} else if (_document->isAnimation()) {
return u"GIF"_q;
} else if (_document->isVideoFile()) {
return tr::lng_in_dlg_video(tr::now);
} else if (_document->isVoiceMessage()) {
return tr::lng_in_dlg_audio(tr::now);
const auto media = parent()->media();
return (media && media->ttlSeconds())
? tr::lng_in_dlg_voice_message_ttl(tr::now)
: tr::lng_in_dlg_audio(tr::now);
} else if (!_document->filename().isEmpty()) {
return _document->filename();
} else if (_document->isAudioFile()) {
@@ -1062,36 +1077,9 @@ QString MediaFile::pinnedTextSubstring() const {
}
TextForMimeData MediaFile::clipboardText() const {
const auto attachType = [&] {
const auto name = Ui::Text::FormatSongNameFor(_document).string();
const auto addName = !name.isEmpty()
? u" : "_q + name
: QString();
if (const auto sticker = _document->sticker()) {
if (!_emoji.isEmpty()) {
return tr::lng_in_dlg_sticker_emoji(
tr::now,
lt_emoji,
_emoji);
}
return tr::lng_in_dlg_sticker(tr::now);
} else if (_document->isAnimation()) {
if (_document->isVideoMessage()) {
return tr::lng_in_dlg_video_message(tr::now);
}
return u"GIF"_q;
} else if (_document->isVideoFile()) {
return tr::lng_in_dlg_video(tr::now);
} else if (_document->isVoiceMessage()) {
return tr::lng_in_dlg_audio(tr::now) + addName;
} else if (_document->isSong()) {
return tr::lng_in_dlg_audio_file(tr::now) + addName;
}
return tr::lng_in_dlg_file(tr::now) + addName;
}();
auto caption = parent()->clipboardText();
if (_document->isVoiceMessage()) {
if (_document->isVoiceMessage() || _document->isVideoMessage()) {
const auto &entry = _document->session().api().transcribes().entry(
parent());
if (!entry.requestId
@@ -1099,17 +1087,18 @@ TextForMimeData MediaFile::clipboardText() const {
&& !entry.toolong
&& !entry.failed
&& (entry.pending || !entry.result.isEmpty())) {
const auto text = "{{\n"
const auto hasCaption = !caption.rich.text.isEmpty();
const auto text = (hasCaption ? "{{\n" : "")
+ entry.result
+ (entry.result.isEmpty() ? "" : " ")
+ (entry.pending ? "[...]" : "")
+ "\n}}"
+ (caption.rich.text.isEmpty() ? "" : "\n");
caption = TextForMimeData{ text, { text } }.append(std::move(caption));
+ (hasCaption ? "\n}}\n" : "");
caption = TextForMimeData{ text, { text } }.append(
std::move(caption));
}
}
return WithCaptionClipboardText(attachType, std::move(caption));
return caption;
}
bool MediaFile::allowsEditCaption() const {

View File

@@ -709,10 +709,6 @@ private:
};
[[nodiscard]] TextForMimeData WithCaptionClipboardText(
const QString &attachType,
TextForMimeData &&caption);
[[nodiscard]] Invoice ComputeInvoiceData(
not_null<HistoryItem*> item,
const MTPDmessageMediaInvoice &data);

View File

@@ -31,6 +31,15 @@ ReactionId SearchTagFromQuery(const QString &query) {
return {};
}
std::vector<ReactionId> SearchTagsFromQuery(
const QString &query) {
auto result = std::vector<ReactionId>();
if (const auto tag = SearchTagFromQuery(query)) {
result.push_back(tag);
}
return result;
}
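Reviewer note: the new helper just lifts the single-tag parser into a list, so search code that works with a vector of tag ids can consume a raw query directly (unqualified here, same namespace as SearchTagFromQuery):

    // Sketch: an empty vector means the query carried no tag prefix.
    const auto tags = SearchTagsFromQuery(query);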
QString ReactionEntityData(const ReactionId &id) {
if (id.empty()) {
return {};

View File

@@ -47,6 +47,8 @@ struct MessageReaction {
[[nodiscard]] QString SearchTagToQuery(const ReactionId &tagId);
[[nodiscard]] ReactionId SearchTagFromQuery(const QString &query);
[[nodiscard]] std::vector<ReactionId> SearchTagsFromQuery(
const QString &query);
[[nodiscard]] QString ReactionEntityData(const ReactionId &id);

View File

@@ -20,6 +20,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "data/data_document.h"
#include "data/data_document_media.h"
#include "data/data_peer_values.h"
#include "data/data_saved_sublist.h"
#include "data/stickers/data_custom_emoji.h"
#include "storage/localimageloader.h"
#include "ui/image/image_location_factory.h"
@@ -208,13 +209,6 @@ PossibleItemReactionsRef LookupPossibleReactions(
} else if (id.custom()
&& allowed.type == AllowedReactionsType::Default) {
return false;
} else if (reaction.premium
&& !session->premium()
&& !ranges::contains(all, id, &MessageReaction::id)) {
if (premiumPossible) {
result.morePremiumAvailable = true;
}
return false;
}
return true;
});
@@ -247,7 +241,6 @@ PossibleItemReactions::PossibleItemReactions(
: recent(other.recent | ranges::views::transform([](const auto &value) {
return *value;
}) | ranges::to_vector)
, morePremiumAvailable(other.morePremiumAvailable)
, customAllowed(other.customAllowed)
, tags(other.tags){
}
@@ -258,6 +251,8 @@ Reactions::Reactions(not_null<Session*> owner)
, _repaintTimer([=] { repaintCollected(); }) {
refreshDefault();
_myTags.emplace(nullptr);
base::timer_each(
kRefreshFullListEach
) | rpl::start_with_next([=] {
@@ -320,17 +315,18 @@ void Reactions::refreshDefault() {
requestDefault();
}
void Reactions::refreshMyTags() {
requestMyTags();
void Reactions::refreshMyTags(SavedSublist *sublist) {
requestMyTags(sublist);
}
void Reactions::refreshMyTagsDelayed() {
if (_myTagsRequestId || _myTagsRequestScheduled) {
auto &my = _myTags[nullptr];
if (my.requestId || my.requestScheduled) {
return;
}
_myTagsRequestScheduled = true;
my.requestScheduled = true;
base::call_delayed(kMyTagsRequestTimeout, &_owner->session(), [=] {
if (_myTagsRequestScheduled) {
if (_myTags[nullptr].requestScheduled) {
requestMyTags();
}
});
@@ -346,14 +342,27 @@ const std::vector<Reaction> &Reactions::list(Type type) const {
case Type::Recent: return _recent;
case Type::Top: return _top;
case Type::All: return _available;
case Type::MyTags: return _myTags;
case Type::MyTags:
return _myTags.find((SavedSublist*)nullptr)->second.tags;
case Type::Tags: return _tags;
}
Unexpected("Type in Reactions::list.");
}
const std::vector<MyTagInfo> &Reactions::myTagsInfo() const {
return _myTagsInfo;
return _myTags.find((SavedSublist*)nullptr)->second.info;
}
const QString &Reactions::myTagTitle(const ReactionId &id) const {
const auto i = _myTags.find((SavedSublist*)nullptr);
if (i != end(_myTags)) {
const auto j = ranges::find(i->second.info, id, &MyTagInfo::id);
if (j != end(i->second.info)) {
return j->title;
}
}
static const auto kEmpty = QString();
return kEmpty;
}
ReactionId Reactions::favoriteId() const {
@@ -380,14 +389,18 @@ void Reactions::setFavorite(const ReactionId &id) {
applyFavorite(id);
}
void Reactions::incrementMyTag(const ReactionId &id) {
auto i = ranges::find(_myTagsInfo, id, &MyTagInfo::id);
if (i == end(_myTagsInfo)) {
_myTagsInfo.push_back({ .id = id, .count = 0 });
i = end(_myTagsInfo) - 1;
void Reactions::incrementMyTag(const ReactionId &id, SavedSublist *sublist) {
if (sublist) {
incrementMyTag(id, nullptr);
}
auto &my = _myTags[sublist];
auto i = ranges::find(my.info, id, &MyTagInfo::id);
if (i == end(my.info)) {
my.info.push_back({ .id = id, .count = 0 });
i = end(my.info) - 1;
}
++i->count;
while (i != begin(_myTagsInfo)) {
while (i != begin(my.info)) {
auto j = i - 1;
if (j->count >= i->count) {
break;
@@ -395,38 +408,64 @@ void Reactions::incrementMyTag(const ReactionId &id) {
std::swap(*i, *j);
i = j;
}
scheduleMyTagsUpdate();
scheduleMyTagsUpdate(sublist);
}
void Reactions::decrementMyTag(const ReactionId &id) {
auto i = ranges::find(_myTagsInfo, id, &MyTagInfo::id);
if (i->count <= 0) {
void Reactions::decrementMyTag(const ReactionId &id, SavedSublist *sublist) {
if (sublist) {
decrementMyTag(id, nullptr);
}
auto &my = _myTags[sublist];
auto i = ranges::find(my.info, id, &MyTagInfo::id);
if (i != end(my.info) && i->count > 0) {
--i->count;
while (i + 1 != end(my.info)) {
auto j = i + 1;
if (j->count <= i->count) {
break;
}
std::swap(*i, *j);
i = j;
}
}
scheduleMyTagsUpdate(sublist);
}
void Reactions::renameTag(const ReactionId &id, const QString &name) {
auto changed = false;
for (auto &[sublist, my] : _myTags) {
auto i = ranges::find(my.info, id, &MyTagInfo::id);
if (i == end(my.info) || i->title == name) {
continue;
}
i->title = name;
changed = true;
scheduleMyTagsUpdate(sublist);
}
if (!changed) {
return;
}
--i->count;
while (i + 1 != end(_myTagsInfo)) {
auto j = i + 1;
if (j->count <= i->count) {
break;
}
std::swap(*i, *j);
i = j;
}
scheduleMyTagsUpdate();
_myTagRenamed.fire_copy(id);
using Flag = MTPmessages_UpdateSavedReactionTag::Flag;
_owner->session().api().request(MTPmessages_UpdateSavedReactionTag(
MTP_flags(name.isEmpty() ? Flag(0) : Flag::f_title),
ReactionToMTP(id),
MTP_string(name)
)).send();
}
void Reactions::scheduleMyTagsUpdate() {
_myTagsUpdateScheduled = true;
void Reactions::scheduleMyTagsUpdate(SavedSublist *sublist) {
auto &my = _myTags[sublist];
my.updateScheduled = true;
crl::on_main(&session(), [=] {
if (!_myTagsUpdateScheduled) {
auto &my = _myTags[sublist];
if (!my.updateScheduled) {
return;
}
_myTagsUpdateScheduled = false;
_myTagsIds = _myTagsInfo | ranges::views::transform(
&MyTagInfo::id
) | ranges::to_vector;
_myTags = resolveByIds(_myTagsIds, _unresolvedMyTags);
_myTagsUpdated.fire({});
my.updateScheduled = false;
my.tags = resolveByInfos(my.info, _unresolvedMyTags, sublist);
_myTagsUpdated.fire_copy(sublist);
});
}
@@ -492,13 +531,20 @@ rpl::producer<> Reactions::favoriteUpdates() const {
}
rpl::producer<> Reactions::myTagsUpdates() const {
return _myTagsUpdated.events();
return _myTagsUpdated.events(
) | rpl::filter(
!rpl::mappers::_1
) | rpl::to_empty;
}
rpl::producer<> Reactions::tagsUpdates() const {
return _tagsUpdated.events();
}
rpl::producer<ReactionId> Reactions::myTagRenamed() const {
return _myTagRenamed.events();
}
void Reactions::preloadImageFor(const ReactionId &id) {
if (_images.contains(id) || id.emoji().isEmpty()) {
return;
@@ -741,23 +787,29 @@ void Reactions::requestGeneric() {
}).send();
}
void Reactions::requestMyTags() {
if (_myTagsRequestId) {
void Reactions::requestMyTags(SavedSublist *sublist) {
auto &my = _myTags[sublist];
if (my.requestId) {
return;
}
auto &api = _owner->session().api();
_myTagsRequestScheduled = false;
_myTagsRequestId = api.request(MTPmessages_GetSavedReactionTags(
MTP_long(_myTagsHash)
my.requestScheduled = false;
using Flag = MTPmessages_GetSavedReactionTags::Flag;
my.requestId = api.request(MTPmessages_GetSavedReactionTags(
MTP_flags(sublist ? Flag::f_peer : Flag()),
(sublist ? sublist->peer()->input : MTP_inputPeerEmpty()),
MTP_long(my.hash)
)).done([=](const MTPmessages_SavedReactionTags &result) {
_myTagsRequestId = 0;
auto &my = _myTags[sublist];
my.requestId = 0;
result.match([&](const MTPDmessages_savedReactionTags &data) {
updateMyTags(data);
updateMyTags(sublist, data);
}, [](const MTPDmessages_savedReactionTagsNotModified&) {
});
}).fail([=] {
_myTagsRequestId = 0;
_myTagsHash = 0;
auto &my = _myTags[sublist];
my.requestId = 0;
my.hash = 0;
}).send();
}
@@ -849,14 +901,28 @@ void Reactions::updateGeneric(const MTPDmessages_stickerSet &data) {
}
}
void Reactions::updateMyTags(const MTPDmessages_savedReactionTags &data) {
_myTagsHash = data.vhash().v;
_myTagsInfo = ListFromMTP(data);
_myTagsIds = _myTagsInfo | ranges::views::transform(
&MyTagInfo::id
) | ranges::to_vector;
_myTags = resolveByIds(_myTagsIds, _unresolvedMyTags);
_myTagsUpdated.fire({});
void Reactions::updateMyTags(
SavedSublist *sublist,
const MTPDmessages_savedReactionTags &data) {
auto &my = _myTags[sublist];
my.hash = data.vhash().v;
auto list = ListFromMTP(data);
auto renamed = base::flat_set<ReactionId>();
if (!sublist) {
for (const auto &info : list) {
const auto j = ranges::find(my.info, info.id, &MyTagInfo::id);
const auto was = (j != end(my.info)) ? j->title : QString();
if (info.title != was) {
renamed.emplace(info.id);
}
}
}
my.info = std::move(list);
my.tags = resolveByInfos(my.info, _unresolvedMyTags, sublist);
_myTagsUpdated.fire_copy(sublist);
for (const auto &id : renamed) {
_myTagRenamed.fire_copy(id);
}
}
void Reactions::updateTags(const MTPDmessages_reactions &data) {
@@ -908,7 +974,9 @@ void Reactions::customEmojiResolveDone(not_null<DocumentData*> document) {
const auto j = _unresolvedRecent.find(id);
const auto recent = (j != end(_unresolvedRecent));
const auto k = _unresolvedMyTags.find(id);
const auto myTag = (k != end(_unresolvedMyTags));
const auto myTagSublists = (k != end(_unresolvedMyTags))
? base::take(k->second)
: base::flat_set<SavedSublist*>();
const auto l = _unresolvedTags.find(id);
const auto tag = (l != end(_unresolvedTags));
if (favorite) {
@@ -923,9 +991,12 @@ void Reactions::customEmojiResolveDone(not_null<DocumentData*> document) {
_unresolvedRecent.erase(j);
_recent = resolveByIds(_recentIds, _unresolvedRecent);
}
if (myTag) {
if (!myTagSublists.empty()) {
_unresolvedMyTags.erase(k);
_myTags = resolveByIds(_myTagsIds, _unresolvedMyTags);
for (const auto &sublist : myTagSublists) {
auto &my = _myTags[sublist];
my.tags = resolveByInfos(my.info, _unresolvedMyTags, sublist);
}
}
if (tag) {
_unresolvedTags.erase(l);
@@ -940,8 +1011,8 @@ void Reactions::customEmojiResolveDone(not_null<DocumentData*> document) {
if (recent) {
_recentUpdated.fire({});
}
if (myTag) {
_myTagsUpdated.fire({});
for (const auto &sublist : myTagSublists) {
_myTagsUpdated.fire_copy(sublist);
}
if (tag) {
_tagsUpdated.fire({});
@@ -978,6 +1049,50 @@ std::vector<Reaction> Reactions::resolveByIds(
return result;
}
std::optional<Reaction> Reactions::resolveByInfo(
const MyTagInfo &info,
SavedSublist *sublist) {
const auto withInfo = [&](Reaction reaction) {
reaction.count = info.count;
reaction.title = sublist ? myTagTitle(reaction.id) : info.title;
return reaction;
};
if (const auto emoji = info.id.emoji(); !emoji.isEmpty()) {
const auto i = ranges::find(_available, info.id, &Reaction::id);
if (i != end(_available)) {
return withInfo(*i);
}
} else if (const auto customId = info.id.custom()) {
const auto document = _owner->document(customId);
if (document->sticker()) {
return withInfo(CustomReaction(document));
}
}
return {};
}
std::vector<Reaction> Reactions::resolveByInfos(
const std::vector<MyTagInfo> &infos,
base::flat_map<
ReactionId,
base::flat_set<SavedSublist*>> &unresolved,
SavedSublist *sublist) {
auto result = std::vector<Reaction>();
result.reserve(infos.size());
for (const auto &tag : infos) {
if (auto resolved = resolveByInfo(tag, sublist)) {
result.push_back(*resolved);
} else if (const auto i = unresolved.find(tag.id)
; i != end(unresolved)) {
i->second.emplace(sublist);
} else {
unresolved[tag.id].emplace(sublist);
resolve(tag.id);
}
}
return result;
}
void Reactions::resolve(const ReactionId &id) {
if (const auto emoji = id.emoji(); !emoji.isEmpty()) {
refreshDefault();
@@ -1016,7 +1131,6 @@ std::optional<Reaction> Reactions::parse(const MTPAvailableReaction &entry) {
*data.varound_animation()).get()
: nullptr),
.active = !data.is_inactive(),
.premium = data.is_premium(),
})
: std::nullopt;
});
@@ -1107,6 +1221,20 @@ Reaction *Reactions::lookupTemporary(const ReactionId &id) {
return nullptr;
}
rpl::producer<std::vector<Reaction>> Reactions::myTagsValue(
SavedSublist *sublist) {
refreshMyTags(sublist);
const auto list = [=] {
return _myTags[sublist].tags;
};
return rpl::single(
list()
) | rpl::then(_myTagsUpdated.events(
) | rpl::filter(
rpl::mappers::_1 == sublist
) | rpl::map(list));
}
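Reviewer note: myTagsValue() is the consumer-facing entry point for the per-sublist cache — nullptr asks for the tags of the whole Saved Messages chat, a concrete sublist gets its own list. A hedged consumption sketch (mirrors how the dialogs list subscribes further below; `rebuildTagsStrip` and `lifetime` are illustrative):

    // Sketch: keep a tags strip in sync with one sublist's saved tags.
    owner->reactions().myTagsValue(sublist
    ) | rpl::start_with_next([=](const std::vector<Data::Reaction> &tags) {
        rebuildTagsStrip(tags); // hypothetical UI refresh
    }, lifetime);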
void Reactions::repaintCollected() {
const auto now = crl::now();
auto closest = crl::time();
@@ -1209,7 +1337,8 @@ void MessageReactions::add(const ReactionId &id, bool addToRecent) {
auto my = 0;
const auto tags = _item->reactionsAreTags();
if (tags) {
history->owner().reactions().incrementMyTag(id);
const auto sublist = _item->savedSublist();
history->owner().reactions().incrementMyTag(id, sublist);
}
_list.erase(ranges::remove_if(_list, [&](MessageReaction &one) {
const auto removing = one.my && (my == myLimit || ++my == myLimit);
@@ -1233,7 +1362,8 @@ void MessageReactions::add(const ReactionId &id, bool addToRecent) {
}
}
if (tags) {
history->owner().reactions().decrementMyTag(one.id);
const auto sublist = _item->savedSublist();
history->owner().reactions().decrementMyTag(one.id, sublist);
}
return removed;
}), end(_list));
@@ -1273,6 +1403,7 @@ void MessageReactions::remove(const ReactionId &id) {
return;
}
i->my = false;
const auto tags = _item->reactionsAreTags();
const auto removed = !--i->count;
if (removed) {
_list.erase(i);
@@ -1290,6 +1421,10 @@ void MessageReactions::remove(const ReactionId &id) {
}
}
}
if (tags) {
const auto sublist = _item->savedSublist();
history->owner().reactions().decrementMyTag(id, sublist);
}
auto &owner = history->owner();
owner.reactions().send(_item, false);
owner.notifyItemDataChange(_item);

View File

@@ -21,6 +21,7 @@ class CustomEmoji;
namespace Data {
class SavedSublist;
class DocumentMedia;
class Session;
@@ -34,13 +35,12 @@ struct Reaction {
//not_null<DocumentData*> activateEffects;
DocumentData *centerIcon = nullptr;
DocumentData *aroundAnimation = nullptr;
int count = 0;
bool active = false;
bool premium = false;
};
struct PossibleItemReactionsRef {
std::vector<not_null<const Reaction*>> recent;
bool morePremiumAvailable = false;
bool customAllowed = false;
bool tags = false;
};
@@ -50,7 +50,6 @@ struct PossibleItemReactions {
explicit PossibleItemReactions(const PossibleItemReactionsRef &other);
std::vector<Reaction> recent;
bool morePremiumAvailable = false;
bool customAllowed = false;
bool tags = false;
};
@@ -78,7 +77,7 @@ public:
void refreshRecent();
void refreshRecentDelayed();
void refreshDefault();
void refreshMyTags();
void refreshMyTags(SavedSublist *sublist = nullptr);
void refreshMyTagsDelayed();
void refreshTags();
@@ -92,11 +91,13 @@ public:
};
[[nodiscard]] const std::vector<Reaction> &list(Type type) const;
[[nodiscard]] const std::vector<MyTagInfo> &myTagsInfo() const;
[[nodiscard]] const QString &myTagTitle(const ReactionId &id) const;
[[nodiscard]] ReactionId favoriteId() const;
[[nodiscard]] const Reaction *favorite() const;
void setFavorite(const ReactionId &id);
void incrementMyTag(const ReactionId &id);
void decrementMyTag(const ReactionId &id);
void incrementMyTag(const ReactionId &id, SavedSublist *sublist);
void decrementMyTag(const ReactionId &id, SavedSublist *sublist);
void renameTag(const ReactionId &id, const QString &name);
[[nodiscard]] DocumentData *chooseGenericAnimation(
not_null<DocumentData*> custom) const;
@@ -106,6 +107,7 @@ public:
[[nodiscard]] rpl::producer<> favoriteUpdates() const;
[[nodiscard]] rpl::producer<> myTagsUpdates() const;
[[nodiscard]] rpl::producer<> tagsUpdates() const;
[[nodiscard]] rpl::producer<ReactionId> myTagRenamed() const;
enum class ImageSize {
BottomInfo,
@@ -127,6 +129,9 @@ public:
void clearTemporary();
[[nodiscard]] Reaction *lookupTemporary(const ReactionId &id);
[[nodiscard]] rpl::producer<std::vector<Reaction>> myTagsValue(
SavedSublist *sublist = nullptr);
[[nodiscard]] static bool HasUnread(const MTPMessageReactions &data);
static void CheckUnknownForUnread(
not_null<Session*> owner,
@@ -140,6 +145,20 @@ private:
std::unique_ptr<Ui::AnimatedIcon> icon;
bool fromSelectAnimation = false;
};
struct TagsBySublist {
TagsBySublist() = default;
TagsBySublist(TagsBySublist&&) = default;
TagsBySublist(const TagsBySublist&) = delete;
TagsBySublist &operator=(TagsBySublist&&) = default;
TagsBySublist &operator=(const TagsBySublist&) = delete;
std::vector<Reaction> tags;
std::vector<MyTagInfo> info;
uint64 hash = 0;
mtpRequestId requestId = 0;
bool requestScheduled = false;
bool updateScheduled = false;
};
[[nodiscard]] not_null<CustomEmojiManager::Listener*> resolveListener();
void customEmojiResolveDone(not_null<DocumentData*> document) override;
@@ -148,14 +167,16 @@ private:
void requestRecent();
void requestDefault();
void requestGeneric();
void requestMyTags();
void requestMyTags(SavedSublist *sublist = nullptr);
void requestTags();
void updateTop(const MTPDmessages_reactions &data);
void updateRecent(const MTPDmessages_reactions &data);
void updateDefault(const MTPDmessages_availableReactions &data);
void updateGeneric(const MTPDmessages_stickerSet &data);
void updateMyTags(const MTPDmessages_savedReactionTags &data);
void updateMyTags(
SavedSublist *sublist,
const MTPDmessages_savedReactionTags &data);
void updateTags(const MTPDmessages_reactions &data);
void recentUpdated();
@@ -167,9 +188,18 @@ private:
[[nodiscard]] std::vector<Reaction> resolveByIds(
const std::vector<ReactionId> &ids,
base::flat_set<ReactionId> &unresolved);
[[nodiscard]] std::optional<Reaction> resolveByInfo(
const MyTagInfo &info,
SavedSublist *sublist);
[[nodiscard]] std::vector<Reaction> resolveByInfos(
const std::vector<MyTagInfo> &infos,
base::flat_map<
ReactionId,
base::flat_set<SavedSublist*>> &unresolved,
SavedSublist *sublist);
void resolve(const ReactionId &id);
void applyFavorite(const ReactionId &id);
void scheduleMyTagsUpdate();
void scheduleMyTagsUpdate(SavedSublist *sublist);
[[nodiscard]] std::optional<Reaction> parse(
const MTPAvailableReaction &entry);
@@ -192,10 +222,10 @@ private:
std::vector<Reaction> _recent;
std::vector<ReactionId> _recentIds;
base::flat_set<ReactionId> _unresolvedRecent;
std::vector<Reaction> _myTags;
std::vector<ReactionId> _myTagsIds;
std::vector<MyTagInfo> _myTagsInfo;
base::flat_set<ReactionId> _unresolvedMyTags;
base::flat_map<SavedSublist*, TagsBySublist> _myTags;
base::flat_map<
ReactionId,
base::flat_set<SavedSublist*>> _unresolvedMyTags;
std::vector<Reaction> _tags;
std::vector<ReactionId> _tagsIds;
base::flat_set<ReactionId> _unresolvedTags;
@@ -216,8 +246,9 @@ private:
rpl::event_stream<> _recentUpdated;
rpl::event_stream<> _defaultUpdated;
rpl::event_stream<> _favoriteUpdated;
rpl::event_stream<> _myTagsUpdated;
rpl::event_stream<SavedSublist*> _myTagsUpdated;
rpl::event_stream<> _tagsUpdated;
rpl::event_stream<ReactionId> _myTagRenamed;
// We need &i->second stay valid while inserting new items.
// We need &i->second to stay valid while inserting new items.
// So we use std::map instead of base::flat_map here.
@@ -237,11 +268,6 @@ private:
mtpRequestId _genericRequestId = 0;
mtpRequestId _myTagsRequestId = 0;
bool _myTagsRequestScheduled = false;
bool _myTagsUpdateScheduled = false;
uint64 _myTagsHash = 0;
mtpRequestId _tagsRequestId = 0;
uint64 _tagsHash = 0;

View File

@@ -277,7 +277,7 @@ void SavedMessages::apply(const MTPDupdatePinnedSavedDialogs &update) {
if (!ranges::none_of(order, notLoaded)) {
loadPinned();
} else {
_chatsList.pinned()->applyList(_owner, order);
_chatsList.pinned()->applyList(this, order);
_owner->notifyPinnedDialogsOrderUpdated();
}
}

View File

@@ -295,6 +295,16 @@ Session::Session(not_null<Main::Session*> session)
}
}, _lifetime);
_reactions->myTagRenamed(
) | rpl::start_with_next([=](const ReactionId &id) {
const auto i = _viewsByTag.find(id);
if (i != end(_viewsByTag)) {
for (const auto &view : i->second) {
notifyItemDataChange(view->data());
}
}
}, _lifetime);
Spellchecker::HighlightReady(
) | rpl::start_with_next([=](uint64 processId) {
highlightProcessDone(processId);
@@ -4608,6 +4618,28 @@ rpl::producer<not_null<PeerData*>> Session::peerDecorationsUpdated() const {
return _peerDecorationsUpdated.events();
}
void Session::viewTagsChanged(
not_null<ViewElement*> view,
std::vector<Data::ReactionId> &&was,
std::vector<Data::ReactionId> &&now) {
for (const auto &id : now) {
const auto i = ranges::remove(was, id);
if (i != end(was)) {
was.erase(i, end(was));
} else {
_viewsByTag[id].emplace(view);
}
}
for (const auto &id : was) {
const auto i = _viewsByTag.find(id);
if (i != end(_viewsByTag)
&& i->second.remove(view)
&& i->second.empty()) {
_viewsByTag.erase(i);
}
}
}
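Reviewer note: viewTagsChanged() maintains a reverse index from tag id to the views currently showing it, which is what lets a rename (myTagRenamed above) repaint exactly the affected messages. A hedged sketch of the expected call site in a view element — member and accessor names here are assumptions, only the viewTagsChanged() signature is taken from the hunk above:

    // Sketch: a view reports its old and new tag ids whenever they change.
    auto was = base::take(_displayedTagIds); // hypothetical cached ids
    auto now = currentTagIds();              // hypothetical collector
    _displayedTagIds = now;
    history()->owner().viewTagsChanged(this, std::move(was), std::move(now));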
void Session::clearLocalStorage() {
_cache->close();
_cache->clear();

View File

@@ -62,6 +62,7 @@ class NotifySettings;
class CustomEmojiManager;
class Stories;
class SavedMessages;
struct ReactionId;
struct RepliesReadTillUpdate {
FullMsgId id;
@@ -742,6 +743,11 @@ public:
void applyStatsDcId(not_null<ChannelData*>, MTP::DcId);
[[nodiscard]] MTP::DcId statsDcId(not_null<ChannelData*>);
void viewTagsChanged(
not_null<ViewElement*> view,
std::vector<ReactionId> &&was,
std::vector<ReactionId> &&now);
void clearLocalStorage();
private:
@@ -1005,9 +1011,14 @@ private:
base::flat_map<uint64, not_null<GroupCall*>> _groupCalls;
rpl::event_stream<InviteToCall> _invitesToCalls;
base::flat_map<uint64, base::flat_set<not_null<UserData*>>> _invitedToCallUsers;
base::flat_map<
uint64,
base::flat_set<not_null<UserData*>>> _invitedToCallUsers;
base::flat_set<not_null<ViewElement*>> _shownSpoilers;
base::flat_map<
ReactionId,
base::flat_set<not_null<ViewElement*>>> _viewsByTag;
History *_topPromoted = nullptr;

View File

@@ -626,3 +626,10 @@ searchedBarPosition: point(17px, 7px);
dialogsSearchTagSkip: point(8px, 4px);
dialogsSearchTagBottom: 10px;
dialogsSearchTagLocked: icon{{ "dialogs/mini_tag_lock", lightButtonFgOver }};
dialogsSearchTagPromo: defaultTextStyle;
dialogsSearchTagArrow: icon{{ "dialogs/mini_arrow", windowSubTextFg }};
dialogsSearchTagArrowPadding: margins(-6px, 3px, 0px, 0px);
dialogsSearchTagPromoLeft: 6px;
dialogsSearchTagPromoRight: 1px;
dialogsSearchTagPromoSkip: 6px;

View File

@@ -16,10 +16,12 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "dialogs/dialogs_widget.h"
#include "dialogs/dialogs_search_from_controllers.h"
#include "dialogs/dialogs_search_tags.h"
#include "history/view/history_view_context_menu.h"
#include "history/history.h"
#include "history/history_item.h"
#include "core/shortcuts.h"
#include "core/application.h"
#include "core/click_handler_types.h"
#include "core/shortcuts.h"
#include "ui/widgets/buttons.h"
#include "ui/widgets/popup_menu.h"
#include "ui/widgets/scroll_area.h"
@@ -1430,8 +1432,10 @@ void InnerWidget::mousePressEvent(QMouseEvent *e) {
});
} else if (_pressed) {
auto row = _pressed;
const auto updateCallback = [this, row] {
if (!_pinnedShiftAnimation.animating()) {
const auto weak = Ui::MakeWeak(this);
const auto updateCallback = [weak, row] {
const auto strong = weak.data();
if (!strong || !strong->_pinnedShiftAnimation.animating()) {
row->entry()->updateChatListEntry();
}
};
@@ -1781,7 +1785,11 @@ void InnerWidget::mousePressReleased(
}
}
if (auto activated = ClickHandler::unpressed()) {
ActivateClickHandler(window(), activated, ClickContext{ button });
ActivateClickHandler(window(), activated, ClickContext{
button,
QVariant::fromValue(ClickHandlerContext{
.sessionWindow = _controller,
}) });
}
}
@@ -1855,6 +1863,9 @@ void InnerWidget::setSearchedPressed(int pressed) {
}
void InnerWidget::resizeEvent(QResizeEvent *e) {
if (_searchTags) {
_searchTags->resizeToWidth(width() - 2 * _searchTagsLeft);
}
resizeEmptyLabel();
moveCancelSearchButtons();
}
@@ -2985,21 +2996,12 @@ void InnerWidget::searchInChat(
if (peer->isSelf()) {
const auto reactions = &peer->owner().reactions();
const auto list = [=] {
// Disable reactions as tags for now.
//return reactions->list(Data::Reactions::Type::MyTags);
return std::vector<Data::Reaction>();
};
_searchTags = std::make_unique<SearchTags>(
&peer->owner(),
rpl::single(
list()
) | rpl::then(
reactions->myTagsUpdates() | rpl::map(list)
),
reactions->myTagsValue(sublist),
tags);
_searchTags->selectedValue(
_searchTags->selectedChanges(
) | rpl::start_with_next([=](std::vector<Data::ReactionId> &&list) {
_searchTagsSelected = std::move(list);
}, _searchTags->lifetime());
@@ -3009,8 +3011,18 @@ void InnerWidget::searchInChat(
update(0, searchInChatOffset(), width(), height);
}, _searchTags->lifetime());
_searchTags->heightValue() | rpl::filter(
rpl::mappers::_1 > 0
_searchTags->menuRequests(
) | rpl::start_with_next([=](Data::ReactionId id) {
HistoryView::ShowTagInListMenu(
&_menu,
_lastMousePosition.value_or(QCursor::pos()),
this,
id,
_controller);
}, _searchTags->lifetime());
_searchTags->heightValue() | rpl::skip(
1
) | rpl::start_with_next([=] {
refresh();
moveCancelSearchButtons();
@@ -3054,11 +3066,11 @@ void InnerWidget::searchInChat(
_searchInChat || !_filter.isEmpty());
}
auto InnerWidget::searchTagsValue() const
auto InnerWidget::searchTagsChanges() const
-> rpl::producer<std::vector<Data::ReactionId>> {
return _searchTags
? _searchTags->selectedValue()
: rpl::single(std::vector<Data::ReactionId>());
? _searchTags->selectedChanges()
: rpl::never<std::vector<Data::ReactionId>>();
}
void InnerWidget::refreshSearchInChatLabel() {

View File

@@ -143,7 +143,7 @@ public:
Key key,
PeerData *from,
std::vector<Data::ReactionId> tags);
[[nodiscard]] auto searchTagsValue() const
[[nodiscard]] auto searchTagsChanges() const
-> rpl::producer<std::vector<Data::ReactionId>>;
void applyFilterUpdate(QString newFilter, bool force = false);

View File

@@ -7,6 +7,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "dialogs/dialogs_pinned_list.h"
#include "data/data_saved_messages.h"
#include "dialogs/dialogs_key.h"
#include "dialogs/dialogs_entry.h"
#include "history/history.h"
@@ -86,6 +87,8 @@ void PinnedList::clear() {
void PinnedList::applyList(
not_null<Data::Session*> owner,
const QVector<MTPDialogPeer> &list) {
Expects(this != owner->savedMessages().chatsList()->pinned());
clear();
for (const auto &peer : list) {
peer.match([&](const MTPDdialogPeer &data) {
@@ -98,9 +101,28 @@ void PinnedList::applyList(
}
}
void PinnedList::applyList(
not_null<Data::SavedMessages*> sublistsOwner,
const QVector<MTPDialogPeer> &list) {
Expects(this == sublistsOwner->chatsList()->pinned());
clear();
for (const auto &peer : list) {
peer.match([&](const MTPDdialogPeer &data) {
if (const auto peerId = peerFromMTP(data.vpeer())) {
const auto peer = sublistsOwner->owner().peer(peerId);
addPinned(sublistsOwner->sublist(peer));
}
}, [](const MTPDdialogPeerFolder &data) {
});
}
}
void PinnedList::applyList(
not_null<Data::Forum*> forum,
const QVector<MTPint> &list) {
Expects(this == forum->topicsList()->pinned());
clear();
for (const auto &topicId : list) {
addPinned(forum->topicFor(topicId.v));

View File

@@ -10,6 +10,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
class History;
namespace Data {
class SavedMessages;
class Session;
class Forum;
} // namespace Data
@@ -36,6 +37,9 @@ public:
void applyList(
not_null<Data::Session*> owner,
const QVector<MTPDialogPeer> &list);
void applyList(
not_null<Data::SavedMessages*> sublistsOwner,
const QVector<MTPDialogPeer> &list);
void applyList(
not_null<Data::Forum*> forum,
const QVector<MTPint> &list);

View File

@@ -8,25 +8,90 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "dialogs/dialogs_search_tags.h"
#include "base/qt/qt_key_modifiers.h"
#include "boxes/premium_preview_box.h"
#include "core/click_handler_types.h"
#include "core/ui_integration.h"
#include "data/stickers/data_custom_emoji.h"
#include "data/data_document.h"
#include "data/data_message_reactions.h"
#include "data/data_peer_values.h"
#include "data/data_session.h"
#include "history/view/reactions/history_view_reactions.h"
#include "main/main_session.h"
#include "lang/lang_keys.h"
#include "ui/effects/animation_value.h"
#include "ui/text/text_utilities.h"
#include "ui/painter.h"
#include "ui/power_saving.h"
#include "window/window_session_controller.h"
#include "styles/style_chat.h"
#include "styles/style_dialogs.h"
namespace Dialogs {
namespace {
[[nodiscard]] QString ComposeText(const Data::Reaction &tag) {
auto result = tag.title;
if (!result.isEmpty() && tag.count > 0) {
result.append(' ');
}
if (tag.count > 0) {
result.append(QString::number(tag.count));
}
return TextUtilities::SingleLine(result);
}
[[nodiscard]] ClickHandlerPtr MakePromoLink() {
return std::make_shared<LambdaClickHandler>([=](ClickContext context) {
const auto my = context.other.value<ClickHandlerContext>();
if (const auto controller = my.sessionWindow.get()) {
ShowPremiumPreviewBox(
controller,
PremiumPreview::TagsForMessages);
}
});
}
[[nodiscard]] Ui::Text::String FillAdditionalText(
not_null<Data::Session*> owner,
int width) {
auto emoji = Ui::Text::SingleCustomEmoji(
owner->customEmojiManager().registerInternalEmoji(
st::dialogsSearchTagArrow,
st::dialogsSearchTagArrowPadding));
auto result = Ui::Text::String();
const auto context = Core::MarkedTextContext{
.session = &owner->session(),
.customEmojiRepaint = [] {},
.customEmojiLoopLimit = 1,
};
const auto attempt = [&](const auto &phrase) {
result.setMarkedText(
st::dialogsSearchTagPromo,
phrase(tr::now, lt_arrow, emoji, Ui::Text::WithEntities),
kMarkupTextOptions,
context);
return result.maxWidth() < width;
};
if (attempt(tr::lng_add_tag_phrase_long)
|| attempt(tr::lng_add_tag_phrase)) {
return result;
}
return {};
}
} // namespace
struct SearchTags::Tag {
Data::ReactionId id;
std::unique_ptr<Ui::Text::CustomEmoji> custom;
QString text;
int textWidth = 0;
mutable QImage image;
QRect geometry;
ClickHandlerPtr link;
bool selected = false;
bool promo = false;
};
SearchTags::SearchTags(
@@ -35,10 +100,13 @@ SearchTags::SearchTags(
std::vector<Data::ReactionId> selected)
: _owner(owner)
, _added(selected) {
std::move(
tags
) | rpl::start_with_next([=](const std::vector<Data::Reaction> &list) {
fill(list);
rpl::combine(
std::move(tags),
Data::AmPremiumValue(&owner->session())
) | rpl::start_with_next([=](
const std::vector<Data::Reaction> &list,
bool premium) {
fill(list, premium);
}, _lifetime);
// Mark the `selected` reactions as selected in `_tags`.
@@ -57,12 +125,22 @@ SearchTags::SearchTags(
SearchTags::~SearchTags() = default;
void SearchTags::fill(const std::vector<Data::Reaction> &list) {
void SearchTags::fill(
const std::vector<Data::Reaction> &list,
bool premium) {
const auto selected = collectSelected();
_tags.clear();
_tags.reserve(list.size());
const auto link = [&](Data::ReactionId id) {
return std::make_shared<LambdaClickHandler>(crl::guard(this, [=] {
return std::make_shared<GenericClickHandler>(crl::guard(this, [=](
ClickContext context) {
if (!premium) {
MakePromoLink()->onClick(context);
return;
} else if (context.button == Qt::RightButton) {
_menuRequests.fire_copy(id);
return;
}
const auto i = ranges::find(_tags, id, &Tag::id);
if (i != end(_tags)) {
if (!i->selected && !base::IsShiftPressed()) {
@@ -75,7 +153,7 @@ void SearchTags::fill(const std::vector<Data::Reaction> &list) {
}
}));
};
const auto push = [&](Data::ReactionId id) {
const auto push = [&](Data::ReactionId id, const QString &text) {
const auto customId = id.custom();
_tags.push_back({
.id = id,
@@ -84,6 +162,8 @@ void SearchTags::fill(const std::vector<Data::Reaction> &list) {
customId,
[=] { _repaintRequests.fire({}); })
: nullptr),
.text = text,
.textWidth = st::reactionInlineTagFont->width(text),
.link = link(id),
.selected = ranges::contains(selected, id),
});
@@ -91,42 +171,70 @@ void SearchTags::fill(const std::vector<Data::Reaction> &list) {
_owner->reactions().preloadImageFor(id);
}
};
if (!premium) {
const auto text = (list.empty() && _added.empty())
? tr::lng_add_tag_button(tr::now)
: tr::lng_unlock_tags(tr::now);
_tags.push_back({
.id = Data::ReactionId(),
.text = text,
.textWidth = st::reactionInlineTagFont->width(text),
.link = MakePromoLink(),
.promo = true,
});
}
for (const auto &reaction : list) {
push(reaction.id);
if (reaction.count > 0
|| ranges::contains(_added, reaction.id)
|| ranges::contains(selected, reaction.id)) {
push(reaction.id, ComposeText(reaction));
}
}
for (const auto &reaction : _added) {
if (!ranges::contains(_tags, reaction, &Tag::id)) {
push(reaction);
push(reaction, QString());
}
}
if (_width > 0) {
layout();
_repaintRequests.fire({});
}
}
void SearchTags::layout() {
Expects(_width > 0);
const auto &bg = validateBg(false);
if (_tags.empty()) {
_additionalText = {};
_height = 0;
return;
}
const auto &bg = validateBg(false, false);
const auto skip = st::dialogsSearchTagSkip;
const auto size = bg.size() / bg.devicePixelRatio();
const auto xsingle = size.width() + skip.x();
const auto ysingle = size.height() + skip.y();
const auto columns = std::max((_width + skip.x()) / xsingle, 1);
const auto rows = (_tags.size() + columns - 1) / columns;
for (auto row = 0; row != rows; ++row) {
for (auto column = 0; column != columns; ++column) {
const auto index = row * columns + column;
if (index >= _tags.size()) {
break;
}
const auto x = column * xsingle;
const auto y = row * ysingle;
_tags[index].geometry = QRect(QPoint(x, y), size);
const auto xbase = size.width();
const auto ybase = size.height();
auto x = 0;
auto y = 0;
for (auto &tag : _tags) {
const auto width = xbase + (tag.promo
? std::max(0, tag.textWidth - st::dialogsSearchTagPromoLeft - st::dialogsSearchTagPromoRight)
: tag.textWidth);
if (x > 0 && x + width > _width) {
x = 0;
y += ybase + skip.y();
}
tag.geometry = QRect(x, y, width, ybase);
x += width + skip.x();
}
_height = y + ybase + st::dialogsSearchTagBottom;
if (_tags.size() == 1 && _tags.front().promo) {
_additionalLeft = x - skip.x() + st::dialogsSearchTagPromoSkip;
const auto additionalWidth = _width - _additionalLeft;
_additionalText = FillAdditionalText(_owner, additionalWidth);
} else {
_additionalText = {};
}
const auto bottom = st::dialogsSearchTagBottom;
_height = rows ? (rows * ysingle - skip.y() + bottom) : 0;
}
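
Note: the replaced layout used a fixed grid cell, while the new loop wraps variable-width tags into rows. A small standalone sketch of the same wrapping idea, using plain structs with illustrative names:

    #include <vector>

    struct Item { int width = 0; int x = 0; int y = 0; };

    // Lay out items of varying widths into rows no wider than maxWidth and
    // return the total height; rowHeight/skip play the role of ybase/skip.
    int LayoutRows(std::vector<Item> &items, int maxWidth, int rowHeight, int skipX, int skipY) {
        auto x = 0;
        auto y = 0;
        for (auto &item : items) {
            if (x > 0 && x + item.width > maxWidth) { // wrap before overflowing
                x = 0;
                y += rowHeight + skipY;
            }
            item.x = x;
            item.y = y;
            x += item.width + skipX;
        }
        return items.empty() ? 0 : (y + rowHeight);
    }
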
void SearchTags::resizeToWidth(int width) {
@@ -153,12 +261,20 @@ ClickHandlerPtr SearchTags::lookupHandler(QPoint point) const {
for (const auto &tag : _tags) {
if (tag.geometry.contains(point.x(), point.y())) {
return tag.link;
} else if (tag.promo
&& !_additionalText.isEmpty()
&& tag.geometry.united(QRect(
_additionalLeft,
tag.geometry.y(),
_additionalText.maxWidth(),
tag.geometry.height())).contains(point.x(), point.y())) {
return tag.link;
}
}
return nullptr;
}
auto SearchTags::selectedValue() const
auto SearchTags::selectedChanges() const
-> rpl::producer<std::vector<Data::ReactionId>> {
return _selectedChanges.events() | rpl::map([=] {
return collectSelected();
@@ -202,8 +318,12 @@ void SearchTags::paintCustomFrame(
_customCache);
}
rpl::producer<Data::ReactionId> SearchTags::menuRequests() const {
return _menuRequests.events();
}
void SearchTags::paint(
QPainter &p,
Painter &p,
QPoint position,
crl::time now,
bool paused) const {
@@ -212,8 +332,9 @@ void SearchTags::paint(
const auto padding = st::reactionInlinePadding;
for (const auto &tag : _tags) {
const auto geometry = tag.geometry.translated(position);
p.drawImage(geometry.topLeft(), validateBg(tag.selected));
if (!tag.custom && tag.image.isNull()) {
paintBackground(p, geometry, tag);
paintText(p, geometry, tag);
if (!tag.custom && !tag.promo && tag.image.isNull()) {
tag.image = _owner->reactions().resolveImageFor(
tag.id,
::Data::Reactions::ImageSize::InlineList);
@@ -222,7 +343,13 @@ void SearchTags::paint(
const auto image = QRect(
inner.topLeft() + QPoint(skip, skip),
QSize(st::reactionInlineImage, st::reactionInlineImage));
if (const auto custom = tag.custom.get()) {
if (tag.promo) {
st::dialogsSearchTagLocked.paintInCenter(p, QRect(
inner.x(),
inner.y() + skip,
size - st::dialogsSearchTagPromoLeft,
st::reactionInlineImage));
} else if (const auto custom = tag.custom.get()) {
const auto textFg = tag.selected
? st::dialogsNameFgActive->c
: st::dialogsNameFgOver->c;
@@ -237,18 +364,86 @@ void SearchTags::paint(
p.drawImage(image.topLeft(), tag.image);
}
}
paintAdditionalText(p, position);
}
const QImage &SearchTags::validateBg(bool selected) const {
void SearchTags::paintAdditionalText(Painter &p, QPoint position) const {
if (_additionalText.isEmpty()) {
return;
}
const auto x = position.x() + _additionalLeft;
const auto tag = _tags.front().geometry;
const auto height = st::dialogsSearchTagPromo.font->height;
const auto y = position.y() + tag.y() + (tag.height() - height) / 2;
p.setPen(st::windowSubTextFg);
_additionalText.drawLeft(p, x, y, _width - _additionalLeft, _width);
}
void SearchTags::paintBackground(
QPainter &p,
QRect geometry,
const Tag &tag) const {
const auto &image = validateBg(tag.selected, tag.promo);
const auto ratio = int(image.devicePixelRatio());
const auto size = image.size() / ratio;
if (const auto fill = geometry.width() - size.width(); fill > 0) {
const auto left = size.width() / 2;
const auto right = size.width() - left;
const auto x = geometry.x();
const auto y = geometry.y();
p.drawImage(
QRect(x, y, left, size.height()),
image,
QRect(QPoint(), QSize(left, size.height()) * ratio));
p.fillRect(
QRect(x + left, y, fill, size.height()),
bgColor(tag.selected, tag.promo));
p.drawImage(
QRect(x + left + fill, y, right, size.height()),
image,
QRect(left * ratio, 0, right * ratio, size.height() * ratio));
} else {
p.drawImage(geometry.topLeft(), image);
}
}
void SearchTags::paintText(
QPainter &p,
QRect geometry,
const Tag &tag) const {
using namespace HistoryView::Reactions;
auto &image = selected ? _selectedBg : _normalBg;
if (tag.text.isEmpty()) {
return;
}
p.setPen(tag.promo
? st::lightButtonFgOver
: tag.selected
? st::dialogsTextFgActive
: st::windowSubTextFg);
p.setFont(st::reactionInlineTagFont);
const auto position = tag.promo
? st::reactionInlineTagPromoPosition
: st::reactionInlineTagNamePosition;
const auto x = geometry.x() + position.x();
const auto y = geometry.y() + position.y();
p.drawText(x, y + st::reactionInlineTagFont->ascent, tag.text);
}
QColor SearchTags::bgColor(bool selected, bool promo) const {
return promo
? st::lightButtonBgOver->c
: selected
? st::dialogsBgActive->c
: st::dialogsBgOver->c;
}
const QImage &SearchTags::validateBg(bool selected, bool promo) const {
using namespace HistoryView::Reactions;
auto &image = promo ? _promoBg : selected ? _selectedBg : _normalBg;
if (image.isNull()) {
const auto tagBg = selected
? st::dialogsBgActive->c
: st::dialogsBgOver->c;
const auto dotBg = selected
? anim::with_alpha(tagBg, InlineList::TagDotAlpha())
: st::windowSubTextFg->c;
const auto tagBg = bgColor(selected, promo);
const auto dotBg = st::transparent->c;
image = InlineList::PrepareTagBg(tagBg, dotBg);
}
return image;
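
Note: the new paintBackground stretches a cached rounded-rect image to an arbitrary tag width by drawing its left half, filling the middle with the flat background colour, and drawing its right half. A minimal Qt sketch of that approach, not the exact tdesktop code:

    #include <QColor>
    #include <QImage>
    #include <QPainter>
    #include <QRect>

    // Stretch a cached rounded background to `geometry` by splitting it in
    // half and filling the gap between the halves with a solid colour.
    void DrawStretchedBg(QPainter &p, QRect geometry, const QImage &cached, QColor fill) {
        const auto ratio = int(cached.devicePixelRatio());
        const auto size = cached.size() / ratio;
        const auto extra = geometry.width() - size.width();
        if (extra <= 0) {
            p.drawImage(geometry.topLeft(), cached);
            return;
        }
        const auto left = size.width() / 2;
        const auto right = size.width() - left;
        const auto x = geometry.x();
        const auto y = geometry.y();
        p.drawImage(
            QRect(x, y, left, size.height()),
            cached,
            QRect(0, 0, left * ratio, size.height() * ratio));
        p.fillRect(QRect(x + left, y, extra, size.height()), fill);
        p.drawImage(
            QRect(x + left + extra, y, right, size.height()),
            cached,
            QRect(left * ratio, 0, right * ratio, size.height() * ratio));
    }
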

View File

@@ -9,6 +9,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "base/weak_ptr.h"
class Painter;
namespace Data {
class Session;
struct Reaction;
@@ -35,11 +37,13 @@ public:
[[nodiscard]] rpl::producer<> repaintRequests() const;
[[nodiscard]] ClickHandlerPtr lookupHandler(QPoint point) const;
[[nodiscard]] auto selectedValue() const
[[nodiscard]] auto selectedChanges() const
-> rpl::producer<std::vector<Data::ReactionId>>;
[[nodiscard]] rpl::producer<Data::ReactionId> menuRequests() const;
void paint(
QPainter &p,
Painter &p,
QPoint position,
crl::time now,
bool paused) const;
@@ -49,7 +53,7 @@ public:
private:
struct Tag;
void fill(const std::vector<Data::Reaction> &list);
void fill(const std::vector<Data::Reaction> &list, bool premium);
void paintCustomFrame(
QPainter &p,
not_null<Ui::Text::CustomEmoji*> emoji,
@@ -59,19 +63,27 @@ private:
const QColor &textColor) const;
void layout();
[[nodiscard]] std::vector<Data::ReactionId> collectSelected() const;
[[nodiscard]] const QImage &validateBg(bool selected) const;
[[nodiscard]] QColor bgColor(bool selected, bool promo) const;
[[nodiscard]] const QImage &validateBg(bool selected, bool promo) const;
void paintAdditionalText(Painter &p, QPoint position) const;
void paintBackground(QPainter &p, QRect geometry, const Tag &tag) const;
void paintText(QPainter &p, QRect geometry, const Tag &tag) const;
const not_null<Data::Session*> _owner;
std::vector<Data::ReactionId> _added;
std::vector<Tag> _tags;
Ui::Text::String _additionalText;
rpl::event_stream<> _selectedChanges;
rpl::event_stream<> _repaintRequests;
rpl::event_stream<Data::ReactionId> _menuRequests;
mutable QImage _normalBg;
mutable QImage _selectedBg;
mutable QImage _promoBg;
mutable QImage _customCache;
mutable int _customSkip = 0;
rpl::variable<int> _height;
int _width = 0;
int _additionalLeft = 0;
rpl::lifetime _lifetime;

View File

@@ -327,6 +327,8 @@ Widget::Widget(
) | rpl::start_with_next([=] {
setSearchInChat((_openedForum && !_searchInChat)
? Key(_openedForum->history())
: _searchInChat.sublist()
? Key(session().data().history(session().user()))
: _searchInChat, nullptr);
applyFilterUpdate(true);
}, lifetime());
@@ -983,7 +985,7 @@ void Widget::setupShortcuts() {
if (_openedForum && !controller()->activeChatCurrent()) {
request->check(Command::Search) && request->handle([=] {
const auto history = _openedForum->history();
controller()->content()->searchInChat(history);
controller()->searchInChat(history);
return true;
});
}
@@ -1926,11 +1928,12 @@ void Widget::searchMessages(QString query, Key inChat) {
controller()->closeFolder();
}
auto tags = std::vector<Data::ReactionId>();
if (const auto tagId = Data::SearchTagFromQuery(query)) {
inChat = session().data().history(session().user());
auto tags = Data::SearchTagsFromQuery(query);
if (!tags.empty()) {
if (!inChat.sublist()) {
inChat = session().data().history(session().user());
}
query = QString();
tags.push_back(tagId);
}
const auto inChatChanged = [&] {
const auto inPeer = inChat.peer();
@@ -2651,7 +2654,7 @@ bool Widget::setSearchInChat(
}
if (searchInPeerUpdated) {
_searchInChat = chat;
controller()->searchInChat = _searchInChat;
controller()->setSearchInChat(_searchInChat);
updateJumpToDateVisibility();
updateStoriesVisibility();
}
@@ -2665,7 +2668,7 @@ bool Widget::setSearchInChat(
}
_searchTags = std::move(tags);
_inner->searchInChat(_searchInChat, _searchFromAuthor, _searchTags);
_searchTagsLifetime = _inner->searchTagsValue(
_searchTagsLifetime = _inner->searchTagsChanges(
) | rpl::start_with_next([=](std::vector<Data::ReactionId> &&list) {
if (_searchTags != list) {
clearSearchCache();
@@ -3002,7 +3005,7 @@ void Widget::updateControlsGeometry() {
}
const auto scrollTop = forumReportTop
+ (_forumReportBar ? _forumReportBar->bar().height() : 0);
const auto scrollHeight = height() - scrollTop;
const auto scrollHeight = height() - scrollTop - bottomSkip;
const auto wasScrollHeight = _scroll->height();
_scroll->setGeometry(0, scrollTop, scrollWidth, scrollHeight);
if (scrollHeight != wasScrollHeight) {

View File

@@ -15,6 +15,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "history/history_item_text.h"
#include "history/admin_log/history_admin_log_section.h"
#include "history/admin_log/history_admin_log_filter.h"
#include "history/view/history_view_context_menu.h"
#include "history/view/history_view_message.h"
#include "history/view/history_view_service_message.h"
#include "history/view/history_view_cursor_state.h"
@@ -642,6 +643,12 @@ void InnerWidget::elementSendBotCommand(
const FullMsgId &context) {
}
void InnerWidget::elementSearchInList(
const QString &query,
const FullMsgId &context) {
}
void InnerWidget::elementHandleViaClick(not_null<UserData*> bot) {
}
@@ -1245,6 +1252,12 @@ void InnerWidget::showContextMenu(QContextMenuEvent *e, bool showFromTouch) {
_menu->addAction(lnkIsVideo ? tr::lng_context_save_video(tr::now) : (lnkIsVoice ? tr::lng_context_save_audio(tr::now) : (lnkIsAudio ? tr::lng_context_save_audio_file(tr::now) : tr::lng_context_save_file(tr::now))), base::fn_delayed(st::defaultDropdownMenu.menu.ripple.hideDuration, this, [this, lnkDocument] {
saveDocumentToFile(lnkDocument);
}), &st::menuIconDownload);
HistoryView::AddCopyFilename(
_menu,
lnkDocument,
[] { return false; });
if (lnkDocument->hasAttachedStickers()) {
const auto controller = _controller;
auto callback = [=] {

View File

@@ -122,6 +122,9 @@ public:
void elementSendBotCommand(
const QString &command,
const FullMsgId &context) override;
void elementSearchInList(
const QString &query,
const FullMsgId &context) override;
void elementHandleViaClick(not_null<UserData*> bot) override;
bool elementIsChatWide() override;
not_null<Ui::PathShiftGradient*> elementPathShiftGradient() override;

View File

@@ -258,6 +258,13 @@ public:
_widget->elementSendBotCommand(command, context);
}
}
void elementSearchInList(
const QString &query,
const FullMsgId &context) override {
if (_widget) {
_widget->elementSearchInList(query, context);
}
}
void elementHandleViaClick(not_null<UserData*> bot) override {
if (_widget) {
_widget->elementHandleViaClick(bot);
@@ -386,14 +393,6 @@ HistoryInner::HistoryInner(
reactionChosen(reaction);
}, lifetime());
_reactionsManager->premiumPromoChosen(
) | rpl::start_with_next([=](FullMsgId context) {
_reactionsManager->updateButton({});
ShowPremiumPreviewBox(
_controller,
PremiumPreview::InfiniteReactions);
}, lifetime());
session().data().peerDecorationsUpdated(
) | rpl::start_with_next([=] {
update();
@@ -2224,6 +2223,11 @@ void HistoryInner::showContextMenu(QContextMenuEvent *e, bool showFromTouch) {
_menu->addAction(lnkIsVideo ? tr::lng_context_save_video(tr::now) : (lnkIsVoice ? tr::lng_context_save_audio(tr::now) : (lnkIsAudio ? tr::lng_context_save_audio_file(tr::now) : tr::lng_context_save_file(tr::now))), base::fn_delayed(st::defaultDropdownMenu.menu.ripple.hideDuration, this, [=] {
saveDocumentToFile(itemId, document);
}), &st::menuIconDownload);
HistoryView::AddCopyFilename(
_menu,
document,
[=] { return showCopyRestrictionForSelected(); });
}
if (document->hasAttachedStickers()) {
_menu->addAction(tr::lng_context_attached_stickers(tr::now), [=] {
@@ -2644,9 +2648,7 @@ void HistoryInner::showContextMenu(QContextMenuEvent *e, bool showFromTouch) {
desiredPosition,
reactItem,
[=](ChosenReaction reaction) { reactionChosen(reaction); },
[=](FullMsgId context) { ShowPremiumPreviewBox(
controller,
PremiumPreview::InfiniteReactions); },
ItemReactionsAbout(reactItem),
_controller->cachedReactionIconFactory().createMethod())
: AttachSelectorResult::Skipped;
if (attached == AttachSelectorResult::Failed) {
@@ -3406,6 +3408,15 @@ void HistoryInner::elementSendBotCommand(
_widget->sendBotCommand({ _history->peer, command, context });
}
void HistoryInner::elementSearchInList(
const QString &query,
const FullMsgId &context) {
const auto inChat = _history->peer->isUser()
? Dialogs::Key()
: Dialogs::Key(_history);
_controller->searchMessages(query, inChat);
}
void HistoryInner::elementHandleViaClick(not_null<UserData*> bot) {
_widget->insertBotCommand('@' + bot->username());
}

View File

@@ -158,6 +158,9 @@ public:
void elementSendBotCommand(
const QString &command,
const FullMsgId &context);
void elementSearchInList(
const QString &query,
const FullMsgId &context);
void elementHandleViaClick(not_null<UserData*> bot);
bool elementIsChatWide();
not_null<Ui::PathShiftGradient*> elementPathShiftGradient();

View File

@@ -765,7 +765,7 @@ HistoryItem::HistoryItem(
: id(id)
, _history(history)
, _from(from ? history->owner().peer(from) : history->peer)
, _flags(FinalizeMessageFlags(flags))
, _flags(FinalizeMessageFlags(history, flags))
, _date(date) {
if (isHistoryEntry() && IsClientMsgId(id)) {
_history->registerClientSideMessage(this);
@@ -2481,8 +2481,7 @@ const std::vector<Data::MessageReaction> &HistoryItem::reactions() const {
}
bool HistoryItem::reactionsAreTags() const {
// Disable reactions as tags for now.
return false;// _flags & MessageFlag::ReactionsAreTags;
return _flags & MessageFlag::ReactionsAreTags;
}
auto HistoryItem::recentReactions() const

View File

@@ -624,11 +624,16 @@ std::optional<bool> PeerHasThisCall(
});
}
[[nodiscard]] MessageFlags FinalizeMessageFlags(MessageFlags flags) {
[[nodiscard]] MessageFlags FinalizeMessageFlags(
not_null<History*> history,
MessageFlags flags) {
if (!(flags & MessageFlag::FakeHistoryItem)
&& !(flags & MessageFlag::IsOrWasScheduled)
&& !(flags & MessageFlag::AdminLogEntry)) {
flags |= MessageFlag::HistoryEntry;
if (history->peer->isSelf()) {
flags |= MessageFlag::ReactionsAreTags;
}
}
return flags;
}

View File

@@ -66,7 +66,9 @@ void CheckReactionNotificationSchedule(
not_null<PeerData*> peer,
PeerId from,
not_null<HistoryItem*> fwd);
[[nodiscard]] MessageFlags FinalizeMessageFlags(MessageFlags flags);
[[nodiscard]] MessageFlags FinalizeMessageFlags(
not_null<History*> history,
MessageFlags flags);
[[nodiscard]] bool CopyMarkupToForward(not_null<const HistoryItem*> item);
[[nodiscard]] TextWithEntities EnsureNonEmpty(
const TextWithEntities &text = TextWithEntities());

View File

@@ -17,15 +17,21 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "ui/text/text_options.h"
TextForMimeData HistoryItemText(not_null<HistoryItem*> item) {
auto textResult = item->clipboardText();
const auto media = item->media();
auto mediaResult = media ? media->clipboardText() : TextForMimeData();
auto textResult = mediaResult.empty()
? item->clipboardText()
: TextForMimeData();
auto logEntryOriginalResult = [&] {
const auto entry = item->Get<HistoryMessageLogEntryOriginal>();
if (!entry) {
return TextForMimeData();
}
const auto title = TextUtilities::SingleLine(entry->page->title.isEmpty()
? entry->page->author
: entry->page->title);
const auto title = TextUtilities::SingleLine(
entry->page->title.isEmpty()
? entry->page->author
: entry->page->title);
auto titleResult = TextForMimeData::Rich(
TextUtilities::ParseEntities(
title,
@@ -41,6 +47,11 @@ TextForMimeData HistoryItemText(not_null<HistoryItem*> item) {
return titleResult;
}();
auto result = textResult;
if (result.empty()) {
result = std::move(mediaResult);
} else if (!mediaResult.empty()) {
result.append(qstr("\n\n")).append(std::move(mediaResult));
}
if (result.empty()) {
result = std::move(logEntryOriginalResult);
} else if (!logEntryOriginalResult.empty()) {
@@ -78,7 +89,7 @@ TextForMimeData HistoryGroupText(not_null<const Data::Group*> group) {
return result;
}
}
auto caption = [&] {
return [&] {
auto &&nonempty = ranges::views::all(
group->items
) | ranges::views::filter(
@@ -92,7 +103,4 @@ TextForMimeData HistoryGroupText(not_null<const Data::Group*> group) {
auto result = (*first)->clipboardText();
return (++first == end) ? result : TextForMimeData();
}();
return Data::WithCaptionClipboardText(
tr::lng_in_dlg_album(tr::now),
std::move(caption));
}
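
Note: HistoryItemText now prefers the media clipboard text (for example a transcribed voice message or an album caption), falls back to the message text, and joins the pieces with a blank line when both are present. A plain-string sketch of that merge order, illustrative only:

    #include <string>

    // Prefer media clipboard text when it exists, otherwise the message text;
    // the append branch mirrors the diff's defensive "join with a blank line".
    std::string MergeClipboardText(const std::string &itemText, const std::string &mediaText) {
        auto result = mediaText.empty() ? itemText : std::string();
        if (result.empty()) {
            result = mediaText;
        } else if (!mediaText.empty()) {
            result += "\n\n" + mediaText;
        }
        return result;
    }
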

View File

@@ -337,6 +337,14 @@ HistoryWidget::HistoryWidget(
) | rpl::start_with_next([=] {
fieldChanged();
}, _field->lifetime());
#ifdef Q_OS_MAC
// Removed an ability to insert text from the menu bar
// when the field is hidden.
_field->shownValue(
) | rpl::start_with_next([=](bool shown) {
_field->setEnabled(shown);
}, _field->lifetime());
#endif // Q_OS_MAC
connect(
controller->widget()->windowHandle(),
&QWindow::visibleChanged,
@@ -840,7 +848,9 @@ HistoryWidget::HistoryWidget(
}, _topBar->lifetime());
_topBar->searchRequest(
) | rpl::start_with_next([=] {
searchInChat();
if (_history) {
controller->searchInChat(_history);
}
}, _topBar->lifetime());
session().api().sendActions(
@@ -1832,7 +1842,7 @@ void HistoryWidget::setupShortcuts() {
}) | rpl::start_with_next([=](not_null<Shortcuts::Request*> request) {
using Command = Shortcuts::Command;
request->check(Command::Search, 1) && request->handle([=] {
searchInChat();
controller()->searchInChat(_history);
return true;
});
if (session().supportMode()) {
@@ -4783,24 +4793,31 @@ bool HistoryWidget::updateCmdStartShown() {
return commandsChanged || buttonChanged || textChanged;
}
void HistoryWidget::searchInChat() {
if (_history) {
controller()->content()->searchInChat(_history);
bool HistoryWidget::searchInChatEmbedded(Dialogs::Key chat, QString query) {
const auto peer = chat.peer();
if (!peer || peer != controller()->singlePeer()) {
return false;
} else if (_peer != peer) {
const auto weak = Ui::MakeWeak(this);
controller()->showPeerHistory(peer);
if (!weak) {
return false;
}
}
if (_peer != peer) {
return false;
} else if (_composeSearch) {
_composeSearch->setQuery(query);
_composeSearch->setInnerFocus();
return true;
}
switchToSearch(query);
return true;
}
void HistoryWidget::searchInChatEmbedded(std::optional<QString> query) {
if (!_history) {
return;
} else if (_composeSearch) {
if (query) {
_composeSearch->setQuery(*query);
}
_composeSearch->setInnerFocus();
return;
}
void HistoryWidget::switchToSearch(QString query) {
const auto search = crl::guard(_list, [=] {
if (!_history) {
if (!_peer) {
return;
}
const auto update = [=] {
@@ -4810,22 +4827,33 @@ void HistoryWidget::searchInChatEmbedded(std::optional<QString> query) {
updateControlsGeometry();
};
const auto from = (PeerData*)nullptr;
_composeSearch = std::make_unique<HistoryView::ComposeSearch>(
this,
controller(),
_history,
query.value_or(QString()));
from,
query);
update();
setInnerFocus();
_composeSearch->activations(
) | rpl::start_with_next([=](not_null<HistoryItem*> item) {
controller()->showPeerHistory(
item->history()->peer->id,
::Window::SectionShow::Way::ClearStack,
item->fullId().msg);
}, _composeSearch->lifetime());
_composeSearch->destroyRequests(
) | rpl::take(
1
) | rpl::start_with_next([=] {
_composeSearch = nullptr;
update();
setInnerFocus();
update();
setInnerFocus();
}, _composeSearch->lifetime());
});
if (!preventsClose(search)) {
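
Note: searchInChatEmbedded may call showPeerHistory, which can destroy the widget it runs in, so the change guards `this` with Ui::MakeWeak before continuing. A generic Qt sketch of the same guard pattern, using QPointer instead of tdesktop's helper (names are illustrative):

    #include <QPointer>
    #include <QWidget>

    // Returns false if `navigate` ended up destroying the widget, mirroring
    // the weak-pointer check around the showPeerHistory call in the diff.
    template <typename Navigate>
    bool NavigateGuarded(QWidget *widget, Navigate navigate) {
        const QPointer<QWidget> weak(widget);
        navigate(); // may delete `widget` as a side effect
        return !weak.isNull();
    }
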

View File

@@ -251,7 +251,7 @@ public:
[[nodiscard]] rpl::producer<> cancelRequests() const {
return _cancelRequests.events();
}
void searchInChatEmbedded(std::optional<QString> query = {});
bool searchInChatEmbedded(Dialogs::Key chat, QString query);
void updateNotifyControls();
@@ -641,7 +641,7 @@ private:
bool kbWasHidden() const;
void searchInChat();
void switchToSearch(QString query);
MTP::Sender _api;
FullReplyTo _replyTo;

View File

@@ -1558,6 +1558,14 @@ void ComposeControls::initField() {
) | rpl::start_with_next([=] {
fieldChanged();
}, _field->lifetime());
#ifdef Q_OS_MAC
// Removed an ability to insert text from the menu bar
// when the field is hidden.
_field->shownValue(
) | rpl::start_with_next([=](bool shown) {
_field->setEnabled(shown);
}, _field->lifetime());
#endif // Q_OS_MAC
InitMessageField(_show, _field, [=](not_null<DocumentData*> emoji) {
if (_history && Data::AllowEmojiWithoutPremium(_history->peer)) {
return true;

View File

@@ -9,16 +9,26 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "api/api_messages_search_merged.h"
#include "boxes/peer_list_box.h"
#include "core/click_handler_types.h"
#include "core/ui_integration.h"
#include "data/data_message_reactions.h"
#include "data/data_saved_messages.h"
#include "data/data_session.h"
#include "data/data_user.h"
#include "dialogs/dialogs_search_from_controllers.h" // SearchFromBox
#include "dialogs/dialogs_search_tags.h"
#include "dialogs/ui/dialogs_layout.h"
#include "history/view/history_view_context_menu.h"
#include "history/history.h"
#include "history/history_item.h"
#include "lang/lang_keys.h"
#include "main/main_session.h"
#include "ui/effects/show_animation.h"
#include "ui/widgets/buttons.h"
#include "ui/widgets/labels.h"
#include "ui/widgets/multi_select.h"
#include "ui/widgets/popup_menu.h"
#include "ui/widgets/shadow.h"
#include "ui/widgets/scroll_area.h"
#include "ui/painter.h"
#include "window/window_session_controller.h"
@@ -255,7 +265,12 @@ List CreateList(
class TopBar final : public Ui::RpWidget {
public:
TopBar(not_null<Ui::RpWidget*> parent, const QString &query);
TopBar(
not_null<Ui::RpWidget*> parent,
not_null<Window::SessionController*> window,
not_null<History*> history,
PeerData *from,
const QString &query);
void setInnerFocus();
void setQuery(const QString &query);
@@ -275,12 +290,20 @@ protected:
private:
void clearItems();
void refreshTags();
void updateSize();
void requestSearch(bool cache = true);
void requestSearchDelayed();
base::unique_qptr<Ui::IconButton> _cancel;
std::vector<Data::ReactionId> _searchTagsSelected;
base::unique_qptr<Ui::MultiSelect> _select;
std::unique_ptr<Dialogs::SearchTags> _searchTags;
base::unique_qptr<Ui::PopupMenu> _menu;
std::optional<QPoint> _mouseGlobalPosition;
const not_null<Window::SessionController*> _window;
const not_null<History*> _history;
rpl::variable<PeerData*> _from = nullptr;
base::Timer _searchTimer;
@@ -293,29 +316,43 @@ private:
rpl::event_stream<not_null<QKeyEvent*>> _keyEvents;
};
TopBar::TopBar(not_null<Ui::RpWidget*> parent, const QString &query)
TopBar::TopBar(
not_null<Ui::RpWidget*> parent,
not_null<Window::SessionController*> window,
not_null<History*> history,
PeerData *from,
const QString &query)
: Ui::RpWidget(parent)
, _cancel(base::make_unique_q<Ui::IconButton>(this, st::historyTopBarBack))
, _searchTagsSelected(Data::SearchTagsFromQuery(query))
, _select(base::make_unique_q<Ui::MultiSelect>(
this,
st::searchInChatMultiSelect,
tr::lng_dlg_filter(),
query))
_searchTagsSelected.empty() ? query : QString()))
, _window(window)
, _history(history)
, _searchTimer([=] { requestSearch(); }) {
if (from) {
setFrom(from);
}
refreshTags();
moveToLeft(0, 0);
parent->geometryValue(
) | rpl::start_with_next([=](const QRect &r) {
moveToLeft(0, 0);
resize(r.width(), st::topBarHeight);
) | rpl::start_with_next([=] {
updateSize();
}, lifetime());
sizeValue(
) | rpl::start_with_next([=](const QSize &s) {
_cancel->moveToLeft(0, (s.height() - _cancel->height()) / 2);
const auto height = st::topBarHeight;
_cancel->moveToLeft(0, (height - _cancel->height()) / 2);
const auto selectLeft = _cancel->x() + _cancel->width();
_select->resizeToWidth(s.width() - selectLeft);
_select->moveToLeft(selectLeft, (s.height() - _select->height()) / 2);
_select->moveToLeft(selectLeft, (height - _select->height()) / 2);
}, lifetime());
@@ -355,8 +392,22 @@ void TopBar::setInnerFocus() {
_select->setInnerFocus();
}
void TopBar::updateSize() {
const auto height = st::topBarHeight
+ (_searchTags ? _searchTags->height() : 0);
resize(parentWidget()->width(), height);
}
void TopBar::setQuery(const QString &query) {
_select->setQuery(query);
if (auto tags = Data::SearchTagsFromQuery(query); !tags.empty()) {
if (_searchTagsSelected != tags) {
_searchTagsSelected = std::move(tags);
refreshTags();
}
_select->setQuery(QString());
} else {
_select->setQuery(query);
}
}
void TopBar::clearItems() {
@@ -372,8 +423,127 @@ void TopBar::clearItems() {
});
}
void TopBar::refreshTags() {
if (!_history->peer->isSelf()) {
_searchTags = nullptr;
return;
}
auto fullTagsList = _from.value() | rpl::map([=](PeerData *from) {
const auto sublist = from
? _history->owner().savedMessages().sublist(from).get()
: nullptr;
return _history->owner().reactions().myTagsValue(sublist);
}) | rpl::flatten_latest();
_searchTags = std::make_unique<Dialogs::SearchTags>(
&_history->owner(),
std::move(fullTagsList),
_searchTagsSelected);
const auto parent = _searchTags->lifetime().make_state<Ui::RpWidget>(
this);
const auto shadow = _searchTags->lifetime().make_state<Ui::PlainShadow>(
parentWidget());
parent->show();
_searchTags->heightValue(
) | rpl::start_with_next([=](int height) {
updateSize();
shadow->setVisible(height > 0);
}, _searchTags->lifetime());
geometryValue() | rpl::start_with_next([=](QRect geometry) {
shadow->setGeometry(
geometry.x(),
geometry.y() + geometry.height(),
geometry.width(),
st::lineWidth);
}, shadow->lifetime());
_searchTags->selectedChanges(
) | rpl::start_with_next([=](std::vector<Data::ReactionId> &&list) {
_searchTagsSelected = std::move(list);
requestSearch(false);
}, _searchTags->lifetime());
_searchTags->menuRequests(
) | rpl::start_with_next([=](Data::ReactionId id) {
ShowTagInListMenu(
&_menu,
_mouseGlobalPosition.value_or(QCursor::pos()),
this,
id,
_window);
}, _searchTags->lifetime());
if (!_searchTagsSelected.empty()) {
crl::on_main(this, [=] {
requestSearch(false);
});
}
const auto padding = st::searchInChatTagsPadding;
const auto position = QPoint(padding.left(), padding.top());
_searchTags->repaintRequests() | rpl::start_with_next([=] {
parent->update();
}, _searchTags->lifetime());
widthValue() | rpl::start_with_next([=](int width) {
width -= padding.left() + padding.right();
_searchTags->resizeToWidth(width);
}, _searchTags->lifetime());
rpl::combine(
widthValue(),
_searchTags->heightValue()
) | rpl::start_with_next([=](int width, int height) {
height += padding.top() + padding.bottom();
parent->setGeometry(0, st::topBarHeight, width, height);
}, _searchTags->lifetime());
parent->paintRequest() | rpl::start_with_next([=](const QRect &r) {
auto p = Painter(parent);
p.fillRect(r, st::dialogsBg);
_searchTags->paint(p, position, crl::now(), false);
}, parent->lifetime());
parent->setMouseTracking(true);
parent->events() | rpl::start_with_next([=](not_null<QEvent*> e) {
if (e->type() == QEvent::MouseMove) {
const auto mouse = static_cast<QMouseEvent*>(e.get());
_mouseGlobalPosition = mouse->globalPos();
const auto point = mouse->pos() - position;
const auto handler = _searchTags->lookupHandler(point);
ClickHandler::setActive(handler);
parent->setCursor(handler
? style::cur_pointer
: style::cur_default);
} else if (e->type() == QEvent::MouseButtonPress) {
const auto mouse = static_cast<QMouseEvent*>(e.get());
if (mouse->button() == Qt::LeftButton) {
ClickHandler::pressed();
}
} else if (e->type() == QEvent::MouseButtonRelease) {
const auto mouse = static_cast<QMouseEvent*>(e.get());
if (mouse->button() == Qt::LeftButton) {
const auto handler = ClickHandler::unpressed();
ActivateClickHandler(parent, handler, ClickContext{
.button = mouse->button(),
.other = QVariant::fromValue(ClickHandlerContext{
.sessionWindow = _window,
}),
});
}
}
}, parent->lifetime());
}
void TopBar::requestSearch(bool cache) {
const auto search = SearchRequest{ _select->getQuery(), _from.current() };
const auto search = SearchRequest{
_select->getQuery(),
_from.current(),
_searchTagsSelected
};
if (cache) {
_typedRequests.insert(search);
}
@@ -382,7 +552,11 @@ void TopBar::requestSearch(bool cache) {
void TopBar::requestSearchDelayed() {
// Check cached queries.
const auto search = SearchRequest{ _select->getQuery(), _from.current() };
const auto search = SearchRequest{
_select->getQuery(),
_from.current(),
_searchTagsSelected
};
if (_typedRequests.contains(search)) {
requestSearch(false);
return;
@@ -418,7 +592,7 @@ void TopBar::setFrom(PeerData *peer) {
_from = peer;
requestSearchDelayed();
});
if (!peer) {
if (!peer || _history->peer->isSelf()) {
return;
}
@@ -655,6 +829,7 @@ public:
not_null<Ui::RpWidget*> parent,
not_null<Window::SessionController*> window,
not_null<History*> history,
PeerData *from,
const QString &query);
~Inner();
@@ -662,6 +837,7 @@ public:
void setInnerFocus();
void setQuery(const QString &query);
[[nodiscard]] rpl::producer<not_null<HistoryItem*>> activations() const;
[[nodiscard]] rpl::producer<> destroyRequests() const;
[[nodiscard]] rpl::lifetime &lifetime();
@@ -685,6 +861,7 @@ private:
rpl::event_stream<BottomBar::Index> jumps;
} _pendingJump;
rpl::event_stream<not_null<HistoryItem*>> _activations;
rpl::event_stream<> _destroyRequests;
};
@@ -693,10 +870,11 @@ ComposeSearch::Inner::Inner(
not_null<Ui::RpWidget*> parent,
not_null<Window::SessionController*> window,
not_null<History*> history,
PeerData *from,
const QString &query)
: _window(window)
, _history(history)
, _topBar(base::make_unique_q<TopBar>(parent, query))
, _topBar(base::make_unique_q<TopBar>(parent, window, history, from, query))
, _bottomBar(base::make_unique_q<BottomBar>(parent, HasChooseFrom(history)))
, _list(CreateList(parent, history))
, _apiSearch(history) {
@@ -720,8 +898,10 @@ ComposeSearch::Inner::Inner(
_topBar->searchRequests(
) | rpl::start_with_next([=](const SearchRequest &search) {
if (search.query.isEmpty() && !search.from) {
return;
if (search.query.isEmpty() && search.tags.empty()) {
if (!search.from || _history->peer->isSelf()) {
return;
}
}
_apiSearch.clear();
_apiSearch.search(search);
@@ -749,8 +929,12 @@ ComposeSearch::Inner::Inner(
_apiSearch.newFounds(
) | rpl::start_with_next([=] {
const auto &apiData = _apiSearch.messages();
const auto weak = Ui::MakeWeak(_bottomBar.get());
_bottomBar->setTotal(apiData.total);
_list.controller->addItems(apiData.messages, true);
if (weak) {
// Activating the first search result may switch the chat.
_list.controller->addItems(apiData.messages, true);
}
}, _topBar->lifetime());
_apiSearch.nextFounds(
@@ -761,16 +945,6 @@ ComposeSearch::Inner::Inner(
_list.controller->addItems(_apiSearch.messages().messages, false);
}, _topBar->lifetime());
const auto goToMessage = [=](const FullMsgId &itemId) {
const auto item = _history->owner().message(itemId);
if (item) {
_window->showPeerHistory(
item->history()->peer->id,
::Window::SectionShow::Way::ClearStack,
item->fullId().msg);
}
};
rpl::merge(
_pendingJump.jumps.events() | rpl::filter(rpl::mappers::_1 >= 0),
_bottomBar->showItemRequests()
@@ -786,8 +960,14 @@ ComposeSearch::Inner::Inner(
return;
}
_pendingJump.data = {};
goToMessage(messages[index]);
hideList();
const auto item = _history->owner().message(messages[index]);
if (item) {
const auto weak = Ui::MakeWeak(_topBar.get());
_activations.fire_copy(item);
if (weak) {
hideList();
}
}
}, _bottomBar->lifetime());
_list.controller->showItemRequests(
@@ -878,6 +1058,11 @@ void ComposeSearch::Inner::hideList() {
}
}
auto ComposeSearch::Inner::activations() const
-> rpl::producer<not_null<HistoryItem*>> {
return _activations.events();
}
rpl::producer<> ComposeSearch::Inner::destroyRequests() const {
return _destroyRequests.events();
}
@@ -893,8 +1078,9 @@ ComposeSearch::ComposeSearch(
not_null<Ui::RpWidget*> parent,
not_null<Window::SessionController*> window,
not_null<History*> history,
PeerData *from,
const QString &query)
: _inner(std::make_unique<Inner>(parent, window, history, query)) {
: _inner(std::make_unique<Inner>(parent, window, history, from, query)) {
}
ComposeSearch::~ComposeSearch() {
@@ -912,6 +1098,10 @@ void ComposeSearch::setQuery(const QString &query) {
_inner->setQuery(query);
}
rpl::producer<not_null<HistoryItem*>> ComposeSearch::activations() const {
return _inner->activations();
}
rpl::producer<> ComposeSearch::destroyRequests() const {
return _inner->destroyRequests();
}
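
Note: the tag strip inside TopBar is painted manually, so the change forwards raw mouse events to SearchTags::lookupHandler and the global ClickHandler state (hover cursor, press, release-activate). A stripped-down Qt analogue of that hit-testing, without the ClickHandler machinery; names and behaviour here are illustrative:

    #include <QMouseEvent>
    #include <QRect>
    #include <QWidget>
    #include <functional>
    #include <vector>

    // A widget with clickable regions: updates the cursor on hover and fires
    // a region's callback when press and release land on the same region.
    class ClickableStrip : public QWidget {
    public:
        struct Region { QRect rect; std::function<void()> activate; };

        explicit ClickableStrip(QWidget *parent = nullptr) : QWidget(parent) {
            setMouseTracking(true); // receive move events without a pressed button
        }
        void setRegions(std::vector<Region> regions) { _regions = std::move(regions); }

    protected:
        void mouseMoveEvent(QMouseEvent *e) override {
            _hovered = lookup(e->pos());
            setCursor(_hovered >= 0 ? Qt::PointingHandCursor : Qt::ArrowCursor);
        }
        void mousePressEvent(QMouseEvent *e) override {
            if (e->button() == Qt::LeftButton) {
                _pressed = lookup(e->pos());
            }
        }
        void mouseReleaseEvent(QMouseEvent *e) override {
            const auto released = lookup(e->pos());
            if (e->button() == Qt::LeftButton
                && released >= 0
                && released == _pressed) {
                _regions[released].activate();
            }
            _pressed = -1;
        }

    private:
        int lookup(QPoint point) const {
            for (auto i = 0; i != int(_regions.size()); ++i) {
                if (_regions[i].rect.contains(point)) {
                    return i;
                }
            }
            return -1;
        }

        std::vector<Region> _regions;
        int _hovered = -1;
        int _pressed = -1;
    };
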

View File

@@ -25,6 +25,7 @@ public:
not_null<Ui::RpWidget*> parent,
not_null<Window::SessionController*> window,
not_null<History*> history,
PeerData *from = nullptr,
const QString &query = QString());
~ComposeSearch();
@@ -32,6 +33,7 @@ public:
void setInnerFocus();
void setQuery(const QString &query);
[[nodiscard]] rpl::producer<not_null<HistoryItem*>> activations() const;
[[nodiscard]] rpl::producer<> destroyRequests() const;
[[nodiscard]] rpl::lifetime &lifetime();

View File

@@ -29,6 +29,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/player/media_player_instance.h"
#include "ui/controls/send_button.h"
#include "ui/effects/animation_value.h"
#include "ui/effects/animation_value_f.h"
#include "ui/effects/ripple_animation.h"
#include "ui/text/format_values.h"
#include "ui/text/text_utilities.h"
@@ -83,16 +84,14 @@ enum class FilterType {
const int duration = kPrecision
* (float64(samples) / ::Media::Player::kDefaultFrequency);
const auto durationString = Ui::FormatDurationText(duration / kPrecision);
const auto decimalPart = duration % kPrecision;
return QString("%1%2%3")
.arg(durationString, QLocale().decimalPoint())
.arg(decimalPart);
const auto decimalPart = QString::number(duration % kPrecision);
return durationString + QLocale().decimalPoint() + decimalPart;
}
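
Note: the reworked duration helper builds "M:SS.d" from the captured sample count by splitting off the fractional part. A self-contained sketch of the same arithmetic, assuming one decimal digit and a 48 kHz rate as placeholders for kPrecision and the player's default frequency, which are not shown in this excerpt:

    #include <cstdio>
    #include <string>

    // Format a sample count as "M:SS.d" with a single fractional digit.
    std::string FormatVoiceDuration(long long samples, int sampleRate = 48000) {
        const auto tenths = int((samples * 10) / sampleRate); // duration in 0.1 s
        const auto seconds = tenths / 10;
        char buffer[32];
        std::snprintf(
            buffer,
            sizeof(buffer),
            "%d:%02d.%d",
            seconds / 60,
            seconds % 60,
            tenths % 10);
        return buffer;
    }
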
[[nodiscard]] std::unique_ptr<VoiceData> ProcessCaptureResult(
const ::Media::Capture::Result &data) {
const VoiceWaveform &waveform) {
auto voiceData = std::make_unique<VoiceData>();
voiceData->waveform = data.waveform;
voiceData->waveform = waveform;
voiceData->wavemax = voiceData->waveform.empty()
? uchar(0)
: *ranges::max_element(voiceData->waveform);
@@ -426,12 +425,11 @@ public:
not_null<Ui::RpWidget*> parent,
const style::RecordBar &st,
not_null<Main::Session*> session,
::Media::Capture::Result &&data,
::Media::Capture::Result *data,
const style::font &font);
void requestPaintProgress(float64 progress);
rpl::producer<> stopRequests() const;
::Media::Capture::Result *data() const;
void playPause();
@@ -455,7 +453,7 @@ private:
const not_null<DocumentData*> _document;
const std::unique_ptr<VoiceData> _voiceData;
const std::shared_ptr<Data::DocumentMedia> _mediaView;
const std::unique_ptr<::Media::Capture::Result> _data;
const not_null<::Media::Capture::Result*> _data;
const base::unique_qptr<Ui::IconButton> _delete;
const style::font &_durationFont;
const QString _duration;
@@ -485,15 +483,15 @@ ListenWrap::ListenWrap(
not_null<Ui::RpWidget*> parent,
const style::RecordBar &st,
not_null<Main::Session*> session,
::Media::Capture::Result &&data,
::Media::Capture::Result *data,
const style::font &font)
: _parent(parent)
, _st(st)
, _session(session)
, _document(DummyDocument(&session->data()))
, _voiceData(ProcessCaptureResult(data))
, _voiceData(ProcessCaptureResult(data->waveform))
, _mediaView(_document->createMediaView())
, _data(std::make_unique<::Media::Capture::Result>(std::move(data)))
, _data(data)
, _delete(base::make_unique_q<Ui::IconButton>(parent, _st.remove))
, _durationFont(font)
, _duration(Ui::FormatDurationText(
@@ -576,6 +574,8 @@ void ListenWrap::init() {
if (!_isShowAnimation) {
p.setOpacity(progress);
} else {
p.fillRect(bgRect, _st.bg);
}
p.setPen(Qt::NoPen);
p.setBrush(_st.cancelActive);
@@ -816,10 +816,6 @@ rpl::producer<> ListenWrap::stopRequests() const {
return _delete->clicks() | rpl::to_empty;
}
::Media::Capture::Result *ListenWrap::data() const {
return _data.get();
}
rpl::lifetime &ListenWrap::lifetime() {
return _lifetime;
}
@@ -832,6 +828,7 @@ public:
void requestPaintProgress(float64 progress);
void requestPaintLockToStopProgress(float64 progress);
void requestPaintPauseToInputProgress(float64 progress);
void setVisibleTopPart(int part);
[[nodiscard]] rpl::producer<> locks() const;
@@ -858,6 +855,7 @@ private:
Ui::Animations::Simple _lockEnderAnimation;
float64 _lockToStopProgress = 0.;
float64 _pauseToInputProgress = 0.;
rpl::variable<float64> _progress = 0.;
int _visibleTopPart = -1;
@@ -894,6 +892,7 @@ void RecordLock::init() {
setAttribute(Qt::WA_TransparentForMouseEvents, true);
_lockEnderAnimation.stop();
_lockToStopProgress = 0.;
_pauseToInputProgress = 0.;
_progress = 0.;
}
}, lifetime());
@@ -960,15 +959,31 @@ void RecordLock::drawProgress(QPainter &p) {
const auto lockTranslation = QPoint(
(inner.width() - size.width()) / 2,
(_st.originTop.height() * 2 - size.height()) / 2);
const auto xRadius = anim::interpolate(2, 3, _lockToStopProgress);
const auto xRadius = anim::interpolateF(2, 3, _lockToStopProgress);
const auto pauseLineOffset = blockRectWidth / 2
+ st::historyRecordLockIconLineWidth;
if (_lockToStopProgress == 1.) {
// Paint the block.
auto hq = PainterHighQualityEnabler(p);
p.translate(inner.topLeft() + lockTranslation);
p.setPen(Qt::NoPen);
p.setBrush(_st.fg);
p.drawRoundedRect(blockRect, xRadius, 3);
if (_pauseToInputProgress > 0.) {
p.setOpacity(_pauseToInputProgress);
st::historyRecordLockInput.paintInCenter(
p,
blockRect.toRect());
p.setOpacity(1. - _pauseToInputProgress);
}
p.drawRoundedRect(
blockRect - QMargins(0, 0, pauseLineOffset, 0),
xRadius,
3);
p.drawRoundedRect(
blockRect - QMargins(pauseLineOffset, 0, 0, 0),
xRadius,
3);
} else {
// Paint an animation frame.
auto frame = QImage(
@@ -984,7 +999,20 @@ void RecordLock::drawProgress(QPainter &p) {
q.setBrush(_arcPen.brush());
q.translate(lockTranslation);
q.drawRoundedRect(blockRect, xRadius, 3);
{
const auto offset = anim::interpolateF(
0,
pauseLineOffset,
_lockToStopProgress);
q.drawRoundedRect(
blockRect - QMarginsF(0, 0, offset, 0),
xRadius,
3);
q.drawRoundedRect(
blockRect - QMarginsF(offset, 0, 0, 0),
xRadius,
3);
}
const auto offsetTranslate = _lockToStopProgress *
(lineHeight + arcHeight + _arcPen.width() * 2);
@@ -996,7 +1024,12 @@ void RecordLock::drawProgress(QPainter &p) {
q.rotate(kLockArcAngle * progress);
}
q.setPen(_arcPen);
const auto lockProgress = 1. - _lockToStopProgress;
{
auto arcPen = _arcPen;
arcPen.setWidthF(_arcPen.widthF() * lockProgress);
q.setPen(arcPen);
}
const auto rLine = QLineF(0, 0, 0, -lineHeight);
q.drawLine(rLine);
@@ -1008,7 +1041,6 @@ void RecordLock::drawProgress(QPainter &p) {
0,
arc::kHalfLength);
const auto lockProgress = 1. - _lockToStopProgress;
if (progress == 1. && lockProgress < 1.) {
q.drawLine(
-arcWidth,
@@ -1058,6 +1090,11 @@ void RecordLock::requestPaintLockToStopProgress(float64 progress) {
update();
}
void RecordLock::requestPaintPauseToInputProgress(float64 progress) {
_pauseToInputProgress = progress;
update();
}
float64 RecordLock::lockToStopProgress() const {
return _lockToStopProgress;
}
@@ -1258,6 +1295,7 @@ void VoiceRecordBar::updateTTLGeometry(
const auto parent = parentWidget();
const auto me = Ui::MapFrom(_outerContainer, parent, geometry());
const auto anyTop = me.y() - st::historyRecordLockPosition.y();
const auto ttlFrom = anyTop - _ttlButton->height() * 2;
if (type == TTLAnimationType::RightLeft) {
const auto finalRight = _outerContainer->width()
- rect::right(me)
@@ -1265,17 +1303,17 @@ void VoiceRecordBar::updateTTLGeometry(
const auto from = -_ttlButton->width();
const auto right = anim::interpolate(from, finalRight, progress);
_ttlButton->moveToRight(right, _ttlButton->y());
_ttlButton->moveToRight(right, ttlFrom);
#if 0
} else if (type == TTLAnimationType::TopBottom) {
const auto ttlFrom = anyTop - _ttlButton->height() * 2;
const auto ttlTo = anyTop - _lock->height();
_ttlButton->moveToLeft(
_ttlButton->x(),
anim::interpolate(ttlFrom, ttlTo, 1. - progress));
#endif
} else if (type == TTLAnimationType::RightTopStatic) {
_ttlButton->moveToRight(
-_ttlButton->width(),
anyTop - _ttlButton->height() * 2);
_ttlButton->moveToRight(-_ttlButton->width(), ttlFrom);
}
}
@@ -1381,48 +1419,7 @@ void VoiceRecordBar::init() {
_showLockAnimation.start(std::move(callback), from, to, duration);
}, lifetime());
_lock->setClickedCallback([=] {
if (!_lock->isStopState()) {
return;
}
::Media::Capture::instance()->startedChanges(
) | rpl::filter([=](bool capturing) {
return !capturing && _listen;
}) | rpl::take(1) | rpl::start_with_next([=] {
_lockShowing = false;
const auto to = 1.;
const auto &duration = st::historyRecordVoiceShowDuration;
auto callback = [=](float64 value) {
_listen->requestPaintProgress(value);
const auto reverseValue = to - value;
_level->requestPaintProgress(reverseValue);
update();
if (to == value) {
_recordingLifetime.destroy();
}
updateTTLGeometry(TTLAnimationType::TopBottom, 1. - value);
};
_showListenAnimation.stop();
_showListenAnimation.start(std::move(callback), 0., to, duration);
}, lifetime());
stopRecording(StopType::Listen);
});
_lock->locks(
) | rpl::start_with_next([=] {
if (_hasTTLFilter && _hasTTLFilter()) {
if (!_ttlButton) {
_ttlButton = std::make_unique<TTLButton>(
_outerContainer,
_st);
}
_ttlButton->show();
}
updateTTLGeometry(TTLAnimationType::RightTopStatic, 0);
const auto setLevelAsSend = [=] {
_level->setType(VoiceRecordButton::Type::Send);
_level->clicks(
@@ -1437,6 +1434,69 @@ void VoiceRecordBar::init() {
) | rpl::start_with_next([=](bool enter) {
_inField = enter;
}, _recordingLifetime);
};
const auto paintShowListenCallback = [=](float64 value) {
_listen->requestPaintProgress(value);
_level->requestPaintProgress(1. - value);
_lock->requestPaintPauseToInputProgress(value);
update();
};
_lock->setClickedCallback([=] {
if (isListenState()) {
startRecording();
_showListenAnimation.stop();
_showListenAnimation.start([=](float64 value) {
_listen->requestPaintProgress(1.);
paintShowListenCallback(value);
if (!value) {
_listen = nullptr;
}
}, 1., 0., st::historyRecordVoiceShowDuration * 2);
setLevelAsSend();
return;
}
if (!_lock->isStopState()) {
return;
}
stopRecording(StopType::Listen);
});
_paused.value() | rpl::distinct_until_changed(
) | rpl::start_with_next([=](bool paused) {
if (!paused) {
return;
}
// _lockShowing = false;
const auto to = 1.;
const auto &duration = st::historyRecordVoiceShowDuration;
auto callback = [=](float64 value) {
paintShowListenCallback(value);
if (to == value) {
_recordingLifetime.destroy();
}
};
_showListenAnimation.stop();
_showListenAnimation.start(std::move(callback), 0., to, duration);
}, lifetime());
_lock->locks(
) | rpl::start_with_next([=] {
if (_hasTTLFilter && _hasTTLFilter()) {
if (!_ttlButton) {
_ttlButton = std::make_unique<TTLButton>(
_outerContainer,
_st);
}
_ttlButton->show();
}
updateTTLGeometry(TTLAnimationType::RightTopStatic, 0);
setLevelAsSend();
const auto &duration = st::historyRecordVoiceShowDuration;
const auto from = 0.;
@@ -1589,7 +1649,12 @@ void VoiceRecordBar::startRecording() {
startRedCircleAnimation();
_recording = true;
instance()->start();
if (_paused.current()) {
_paused = false;
instance()->pause(false, nullptr);
} else {
instance()->start();
}
instance()->updated(
) | rpl::start_with_next_error([=](const Update &update) {
_recordingTipRequired = (update.samples < kMinSamples);
@@ -1658,7 +1723,7 @@ void VoiceRecordBar::stop(bool send) {
const auto type = send ? StopType::Send : StopType::Cancel;
stopRecording(type, ttlBeforeHide);
};
_lockShowing = false;
// _lockShowing = false;
visibilityAnimate(false, std::move(disappearanceCallback));
}
@@ -1668,6 +1733,7 @@ void VoiceRecordBar::finish() {
_inField = false;
_redCircleProgress = 0.;
_recordingSamples = 0;
_paused = false;
_showAnimation.stop();
_lockToStopAnimation.stop();
@@ -1677,6 +1743,8 @@ void VoiceRecordBar::finish() {
[[maybe_unused]] const auto s = takeTTLState();
_sendActionUpdates.fire({ Api::SendProgressType::RecordVoice, -1 });
_data = {};
}
void VoiceRecordBar::hideFast() {
@@ -1692,42 +1760,52 @@ void VoiceRecordBar::stopRecording(StopType type, bool ttlBeforeHide) {
instance()->stop(crl::guard(this, [=](Result &&data) {
_cancelRequests.fire({});
}));
return;
}
instance()->stop(crl::guard(this, [=](Result &&data) {
if (data.bytes.isEmpty()) {
// Close everything.
stop(false);
return;
}
} else if (type == StopType::Listen) {
instance()->pause(true, crl::guard(this, [=](Result &&data) {
if (data.bytes.isEmpty()) {
// Close everything.
stop(false);
return;
}
_paused = true;
_data = std::move(data);
window()->raise();
window()->activateWindow();
const auto duration = Duration(data.samples);
if (type == StopType::Send) {
window()->raise();
window()->activateWindow();
_listen = std::make_unique<ListenWrap>(
this,
_st,
&_show->session(),
&_data,
_cancelFont);
_listenChanges.fire({});
// _lockShowing = false;
}));
} else if (type == StopType::Send) {
instance()->stop(crl::guard(this, [=](Result &&data) {
if (data.bytes.isEmpty()) {
// Close everything.
stop(false);
return;
}
_data = std::move(data);
window()->raise();
window()->activateWindow();
const auto options = Api::SendOptions{
.ttlSeconds = (ttlBeforeHide
? std::numeric_limits<int>::max()
: 0),
};
_sendVoiceRequests.fire({
data.bytes,
data.waveform,
duration,
_data.bytes,
_data.waveform,
Duration(_data.samples),
options,
});
} else if (type == StopType::Listen) {
_listen = std::make_unique<ListenWrap>(
this,
_st,
&_show->session(),
std::move(data),
_cancelFont);
_listenChanges.fire({});
_lockShowing = false;
}
}));
}));
}
}
void VoiceRecordBar::drawDuration(QPainter &p) {
@@ -1784,14 +1862,13 @@ void VoiceRecordBar::drawMessage(QPainter &p, float64 recordActive) {
void VoiceRecordBar::requestToSendWithOptions(Api::SendOptions options) {
if (isListenState()) {
const auto data = _listen->data();
if (takeTTLState()) {
options.ttlSeconds = std::numeric_limits<int>::max();
}
_sendVoiceRequests.fire({
data->bytes,
data->waveform,
Duration(data->samples),
_data.bytes,
_data.waveform,
Duration(_data.samples),
options,
});
}
@@ -1810,7 +1887,7 @@ rpl::producer<> VoiceRecordBar::cancelRequests() const {
}
bool VoiceRecordBar::isRecording() const {
return _recording.current();
return _recording.current() && !_paused.current();
}
bool VoiceRecordBar::isRecordingLocked() const {
@@ -1906,7 +1983,11 @@ float64 VoiceRecordBar::showAnimationRatio() const {
}
float64 VoiceRecordBar::showListenAnimationRatio() const {
return _showListenAnimation.value(_listen ? 1. : 0.);
const auto value = _showListenAnimation.value(_listen ? 1. : 0.);
if (_paused.current()) {
return value * value;
}
return value;
}
void VoiceRecordBar::computeAndSetLockProgress(QPoint globalPos) {

View File

@@ -10,6 +10,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "api/api_common.h"
#include "base/timer.h"
#include "history/view/controls/compose_controls_common.h"
#include "media/audio/media_audio_capture_common.h"
#include "ui/effects/animations.h"
#include "ui/round_rect.h"
#include "ui/rp_widget.h"
@@ -162,6 +163,9 @@ private:
std::unique_ptr<Ui::AbstractButton> _ttlButton;
std::unique_ptr<ListenWrap> _listen;
::Media::Capture::Result _data;
rpl::variable<bool> _paused;
base::Timer _startTimer;
rpl::event_stream<SendActionUpdate> _sendActionUpdates;

View File

@@ -26,12 +26,15 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "history/view/media/history_view_media.h"
#include "history/view/media/history_view_web_page.h"
#include "history/view/reactions/history_view_reactions_list.h"
#include "info/info_memento.h"
#include "info/profile/info_profile_widget.h"
#include "ui/widgets/popup_menu.h"
#include "ui/widgets/menu/menu_action.h"
#include "ui/widgets/menu/menu_common.h"
#include "ui/widgets/menu/menu_multiline_action.h"
#include "ui/image/image.h"
#include "ui/toast/toast.h"
#include "ui/text/format_song_document_name.h"
#include "ui/text/text_utilities.h"
#include "ui/controls/delete_message_context_action.h"
#include "ui/controls/who_reacted_context_action.h"
@@ -41,6 +44,8 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "menu/menu_send.h"
#include "ui/boxes/confirm_box.h"
#include "ui/boxes/show_or_premium_box.h"
#include "ui/widgets/fields/input_field.h"
#include "ui/power_saving.h"
#include "boxes/delete_messages_box.h"
#include "boxes/report_messages_box.h"
#include "boxes/sticker_set_box.h"
@@ -85,6 +90,7 @@ namespace HistoryView {
namespace {
constexpr auto kRescheduleLimit = 20;
constexpr auto kTagNameLimit = 12;
bool HasEditMessageAction(
const ContextMenuRequest &request,
@@ -321,6 +327,10 @@ void AddDocumentActions(
AddSaveSoundForNotifications(menu, item, document, controller);
}
AddSaveDocumentAction(menu, item, document, list);
AddCopyFilename(
menu,
document,
[=] { return list->showCopyRestrictionForSelected(); });
}
void AddPostLinkAction(
@@ -980,6 +990,139 @@ void AddCopyLinkAction(
&st::menuIconCopy);
}
void EditTagBox(
not_null<Ui::GenericBox*> box,
not_null<Window::SessionController*> controller,
const Data::ReactionId &id) {
const auto owner = &controller->session().data();
const auto title = owner->reactions().myTagTitle(id);
box->setTitle(title.isEmpty()
? tr::lng_context_tag_add_name()
: tr::lng_context_tag_edit_name());
box->addRow(object_ptr<Ui::FlatLabel>(
box,
tr::lng_edit_tag_about(),
st::editTagAbout));
const auto field = box->addRow(object_ptr<Ui::InputField>(
box,
st::editTagField,
tr::lng_edit_tag_name(),
title));
field->setMaxLength(kTagNameLimit * 2);
box->setFocusCallback([=] {
field->setFocusFast();
});
struct State {
std::unique_ptr<Ui::Text::CustomEmoji> custom;
QImage image;
rpl::variable<int> length;
};
const auto state = field->lifetime().make_state<State>();
state->length = rpl::single(
int(title.size())
) | rpl::then(field->changes() | rpl::map([=] {
return int(field->getLastText().size());
}));
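// Custom-emoji tags are painted through a CustomEmoji instance; plain
// reaction tags fall back to a preloaded inline-list image (see paintRequest).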
if (const auto customId = id.custom()) {
state->custom = owner->customEmojiManager().create(
customId,
[=] { field->update(); });
} else {
owner->reactions().preloadImageFor(id);
}
field->paintRequest() | rpl::start_with_next([=](QRect clip) {
auto p = QPainter(field);
const auto top = st::editTagField.textMargins.top();
if (const auto custom = state->custom.get()) {
const auto inactive = !field->window()->isActiveWindow();
custom->paint(p, {
.textColor = st::windowFg->c,
.now = crl::now(),
.position = QPoint(0, top),
.paused = inactive || On(PowerSaving::kEmojiChat),
});
} else {
if (state->image.isNull()) {
state->image = owner->reactions().resolveImageFor(
id,
::Data::Reactions::ImageSize::InlineList);
}
if (!state->image.isNull()) {
const auto size = st::reactionInlineSize;
const auto skip = (size - st::reactionInlineImage) / 2;
p.drawImage(skip, top + skip, state->image);
}
}
}, field->lifetime());
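// Remaining-characters counter: appears once more than half of kTagNameLimit
// is used and switches to the attention color when the limit is exceeded.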
const auto warning = Ui::CreateChild<Ui::FlatLabel>(
field,
state->length.value() | rpl::map([](int count) {
return (count > kTagNameLimit / 2)
? QString::number(kTagNameLimit - count)
: QString();
}),
st::editTagLimit);
state->length.value() | rpl::map(
rpl::mappers::_1 > kTagNameLimit
) | rpl::start_with_next([=](bool exceeded) {
warning->setTextColorOverride(exceeded
? st::attentionButtonFg->c
: std::optional<QColor>());
}, warning->lifetime());
rpl::combine(
field->sizeValue(),
warning->sizeValue()
) | rpl::start_with_next([=] {
warning->moveToRight(0, st::editTagField.textMargins.top());
}, warning->lifetime());
warning->setAttribute(Qt::WA_TransparentForMouseEvents);
const auto save = [=] {
const auto text = field->getLastText();
if (text.size() > kTagNameLimit) {
field->showError();
return;
}
const auto weak = Ui::MakeWeak(box);
controller->session().data().reactions().renameTag(id, text);
if (const auto strong = weak.data()) {
strong->closeBox();
}
};
field->submits(
) | rpl::start_with_next(save, field->lifetime());
box->addButton(tr::lng_settings_save(), save);
box->addButton(tr::lng_cancel(), [=] {
box->closeBox();
});
}
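// Opens the profile of a participant who read or reacted; the who-reacted
// menus below now pass the whole Ui::WhoReadParticipant here instead of a
// bare peer id, so reactions in groups can carry their origin.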
void ShowWhoReadInfo(
not_null<Window::SessionController*> controller,
FullMsgId itemId,
Ui::WhoReadParticipant who) {
const auto peer = controller->session().data().peer(itemId.peer);
const auto participant = peer->owner().peer(PeerId(who.id));
const auto migrated = participant->migrateFrom();
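// If the participant reacted (dateReacted is set), open the profile with a
// group-reaction origin pointing back to the message.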
const auto origin = who.dateReacted
? Info::Profile::Origin{
Info::Profile::GroupReactionOrigin{ peer, itemId.msg },
}
: Info::Profile::Origin();
auto memento = std::make_shared<Info::Memento>(
std::vector<std::shared_ptr<Info::ContentMemento>>{
std::make_shared<Info::Profile::Memento>(
participant,
migrated ? migrated->id : PeerId(),
origin),
});
controller->showSection(std::move(memento));
}
} // namespace
ContextMenuRequest::ContextMenuRequest(
@@ -1283,11 +1426,12 @@ void AddWhoReactedAction(
});
controller->show(std::move(box));
};
const auto participantChosen = [=](uint64 id) {
const auto itemId = item->fullId();
const auto participantChosen = [=](Ui::WhoReadParticipant who) {
if (const auto strong = weak.data()) {
strong->hideMenu();
}
controller->showPeerInfo(PeerId(id));
ShowWhoReadInfo(controller, itemId, who);
};
const auto showAllChosen = [=, itemId = item->fullId()]{
// Pressing on an item that has a submenu doesn't hide it :(
@@ -1320,6 +1464,37 @@ void AddWhoReactedAction(
}
}
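// Helpers shared by ShowTagMenu and ShowTagInListMenu below: renaming a tag
// and jumping to the emoji pack of a custom-emoji tag.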
void AddEditTagAction(
not_null<Ui::PopupMenu*> menu,
const Data::ReactionId &id,
not_null<Window::SessionController*> controller) {
const auto owner = &controller->session().data();
const auto editLabel = owner->reactions().myTagTitle(id).isEmpty()
? tr::lng_context_tag_add_name(tr::now)
: tr::lng_context_tag_edit_name(tr::now);
menu->addAction(editLabel, [=] {
controller->show(Box(EditTagBox, controller, id));
}, &st::menuIconTagRename);
}
void AddTagPackAction(
not_null<Ui::PopupMenu*> menu,
const Data::ReactionId &id,
not_null<Window::SessionController*> controller) {
if (const auto custom = id.custom()) {
const auto owner = &controller->session().data();
if (const auto set = owner->document(custom)->sticker()) {
if (set->set.id) {
AddEmojiPacksAction(
menu,
{ set->set },
EmojiPacksSource::Tag,
controller);
}
}
}
}
void ShowTagMenu(
not_null<base::unique_qptr<Ui::PopupMenu>*> menu,
QPoint position,
@@ -1340,7 +1515,9 @@ void ShowTagMenu(
.sessionWindow = controller,
}),
});
}, &st::menuIconFave);
}, &st::menuIconTagFilter);
AddEditTagAction(menu->get(), id, controller);
const auto removeTag = [=] {
if (const auto item = owner->message(itemId)) {
@@ -1359,23 +1536,57 @@ void ShowTagMenu(
(*menu)->menu(),
tr::lng_context_remove_tag(tr::now),
removeTag),
&st::menuIconDisableAttention,
&st::menuIconDisableAttention));
&st::menuIconTagRemoveAttention,
&st::menuIconTagRemoveAttention));
AddTagPackAction(menu->get(), id, controller);
if (const auto custom = id.custom()) {
if (const auto set = owner->document(custom)->sticker()) {
if (set->set.id) {
AddEmojiPacksAction(
menu->get(),
{ set->set },
EmojiPacksSource::Reaction,
controller);
}
}
}
(*menu)->popup(position);
}
void ShowTagInListMenu(
not_null<base::unique_qptr<Ui::PopupMenu>*> menu,
QPoint position,
not_null<QWidget*> context,
const Data::ReactionId &id,
not_null<Window::SessionController*> controller) {
*menu = base::make_unique_q<Ui::PopupMenu>(
context,
st::popupMenuExpandedSeparator);
AddEditTagAction(menu->get(), id, controller);
AddTagPackAction(menu->get(), id, controller);
(*menu)->popup(position);
}
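// Backs the copy-filename item added in AddDocumentActions above; the callback
// suppresses copying while a copy restriction applies to the selection. Only
// named documents and audio files (via the formatted song name) qualify.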
void AddCopyFilename(
not_null<Ui::PopupMenu*> menu,
not_null<DocumentData*> document,
Fn<bool()> showCopyRestrictionForSelected) {
const auto filenameToCopy = [&] {
if (document->isAudioFile()) {
return TextForMimeData().append(
Ui::Text::FormatSongNameFor(document).string());
} else if (document->sticker()
|| document->isAnimation()
|| document->isVideoMessage()
|| document->isVideoFile()
|| document->isVoiceMessage()) {
return TextForMimeData();
} else {
return TextForMimeData().append(document->filename());
}
}();
if (!filenameToCopy.empty()) {
menu->addAction(tr::lng_context_copy_filename(tr::now), [=] {
if (!showCopyRestrictionForSelected()) {
TextUtilities::SetClipboardText(filenameToCopy);
}
}, &st::menuIconCopy);
}
}
void ShowWhoReactedMenu(
not_null<base::unique_qptr<Ui::PopupMenu>*> menu,
QPoint position,
@@ -1392,8 +1603,9 @@ void ShowWhoReactedMenu(
struct State {
int addedToBottom = 0;
};
const auto participantChosen = [=](uint64 id) {
controller->showPeerInfo(PeerId(id));
const auto itemId = item->fullId();
const auto participantChosen = [=](Ui::WhoReadParticipant who) {
ShowWhoReadInfo(controller, itemId, who);
};
const auto showAllChosen = [=, itemId = item->fullId()]{
if (const auto item = controller->session().data().message(itemId)) {
@@ -1534,6 +1746,12 @@ void AddEmojiPacksAction(
lt_name,
TextWithEntities{ name },
Ui::Text::RichLangValue);
case EmojiPacksSource::Tag:
return tr::lng_context_animated_tag(
tr::now,
lt_name,
TextWithEntities{ name },
Ui::Text::RichLangValue);
case EmojiPacksSource::Reaction:
if (!name.text.isEmpty()) {
return tr::lng_context_animated_reaction(

View File

@@ -88,11 +88,22 @@ void ShowWhoReactedMenu(
const Data::ReactionId &id,
not_null<Window::SessionController*> controller,
rpl::lifetime &lifetime);
void ShowTagInListMenu(
not_null<base::unique_qptr<Ui::PopupMenu>*> menu,
QPoint position,
not_null<QWidget*> context,
const Data::ReactionId &id,
not_null<Window::SessionController*> controller);
void AddCopyFilename(
not_null<Ui::PopupMenu*> menu,
not_null<DocumentData*> document,
Fn<bool()> showCopyRestrictionForSelected);
enum class EmojiPacksSource {
Message,
Reaction,
Reactions,
Tag,
};
[[nodiscard]] std::vector<StickerSetIdentifier> CollectEmojiPacks(
not_null<HistoryItem*> item,

View File

@@ -166,6 +166,11 @@ void DefaultElementDelegate::elementSendBotCommand(
const FullMsgId &context) {
}
void DefaultElementDelegate::elementSearchInList(
const QString &query,
const FullMsgId &context) {
}
void DefaultElementDelegate::elementHandleViaClick(
not_null<UserData*> bot) {
}

View File

@@ -98,6 +98,9 @@ public:
virtual void elementSendBotCommand(
const QString &command,
const FullMsgId &context) = 0;
virtual void elementSearchInList(
const QString &query,
const FullMsgId &context) = 0;
virtual void elementHandleViaClick(not_null<UserData*> bot) = 0;
virtual bool elementIsChatWide() = 0;
virtual not_null<Ui::PathShiftGradient*> elementPathShiftGradient() = 0;
@@ -146,6 +149,9 @@ public:
void elementSendBotCommand(
const QString &command,
const FullMsgId &context) override;
void elementSearchInList(
const QString &query,
const FullMsgId &context) override;
void elementHandleViaClick(not_null<UserData*> bot) override;
bool elementIsChatWide() override;
void elementReplyTo(const FullReplyTo &to) override;

View File

@@ -398,14 +398,6 @@ ListWidget::ListWidget(
reactionChosen(reaction);
}, lifetime());
_reactionsManager->premiumPromoChosen(
) | rpl::start_with_next([=] {
_reactionsManager->updateButton({});
ShowPremiumPreviewBox(
_controller,
PremiumPreview::InfiniteReactions);
}, lifetime());
Reactions::SetupManagerList(
_reactionsManager.get(),
_reactionsItem.value());
@@ -1731,6 +1723,12 @@ void ListWidget::elementSendBotCommand(
_delegate->listSendBotCommand(command, context);
}
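// Forwards in-list search requests from elements to the list delegate.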
void ListWidget::elementSearchInList(
const QString &query,
const FullMsgId &context) {
_delegate->listSearch(query, context);
}
void ListWidget::elementHandleViaClick(not_null<UserData*> bot) {
_delegate->listHandleViaClick(bot);
}
@@ -2641,9 +2639,7 @@ void ListWidget::showContextMenu(QContextMenuEvent *e, bool showFromTouch) {
desiredPosition,
reactItem,
[=](ChosenReaction reaction) { reactionChosen(reaction); },
[=](FullMsgId context) { ShowPremiumPreviewBox(
_controller,
PremiumPreview::InfiniteReactions); },
ItemReactionsAbout(reactItem),
_controller->cachedReactionIconFactory().createMethod())
: AttachSelectorResult::Skipped;
if (attached == AttachSelectorResult::Failed) {

Some files were not shown because too many files have changed in this diff.