Compare commits
51 Commits
dap-fix-in
...
vim-syntax
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0e6f44d87c | ||
|
|
707a4c7f20 | ||
|
|
854076f96d | ||
|
|
cf931247d0 | ||
|
|
b74477d12e | ||
|
|
3077abf9cf | ||
|
|
07dab4e94a | ||
|
|
59686f1f44 | ||
|
|
a60bea8a3d | ||
|
|
b820aa1fcd | ||
|
|
55d91bce53 | ||
|
|
b798392050 | ||
|
|
657c8b1084 | ||
|
|
2bb8aa2f73 | ||
|
|
beeb42da29 | ||
|
|
6d66ff1d95 | ||
|
|
e0b818af62 | ||
|
|
58a400b1ee | ||
|
|
8ab7d44d51 | ||
|
|
56d4c0af9f | ||
|
|
feeda7fa37 | ||
|
|
4a5c55a8f2 | ||
|
|
7c1ae9bcc3 | ||
|
|
6f97da3435 | ||
|
|
63c1033448 | ||
|
|
b16911e756 | ||
|
|
b14401f817 | ||
|
|
17cf865d1e | ||
|
|
b7ec437b13 | ||
|
|
f1aab1120d | ||
|
|
3f90bc81bd | ||
|
|
9d5fb3c3f3 | ||
|
|
864767ad35 | ||
|
|
ec69b68e72 | ||
|
|
9dd18e5ee1 | ||
|
|
2ebe16a52f | ||
|
|
1ed4647203 | ||
|
|
ebed567adb | ||
|
|
a6544c70c5 | ||
|
|
b363e1a482 | ||
|
|
65e3e84cbc | ||
|
|
1e1d4430c2 | ||
|
|
c874f1fa9d | ||
|
|
9a9e96ed5a | ||
|
|
8c46e290df | ||
|
|
aacbb9c2f4 | ||
|
|
f90333f92e | ||
|
|
b24f614ca3 | ||
|
|
cefa0cbed8 | ||
|
|
3fb1023667 | ||
|
|
9c715b470e |
3
Cargo.lock
generated
3
Cargo.lock
generated
@@ -114,6 +114,7 @@ dependencies = [
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"smol",
|
||||
"sqlez",
|
||||
"streaming_diff",
|
||||
"telemetry",
|
||||
"telemetry_events",
|
||||
@@ -133,6 +134,7 @@ dependencies = [
|
||||
"workspace-hack",
|
||||
"zed_actions",
|
||||
"zed_llm_client",
|
||||
"zstd",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -8760,6 +8762,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"shellexpand 2.1.2",
|
||||
"smallvec",
|
||||
"smol",
|
||||
"streaming-iterator",
|
||||
|
||||
@@ -31,8 +31,6 @@
|
||||
"ctrl-,": "zed::OpenSettings",
|
||||
"ctrl-q": "zed::Quit",
|
||||
"f4": "debugger::Start",
|
||||
"alt-f4": "debugger::RerunLastSession",
|
||||
"f5": "debugger::Continue",
|
||||
"shift-f5": "debugger::Stop",
|
||||
"ctrl-shift-f5": "debugger::Restart",
|
||||
"f6": "debugger::Pause",
|
||||
@@ -583,11 +581,24 @@
|
||||
"ctrl-alt-r": "task::Rerun",
|
||||
"alt-t": "task::Rerun",
|
||||
"alt-shift-t": "task::Spawn",
|
||||
"alt-shift-r": ["task::Spawn", { "reveal_target": "center" }]
|
||||
"alt-shift-r": ["task::Spawn", { "reveal_target": "center" }],
|
||||
// also possible to spawn tasks by name:
|
||||
// "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
|
||||
// or by tag:
|
||||
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
|
||||
"f5": "debugger::RerunLastSession"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && debugger_running",
|
||||
"bindings": {
|
||||
"f5": "zed::NoAction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && debugger_stopped",
|
||||
"bindings": {
|
||||
"f5": "debugger::Continue"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -873,7 +884,8 @@
|
||||
"context": "DebugPanel",
|
||||
"bindings": {
|
||||
"ctrl-t": "debugger::ToggleThreadPicker",
|
||||
"ctrl-i": "debugger::ToggleSessionPicker"
|
||||
"ctrl-i": "debugger::ToggleSessionPicker",
|
||||
"shift-alt-escape": "debugger::ToggleExpandItem"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -928,6 +940,13 @@
|
||||
"tab": "channel_modal::ToggleMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder",
|
||||
"bindings": {
|
||||
"ctrl-shift-a": "file_finder::ToggleSplitMenu",
|
||||
"ctrl-shift-i": "file_finder::ToggleFilterMenu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)",
|
||||
"bindings": {
|
||||
|
||||
@@ -4,8 +4,6 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f4": "debugger::Start",
|
||||
"alt-f4": "debugger::RerunLastSession",
|
||||
"f5": "debugger::Continue",
|
||||
"shift-f5": "debugger::Stop",
|
||||
"shift-cmd-f5": "debugger::Restart",
|
||||
"f6": "debugger::Pause",
|
||||
@@ -635,7 +633,8 @@
|
||||
"cmd-k shift-right": "workspace::SwapPaneRight",
|
||||
"cmd-k shift-up": "workspace::SwapPaneUp",
|
||||
"cmd-k shift-down": "workspace::SwapPaneDown",
|
||||
"cmd-shift-x": "zed::Extensions"
|
||||
"cmd-shift-x": "zed::Extensions",
|
||||
"f5": "debugger::RerunLastSession"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -652,6 +651,20 @@
|
||||
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && debugger_running",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f5": "zed::NoAction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace && debugger_stopped",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f5": "debugger::Continue"
|
||||
}
|
||||
},
|
||||
// Bindings from Sublime Text
|
||||
{
|
||||
"context": "Editor",
|
||||
@@ -936,7 +949,8 @@
|
||||
"context": "DebugPanel",
|
||||
"bindings": {
|
||||
"cmd-t": "debugger::ToggleThreadPicker",
|
||||
"cmd-i": "debugger::ToggleSessionPicker"
|
||||
"cmd-i": "debugger::ToggleSessionPicker",
|
||||
"shift-alt-escape": "debugger::ToggleExpandItem"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -987,6 +1001,14 @@
|
||||
"tab": "channel_modal::ToggleMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-shift-a": "file_finder::ToggleSplitMenu",
|
||||
"cmd-shift-i": "file_finder::ToggleFilterMenu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder || (FileFinder > Picker > Editor) || (FileFinder > Picker > menu)",
|
||||
"use_key_equivalents": true,
|
||||
|
||||
@@ -52,10 +52,10 @@
|
||||
"shift-alt-m": "markdown::OpenPreviewToTheSide",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"ctrl-right": "editor::MoveToNextSubwordEnd",
|
||||
"ctrl-left": "editor::MoveToPreviousSubwordStart",
|
||||
"ctrl-shift-right": "editor::SelectToNextSubwordEnd",
|
||||
"ctrl-shift-left": "editor::SelectToPreviousSubwordStart"
|
||||
"alt-right": "editor::MoveToNextSubwordEnd",
|
||||
"alt-left": "editor::MoveToPreviousSubwordStart",
|
||||
"alt-shift-right": "editor::SelectToNextSubwordEnd",
|
||||
"alt-shift-left": "editor::SelectToPreviousSubwordStart"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -838,6 +838,19 @@
|
||||
"tab": "editor::AcceptEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "MessageEditor > Editor && VimControl",
|
||||
"bindings": {
|
||||
"enter": "agent::Chat",
|
||||
// TODO: Implement search
|
||||
"/": null,
|
||||
"?": null,
|
||||
"#": null,
|
||||
"*": null,
|
||||
"n": null,
|
||||
"shift-n": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "os != macos && Editor && edit_prediction_conflict",
|
||||
"bindings": {
|
||||
|
||||
@@ -128,6 +128,8 @@
|
||||
//
|
||||
// Default: true
|
||||
"restore_on_file_reopen": true,
|
||||
// Whether to automatically close files that have been deleted on disk.
|
||||
"close_on_file_delete": false,
|
||||
// Size of the drop target in the editor.
|
||||
"drop_target_size": 0.2,
|
||||
// Whether the window should be closed when using 'close active item' on a window with no tabs.
|
||||
@@ -731,13 +733,6 @@
|
||||
// The model to use.
|
||||
"model": "claude-sonnet-4"
|
||||
},
|
||||
// The model to use when applying edits from the agent.
|
||||
"editor_model": {
|
||||
// The provider to use.
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-sonnet-4"
|
||||
},
|
||||
// Additional parameters for language model requests. When making a request to a model, parameters will be taken
|
||||
// from the last entry in this list that matches the model's provider and name. In each entry, both provider
|
||||
// and model are optional, so that you can specify parameters for either one.
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
// Some example tasks for common languages.
|
||||
//
|
||||
// For more documentation on how to configure debug tasks,
|
||||
// see: https://zed.dev/docs/debugger
|
||||
[
|
||||
{
|
||||
"label": "Debug active PHP file",
|
||||
|
||||
5
assets/settings/initial_local_debug_tasks.json
Normal file
5
assets/settings/initial_local_debug_tasks.json
Normal file
@@ -0,0 +1,5 @@
|
||||
// Project-local debug tasks
|
||||
//
|
||||
// For more documentation on how to configure debug tasks,
|
||||
// see: https://zed.dev/docs/debugger
|
||||
[]
|
||||
@@ -46,6 +46,7 @@ git.workspace = true
|
||||
gpui.workspace = true
|
||||
heed.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
indoc.workspace = true
|
||||
http_client.workspace = true
|
||||
indexed_docs.workspace = true
|
||||
inventory.workspace = true
|
||||
@@ -78,6 +79,7 @@ serde_json.workspace = true
|
||||
serde_json_lenient.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
sqlez.workspace = true
|
||||
streaming_diff.workspace = true
|
||||
telemetry.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
@@ -97,6 +99,7 @@ workspace-hack.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
zed_llm_client.workspace = true
|
||||
zstd.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
buffer_diff = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -1017,6 +1017,15 @@ impl ActiveThread {
|
||||
self.play_notification_sound(cx);
|
||||
self.show_notification("Waiting for tool confirmation", IconName::Info, window, cx);
|
||||
}
|
||||
ThreadEvent::ToolUseLimitReached => {
|
||||
self.play_notification_sound(cx);
|
||||
self.show_notification(
|
||||
"Consecutive tool use limit reached.",
|
||||
IconName::Warning,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
ThreadEvent::StreamedAssistantText(message_id, text) => {
|
||||
if let Some(rendered_message) = self.rendered_messages_by_id.get_mut(&message_id) {
|
||||
rendered_message.append_text(text, cx);
|
||||
|
||||
@@ -1372,6 +1372,7 @@ impl AgentDiff {
|
||||
| ThreadEvent::ToolFinished { .. }
|
||||
| ThreadEvent::CheckpointChanged
|
||||
| ThreadEvent::ToolConfirmationNeeded
|
||||
| ThreadEvent::ToolUseLimitReached
|
||||
| ThreadEvent::CancelEditing => {}
|
||||
}
|
||||
}
|
||||
@@ -1464,7 +1465,10 @@ impl AgentDiff {
|
||||
if !AgentSettings::get_global(cx).single_file_review {
|
||||
for (editor, _) in self.reviewing_editors.drain() {
|
||||
editor
|
||||
.update(cx, |editor, cx| editor.end_temporary_diff_override(cx))
|
||||
.update(cx, |editor, cx| {
|
||||
editor.end_temporary_diff_override(cx);
|
||||
editor.unregister_addon::<EditorAgentDiffAddon>();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
return;
|
||||
@@ -1560,7 +1564,10 @@ impl AgentDiff {
|
||||
|
||||
if in_workspace {
|
||||
editor
|
||||
.update(cx, |editor, cx| editor.end_temporary_diff_override(cx))
|
||||
.update(cx, |editor, cx| {
|
||||
editor.end_temporary_diff_override(cx);
|
||||
editor.unregister_addon::<EditorAgentDiffAddon>();
|
||||
})
|
||||
.ok();
|
||||
self.reviewing_editors.remove(&editor);
|
||||
}
|
||||
|
||||
@@ -734,6 +734,7 @@ impl Display for RulesContext {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ImageContext {
|
||||
pub project_path: Option<ProjectPath>,
|
||||
pub full_path: Option<Arc<Path>>,
|
||||
pub original_image: Arc<gpui::Image>,
|
||||
// TODO: handle this elsewhere and remove `ignore-interior-mutability` opt-out in clippy.toml
|
||||
// needed due to a false positive of `clippy::mutable_key_type`.
|
||||
|
||||
@@ -14,7 +14,7 @@ use http_client::HttpClientWithUrl;
|
||||
use itertools::Itertools;
|
||||
use language::{Buffer, CodeLabel, HighlightId};
|
||||
use lsp::CompletionContext;
|
||||
use project::{Completion, CompletionIntent, ProjectPath, Symbol, WorktreeId};
|
||||
use project::{Completion, CompletionIntent, CompletionResponse, ProjectPath, Symbol, WorktreeId};
|
||||
use prompt_store::PromptStore;
|
||||
use rope::Point;
|
||||
use text::{Anchor, OffsetRangeExt, ToPoint};
|
||||
@@ -746,7 +746,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
_trigger: CompletionContext,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let state = buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
let line_start = Point::new(position.row, 0);
|
||||
@@ -756,13 +756,13 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
MentionCompletion::try_parse(line, offset_to_line)
|
||||
});
|
||||
let Some(state) = state else {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
|
||||
let Some((workspace, context_store)) =
|
||||
self.workspace.upgrade().zip(self.context_store.upgrade())
|
||||
else {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
@@ -815,10 +815,10 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
cx.spawn(async move |_, cx| {
|
||||
let matches = search_task.await;
|
||||
let Some(editor) = editor.upgrade() else {
|
||||
return Ok(None);
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
Ok(Some(cx.update(|cx| {
|
||||
let completions = cx.update(|cx| {
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| match mat {
|
||||
@@ -901,7 +901,14 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
),
|
||||
})
|
||||
.collect()
|
||||
})?))
|
||||
})?;
|
||||
|
||||
Ok(vec![CompletionResponse {
|
||||
completions,
|
||||
// Since this does its own filtering (see `filter_completions()` returns false),
|
||||
// there is no benefit to computing whether this set of completions is incomplete.
|
||||
is_incomplete: true,
|
||||
}])
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ use assistant_context_editor::AssistantContext;
|
||||
use collections::{HashSet, IndexSet};
|
||||
use futures::{self, FutureExt};
|
||||
use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity};
|
||||
use language::Buffer;
|
||||
use language::{Buffer, File as _};
|
||||
use language_model::LanguageModelImage;
|
||||
use project::image_store::is_image_file;
|
||||
use project::{Project, ProjectItem, ProjectPath, Symbol};
|
||||
@@ -304,11 +304,13 @@ impl ContextStore {
|
||||
project.open_image(project_path.clone(), cx)
|
||||
})?;
|
||||
let image_item = open_image_task.await?;
|
||||
let image = image_item.read_with(cx, |image_item, _| image_item.image.clone())?;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
let item = image_item.read(cx);
|
||||
this.insert_image(
|
||||
Some(image_item.read(cx).project_path(cx)),
|
||||
image,
|
||||
Some(item.project_path(cx)),
|
||||
Some(item.file.full_path(cx).into()),
|
||||
item.image.clone(),
|
||||
remove_if_exists,
|
||||
cx,
|
||||
)
|
||||
@@ -317,12 +319,13 @@ impl ContextStore {
|
||||
}
|
||||
|
||||
pub fn add_image_instance(&mut self, image: Arc<Image>, cx: &mut Context<ContextStore>) {
|
||||
self.insert_image(None, image, false, cx);
|
||||
self.insert_image(None, None, image, false, cx);
|
||||
}
|
||||
|
||||
fn insert_image(
|
||||
&mut self,
|
||||
project_path: Option<ProjectPath>,
|
||||
full_path: Option<Arc<Path>>,
|
||||
image: Arc<Image>,
|
||||
remove_if_exists: bool,
|
||||
cx: &mut Context<ContextStore>,
|
||||
@@ -330,6 +333,7 @@ impl ContextStore {
|
||||
let image_task = LanguageModelImage::from_image(image.clone(), cx).shared();
|
||||
let context = AgentContextHandle::Image(ImageContext {
|
||||
project_path,
|
||||
full_path,
|
||||
original_image: image,
|
||||
image_task,
|
||||
context_id: self.next_context_id.post_inc(),
|
||||
|
||||
@@ -152,7 +152,7 @@ impl HistoryStore {
|
||||
let entries = join_all(entries)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|result| result.log_err())
|
||||
.filter_map(|result| result.log_with_level(log::Level::Debug))
|
||||
.collect::<VecDeque<_>>();
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
|
||||
@@ -112,6 +112,7 @@ pub(crate) fn create_editor(
|
||||
editor.set_placeholder_text("Message the agent – @ to include context", cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_soft_wrap();
|
||||
editor.set_use_modal_editing(true);
|
||||
editor.set_context_menu_options(ContextMenuOptions {
|
||||
min_entries_visible: 12,
|
||||
max_entries_visible: 12,
|
||||
|
||||
@@ -179,18 +179,17 @@ impl TerminalTransaction {
|
||||
// Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal
|
||||
let input = Self::sanitize_input(hunk);
|
||||
self.terminal
|
||||
.update(cx, |terminal, _| terminal.input(input));
|
||||
.update(cx, |terminal, _| terminal.input(input.into_bytes()));
|
||||
}
|
||||
|
||||
pub fn undo(&self, cx: &mut App) {
|
||||
self.terminal
|
||||
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.to_string()));
|
||||
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes()));
|
||||
}
|
||||
|
||||
pub fn complete(&self, cx: &mut App) {
|
||||
self.terminal.update(cx, |terminal, _| {
|
||||
terminal.input(CARRIAGE_RETURN.to_string())
|
||||
});
|
||||
self.terminal
|
||||
.update(cx, |terminal, _| terminal.input(CARRIAGE_RETURN.as_bytes()));
|
||||
}
|
||||
|
||||
fn sanitize_input(mut input: String) -> String {
|
||||
|
||||
@@ -106,7 +106,7 @@ impl TerminalInlineAssistant {
|
||||
});
|
||||
let prompt_editor_render = prompt_editor.clone();
|
||||
let block = terminal_view::BlockProperties {
|
||||
height: 2,
|
||||
height: 4,
|
||||
render: Box::new(move |_| prompt_editor_render.clone().into_any_element()),
|
||||
};
|
||||
terminal_view.update(cx, |terminal_view, cx| {
|
||||
@@ -202,7 +202,7 @@ impl TerminalInlineAssistant {
|
||||
.update(cx, |terminal, cx| {
|
||||
terminal
|
||||
.terminal()
|
||||
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.to_string()));
|
||||
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes()));
|
||||
})
|
||||
.log_err();
|
||||
|
||||
|
||||
@@ -1673,6 +1673,7 @@ impl Thread {
|
||||
}
|
||||
CompletionRequestStatus::ToolUseLimitReached => {
|
||||
thread.tool_use_limit_reached = true;
|
||||
cx.emit(ThreadEvent::ToolUseLimitReached);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2843,6 +2844,7 @@ pub enum ThreadEvent {
|
||||
},
|
||||
CheckpointChanged,
|
||||
ToolConfirmationNeeded,
|
||||
ToolUseLimitReached,
|
||||
CancelEditing,
|
||||
CompletionCanceled,
|
||||
}
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use std::borrow::Cow;
|
||||
use std::cell::{Ref, RefCell};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, CompletionMode};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
@@ -17,8 +16,7 @@ use gpui::{
|
||||
App, BackgroundExecutor, Context, Entity, EventEmitter, Global, ReadGlobal, SharedString,
|
||||
Subscription, Task, prelude::*,
|
||||
};
|
||||
use heed::Database;
|
||||
use heed::types::SerdeBincode;
|
||||
|
||||
use language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage};
|
||||
use project::context_server_store::{ContextServerStatus, ContextServerStore};
|
||||
use project::{Project, ProjectItem, ProjectPath, Worktree};
|
||||
@@ -35,6 +33,42 @@ use crate::context_server_tool::ContextServerTool;
|
||||
use crate::thread::{
|
||||
DetailedSummaryState, ExceededWindowError, MessageId, ProjectSnapshot, Thread, ThreadId,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use sqlez::{
|
||||
bindable::{Bind, Column},
|
||||
connection::Connection,
|
||||
statement::Statement,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum DataType {
|
||||
#[serde(rename = "json")]
|
||||
Json,
|
||||
#[serde(rename = "zstd")]
|
||||
Zstd,
|
||||
}
|
||||
|
||||
impl Bind for DataType {
|
||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||
let value = match self {
|
||||
DataType::Json => "json",
|
||||
DataType::Zstd => "zstd",
|
||||
};
|
||||
value.bind(statement, start_index)
|
||||
}
|
||||
}
|
||||
|
||||
impl Column for DataType {
|
||||
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
|
||||
let (value, next_index) = String::column(statement, start_index)?;
|
||||
let data_type = match value.as_str() {
|
||||
"json" => DataType::Json,
|
||||
"zstd" => DataType::Zstd,
|
||||
_ => anyhow::bail!("Unknown data type: {}", value),
|
||||
};
|
||||
Ok((data_type, next_index))
|
||||
}
|
||||
}
|
||||
|
||||
const RULES_FILE_NAMES: [&'static str; 6] = [
|
||||
".rules",
|
||||
@@ -866,25 +900,27 @@ impl Global for GlobalThreadsDatabase {}
|
||||
|
||||
pub(crate) struct ThreadsDatabase {
|
||||
executor: BackgroundExecutor,
|
||||
env: heed::Env,
|
||||
threads: Database<SerdeBincode<ThreadId>, SerializedThread>,
|
||||
connection: Arc<Mutex<Connection>>,
|
||||
}
|
||||
|
||||
impl heed::BytesEncode<'_> for SerializedThread {
|
||||
type EItem = SerializedThread;
|
||||
impl ThreadsDatabase {
|
||||
fn connection(&self) -> Arc<Mutex<Connection>> {
|
||||
self.connection.clone()
|
||||
}
|
||||
|
||||
fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, heed::BoxedError> {
|
||||
serde_json::to_vec(item).map(Cow::Owned).map_err(Into::into)
|
||||
const COMPRESSION_LEVEL: i32 = 3;
|
||||
}
|
||||
|
||||
impl Bind for ThreadId {
|
||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||
self.to_string().bind(statement, start_index)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> heed::BytesDecode<'a> for SerializedThread {
|
||||
type DItem = SerializedThread;
|
||||
|
||||
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
|
||||
// We implement this type manually because we want to call `SerializedThread::from_json`,
|
||||
// instead of the Deserialize trait implementation for `SerializedThread`.
|
||||
SerializedThread::from_json(bytes).map_err(Into::into)
|
||||
impl Column for ThreadId {
|
||||
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
|
||||
let (id_str, next_index) = String::column(statement, start_index)?;
|
||||
Ok((ThreadId::from(id_str.as_str()), next_index))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -900,8 +936,8 @@ impl ThreadsDatabase {
|
||||
let database_future = executor
|
||||
.spawn({
|
||||
let executor = executor.clone();
|
||||
let database_path = paths::data_dir().join("threads/threads-db.1.mdb");
|
||||
async move { ThreadsDatabase::new(database_path, executor) }
|
||||
let threads_dir = paths::data_dir().join("threads");
|
||||
async move { ThreadsDatabase::new(threads_dir, executor) }
|
||||
})
|
||||
.then(|result| future::ready(result.map(Arc::new).map_err(Arc::new)))
|
||||
.boxed()
|
||||
@@ -910,41 +946,144 @@ impl ThreadsDatabase {
|
||||
cx.set_global(GlobalThreadsDatabase(database_future));
|
||||
}
|
||||
|
||||
pub fn new(path: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
|
||||
std::fs::create_dir_all(&path)?;
|
||||
pub fn new(threads_dir: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
|
||||
std::fs::create_dir_all(&threads_dir)?;
|
||||
|
||||
let sqlite_path = threads_dir.join("threads.db");
|
||||
let mdb_path = threads_dir.join("threads-db.1.mdb");
|
||||
|
||||
let needs_migration_from_heed = mdb_path.exists();
|
||||
|
||||
let connection = Connection::open_file(&sqlite_path.to_string_lossy());
|
||||
|
||||
connection.exec(indoc! {"
|
||||
CREATE TABLE IF NOT EXISTS threads (
|
||||
id TEXT PRIMARY KEY,
|
||||
summary TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL,
|
||||
data_type TEXT NOT NULL,
|
||||
data BLOB NOT NULL
|
||||
)
|
||||
"})?()
|
||||
.map_err(|e| anyhow!("Failed to create threads table: {}", e))?;
|
||||
|
||||
let db = Self {
|
||||
executor: executor.clone(),
|
||||
connection: Arc::new(Mutex::new(connection)),
|
||||
};
|
||||
|
||||
if needs_migration_from_heed {
|
||||
let db_connection = db.connection();
|
||||
let executor_clone = executor.clone();
|
||||
executor
|
||||
.spawn(async move {
|
||||
log::info!("Starting threads.db migration");
|
||||
Self::migrate_from_heed(&mdb_path, db_connection, executor_clone)?;
|
||||
std::fs::remove_dir_all(mdb_path)?;
|
||||
log::info!("threads.db migrated to sqlite");
|
||||
Ok::<(), anyhow::Error>(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
// Remove this migration after 2025-09-01
|
||||
fn migrate_from_heed(
|
||||
mdb_path: &Path,
|
||||
connection: Arc<Mutex<Connection>>,
|
||||
_executor: BackgroundExecutor,
|
||||
) -> Result<()> {
|
||||
use heed::types::SerdeBincode;
|
||||
struct SerializedThreadHeed(SerializedThread);
|
||||
|
||||
impl heed::BytesEncode<'_> for SerializedThreadHeed {
|
||||
type EItem = SerializedThreadHeed;
|
||||
|
||||
fn bytes_encode(
|
||||
item: &Self::EItem,
|
||||
) -> Result<std::borrow::Cow<[u8]>, heed::BoxedError> {
|
||||
serde_json::to_vec(&item.0)
|
||||
.map(std::borrow::Cow::Owned)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> heed::BytesDecode<'a> for SerializedThreadHeed {
|
||||
type DItem = SerializedThreadHeed;
|
||||
|
||||
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
|
||||
SerializedThread::from_json(bytes)
|
||||
.map(SerializedThreadHeed)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
const ONE_GB_IN_BYTES: usize = 1024 * 1024 * 1024;
|
||||
|
||||
let env = unsafe {
|
||||
heed::EnvOpenOptions::new()
|
||||
.map_size(ONE_GB_IN_BYTES)
|
||||
.max_dbs(1)
|
||||
.open(path)?
|
||||
.open(mdb_path)?
|
||||
};
|
||||
|
||||
let mut txn = env.write_txn()?;
|
||||
let threads = env.create_database(&mut txn, Some("threads"))?;
|
||||
txn.commit()?;
|
||||
let txn = env.write_txn()?;
|
||||
let threads: heed::Database<SerdeBincode<ThreadId>, SerializedThreadHeed> = env
|
||||
.open_database(&txn, Some("threads"))?
|
||||
.ok_or_else(|| anyhow!("threads database not found"))?;
|
||||
|
||||
Ok(Self {
|
||||
executor,
|
||||
env,
|
||||
threads,
|
||||
})
|
||||
for result in threads.iter(&txn)? {
|
||||
let (thread_id, thread_heed) = result?;
|
||||
Self::save_thread_sync(&connection, thread_id, thread_heed.0)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn save_thread_sync(
|
||||
connection: &Arc<Mutex<Connection>>,
|
||||
id: ThreadId,
|
||||
thread: SerializedThread,
|
||||
) -> Result<()> {
|
||||
let json_data = serde_json::to_string(&thread)?;
|
||||
let summary = thread.summary.to_string();
|
||||
let updated_at = thread.updated_at.to_rfc3339();
|
||||
|
||||
let connection = connection.lock().unwrap();
|
||||
|
||||
let compressed = zstd::encode_all(json_data.as_bytes(), Self::COMPRESSION_LEVEL)?;
|
||||
let data_type = DataType::Zstd;
|
||||
let data = compressed;
|
||||
|
||||
let mut insert = connection.exec_bound::<(ThreadId, String, String, DataType, Vec<u8>)>(indoc! {"
|
||||
INSERT OR REPLACE INTO threads (id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?)
|
||||
"})?;
|
||||
|
||||
insert((id, summary, updated_at, data_type, data))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SerializedThreadMetadata>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
let connection = self.connection.clone();
|
||||
|
||||
self.executor.spawn(async move {
|
||||
let txn = env.read_txn()?;
|
||||
let mut iter = threads.iter(&txn)?;
|
||||
let connection = connection.lock().unwrap();
|
||||
let mut select =
|
||||
connection.select_bound::<(), (ThreadId, String, String)>(indoc! {"
|
||||
SELECT id, summary, updated_at FROM threads ORDER BY updated_at DESC
|
||||
"})?;
|
||||
|
||||
let rows = select(())?;
|
||||
let mut threads = Vec::new();
|
||||
while let Some((key, value)) = iter.next().transpose()? {
|
||||
|
||||
for (id, summary, updated_at) in rows {
|
||||
threads.push(SerializedThreadMetadata {
|
||||
id: key,
|
||||
summary: value.summary,
|
||||
updated_at: value.updated_at,
|
||||
id,
|
||||
summary: summary.into(),
|
||||
updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -953,36 +1092,51 @@ impl ThreadsDatabase {
|
||||
}
|
||||
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SerializedThread>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
let connection = self.connection.clone();
|
||||
|
||||
self.executor.spawn(async move {
|
||||
let txn = env.read_txn()?;
|
||||
let thread = threads.get(&txn, &id)?;
|
||||
Ok(thread)
|
||||
let connection = connection.lock().unwrap();
|
||||
let mut select = connection.select_bound::<ThreadId, (DataType, Vec<u8>)>(indoc! {"
|
||||
SELECT data_type, data FROM threads WHERE id = ? LIMIT 1
|
||||
"})?;
|
||||
|
||||
let rows = select(id)?;
|
||||
if let Some((data_type, data)) = rows.into_iter().next() {
|
||||
let json_data = match data_type {
|
||||
DataType::Zstd => {
|
||||
let decompressed = zstd::decode_all(&data[..])?;
|
||||
String::from_utf8(decompressed)?
|
||||
}
|
||||
DataType::Json => String::from_utf8(data)?,
|
||||
};
|
||||
|
||||
let thread = SerializedThread::from_json(json_data.as_bytes())?;
|
||||
Ok(Some(thread))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task<Result<()>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
let connection = self.connection.clone();
|
||||
|
||||
self.executor.spawn(async move {
|
||||
let mut txn = env.write_txn()?;
|
||||
threads.put(&mut txn, &id, &thread)?;
|
||||
txn.commit()?;
|
||||
Ok(())
|
||||
})
|
||||
self.executor
|
||||
.spawn(async move { Self::save_thread_sync(&connection, id, thread) })
|
||||
}
|
||||
|
||||
pub fn delete_thread(&self, id: ThreadId) -> Task<Result<()>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
let connection = self.connection.clone();
|
||||
|
||||
self.executor.spawn(async move {
|
||||
let mut txn = env.write_txn()?;
|
||||
threads.delete(&mut txn, &id)?;
|
||||
txn.commit()?;
|
||||
let connection = connection.lock().unwrap();
|
||||
|
||||
let mut delete = connection.exec_bound::<ThreadId>(indoc! {"
|
||||
DELETE FROM threads WHERE id = ?
|
||||
"})?;
|
||||
|
||||
delete(id)?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
@@ -304,7 +304,7 @@ impl AddedContext {
|
||||
AgentContextHandle::Thread(handle) => Some(Self::pending_thread(handle, cx)),
|
||||
AgentContextHandle::TextThread(handle) => Some(Self::pending_text_thread(handle, cx)),
|
||||
AgentContextHandle::Rules(handle) => Self::pending_rules(handle, prompt_store, cx),
|
||||
AgentContextHandle::Image(handle) => Some(Self::image(handle)),
|
||||
AgentContextHandle::Image(handle) => Some(Self::image(handle, cx)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -318,7 +318,7 @@ impl AddedContext {
|
||||
AgentContext::Thread(context) => Self::attached_thread(context),
|
||||
AgentContext::TextThread(context) => Self::attached_text_thread(context),
|
||||
AgentContext::Rules(context) => Self::attached_rules(context),
|
||||
AgentContext::Image(context) => Self::image(context.clone()),
|
||||
AgentContext::Image(context) => Self::image(context.clone(), cx),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -333,14 +333,8 @@ impl AddedContext {
|
||||
|
||||
fn file(handle: FileContextHandle, full_path: &Path, cx: &App) -> AddedContext {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let name = full_path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().into_owned().into())
|
||||
.unwrap_or_else(|| full_path_string.clone());
|
||||
let parent = full_path
|
||||
.parent()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|n| n.to_string_lossy().into_owned().into());
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
AddedContext {
|
||||
kind: ContextKind::File,
|
||||
name,
|
||||
@@ -370,14 +364,8 @@ impl AddedContext {
|
||||
|
||||
fn directory(handle: DirectoryContextHandle, full_path: &Path) -> AddedContext {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let name = full_path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().into_owned().into())
|
||||
.unwrap_or_else(|| full_path_string.clone());
|
||||
let parent = full_path
|
||||
.parent()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|n| n.to_string_lossy().into_owned().into());
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
AddedContext {
|
||||
kind: ContextKind::Directory,
|
||||
name,
|
||||
@@ -605,13 +593,23 @@ impl AddedContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn image(context: ImageContext) -> AddedContext {
|
||||
fn image(context: ImageContext, cx: &App) -> AddedContext {
|
||||
let (name, parent, icon_path) = if let Some(full_path) = context.full_path.as_ref() {
|
||||
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
|
||||
let (name, parent) =
|
||||
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
|
||||
let icon_path = FileIcons::get_icon(&full_path, cx);
|
||||
(name, parent, icon_path)
|
||||
} else {
|
||||
("Image".into(), None, None)
|
||||
};
|
||||
|
||||
AddedContext {
|
||||
kind: ContextKind::Image,
|
||||
name: "Image".into(),
|
||||
parent: None,
|
||||
name,
|
||||
parent,
|
||||
tooltip: None,
|
||||
icon_path: None,
|
||||
icon_path,
|
||||
status: match context.status() {
|
||||
ImageStatus::Loading => ContextStatus::Loading {
|
||||
message: "Loading…".into(),
|
||||
@@ -639,6 +637,22 @@ impl AddedContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_file_name_and_directory_from_full_path(
|
||||
path: &Path,
|
||||
name_fallback: &SharedString,
|
||||
) -> (SharedString, Option<SharedString>) {
|
||||
let name = path
|
||||
.file_name()
|
||||
.map(|n| n.to_string_lossy().into_owned().into())
|
||||
.unwrap_or_else(|| name_fallback.clone());
|
||||
let parent = path
|
||||
.parent()
|
||||
.and_then(|p| p.file_name())
|
||||
.map(|n| n.to_string_lossy().into_owned().into());
|
||||
|
||||
(name, parent)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct ContextFileExcerpt {
|
||||
pub file_name_and_range: SharedString,
|
||||
@@ -765,37 +779,49 @@ impl Component for AddedContext {
|
||||
let mut next_context_id = ContextId::zero();
|
||||
let image_ready = (
|
||||
"Ready",
|
||||
AddedContext::image(ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
|
||||
}),
|
||||
AddedContext::image(
|
||||
ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
full_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
|
||||
},
|
||||
cx,
|
||||
),
|
||||
);
|
||||
|
||||
let image_loading = (
|
||||
"Loading",
|
||||
AddedContext::image(ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: cx
|
||||
.background_spawn(async move {
|
||||
smol::Timer::after(Duration::from_secs(60 * 5)).await;
|
||||
Some(LanguageModelImage::empty())
|
||||
})
|
||||
.shared(),
|
||||
}),
|
||||
AddedContext::image(
|
||||
ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
full_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: cx
|
||||
.background_spawn(async move {
|
||||
smol::Timer::after(Duration::from_secs(60 * 5)).await;
|
||||
Some(LanguageModelImage::empty())
|
||||
})
|
||||
.shared(),
|
||||
},
|
||||
cx,
|
||||
),
|
||||
);
|
||||
|
||||
let image_error = (
|
||||
"Error",
|
||||
AddedContext::image(ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(None).shared(),
|
||||
}),
|
||||
AddedContext::image(
|
||||
ImageContext {
|
||||
context_id: next_context_id.post_inc(),
|
||||
project_path: None,
|
||||
full_path: None,
|
||||
original_image: Arc::new(Image::empty()),
|
||||
image_task: Task::ready(None).shared(),
|
||||
},
|
||||
cx,
|
||||
),
|
||||
);
|
||||
|
||||
Some(
|
||||
|
||||
@@ -372,6 +372,8 @@ impl AgentSettingsContent {
|
||||
None,
|
||||
None,
|
||||
Some(language_model.supports_tools()),
|
||||
Some(language_model.supports_images()),
|
||||
None,
|
||||
)),
|
||||
api_url,
|
||||
});
|
||||
|
||||
@@ -48,7 +48,7 @@ impl SlashCommandCompletionProvider {
|
||||
name_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let candidates = slash_commands
|
||||
.command_names(cx)
|
||||
@@ -71,28 +71,27 @@ impl SlashCommandCompletionProvider {
|
||||
.await;
|
||||
|
||||
cx.update(|_, cx| {
|
||||
Some(
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let completions = matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
@@ -118,22 +117,27 @@ impl SlashCommandCompletionProvider {
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
replace_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
});
|
||||
|
||||
Some(project::Completion {
|
||||
replace_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
vec![project::CompletionResponse {
|
||||
completions,
|
||||
is_incomplete: false,
|
||||
}]
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -147,7 +151,7 @@ impl SlashCommandCompletionProvider {
|
||||
last_argument_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let new_cancel_flag = Arc::new(AtomicBool::new(false));
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
@@ -165,28 +169,27 @@ impl SlashCommandCompletionProvider {
|
||||
let workspace = self.workspace.clone();
|
||||
let arguments = arguments.to_vec();
|
||||
cx.background_spawn(async move {
|
||||
Ok(Some(
|
||||
completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
let completions = completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if new_argument.after_completion.run()
|
||||
@@ -210,34 +213,41 @@ impl SlashCommandCompletionProvider {
|
||||
!new_argument.after_completion.run()
|
||||
}
|
||||
}
|
||||
}) as Arc<_>
|
||||
});
|
||||
}) as Arc<_>
|
||||
});
|
||||
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
project::Completion {
|
||||
replace_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
project::Completion {
|
||||
replace_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(vec![project::CompletionResponse {
|
||||
completions,
|
||||
is_incomplete: false,
|
||||
}])
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
Task::ready(Ok(vec![project::CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: false,
|
||||
}]))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -251,7 +261,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
_: editor::CompletionContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
) -> Task<Result<Vec<project::CompletionResponse>>> {
|
||||
let Some((name, arguments, command_range, last_argument_range)) =
|
||||
buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
@@ -295,7 +305,10 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
Some((name, arguments, command_range, last_argument_range))
|
||||
})
|
||||
else {
|
||||
return Task::ready(Ok(Some(Vec::new())));
|
||||
return Task::ready(Ok(vec![project::CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: false,
|
||||
}]));
|
||||
};
|
||||
|
||||
if let Some((arguments, argument_range)) = arguments {
|
||||
|
||||
@@ -35,6 +35,7 @@ pub struct ChannelBuffer {
|
||||
pub enum ChannelBufferEvent {
|
||||
CollaboratorsChanged,
|
||||
Disconnected,
|
||||
Connected,
|
||||
BufferEdited,
|
||||
ChannelChanged,
|
||||
}
|
||||
@@ -103,6 +104,17 @@ impl ChannelBuffer {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn connected(&mut self, cx: &mut Context<Self>) {
|
||||
self.connected = true;
|
||||
if self.subscription.is_none() {
|
||||
let Ok(subscription) = self.client.subscribe_to_entity(self.channel_id.0) else {
|
||||
return;
|
||||
};
|
||||
self.subscription = Some(subscription.set_entity(&cx.entity(), &mut cx.to_async()));
|
||||
cx.emit(ChannelBufferEvent::Connected);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remote_id(&self, cx: &App) -> BufferId {
|
||||
self.buffer.read(cx).remote_id()
|
||||
}
|
||||
|
||||
@@ -972,6 +972,7 @@ impl ChannelStore {
|
||||
.log_err();
|
||||
|
||||
if let Some(operations) = operations {
|
||||
channel_buffer.connected(cx);
|
||||
let client = this.client.clone();
|
||||
cx.background_spawn(async move {
|
||||
let operations = operations.await;
|
||||
@@ -1012,8 +1013,8 @@ impl ChannelStore {
|
||||
|
||||
if let Some(this) = this.upgrade() {
|
||||
this.update(cx, |this, cx| {
|
||||
for (_, buffer) in this.opened_buffers.drain() {
|
||||
if let OpenEntityHandle::Open(buffer) = buffer {
|
||||
for (_, buffer) in &this.opened_buffers {
|
||||
if let OpenEntityHandle::Open(buffer) = &buffer {
|
||||
if let Some(buffer) = buffer.upgrade() {
|
||||
buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
|
||||
}
|
||||
|
||||
@@ -354,6 +354,10 @@ impl ChannelView {
|
||||
editor.set_read_only(true);
|
||||
cx.notify();
|
||||
}),
|
||||
ChannelBufferEvent::Connected => self.editor.update(cx, |editor, cx| {
|
||||
editor.set_read_only(false);
|
||||
cx.notify();
|
||||
}),
|
||||
ChannelBufferEvent::ChannelChanged => {
|
||||
self.editor.update(cx, |_, cx| {
|
||||
cx.emit(editor::EditorEvent::TitleChanged);
|
||||
|
||||
@@ -12,7 +12,7 @@ use language::{
|
||||
Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry, ToOffset,
|
||||
language_settings::SoftWrap,
|
||||
};
|
||||
use project::{Completion, CompletionSource, search::SearchQuery};
|
||||
use project::{Completion, CompletionResponse, CompletionSource, search::SearchQuery};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
@@ -64,9 +64,9 @@ impl CompletionProvider for MessageEditorCompletionProvider {
|
||||
_: editor::CompletionContext,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let Some(handle) = self.0.upgrade() else {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
handle.update(cx, |message_editor, cx| {
|
||||
message_editor.completions(buffer, buffer_position, cx)
|
||||
@@ -248,22 +248,21 @@ impl MessageEditor {
|
||||
buffer: &Entity<Buffer>,
|
||||
end_anchor: Anchor,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
if let Some((start_anchor, query, candidates)) =
|
||||
self.collect_mention_candidates(buffer, end_anchor, cx)
|
||||
{
|
||||
if !candidates.is_empty() {
|
||||
return cx.spawn(async move |_, cx| {
|
||||
Ok(Some(
|
||||
Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
&candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_mention,
|
||||
)
|
||||
.await,
|
||||
))
|
||||
let completion_response = Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
&candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_mention,
|
||||
)
|
||||
.await;
|
||||
Ok(vec![completion_response])
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -273,21 +272,23 @@ impl MessageEditor {
|
||||
{
|
||||
if !candidates.is_empty() {
|
||||
return cx.spawn(async move |_, cx| {
|
||||
Ok(Some(
|
||||
Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_emoji,
|
||||
)
|
||||
.await,
|
||||
))
|
||||
let completion_response = Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
candidates,
|
||||
start_anchor..end_anchor,
|
||||
Self::completion_for_emoji,
|
||||
)
|
||||
.await;
|
||||
Ok(vec![completion_response])
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
Task::ready(Ok(vec![CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: false,
|
||||
}]))
|
||||
}
|
||||
|
||||
async fn resolve_completions_for_candidates(
|
||||
@@ -296,18 +297,19 @@ impl MessageEditor {
|
||||
candidates: &[StringMatchCandidate],
|
||||
range: Range<Anchor>,
|
||||
completion_fn: impl Fn(&StringMatch) -> (String, CodeLabel),
|
||||
) -> Vec<Completion> {
|
||||
) -> CompletionResponse {
|
||||
const LIMIT: usize = 10;
|
||||
let matches = fuzzy::match_strings(
|
||||
candidates,
|
||||
query,
|
||||
true,
|
||||
10,
|
||||
LIMIT,
|
||||
&Default::default(),
|
||||
cx.background_executor().clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
matches
|
||||
let completions = matches
|
||||
.into_iter()
|
||||
.map(|mat| {
|
||||
let (new_text, label) = completion_fn(&mat);
|
||||
@@ -322,7 +324,12 @@ impl MessageEditor {
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
CompletionResponse {
|
||||
is_incomplete: completions.len() >= LIMIT,
|
||||
completions,
|
||||
}
|
||||
}
|
||||
|
||||
fn completion_for_mention(mat: &StringMatch) -> (String, CodeLabel) {
|
||||
|
||||
@@ -50,6 +50,7 @@ project.workspace = true
|
||||
rpc.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
# serde_json_lenient.workspace = true
|
||||
settings.workspace = true
|
||||
shlex.workspace = true
|
||||
sysinfo.workspace = true
|
||||
|
||||
@@ -3,11 +3,12 @@ use crate::session::DebugSession;
|
||||
use crate::session::running::RunningState;
|
||||
use crate::{
|
||||
ClearAllBreakpoints, Continue, Detach, FocusBreakpointList, FocusConsole, FocusFrames,
|
||||
FocusLoadedSources, FocusModules, FocusTerminal, FocusVariables, Pause, Restart,
|
||||
ShowStackTrace, StepBack, StepInto, StepOut, StepOver, Stop, ToggleIgnoreBreakpoints,
|
||||
ToggleSessionPicker, ToggleThreadPicker, persistence, spawn_task_or_modal,
|
||||
FocusLoadedSources, FocusModules, FocusTerminal, FocusVariables, NewProcessModal,
|
||||
NewProcessMode, Pause, Restart, ShowStackTrace, StepBack, StepInto, StepOut, StepOver, Stop,
|
||||
ToggleExpandItem, ToggleIgnoreBreakpoints, ToggleSessionPicker, ToggleThreadPicker,
|
||||
persistence, spawn_task_or_modal,
|
||||
};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::Result;
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use dap::StartDebuggingRequestArguments;
|
||||
use dap::adapters::DebugAdapterName;
|
||||
@@ -24,7 +25,7 @@ use gpui::{
|
||||
|
||||
use language::Buffer;
|
||||
use project::debugger::session::{Session, SessionStateEvent};
|
||||
use project::{Fs, ProjectPath, WorktreeId};
|
||||
use project::{Fs, WorktreeId};
|
||||
use project::{Project, debugger::session::ThreadStatus};
|
||||
use rpc::proto::{self};
|
||||
use settings::Settings;
|
||||
@@ -69,6 +70,7 @@ pub struct DebugPanel {
|
||||
pub(crate) thread_picker_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
pub(crate) session_picker_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
fs: Arc<dyn Fs>,
|
||||
is_zoomed: bool,
|
||||
_subscriptions: [Subscription; 1],
|
||||
}
|
||||
|
||||
@@ -103,6 +105,7 @@ impl DebugPanel {
|
||||
fs: workspace.app_state().fs.clone(),
|
||||
thread_picker_menu_handle,
|
||||
session_picker_menu_handle,
|
||||
is_zoomed: false,
|
||||
_subscriptions: [focus_subscription],
|
||||
debug_scenario_scheduled_last: true,
|
||||
}
|
||||
@@ -334,10 +337,17 @@ impl DebugPanel {
|
||||
let Some(task_inventory) = task_store.read(cx).task_inventory() else {
|
||||
return;
|
||||
};
|
||||
let workspace = self.workspace.clone();
|
||||
let Some(scenario) = task_inventory.read(cx).last_scheduled_scenario().cloned() else {
|
||||
window.defer(cx, move |window, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
NewProcessModal::show(workspace, window, NewProcessMode::Launch, None, cx);
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
return;
|
||||
};
|
||||
let workspace = self.workspace.clone();
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let task_contexts = workspace
|
||||
@@ -942,68 +952,69 @@ impl DebugPanel {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub(crate) fn save_scenario(
|
||||
&self,
|
||||
scenario: &DebugScenario,
|
||||
worktree_id: WorktreeId,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<ProjectPath>> {
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
let Some(mut path) = workspace.absolute_path_of_worktree(worktree_id, cx) else {
|
||||
return Task::ready(Err(anyhow!("Couldn't get worktree path")));
|
||||
};
|
||||
// TODO: restore once we have proper comment preserving file edits
|
||||
// pub(crate) fn save_scenario(
|
||||
// &self,
|
||||
// scenario: &DebugScenario,
|
||||
// worktree_id: WorktreeId,
|
||||
// window: &mut Window,
|
||||
// cx: &mut App,
|
||||
// ) -> Task<Result<ProjectPath>> {
|
||||
// self.workspace
|
||||
// .update(cx, |workspace, cx| {
|
||||
// let Some(mut path) = workspace.absolute_path_of_worktree(worktree_id, cx) else {
|
||||
// return Task::ready(Err(anyhow!("Couldn't get worktree path")));
|
||||
// };
|
||||
|
||||
let serialized_scenario = serde_json::to_value(scenario);
|
||||
// let serialized_scenario = serde_json::to_value(scenario);
|
||||
|
||||
cx.spawn_in(window, async move |workspace, cx| {
|
||||
let serialized_scenario = serialized_scenario?;
|
||||
let fs =
|
||||
workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?;
|
||||
// cx.spawn_in(window, async move |workspace, cx| {
|
||||
// let serialized_scenario = serialized_scenario?;
|
||||
// let fs =
|
||||
// workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?;
|
||||
|
||||
path.push(paths::local_settings_folder_relative_path());
|
||||
if !fs.is_dir(path.as_path()).await {
|
||||
fs.create_dir(path.as_path()).await?;
|
||||
}
|
||||
path.pop();
|
||||
// path.push(paths::local_settings_folder_relative_path());
|
||||
// if !fs.is_dir(path.as_path()).await {
|
||||
// fs.create_dir(path.as_path()).await?;
|
||||
// }
|
||||
// path.pop();
|
||||
|
||||
path.push(paths::local_debug_file_relative_path());
|
||||
let path = path.as_path();
|
||||
// path.push(paths::local_debug_file_relative_path());
|
||||
// let path = path.as_path();
|
||||
|
||||
if !fs.is_file(path).await {
|
||||
let content =
|
||||
serde_json::to_string_pretty(&serde_json::Value::Array(vec![
|
||||
serialized_scenario,
|
||||
]))?;
|
||||
// if !fs.is_file(path).await {
|
||||
// fs.create_file(path, Default::default()).await?;
|
||||
// fs.write(
|
||||
// path,
|
||||
// initial_local_debug_tasks_content().to_string().as_bytes(),
|
||||
// )
|
||||
// .await?;
|
||||
// }
|
||||
|
||||
fs.create_file(path, Default::default()).await?;
|
||||
fs.save(path, &content.into(), Default::default()).await?;
|
||||
} else {
|
||||
let content = fs.load(path).await?;
|
||||
let mut values = serde_json::from_str::<Vec<serde_json::Value>>(&content)?;
|
||||
values.push(serialized_scenario);
|
||||
fs.save(
|
||||
path,
|
||||
&serde_json::to_string_pretty(&values).map(Into::into)?,
|
||||
Default::default(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
// let content = fs.load(path).await?;
|
||||
// let mut values =
|
||||
// serde_json_lenient::from_str::<Vec<serde_json::Value>>(&content)?;
|
||||
// values.push(serialized_scenario);
|
||||
// fs.save(
|
||||
// path,
|
||||
// &serde_json_lenient::to_string_pretty(&values).map(Into::into)?,
|
||||
// Default::default(),
|
||||
// )
|
||||
// .await?;
|
||||
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace
|
||||
.project()
|
||||
.read(cx)
|
||||
.project_path_for_absolute_path(&path, cx)
|
||||
.context(
|
||||
"Couldn't get project path for .zed/debug.json in active worktree",
|
||||
)
|
||||
})?
|
||||
})
|
||||
})
|
||||
.unwrap_or_else(|err| Task::ready(Err(err)))
|
||||
}
|
||||
// workspace.update(cx, |workspace, cx| {
// workspace
// .project()
// .read(cx)
// .project_path_for_absolute_path(&path, cx)
// .context(
// "Couldn't get project path for .zed/debug.json in active worktree",
// )
// })?
// })
// })
// .unwrap_or_else(|err| Task::ready(Err(err)))
// }

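A minimal standalone sketch of the save flow above, assuming only `std`, `serde_json`, and `anyhow`: create `.zed/debug.json` with a one-element array on first save, otherwise load the existing array, append the new scenario, and rewrite it pretty-printed (the real code goes through Zed's async `Fs` trait and worktree-relative paths):

use std::{fs, path::Path};

/// Append `scenario` to the JSON array stored at `path`, creating the file
/// (and its parent directory) with a one-element array if it does not exist yet.
fn append_scenario(path: &Path, scenario: serde_json::Value) -> anyhow::Result<()> {
    let values = if path.is_file() {
        let content = fs::read_to_string(path)?;
        let mut values: Vec<serde_json::Value> = serde_json::from_str(&content)?;
        values.push(scenario);
        values
    } else {
        if let Some(dir) = path.parent() {
            fs::create_dir_all(dir)?;
        }
        vec![scenario]
    };
    fs::write(path, serde_json::to_string_pretty(&values)?)?;
    Ok(())
}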
pub(crate) fn toggle_thread_picker(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.thread_picker_menu_handle.toggle(window, cx);
|
||||
@@ -1012,6 +1023,22 @@ impl DebugPanel {
|
||||
pub(crate) fn toggle_session_picker(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.session_picker_menu_handle.toggle(window, cx);
|
||||
}
|
||||
|
||||
fn toggle_zoom(
|
||||
&mut self,
|
||||
_: &workspace::ToggleZoom,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if self.is_zoomed {
|
||||
cx.emit(PanelEvent::ZoomOut);
|
||||
} else {
|
||||
if !self.focus_handle(cx).contains_focused(window, cx) {
|
||||
cx.focus_self(window);
|
||||
}
|
||||
cx.emit(PanelEvent::ZoomIn);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn register_session_inner(
|
||||
@@ -1167,6 +1194,15 @@ impl Panel for DebugPanel {
|
||||
}
|
||||
|
||||
fn set_active(&mut self, _: bool, _: &mut Window, _: &mut Context<Self>) {}
|
||||
|
||||
fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool {
|
||||
self.is_zoomed
|
||||
}
|
||||
|
||||
fn set_zoomed(&mut self, zoomed: bool, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.is_zoomed = zoomed;
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for DebugPanel {
|
||||
@@ -1307,6 +1343,23 @@ impl Render for DebugPanel {
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.on_action(cx.listener(Self::toggle_zoom))
|
||||
.on_action(cx.listener(|panel, _: &ToggleExpandItem, _, cx| {
|
||||
let Some(session) = panel.active_session() else {
|
||||
return;
|
||||
};
|
||||
let active_pane = session
|
||||
.read(cx)
|
||||
.running_state()
|
||||
.read(cx)
|
||||
.active_pane()
|
||||
.clone();
|
||||
active_pane.update(cx, |pane, cx| {
|
||||
let is_zoomed = pane.is_zoomed();
|
||||
pane.set_zoomed(!is_zoomed, cx);
|
||||
});
|
||||
cx.notify();
|
||||
}))
|
||||
.when(self.active_session.is_some(), |this| {
|
||||
this.on_mouse_down(
|
||||
MouseButton::Right,
|
||||
@@ -1410,4 +1463,10 @@ impl workspace::DebuggerProvider for DebuggerProvider {
|
||||
fn debug_scenario_scheduled_last(&self, cx: &App) -> bool {
|
||||
self.0.read(cx).debug_scenario_scheduled_last
|
||||
}
|
||||
|
||||
fn active_thread_state(&self, cx: &App) -> Option<ThreadStatus> {
|
||||
let session = self.0.read(cx).active_session()?;
|
||||
let thread = session.read(cx).running_state().read(cx).thread_id()?;
|
||||
session.read(cx).session(cx).read(cx).thread_state(thread)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use debugger_panel::{DebugPanel, ToggleFocus};
use editor::Editor;
use feature_flags::{DebuggerFeatureFlag, FeatureFlagViewExt};
use gpui::{App, EntityInputHandler, actions};
use new_session_modal::{NewSessionModal, NewSessionMode};
use new_process_modal::{NewProcessModal, NewProcessMode};
use project::debugger::{self, breakpoint_store::SourceBreakpoint};
use session::DebugSession;
use settings::Settings;
@@ -15,7 +15,7 @@ use workspace::{ItemHandle, ShutdownDebugAdapters, Workspace};
pub mod attach_modal;
pub mod debugger_panel;
mod dropdown_menus;
mod new_session_modal;
mod new_process_modal;
mod persistence;
pub(crate) mod session;
mod stack_trace_view;
@@ -49,6 +49,7 @@ actions!(
ToggleThreadPicker,
ToggleSessionPicker,
RerunLastSession,
ToggleExpandItem,
]
);

@@ -210,7 +211,7 @@ pub fn init(cx: &mut App) {
},
)
.register_action(|workspace: &mut Workspace, _: &Start, window, cx| {
NewSessionModal::show(workspace, window, NewSessionMode::Launch, None, cx);
NewProcessModal::show(workspace, window, NewProcessMode::Debug, None, cx);
})
.register_action(
|workspace: &mut Workspace, _: &RerunLastSession, window, cx| {
@@ -352,7 +353,7 @@ fn spawn_task_or_modal(
.detach_and_log_err(cx)
}
Spawn::ViaModal { reveal_target } => {
NewSessionModal::show(workspace, window, NewSessionMode::Task, *reveal_target, cx);
NewProcessModal::show(workspace, window, NewProcessMode::Task, *reveal_target, cx);
}
}
}

File diff suppressed because it is too large
@@ -8,7 +8,8 @@ pub mod variable_list;
|
||||
use std::{any::Any, ops::ControlFlow, path::PathBuf, sync::Arc, time::Duration};
|
||||
|
||||
use crate::{
|
||||
new_session_modal::resolve_path,
|
||||
ToggleExpandItem,
|
||||
new_process_modal::resolve_path,
|
||||
persistence::{self, DebuggerPaneItem, SerializedLayout},
|
||||
};
|
||||
|
||||
@@ -347,6 +348,7 @@ pub(crate) fn new_debugger_pane(
|
||||
false
|
||||
}
|
||||
})));
|
||||
pane.set_can_toggle_zoom(false, cx);
|
||||
pane.display_nav_history_buttons(None);
|
||||
pane.set_custom_drop_handle(cx, custom_drop_handle);
|
||||
pane.set_should_display_tab_bar(|_, _| true);
|
||||
@@ -472,17 +474,19 @@ pub(crate) fn new_debugger_pane(
|
||||
},
|
||||
)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.on_click(cx.listener(move |pane, _, window, cx| {
|
||||
pane.toggle_zoom(&workspace::ToggleZoom, window, cx);
|
||||
.on_click(cx.listener(move |pane, _, _, cx| {
|
||||
let is_zoomed = pane.is_zoomed();
|
||||
pane.set_zoomed(!is_zoomed, cx);
|
||||
cx.notify();
|
||||
}))
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
let zoomed_text =
|
||||
if zoomed { "Zoom Out" } else { "Zoom In" };
|
||||
if zoomed { "Minimize" } else { "Expand" };
|
||||
Tooltip::for_action_in(
|
||||
zoomed_text,
|
||||
&workspace::ToggleZoom,
|
||||
&ToggleExpandItem,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
@@ -566,7 +570,7 @@ impl RunningState {
}
}

pub(crate) fn relativlize_paths(
pub(crate) fn relativize_paths(
key: Option<&str>,
config: &mut serde_json::Value,
context: &TaskContext,
@@ -574,12 +578,12 @@ impl RunningState {
match config {
serde_json::Value::Object(obj) => {
obj.iter_mut()
.for_each(|(key, value)| Self::relativlize_paths(Some(key), value, context));
.for_each(|(key, value)| Self::relativize_paths(Some(key), value, context));
}
serde_json::Value::Array(array) => {
array
.iter_mut()
.for_each(|value| Self::relativlize_paths(None, value, context));
.for_each(|value| Self::relativize_paths(None, value, context));
}
serde_json::Value::String(s) if key == Some("program") || key == Some("cwd") => {
// Some built-in zed tasks wrap their arguments in quotes as they might contain spaces.
@@ -806,7 +810,7 @@ impl RunningState {
mut config,
tcp_connection,
} = scenario;
Self::relativlize_paths(None, &mut config, &task_context);
Self::relativize_paths(None, &mut config, &task_context);
Self::substitute_variables_in_config(&mut config, &task_context);

let request_type = dap_registry
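The rename above (`relativlize_paths` to `relativize_paths`) does not change the traversal shape: the method walks the scenario's JSON recursively and only rewrites string values stored under the "program" or "cwd" keys. A standalone sketch of that walk, with a hypothetical `rewrite` callback standing in for the path substitution the real method performs against the task context:

fn walk_paths(key: Option<&str>, config: &mut serde_json::Value, rewrite: &impl Fn(&str) -> String) {
    match config {
        serde_json::Value::Object(obj) => {
            // Recurse into every field, remembering the field name for the leaf check.
            obj.iter_mut()
                .for_each(|(key, value)| walk_paths(Some(key), value, rewrite));
        }
        serde_json::Value::Array(array) => {
            array.iter_mut().for_each(|value| walk_paths(None, value, rewrite));
        }
        // Only path-bearing keys are rewritten; every other string is left alone.
        serde_json::Value::String(s) if key == Some("program") || key == Some("cwd") => {
            *s = rewrite(s);
        }
        _ => {}
    }
}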
@@ -1260,18 +1264,6 @@ impl RunningState {
|
||||
Event::Focus => {
|
||||
this.active_pane = source_pane.clone();
|
||||
}
|
||||
Event::ZoomIn => {
|
||||
source_pane.update(cx, |pane, cx| {
|
||||
pane.set_zoomed(true, cx);
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
Event::ZoomOut => {
|
||||
source_pane.update(cx, |pane, cx| {
|
||||
pane.set_zoomed(false, cx);
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ use gpui::{
|
||||
use language::{Buffer, CodeLabel, ToOffset};
|
||||
use menu::Confirm;
|
||||
use project::{
|
||||
Completion,
|
||||
Completion, CompletionResponse,
|
||||
debugger::session::{CompletionsQuery, OutputToken, Session, SessionEvent},
|
||||
};
|
||||
use settings::Settings;
|
||||
@@ -262,9 +262,9 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
|
||||
_trigger: editor::CompletionContext,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let Some(console) = self.0.upgrade() else {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
|
||||
let support_completions = console
|
||||
@@ -322,7 +322,7 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
buffer: &Entity<Buffer>,
|
||||
buffer_position: language::Anchor,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let (variables, string_matches) = console.update(cx, |console, cx| {
|
||||
let mut variables = HashMap::default();
|
||||
let mut string_matches = Vec::default();
|
||||
@@ -354,39 +354,43 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
let query = buffer.read(cx).text();
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
const LIMIT: usize = 10;
|
||||
let matches = fuzzy::match_strings(
|
||||
&string_matches,
|
||||
&query,
|
||||
true,
|
||||
10,
|
||||
LIMIT,
|
||||
&Default::default(),
|
||||
cx.background_executor().clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
Ok(Some(
|
||||
matches
|
||||
.iter()
|
||||
.filter_map(|string_match| {
|
||||
let variable_value = variables.get(&string_match.string)?;
|
||||
let completions = matches
|
||||
.iter()
|
||||
.filter_map(|string_match| {
|
||||
let variable_value = variables.get(&string_match.string)?;
|
||||
|
||||
Some(project::Completion {
|
||||
replace_range: buffer_position..buffer_position,
|
||||
new_text: string_match.string.clone(),
|
||||
label: CodeLabel {
|
||||
filter_range: 0..string_match.string.len(),
|
||||
text: format!("{} {}", string_match.string, variable_value),
|
||||
runs: Vec::new(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: None,
|
||||
confirm: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
insert_text_mode: None,
|
||||
})
|
||||
Some(project::Completion {
|
||||
replace_range: buffer_position..buffer_position,
|
||||
new_text: string_match.string.clone(),
|
||||
label: CodeLabel {
|
||||
filter_range: 0..string_match.string.len(),
|
||||
text: format!("{} {}", string_match.string, variable_value),
|
||||
runs: Vec::new(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: None,
|
||||
confirm: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
insert_text_mode: None,
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>();

Ok(vec![project::CompletionResponse {
is_incomplete: completions.len() >= LIMIT,
completions,
}])
})
}

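The provider now returns `CompletionResponse` values instead of a bare `Option<Vec<Completion>>`, and it marks the response incomplete whenever the fuzzy matcher hit its `LIMIT`, signalling that the menu must re-query rather than keep filtering cached results. A small sketch of that flag's semantics (the struct here is a stand-in, not the real `project::CompletionResponse`):

struct CompletionResponse<T> {
    completions: Vec<T>,
    /// True when the result list was truncated, so the consumer should re-run
    /// the query as the user keeps typing instead of filtering the cached list.
    is_incomplete: bool,
}

fn respond<T>(mut matches: Vec<T>, limit: usize) -> CompletionResponse<T> {
    matches.truncate(limit);
    CompletionResponse {
        // Hitting the limit means there may be matches we never saw.
        is_incomplete: matches.len() >= limit,
        completions: matches,
    }
}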
@@ -396,7 +400,7 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
buffer: &Entity<Buffer>,
|
||||
buffer_position: language::Anchor,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let completion_task = console.update(cx, |console, cx| {
|
||||
console.session.update(cx, |state, cx| {
|
||||
let frame_id = console.stack_frame_list.read(cx).opened_stack_frame_id();
|
||||
@@ -411,53 +415,56 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
cx.background_executor().spawn(async move {
|
||||
let completions = completion_task.await?;
|
||||
|
||||
Ok(Some(
|
||||
completions
|
||||
.into_iter()
|
||||
.map(|completion| {
|
||||
let new_text = completion
|
||||
.text
|
||||
.as_ref()
|
||||
.unwrap_or(&completion.label)
|
||||
.to_owned();
|
||||
let buffer_text = snapshot.text();
|
||||
let buffer_bytes = buffer_text.as_bytes();
|
||||
let new_bytes = new_text.as_bytes();
|
||||
let completions = completions
|
||||
.into_iter()
|
||||
.map(|completion| {
|
||||
let new_text = completion
|
||||
.text
|
||||
.as_ref()
|
||||
.unwrap_or(&completion.label)
|
||||
.to_owned();
|
||||
let buffer_text = snapshot.text();
|
||||
let buffer_bytes = buffer_text.as_bytes();
|
||||
let new_bytes = new_text.as_bytes();
|
||||
|
||||
let mut prefix_len = 0;
|
||||
for i in (0..new_bytes.len()).rev() {
|
||||
if buffer_bytes.ends_with(&new_bytes[0..i]) {
|
||||
prefix_len = i;
|
||||
break;
|
||||
}
|
||||
let mut prefix_len = 0;
|
||||
for i in (0..new_bytes.len()).rev() {
|
||||
if buffer_bytes.ends_with(&new_bytes[0..i]) {
|
||||
prefix_len = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let buffer_offset = buffer_position.to_offset(&snapshot);
|
||||
let start = buffer_offset - prefix_len;
|
||||
let start = snapshot.clip_offset(start, Bias::Left);
|
||||
let start = snapshot.anchor_before(start);
|
||||
let replace_range = start..buffer_position;
|
||||
let buffer_offset = buffer_position.to_offset(&snapshot);
|
||||
let start = buffer_offset - prefix_len;
|
||||
let start = snapshot.clip_offset(start, Bias::Left);
|
||||
let start = snapshot.anchor_before(start);
|
||||
let replace_range = start..buffer_position;
|
||||
|
||||
project::Completion {
|
||||
replace_range,
|
||||
new_text,
|
||||
label: CodeLabel {
|
||||
filter_range: 0..completion.label.len(),
|
||||
text: completion.label,
|
||||
runs: Vec::new(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: None,
|
||||
confirm: None,
|
||||
source: project::CompletionSource::BufferWord {
|
||||
word_range: buffer_position..language::Anchor::MAX,
|
||||
resolved: false,
|
||||
},
|
||||
insert_text_mode: None,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
project::Completion {
|
||||
replace_range,
|
||||
new_text,
|
||||
label: CodeLabel {
|
||||
filter_range: 0..completion.label.len(),
|
||||
text: completion.label,
|
||||
runs: Vec::new(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: None,
|
||||
confirm: None,
|
||||
source: project::CompletionSource::BufferWord {
|
||||
word_range: buffer_position..language::Anchor::MAX,
|
||||
resolved: false,
|
||||
},
|
||||
insert_text_mode: None,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(vec![project::CompletionResponse {
completions,
is_incomplete: false,
}])
})
}
}

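To turn an adapter-supplied completion into a replace range, the code above looks for the longest prefix of the completion's text that is already a suffix of the buffer contents and anchors the replacement that many bytes before the cursor. The same overlap scan in isolation:

/// Length of the longest prefix of `new_text` that is already a suffix of `buffer_text`.
fn overlap_len(buffer_text: &str, new_text: &str) -> usize {
    let buffer_bytes = buffer_text.as_bytes();
    let new_bytes = new_text.as_bytes();
    // Walk from the longest candidate down, mirroring the loop in the hunk above.
    for i in (0..new_bytes.len()).rev() {
        if buffer_bytes.ends_with(&new_bytes[0..i]) {
            return i;
        }
    }
    0
}

fn main() {
    // Typing "console.lo" and accepting "log" should replace the trailing "lo".
    assert_eq!(overlap_len("console.lo", "log"), 2);
    assert_eq!(overlap_len("fn main", "println"), 0);
}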
@@ -25,7 +25,7 @@ mod inline_values;
#[cfg(test)]
mod module_list;
#[cfg(test)]
mod new_session_modal;
mod new_process_modal;
#[cfg(test)]
mod persistence;
#[cfg(test)]

@@ -1,13 +1,13 @@
|
||||
use dap::DapRegistry;
|
||||
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
|
||||
use project::{FakeFs, Fs, Project};
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use task::{DebugRequest, DebugScenario, LaunchRequest, TaskContext, VariableName, ZedDebugConfig};
|
||||
use util::path;
|
||||
|
||||
use crate::new_session_modal::NewSessionMode;
|
||||
// use crate::new_process_modal::NewProcessMode;
|
||||
use crate::tests::{init_test, init_test_workspace};
|
||||
|
||||
#[gpui::test]
|
||||
@@ -152,111 +152,111 @@ async fn test_debug_session_substitutes_variables_and_relativizes_paths(
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_save_debug_scenario_to_file(executor: BackgroundExecutor, cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
// #[gpui::test]
|
||||
// async fn test_save_debug_scenario_to_file(executor: BackgroundExecutor, cx: &mut TestAppContext) {
|
||||
// init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(executor.clone());
|
||||
fs.insert_tree(
|
||||
path!("/project"),
|
||||
json!({
|
||||
"main.rs": "fn main() {}"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
// let fs = FakeFs::new(executor.clone());
|
||||
// fs.insert_tree(
|
||||
// path!("/project"),
|
||||
// json!({
|
||||
// "main.rs": "fn main() {}"
|
||||
// }),
|
||||
// )
|
||||
// .await;
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
|
||||
let workspace = init_test_workspace(&project, cx).await;
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
// let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
|
||||
// let workspace = init_test_workspace(&project, cx).await;
|
||||
// let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
|
||||
workspace
|
||||
.update(cx, |workspace, window, cx| {
|
||||
crate::new_session_modal::NewSessionModal::show(
|
||||
workspace,
|
||||
window,
|
||||
NewSessionMode::Launch,
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.unwrap();
|
||||
// workspace
|
||||
// .update(cx, |workspace, window, cx| {
|
||||
// crate::new_process_modal::NewProcessModal::show(
|
||||
// workspace,
|
||||
// window,
|
||||
// NewProcessMode::Debug,
|
||||
// None,
|
||||
// cx,
|
||||
// );
|
||||
// })
|
||||
// .unwrap();
|
||||
|
||||
cx.run_until_parked();
|
||||
// cx.run_until_parked();
|
||||
|
||||
let modal = workspace
|
||||
.update(cx, |workspace, _, cx| {
|
||||
workspace.active_modal::<crate::new_session_modal::NewSessionModal>(cx)
|
||||
})
|
||||
.unwrap()
|
||||
.expect("Modal should be active");
|
||||
// let modal = workspace
|
||||
// .update(cx, |workspace, _, cx| {
|
||||
// workspace.active_modal::<crate::new_process_modal::NewProcessModal>(cx)
|
||||
// })
|
||||
// .unwrap()
|
||||
// .expect("Modal should be active");
|
||||
|
||||
modal.update_in(cx, |modal, window, cx| {
|
||||
modal.set_configure("/project/main", "/project", false, window, cx);
|
||||
modal.save_scenario(window, cx);
|
||||
});
|
||||
// modal.update_in(cx, |modal, window, cx| {
|
||||
// modal.set_configure("/project/main", "/project", false, window, cx);
|
||||
// modal.save_scenario(window, cx);
|
||||
// });
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
// cx.executor().run_until_parked();
|
||||
|
||||
let debug_json_content = fs
|
||||
.load(path!("/project/.zed/debug.json").as_ref())
|
||||
.await
|
||||
.expect("debug.json should exist");
|
||||
// let debug_json_content = fs
|
||||
// .load(path!("/project/.zed/debug.json").as_ref())
|
||||
// .await
|
||||
// .expect("debug.json should exist");
|
||||
|
||||
let expected_content = vec![
|
||||
"[",
|
||||
" {",
|
||||
r#" "adapter": "fake-adapter","#,
|
||||
r#" "label": "main (fake-adapter)","#,
|
||||
r#" "request": "launch","#,
|
||||
r#" "program": "/project/main","#,
|
||||
r#" "cwd": "/project","#,
|
||||
r#" "args": [],"#,
|
||||
r#" "env": {}"#,
|
||||
" }",
|
||||
"]",
|
||||
];
|
||||
// let expected_content = vec![
|
||||
// "[",
|
||||
// " {",
|
||||
// r#" "adapter": "fake-adapter","#,
|
||||
// r#" "label": "main (fake-adapter)","#,
|
||||
// r#" "request": "launch","#,
|
||||
// r#" "program": "/project/main","#,
|
||||
// r#" "cwd": "/project","#,
|
||||
// r#" "args": [],"#,
|
||||
// r#" "env": {}"#,
|
||||
// " }",
|
||||
// "]",
|
||||
// ];
|
||||
|
||||
let actual_lines: Vec<&str> = debug_json_content.lines().collect();
|
||||
pretty_assertions::assert_eq!(expected_content, actual_lines);
|
||||
// let actual_lines: Vec<&str> = debug_json_content.lines().collect();
|
||||
// pretty_assertions::assert_eq!(expected_content, actual_lines);
|
||||
|
||||
modal.update_in(cx, |modal, window, cx| {
|
||||
modal.set_configure("/project/other", "/project", true, window, cx);
|
||||
modal.save_scenario(window, cx);
|
||||
});
|
||||
// modal.update_in(cx, |modal, window, cx| {
|
||||
// modal.set_configure("/project/other", "/project", true, window, cx);
|
||||
// modal.save_scenario(window, cx);
|
||||
// });
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
// cx.executor().run_until_parked();
|
||||
|
||||
let debug_json_content = fs
|
||||
.load(path!("/project/.zed/debug.json").as_ref())
|
||||
.await
|
||||
.expect("debug.json should exist after second save");
|
||||
// let debug_json_content = fs
|
||||
// .load(path!("/project/.zed/debug.json").as_ref())
|
||||
// .await
|
||||
// .expect("debug.json should exist after second save");
|
||||
|
||||
let expected_content = vec![
|
||||
"[",
|
||||
" {",
|
||||
r#" "adapter": "fake-adapter","#,
|
||||
r#" "label": "main (fake-adapter)","#,
|
||||
r#" "request": "launch","#,
|
||||
r#" "program": "/project/main","#,
|
||||
r#" "cwd": "/project","#,
|
||||
r#" "args": [],"#,
|
||||
r#" "env": {}"#,
|
||||
" },",
|
||||
" {",
|
||||
r#" "adapter": "fake-adapter","#,
|
||||
r#" "label": "other (fake-adapter)","#,
|
||||
r#" "request": "launch","#,
|
||||
r#" "program": "/project/other","#,
|
||||
r#" "cwd": "/project","#,
|
||||
r#" "args": [],"#,
|
||||
r#" "env": {}"#,
|
||||
" }",
|
||||
"]",
|
||||
];
|
||||
// let expected_content = vec![
|
||||
// "[",
|
||||
// " {",
|
||||
// r#" "adapter": "fake-adapter","#,
|
||||
// r#" "label": "main (fake-adapter)","#,
|
||||
// r#" "request": "launch","#,
|
||||
// r#" "program": "/project/main","#,
|
||||
// r#" "cwd": "/project","#,
|
||||
// r#" "args": [],"#,
|
||||
// r#" "env": {}"#,
|
||||
// " },",
|
||||
// " {",
|
||||
// r#" "adapter": "fake-adapter","#,
|
||||
// r#" "label": "other (fake-adapter)","#,
|
||||
// r#" "request": "launch","#,
|
||||
// r#" "program": "/project/other","#,
|
||||
// r#" "cwd": "/project","#,
|
||||
// r#" "args": [],"#,
|
||||
// r#" "env": {}"#,
|
||||
// " }",
|
||||
// "]",
|
||||
// ];
|
||||
|
||||
let actual_lines: Vec<&str> = debug_json_content.lines().collect();
|
||||
pretty_assertions::assert_eq!(expected_content, actual_lines);
|
||||
}
|
||||
// let actual_lines: Vec<&str> = debug_json_content.lines().collect();
|
||||
// pretty_assertions::assert_eq!(expected_content, actual_lines);
|
||||
// }
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dap_adapter_config_conversion_and_validation(cx: &mut TestAppContext) {
|
||||
@@ -1,9 +1,8 @@
|
||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::{
|
||||
AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollStrategy, SharedString,
|
||||
Size, StrikethroughStyle, StyledText, UniformListScrollHandle, div, px, uniform_list,
|
||||
Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px, uniform_list,
|
||||
};
|
||||
use gpui::{AsyncWindowContext, WeakEntity};
|
||||
use itertools::Itertools;
|
||||
use language::CodeLabel;
|
||||
use language::{Buffer, LanguageName, LanguageRegistry};
|
||||
@@ -18,6 +17,7 @@ use task::TaskContext;
|
||||
|
||||
use std::collections::VecDeque;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
cmp::{Reverse, min},
|
||||
@@ -47,15 +47,10 @@ pub const MENU_ASIDE_MAX_WIDTH: Pixels = px(500.);
// Constants for the markdown cache. The purpose of this cache is to reduce flickering due to
// documentation not yet being parsed.
//
// The size of the cache is set to the number of items fetched around the current selection plus one
// for the current selection and another to avoid cases where and adjacent selection exits the
// cache. The only current benefit of a larger cache would be doing less markdown parsing when the
// selection revisits items.
//
// One future benefit of a larger cache would be reducing flicker on backspace. This would require
// not recreating the menu on every change, by not re-querying the language server when
// `is_incomplete = false`.
const MARKDOWN_CACHE_MAX_SIZE: usize = MARKDOWN_CACHE_BEFORE_ITEMS + MARKDOWN_CACHE_AFTER_ITEMS + 2;
// The size of the cache is set to 16, which is roughly 3 times more than the number of items
// fetched around the current selection. This way documentation is more often ready for render when
// revisiting previous entries, such as when pressing backspace.
const MARKDOWN_CACHE_MAX_SIZE: usize = 16;
const MARKDOWN_CACHE_BEFORE_ITEMS: usize = 2;
const MARKDOWN_CACHE_AFTER_ITEMS: usize = 2;

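The cache described above is a fixed-capacity `VecDeque` used as a ring buffer: while there is room, entries are pushed to the front; once full, the back slot is rotated to the front and overwritten in place. A minimal sketch of that insert-or-recycle pattern, with plain strings standing in for the `(MarkdownCacheKey, Entity<Markdown>)` pairs:

use std::collections::VecDeque;

const CACHE_MAX_SIZE: usize = 16;

/// Push `entry` to the front, recycling the back slot once the cache is full.
fn insert_front(cache: &mut VecDeque<String>, entry: String) {
    if cache.len() < CACHE_MAX_SIZE {
        cache.push_front(entry);
    } else {
        // The ring buffer is full: rotating moves the last slot to index 0
        // without shifting the other elements, and we overwrite it in place.
        cache.rotate_right(1);
        cache[0] = entry;
    }
}

fn main() {
    let mut cache = VecDeque::with_capacity(CACHE_MAX_SIZE);
    for i in 0..20 {
        insert_front(&mut cache, format!("doc {i}"));
    }
    assert_eq!(cache.len(), CACHE_MAX_SIZE);
    assert_eq!(cache[0], "doc 19");
}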
@@ -197,27 +192,48 @@ pub enum ContextMenuOrigin {
QuickActionBar,
}

#[derive(Clone)]
pub struct CompletionsMenu {
pub id: CompletionId,
sort_completions: bool,
pub initial_position: Anchor,
pub initial_query: Option<Arc<String>>,
pub is_incomplete: bool,
pub buffer: Entity<Buffer>,
pub completions: Rc<RefCell<Box<[Completion]>>>,
match_candidates: Rc<[StringMatchCandidate]>,
pub entries: Rc<RefCell<Vec<StringMatch>>>,
match_candidates: Arc<[StringMatchCandidate]>,
pub entries: Rc<RefCell<Box<[StringMatch]>>>,
pub selected_item: usize,
filter_task: Task<()>,
cancel_filter: Arc<AtomicBool>,
scroll_handle: UniformListScrollHandle,
resolve_completions: bool,
show_completion_documentation: bool,
pub(super) ignore_completion_provider: bool,
last_rendered_range: Rc<RefCell<Option<Range<usize>>>>,
markdown_cache: Rc<RefCell<VecDeque<(usize, Entity<Markdown>)>>>,
markdown_cache: Rc<RefCell<VecDeque<(MarkdownCacheKey, Entity<Markdown>)>>>,
language_registry: Option<Arc<LanguageRegistry>>,
language: Option<LanguageName>,
snippet_sort_order: SnippetSortOrder,
}

#[derive(Clone, Debug, PartialEq)]
enum MarkdownCacheKey {
ForCandidate {
candidate_id: usize,
},
ForCompletionMatch {
new_text: String,
markdown_source: SharedString,
},
}

// TODO: There should really be a wrapper around fuzzy match tasks that does this.
impl Drop for CompletionsMenu {
fn drop(&mut self) {
self.cancel_filter.store(true, Ordering::Relaxed);
}
}

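The `Drop` impl above is the entire cancellation story for in-flight filtering: the menu and the background fuzzy-match task share an `Arc<AtomicBool>`, the task polls the flag while it works, and dropping the menu flips it. A thread-based sketch of the same pattern (the real code hands the flag to `fuzzy::match_strings` instead of checking it by hand):

use std::sync::{
    Arc,
    atomic::{AtomicBool, Ordering},
};

struct FilterHandle {
    cancel: Arc<AtomicBool>,
}

impl Drop for FilterHandle {
    fn drop(&mut self) {
        // Dropping the handle asks the worker to stop at its next check.
        self.cancel.store(true, Ordering::Relaxed);
    }
}

fn spawn_filter(items: Vec<String>, query: String) -> FilterHandle {
    let cancel = Arc::new(AtomicBool::new(false));
    let flag = cancel.clone();
    std::thread::spawn(move || {
        for item in items {
            if flag.load(Ordering::Relaxed) {
                return; // Abandon stale work once the menu is gone.
            }
            let _matched = item.contains(&query); // Stand-in for real fuzzy scoring.
        }
    });
    FilterHandle { cancel }
}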
impl CompletionsMenu {
|
||||
pub fn new(
|
||||
id: CompletionId,
|
||||
@@ -225,6 +241,8 @@ impl CompletionsMenu {
|
||||
show_completion_documentation: bool,
|
||||
ignore_completion_provider: bool,
|
||||
initial_position: Anchor,
|
||||
initial_query: Option<Arc<String>>,
|
||||
is_incomplete: bool,
|
||||
buffer: Entity<Buffer>,
|
||||
completions: Box<[Completion]>,
|
||||
snippet_sort_order: SnippetSortOrder,
|
||||
@@ -242,17 +260,21 @@ impl CompletionsMenu {
|
||||
id,
|
||||
sort_completions,
|
||||
initial_position,
|
||||
initial_query,
|
||||
is_incomplete,
|
||||
buffer,
|
||||
show_completion_documentation,
|
||||
ignore_completion_provider,
|
||||
completions: RefCell::new(completions).into(),
|
||||
match_candidates,
|
||||
entries: RefCell::new(Vec::new()).into(),
|
||||
entries: Rc::new(RefCell::new(Box::new([]))),
|
||||
selected_item: 0,
|
||||
filter_task: Task::ready(()),
|
||||
cancel_filter: Arc::new(AtomicBool::new(false)),
|
||||
scroll_handle: UniformListScrollHandle::new(),
|
||||
resolve_completions: true,
|
||||
last_rendered_range: RefCell::new(None).into(),
|
||||
markdown_cache: RefCell::new(VecDeque::with_capacity(MARKDOWN_CACHE_MAX_SIZE)).into(),
|
||||
markdown_cache: RefCell::new(VecDeque::new()).into(),
|
||||
language_registry,
|
||||
language,
|
||||
snippet_sort_order,
|
||||
@@ -303,16 +325,20 @@ impl CompletionsMenu {
|
||||
positions: vec![],
|
||||
string: completion.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
.collect();
|
||||
Self {
|
||||
id,
|
||||
sort_completions,
|
||||
initial_position: selection.start,
|
||||
initial_query: None,
|
||||
is_incomplete: false,
|
||||
buffer,
|
||||
completions: RefCell::new(completions).into(),
|
||||
match_candidates,
|
||||
entries: RefCell::new(entries).into(),
|
||||
selected_item: 0,
|
||||
filter_task: Task::ready(()),
|
||||
cancel_filter: Arc::new(AtomicBool::new(false)),
|
||||
scroll_handle: UniformListScrollHandle::new(),
|
||||
resolve_completions: false,
|
||||
show_completion_documentation: false,
|
||||
@@ -390,14 +416,7 @@ impl CompletionsMenu {
|
||||
) {
|
||||
if self.selected_item != match_index {
|
||||
self.selected_item = match_index;
|
||||
self.scroll_handle
|
||||
.scroll_to_item(self.selected_item, ScrollStrategy::Top);
|
||||
self.resolve_visible_completions(provider, cx);
|
||||
self.start_markdown_parse_for_nearby_entries(cx);
|
||||
if let Some(provider) = provider {
|
||||
self.handle_selection_changed(provider, window, cx);
|
||||
}
|
||||
cx.notify();
|
||||
self.handle_selection_changed(provider, window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -418,18 +437,25 @@ impl CompletionsMenu {
|
||||
}
|
||||
|
||||
fn handle_selection_changed(
|
||||
&self,
|
||||
provider: &dyn CompletionProvider,
|
||||
&mut self,
|
||||
provider: Option<&dyn CompletionProvider>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
let entries = self.entries.borrow();
|
||||
let entry = if self.selected_item < entries.len() {
|
||||
Some(&entries[self.selected_item])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
provider.selection_changed(entry, window, cx);
|
||||
self.scroll_handle
|
||||
.scroll_to_item(self.selected_item, ScrollStrategy::Top);
|
||||
if let Some(provider) = provider {
|
||||
let entries = self.entries.borrow();
|
||||
let entry = if self.selected_item < entries.len() {
|
||||
Some(&entries[self.selected_item])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
provider.selection_changed(entry, window, cx);
|
||||
}
|
||||
self.resolve_visible_completions(provider, cx);
|
||||
self.start_markdown_parse_for_nearby_entries(cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn resolve_visible_completions(
|
||||
@@ -444,6 +470,19 @@ impl CompletionsMenu {
|
||||
return;
|
||||
};
|
||||
|
||||
let entries = self.entries.borrow();
|
||||
if entries.is_empty() {
|
||||
return;
|
||||
}
|
||||
if self.selected_item >= entries.len() {
|
||||
log::error!(
|
||||
"bug: completion selected_item >= entries.len(): {} >= {}",
|
||||
self.selected_item,
|
||||
entries.len()
|
||||
);
|
||||
self.selected_item = entries.len() - 1;
|
||||
}
|
||||
|
||||
// Attempt to resolve completions for every item that will be displayed. This matters
|
||||
// because single line documentation may be displayed inline with the completion.
|
||||
//
|
||||
@@ -455,7 +494,6 @@ impl CompletionsMenu {
|
||||
let visible_count = last_rendered_range
|
||||
.clone()
|
||||
.map_or(APPROXIMATE_VISIBLE_COUNT, |range| range.count());
|
||||
let entries = self.entries.borrow();
|
||||
let entry_range = if self.selected_item == 0 {
|
||||
0..min(visible_count, entries.len())
|
||||
} else if self.selected_item == entries.len() - 1 {
|
||||
@@ -508,11 +546,11 @@ impl CompletionsMenu {
|
||||
.update(cx, |editor, cx| {
|
||||
// `resolve_completions` modified state affecting display.
|
||||
cx.notify();
|
||||
editor.with_completions_menu_matching_id(
|
||||
completion_id,
|
||||
|| (),
|
||||
|this| this.start_markdown_parse_for_nearby_entries(cx),
|
||||
);
|
||||
editor.with_completions_menu_matching_id(completion_id, |menu| {
|
||||
if let Some(menu) = menu {
|
||||
menu.start_markdown_parse_for_nearby_entries(cx)
|
||||
}
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
@@ -548,11 +586,11 @@ impl CompletionsMenu {
|
||||
return None;
|
||||
}
|
||||
let candidate_id = entries[index].candidate_id;
|
||||
match &self.completions.borrow()[candidate_id].documentation {
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) if !source.is_empty() => Some(
|
||||
self.get_or_create_markdown(candidate_id, source.clone(), false, cx)
|
||||
.1,
|
||||
),
|
||||
let completions = self.completions.borrow();
|
||||
match &completions[candidate_id].documentation {
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) if !source.is_empty() => self
|
||||
.get_or_create_markdown(candidate_id, Some(source), false, &completions, cx)
|
||||
.map(|(_, markdown)| markdown),
|
||||
Some(_) => None,
|
||||
_ => None,
|
||||
}
|
||||
@@ -561,38 +599,75 @@ impl CompletionsMenu {
|
||||
fn get_or_create_markdown(
|
||||
&self,
|
||||
candidate_id: usize,
|
||||
source: SharedString,
|
||||
source: Option<&SharedString>,
|
||||
is_render: bool,
|
||||
completions: &[Completion],
|
||||
cx: &mut Context<Editor>,
|
||||
) -> (bool, Entity<Markdown>) {
|
||||
) -> Option<(bool, Entity<Markdown>)> {
|
||||
let mut markdown_cache = self.markdown_cache.borrow_mut();
|
||||
if let Some((cache_index, (_, markdown))) = markdown_cache
|
||||
.iter()
|
||||
.find_position(|(id, _)| *id == candidate_id)
|
||||
{
|
||||
let markdown = if is_render && cache_index != 0 {
|
||||
|
||||
let mut has_completion_match_cache_entry = false;
|
||||
let mut matching_entry = markdown_cache.iter().find_position(|(key, _)| match key {
|
||||
MarkdownCacheKey::ForCandidate { candidate_id: id } => *id == candidate_id,
|
||||
MarkdownCacheKey::ForCompletionMatch { .. } => {
|
||||
has_completion_match_cache_entry = true;
|
||||
false
|
||||
}
|
||||
});
|
||||
|
||||
if has_completion_match_cache_entry && matching_entry.is_none() {
|
||||
if let Some(source) = source {
|
||||
matching_entry = markdown_cache.iter().find_position(|(key, _)| {
|
||||
matches!(key, MarkdownCacheKey::ForCompletionMatch { markdown_source, .. }
|
||||
if markdown_source == source)
|
||||
});
|
||||
} else {
|
||||
// Heuristic guess that documentation can be reused when new_text matches. This is
|
||||
// to mitigate documentation flicker while typing. If this is wrong, then resolution
|
||||
// should cause the correct documentation to be displayed soon.
|
||||
let completion = &completions[candidate_id];
|
||||
matching_entry = markdown_cache.iter().find_position(|(key, _)| {
|
||||
matches!(key, MarkdownCacheKey::ForCompletionMatch { new_text, .. }
|
||||
if new_text == &completion.new_text)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((cache_index, (key, markdown))) = matching_entry {
|
||||
let markdown = markdown.clone();
|
||||
|
||||
// Since the markdown source matches, the key can now be ForCandidate.
|
||||
if source.is_some() && matches!(key, MarkdownCacheKey::ForCompletionMatch { .. }) {
|
||||
markdown_cache[cache_index].0 = MarkdownCacheKey::ForCandidate { candidate_id };
|
||||
}
|
||||
|
||||
if is_render && cache_index != 0 {
|
||||
// Move the current selection's cache entry to the front.
|
||||
markdown_cache.rotate_right(1);
|
||||
let cache_len = markdown_cache.len();
|
||||
markdown_cache.swap(0, (cache_index + 1) % cache_len);
|
||||
&markdown_cache[0].1
|
||||
} else {
|
||||
markdown
|
||||
};
|
||||
}
|
||||
|
||||
let is_parsing = markdown.update(cx, |markdown, cx| {
|
||||
// `reset` is called as it's possible for documentation to change due to resolve
|
||||
// requests. It does nothing if `source` is unchanged.
|
||||
markdown.reset(source, cx);
|
||||
if let Some(source) = source {
|
||||
// `reset` is called as it's possible for documentation to change due to resolve
|
||||
// requests. It does nothing if `source` is unchanged.
|
||||
markdown.reset(source.clone(), cx);
|
||||
}
|
||||
markdown.is_parsing()
|
||||
});
|
||||
return (is_parsing, markdown.clone());
|
||||
return Some((is_parsing, markdown));
|
||||
}
|
||||
|
||||
let Some(source) = source else {
|
||||
// Can't create markdown as there is no source.
|
||||
return None;
|
||||
};
|
||||
|
||||
if markdown_cache.len() < MARKDOWN_CACHE_MAX_SIZE {
|
||||
let markdown = cx.new(|cx| {
|
||||
Markdown::new(
|
||||
source,
|
||||
source.clone(),
|
||||
self.language_registry.clone(),
|
||||
self.language.clone(),
|
||||
cx,
|
||||
@@ -601,17 +676,20 @@ impl CompletionsMenu {
|
||||
// Handles redraw when the markdown is done parsing. The current render is for a
|
||||
// deferred draw, and so without this did not redraw when `markdown` notified.
|
||||
cx.observe(&markdown, |_, _, cx| cx.notify()).detach();
|
||||
markdown_cache.push_front((candidate_id, markdown.clone()));
|
||||
(true, markdown)
|
||||
markdown_cache.push_front((
|
||||
MarkdownCacheKey::ForCandidate { candidate_id },
|
||||
markdown.clone(),
|
||||
));
|
||||
Some((true, markdown))
|
||||
} else {
|
||||
debug_assert_eq!(markdown_cache.capacity(), MARKDOWN_CACHE_MAX_SIZE);
|
||||
// Moves the last cache entry to the start. The ring buffer is full, so this does no
|
||||
// copying and just shifts indexes.
|
||||
markdown_cache.rotate_right(1);
|
||||
markdown_cache[0].0 = candidate_id;
|
||||
markdown_cache[0].0 = MarkdownCacheKey::ForCandidate { candidate_id };
|
||||
let markdown = &markdown_cache[0].1;
|
||||
markdown.update(cx, |markdown, cx| markdown.reset(source, cx));
|
||||
(true, markdown.clone())
|
||||
markdown.update(cx, |markdown, cx| markdown.reset(source.clone(), cx));
|
||||
Some((true, markdown.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -774,37 +852,46 @@ impl CompletionsMenu {
|
||||
}
|
||||
|
||||
let mat = &self.entries.borrow()[self.selected_item];
|
||||
let multiline_docs = match self.completions.borrow_mut()[mat.candidate_id]
|
||||
.documentation
|
||||
.as_ref()?
|
||||
{
|
||||
CompletionDocumentation::MultiLinePlainText(text) => div().child(text.clone()),
|
||||
CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
let completions = self.completions.borrow_mut();
|
||||
let multiline_docs = match completions[mat.candidate_id].documentation.as_ref() {
|
||||
Some(CompletionDocumentation::MultiLinePlainText(text)) => div().child(text.clone()),
|
||||
Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
plain_text: Some(text),
|
||||
..
|
||||
} => div().child(text.clone()),
|
||||
CompletionDocumentation::MultiLineMarkdown(source) if !source.is_empty() => {
|
||||
let (is_parsing, markdown) =
|
||||
self.get_or_create_markdown(mat.candidate_id, source.clone(), true, cx);
|
||||
if is_parsing {
|
||||
}) => div().child(text.clone()),
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) if !source.is_empty() => {
|
||||
let Some((false, markdown)) = self.get_or_create_markdown(
|
||||
mat.candidate_id,
|
||||
Some(source),
|
||||
true,
|
||||
&completions,
|
||||
cx,
|
||||
) else {
|
||||
return None;
|
||||
}
|
||||
div().child(
|
||||
MarkdownElement::new(markdown, hover_markdown_style(window, cx))
|
||||
.code_block_renderer(markdown::CodeBlockRenderer::Default {
|
||||
copy_button: false,
|
||||
copy_button_on_hover: false,
|
||||
border: false,
|
||||
})
|
||||
.on_url_click(open_markdown_url),
|
||||
)
|
||||
};
|
||||
Self::render_markdown(markdown, window, cx)
|
||||
}
|
||||
CompletionDocumentation::MultiLineMarkdown(_) => return None,
|
||||
CompletionDocumentation::SingleLine(_) => return None,
|
||||
CompletionDocumentation::Undocumented => return None,
|
||||
CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
plain_text: None, ..
|
||||
} => {
|
||||
None => {
|
||||
// Handle the case where documentation hasn't yet been resolved but there's a
|
||||
// `new_text` match in the cache.
|
||||
//
|
||||
// TODO: It's inconsistent that documentation caching based on matching `new_text`
|
||||
// only works for markdown. Consider generally caching the results of resolving
|
||||
// completions.
|
||||
let Some((false, markdown)) =
|
||||
self.get_or_create_markdown(mat.candidate_id, None, true, &completions, cx)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Self::render_markdown(markdown, window, cx)
|
||||
}
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(_)) => return None,
|
||||
Some(CompletionDocumentation::SingleLine(_)) => return None,
|
||||
Some(CompletionDocumentation::Undocumented) => return None,
|
||||
Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
plain_text: None,
|
||||
..
|
||||
}) => {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
@@ -824,6 +911,177 @@ impl CompletionsMenu {
|
||||
)
|
||||
}
|
||||
|
||||
fn render_markdown(
|
||||
markdown: Entity<Markdown>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Div {
|
||||
div().child(
|
||||
MarkdownElement::new(markdown, hover_markdown_style(window, cx))
|
||||
.code_block_renderer(markdown::CodeBlockRenderer::Default {
|
||||
copy_button: false,
|
||||
copy_button_on_hover: false,
|
||||
border: false,
|
||||
})
|
||||
.on_url_click(open_markdown_url),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn filter(
|
||||
&mut self,
|
||||
query: Option<Arc<String>>,
|
||||
provider: Option<Rc<dyn CompletionProvider>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
self.cancel_filter.store(true, Ordering::Relaxed);
|
||||
if let Some(query) = query {
|
||||
self.cancel_filter = Arc::new(AtomicBool::new(false));
|
||||
let matches = self.do_async_filtering(query, cx);
|
||||
let id = self.id;
|
||||
self.filter_task = cx.spawn_in(window, async move |editor, cx| {
|
||||
let matches = matches.await;
|
||||
editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
editor.with_completions_menu_matching_id(id, |this| {
|
||||
if let Some(this) = this {
|
||||
this.set_filter_results(matches, provider, window, cx);
|
||||
}
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
} else {
|
||||
self.filter_task = Task::ready(());
|
||||
let matches = self.unfiltered_matches();
|
||||
self.set_filter_results(matches, provider, window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn do_async_filtering(
|
||||
&self,
|
||||
query: Arc<String>,
|
||||
cx: &Context<Editor>,
|
||||
) -> Task<Vec<StringMatch>> {
|
||||
let matches_task = cx.background_spawn({
|
||||
let query = query.clone();
|
||||
let match_candidates = self.match_candidates.clone();
|
||||
let cancel_filter = self.cancel_filter.clone();
|
||||
let background_executor = cx.background_executor().clone();
|
||||
async move {
|
||||
fuzzy::match_strings(
|
||||
&match_candidates,
|
||||
&query,
|
||||
query.chars().any(|c| c.is_uppercase()),
|
||||
100,
|
||||
&cancel_filter,
|
||||
background_executor,
|
||||
)
|
||||
.await
|
||||
}
|
||||
});
|
||||
|
||||
let completions = self.completions.clone();
|
||||
let sort_completions = self.sort_completions;
|
||||
let snippet_sort_order = self.snippet_sort_order;
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let mut matches = matches_task.await;
|
||||
|
||||
if sort_completions {
|
||||
matches = Self::sort_string_matches(
|
||||
matches,
|
||||
Some(&query),
|
||||
snippet_sort_order,
|
||||
completions.borrow().as_ref(),
|
||||
);
|
||||
}
|
||||
|
||||
matches
|
||||
})
|
||||
}
|
||||
|
||||
/// Like `do_async_filtering` but there is no filter query, so no need to spawn tasks.
|
||||
pub fn unfiltered_matches(&self) -> Vec<StringMatch> {
|
||||
let mut matches = self
|
||||
.match_candidates
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(candidate_id, candidate)| StringMatch {
|
||||
candidate_id,
|
||||
score: Default::default(),
|
||||
positions: Default::default(),
|
||||
string: candidate.string.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
if self.sort_completions {
|
||||
matches = Self::sort_string_matches(
|
||||
matches,
|
||||
None,
|
||||
self.snippet_sort_order,
|
||||
self.completions.borrow().as_ref(),
|
||||
);
|
||||
}
|
||||
|
||||
matches
|
||||
}
|
||||
|
||||
pub fn set_filter_results(
|
||||
&mut self,
|
||||
matches: Vec<StringMatch>,
|
||||
provider: Option<Rc<dyn CompletionProvider>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
*self.entries.borrow_mut() = matches.into_boxed_slice();
|
||||
self.selected_item = 0;
|
||||
self.handle_selection_changed(provider.as_deref(), window, cx);
|
||||
}
|
||||
|
||||
fn sort_string_matches(
|
||||
matches: Vec<StringMatch>,
|
||||
query: Option<&str>,
|
||||
snippet_sort_order: SnippetSortOrder,
|
||||
completions: &[Completion],
|
||||
) -> Vec<StringMatch> {
|
||||
let mut sortable_items: Vec<SortableMatch<'_>> = matches
|
||||
.into_iter()
|
||||
.map(|string_match| {
|
||||
let completion = &completions[string_match.candidate_id];
|
||||
|
||||
let is_snippet = matches!(
|
||||
&completion.source,
|
||||
CompletionSource::Lsp { lsp_completion, .. }
|
||||
if lsp_completion.kind == Some(CompletionItemKind::SNIPPET)
|
||||
);
|
||||
|
||||
let sort_text =
|
||||
if let CompletionSource::Lsp { lsp_completion, .. } = &completion.source {
|
||||
lsp_completion.sort_text.as_deref()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let (sort_kind, sort_label) = completion.sort_key();
|
||||
|
||||
SortableMatch {
|
||||
string_match,
|
||||
is_snippet,
|
||||
sort_text,
|
||||
sort_kind,
|
||||
sort_label,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self::sort_matches(&mut sortable_items, query, snippet_sort_order);
|
||||
|
||||
sortable_items
|
||||
.into_iter()
|
||||
.map(|sortable| sortable.string_match)
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn sort_matches(
|
||||
matches: &mut Vec<SortableMatch<'_>>,
|
||||
query: Option<&str>,
|
||||
@@ -857,6 +1115,7 @@ impl CompletionsMenu {
|
||||
let fuzzy_bracket_threshold = max_score * (3.0 / 5.0);
|
||||
|
||||
let query_start_lower = query
|
||||
.as_ref()
|
||||
.and_then(|q| q.chars().next())
|
||||
.and_then(|c| c.to_lowercase().next());
|
||||
|
||||
@@ -890,6 +1149,7 @@ impl CompletionsMenu {
|
||||
};
|
||||
let sort_mixed_case_prefix_length = Reverse(
|
||||
query
|
||||
.as_ref()
|
||||
.map(|q| {
|
||||
q.chars()
|
||||
.zip(mat.string_match.string.chars())
|
||||
@@ -920,97 +1180,32 @@ impl CompletionsMenu {
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn filter(
|
||||
&mut self,
|
||||
query: Option<&str>,
|
||||
provider: Option<Rc<dyn CompletionProvider>>,
|
||||
editor: WeakEntity<Editor>,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) {
|
||||
let mut matches = if let Some(query) = query {
|
||||
fuzzy::match_strings(
|
||||
&self.match_candidates,
|
||||
query,
|
||||
query.chars().any(|c| c.is_uppercase()),
|
||||
100,
|
||||
&Default::default(),
|
||||
cx.background_executor().clone(),
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
self.match_candidates
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(candidate_id, candidate)| StringMatch {
|
||||
candidate_id,
|
||||
score: Default::default(),
|
||||
positions: Default::default(),
|
||||
string: candidate.string.clone(),
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
pub fn preserve_markdown_cache(&mut self, prev_menu: CompletionsMenu) {
|
||||
self.markdown_cache = prev_menu.markdown_cache.clone();
|
||||
|
||||
if self.sort_completions {
|
||||
let completions = self.completions.borrow();
|
||||
|
||||
let mut sortable_items: Vec<SortableMatch<'_>> = matches
|
||||
.into_iter()
|
||||
.map(|string_match| {
|
||||
let completion = &completions[string_match.candidate_id];
|
||||
|
||||
let is_snippet = matches!(
|
||||
&completion.source,
|
||||
CompletionSource::Lsp { lsp_completion, .. }
|
||||
if lsp_completion.kind == Some(CompletionItemKind::SNIPPET)
|
||||
);
|
||||
|
||||
let sort_text =
|
||||
if let CompletionSource::Lsp { lsp_completion, .. } = &completion.source {
|
||||
lsp_completion.sort_text.as_deref()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let (sort_kind, sort_label) = completion.sort_key();
|
||||
|
||||
SortableMatch {
|
||||
string_match,
|
||||
is_snippet,
|
||||
sort_text,
|
||||
sort_kind,
|
||||
sort_label,
|
||||
// Convert ForCandidate cache keys to ForCompletionMatch keys.
|
||||
let prev_completions = prev_menu.completions.borrow();
|
||||
self.markdown_cache
|
||||
.borrow_mut()
|
||||
.retain_mut(|(key, _markdown)| match key {
|
||||
MarkdownCacheKey::ForCompletionMatch { .. } => true,
|
||||
MarkdownCacheKey::ForCandidate { candidate_id } => {
|
||||
if let Some(completion) = prev_completions.get(*candidate_id) {
|
||||
match &completion.documentation {
|
||||
Some(CompletionDocumentation::MultiLineMarkdown(source)) => {
|
||||
*key = MarkdownCacheKey::ForCompletionMatch {
|
||||
new_text: completion.new_text.clone(),
|
||||
markdown_source: source.clone(),
|
||||
};
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self::sort_matches(&mut sortable_items, query, self.snippet_sort_order);
|
||||
|
||||
matches = sortable_items
|
||||
.into_iter()
|
||||
.map(|sortable| sortable.string_match)
|
||||
.collect();
|
||||
}
|
||||
|
||||
*self.entries.borrow_mut() = matches;
|
||||
self.selected_item = 0;
|
||||
// This keeps the display consistent when y_flipped.
|
||||
self.scroll_handle.scroll_to_item(0, ScrollStrategy::Top);
|
||||
|
||||
if let Some(provider) = provider {
|
||||
cx.update(|window, cx| {
|
||||
// Since this is async, it's possible the menu has been closed and possibly even
|
||||
// another opened. `provider.selection_changed` should not be called in this case.
|
||||
let this_menu_still_active = editor
|
||||
.read_with(cx, |editor, _cx| {
|
||||
editor.with_completions_menu_matching_id(self.id, || false, |_| true)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
if this_menu_still_active {
|
||||
self.handle_selection_changed(&*provider, window, cx);
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -123,7 +123,7 @@ use markdown::Markdown;
|
||||
use mouse_context_menu::MouseContextMenu;
|
||||
use persistence::DB;
|
||||
use project::{
|
||||
BreakpointWithPosition, ProjectPath,
|
||||
BreakpointWithPosition, CompletionResponse, ProjectPath,
|
||||
debugger::{
|
||||
breakpoint_store::{
|
||||
BreakpointEditAction, BreakpointSessionState, BreakpointState, BreakpointStore,
|
||||
@@ -932,6 +932,7 @@ pub struct Editor {
|
||||
/// typing enters text into each of them, even the ones that aren't focused.
|
||||
pub(crate) show_cursor_when_unfocused: bool,
|
||||
columnar_selection_tail: Option<Anchor>,
|
||||
columnar_display_point: Option<DisplayPoint>,
|
||||
add_selections_state: Option<AddSelectionsState>,
|
||||
select_next_state: Option<SelectNextState>,
|
||||
select_prev_state: Option<SelectNextState>,
|
||||
@@ -986,7 +987,7 @@ pub struct Editor {
|
||||
context_menu: RefCell<Option<CodeContextMenu>>,
|
||||
context_menu_options: Option<ContextMenuOptions>,
|
||||
mouse_context_menu: Option<MouseContextMenu>,
|
||||
completion_tasks: Vec<(CompletionId, Task<Option<()>>)>,
|
||||
completion_tasks: Vec<(CompletionId, Task<()>)>,
|
||||
inline_blame_popover: Option<InlineBlamePopover>,
|
||||
signature_help_state: SignatureHelpState,
|
||||
auto_signature_help: Option<bool>,
|
||||
@@ -1199,7 +1200,7 @@ impl Default for SelectionHistoryMode {
|
||||
|
||||
struct DeferredSelectionEffectsState {
|
||||
changed: bool,
|
||||
show_completions: bool,
|
||||
should_update_completions: bool,
|
||||
autoscroll: Option<Autoscroll>,
|
||||
old_cursor_position: Anchor,
|
||||
history_entry: SelectionHistoryEntry,
|
||||
@@ -1797,6 +1798,7 @@ impl Editor {
|
||||
selections,
|
||||
scroll_manager: ScrollManager::new(cx),
|
||||
columnar_selection_tail: None,
|
||||
columnar_display_point: None,
|
||||
add_selections_state: None,
|
||||
select_next_state: None,
|
||||
select_prev_state: None,
|
||||
@@ -2655,7 +2657,7 @@ impl Editor {
|
||||
&mut self,
|
||||
local: bool,
|
||||
old_cursor_position: &Anchor,
|
||||
show_completions: bool,
|
||||
should_update_completions: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
@@ -2718,14 +2720,7 @@ impl Editor {
|
||||
|
||||
if local {
|
||||
let new_cursor_position = self.selections.newest_anchor().head();
|
||||
let mut context_menu = self.context_menu.borrow_mut();
|
||||
let completion_menu = match context_menu.as_ref() {
|
||||
Some(CodeContextMenu::Completions(menu)) => Some(menu),
|
||||
_ => {
|
||||
*context_menu = None;
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(buffer_id) = new_cursor_position.buffer_id {
|
||||
if !self.registered_buffers.contains_key(&buffer_id) {
|
||||
if let Some(project) = self.project.as_ref() {
|
||||
@@ -2742,50 +2737,40 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(completion_menu) = completion_menu {
|
||||
let cursor_position = new_cursor_position.to_offset(buffer);
|
||||
let (word_range, kind) =
|
||||
buffer.surrounding_word(completion_menu.initial_position, true);
|
||||
if kind == Some(CharKind::Word)
|
||||
&& word_range.to_inclusive().contains(&cursor_position)
|
||||
{
|
||||
let mut completion_menu = completion_menu.clone();
|
||||
drop(context_menu);
|
||||
|
||||
let query = Self::completion_query(buffer, cursor_position);
|
||||
let completion_provider = self.completion_provider.clone();
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
completion_menu
|
||||
.filter(query.as_deref(), completion_provider, this.clone(), cx)
|
||||
.await;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
let mut context_menu = this.context_menu.borrow_mut();
|
||||
let Some(CodeContextMenu::Completions(menu)) = context_menu.as_ref()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
if menu.id > completion_menu.id {
|
||||
return;
|
||||
}
|
||||
|
||||
*context_menu = Some(CodeContextMenu::Completions(completion_menu));
|
||||
drop(context_menu);
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
|
||||
if show_completions {
|
||||
self.show_completions(&ShowCompletions { trigger: None }, window, cx);
|
||||
}
|
||||
} else {
|
||||
drop(context_menu);
|
||||
self.hide_context_menu(window, cx);
|
||||
let mut context_menu = self.context_menu.borrow_mut();
|
||||
let completion_menu = match context_menu.as_ref() {
|
||||
Some(CodeContextMenu::Completions(menu)) => Some(menu),
|
||||
Some(CodeContextMenu::CodeActions(_)) => {
|
||||
*context_menu = None;
|
||||
None
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
let completion_position = completion_menu.map(|menu| menu.initial_position);
|
||||
drop(context_menu);
|
||||
|
||||
if should_update_completions {
|
||||
if let Some(completion_position) = completion_position {
|
||||
let new_cursor_offset = new_cursor_position.to_offset(buffer);
|
||||
let position_matches =
|
||||
new_cursor_offset == completion_position.to_offset(buffer);
|
||||
let continue_showing = if position_matches {
|
||||
let (word_range, kind) = buffer.surrounding_word(new_cursor_offset, true);
|
||||
if let Some(CharKind::Word) = kind {
|
||||
word_range.start < new_cursor_offset
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if continue_showing {
self.show_completions(&ShowCompletions { trigger: None }, window, cx);
} else {
self.hide_context_menu(window, cx);
}
}
} else {
drop(context_menu);
}

hide_hover(self, cx);
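After this change a selection move no longer refilters the old menu in place; the editor only decides whether the cursor is still at the position the menu was opened for and still inside a word, strictly past the word's start, and then either re-runs `show_completions` or hides the menu. A condensed sketch of that predicate over plain offsets (all names here are stand-ins):

#[derive(PartialEq)]
enum CharKind {
    Word,
    Other,
}

/// Should the open completions menu survive this cursor move?
fn continue_showing(
    cursor: usize,
    menu_position: usize,
    word_start: usize,
    kind: Option<CharKind>,
) -> bool {
    // The cursor must still be where the menu was opened...
    cursor == menu_position
        // ...and sitting inside a word, strictly after the word's start,
        // so an empty query (cursor at the word boundary) closes the menu.
        && kind == Some(CharKind::Word)
        && word_start < cursor
}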
@@ -2979,7 +2964,7 @@ impl Editor {
|
||||
self.change_selections_inner(true, autoscroll, window, cx, change)
|
||||
}
|
||||
|
||||
pub(crate) fn change_selections_without_showing_completions<R>(
|
||||
pub(crate) fn change_selections_without_updating_completions<R>(
|
||||
&mut self,
|
||||
autoscroll: Option<Autoscroll>,
|
||||
window: &mut Window,
|
||||
@@ -2991,7 +2976,7 @@ impl Editor {
|
||||
|
||||
fn change_selections_inner<R>(
|
||||
&mut self,
|
||||
show_completions: bool,
|
||||
should_update_completions: bool,
|
||||
autoscroll: Option<Autoscroll>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -2999,14 +2984,14 @@ impl Editor {
|
||||
) -> R {
|
||||
if let Some(state) = &mut self.deferred_selection_effects_state {
|
||||
state.autoscroll = autoscroll.or(state.autoscroll);
|
||||
state.show_completions = show_completions;
|
||||
state.should_update_completions = should_update_completions;
|
||||
let (changed, result) = self.selections.change_with(cx, change);
|
||||
state.changed |= changed;
|
||||
return result;
|
||||
}
|
||||
let mut state = DeferredSelectionEffectsState {
|
||||
changed: false,
|
||||
show_completions,
|
||||
should_update_completions,
|
||||
autoscroll,
|
||||
old_cursor_position: self.selections.newest_anchor().head(),
|
||||
history_entry: SelectionHistoryEntry {
|
||||
@@ -3066,7 +3051,7 @@ impl Editor {
|
||||
self.selections_did_change(
|
||||
true,
|
||||
&old_cursor_position,
|
||||
state.show_completions,
|
||||
state.should_update_completions,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -3319,12 +3304,18 @@ impl Editor {
|
||||
SelectMode::Character,
|
||||
);
|
||||
});
|
||||
if position.column() != goal_column {
|
||||
self.columnar_display_point = Some(DisplayPoint::new(position.row(), goal_column));
|
||||
} else {
|
||||
self.columnar_display_point = None;
|
||||
}
|
||||
}
|
||||
|
||||
let tail = self.selections.newest::<Point>(cx).tail();
|
||||
self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail));
|
||||
|
||||
if !reset {
|
||||
self.columnar_display_point = None;
|
||||
self.select_columns(
|
||||
tail.to_display_point(&display_map),
|
||||
position,
|
||||
@@ -3347,7 +3338,9 @@ impl Editor {
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
if let Some(tail) = self.columnar_selection_tail.as_ref() {
|
||||
let tail = tail.to_display_point(&display_map);
|
||||
let tail = self
|
||||
.columnar_display_point
|
||||
.unwrap_or_else(|| tail.to_display_point(&display_map));
|
||||
self.select_columns(tail, position, goal_column, &display_map, window, cx);
|
||||
} else if let Some(mut pending) = self.selections.pending_anchor() {
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
@@ -3463,7 +3456,7 @@ impl Editor {
|
||||
let selection_ranges = (start_row.0..=end_row.0)
|
||||
.map(DisplayRow)
|
||||
.filter_map(|row| {
|
||||
if start_column <= display_map.line_len(row) && !display_map.is_block_line(row) {
|
||||
if !display_map.is_block_line(row) {
|
||||
let start = display_map
|
||||
.clip_point(DisplayPoint::new(row, start_column), Bias::Left)
|
||||
.to_point(display_map);
|
||||
@@ -3481,8 +3474,19 @@ impl Editor {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut non_empty_ranges = selection_ranges
|
||||
.iter()
|
||||
.filter(|selection_range| selection_range.start != selection_range.end)
|
||||
.peekable();
|
||||
|
||||
let ranges = if non_empty_ranges.peek().is_some() {
|
||||
non_empty_ranges.cloned().collect()
|
||||
} else {
|
||||
selection_ranges
|
||||
};
|
||||
|
||||
self.change_selections(None, window, cx, |s| {
|
||||
s.select_ranges(selection_ranges);
|
||||
s.select_ranges(ranges);
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
@@ -3958,7 +3962,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
let had_active_inline_completion = this.has_active_inline_completion();
|
||||
this.change_selections_without_showing_completions(
|
||||
this.change_selections_without_updating_completions(
|
||||
Some(Autoscroll::fit()),
|
||||
window,
|
||||
cx,
|
||||
@@ -5004,7 +5008,7 @@ impl Editor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.open_completions_menu(true, None, window, cx);
|
||||
self.open_or_update_completions_menu(true, None, window, cx);
|
||||
}
|
||||
|
||||
pub fn show_completions(
|
||||
@@ -5013,10 +5017,10 @@ impl Editor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.open_completions_menu(false, options.trigger.as_deref(), window, cx);
|
||||
self.open_or_update_completions_menu(false, options.trigger.as_deref(), window, cx);
|
||||
}
|
||||
|
||||
fn open_completions_menu(
|
||||
fn open_or_update_completions_menu(
|
||||
&mut self,
|
||||
ignore_completion_provider: bool,
|
||||
trigger: Option<&str>,
|
||||
@@ -5026,9 +5030,6 @@ impl Editor {
|
||||
if self.pending_rename.is_some() {
|
||||
return;
|
||||
}
|
||||
if !self.snippet_stack.is_empty() && self.context_menu.borrow().as_ref().is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let position = self.selections.newest_anchor().head();
|
||||
if position.diff_base_anchor.is_some() {
|
||||
@@ -5041,11 +5042,52 @@ impl Editor {
|
||||
return;
|
||||
};
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let show_completion_documentation = buffer_snapshot
|
||||
.settings_at(buffer_position, cx)
|
||||
.show_completion_documentation;
|
||||
|
||||
let query = Self::completion_query(&self.buffer.read(cx).read(cx), position);
|
||||
let query: Option<Arc<String>> =
|
||||
Self::completion_query(&self.buffer.read(cx).read(cx), position)
|
||||
.map(|query| query.into());
|
||||
|
||||
let provider = if ignore_completion_provider {
|
||||
None
|
||||
} else {
|
||||
self.completion_provider.clone()
|
||||
};
|
||||
|
||||
let sort_completions = provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.sort_completions());
|
||||
|
||||
let filter_completions = provider
|
||||
.as_ref()
|
||||
.map_or(true, |provider| provider.filter_completions());
|
||||
|
||||
// When `is_incomplete` is false, can filter completions instead of re-querying when the
|
||||
// current query is a suffix of the initial query.
|
||||
if let Some(CodeContextMenu::Completions(menu)) = self.context_menu.borrow_mut().as_mut() {
|
||||
if !menu.is_incomplete && filter_completions {
|
||||
// If the new query is a suffix of the old query (typing more characters) and
|
||||
// the previous result was complete, the existing completions can be filtered.
|
||||
//
|
||||
// Note that this is always true for snippet completions.
|
||||
let query_matches = match (&menu.initial_query, &query) {
|
||||
(Some(initial_query), Some(query)) => query.starts_with(initial_query.as_ref()),
|
||||
(None, _) => true,
|
||||
_ => false,
|
||||
};
|
||||
if query_matches {
|
||||
let position_matches = if menu.initial_position == position {
|
||||
true
|
||||
} else {
|
||||
let snapshot = self.buffer.read(cx).read(cx);
|
||||
menu.initial_position.to_offset(&snapshot) == position.to_offset(&snapshot)
|
||||
};
|
||||
if position_matches {
|
||||
menu.filter(query.clone(), provider.clone(), window, cx);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let trigger_kind = match trigger {
|
||||
Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => {
|
||||
@@ -5064,14 +5106,14 @@ impl Editor {
|
||||
trigger_kind,
|
||||
};
|
||||
|
||||
let (old_range, word_kind) = buffer_snapshot.surrounding_word(buffer_position);
|
||||
let (old_range, word_to_exclude) = if word_kind == Some(CharKind::Word) {
|
||||
let (replace_range, word_kind) = buffer_snapshot.surrounding_word(buffer_position);
|
||||
let (replace_range, word_to_exclude) = if word_kind == Some(CharKind::Word) {
|
||||
let word_to_exclude = buffer_snapshot
|
||||
.text_for_range(old_range.clone())
|
||||
.text_for_range(replace_range.clone())
|
||||
.collect::<String>();
|
||||
(
|
||||
buffer_snapshot.anchor_before(old_range.start)
|
||||
..buffer_snapshot.anchor_after(old_range.end),
|
||||
buffer_snapshot.anchor_before(replace_range.start)
|
||||
..buffer_snapshot.anchor_after(replace_range.end),
|
||||
Some(word_to_exclude),
|
||||
)
|
||||
} else {
|
||||
@@ -5085,6 +5127,10 @@ impl Editor {
|
||||
let completion_settings =
|
||||
language_settings(language.clone(), buffer_snapshot.file(), cx).completions;
|
||||
|
||||
let show_completion_documentation = buffer_snapshot
|
||||
.settings_at(buffer_position, cx)
|
||||
.show_completion_documentation;
|
||||
|
||||
// The document can be large, so stay in reasonable bounds when searching for words,
|
||||
// otherwise completion pop-up might be slow to appear.
|
||||
const WORD_LOOKUP_ROWS: u32 = 5_000;
|
||||
@@ -5100,18 +5146,13 @@ impl Editor {
|
||||
let word_search_range = buffer_snapshot.point_to_offset(min_word_search)
|
||||
..buffer_snapshot.point_to_offset(max_word_search);
|
||||
|
||||
let provider = if ignore_completion_provider {
|
||||
None
|
||||
} else {
|
||||
self.completion_provider.clone()
|
||||
};
|
||||
let skip_digits = query
|
||||
.as_ref()
|
||||
.map_or(true, |query| !query.chars().any(|c| c.is_digit(10)));
|
||||
|
||||
let (mut words, provided_completions) = match &provider {
|
||||
let (mut words, provider_responses) = match &provider {
|
||||
Some(provider) => {
|
||||
let completions = provider.completions(
|
||||
let provider_responses = provider.completions(
|
||||
position.excerpt_id,
|
||||
&buffer,
|
||||
buffer_position,
|
||||
@@ -5132,7 +5173,7 @@ impl Editor {
|
||||
}),
|
||||
};
|
||||
|
||||
(words, completions)
|
||||
(words, provider_responses)
|
||||
}
|
||||
None => (
|
||||
cx.background_spawn(async move {
|
||||
@@ -5142,137 +5183,165 @@ impl Editor {
|
||||
skip_digits,
|
||||
})
|
||||
}),
|
||||
Task::ready(Ok(None)),
|
||||
Task::ready(Ok(Vec::new())),
|
||||
),
|
||||
};
|
||||
|
||||
let sort_completions = provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.sort_completions());
|
||||
|
||||
let filter_completions = provider
|
||||
.as_ref()
|
||||
.map_or(true, |provider| provider.filter_completions());
|
||||
|
||||
let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order;
|
||||
|
||||
let id = post_inc(&mut self.next_completion_id);
|
||||
let task = cx.spawn_in(window, async move |editor, cx| {
|
||||
async move {
|
||||
editor.update(cx, |this, _| {
|
||||
this.completion_tasks.retain(|(task_id, _)| *task_id >= id);
|
||||
})?;
|
||||
let Ok(()) = editor.update(cx, |this, _| {
|
||||
this.completion_tasks.retain(|(task_id, _)| *task_id >= id);
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let mut completions = Vec::new();
|
||||
if let Some(provided_completions) = provided_completions.await.log_err().flatten() {
|
||||
completions.extend(provided_completions);
|
||||
// TODO: Ideally completions from different sources would be selectively re-queried, so
|
||||
// that having one source with `is_incomplete: true` doesn't cause all to be re-queried.
|
||||
let mut completions = Vec::new();
|
||||
let mut is_incomplete = false;
|
||||
if let Some(provider_responses) = provider_responses.await.log_err() {
|
||||
if !provider_responses.is_empty() {
|
||||
for response in provider_responses {
|
||||
completions.extend(response.completions);
|
||||
is_incomplete = is_incomplete || response.is_incomplete;
|
||||
}
|
||||
if completion_settings.words == WordsCompletionMode::Fallback {
|
||||
words = Task::ready(BTreeMap::default());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut words = words.await;
|
||||
if let Some(word_to_exclude) = &word_to_exclude {
|
||||
words.remove(word_to_exclude);
|
||||
}
|
||||
for lsp_completion in &completions {
|
||||
words.remove(&lsp_completion.new_text);
|
||||
}
|
||||
completions.extend(words.into_iter().map(|(word, word_range)| Completion {
|
||||
replace_range: old_range.clone(),
|
||||
new_text: word.clone(),
|
||||
label: CodeLabel::plain(word, None),
|
||||
icon_path: None,
|
||||
documentation: None,
|
||||
source: CompletionSource::BufferWord {
|
||||
word_range,
|
||||
resolved: false,
|
||||
},
|
||||
insert_text_mode: Some(InsertTextMode::AS_IS),
|
||||
confirm: None,
|
||||
}));
|
||||
let mut words = words.await;
|
||||
if let Some(word_to_exclude) = &word_to_exclude {
|
||||
words.remove(word_to_exclude);
|
||||
}
|
||||
for lsp_completion in &completions {
|
||||
words.remove(&lsp_completion.new_text);
|
||||
}
|
||||
completions.extend(words.into_iter().map(|(word, word_range)| Completion {
|
||||
replace_range: replace_range.clone(),
|
||||
new_text: word.clone(),
|
||||
label: CodeLabel::plain(word, None),
|
||||
icon_path: None,
|
||||
documentation: None,
|
||||
source: CompletionSource::BufferWord {
|
||||
word_range,
|
||||
resolved: false,
|
||||
},
|
||||
insert_text_mode: Some(InsertTextMode::AS_IS),
|
||||
confirm: None,
|
||||
}));
|
||||
|
||||
let menu = if completions.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let mut menu = editor.update(cx, |editor, cx| {
|
||||
let languages = editor
|
||||
.workspace
|
||||
.as_ref()
|
||||
.and_then(|(workspace, _)| workspace.upgrade())
|
||||
.map(|workspace| workspace.read(cx).app_state().languages.clone());
|
||||
CompletionsMenu::new(
|
||||
id,
|
||||
sort_completions,
|
||||
show_completion_documentation,
|
||||
ignore_completion_provider,
|
||||
position,
|
||||
buffer.clone(),
|
||||
completions.into(),
|
||||
snippet_sort_order,
|
||||
languages,
|
||||
language,
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
|
||||
menu.filter(
|
||||
if filter_completions {
|
||||
query.as_deref()
|
||||
} else {
|
||||
None
|
||||
},
|
||||
provider,
|
||||
editor.clone(),
|
||||
let menu = if completions.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let Ok((mut menu, matches_task)) = editor.update(cx, |editor, cx| {
|
||||
let languages = editor
|
||||
.workspace
|
||||
.as_ref()
|
||||
.and_then(|(workspace, _)| workspace.upgrade())
|
||||
.map(|workspace| workspace.read(cx).app_state().languages.clone());
|
||||
let menu = CompletionsMenu::new(
|
||||
id,
|
||||
sort_completions,
|
||||
show_completion_documentation,
|
||||
ignore_completion_provider,
|
||||
position,
|
||||
query.clone(),
|
||||
is_incomplete,
|
||||
buffer.clone(),
|
||||
completions.into(),
|
||||
snippet_sort_order,
|
||||
languages,
|
||||
language,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
);
|
||||
|
||||
menu.visible().then_some(menu)
|
||||
let query = if filter_completions { query } else { None };
|
||||
let matches_task = if let Some(query) = query {
|
||||
menu.do_async_filtering(query, cx)
|
||||
} else {
|
||||
Task::ready(menu.unfiltered_matches())
|
||||
};
|
||||
(menu, matches_task)
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
let matches = matches_task.await;
|
||||
|
||||
let Ok(()) = editor.update_in(cx, |editor, window, cx| {
|
||||
// Newer menu already set, so exit.
|
||||
match editor.context_menu.borrow().as_ref() {
|
||||
None => {}
|
||||
Some(CodeContextMenu::Completions(prev_menu)) => {
|
||||
if prev_menu.id > id {
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => return,
|
||||
_ => {}
|
||||
};
|
||||
|
||||
// Only valid to take prev_menu because it the new menu is immediately set
|
||||
// below, or the menu is hidden.
|
||||
match editor.context_menu.borrow_mut().take() {
|
||||
Some(CodeContextMenu::Completions(prev_menu)) => {
|
||||
let position_matches =
|
||||
if prev_menu.initial_position == menu.initial_position {
|
||||
true
|
||||
} else {
|
||||
let snapshot = editor.buffer.read(cx).read(cx);
|
||||
prev_menu.initial_position.to_offset(&snapshot)
|
||||
== menu.initial_position.to_offset(&snapshot)
|
||||
};
|
||||
if position_matches {
|
||||
// Preserve markdown cache before `set_filter_results` because it will
|
||||
// try to populate the documentation cache.
|
||||
menu.preserve_markdown_cache(prev_menu);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
|
||||
menu.set_filter_results(matches, provider, window, cx);
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
|
||||
menu.visible().then_some(menu)
|
||||
};
|
||||
|
||||
editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
if editor.focus_handle.is_focused(window) {
|
||||
if let Some(menu) = menu {
|
||||
*editor.context_menu.borrow_mut() =
|
||||
Some(CodeContextMenu::Completions(menu));
|
||||
|
||||
crate::hover_popover::hide_hover(editor, cx);
|
||||
if editor.show_edit_predictions_in_menu() {
|
||||
editor.update_visible_inline_completion(window, cx);
|
||||
} else {
|
||||
editor.discard_inline_completion(false, cx);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if editor.focus_handle.is_focused(window) && menu.is_some() {
|
||||
let mut menu = menu.unwrap();
|
||||
menu.resolve_visible_completions(editor.completion_provider.as_deref(), cx);
|
||||
crate::hover_popover::hide_hover(editor, cx);
|
||||
*editor.context_menu.borrow_mut() =
|
||||
Some(CodeContextMenu::Completions(menu));
|
||||
|
||||
if editor.show_edit_predictions_in_menu() {
|
||||
editor.update_visible_inline_completion(window, cx);
|
||||
} else {
|
||||
editor.discard_inline_completion(false, cx);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
} else if editor.completion_tasks.len() <= 1 {
|
||||
// If there are no more completion tasks and the last menu was
|
||||
// empty, we should hide it.
|
||||
if editor.completion_tasks.len() <= 1 {
|
||||
// If there are no more completion tasks and the last menu was empty, we should hide it.
|
||||
let was_hidden = editor.hide_context_menu(window, cx).is_none();
|
||||
// If it was already hidden and we don't show inline
|
||||
// completions in the menu, we should also show the
|
||||
// inline-completion when available.
|
||||
// If it was already hidden and we don't show inline completions in the menu, we should
|
||||
// also show the inline-completion when available.
|
||||
if was_hidden && editor.show_edit_predictions_in_menu() {
|
||||
editor.update_visible_inline_completion(window, cx);
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
|
||||
self.completion_tasks.push((id, task));
|
||||
@@ -5292,17 +5361,16 @@ impl Editor {
|
||||
pub fn with_completions_menu_matching_id<R>(
|
||||
&self,
|
||||
id: CompletionId,
|
||||
on_absent: impl FnOnce() -> R,
|
||||
on_match: impl FnOnce(&mut CompletionsMenu) -> R,
|
||||
f: impl FnOnce(Option<&mut CompletionsMenu>) -> R,
|
||||
) -> R {
|
||||
let mut context_menu = self.context_menu.borrow_mut();
|
||||
let Some(CodeContextMenu::Completions(completions_menu)) = &mut *context_menu else {
|
||||
return on_absent();
|
||||
return f(None);
|
||||
};
|
||||
if completions_menu.id != id {
|
||||
return on_absent();
|
||||
return f(None);
|
||||
}
|
||||
on_match(completions_menu)
|
||||
f(Some(completions_menu))
|
||||
}
|
||||
|
||||
pub fn confirm_completion(
|
||||
@@ -5375,7 +5443,7 @@ impl Editor {
|
||||
.clone();
|
||||
cx.stop_propagation();
|
||||
|
||||
let buffer_handle = completions_menu.buffer;
|
||||
let buffer_handle = completions_menu.buffer.clone();
|
||||
|
||||
let CompletionEdit {
|
||||
new_text,
|
||||
@@ -5463,9 +5531,18 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
let mut common_prefix_len = 0;
|
||||
for (a, b) in old_text.chars().zip(new_text.chars()) {
|
||||
if a == b {
|
||||
common_prefix_len += a.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
cx.emit(EditorEvent::InputHandled {
|
||||
utf16_range_to_replace: None,
|
||||
text: new_text.clone().into(),
|
||||
text: new_text[common_prefix_len..].into(),
|
||||
});
|
||||
|
||||
self.transact(window, cx, |this, window, cx| {
|
||||
@@ -8861,7 +8938,10 @@ impl Editor {
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|range| (range, snippet_text.clone()));
|
||||
buffer.edit(edits, Some(AutoindentMode::EachLine), cx);
|
||||
let autoindent_mode = AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
};
|
||||
buffer.edit(edits, Some(autoindent_mode), cx);
|
||||
|
||||
let snapshot = &*buffer.read(cx);
|
||||
let snippet = &snippet;
|
||||
@@ -8890,7 +8970,9 @@ impl Editor {
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
tabstop_ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start, snapshot));
|
||||
// Sort in reverse order so that the first range is the newest created
|
||||
// selection. Completions will use it and autoscroll will prioritize it.
|
||||
tabstop_ranges.sort_unstable_by(|a, b| b.start.cmp(&a.start, snapshot));
|
||||
|
||||
Tabstop {
|
||||
is_end_tabstop,
|
||||
@@ -9018,7 +9100,7 @@ impl Editor {
|
||||
}
|
||||
if let Some(current_ranges) = snippet.ranges.get(snippet.active_index) {
|
||||
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
|
||||
s.select_anchor_ranges(current_ranges.iter().cloned())
|
||||
s.select_ranges(current_ranges.iter().cloned())
|
||||
});
|
||||
|
||||
if let Some(choices) = &snippet.choices[snippet.active_index] {
|
||||
@@ -20185,7 +20267,7 @@ pub trait CompletionProvider {
|
||||
trigger: CompletionContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>>;
|
||||
) -> Task<Result<Vec<CompletionResponse>>>;
|
||||
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
@@ -20294,7 +20376,7 @@ fn snippet_completions(
|
||||
buffer: &Entity<Buffer>,
|
||||
buffer_position: text::Anchor,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<Completion>>> {
|
||||
) -> Task<Result<CompletionResponse>> {
|
||||
let languages = buffer.read(cx).languages_at(buffer_position);
|
||||
let snippet_store = project.snippets().read(cx);
|
||||
|
||||
@@ -20313,7 +20395,10 @@ fn snippet_completions(
|
||||
.collect();
|
||||
|
||||
if scopes.is_empty() {
|
||||
return Task::ready(Ok(vec![]));
|
||||
return Task::ready(Ok(CompletionResponse {
|
||||
completions: vec![],
|
||||
is_incomplete: false,
|
||||
}));
|
||||
}
|
||||
|
||||
let snapshot = buffer.read(cx).text_snapshot();
|
||||
@@ -20323,7 +20408,8 @@ fn snippet_completions(
|
||||
let executor = cx.background_executor().clone();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let mut all_results: Vec<Completion> = Vec::new();
|
||||
let mut is_incomplete = false;
|
||||
let mut completions: Vec<Completion> = Vec::new();
|
||||
for (scope, snippets) in scopes.into_iter() {
|
||||
let classifier = CharClassifier::new(Some(scope)).for_completion(true);
|
||||
let mut last_word = chars
|
||||
@@ -20333,7 +20419,10 @@ fn snippet_completions(
|
||||
last_word = last_word.chars().rev().collect();
|
||||
|
||||
if last_word.is_empty() {
|
||||
return Ok(vec![]);
|
||||
return Ok(CompletionResponse {
|
||||
completions: vec![],
|
||||
is_incomplete: true,
|
||||
});
|
||||
}
|
||||
|
||||
let as_offset = text::ToOffset::to_offset(&buffer_position, &snapshot);
|
||||
@@ -20354,16 +20443,21 @@ fn snippet_completions(
|
||||
})
|
||||
.collect::<Vec<StringMatchCandidate>>();
|
||||
|
||||
const MAX_RESULTS: usize = 100;
|
||||
let mut matches = fuzzy::match_strings(
|
||||
&candidates,
|
||||
&last_word,
|
||||
last_word.chars().any(|c| c.is_uppercase()),
|
||||
100,
|
||||
MAX_RESULTS,
|
||||
&Default::default(),
|
||||
executor.clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
if matches.len() >= MAX_RESULTS {
|
||||
is_incomplete = true;
|
||||
}
|
||||
|
||||
// Remove all candidates where the query's start does not match the start of any word in the candidate
|
||||
if let Some(query_start) = last_word.chars().next() {
|
||||
matches.retain(|string_match| {
|
||||
@@ -20383,76 +20477,72 @@ fn snippet_completions(
|
||||
.map(|m| m.string)
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
let mut result: Vec<Completion> = snippets
|
||||
.iter()
|
||||
.filter_map(|snippet| {
|
||||
let matching_prefix = snippet
|
||||
.prefix
|
||||
.iter()
|
||||
.find(|prefix| matched_strings.contains(*prefix))?;
|
||||
let start = as_offset - last_word.len();
|
||||
let start = snapshot.anchor_before(start);
|
||||
let range = start..buffer_position;
|
||||
let lsp_start = to_lsp(&start);
|
||||
let lsp_range = lsp::Range {
|
||||
start: lsp_start,
|
||||
end: lsp_end,
|
||||
};
|
||||
Some(Completion {
|
||||
replace_range: range,
|
||||
new_text: snippet.body.clone(),
|
||||
source: CompletionSource::Lsp {
|
||||
insert_range: None,
|
||||
server_id: LanguageServerId(usize::MAX),
|
||||
resolved: true,
|
||||
lsp_completion: Box::new(lsp::CompletionItem {
|
||||
label: snippet.prefix.first().unwrap().clone(),
|
||||
kind: Some(CompletionItemKind::SNIPPET),
|
||||
label_details: snippet.description.as_ref().map(|description| {
|
||||
lsp::CompletionItemLabelDetails {
|
||||
detail: Some(description.clone()),
|
||||
description: None,
|
||||
}
|
||||
}),
|
||||
insert_text_format: Some(InsertTextFormat::SNIPPET),
|
||||
text_edit: Some(lsp::CompletionTextEdit::InsertAndReplace(
|
||||
lsp::InsertReplaceEdit {
|
||||
new_text: snippet.body.clone(),
|
||||
insert: lsp_range,
|
||||
replace: lsp_range,
|
||||
},
|
||||
)),
|
||||
filter_text: Some(snippet.body.clone()),
|
||||
sort_text: Some(char::MAX.to_string()),
|
||||
..lsp::CompletionItem::default()
|
||||
completions.extend(snippets.iter().filter_map(|snippet| {
|
||||
let matching_prefix = snippet
|
||||
.prefix
|
||||
.iter()
|
||||
.find(|prefix| matched_strings.contains(*prefix))?;
|
||||
let start = as_offset - last_word.len();
|
||||
let start = snapshot.anchor_before(start);
|
||||
let range = start..buffer_position;
|
||||
let lsp_start = to_lsp(&start);
|
||||
let lsp_range = lsp::Range {
|
||||
start: lsp_start,
|
||||
end: lsp_end,
|
||||
};
|
||||
Some(Completion {
|
||||
replace_range: range,
|
||||
new_text: snippet.body.clone(),
|
||||
source: CompletionSource::Lsp {
|
||||
insert_range: None,
|
||||
server_id: LanguageServerId(usize::MAX),
|
||||
resolved: true,
|
||||
lsp_completion: Box::new(lsp::CompletionItem {
|
||||
label: snippet.prefix.first().unwrap().clone(),
|
||||
kind: Some(CompletionItemKind::SNIPPET),
|
||||
label_details: snippet.description.as_ref().map(|description| {
|
||||
lsp::CompletionItemLabelDetails {
|
||||
detail: Some(description.clone()),
|
||||
description: None,
|
||||
}
|
||||
}),
|
||||
lsp_defaults: None,
|
||||
},
|
||||
label: CodeLabel {
|
||||
text: matching_prefix.clone(),
|
||||
runs: Vec::new(),
|
||||
filter_range: 0..matching_prefix.len(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: Some(
|
||||
CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
single_line: snippet.name.clone().into(),
|
||||
plain_text: snippet
|
||||
.description
|
||||
.clone()
|
||||
.map(|description| description.into()),
|
||||
},
|
||||
),
|
||||
insert_text_mode: None,
|
||||
confirm: None,
|
||||
})
|
||||
insert_text_format: Some(InsertTextFormat::SNIPPET),
|
||||
text_edit: Some(lsp::CompletionTextEdit::InsertAndReplace(
|
||||
lsp::InsertReplaceEdit {
|
||||
new_text: snippet.body.clone(),
|
||||
insert: lsp_range,
|
||||
replace: lsp_range,
|
||||
},
|
||||
)),
|
||||
filter_text: Some(snippet.body.clone()),
|
||||
sort_text: Some(char::MAX.to_string()),
|
||||
..lsp::CompletionItem::default()
|
||||
}),
|
||||
lsp_defaults: None,
|
||||
},
|
||||
label: CodeLabel {
|
||||
text: matching_prefix.clone(),
|
||||
runs: Vec::new(),
|
||||
filter_range: 0..matching_prefix.len(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
single_line: snippet.name.clone().into(),
|
||||
plain_text: snippet
|
||||
.description
|
||||
.clone()
|
||||
.map(|description| description.into()),
|
||||
}),
|
||||
insert_text_mode: None,
|
||||
confirm: None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
all_results.append(&mut result);
|
||||
}))
|
||||
}
|
||||
|
||||
Ok(all_results)
|
||||
Ok(CompletionResponse {
|
||||
completions,
|
||||
is_incomplete,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -20465,25 +20555,17 @@ impl CompletionProvider for Entity<Project> {
|
||||
options: CompletionContext,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
self.update(cx, |project, cx| {
|
||||
let snippets = snippet_completions(project, buffer, buffer_position, cx);
|
||||
let project_completions = project.completions(buffer, buffer_position, options, cx);
|
||||
cx.background_spawn(async move {
|
||||
let snippets_completions = snippets.await?;
|
||||
match project_completions.await? {
|
||||
Some(mut completions) => {
|
||||
completions.extend(snippets_completions);
|
||||
Ok(Some(completions))
|
||||
}
|
||||
None => {
|
||||
if snippets_completions.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(snippets_completions))
|
||||
}
|
||||
}
|
||||
let mut responses = project_completions.await?;
|
||||
let snippets = snippets.await?;
|
||||
if !snippets.completions.is_empty() {
|
||||
responses.push(snippets);
|
||||
}
|
||||
Ok(responses)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use super::*;
|
||||
use crate::{
|
||||
JoinLines,
|
||||
code_context_menus::CodeContextMenu,
|
||||
inline_completion_tests::FakeInlineCompletionProvider,
|
||||
linked_editing_ranges::LinkedEditingRanges,
|
||||
scroll::scroll_amount::ScrollAmount,
|
||||
@@ -8512,108 +8513,123 @@ async fn test_snippet_placeholder_choices(cx: &mut TestAppContext) {
|
||||
async fn test_snippets(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let (text, insertion_ranges) = marked_text_ranges(
|
||||
indoc! {"
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
"},
|
||||
false,
|
||||
);
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx));
|
||||
let (editor, cx) = cx.add_window_view(|window, cx| build_editor(buffer, window, cx));
|
||||
cx.set_state(indoc! {"
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
a.ˇ b
|
||||
"});
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap();
|
||||
|
||||
let insertion_ranges = editor
|
||||
.selections
|
||||
.all(cx)
|
||||
.iter()
|
||||
.map(|s| s.range().clone())
|
||||
.collect::<Vec<_>>();
|
||||
editor
|
||||
.insert_snippet(&insertion_ranges, snippet, window, cx)
|
||||
.unwrap();
|
||||
|
||||
fn assert(editor: &mut Editor, cx: &mut Context<Editor>, marked_text: &str) {
|
||||
let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false);
|
||||
assert_eq!(editor.text(cx), expected_text);
|
||||
assert_eq!(editor.selections.ranges::<usize>(cx), selection_ranges);
|
||||
}
|
||||
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
"},
|
||||
);
|
||||
|
||||
// Can't move earlier than the first tab stop
|
||||
assert!(!editor.move_to_prev_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
"},
|
||||
);
|
||||
|
||||
assert!(editor.move_to_next_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
"},
|
||||
);
|
||||
|
||||
editor.move_to_prev_snippet_tabstop(window, cx);
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
a.f(«one», two, «three») b
|
||||
"},
|
||||
);
|
||||
|
||||
assert!(editor.move_to_next_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
a.f(one, «two», three) b
|
||||
"},
|
||||
);
|
||||
assert!(editor.move_to_next_snippet_tabstop(window, cx));
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"},
|
||||
);
|
||||
|
||||
// As soon as the last tab stop is reached, snippet state is gone
|
||||
editor.move_to_prev_snippet_tabstop(window, cx);
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"},
|
||||
);
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
"});
|
||||
|
||||
// Can't move earlier than the first tab stop
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
assert!(!editor.move_to_prev_snippet_tabstop(window, cx))
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_prev_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
a.f(«oneˇ», two, «threeˇ») b
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
a.f(one, «twoˇ», three) b
|
||||
"});
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"});
|
||||
|
||||
// As soon as the last tab stop is reached, snippet state is gone
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
assert!(!editor.move_to_prev_snippet_tabstop(window, cx))
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
a.f(one, two, three)ˇ b
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_snippet_indentation(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
let snippet = Snippet::parse(indoc! {"
|
||||
/*
|
||||
* Multiline comment with leading indentation
|
||||
*
|
||||
* $1
|
||||
*/
|
||||
$0"})
|
||||
.unwrap();
|
||||
let insertion_ranges = editor
|
||||
.selections
|
||||
.all(cx)
|
||||
.iter()
|
||||
.map(|s| s.range().clone())
|
||||
.collect::<Vec<_>>();
|
||||
editor
|
||||
.insert_snippet(&insertion_ranges, snippet, window, cx)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/*
|
||||
* Multiline comment with leading indentation
|
||||
*
|
||||
* ˇ
|
||||
*/
|
||||
"});
|
||||
|
||||
cx.update_editor(|editor, window, cx| assert!(editor.move_to_next_snippet_tabstop(window, cx)));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/*
|
||||
* Multiline comment with leading indentation
|
||||
*
|
||||
*•
|
||||
*/
|
||||
ˇ"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -11184,14 +11200,15 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
"});
|
||||
cx.simulate_keystroke(".");
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
indoc! {"
|
||||
one.|<>
|
||||
two
|
||||
three
|
||||
"},
|
||||
vec!["first_completion", "second_completion"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11291,7 +11308,6 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
additional edit
|
||||
"});
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
indoc! {"
|
||||
one.second_completion
|
||||
two s
|
||||
@@ -11299,7 +11315,9 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
additional edit
|
||||
"},
|
||||
vec!["fourth_completion", "fifth_completion", "sixth_completion"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11309,7 +11327,6 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
cx.simulate_keystroke("i");
|
||||
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
indoc! {"
|
||||
one.second_completion
|
||||
two si
|
||||
@@ -11317,7 +11334,9 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
additional edit
|
||||
"},
|
||||
vec!["fourth_completion", "fifth_completion", "sixth_completion"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11351,10 +11370,11 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
|
||||
});
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
"editor.<clo|>",
|
||||
vec!["close", "clobber"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
@@ -11371,6 +11391,128 @@ async fn test_completion(cx: &mut TestAppContext) {
|
||||
apply_additional_edits.await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_completion_reuse(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
trigger_characters: Some(vec![".".to_string()]),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let counter = Arc::new(AtomicUsize::new(0));
|
||||
cx.set_state("objˇ");
|
||||
cx.simulate_keystroke(".");
|
||||
|
||||
// Initial completion request returns complete results
|
||||
let is_incomplete = false;
|
||||
handle_completion_request(
|
||||
"obj.|<>",
|
||||
vec!["a", "ab", "abc"],
|
||||
is_incomplete,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.ˇ");
|
||||
check_displayed_completions(vec!["a", "ab", "abc"], &mut cx);
|
||||
|
||||
// Type "a" - filters existing completions
|
||||
cx.simulate_keystroke("a");
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.aˇ");
|
||||
check_displayed_completions(vec!["a", "ab", "abc"], &mut cx);
|
||||
|
||||
// Type "b" - filters existing completions
|
||||
cx.simulate_keystroke("b");
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.abˇ");
|
||||
check_displayed_completions(vec!["ab", "abc"], &mut cx);
|
||||
|
||||
// Type "c" - filters existing completions
|
||||
cx.simulate_keystroke("c");
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.abcˇ");
|
||||
check_displayed_completions(vec!["abc"], &mut cx);
|
||||
|
||||
// Backspace to delete "c" - filters existing completions
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.backspace(&Backspace, window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.abˇ");
|
||||
check_displayed_completions(vec!["ab", "abc"], &mut cx);
|
||||
|
||||
// Moving cursor to the left dismisses menu.
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.move_left(&MoveLeft, window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
cx.assert_editor_state("obj.aˇb");
|
||||
cx.update_editor(|editor, _, _| {
|
||||
assert_eq!(editor.context_menu_visible(), false);
|
||||
});
|
||||
|
||||
// Type "b" - new request
|
||||
cx.simulate_keystroke("b");
|
||||
let is_incomplete = false;
|
||||
handle_completion_request(
|
||||
"obj.<ab|>a",
|
||||
vec!["ab", "abc"],
|
||||
is_incomplete,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 2);
|
||||
cx.assert_editor_state("obj.abˇb");
|
||||
check_displayed_completions(vec!["ab", "abc"], &mut cx);
|
||||
|
||||
// Backspace to delete "b" - since query was "ab" and is now "a", new request is made.
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.backspace(&Backspace, window, cx);
|
||||
});
|
||||
let is_incomplete = false;
|
||||
handle_completion_request(
|
||||
"obj.<a|>b",
|
||||
vec!["a", "ab", "abc"],
|
||||
is_incomplete,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 3);
|
||||
cx.assert_editor_state("obj.aˇb");
|
||||
check_displayed_completions(vec!["a", "ab", "abc"], &mut cx);
|
||||
|
||||
// Backspace to delete "a" - dismisses menu.
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.backspace(&Backspace, window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 3);
|
||||
cx.assert_editor_state("obj.ˇb");
|
||||
cx.update_editor(|editor, _, _| {
|
||||
assert_eq!(editor.context_menu_visible(), false);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_word_completion(cx: &mut TestAppContext) {
|
||||
let lsp_fetch_timeout_ms = 10;
|
||||
@@ -12051,9 +12193,11 @@ async fn test_no_duplicated_completion_requests(cx: &mut TestAppContext) {
|
||||
let task_completion_item = closure_completion_item.clone();
|
||||
counter_clone.fetch_add(1, atomic::Ordering::Release);
|
||||
async move {
|
||||
Ok(Some(lsp::CompletionResponse::Array(vec![
|
||||
task_completion_item,
|
||||
])))
|
||||
Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
|
||||
is_incomplete: true,
|
||||
item_defaults: None,
|
||||
items: vec![task_completion_item],
|
||||
})))
|
||||
}
|
||||
});
|
||||
|
||||
@@ -17127,6 +17271,64 @@ async fn test_indent_guide_ends_before_empty_line(cx: &mut TestAppContext) {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_indent_guide_ignored_only_whitespace_lines(cx: &mut TestAppContext) {
|
||||
let (buffer_id, mut cx) = setup_indent_guides_editor(
|
||||
&"
|
||||
function component() {
|
||||
\treturn (
|
||||
\t\t\t
|
||||
\t\t<div>
|
||||
\t\t\t<abc></abc>
|
||||
\t\t</div>
|
||||
\t)
|
||||
}"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert_indent_guides(
|
||||
0..8,
|
||||
vec![
|
||||
indent_guide(buffer_id, 1, 6, 0),
|
||||
indent_guide(buffer_id, 2, 5, 1),
|
||||
indent_guide(buffer_id, 4, 4, 2),
|
||||
],
|
||||
None,
|
||||
&mut cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_indent_guide_fallback_to_next_non_entirely_whitespace_line(cx: &mut TestAppContext) {
|
||||
let (buffer_id, mut cx) = setup_indent_guides_editor(
|
||||
&"
|
||||
function component() {
|
||||
\treturn (
|
||||
\t
|
||||
\t\t<div>
|
||||
\t\t\t<abc></abc>
|
||||
\t\t</div>
|
||||
\t)
|
||||
}"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert_indent_guides(
|
||||
0..8,
|
||||
vec![
|
||||
indent_guide(buffer_id, 1, 6, 0),
|
||||
indent_guide(buffer_id, 2, 5, 1),
|
||||
indent_guide(buffer_id, 4, 4, 2),
|
||||
],
|
||||
None,
|
||||
&mut cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_indent_guide_continuing_off_screen(cx: &mut TestAppContext) {
|
||||
let (buffer_id, mut cx) = setup_indent_guides_editor(
|
||||
@@ -21051,6 +21253,22 @@ pub fn handle_signature_help_request(
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn check_displayed_completions(expected: Vec<&'static str>, cx: &mut EditorLspTestContext) {
|
||||
cx.update_editor(|editor, _, _| {
|
||||
if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow().as_ref() {
|
||||
let entries = menu.entries.borrow();
|
||||
let entries = entries
|
||||
.iter()
|
||||
.map(|entry| entry.string.as_str())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(entries, expected);
|
||||
} else {
|
||||
panic!("Expected completions menu");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// Handle completion request passing a marked string specifying where the completion
|
||||
/// should be triggered from using '|' character, what range should be replaced, and what completions
|
||||
/// should be returned using '<' and '>' to delimit the range.
|
||||
@@ -21058,10 +21276,11 @@ pub fn handle_signature_help_request(
|
||||
/// Also see `handle_completion_request_with_insert_and_replace`.
|
||||
#[track_caller]
|
||||
pub fn handle_completion_request(
|
||||
cx: &mut EditorLspTestContext,
|
||||
marked_string: &str,
|
||||
completions: Vec<&'static str>,
|
||||
is_incomplete: bool,
|
||||
counter: Arc<AtomicUsize>,
|
||||
cx: &mut EditorLspTestContext,
|
||||
) -> impl Future<Output = ()> {
|
||||
let complete_from_marker: TextRangeMarker = '|'.into();
|
||||
let replace_range_marker: TextRangeMarker = ('<', '>').into();
|
||||
@@ -21085,8 +21304,10 @@ pub fn handle_completion_request(
|
||||
params.text_document_position.position,
|
||||
complete_from_position
|
||||
);
|
||||
Ok(Some(lsp::CompletionResponse::Array(
|
||||
completions
|
||||
Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
|
||||
is_incomplete: is_incomplete,
|
||||
item_defaults: None,
|
||||
items: completions
|
||||
.iter()
|
||||
.map(|completion_text| lsp::CompletionItem {
|
||||
label: completion_text.to_string(),
|
||||
@@ -21097,7 +21318,7 @@ pub fn handle_completion_request(
|
||||
..Default::default()
|
||||
})
|
||||
.collect(),
|
||||
)))
|
||||
})))
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -682,7 +682,7 @@ impl EditorElement {
|
||||
editor.select(
|
||||
SelectPhase::BeginColumnar {
|
||||
position,
|
||||
reset: false,
|
||||
reset: true,
|
||||
goal_column: point_for_position.exact_unclipped.column(),
|
||||
},
|
||||
window,
|
||||
|
||||
@@ -1095,14 +1095,15 @@ mod tests {
|
||||
//prompt autocompletion menu
|
||||
cx.simulate_keystroke(".");
|
||||
handle_completion_request(
|
||||
&mut cx,
|
||||
indoc! {"
|
||||
one.|<>
|
||||
two
|
||||
three
|
||||
"},
|
||||
vec!["first_completion", "second_completion"],
|
||||
true,
|
||||
counter.clone(),
|
||||
&mut cx,
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible()) // wait until completion menu is visible
|
||||
|
||||
@@ -600,7 +600,7 @@ pub(crate) fn handle_from(
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.change_selections_without_showing_completions(None, window, cx, |s| {
|
||||
this.change_selections_without_updating_completions(None, window, cx, |s| {
|
||||
s.select(base_selections);
|
||||
});
|
||||
})
|
||||
|
||||
@@ -532,7 +532,9 @@ impl EditorTestContext {
|
||||
#[track_caller]
|
||||
pub fn assert_editor_selections(&mut self, expected_selections: Vec<Range<usize>>) {
|
||||
let expected_marked_text =
|
||||
generate_marked_text(&self.buffer_text(), &expected_selections, true);
|
||||
generate_marked_text(&self.buffer_text(), &expected_selections, true)
|
||||
.replace(" \n", "•\n");
|
||||
|
||||
self.assert_selections(expected_selections, expected_marked_text)
|
||||
}
|
||||
|
||||
@@ -561,7 +563,8 @@ impl EditorTestContext {
|
||||
) {
|
||||
let actual_selections = self.editor_selections();
|
||||
let actual_marked_text =
|
||||
generate_marked_text(&self.buffer_text(), &actual_selections, true);
|
||||
generate_marked_text(&self.buffer_text(), &actual_selections, true)
|
||||
.replace(" \n", "•\n");
|
||||
if expected_selections != actual_selections {
|
||||
pretty_assertions::assert_eq!(
|
||||
actual_marked_text,
|
||||
|
||||
@@ -246,6 +246,7 @@ impl ExampleContext {
|
||||
| ThreadEvent::StreamedAssistantThinking(_, _)
|
||||
| ThreadEvent::UsePendingTools { .. }
|
||||
| ThreadEvent::CompletionCanceled => {}
|
||||
ThreadEvent::ToolUseLimitReached => {}
|
||||
ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
|
||||
@@ -759,8 +759,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.map(|c| c.label.text)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
|
||||
@@ -38,8 +38,8 @@ use std::{
|
||||
};
|
||||
use text::Point;
|
||||
use ui::{
|
||||
ContextMenu, HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, PopoverMenu,
|
||||
PopoverMenuHandle, Tooltip, prelude::*,
|
||||
ButtonLike, ContextMenu, HighlightedLabel, Indicator, KeyBinding, ListItem, ListItemSpacing,
|
||||
PopoverMenu, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
};
|
||||
use util::{ResultExt, maybe, paths::PathWithPosition, post_inc};
|
||||
use workspace::{
|
||||
@@ -47,7 +47,10 @@ use workspace::{
|
||||
notifications::NotifyResultExt, pane,
|
||||
};
|
||||
|
||||
actions!(file_finder, [SelectPrevious, ToggleMenu]);
|
||||
actions!(
|
||||
file_finder,
|
||||
[SelectPrevious, ToggleFilterMenu, ToggleSplitMenu]
|
||||
);
|
||||
|
||||
impl ModalView for FileFinder {
|
||||
fn on_before_dismiss(
|
||||
@@ -56,7 +59,14 @@ impl ModalView for FileFinder {
|
||||
cx: &mut Context<Self>,
|
||||
) -> workspace::DismissDecision {
|
||||
let submenu_focused = self.picker.update(cx, |picker, cx| {
|
||||
picker.delegate.popover_menu_handle.is_focused(window, cx)
|
||||
picker
|
||||
.delegate
|
||||
.filter_popover_menu_handle
|
||||
.is_focused(window, cx)
|
||||
|| picker
|
||||
.delegate
|
||||
.split_popover_menu_handle
|
||||
.is_focused(window, cx)
|
||||
});
|
||||
workspace::DismissDecision::Dismiss(!submenu_focused)
|
||||
}
|
||||
@@ -212,9 +222,30 @@ impl FileFinder {
|
||||
window.dispatch_action(Box::new(menu::SelectPrevious), cx);
|
||||
}
|
||||
|
||||
fn handle_toggle_menu(&mut self, _: &ToggleMenu, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn handle_filter_toggle_menu(
|
||||
&mut self,
|
||||
_: &ToggleFilterMenu,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.picker.update(cx, |picker, cx| {
|
||||
let menu_handle = &picker.delegate.popover_menu_handle;
|
||||
let menu_handle = &picker.delegate.filter_popover_menu_handle;
|
||||
if menu_handle.is_deployed() {
|
||||
menu_handle.hide(cx);
|
||||
} else {
|
||||
menu_handle.show(window, cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_split_toggle_menu(
|
||||
&mut self,
|
||||
_: &ToggleSplitMenu,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.picker.update(cx, |picker, cx| {
|
||||
let menu_handle = &picker.delegate.split_popover_menu_handle;
|
||||
if menu_handle.is_deployed() {
|
||||
menu_handle.hide(cx);
|
||||
} else {
|
||||
@@ -345,7 +376,8 @@ impl Render for FileFinder {
|
||||
.w(modal_max_width)
|
||||
.on_modifiers_changed(cx.listener(Self::handle_modifiers_changed))
|
||||
.on_action(cx.listener(Self::handle_select_prev))
|
||||
.on_action(cx.listener(Self::handle_toggle_menu))
|
||||
.on_action(cx.listener(Self::handle_filter_toggle_menu))
|
||||
.on_action(cx.listener(Self::handle_split_toggle_menu))
|
||||
.on_action(cx.listener(Self::handle_toggle_ignored))
|
||||
.on_action(cx.listener(Self::go_to_file_split_left))
|
||||
.on_action(cx.listener(Self::go_to_file_split_right))
|
||||
@@ -371,7 +403,8 @@ pub struct FileFinderDelegate {
|
||||
history_items: Vec<FoundPath>,
|
||||
separate_history: bool,
|
||||
first_update: bool,
|
||||
popover_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
filter_popover_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
split_popover_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
focus_handle: FocusHandle,
|
||||
include_ignored: Option<bool>,
|
||||
include_ignored_refresh: Task<()>,
|
||||
@@ -758,7 +791,8 @@ impl FileFinderDelegate {
|
||||
history_items,
|
||||
separate_history,
|
||||
first_update: true,
|
||||
popover_menu_handle: PopoverMenuHandle::default(),
|
||||
filter_popover_menu_handle: PopoverMenuHandle::default(),
|
||||
split_popover_menu_handle: PopoverMenuHandle::default(),
|
||||
focus_handle: cx.focus_handle(),
|
||||
include_ignored: FileFinderSettings::get_global(cx).include_ignored,
|
||||
include_ignored_refresh: Task::ready(()),
|
||||
@@ -1137,8 +1171,13 @@ impl FileFinderDelegate {
|
||||
fn key_context(&self, window: &Window, cx: &App) -> KeyContext {
|
||||
let mut key_context = KeyContext::new_with_defaults();
|
||||
key_context.add("FileFinder");
|
||||
if self.popover_menu_handle.is_focused(window, cx) {
|
||||
key_context.add("menu_open");
|
||||
|
||||
if self.filter_popover_menu_handle.is_focused(window, cx) {
|
||||
key_context.add("filter_menu_open");
|
||||
}
|
||||
|
||||
if self.split_popover_menu_handle.is_focused(window, cx) {
|
||||
key_context.add("split_menu_open");
|
||||
}
|
||||
key_context
|
||||
}
|
||||
@@ -1492,62 +1531,112 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
)
|
||||
}
|
||||
|
||||
fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
|
||||
let context = self.focus_handle.clone();
|
||||
fn render_footer(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<AnyElement> {
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.p_2()
|
||||
.p_1p5()
|
||||
.justify_between()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.child(
|
||||
IconButton::new("toggle-ignored", IconName::Sliders)
|
||||
.on_click({
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
move |_, window, cx| {
|
||||
focus_handle.dispatch_action(&ToggleIncludeIgnored, window, cx);
|
||||
}
|
||||
PopoverMenu::new("filter-menu-popover")
|
||||
.with_handle(self.filter_popover_menu_handle.clone())
|
||||
.attach(gpui::Corner::BottomRight)
|
||||
.anchor(gpui::Corner::BottomLeft)
|
||||
.offset(gpui::Point {
|
||||
x: px(1.0),
|
||||
y: px(1.0),
|
||||
})
|
||||
.style(ButtonStyle::Subtle)
|
||||
.shape(IconButtonShape::Square)
|
||||
.toggle_state(self.include_ignored.unwrap_or(false))
|
||||
.tooltip({
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("filter-trigger", IconName::Sliders)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_size(IconSize::Small)
|
||||
.toggle_state(self.include_ignored.unwrap_or(false))
|
||||
.when(self.include_ignored.is_some(), |this| {
|
||||
this.indicator(Indicator::dot().color(Color::Info))
|
||||
}),
|
||||
{
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Filter Options",
|
||||
&ToggleFilterMenu,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
},
|
||||
)
|
||||
.menu({
|
||||
let focus_handle = focus_handle.clone();
|
||||
let include_ignored = self.include_ignored;
|
||||
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Use ignored files",
|
||||
&ToggleIncludeIgnored,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
Some(ContextMenu::build(window, cx, {
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |menu, _, _| {
|
||||
menu.context(focus_handle.clone())
|
||||
.header("Filter Options")
|
||||
.toggleable_entry(
|
||||
"Include Ignored Files",
|
||||
include_ignored.unwrap_or(false),
|
||||
ui::IconPosition::End,
|
||||
Some(ToggleIncludeIgnored.boxed_clone()),
|
||||
move |window, cx| {
|
||||
window.focus(&focus_handle);
|
||||
window.dispatch_action(
|
||||
ToggleIncludeIgnored.boxed_clone(),
|
||||
cx,
|
||||
);
|
||||
},
|
||||
)
|
||||
}
|
||||
}))
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
h_flex()
.gap_2()
.gap_0p5()
.child(
Button::new("open-selection", "Open").on_click(|_, window, cx| {
window.dispatch_action(menu::Confirm.boxed_clone(), cx)
}),
)
.child(
PopoverMenu::new("menu-popover")
.with_handle(self.popover_menu_handle.clone())
.attach(gpui::Corner::TopRight)
.anchor(gpui::Corner::BottomRight)
PopoverMenu::new("split-menu-popover")
.with_handle(self.split_popover_menu_handle.clone())
.attach(gpui::Corner::BottomRight)
.anchor(gpui::Corner::BottomLeft)
.offset(gpui::Point {
x: px(1.0),
y: px(1.0),
})
.trigger(
Button::new("actions-trigger", "Split…")
.selected_label_color(Color::Accent),
ButtonLike::new("split-trigger")
.child(Label::new("Split…"))
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
.children(
KeyBinding::for_action_in(
&ToggleSplitMenu,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
),
)
.menu({
let focus_handle = focus_handle.clone();

move |window, cx| {
Some(ContextMenu::build(window, cx, {
let context = context.clone();
let focus_handle = focus_handle.clone();
move |menu, _, _| {
menu.context(context)
menu.context(focus_handle.clone())
.action(
"Split Left",
pane::SplitLeft.boxed_clone(),
@@ -1565,6 +1654,21 @@ impl PickerDelegate for FileFinderDelegate {
}))
}
}),
)
.child(
Button::new("open-selection", "Open")
.key_binding(
KeyBinding::for_action_in(
&menu::Confirm,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_, window, cx| {
window.dispatch_action(menu::Confirm.boxed_clone(), cx)
}),
),
)
.into_any(),

@@ -39,15 +39,32 @@ pub struct UserCaretPosition {
}

impl UserCaretPosition {
pub fn at_selection_end(selection: &Selection<Point>, snapshot: &MultiBufferSnapshot) -> Self {
pub(crate) fn at_selection_end(
selection: &Selection<Point>,
snapshot: &MultiBufferSnapshot,
) -> Self {
let selection_end = selection.head();
let line_start = Point::new(selection_end.row, 0);
let chars_to_last_position = snapshot
.text_summary_for_range::<text::TextSummary, _>(line_start..selection_end)
.chars as u32;
let (line, character) = if let Some((buffer_snapshot, point, _)) =
snapshot.point_to_buffer_point(selection_end)
{
let line_start = Point::new(point.row, 0);

let chars_to_last_position = buffer_snapshot
.text_summary_for_range::<text::TextSummary, _>(line_start..point)
.chars as u32;
(line_start.row, chars_to_last_position)
} else {
let line_start = Point::new(selection_end.row, 0);

let chars_to_last_position = snapshot
.text_summary_for_range::<text::TextSummary, _>(line_start..selection_end)
.chars as u32;
(selection_end.row, chars_to_last_position)
};

Self {
line: NonZeroU32::new(selection_end.row + 1).expect("added 1"),
character: NonZeroU32::new(chars_to_last_position + 1).expect("added 1"),
line: NonZeroU32::new(line + 1).expect("added 1"),
character: NonZeroU32::new(character + 1).expect("added 1"),
}
}
}

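The hunk above derives the line and character from the underlying buffer point when one is available, while keeping the 1-based, non-zero representation. A minimal, self-contained sketch of that final conversion step, with simplified stand-in types rather than Zed's actual `UserCaretPosition`:

```rust
// Stand-in for the 1-based caret position convention shown in the hunk above.
use std::num::NonZeroU32;

struct CaretPosition {
    line: NonZeroU32,
    character: NonZeroU32,
}

// `row` and `chars_before_caret` are hypothetical 0-based inputs.
fn to_user_position(row: u32, chars_before_caret: u32) -> CaretPosition {
    CaretPosition {
        // Adding 1 makes the value non-zero, so `expect` cannot fail here.
        line: NonZeroU32::new(row + 1).expect("added 1"),
        character: NonZeroU32::new(chars_before_caret + 1).expect("added 1"),
    }
}

fn main() {
    let pos = to_user_position(0, 4);
    assert_eq!(pos.line.get(), 1);
    assert_eq!(pos.character.get(), 5);
}
```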
@@ -202,6 +202,7 @@ pub enum Part {
InlineDataPart(InlineDataPart),
FunctionCallPart(FunctionCallPart),
FunctionResponsePart(FunctionResponsePart),
ThoughtPart(ThoughtPart),
}

#[derive(Debug, Serialize, Deserialize)]
@@ -235,6 +236,13 @@ pub struct FunctionResponsePart {
pub function_response: FunctionResponse,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ThoughtPart {
pub thought: bool,
pub thought_signature: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CitationSource {
@@ -281,6 +289,22 @@ pub struct UsageMetadata {
pub total_token_count: Option<usize>,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ThinkingConfig {
pub thinking_budget: u32,
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
pub enum GoogleModelMode {
#[default]
Default,
Thinking {
budget_tokens: Option<u32>,
},
}

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GenerationConfig {
@@ -296,6 +320,8 @@ pub struct GenerationConfig {
pub top_p: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub top_k: Option<usize>,
#[serde(skip_serializing_if = "Option::is_none")]
pub thinking_config: Option<ThinkingConfig>,
}

#[derive(Debug, Serialize, Deserialize)]
@@ -488,6 +514,8 @@ pub enum Model {
/// The name displayed in the UI, such as in the assistant panel model dropdown menu.
display_name: Option<String>,
max_tokens: usize,
#[serde(default)]
mode: GoogleModelMode,
},
}

@@ -544,6 +572,21 @@ impl Model {
Model::Custom { max_tokens, .. } => *max_tokens,
}
}

pub fn mode(&self) -> GoogleModelMode {
match self {
Self::Gemini15Pro
| Self::Gemini15Flash
| Self::Gemini20Pro
| Self::Gemini20Flash
| Self::Gemini20FlashThinking
| Self::Gemini20FlashLite
| Self::Gemini25ProExp0325
| Self::Gemini25ProPreview0325
| Self::Gemini25FlashPreview0417 => GoogleModelMode::Default,
Self::Custom { mode, .. } => *mode,
}
}
}

impl std::fmt::Display for Model {

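The new `ThinkingConfig` and the optional `thinking_config` field on `GenerationConfig` rely on serde's camelCase renaming and `skip_serializing_if`. A reduced sketch of how that serializes, assuming only `serde` and `serde_json`; the field set is trimmed and not the full struct from the hunk:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ThinkingConfig {
    thinking_budget: u32,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GenerationConfig {
    #[serde(skip_serializing_if = "Option::is_none")]
    thinking_config: Option<ThinkingConfig>,
}

fn main() {
    let with_thinking = GenerationConfig {
        thinking_config: Some(ThinkingConfig { thinking_budget: 1024 }),
    };
    // Renders as {"thinkingConfig":{"thinkingBudget":1024}} because of rename_all.
    println!("{}", serde_json::to_string(&with_thinking).unwrap());

    let without = GenerationConfig { thinking_config: None };
    // The field is omitted entirely thanks to skip_serializing_if.
    assert_eq!(serde_json::to_string(&without).unwrap(), "{}");
}
```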
@@ -147,14 +147,49 @@ impl Keymap {
});

let mut bindings: SmallVec<[(KeyBinding, usize); 1]> = SmallVec::new();
let mut is_pending = None;

// (pending, is_no_action, depth, keystrokes)
let mut pending_info_opt: Option<(bool, bool, usize, &[Keystroke])> = None;

'outer: for (binding, pending) in possibilities {
for depth in (0..=context_stack.len()).rev() {
if self.binding_enabled(binding, &context_stack[0..depth]) {
if is_pending.is_none() {
is_pending = Some(pending);
let is_no_action = is_no_action(&*binding.action);
// We only want to consider a binding pending if it has an action
// This, however, means that if we have both a NoAction binding and a binding
// with an action at the same depth, we should still set is_pending to true.
if let Some(pending_info) = pending_info_opt.as_mut() {
let (
already_pending,
pending_is_no_action,
pending_depth,
pending_keystrokes,
) = *pending_info;

// We only want to change the pending status if it's not already pending AND if
// the existing pending status was set by a NoAction binding. This avoids a NoAction
// binding erroneously setting the pending status to true when a binding with an action
// already set it to false
//
// We also want to change the pending status if the keystrokes don't match,
// meaning it's different keystrokes than the NoAction that set pending to false
if pending
&& !already_pending
&& pending_is_no_action
&& (pending_depth == depth
|| pending_keystrokes != binding.keystrokes())
{
pending_info.0 = !is_no_action;
}
} else {
pending_info_opt = Some((
pending && !is_no_action,
is_no_action,
depth,
binding.keystrokes(),
));
}

if !pending {
bindings.push((binding.clone(), depth));
continue 'outer;
@@ -174,7 +209,7 @@ impl Keymap {
})
.collect();

(bindings, is_pending.unwrap_or_default())
(bindings, pending_info_opt.unwrap_or_default().0)
}

/// Check if the given binding is enabled, given a certain key context.
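The comments above describe when a multi-keystroke prefix should still count as pending after a `NoAction` binding has disabled it. A standalone sketch of that decision rule with simplified stand-in types, not gpui's real `Keymap` internals:

```rust
// Each candidate is a binding that matched the typed prefix in some context depth.
#[derive(Clone)]
struct CandidateBinding {
    keystrokes: Vec<String>,
    is_no_action: bool,
    depth: usize,
    pending: bool,
}

fn resolve_pending(candidates: &[CandidateBinding]) -> bool {
    let mut seen_first = false;
    let mut pending = false;
    let mut set_by_no_action = false;
    let mut first_depth = 0usize;
    let mut first_keystrokes: Vec<String> = Vec::new();

    for binding in candidates {
        if !seen_first {
            // The first enabled match decides the initial verdict; a NoAction
            // binding never counts as pending on its own.
            seen_first = true;
            pending = binding.pending && !binding.is_no_action;
            set_by_no_action = binding.is_no_action;
            first_depth = binding.depth;
            first_keystrokes = binding.keystrokes.clone();
        } else if binding.pending
            && !pending
            && set_by_no_action
            && (first_depth == binding.depth || first_keystrokes != binding.keystrokes)
        {
            // A real binding can overturn a "not pending" verdict that was set by NoAction,
            // either at the same depth or for different keystrokes.
            pending = !binding.is_no_action;
        }
    }
    pending
}
```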
@@ -310,6 +345,102 @@ mod tests {
);
}

#[test]
/// Tests for https://github.com/zed-industries/zed/issues/30259
fn test_multiple_keystroke_binding_disabled() {
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];

let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space = || Keystroke::parse("space").unwrap();
let w = || Keystroke::parse("w").unwrap();

let space_w = [space(), w()];
let space_w_w = [space(), w(), w()];

let workspace_context = || [KeyContext::parse("workspace").unwrap()];

let editor_workspace_context = || {
[
KeyContext::parse("workspace").unwrap(),
KeyContext::parse("editor").unwrap(),
]
};

// Ensure `space` results in pending input on the workspace, but not editor
let space_workspace = keymap.bindings_for_input(&[space()], &workspace_context());
assert!(space_workspace.0.is_empty());
assert_eq!(space_workspace.1, true);

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, false);

// Ensure `space w` results in pending input on the workspace, but not editor
let space_w_workspace = keymap.bindings_for_input(&space_w, &workspace_context());
assert!(space_w_workspace.0.is_empty());
assert_eq!(space_w_workspace.1, true);

let space_w_editor = keymap.bindings_for_input(&space_w, &editor_workspace_context());
assert!(space_w_editor.0.is_empty());
assert_eq!(space_w_editor.1, false);

// Ensure `space w w` results in the binding in the workspace, but not in the editor
let space_w_w_workspace = keymap.bindings_for_input(&space_w_w, &workspace_context());
assert!(!space_w_w_workspace.0.is_empty());
assert_eq!(space_w_w_workspace.1, false);

let space_w_w_editor = keymap.bindings_for_input(&space_w_w, &editor_workspace_context());
assert!(space_w_w_editor.0.is_empty());
assert_eq!(space_w_w_editor.1, false);

// Now test what happens if we have another binding defined AFTER the NoAction
// that should result in pending
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
KeyBinding::new("space w x", ActionAlpha {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, true);

// Now test what happens if we have another binding defined BEFORE the NoAction
// that should result in pending
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w x", ActionAlpha {}, Some("editor")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, true);

// Now test what happens if we have another binding defined at a higher context
// that should result in pending
let bindings = [
KeyBinding::new("space w w", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w x", ActionAlpha {}, Some("workspace")),
KeyBinding::new("space w w", NoAction {}, Some("editor")),
];
let mut keymap = Keymap::default();
keymap.add_bindings(bindings.clone());

let space_editor = keymap.bindings_for_input(&[space()], &editor_workspace_context());
assert!(space_editor.0.is_empty());
assert_eq!(space_editor.1, true);
}

#[test]
fn test_bindings_for_action() {
let bindings = [

@@ -11,7 +11,7 @@ use gpui::{
InteractiveElement, IntoElement, ObjectFit, ParentElement, Render, Styled, Task, WeakEntity,
Window, canvas, div, fill, img, opaque_grey, point, size,
};
use language::File as _;
use language::{DiskState, File as _};
use persistence::IMAGE_VIEWER;
use project::{ImageItem, Project, ProjectPath, image_store::ImageItemEvent};
use settings::Settings;
@@ -191,6 +191,10 @@ impl Item for ImageView {
focus_handle: cx.focus_handle(),
}))
}

fn has_deleted_file(&self, cx: &App) -> bool {
self.image_item.read(cx).file.disk_state() == DiskState::Deleted
}
}

fn breadcrumbs_text_for_image(project: &Project, image: &ImageItem, cx: &App) -> String {

@@ -11,7 +11,7 @@ use language::{
DiagnosticSeverity, LanguageServerId, Point, ToOffset as _, ToPoint as _,
};
use project::lsp_store::CompletionDocumentation;
use project::{Completion, CompletionSource, Project, ProjectPath};
use project::{Completion, CompletionResponse, CompletionSource, Project, ProjectPath};
use std::cell::RefCell;
use std::fmt::Write as _;
use std::ops::Range;
@@ -641,18 +641,18 @@ impl CompletionProvider for RustStyleCompletionProvider {
_: editor::CompletionContext,
_window: &mut Window,
cx: &mut Context<Editor>,
) -> Task<Result<Option<Vec<project::Completion>>>> {
) -> Task<Result<Vec<CompletionResponse>>> {
let Some(replace_range) = completion_replace_range(&buffer.read(cx).snapshot(), &position)
else {
return Task::ready(Ok(Some(Vec::new())));
return Task::ready(Ok(Vec::new()));
};

self.div_inspector.update(cx, |div_inspector, _cx| {
div_inspector.rust_completion_replace_range = Some(replace_range.clone());
});

Task::ready(Ok(Some(
STYLE_METHODS
Task::ready(Ok(vec![CompletionResponse {
completions: STYLE_METHODS
.iter()
.map(|(_, method)| Completion {
replace_range: replace_range.clone(),
@@ -667,7 +667,8 @@ impl CompletionProvider for RustStyleCompletionProvider {
confirm: None,
})
.collect(),
)))
is_incomplete: false,
}]))
}

fn resolve_completions(

@@ -51,6 +51,7 @@ schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
shellexpand.workspace = true
smallvec.workspace = true
smol.workspace = true
streaming-iterator.workspace = true

@@ -34,7 +34,7 @@ pub use highlight_map::HighlightMap;
use http_client::HttpClient;
pub use language_registry::{LanguageName, LoadedLanguage};
use lsp::{CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServerBinaryOptions};
pub use manifest::{ManifestName, ManifestProvider, ManifestQuery};
pub use manifest::{ManifestDelegate, ManifestName, ManifestProvider, ManifestQuery};
use parking_lot::Mutex;
use regex::Regex;
use schemars::{
@@ -323,7 +323,6 @@ pub trait LspAdapterDelegate: Send + Sync {
fn http_client(&self) -> Arc<dyn HttpClient>;
fn worktree_id(&self) -> WorktreeId;
fn worktree_root_path(&self) -> &Path;
fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool;
fn update_status(&self, language: LanguageServerName, status: BinaryStatus);
fn registered_lsp_adapters(&self) -> Vec<Arc<dyn LspAdapter>>;
async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option<Arc<Path>>;

@@ -23,6 +23,7 @@ use serde_json::Value;
use settings::{
Settings, SettingsLocation, SettingsSources, SettingsStore, add_references_to_properties,
};
use shellexpand;
use std::{borrow::Cow, num::NonZeroU32, path::Path, sync::Arc};
use util::serde::default_true;

@@ -1331,9 +1332,10 @@ impl settings::Settings for AllLanguageSettings {
disabled_globs: completion_globs
.iter()
.filter_map(|g| {
let expanded_g = shellexpand::tilde(g).into_owned();
Some(DisabledGlob {
matcher: globset::Glob::new(g).ok()?.compile_matcher(),
is_absolute: Path::new(g).is_absolute(),
matcher: globset::Glob::new(&expanded_g).ok()?.compile_matcher(),
is_absolute: Path::new(&expanded_g).is_absolute(),
})
})
.collect(),
@@ -1712,10 +1714,12 @@ mod tests {
};
#[cfg(windows)]
let glob_str = glob_str.as_str();

let expanded_glob_str = shellexpand::tilde(glob_str).into_owned();
DisabledGlob {
matcher: globset::Glob::new(glob_str).unwrap().compile_matcher(),
is_absolute: Path::new(glob_str).is_absolute(),
matcher: globset::Glob::new(&expanded_glob_str)
.unwrap()
.compile_matcher(),
is_absolute: Path::new(&expanded_glob_str).is_absolute(),
}
})
.collect(),
@@ -1811,6 +1815,12 @@ mod tests {
let dot_env_file = make_test_file(&[".env"]);
let settings = build_settings(&[".env"]);
assert!(!settings.enabled_for_file(&dot_env_file, &cx));

// Test tilde expansion
let home = shellexpand::tilde("~").into_owned().to_string();
let home_file = make_test_file(&[&home, "test.rs"]);
let settings = build_settings(&["~/test.rs"]);
assert!(!settings.enabled_for_file(&home_file, &cx));
}

#[test]

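The settings change above runs completion globs through `shellexpand::tilde` before compiling them, so `~/...` patterns match absolute paths. A small sketch using the same crates the hunk relies on (`shellexpand`, `globset`); the pattern and paths are illustrative only, and the home directory is assumed to resolve:

```rust
use std::path::Path;

// Expand a leading "~" and compile the result into a glob matcher,
// mirroring the DisabledGlob construction in the hunk above.
fn compile_glob(pattern: &str) -> Option<(globset::GlobMatcher, bool)> {
    let expanded = shellexpand::tilde(pattern).into_owned();
    let matcher = globset::Glob::new(&expanded).ok()?.compile_matcher();
    let is_absolute = Path::new(&expanded).is_absolute();
    Some((matcher, is_absolute))
}

fn main() {
    if let Some((matcher, is_absolute)) = compile_glob("~/test.rs") {
        let home = shellexpand::tilde("~").into_owned();
        println!("matches: {}", matcher.is_match(format!("{home}/test.rs")));
        println!("is_absolute: {is_absolute}");
    }
}
```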
@@ -1,8 +1,7 @@
use std::{borrow::Borrow, path::Path, sync::Arc};

use gpui::SharedString;

use crate::LspAdapterDelegate;
use settings::WorktreeId;

#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ManifestName(SharedString);
@@ -39,10 +38,15 @@ pub struct ManifestQuery {
/// Path to the file, relative to worktree root.
pub path: Arc<Path>,
pub depth: usize,
pub delegate: Arc<dyn LspAdapterDelegate>,
pub delegate: Arc<dyn ManifestDelegate>,
}

pub trait ManifestProvider {
fn name(&self) -> ManifestName;
fn search(&self, query: ManifestQuery) -> Option<Arc<Path>>;
}

pub trait ManifestDelegate: Send + Sync {
fn worktree_id(&self) -> WorktreeId;
fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool;
}

@@ -14,7 +14,7 @@ use collections::HashMap;
use gpui::{AsyncApp, SharedString};
use settings::WorktreeId;

use crate::LanguageName;
use crate::{LanguageName, ManifestName};

/// Represents a single toolchain.
#[derive(Clone, Debug)]
@@ -44,10 +44,13 @@ pub trait ToolchainLister: Send + Sync {
async fn list(
&self,
worktree_root: PathBuf,
subroot_relative_path: Option<Arc<Path>>,
project_env: Option<HashMap<String, String>>,
) -> ToolchainList;
// Returns a term which we should use in UI to refer to a toolchain.
fn term(&self) -> SharedString;
/// Returns the name of the manifest file for this toolchain.
fn manifest_name(&self) -> ManifestName;
}

#[async_trait(?Send)]

@@ -4,6 +4,7 @@ use client::{Client, UserStore, zed_urls};
use futures::{
AsyncBufReadExt, FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream,
};
use google_ai::GoogleModelMode;
use gpui::{
AnyElement, AnyView, App, AsyncApp, Context, Entity, SemanticVersion, Subscription, Task,
};
@@ -750,7 +751,8 @@ impl LanguageModel for CloudLanguageModel {
let client = self.client.clone();
let llm_api_token = self.llm_api_token.clone();
let model_id = self.model.id.to_string();
let generate_content_request = into_google(request, model_id.clone());
let generate_content_request =
into_google(request, model_id.clone(), GoogleModelMode::Default);
async move {
let http_client = &client.http_client();
let token = llm_api_token.acquire(&client).await?;
@@ -922,7 +924,8 @@ impl LanguageModel for CloudLanguageModel {
}
zed_llm_client::LanguageModelProvider::Google => {
let client = self.client.clone();
let request = into_google(request, self.model.id.to_string());
let request =
into_google(request, self.model.id.to_string(), GoogleModelMode::Default);
let llm_api_token = self.llm_api_token.clone();
let future = self.request_limiter.stream(async move {
let PerformLlmCompletionResponse {

@@ -1,7 +1,8 @@
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use collections::BTreeMap;
|
||||
use collections::{BTreeMap, HashMap};
|
||||
use credentials_provider::CredentialsProvider;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
use futures::Stream;
|
||||
use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
|
||||
use gpui::{
|
||||
AnyView, AppContext as _, AsyncApp, Entity, FontStyle, Subscription, Task, TextStyle,
|
||||
@@ -12,11 +13,14 @@ use language_model::{
|
||||
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
|
||||
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
|
||||
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
|
||||
LanguageModelToolChoice, RateLimiter, Role,
|
||||
LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
|
||||
RateLimiter, Role, StopReason,
|
||||
};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::pin::Pin;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{Icon, IconName, List, prelude::*};
|
||||
@@ -28,6 +32,13 @@ const PROVIDER_ID: &str = "deepseek";
|
||||
const PROVIDER_NAME: &str = "DeepSeek";
|
||||
const DEEPSEEK_API_KEY_VAR: &str = "DEEPSEEK_API_KEY";
|
||||
|
||||
#[derive(Default)]
|
||||
struct RawToolCall {
|
||||
id: String,
|
||||
name: String,
|
||||
arguments: String,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug, PartialEq)]
|
||||
pub struct DeepSeekSettings {
|
||||
pub api_url: String,
|
||||
@@ -280,11 +291,11 @@ impl LanguageModel for DeepSeekLanguageModel {
|
||||
}
|
||||
|
||||
fn supports_tools(&self) -> bool {
|
||||
false
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool {
|
||||
false
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_images(&self) -> bool {
|
||||
@@ -339,35 +350,12 @@ impl LanguageModel for DeepSeekLanguageModel {
|
||||
BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
|
||||
>,
|
||||
> {
|
||||
let request = into_deepseek(
|
||||
request,
|
||||
self.model.id().to_string(),
|
||||
self.max_output_tokens(),
|
||||
);
|
||||
let request = into_deepseek(request, &self.model, self.max_output_tokens());
|
||||
let stream = self.stream_completion(request, cx);
|
||||
|
||||
async move {
|
||||
let stream = stream.await?;
|
||||
Ok(stream
|
||||
.map(|result| {
|
||||
result
|
||||
.and_then(|response| {
|
||||
response
|
||||
.choices
|
||||
.first()
|
||||
.context("Empty response")
|
||||
.map(|choice| {
|
||||
choice
|
||||
.delta
|
||||
.content
|
||||
.clone()
|
||||
.unwrap_or_default()
|
||||
.map(LanguageModelCompletionEvent::Text)
|
||||
})
|
||||
})
|
||||
.map_err(LanguageModelCompletionError::Other)
|
||||
})
|
||||
.boxed())
|
||||
let mapper = DeepSeekEventMapper::new();
|
||||
Ok(mapper.map_stream(stream.await?).boxed())
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
@@ -375,69 +363,67 @@ impl LanguageModel for DeepSeekLanguageModel {
|
||||
|
||||
pub fn into_deepseek(
|
||||
request: LanguageModelRequest,
|
||||
model: String,
|
||||
model: &deepseek::Model,
|
||||
max_output_tokens: Option<u32>,
|
||||
) -> deepseek::Request {
|
||||
let is_reasoner = model == "deepseek-reasoner";
|
||||
let is_reasoner = *model == deepseek::Model::Reasoner;
|
||||
|
||||
let len = request.messages.len();
|
||||
let merged_messages =
|
||||
request
|
||||
.messages
|
||||
.into_iter()
|
||||
.fold(Vec::with_capacity(len), |mut acc, msg| {
|
||||
let role = msg.role;
|
||||
let content = msg.string_contents();
|
||||
let mut messages = Vec::new();
|
||||
for message in request.messages {
|
||||
for content in message.content {
|
||||
match content {
|
||||
MessageContent::Text(text) | MessageContent::Thinking { text, .. } => messages
|
||||
.push(match message.role {
|
||||
Role::User => deepseek::RequestMessage::User { content: text },
|
||||
Role::Assistant => deepseek::RequestMessage::Assistant {
|
||||
content: Some(text),
|
||||
tool_calls: Vec::new(),
|
||||
},
|
||||
Role::System => deepseek::RequestMessage::System { content: text },
|
||||
}),
|
||||
MessageContent::RedactedThinking(_) => {}
|
||||
MessageContent::Image(_) => {}
|
||||
MessageContent::ToolUse(tool_use) => {
|
||||
let tool_call = deepseek::ToolCall {
|
||||
id: tool_use.id.to_string(),
|
||||
content: deepseek::ToolCallContent::Function {
|
||||
function: deepseek::FunctionContent {
|
||||
name: tool_use.name.to_string(),
|
||||
arguments: serde_json::to_string(&tool_use.input)
|
||||
.unwrap_or_default(),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
if is_reasoner {
|
||||
if let Some(last_msg) = acc.last_mut() {
|
||||
match (last_msg, role) {
|
||||
(deepseek::RequestMessage::User { content: last }, Role::User) => {
|
||||
last.push(' ');
|
||||
last.push_str(&content);
|
||||
return acc;
|
||||
}
|
||||
|
||||
(
|
||||
deepseek::RequestMessage::Assistant {
|
||||
content: last_content,
|
||||
..
|
||||
},
|
||||
Role::Assistant,
|
||||
) => {
|
||||
*last_content = last_content
|
||||
.take()
|
||||
.map(|c| {
|
||||
let mut s =
|
||||
String::with_capacity(c.len() + content.len() + 1);
|
||||
s.push_str(&c);
|
||||
s.push(' ');
|
||||
s.push_str(&content);
|
||||
s
|
||||
})
|
||||
.or(Some(content));
|
||||
|
||||
return acc;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if let Some(deepseek::RequestMessage::Assistant { tool_calls, .. }) =
|
||||
messages.last_mut()
|
||||
{
|
||||
tool_calls.push(tool_call);
|
||||
} else {
|
||||
messages.push(deepseek::RequestMessage::Assistant {
|
||||
content: None,
|
||||
tool_calls: vec![tool_call],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
acc.push(match role {
|
||||
Role::User => deepseek::RequestMessage::User { content },
|
||||
Role::Assistant => deepseek::RequestMessage::Assistant {
|
||||
content: Some(content),
|
||||
tool_calls: Vec::new(),
|
||||
},
|
||||
Role::System => deepseek::RequestMessage::System { content },
|
||||
});
|
||||
acc
|
||||
});
|
||||
MessageContent::ToolResult(tool_result) => {
|
||||
match &tool_result.content {
|
||||
LanguageModelToolResultContent::Text(text) => {
|
||||
messages.push(deepseek::RequestMessage::Tool {
|
||||
content: text.to_string(),
|
||||
tool_call_id: tool_result.tool_use_id.to_string(),
|
||||
});
|
||||
}
|
||||
LanguageModelToolResultContent::Image(_) => {}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deepseek::Request {
|
||||
model,
|
||||
messages: merged_messages,
|
||||
model: model.id().to_string(),
|
||||
messages,
|
||||
stream: true,
|
||||
max_tokens: max_output_tokens,
|
||||
temperature: if is_reasoner {
|
||||
@@ -460,6 +446,103 @@ pub fn into_deepseek(
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DeepSeekEventMapper {
|
||||
tool_calls_by_index: HashMap<usize, RawToolCall>,
|
||||
}
|
||||
|
||||
impl DeepSeekEventMapper {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
tool_calls_by_index: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map_stream(
|
||||
mut self,
|
||||
events: Pin<Box<dyn Send + Stream<Item = Result<deepseek::StreamResponse>>>>,
|
||||
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
|
||||
{
|
||||
events.flat_map(move |event| {
|
||||
futures::stream::iter(match event {
|
||||
Ok(event) => self.map_event(event),
|
||||
Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))],
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn map_event(
|
||||
&mut self,
|
||||
event: deepseek::StreamResponse,
|
||||
) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
|
||||
let Some(choice) = event.choices.first() else {
|
||||
return vec![Err(LanguageModelCompletionError::Other(anyhow!(
|
||||
"Response contained no choices"
|
||||
)))];
|
||||
};
|
||||
|
||||
let mut events = Vec::new();
|
||||
if let Some(content) = choice.delta.content.clone() {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
|
||||
}
|
||||
|
||||
if let Some(tool_calls) = choice.delta.tool_calls.as_ref() {
|
||||
for tool_call in tool_calls {
|
||||
let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();
|
||||
|
||||
if let Some(tool_id) = tool_call.id.clone() {
|
||||
entry.id = tool_id;
|
||||
}
|
||||
|
||||
if let Some(function) = tool_call.function.as_ref() {
|
||||
if let Some(name) = function.name.clone() {
|
||||
entry.name = name;
|
||||
}
|
||||
|
||||
if let Some(arguments) = function.arguments.clone() {
|
||||
entry.arguments.push_str(&arguments);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match choice.finish_reason.as_deref() {
|
||||
Some("stop") => {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
|
||||
}
|
||||
Some("tool_calls") => {
|
||||
events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
|
||||
match serde_json::Value::from_str(&tool_call.arguments) {
|
||||
Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
|
||||
LanguageModelToolUse {
|
||||
id: tool_call.id.clone().into(),
|
||||
name: tool_call.name.as_str().into(),
|
||||
is_input_complete: true,
|
||||
input,
|
||||
raw_input: tool_call.arguments.clone(),
|
||||
},
|
||||
)),
|
||||
Err(error) => Err(LanguageModelCompletionError::BadInputJson {
|
||||
id: tool_call.id.into(),
|
||||
tool_name: tool_call.name.as_str().into(),
|
||||
raw_input: tool_call.arguments.into(),
|
||||
json_parse_error: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}));
|
||||
|
||||
events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
|
||||
}
|
||||
Some(stop_reason) => {
|
||||
log::error!("Unexpected DeepSeek stop_reason: {stop_reason:?}",);
|
||||
events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
events
|
||||
}
|
||||
}
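The `DeepSeekEventMapper` above accumulates streamed tool-call fragments by index and only parses the collected arguments as JSON once the stream reports a `tool_calls` finish reason. A standalone sketch of that accumulation strategy using only `std` and `serde_json`; the types are simplified stand-ins, not the real provider types:

```rust
use std::collections::HashMap;

#[derive(Default, Debug)]
struct RawToolCall {
    id: String,
    name: String,
    arguments: String,
}

#[derive(Default)]
struct ToolCallAccumulator {
    by_index: HashMap<usize, RawToolCall>,
}

impl ToolCallAccumulator {
    // Each streamed delta may carry any subset of id / name / argument text.
    fn push_fragment(&mut self, index: usize, id: Option<&str>, name: Option<&str>, args: Option<&str>) {
        let entry = self.by_index.entry(index).or_default();
        if let Some(id) = id {
            entry.id = id.to_string();
        }
        if let Some(name) = name {
            entry.name = name.to_string();
        }
        if let Some(args) = args {
            entry.arguments.push_str(args);
        }
    }

    // Called once the finish reason indicates the tool calls are complete.
    fn finish(&mut self) -> Vec<Result<(RawToolCall, serde_json::Value), serde_json::Error>> {
        self.by_index
            .drain()
            .map(|(_, call)| serde_json::from_str(&call.arguments).map(|input| (call, input)))
            .collect()
    }
}

fn main() {
    let mut acc = ToolCallAccumulator::default();
    acc.push_fragment(0, Some("call_1"), Some("open_file"), Some("{\"path\":"));
    acc.push_fragment(0, None, None, Some("\"src/main.rs\"}"));
    for parsed in acc.finish() {
        println!("{parsed:?}");
    }
}
```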
|
||||
|
||||
struct ConfigurationView {
|
||||
api_key_editor: Entity<Editor>,
|
||||
state: Entity<State>,
|
||||
|
||||
@@ -4,7 +4,8 @@ use credentials_provider::CredentialsProvider;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
|
||||
use google_ai::{
|
||||
FunctionDeclaration, GenerateContentResponse, Part, SystemInstruction, UsageMetadata,
|
||||
FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction,
|
||||
ThinkingConfig, UsageMetadata,
|
||||
};
|
||||
use gpui::{
|
||||
AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
|
||||
@@ -45,11 +46,41 @@ pub struct GoogleSettings {
|
||||
pub available_models: Vec<AvailableModel>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(tag = "type", rename_all = "lowercase")]
|
||||
pub enum ModelMode {
|
||||
#[default]
|
||||
Default,
|
||||
Thinking {
|
||||
/// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
|
||||
budget_tokens: Option<u32>,
|
||||
},
|
||||
}
|
||||
|
||||
impl From<ModelMode> for GoogleModelMode {
|
||||
fn from(value: ModelMode) -> Self {
|
||||
match value {
|
||||
ModelMode::Default => GoogleModelMode::Default,
|
||||
ModelMode::Thinking { budget_tokens } => GoogleModelMode::Thinking { budget_tokens },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<GoogleModelMode> for ModelMode {
|
||||
fn from(value: GoogleModelMode) -> Self {
|
||||
match value {
|
||||
GoogleModelMode::Default => ModelMode::Default,
|
||||
GoogleModelMode::Thinking { budget_tokens } => ModelMode::Thinking { budget_tokens },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct AvailableModel {
|
||||
name: String,
|
||||
display_name: Option<String>,
|
||||
max_tokens: usize,
|
||||
mode: Option<ModelMode>,
|
||||
}
|
||||
|
||||
pub struct GoogleLanguageModelProvider {
|
||||
@@ -216,6 +247,7 @@ impl LanguageModelProvider for GoogleLanguageModelProvider {
|
||||
name: model.name.clone(),
|
||||
display_name: model.display_name.clone(),
|
||||
max_tokens: model.max_tokens,
|
||||
mode: model.mode.unwrap_or_default().into(),
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -343,7 +375,7 @@ impl LanguageModel for GoogleLanguageModel {
|
||||
cx: &App,
|
||||
) -> BoxFuture<'static, Result<usize>> {
|
||||
let model_id = self.model.id().to_string();
|
||||
let request = into_google(request, model_id.clone());
|
||||
let request = into_google(request, model_id.clone(), self.model.mode());
|
||||
let http_client = self.http_client.clone();
|
||||
let api_key = self.state.read(cx).api_key.clone();
|
||||
|
||||
@@ -379,7 +411,7 @@ impl LanguageModel for GoogleLanguageModel {
|
||||
>,
|
||||
>,
|
||||
> {
|
||||
let request = into_google(request, self.model.id().to_string());
|
||||
let request = into_google(request, self.model.id().to_string(), self.model.mode());
|
||||
let request = self.stream_completion(request, cx);
|
||||
let future = self.request_limiter.stream(async move {
|
||||
let response = request
|
||||
@@ -394,6 +426,7 @@ impl LanguageModel for GoogleLanguageModel {
|
||||
pub fn into_google(
|
||||
mut request: LanguageModelRequest,
|
||||
model_id: String,
|
||||
mode: GoogleModelMode,
|
||||
) -> google_ai::GenerateContentRequest {
|
||||
fn map_content(content: Vec<MessageContent>) -> Vec<Part> {
|
||||
content
|
||||
@@ -504,6 +537,12 @@ pub fn into_google(
|
||||
stop_sequences: Some(request.stop),
|
||||
max_output_tokens: None,
|
||||
temperature: request.temperature.map(|t| t as f64).or(Some(1.0)),
|
||||
thinking_config: match mode {
|
||||
GoogleModelMode::Thinking { budget_tokens } => {
|
||||
budget_tokens.map(|thinking_budget| ThinkingConfig { thinking_budget })
|
||||
}
|
||||
GoogleModelMode::Default => None,
|
||||
},
|
||||
top_p: None,
|
||||
top_k: None,
|
||||
}),
|
||||
@@ -620,6 +659,7 @@ impl GoogleEventMapper {
|
||||
)));
|
||||
}
|
||||
Part::FunctionResponsePart(_) => {}
|
||||
Part::ThoughtPart(_) => {}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,14 +4,11 @@ use futures::{Stream, TryFutureExt, stream};
|
||||
use gpui::{AnyView, App, AsyncApp, Context, Subscription, Task};
|
||||
use http_client::HttpClient;
|
||||
use language_model::{
|
||||
AuthenticateError, LanguageModelCompletionError, LanguageModelCompletionEvent,
|
||||
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
|
||||
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
|
||||
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
|
||||
LanguageModelRequestTool, LanguageModelToolChoice, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, StopReason,
|
||||
};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider,
|
||||
LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
|
||||
LanguageModelRequest, RateLimiter, Role,
|
||||
LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason,
|
||||
};
|
||||
use ollama::{
|
||||
ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OllamaFunctionTool,
|
||||
@@ -54,6 +51,10 @@ pub struct AvailableModel {
|
||||
pub keep_alive: Option<KeepAlive>,
|
||||
/// Whether the model supports tools
|
||||
pub supports_tools: Option<bool>,
|
||||
/// Whether the model supports vision
|
||||
pub supports_images: Option<bool>,
|
||||
/// Whether to enable think mode
|
||||
pub supports_thinking: Option<bool>,
|
||||
}
|
||||
|
||||
pub struct OllamaLanguageModelProvider {
|
||||
@@ -99,6 +100,8 @@ impl State {
|
||||
None,
|
||||
None,
|
||||
Some(capabilities.supports_tools()),
|
||||
Some(capabilities.supports_vision()),
|
||||
Some(capabilities.supports_thinking()),
|
||||
);
|
||||
Ok(ollama_model)
|
||||
}
|
||||
@@ -219,6 +222,8 @@ impl LanguageModelProvider for OllamaLanguageModelProvider {
|
||||
max_tokens: model.max_tokens,
|
||||
keep_alive: model.keep_alive.clone(),
|
||||
supports_tools: model.supports_tools,
|
||||
supports_vision: model.supports_images,
|
||||
supports_thinking: model.supports_thinking,
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -273,22 +278,59 @@ pub struct OllamaLanguageModel {
|
||||
|
||||
impl OllamaLanguageModel {
|
||||
fn to_ollama_request(&self, request: LanguageModelRequest) -> ChatRequest {
|
||||
let supports_vision = self.model.supports_vision.unwrap_or(false);
|
||||
|
||||
ChatRequest {
|
||||
model: self.model.name.clone(),
|
||||
messages: request
|
||||
.messages
|
||||
.into_iter()
|
||||
.map(|msg| match msg.role {
|
||||
Role::User => ChatMessage::User {
|
||||
content: msg.string_contents(),
|
||||
},
|
||||
Role::Assistant => ChatMessage::Assistant {
|
||||
content: msg.string_contents(),
|
||||
tool_calls: None,
|
||||
},
|
||||
Role::System => ChatMessage::System {
|
||||
content: msg.string_contents(),
|
||||
},
|
||||
.map(|msg| {
|
||||
let images = if supports_vision {
|
||||
msg.content
|
||||
.iter()
|
||||
.filter_map(|content| match content {
|
||||
MessageContent::Image(image) => Some(image.source.to_string()),
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
match msg.role {
|
||||
Role::User => ChatMessage::User {
|
||||
content: msg.string_contents(),
|
||||
images: if images.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(images)
|
||||
},
|
||||
},
|
||||
Role::Assistant => {
|
||||
let content = msg.string_contents();
|
||||
let thinking =
|
||||
msg.content.into_iter().find_map(|content| match content {
|
||||
MessageContent::Thinking { text, .. } if !text.is_empty() => {
|
||||
Some(text)
|
||||
}
|
||||
_ => None,
|
||||
});
|
||||
ChatMessage::Assistant {
|
||||
content,
|
||||
tool_calls: None,
|
||||
images: if images.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(images)
|
||||
},
|
||||
thinking,
|
||||
}
|
||||
}
|
||||
Role::System => ChatMessage::System {
|
||||
content: msg.string_contents(),
|
||||
},
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
keep_alive: self.model.keep_alive.clone().unwrap_or_default(),
|
||||
@@ -299,6 +341,7 @@ impl OllamaLanguageModel {
|
||||
temperature: request.temperature.or(Some(1.0)),
|
||||
..Default::default()
|
||||
}),
|
||||
think: self.model.supports_thinking,
|
||||
tools: request.tools.into_iter().map(tool_into_ollama).collect(),
|
||||
}
|
||||
}
|
||||
@@ -326,7 +369,7 @@ impl LanguageModel for OllamaLanguageModel {
|
||||
}
|
||||
|
||||
fn supports_images(&self) -> bool {
|
||||
false
|
||||
self.model.supports_vision.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
|
||||
@@ -424,7 +467,7 @@ fn map_to_language_model_completion_events(
|
||||
let mut events = Vec::new();
|
||||
|
||||
match delta.message {
|
||||
ChatMessage::User { content } => {
|
||||
ChatMessage::User { content, images: _ } => {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
|
||||
}
|
||||
ChatMessage::System { content } => {
|
||||
@@ -433,8 +476,16 @@ fn map_to_language_model_completion_events(
|
||||
ChatMessage::Assistant {
|
||||
content,
|
||||
tool_calls,
|
||||
images: _,
|
||||
thinking,
|
||||
} => {
|
||||
// Check for tool calls
|
||||
if let Some(text) = thinking {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Thinking {
|
||||
text,
|
||||
signature: None,
|
||||
}));
|
||||
}
|
||||
|
||||
if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
|
||||
match tool_call {
|
||||
OllamaToolCall::Function(function) => {
|
||||
@@ -455,7 +506,7 @@ fn map_to_language_model_completion_events(
|
||||
state.used_tools = true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
} else if !content.is_empty() {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,8 +20,8 @@ use workspace::{
|
||||
searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
|
||||
};
|
||||
|
||||
const SEND_LINE: &str = "// Send:";
|
||||
const RECEIVE_LINE: &str = "// Receive:";
|
||||
const SEND_LINE: &str = "// Send:\n";
|
||||
const RECEIVE_LINE: &str = "// Receive:\n";
|
||||
const MAX_STORED_LOG_ENTRIES: usize = 2000;
|
||||
|
||||
pub struct LogStore {
|
||||
@@ -464,8 +464,7 @@ impl LogStore {
|
||||
while log_lines.len() >= MAX_STORED_LOG_ENTRIES {
|
||||
log_lines.pop_front();
|
||||
}
|
||||
let entry: &str = message.as_ref();
|
||||
let entry = entry.to_string();
|
||||
let entry = format!("{}\n", message.as_ref().trim());
|
||||
let visible = message.should_include(current_severity);
|
||||
log_lines.push_back(message);
|
||||
|
||||
@@ -580,7 +579,7 @@ impl LogStore {
|
||||
});
|
||||
cx.emit(Event::NewServerLogEntry {
|
||||
id: language_server_id,
|
||||
entry: message.to_string(),
|
||||
entry: format!("{}\n\n", message),
|
||||
kind: LogKind::Rpc,
|
||||
});
|
||||
cx.notify();
|
||||
@@ -644,13 +643,7 @@ impl LspLogView {
|
||||
let last_point = editor.buffer().read(cx).len(cx);
|
||||
let newest_cursor_is_at_end =
|
||||
editor.selections.newest::<usize>(cx).start >= last_point;
|
||||
editor.edit(
|
||||
vec![
|
||||
(last_point..last_point, entry.trim()),
|
||||
(last_point..last_point, "\n"),
|
||||
],
|
||||
cx,
|
||||
);
|
||||
editor.edit(vec![(last_point..last_point, entry.as_str())], cx);
|
||||
let entry_length = entry.len();
|
||||
if entry_length > 1024 {
|
||||
editor.fold_ranges(
|
||||
|
||||
@@ -379,17 +379,19 @@ impl ContextProvider for PythonContextProvider {
|
||||
};
|
||||
|
||||
let module_target = self.build_module_target(variables);
|
||||
let worktree_id = location
|
||||
.file_location
|
||||
.buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|f| f.worktree_id(cx));
|
||||
let location_file = location.file_location.buffer.read(cx).file().cloned();
|
||||
let worktree_id = location_file.as_ref().map(|f| f.worktree_id(cx));
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let raw_toolchain = if let Some(worktree_id) = worktree_id {
|
||||
let file_path = location_file
|
||||
.as_ref()
|
||||
.and_then(|f| f.path().parent())
|
||||
.map(Arc::from)
|
||||
.unwrap_or_else(|| Arc::from("".as_ref()));
|
||||
|
||||
toolchains
|
||||
.active_toolchain(worktree_id, Arc::from("".as_ref()), "Python".into(), cx)
|
||||
.active_toolchain(worktree_id, file_path, "Python".into(), cx)
|
||||
.await
|
||||
.map_or_else(
|
||||
|| String::from("python3"),
|
||||
@@ -398,14 +400,16 @@ impl ContextProvider for PythonContextProvider {
|
||||
} else {
|
||||
String::from("python3")
|
||||
};
|
||||
|
||||
let active_toolchain = format!("\"{raw_toolchain}\"");
|
||||
let toolchain = (PYTHON_ACTIVE_TOOLCHAIN_PATH, active_toolchain);
|
||||
let raw_toolchain = (PYTHON_ACTIVE_TOOLCHAIN_PATH_RAW, raw_toolchain);
|
||||
let raw_toolchain_var = (PYTHON_ACTIVE_TOOLCHAIN_PATH_RAW, raw_toolchain);
|
||||
|
||||
Ok(task::TaskVariables::from_iter(
|
||||
test_target
|
||||
.into_iter()
|
||||
.chain(module_target.into_iter())
|
||||
.chain([toolchain, raw_toolchain]),
|
||||
.chain([toolchain, raw_toolchain_var]),
|
||||
))
|
||||
})
|
||||
}
|
||||
@@ -689,9 +693,13 @@ fn get_worktree_venv_declaration(worktree_root: &Path) -> Option<String> {
|
||||
|
||||
#[async_trait]
|
||||
impl ToolchainLister for PythonToolchainProvider {
|
||||
fn manifest_name(&self) -> language::ManifestName {
|
||||
ManifestName::from(SharedString::new_static("pyproject.toml"))
|
||||
}
|
||||
async fn list(
|
||||
&self,
|
||||
worktree_root: PathBuf,
|
||||
subroot_relative_path: Option<Arc<Path>>,
|
||||
project_env: Option<HashMap<String, String>>,
|
||||
) -> ToolchainList {
|
||||
let env = project_env.unwrap_or_default();
|
||||
@@ -702,7 +710,14 @@ impl ToolchainLister for PythonToolchainProvider {
|
||||
&environment,
|
||||
);
|
||||
let mut config = Configuration::default();
|
||||
config.workspace_directories = Some(vec![worktree_root.clone()]);
|
||||
|
||||
let mut directories = vec![worktree_root.clone()];
|
||||
if let Some(subroot_relative_path) = subroot_relative_path {
|
||||
debug_assert!(subroot_relative_path.is_relative());
|
||||
directories.push(worktree_root.join(subroot_relative_path));
|
||||
}
|
||||
|
||||
config.workspace_directories = Some(directories);
|
||||
for locator in locators.iter() {
|
||||
locator.configure(&config);
|
||||
}
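The toolchain lookup above now searches the worktree root plus an optional subroot given as a relative path. A plain-std sketch of how that directory list is assembled; the example paths are hypothetical:

```rust
use std::path::{Path, PathBuf};

// Build the list of workspace directories handed to the Python environment locators.
fn workspace_directories(worktree_root: &Path, subroot_relative_path: Option<&Path>) -> Vec<PathBuf> {
    let mut directories = vec![worktree_root.to_path_buf()];
    if let Some(subroot) = subroot_relative_path {
        // The subroot is expected to be relative to the worktree root.
        debug_assert!(subroot.is_relative());
        directories.push(worktree_root.join(subroot));
    }
    directories
}

fn main() {
    let dirs = workspace_directories(Path::new("/repo"), Some(Path::new("services/api")));
    assert_eq!(dirs, vec![PathBuf::from("/repo"), PathBuf::from("/repo/services/api")]);
}
```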
|
||||
|
||||
@@ -1690,7 +1690,9 @@ impl MultiBuffer {
|
||||
last_range.context.start <= range.context.start,
|
||||
"Last range: {last_range:?} Range: {range:?}"
|
||||
);
|
||||
if last_range.context.end >= range.context.start {
|
||||
if last_range.context.end >= range.context.start
|
||||
|| last_range.context.end.row + 1 == range.context.start.row
|
||||
{
|
||||
last_range.context.end = range.context.end.max(last_range.context.end);
|
||||
*counts.last_mut().unwrap() += 1;
|
||||
continue;
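The condition change above also merges excerpt ranges whose rows are merely adjacent, not only those that overlap. A simplified sketch of the rule with a minimal stand-in `Point` (lexicographically ordered by row, then column):

```rust
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
struct Point {
    row: u32,
    column: u32,
}

// Overlapping or row-adjacent excerpts collapse into a single range.
fn should_merge(last_end: Point, next_start: Point) -> bool {
    last_end >= next_start || last_end.row + 1 == next_start.row
}

fn main() {
    let last_end = Point { row: 3, column: 5 };
    assert!(should_merge(last_end, Point { row: 3, column: 2 })); // overlap, merged before and after
    assert!(should_merge(last_end, Point { row: 4, column: 0 })); // adjacent rows now merge too
    assert!(!should_merge(last_end, Point { row: 5, column: 0 }));
}
```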
|
||||
@@ -5780,7 +5782,7 @@ impl MultiBufferSnapshot {
|
||||
// then add to the indent stack with the depth found
|
||||
let mut found_indent = false;
|
||||
let mut last_row = first_row;
|
||||
if line_indent.is_line_empty() {
|
||||
if line_indent.is_line_blank() {
|
||||
while !found_indent {
|
||||
let Some((target_row, new_line_indent, _)) = row_indents.next() else {
|
||||
break;
|
||||
@@ -5790,7 +5792,7 @@ impl MultiBufferSnapshot {
|
||||
break;
|
||||
}
|
||||
|
||||
if new_line_indent.is_line_empty() {
|
||||
if new_line_indent.is_line_blank() {
|
||||
continue;
|
||||
}
|
||||
last_row = target_row.min(end_row);
|
||||
|
||||
@@ -1592,7 +1592,6 @@ fn test_set_excerpts_for_buffer_ordering(cx: &mut TestAppContext) {
|
||||
six
|
||||
seven
|
||||
eight
|
||||
-----
|
||||
nine
|
||||
ten
|
||||
eleven
|
||||
@@ -1848,7 +1847,6 @@ fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) {
|
||||
zero
|
||||
one
|
||||
two
|
||||
-----
|
||||
three
|
||||
four
|
||||
five
|
||||
|
||||
@@ -38,6 +38,8 @@ pub struct Model {
|
||||
pub max_tokens: usize,
|
||||
pub keep_alive: Option<KeepAlive>,
|
||||
pub supports_tools: Option<bool>,
|
||||
pub supports_vision: Option<bool>,
|
||||
pub supports_thinking: Option<bool>,
|
||||
}
|
||||
|
||||
fn get_max_tokens(name: &str) -> usize {
|
||||
@@ -67,6 +69,8 @@ impl Model {
|
||||
display_name: Option<&str>,
|
||||
max_tokens: Option<usize>,
|
||||
supports_tools: Option<bool>,
|
||||
supports_vision: Option<bool>,
|
||||
supports_thinking: Option<bool>,
|
||||
) -> Self {
|
||||
Self {
|
||||
name: name.to_owned(),
|
||||
@@ -76,6 +80,8 @@ impl Model {
|
||||
max_tokens: max_tokens.unwrap_or_else(|| get_max_tokens(name)),
|
||||
keep_alive: Some(KeepAlive::indefinite()),
|
||||
supports_tools,
|
||||
supports_vision,
|
||||
supports_thinking,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,9 +104,14 @@ pub enum ChatMessage {
|
||||
Assistant {
|
||||
content: String,
|
||||
tool_calls: Option<Vec<OllamaToolCall>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
images: Option<Vec<String>>,
|
||||
thinking: Option<String>,
|
||||
},
|
||||
User {
|
||||
content: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
images: Option<Vec<String>>,
|
||||
},
|
||||
System {
|
||||
content: String,
|
||||
@@ -140,6 +151,7 @@ pub struct ChatRequest {
|
||||
pub keep_alive: KeepAlive,
|
||||
pub options: Option<ChatOptions>,
|
||||
pub tools: Vec<OllamaTool>,
|
||||
pub think: Option<bool>,
|
||||
}
|
||||
|
||||
impl ChatRequest {
|
||||
@@ -215,6 +227,14 @@ impl ModelShow {
|
||||
// .contains expects &String, which would require an additional allocation
|
||||
self.capabilities.iter().any(|v| v == "tools")
|
||||
}
|
||||
|
||||
pub fn supports_vision(&self) -> bool {
|
||||
self.capabilities.iter().any(|v| v == "vision")
|
||||
}
|
||||
|
||||
pub fn supports_thinking(&self) -> bool {
|
||||
self.capabilities.iter().any(|v| v == "thinking")
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn complete(
|
||||
@@ -459,9 +479,12 @@ mod tests {
|
||||
ChatMessage::Assistant {
|
||||
content,
|
||||
tool_calls,
|
||||
images: _,
|
||||
thinking,
|
||||
} => {
|
||||
assert!(content.is_empty());
|
||||
assert!(tool_calls.is_some_and(|v| !v.is_empty()));
|
||||
assert!(thinking.is_none());
|
||||
}
|
||||
_ => panic!("Deserialized wrong role"),
|
||||
}
|
||||
@@ -523,4 +546,70 @@ mod tests {
|
||||
assert!(result.capabilities.contains(&"tools".to_string()));
|
||||
assert!(result.capabilities.contains(&"completion".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_chat_request_with_images() {
|
||||
let base64_image = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==";
|
||||
|
||||
let request = ChatRequest {
|
||||
model: "llava".to_string(),
|
||||
messages: vec![ChatMessage::User {
|
||||
content: "What do you see in this image?".to_string(),
|
||||
images: Some(vec![base64_image.to_string()]),
|
||||
}],
|
||||
stream: false,
|
||||
keep_alive: KeepAlive::default(),
|
||||
options: None,
|
||||
think: None,
|
||||
tools: vec![],
|
||||
};
|
||||
|
||||
let serialized = serde_json::to_string(&request).unwrap();
|
||||
assert!(serialized.contains("images"));
|
||||
assert!(serialized.contains(base64_image));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn serialize_chat_request_without_images() {
|
||||
let request = ChatRequest {
|
||||
model: "llama3.2".to_string(),
|
||||
messages: vec![ChatMessage::User {
|
||||
content: "Hello, world!".to_string(),
|
||||
images: None,
|
||||
}],
|
||||
stream: false,
|
||||
keep_alive: KeepAlive::default(),
|
||||
options: None,
|
||||
think: None,
|
||||
tools: vec![],
|
||||
};
|
||||
|
||||
let serialized = serde_json::to_string(&request).unwrap();
|
||||
assert!(!serialized.contains("images"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_json_format_with_images() {
|
||||
let base64_image = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==";
|
||||
|
||||
let request = ChatRequest {
|
||||
model: "llava".to_string(),
|
||||
messages: vec![ChatMessage::User {
|
||||
content: "What do you see?".to_string(),
|
||||
images: Some(vec![base64_image.to_string()]),
|
||||
}],
|
||||
stream: false,
|
||||
keep_alive: KeepAlive::default(),
|
||||
options: None,
|
||||
think: None,
|
||||
tools: vec![],
|
||||
};
|
||||
|
||||
let serialized = serde_json::to_string(&request).unwrap();
|
||||
|
||||
let parsed: serde_json::Value = serde_json::from_str(&serialized).unwrap();
|
||||
let message_images = parsed["messages"][0]["images"].as_array().unwrap();
|
||||
assert_eq!(message_images.len(), 1);
|
||||
assert_eq!(message_images[0].as_str().unwrap(), base64_image);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -408,6 +408,7 @@ pub fn task_file_name() -> &'static str {
|
||||
}
|
||||
|
||||
/// Returns the relative path to a `debug.json` file within a project.
|
||||
/// .zed/debug.json
|
||||
pub fn local_debug_file_relative_path() -> &'static Path {
|
||||
Path::new(".zed/debug.json")
|
||||
}
|
||||
|
||||
@@ -2194,4 +2194,8 @@ impl Session {
|
||||
self.shutdown(cx).detach();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn thread_state(&self, thread_id: ThreadId) -> Option<ThreadStatus> {
|
||||
self.thread_states.thread_state(thread_id)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
mod signature_help;
|
||||
|
||||
use crate::{
|
||||
CodeAction, CompletionSource, CoreCompletion, DocumentHighlight, DocumentSymbol, Hover,
|
||||
HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart,
|
||||
InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink, LspAction, MarkupContent,
|
||||
PrepareRenameResponse, ProjectTransaction, ResolveState,
|
||||
CodeAction, CompletionSource, CoreCompletion, CoreCompletionResponse, DocumentHighlight,
|
||||
DocumentSymbol, Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel,
|
||||
InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink,
|
||||
LspAction, MarkupContent, PrepareRenameResponse, ProjectTransaction, ResolveState,
|
||||
lsp_store::{LocalLspStore, LspStore},
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
@@ -2095,7 +2095,7 @@ impl LspCommand for GetHover {
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl LspCommand for GetCompletions {
|
||||
type Response = Vec<CoreCompletion>;
|
||||
type Response = CoreCompletionResponse;
|
||||
type LspRequest = lsp::request::Completion;
|
||||
type ProtoRequest = proto::GetCompletions;
|
||||
|
||||
@@ -2127,19 +2127,22 @@ impl LspCommand for GetCompletions {
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<Self::Response> {
|
||||
let mut response_list = None;
|
||||
let mut completions = if let Some(completions) = completions {
|
||||
let (mut completions, mut is_incomplete) = if let Some(completions) = completions {
|
||||
match completions {
|
||||
lsp::CompletionResponse::Array(completions) => completions,
|
||||
lsp::CompletionResponse::Array(completions) => (completions, false),
|
||||
lsp::CompletionResponse::List(mut list) => {
|
||||
let is_incomplete = list.is_incomplete;
|
||||
let items = std::mem::take(&mut list.items);
|
||||
response_list = Some(list);
|
||||
items
|
||||
(items, is_incomplete)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Vec::new()
|
||||
(Vec::new(), false)
|
||||
};
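The conversion above now carries the LSP `is_incomplete` flag alongside the items instead of returning a bare `Vec`. A reduced sketch of that shape change using the `lsp-types` crate (imported as `lsp` in this codebase); the later filtering and error handling are omitted:

```rust
// Flatten an optional LSP completion response into (items, is_incomplete).
fn flatten_completions(
    response: Option<lsp_types::CompletionResponse>,
) -> (Vec<lsp_types::CompletionItem>, bool) {
    match response {
        Some(lsp_types::CompletionResponse::Array(items)) => (items, false),
        Some(lsp_types::CompletionResponse::List(list)) => (list.items, list.is_incomplete),
        // No response at all is treated as a complete, empty result.
        None => (Vec::new(), false),
    }
}
```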
|
||||
|
||||
let unfiltered_completions_count = completions.len();
|
||||
|
||||
let language_server_adapter = lsp_store
|
||||
.read_with(&mut cx, |lsp_store, _| {
|
||||
lsp_store.language_server_adapter_for_id(server_id)
|
||||
@@ -2259,11 +2262,17 @@ impl LspCommand for GetCompletions {
|
||||
});
|
||||
})?;
|
||||
|
||||
// If completions were filtered out due to errors that may be transient, mark the result
|
||||
// incomplete so that it is re-queried.
|
||||
if unfiltered_completions_count != completions.len() {
|
||||
is_incomplete = true;
|
||||
}
|
||||
|
||||
language_server_adapter
|
||||
.process_completions(&mut completions)
|
||||
.await;
|
||||
|
||||
Ok(completions
|
||||
let completions = completions
|
||||
.into_iter()
|
||||
.zip(completion_edits)
|
||||
.map(|(mut lsp_completion, mut edit)| {
|
||||
@@ -2290,7 +2299,12 @@ impl LspCommand for GetCompletions {
|
||||
},
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
.collect();
|
||||
|
||||
Ok(CoreCompletionResponse {
|
||||
completions,
|
||||
is_incomplete,
|
||||
})
|
||||
}
|
||||
|
||||
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions {
|
||||
@@ -2332,18 +2346,20 @@ impl LspCommand for GetCompletions {
|
||||
}
|
||||
|
||||
fn response_to_proto(
|
||||
completions: Vec<CoreCompletion>,
|
||||
response: CoreCompletionResponse,
|
||||
_: &mut LspStore,
|
||||
_: PeerId,
|
||||
buffer_version: &clock::Global,
|
||||
_: &mut App,
|
||||
) -> proto::GetCompletionsResponse {
|
||||
proto::GetCompletionsResponse {
|
||||
completions: completions
|
||||
completions: response
|
||||
.completions
|
||||
.iter()
|
||||
.map(LspStore::serialize_completion)
|
||||
.collect(),
|
||||
version: serialize_version(buffer_version),
|
||||
can_reuse: !response.is_incomplete,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2360,11 +2376,16 @@ impl LspCommand for GetCompletions {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
message
|
||||
let completions = message
|
||||
.completions
|
||||
.into_iter()
|
||||
.map(LspStore::deserialize_completion)
|
||||
.collect()
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
|
||||
Ok(CoreCompletionResponse {
|
||||
completions,
|
||||
is_incomplete: !message.can_reuse,
|
||||
})
|
||||
}
|
||||
|
||||
fn buffer_id_from_proto(message: &proto::GetCompletions) -> Result<BufferId> {
|
||||
|
||||
@@ -3,14 +3,15 @@ pub mod lsp_ext_command;
|
||||
pub mod rust_analyzer_ext;
|
||||
|
||||
use crate::{
|
||||
CodeAction, Completion, CompletionSource, CoreCompletion, Hover, InlayHint, LspAction,
|
||||
ProjectItem, ProjectPath, ProjectTransaction, ResolveState, Symbol, ToolchainStore,
|
||||
CodeAction, Completion, CompletionResponse, CompletionSource, CoreCompletion, Hover, InlayHint,
|
||||
LspAction, ProjectItem, ProjectPath, ProjectTransaction, ResolveState, Symbol, ToolchainStore,
|
||||
buffer_store::{BufferStore, BufferStoreEvent},
|
||||
environment::ProjectEnvironment,
|
||||
lsp_command::{self, *},
|
||||
lsp_store,
|
||||
manifest_tree::{
|
||||
AdapterQuery, LanguageServerTree, LanguageServerTreeNode, LaunchDisposition, ManifestTree,
|
||||
AdapterQuery, LanguageServerTree, LanguageServerTreeNode, LaunchDisposition,
|
||||
ManifestQueryDelegate, ManifestTree,
|
||||
},
|
||||
prettier_store::{self, PrettierStore, PrettierStoreEvent},
|
||||
project_settings::{LspSettings, ProjectSettings},
|
||||
@@ -997,7 +998,7 @@ impl LocalLspStore {
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
async move {
|
||||
futures::future::join_all(shutdown_futures).await;
|
||||
join_all(shutdown_futures).await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1036,7 +1037,7 @@ impl LocalLspStore {
|
||||
else {
|
||||
return Vec::new();
|
||||
};
|
||||
let delegate = LocalLspAdapterDelegate::from_local_lsp(self, &worktree, cx);
|
||||
let delegate = Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot()));
|
||||
let root = self.lsp_tree.update(cx, |this, cx| {
|
||||
this.get(
|
||||
project_path,
|
||||
@@ -2290,7 +2291,8 @@ impl LocalLspStore {
|
||||
})
|
||||
.map(|(delegate, servers)| (true, delegate, servers))
|
||||
.unwrap_or_else(|| {
|
||||
let delegate = LocalLspAdapterDelegate::from_local_lsp(self, &worktree, cx);
|
||||
let lsp_delegate = LocalLspAdapterDelegate::from_local_lsp(self, &worktree, cx);
|
||||
let delegate = Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot()));
|
||||
let servers = self
|
||||
.lsp_tree
|
||||
.clone()
|
||||
@@ -2304,7 +2306,7 @@ impl LocalLspStore {
|
||||
)
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
(false, delegate, servers)
|
||||
(false, lsp_delegate, servers)
|
||||
});
|
||||
let servers = servers
|
||||
.into_iter()
|
||||
@@ -3585,6 +3587,7 @@ impl LspStore {
|
||||
prettier_store: Entity<PrettierStore>,
|
||||
toolchain_store: Entity<ToolchainStore>,
|
||||
environment: Entity<ProjectEnvironment>,
|
||||
manifest_tree: Entity<ManifestTree>,
|
||||
languages: Arc<LanguageRegistry>,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
fs: Arc<dyn Fs>,
|
||||
@@ -3618,7 +3621,7 @@ impl LspStore {
|
||||
sender,
|
||||
)
|
||||
};
|
||||
let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
|
||||
|
||||
Self {
|
||||
mode: LspStoreMode::Local(LocalLspStore {
|
||||
weak: cx.weak_entity(),
|
||||
@@ -4465,10 +4468,13 @@ impl LspStore {
|
||||
)
|
||||
.map(|(delegate, servers)| (true, delegate, servers))
|
||||
.or_else(|| {
|
||||
let delegate = adapters
|
||||
let lsp_delegate = adapters
|
||||
.entry(worktree_id)
|
||||
.or_insert_with(|| get_adapter(worktree_id, cx))
|
||||
.clone()?;
|
||||
let delegate = Arc::new(ManifestQueryDelegate::new(
|
||||
worktree.read(cx).snapshot(),
|
||||
));
|
||||
let path = file
|
||||
.path()
|
||||
.parent()
|
||||
@@ -4483,7 +4489,7 @@ impl LspStore {
|
||||
cx,
|
||||
);
|
||||
|
||||
Some((false, delegate, nodes.collect()))
|
||||
Some((false, lsp_delegate, nodes.collect()))
|
||||
})
|
||||
else {
|
||||
continue;
|
||||
@@ -5075,7 +5081,7 @@ impl LspStore {
|
||||
position: PointUtf16,
|
||||
context: CompletionContext,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let language_registry = self.languages.clone();
|
||||
|
||||
if let Some((upstream_client, project_id)) = self.upstream_client() {
|
||||
@@ -5099,11 +5105,17 @@ impl LspStore {
|
||||
});
|
||||
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let completions = task.await?;
|
||||
let mut result = Vec::new();
|
||||
populate_labels_for_completions(completions, language, lsp_adapter, &mut result)
|
||||
.await;
|
||||
Ok(Some(result))
|
||||
let completion_response = task.await?;
|
||||
let completions = populate_labels_for_completions(
|
||||
completion_response.completions,
|
||||
language,
|
||||
lsp_adapter,
|
||||
)
|
||||
.await;
|
||||
Ok(vec![CompletionResponse {
|
||||
completions,
|
||||
is_incomplete: completion_response.is_incomplete,
|
||||
}])
|
||||
})
|
||||
} else if let Some(local) = self.as_local() {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
@@ -5117,7 +5129,7 @@ impl LspStore {
|
||||
)
|
||||
.completions;
|
||||
if !completion_settings.lsp {
|
||||
return Task::ready(Ok(None));
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
}
|
||||
|
||||
let server_ids: Vec<_> = buffer.update(cx, |buffer, cx| {
|
||||
@@ -5184,25 +5196,23 @@ impl LspStore {
|
||||
}
|
||||
})?;
|
||||
|
||||
let mut has_completions_returned = false;
|
||||
let mut completions = Vec::new();
|
||||
for (lsp_adapter, task) in tasks {
|
||||
if let Ok(Some(new_completions)) = task.await {
|
||||
has_completions_returned = true;
|
||||
populate_labels_for_completions(
|
||||
new_completions,
|
||||
let futures = tasks.into_iter().map(async |(lsp_adapter, task)| {
|
||||
let completion_response = task.await.ok()??;
|
||||
let completions = populate_labels_for_completions(
|
||||
completion_response.completions,
|
||||
language.clone(),
|
||||
lsp_adapter,
|
||||
&mut completions,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
if has_completions_returned {
|
||||
Ok(Some(completions))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
                Some(CompletionResponse {
                    completions,
                    is_incomplete: completion_response.is_incomplete,
                })
            });

            let responses: Vec<Option<CompletionResponse>> = join_all(futures).await;

            Ok(responses.into_iter().flatten().collect())
        })
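The rewritten local path builds one future per language server, awaits them all concurrently with `join_all`, and drops servers that errored or returned nothing. A self-contained sketch of that collect-and-flatten pattern using the `futures` crate already in play here; `CompletionResponse`, `fake_server`, and `collect_responses` are illustrative stand-ins, not Zed's API:

```rust
use futures::future::join_all;

#[derive(Debug)]
struct CompletionResponse {
    completions: Vec<String>,
    is_incomplete: bool,
}

// Simulates one language server's completion task.
async fn fake_server(response: Option<CompletionResponse>) -> Option<CompletionResponse> {
    response
}

async fn collect_responses<F>(tasks: Vec<F>) -> Vec<CompletionResponse>
where
    F: std::future::Future<Output = Option<CompletionResponse>>,
{
    // Await every per-server task concurrently; servers that errored or
    // produced nothing simply drop out of the merged list.
    join_all(tasks).await.into_iter().flatten().collect()
}

fn main() {
    let tasks = vec![
        fake_server(Some(CompletionResponse {
            completions: vec!["foo".into()],
            is_incomplete: false,
        })),
        fake_server(None),
    ];
    let responses = futures::executor::block_on(collect_responses(tasks));
    assert_eq!(responses.len(), 1);
    assert!(!responses[0].is_incomplete);
}
```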
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("No upstream client or local language server")))
|
||||
@@ -6476,7 +6486,7 @@ impl LspStore {
|
||||
worktree_id,
|
||||
path: Arc::from("".as_ref()),
|
||||
};
|
||||
let delegate = LocalLspAdapterDelegate::from_local_lsp(local, &worktree, cx);
|
||||
let delegate = Arc::new(ManifestQueryDelegate::new(worktree.read(cx).snapshot()));
|
||||
local.lsp_tree.update(cx, |language_server_tree, cx| {
|
||||
for node in language_server_tree.get(
|
||||
path,
|
||||
@@ -9541,8 +9551,7 @@ async fn populate_labels_for_completions(
|
||||
new_completions: Vec<CoreCompletion>,
|
||||
language: Option<Arc<Language>>,
|
||||
lsp_adapter: Option<Arc<CachedLspAdapter>>,
|
||||
completions: &mut Vec<Completion>,
|
||||
) {
|
||||
) -> Vec<Completion> {
|
||||
let lsp_completions = new_completions
|
||||
.iter()
|
||||
.filter_map(|new_completion| {
|
||||
@@ -9566,6 +9575,7 @@ async fn populate_labels_for_completions(
|
||||
.into_iter()
|
||||
.fuse();
|
||||
|
||||
let mut completions = Vec::new();
|
||||
for completion in new_completions {
|
||||
match completion.source.lsp_completion(true) {
|
||||
Some(lsp_completion) => {
|
||||
@@ -9606,6 +9616,7 @@ async fn populate_labels_for_completions(
|
||||
}
|
||||
}
|
||||
}
|
||||
completions
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -10204,14 +10215,6 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
|
||||
self.worktree.id()
|
||||
}
|
||||
|
||||
fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool {
|
||||
self.worktree.entry_for_path(path).map_or(false, |entry| {
|
||||
is_dir.map_or(true, |is_required_to_be_dir| {
|
||||
is_required_to_be_dir == entry.is_dir()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn worktree_root_path(&self) -> &Path {
|
||||
self.worktree.abs_path().as_ref()
|
||||
}
|
||||
|
||||
@@ -11,16 +11,17 @@ use std::{
|
||||
borrow::Borrow,
|
||||
collections::{BTreeMap, hash_map::Entry},
|
||||
ops::ControlFlow,
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription};
|
||||
use language::{LspAdapterDelegate, ManifestName, ManifestQuery};
|
||||
use language::{ManifestDelegate, ManifestName, ManifestQuery};
|
||||
pub use manifest_store::ManifestProviders;
|
||||
use path_trie::{LabelPresence, RootPathTrie, TriePath};
|
||||
use settings::{SettingsStore, WorktreeId};
|
||||
use worktree::{Event as WorktreeEvent, Worktree};
|
||||
use worktree::{Event as WorktreeEvent, Snapshot, Worktree};
|
||||
|
||||
use crate::{
|
||||
ProjectPath,
|
||||
@@ -89,7 +90,7 @@ pub(crate) enum ManifestTreeEvent {
|
||||
impl EventEmitter<ManifestTreeEvent> for ManifestTree {}
|
||||
|
||||
impl ManifestTree {
|
||||
pub(crate) fn new(worktree_store: Entity<WorktreeStore>, cx: &mut App) -> Entity<Self> {
|
||||
pub fn new(worktree_store: Entity<WorktreeStore>, cx: &mut App) -> Entity<Self> {
|
||||
cx.new(|cx| Self {
|
||||
root_points: Default::default(),
|
||||
_subscriptions: [
|
||||
@@ -106,11 +107,11 @@ impl ManifestTree {
|
||||
worktree_store,
|
||||
})
|
||||
}
|
||||
fn root_for_path(
|
||||
pub(crate) fn root_for_path(
|
||||
&mut self,
|
||||
ProjectPath { worktree_id, path }: ProjectPath,
|
||||
manifests: &mut dyn Iterator<Item = ManifestName>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
delegate: Arc<dyn ManifestDelegate>,
|
||||
cx: &mut App,
|
||||
) -> BTreeMap<ManifestName, ProjectPath> {
|
||||
debug_assert_eq!(delegate.worktree_id(), worktree_id);
|
||||
@@ -218,3 +219,26 @@ impl ManifestTree {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ManifestQueryDelegate {
    worktree: Snapshot,
}
impl ManifestQueryDelegate {
    pub fn new(worktree: Snapshot) -> Self {
        Self { worktree }
    }
}

impl ManifestDelegate for ManifestQueryDelegate {
    fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool {
        self.worktree.entry_for_path(path).map_or(false, |entry| {
            is_dir.map_or(true, |is_required_to_be_dir| {
                is_required_to_be_dir == entry.is_dir()
            })
        })
    }

    fn worktree_id(&self) -> WorktreeId {
        self.worktree.id()
    }
}
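The point of the new delegate is that manifest lookups only need existence checks against a worktree snapshot, not the full `LspAdapterDelegate`. A hedged, self-contained sketch of how such a delegate gets used when walking from a file toward its manifest root; `Snapshot`, `manifest_root`, and the paths are illustrative, not the real worktree types:

```rust
use std::collections::HashSet;
use std::path::{Path, PathBuf};

// Illustrative stand-in for a worktree snapshot: just a set of known files.
struct Snapshot {
    files: HashSet<PathBuf>,
}

trait ManifestDelegate {
    fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool;
}

impl ManifestDelegate for Snapshot {
    fn exists(&self, path: &Path, _is_dir: Option<bool>) -> bool {
        self.files.contains(path)
    }
}

/// Walk from `path` toward the worktree root, returning the first ancestor
/// directory that contains `manifest_name` (e.g. "Cargo.toml").
fn manifest_root(
    delegate: &dyn ManifestDelegate,
    path: &Path,
    manifest_name: &str,
) -> Option<PathBuf> {
    path.ancestors()
        .find(|dir| delegate.exists(&dir.join(manifest_name), Some(false)))
        .map(Path::to_path_buf)
}

fn main() {
    let snapshot = Snapshot {
        files: ["crates/app/Cargo.toml", "crates/app/src/main.rs", "Cargo.toml"]
            .into_iter()
            .map(PathBuf::from)
            .collect(),
    };
    let root = manifest_root(&snapshot, Path::new("crates/app/src/main.rs"), "Cargo.toml");
    assert_eq!(root, Some(PathBuf::from("crates/app")));
}
```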
|
||||
|
||||
@@ -16,7 +16,7 @@ use std::{
|
||||
use collections::{HashMap, IndexMap};
|
||||
use gpui::{App, AppContext as _, Entity, Subscription};
|
||||
use language::{
|
||||
Attach, CachedLspAdapter, LanguageName, LanguageRegistry, LspAdapterDelegate,
|
||||
Attach, CachedLspAdapter, LanguageName, LanguageRegistry, ManifestDelegate,
|
||||
language_settings::AllLanguageSettings,
|
||||
};
|
||||
use lsp::LanguageServerName;
|
||||
@@ -151,7 +151,7 @@ impl LanguageServerTree {
|
||||
&'a mut self,
|
||||
path: ProjectPath,
|
||||
query: AdapterQuery<'_>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
delegate: Arc<dyn ManifestDelegate>,
|
||||
cx: &mut App,
|
||||
) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
|
||||
let settings_location = SettingsLocation {
|
||||
@@ -181,7 +181,7 @@ impl LanguageServerTree {
|
||||
LanguageServerName,
|
||||
(LspSettings, BTreeSet<LanguageName>, Arc<CachedLspAdapter>),
|
||||
>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
delegate: Arc<dyn ManifestDelegate>,
|
||||
cx: &mut App,
|
||||
) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
|
||||
let worktree_id = path.worktree_id;
|
||||
@@ -401,7 +401,7 @@ impl<'tree> ServerTreeRebase<'tree> {
|
||||
&'a mut self,
|
||||
path: ProjectPath,
|
||||
query: AdapterQuery<'_>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
delegate: Arc<dyn ManifestDelegate>,
|
||||
cx: &mut App,
|
||||
) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
|
||||
let settings_location = SettingsLocation {
|
||||
|
||||
@@ -35,6 +35,7 @@ pub use git_store::{
|
||||
ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate,
|
||||
git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal},
|
||||
};
|
||||
pub use manifest_tree::ManifestTree;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use buffer_store::{BufferStore, BufferStoreEvent};
|
||||
@@ -554,6 +555,23 @@ impl std::fmt::Debug for Completion {
|
||||
}
|
||||
}
|
||||
|
||||
/// Response from a source of completions.
pub struct CompletionResponse {
    pub completions: Vec<Completion>,
    /// When false, indicates that the list is complete and so does not need to be re-queried if it
    /// can be filtered instead.
    pub is_incomplete: bool,
}

/// Response from language server completion request.
#[derive(Clone, Debug, Default)]
pub(crate) struct CoreCompletionResponse {
    pub completions: Vec<CoreCompletion>,
    /// When false, indicates that the list is complete and so does not need to be re-queried if it
    /// can be filtered instead.
    pub is_incomplete: bool,
}
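Since `Project::completions` now returns one `CompletionResponse` per source, a consumer that wants a single flat list has to combine them and decide whether re-querying is still required. One plausible consumer-side policy (not taken from Zed's editor code) is: re-query if any contributing source reported an incomplete list. A minimal sketch with `Completion` simplified to a label string:

```rust
struct CompletionResponse {
    completions: Vec<String>,
    is_incomplete: bool,
}

/// Merge per-source responses into one list and decide whether the merged
/// result must be re-queried as the user keeps typing: if any source was
/// incomplete, filtering the cached list alone is not enough.
fn merge(responses: Vec<CompletionResponse>) -> (Vec<String>, bool) {
    let is_incomplete = responses.iter().any(|response| response.is_incomplete);
    let completions = responses
        .into_iter()
        .flat_map(|response| response.completions)
        .collect();
    (completions, is_incomplete)
}

fn main() {
    let (completions, is_incomplete) = merge(vec![
        CompletionResponse { completions: vec!["len".into()], is_incomplete: false },
        CompletionResponse { completions: vec!["lenient_parse".into()], is_incomplete: true },
    ]);
    assert_eq!(completions.len(), 2);
    assert!(is_incomplete, "one incomplete source makes the merged set incomplete");
}
```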
|
||||
|
||||
/// A generic completion that can come from different sources.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct CoreCompletion {
|
||||
@@ -874,11 +892,13 @@ impl Project {
|
||||
cx.new(|cx| ContextServerStore::new(worktree_store.clone(), cx));
|
||||
|
||||
let environment = cx.new(|_| ProjectEnvironment::new(env));
|
||||
let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
|
||||
let toolchain_store = cx.new(|cx| {
|
||||
ToolchainStore::local(
|
||||
languages.clone(),
|
||||
worktree_store.clone(),
|
||||
environment.clone(),
|
||||
manifest_tree.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -946,6 +966,7 @@ impl Project {
|
||||
prettier_store.clone(),
|
||||
toolchain_store.clone(),
|
||||
environment.clone(),
|
||||
manifest_tree,
|
||||
languages.clone(),
|
||||
client.http_client(),
|
||||
fs.clone(),
|
||||
@@ -3084,16 +3105,13 @@ impl Project {
|
||||
path: ProjectPath,
|
||||
language_name: LanguageName,
|
||||
cx: &App,
|
||||
) -> Task<Option<ToolchainList>> {
|
||||
if let Some(toolchain_store) = self.toolchain_store.clone() {
|
||||
) -> Task<Option<(ToolchainList, Arc<Path>)>> {
|
||||
if let Some(toolchain_store) = self.toolchain_store.as_ref().map(Entity::downgrade) {
|
||||
cx.spawn(async move |cx| {
|
||||
cx.update(|cx| {
|
||||
toolchain_store
|
||||
.read(cx)
|
||||
.list_toolchains(path, language_name, cx)
|
||||
})
|
||||
.ok()?
|
||||
.await
|
||||
toolchain_store
|
||||
.update(cx, |this, cx| this.list_toolchains(path, language_name, cx))
|
||||
.ok()?
|
||||
.await
|
||||
})
|
||||
} else {
|
||||
Task::ready(None)
|
||||
@@ -3429,7 +3447,7 @@ impl Project {
|
||||
position: T,
|
||||
context: CompletionContext,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<Option<Vec<Completion>>>> {
|
||||
) -> Task<Result<Vec<CompletionResponse>>> {
|
||||
let position = position.to_point_utf16(buffer.read(cx));
|
||||
self.lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.completions(buffer, position, context, cx)
|
||||
|
||||
@@ -3014,7 +3014,12 @@ async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
|
||||
.next()
|
||||
.await;
|
||||
|
||||
let completions = completions.await.unwrap().unwrap();
|
||||
let completions = completions
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.collect::<Vec<_>>();
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
assert_eq!(completions.len(), 1);
|
||||
@@ -3097,7 +3102,12 @@ async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
.next()
|
||||
.await;
|
||||
|
||||
let completions = completions.await.unwrap().unwrap();
|
||||
let completions = completions
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.collect::<Vec<_>>();
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
assert_eq!(completions.len(), 1);
|
||||
@@ -3139,7 +3149,12 @@ async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
.next()
|
||||
.await;
|
||||
|
||||
let completions = completions.await.unwrap().unwrap();
|
||||
let completions = completions
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.collect::<Vec<_>>();
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
assert_eq!(completions.len(), 1);
|
||||
@@ -3210,7 +3225,12 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
let completions = completions.await.unwrap().unwrap();
|
||||
let completions = completions
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.collect::<Vec<_>>();
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "fullyQualifiedName");
|
||||
@@ -3237,7 +3257,12 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
let completions = completions.await.unwrap().unwrap();
|
||||
let completions = completions
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.collect::<Vec<_>>();
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "component");
|
||||
@@ -3305,7 +3330,12 @@ async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
let completions = completions.await.unwrap().unwrap();
|
||||
let completions = completions
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "fully\nQualified\nName");
|
||||
}
|
||||
|
||||
@@ -514,7 +514,7 @@ impl Project {
|
||||
terminal_handle: &Entity<Terminal>,
|
||||
cx: &mut App,
|
||||
) {
|
||||
terminal_handle.update(cx, |terminal, _| terminal.input(command));
|
||||
terminal_handle.update(cx, |terminal, _| terminal.input(command.into_bytes()));
|
||||
}
|
||||
|
||||
pub fn local_terminal_handles(&self) -> &Vec<WeakEntity<terminal::Terminal>> {
|
||||
|
||||
@@ -19,7 +19,11 @@ use rpc::{
|
||||
use settings::WorktreeId;
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::{ProjectEnvironment, ProjectPath, worktree_store::WorktreeStore};
|
||||
use crate::{
|
||||
ProjectEnvironment, ProjectPath,
|
||||
manifest_tree::{ManifestQueryDelegate, ManifestTree},
|
||||
worktree_store::WorktreeStore,
|
||||
};
|
||||
|
||||
pub struct ToolchainStore(ToolchainStoreInner);
|
||||
enum ToolchainStoreInner {
|
||||
@@ -42,6 +46,7 @@ impl ToolchainStore {
|
||||
languages: Arc<LanguageRegistry>,
|
||||
worktree_store: Entity<WorktreeStore>,
|
||||
project_environment: Entity<ProjectEnvironment>,
|
||||
manifest_tree: Entity<ManifestTree>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let entity = cx.new(|_| LocalToolchainStore {
|
||||
@@ -49,6 +54,7 @@ impl ToolchainStore {
|
||||
worktree_store,
|
||||
project_environment,
|
||||
active_toolchains: Default::default(),
|
||||
manifest_tree,
|
||||
});
|
||||
let subscription = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| {
|
||||
cx.emit(e.clone())
|
||||
@@ -80,11 +86,11 @@ impl ToolchainStore {
|
||||
&self,
|
||||
path: ProjectPath,
|
||||
language_name: LanguageName,
|
||||
cx: &App,
|
||||
) -> Task<Option<ToolchainList>> {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Option<(ToolchainList, Arc<Path>)>> {
|
||||
match &self.0 {
|
||||
ToolchainStoreInner::Local(local, _) => {
|
||||
local.read(cx).list_toolchains(path, language_name, cx)
|
||||
local.update(cx, |this, cx| this.list_toolchains(path, language_name, cx))
|
||||
}
|
||||
ToolchainStoreInner::Remote(remote) => {
|
||||
remote.read(cx).list_toolchains(path, language_name, cx)
|
||||
@@ -181,7 +187,7 @@ impl ToolchainStore {
|
||||
})?
|
||||
.await;
|
||||
let has_values = toolchains.is_some();
|
||||
let groups = if let Some(toolchains) = &toolchains {
|
||||
let groups = if let Some((toolchains, _)) = &toolchains {
|
||||
toolchains
|
||||
.groups
|
||||
.iter()
|
||||
@@ -195,8 +201,8 @@ impl ToolchainStore {
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
let toolchains = if let Some(toolchains) = toolchains {
|
||||
toolchains
|
||||
let (toolchains, relative_path) = if let Some((toolchains, relative_path)) = toolchains {
|
||||
let toolchains = toolchains
|
||||
.toolchains
|
||||
.into_iter()
|
||||
.map(|toolchain| {
|
||||
@@ -207,15 +213,17 @@ impl ToolchainStore {
|
||||
raw_json: toolchain.as_json.to_string(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.collect::<Vec<_>>();
|
||||
(toolchains, relative_path)
|
||||
} else {
|
||||
vec![]
|
||||
(vec![], Arc::from(Path::new("")))
|
||||
};
|
||||
|
||||
Ok(proto::ListToolchainsResponse {
|
||||
has_values,
|
||||
toolchains,
|
||||
groups,
|
||||
relative_worktree_path: Some(relative_path.to_string_lossy().into_owned()),
|
||||
})
|
||||
}
|
||||
pub fn as_language_toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> {
|
||||
@@ -231,6 +239,7 @@ struct LocalToolchainStore {
|
||||
worktree_store: Entity<WorktreeStore>,
|
||||
project_environment: Entity<ProjectEnvironment>,
|
||||
active_toolchains: BTreeMap<(WorktreeId, LanguageName), BTreeMap<Arc<Path>, Toolchain>>,
|
||||
manifest_tree: Entity<ManifestTree>,
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -312,36 +321,73 @@ impl LocalToolchainStore {
|
||||
})
|
||||
}
|
||||
pub(crate) fn list_toolchains(
|
||||
&self,
|
||||
&mut self,
|
||||
path: ProjectPath,
|
||||
language_name: LanguageName,
|
||||
cx: &App,
|
||||
) -> Task<Option<ToolchainList>> {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Option<(ToolchainList, Arc<Path>)>> {
|
||||
let registry = self.languages.clone();
|
||||
let Some(abs_path) = self
|
||||
.worktree_store
|
||||
.read(cx)
|
||||
.worktree_for_id(path.worktree_id, cx)
|
||||
.map(|worktree| worktree.read(cx).abs_path())
|
||||
else {
|
||||
return Task::ready(None);
|
||||
};
|
||||
|
||||
let manifest_tree = self.manifest_tree.downgrade();
|
||||
|
||||
let environment = self.project_environment.clone();
|
||||
cx.spawn(async move |cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let language = cx
|
||||
.background_spawn(registry.language_for_name(language_name.as_ref()))
|
||||
.await
|
||||
.ok()?;
|
||||
let toolchains = language.toolchain_lister()?;
|
||||
let manifest_name = toolchains.manifest_name();
|
||||
let (snapshot, worktree) = this
|
||||
.update(cx, |this, cx| {
|
||||
this.worktree_store
|
||||
.read(cx)
|
||||
.worktree_for_id(path.worktree_id, cx)
|
||||
.map(|worktree| (worktree.read(cx).snapshot(), worktree))
|
||||
})
|
||||
.ok()
|
||||
.flatten()?;
|
||||
let worktree_id = snapshot.id();
|
||||
let worktree_root = snapshot.abs_path().to_path_buf();
|
||||
let relative_path = manifest_tree
|
||||
.update(cx, |this, cx| {
|
||||
this.root_for_path(
|
||||
path,
|
||||
&mut std::iter::once(manifest_name.clone()),
|
||||
Arc::new(ManifestQueryDelegate::new(snapshot)),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.ok()?
|
||||
.remove(&manifest_name)
|
||||
.unwrap_or_else(|| ProjectPath {
|
||||
path: Arc::from(Path::new("")),
|
||||
worktree_id,
|
||||
});
|
||||
let abs_path = worktree
|
||||
.update(cx, |this, _| this.absolutize(&relative_path.path).ok())
|
||||
.ok()
|
||||
.flatten()?;
|
||||
|
||||
let project_env = environment
|
||||
.update(cx, |environment, cx| {
|
||||
environment.get_directory_environment(abs_path.clone(), cx)
|
||||
environment.get_directory_environment(abs_path.as_path().into(), cx)
|
||||
})
|
||||
.ok()?
|
||||
.await;
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let language = registry
|
||||
.language_for_name(language_name.as_ref())
|
||||
.await
|
||||
.ok()?;
|
||||
let toolchains = language.toolchain_lister()?;
|
||||
Some(toolchains.list(abs_path.to_path_buf(), project_env).await)
|
||||
Some((
|
||||
toolchains
|
||||
.list(
|
||||
worktree_root,
|
||||
Some(relative_path.path.clone())
|
||||
.filter(|_| *relative_path.path != *Path::new("")),
|
||||
project_env,
|
||||
)
|
||||
.await,
|
||||
relative_path.path,
|
||||
))
|
||||
})
|
||||
.await
|
||||
})
|
||||
@@ -404,7 +450,7 @@ impl RemoteToolchainStore {
|
||||
path: ProjectPath,
|
||||
language_name: LanguageName,
|
||||
cx: &App,
|
||||
) -> Task<Option<ToolchainList>> {
|
||||
) -> Task<Option<(ToolchainList, Arc<Path>)>> {
|
||||
let project_id = self.project_id;
|
||||
let client = self.client.clone();
|
||||
cx.background_spawn(async move {
|
||||
@@ -444,11 +490,20 @@ impl RemoteToolchainStore {
|
||||
Some((usize::try_from(group.start_index).ok()?, group.name.into()))
|
||||
})
|
||||
.collect();
|
||||
Some(ToolchainList {
|
||||
toolchains,
|
||||
default: None,
|
||||
groups,
|
||||
})
|
||||
            let relative_path = Arc::from(Path::new(
                response
                    .relative_worktree_path
                    .as_deref()
                    .unwrap_or_default(),
            ));
            Some((
                ToolchainList {
                    toolchains,
                    default: None,
                    groups,
                },
                relative_path,
            ))
        })
    }
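Both the local and remote stores now hand back the manifest-relative root alongside the toolchain list, with an empty path standing for the worktree root (the same convention the proto serialization above falls back to). A small sketch of a caller consuming the tuple; `ToolchainList` and `describe` are simplified placeholders, not the `language` crate's types:

```rust
use std::path::Path;
use std::sync::Arc;

// Simplified stand-in for `ToolchainList`; only the part used here.
struct ToolchainList {
    toolchains: Vec<String>,
}

/// UI such as the toolchain picker can now say *where* the toolchains were
/// resolved, not just what they are.
fn describe(result: Option<(ToolchainList, Arc<Path>)>) -> String {
    match result {
        Some((list, relative_path)) if relative_path.as_os_str().is_empty() => {
            format!("{} toolchain(s) at the worktree root", list.toolchains.len())
        }
        Some((list, relative_path)) => {
            format!("{} toolchain(s) under `{}`", list.toolchains.len(), relative_path.display())
        }
        None => "no toolchains".to_string(),
    }
}

fn main() {
    let list = ToolchainList { toolchains: vec!["CPython 3.12".into()] };
    let path: Arc<Path> = Arc::from(Path::new("backend"));
    assert_eq!(describe(Some((list, path))), "1 toolchain(s) under `backend`");
}
```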
|
||||
pub(crate) fn active_toolchain(
|
||||
|
||||
@@ -2343,6 +2343,11 @@ impl ProjectPanel {
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
            if clip_is_cut {
                // Convert the clipboard cut entry to a copy entry after the first paste.
                self.clipboard = self.clipboard.take().map(ClipboardEntry::to_copy_entry);
            }

            self.expand_entry(worktree_id, entry.id, cx);
            Some(())
        });
@@ -5033,6 +5038,13 @@ impl ClipboardEntry {
            ClipboardEntry::Copied(entries) | ClipboardEntry::Cut(entries) => entries,
        }
    }

    fn to_copy_entry(self) -> Self {
        match self {
            ClipboardEntry::Copied(_) => self,
            ClipboardEntry::Cut(entries) => ClipboardEntry::Copied(entries),
        }
    }
}
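The clipboard behaves as a small state machine: the first paste of a cut moves the entries, after which the clipboard degrades to a plain copy so subsequent pastes duplicate instead of moving again. A self-contained reduction of that behaviour (mirroring the `take().map(ClipboardEntry::to_copy_entry)` call above; the file names are made up):

```rust
#[derive(Clone, Debug, PartialEq)]
enum ClipboardEntry {
    Copied(Vec<&'static str>),
    Cut(Vec<&'static str>),
}

impl ClipboardEntry {
    fn to_copy_entry(self) -> Self {
        match self {
            ClipboardEntry::Copied(_) => self,
            ClipboardEntry::Cut(entries) => ClipboardEntry::Copied(entries),
        }
    }
}

fn main() {
    let mut clipboard = Some(ClipboardEntry::Cut(vec!["one.txt", "two.txt"]));

    // First paste: the cut is consumed and converted to a copy.
    clipboard = clipboard.take().map(ClipboardEntry::to_copy_entry);
    assert_eq!(clipboard, Some(ClipboardEntry::Copied(vec!["one.txt", "two.txt"])));

    // Second paste: already a copy, so it stays a copy.
    clipboard = clipboard.take().map(ClipboardEntry::to_copy_entry);
    assert!(matches!(clipboard, Some(ClipboardEntry::Copied(_))));
}
```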
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -1170,6 +1170,91 @@ async fn test_copy_paste(cx: &mut gpui::TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_cut_paste(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor().clone());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"one.txt": "",
|
||||
"two.txt": "",
|
||||
"a": {},
|
||||
"b": {}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
|
||||
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
let panel = workspace.update(cx, ProjectPanel::new).unwrap();
|
||||
|
||||
select_path_with_mark(&panel, "root/one.txt", cx);
|
||||
select_path_with_mark(&panel, "root/two.txt", cx);
|
||||
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root",
|
||||
" > a",
|
||||
" > b",
|
||||
" one.txt <== marked",
|
||||
" two.txt <== selected <== marked",
|
||||
]
|
||||
);
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.cut(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
select_path(&panel, "root/a", cx);
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.paste(&Default::default(), window, cx);
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root",
|
||||
" v a",
|
||||
" one.txt <== marked",
|
||||
" two.txt <== selected <== marked",
|
||||
" > b",
|
||||
],
|
||||
"Cut entries should be moved on first paste."
|
||||
);
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.cancel(&menu::Cancel {}, window, cx)
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
select_path(&panel, "root/b", cx);
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.paste(&Default::default(), window, cx);
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root",
|
||||
" v a",
|
||||
" one.txt",
|
||||
" two.txt",
|
||||
" v b",
|
||||
" one.txt",
|
||||
" two.txt <== selected",
|
||||
],
|
||||
"Cut entries should only be copied for the second paste!"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_cut_paste_between_different_worktrees(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
@@ -195,6 +195,8 @@ message LspExtGoToParentModuleResponse {
message GetCompletionsResponse {
  repeated Completion completions = 1;
  repeated VectorClockEntry version = 2;
  // `!is_complete`, inverted for a default of `is_complete = true`
  bool can_reuse = 3;
}

message ApplyCompletionAdditionalEdits {

@@ -23,6 +23,7 @@ message ListToolchainsResponse {
  repeated Toolchain toolchains = 1;
  bool has_values = 2;
  repeated ToolchainGroup groups = 3;
  optional string relative_worktree_path = 4;
}
|
||||
|
||||
message ActivateToolchain {
|
||||
|
||||
@@ -9,8 +9,8 @@ use http_client::HttpClient;
|
||||
use language::{Buffer, BufferEvent, LanguageRegistry, proto::serialize_operation};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{
|
||||
LspStore, LspStoreEvent, PrettierStore, ProjectEnvironment, ProjectPath, ToolchainStore,
|
||||
WorktreeId,
|
||||
LspStore, LspStoreEvent, ManifestTree, PrettierStore, ProjectEnvironment, ProjectPath,
|
||||
ToolchainStore, WorktreeId,
|
||||
buffer_store::{BufferStore, BufferStoreEvent},
|
||||
debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
|
||||
git_store::GitStore,
|
||||
@@ -87,12 +87,13 @@ impl HeadlessProject {
|
||||
});
|
||||
|
||||
let environment = cx.new(|_| ProjectEnvironment::new(None));
|
||||
|
||||
let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
|
||||
let toolchain_store = cx.new(|cx| {
|
||||
ToolchainStore::local(
|
||||
languages.clone(),
|
||||
worktree_store.clone(),
|
||||
environment.clone(),
|
||||
manifest_tree.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -172,6 +173,7 @@ impl HeadlessProject {
|
||||
prettier_store.clone(),
|
||||
toolchain_store.clone(),
|
||||
environment,
|
||||
manifest_tree,
|
||||
languages.clone(),
|
||||
http_client.clone(),
|
||||
fs.clone(),
|
||||
|
||||
@@ -513,8 +513,8 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
|
||||
|
||||
assert_eq!(
|
||||
result
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.flat_map(|response| response.completions)
|
||||
.map(|c| c.label.text)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["boop".to_string()]
|
||||
|
||||
@@ -92,7 +92,7 @@ pub fn python_env_kernel_specifications(
|
||||
let background_executor = cx.background_executor().clone();
|
||||
|
||||
async move {
|
||||
let toolchains = if let Some(toolchains) = toolchains.await {
|
||||
let toolchains = if let Some((toolchains, _)) = toolchains.await {
|
||||
toolchains
|
||||
} else {
|
||||
return Ok(Vec::new());
|
||||
|
||||
@@ -115,3 +115,7 @@ pub fn initial_tasks_content() -> Cow<'static, str> {
|
||||
pub fn initial_debug_tasks_content() -> Cow<'static, str> {
|
||||
asset_str::<SettingsAssets>("settings/initial_debug_tasks.json")
|
||||
}
|
||||
|
||||
pub fn initial_local_debug_tasks_content() -> Cow<'static, str> {
|
||||
asset_str::<SettingsAssets>("settings/initial_local_debug_tasks.json")
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
/// The mappings defined in this file were created from reading the alacritty source
|
||||
use alacritty_terminal::term::TermMode;
|
||||
use gpui::Keystroke;
|
||||
@@ -41,162 +43,138 @@ impl AlacModifiers {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) -> Option<String> {
|
||||
pub fn to_esc_str(
|
||||
keystroke: &Keystroke,
|
||||
mode: &TermMode,
|
||||
alt_is_meta: bool,
|
||||
) -> Option<Cow<'static, str>> {
|
||||
let modifiers = AlacModifiers::new(keystroke);
|
||||
|
||||
// Manual Bindings including modifiers
|
||||
let manual_esc_str = match (keystroke.key.as_ref(), &modifiers) {
|
||||
let manual_esc_str: Option<&'static str> = match (keystroke.key.as_ref(), &modifiers) {
|
||||
//Basic special keys
|
||||
("tab", AlacModifiers::None) => Some("\x09".to_string()),
|
||||
("escape", AlacModifiers::None) => Some("\x1b".to_string()),
|
||||
("enter", AlacModifiers::None) => Some("\x0d".to_string()),
|
||||
("enter", AlacModifiers::Shift) => Some("\x0d".to_string()),
|
||||
("enter", AlacModifiers::Alt) => Some("\x1b\x0d".to_string()),
|
||||
("backspace", AlacModifiers::None) => Some("\x7f".to_string()),
|
||||
("tab", AlacModifiers::None) => Some("\x09"),
|
||||
("escape", AlacModifiers::None) => Some("\x1b"),
|
||||
("enter", AlacModifiers::None) => Some("\x0d"),
|
||||
("enter", AlacModifiers::Shift) => Some("\x0d"),
|
||||
("enter", AlacModifiers::Alt) => Some("\x1b\x0d"),
|
||||
("backspace", AlacModifiers::None) => Some("\x7f"),
|
||||
//Interesting escape codes
|
||||
("tab", AlacModifiers::Shift) => Some("\x1b[Z".to_string()),
|
||||
("backspace", AlacModifiers::Ctrl) => Some("\x08".to_string()),
|
||||
("backspace", AlacModifiers::Alt) => Some("\x1b\x7f".to_string()),
|
||||
("backspace", AlacModifiers::Shift) => Some("\x7f".to_string()),
|
||||
("space", AlacModifiers::Ctrl) => Some("\x00".to_string()),
|
||||
("home", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
|
||||
Some("\x1b[1;2H".to_string())
|
||||
}
|
||||
("end", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
|
||||
Some("\x1b[1;2F".to_string())
|
||||
}
|
||||
("tab", AlacModifiers::Shift) => Some("\x1b[Z"),
|
||||
("backspace", AlacModifiers::Ctrl) => Some("\x08"),
|
||||
("backspace", AlacModifiers::Alt) => Some("\x1b\x7f"),
|
||||
("backspace", AlacModifiers::Shift) => Some("\x7f"),
|
||||
("space", AlacModifiers::Ctrl) => Some("\x00"),
|
||||
("home", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => Some("\x1b[1;2H"),
|
||||
("end", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => Some("\x1b[1;2F"),
|
||||
("pageup", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
|
||||
Some("\x1b[5;2~".to_string())
|
||||
Some("\x1b[5;2~")
|
||||
}
|
||||
("pagedown", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
|
||||
Some("\x1b[6;2~".to_string())
|
||||
Some("\x1b[6;2~")
|
||||
}
|
||||
("home", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1bOH".to_string())
|
||||
}
|
||||
("home", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1b[H".to_string())
|
||||
}
|
||||
("end", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1bOF".to_string())
|
||||
}
|
||||
("end", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1b[F".to_string())
|
||||
}
|
||||
("up", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1bOA".to_string())
|
||||
}
|
||||
("up", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1b[A".to_string())
|
||||
}
|
||||
("down", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1bOB".to_string())
|
||||
}
|
||||
("down", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1b[B".to_string())
|
||||
}
|
||||
("right", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1bOC".to_string())
|
||||
}
|
||||
("right", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1b[C".to_string())
|
||||
}
|
||||
("left", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1bOD".to_string())
|
||||
}
|
||||
("left", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
|
||||
Some("\x1b[D".to_string())
|
||||
}
|
||||
("back", AlacModifiers::None) => Some("\x7f".to_string()),
|
||||
("insert", AlacModifiers::None) => Some("\x1b[2~".to_string()),
|
||||
("delete", AlacModifiers::None) => Some("\x1b[3~".to_string()),
|
||||
("pageup", AlacModifiers::None) => Some("\x1b[5~".to_string()),
|
||||
("pagedown", AlacModifiers::None) => Some("\x1b[6~".to_string()),
|
||||
("f1", AlacModifiers::None) => Some("\x1bOP".to_string()),
|
||||
("f2", AlacModifiers::None) => Some("\x1bOQ".to_string()),
|
||||
("f3", AlacModifiers::None) => Some("\x1bOR".to_string()),
|
||||
("f4", AlacModifiers::None) => Some("\x1bOS".to_string()),
|
||||
("f5", AlacModifiers::None) => Some("\x1b[15~".to_string()),
|
||||
("f6", AlacModifiers::None) => Some("\x1b[17~".to_string()),
|
||||
("f7", AlacModifiers::None) => Some("\x1b[18~".to_string()),
|
||||
("f8", AlacModifiers::None) => Some("\x1b[19~".to_string()),
|
||||
("f9", AlacModifiers::None) => Some("\x1b[20~".to_string()),
|
||||
("f10", AlacModifiers::None) => Some("\x1b[21~".to_string()),
|
||||
("f11", AlacModifiers::None) => Some("\x1b[23~".to_string()),
|
||||
("f12", AlacModifiers::None) => Some("\x1b[24~".to_string()),
|
||||
("f13", AlacModifiers::None) => Some("\x1b[25~".to_string()),
|
||||
("f14", AlacModifiers::None) => Some("\x1b[26~".to_string()),
|
||||
("f15", AlacModifiers::None) => Some("\x1b[28~".to_string()),
|
||||
("f16", AlacModifiers::None) => Some("\x1b[29~".to_string()),
|
||||
("f17", AlacModifiers::None) => Some("\x1b[31~".to_string()),
|
||||
("f18", AlacModifiers::None) => Some("\x1b[32~".to_string()),
|
||||
("f19", AlacModifiers::None) => Some("\x1b[33~".to_string()),
|
||||
("f20", AlacModifiers::None) => Some("\x1b[34~".to_string()),
|
||||
("home", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => Some("\x1bOH"),
|
||||
("home", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => Some("\x1b[H"),
|
||||
("end", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => Some("\x1bOF"),
|
||||
("end", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => Some("\x1b[F"),
|
||||
("up", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => Some("\x1bOA"),
|
||||
("up", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => Some("\x1b[A"),
|
||||
("down", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => Some("\x1bOB"),
|
||||
("down", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => Some("\x1b[B"),
|
||||
("right", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => Some("\x1bOC"),
|
||||
("right", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => Some("\x1b[C"),
|
||||
("left", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => Some("\x1bOD"),
|
||||
("left", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => Some("\x1b[D"),
|
||||
("back", AlacModifiers::None) => Some("\x7f"),
|
||||
("insert", AlacModifiers::None) => Some("\x1b[2~"),
|
||||
("delete", AlacModifiers::None) => Some("\x1b[3~"),
|
||||
("pageup", AlacModifiers::None) => Some("\x1b[5~"),
|
||||
("pagedown", AlacModifiers::None) => Some("\x1b[6~"),
|
||||
("f1", AlacModifiers::None) => Some("\x1bOP"),
|
||||
("f2", AlacModifiers::None) => Some("\x1bOQ"),
|
||||
("f3", AlacModifiers::None) => Some("\x1bOR"),
|
||||
("f4", AlacModifiers::None) => Some("\x1bOS"),
|
||||
("f5", AlacModifiers::None) => Some("\x1b[15~"),
|
||||
("f6", AlacModifiers::None) => Some("\x1b[17~"),
|
||||
("f7", AlacModifiers::None) => Some("\x1b[18~"),
|
||||
("f8", AlacModifiers::None) => Some("\x1b[19~"),
|
||||
("f9", AlacModifiers::None) => Some("\x1b[20~"),
|
||||
("f10", AlacModifiers::None) => Some("\x1b[21~"),
|
||||
("f11", AlacModifiers::None) => Some("\x1b[23~"),
|
||||
("f12", AlacModifiers::None) => Some("\x1b[24~"),
|
||||
("f13", AlacModifiers::None) => Some("\x1b[25~"),
|
||||
("f14", AlacModifiers::None) => Some("\x1b[26~"),
|
||||
("f15", AlacModifiers::None) => Some("\x1b[28~"),
|
||||
("f16", AlacModifiers::None) => Some("\x1b[29~"),
|
||||
("f17", AlacModifiers::None) => Some("\x1b[31~"),
|
||||
("f18", AlacModifiers::None) => Some("\x1b[32~"),
|
||||
("f19", AlacModifiers::None) => Some("\x1b[33~"),
|
||||
("f20", AlacModifiers::None) => Some("\x1b[34~"),
|
||||
// NumpadEnter, Action::Esc("\n".into());
|
||||
//Mappings for caret notation keys
|
||||
("a", AlacModifiers::Ctrl) => Some("\x01".to_string()), //1
|
||||
("A", AlacModifiers::CtrlShift) => Some("\x01".to_string()), //1
|
||||
("b", AlacModifiers::Ctrl) => Some("\x02".to_string()), //2
|
||||
("B", AlacModifiers::CtrlShift) => Some("\x02".to_string()), //2
|
||||
("c", AlacModifiers::Ctrl) => Some("\x03".to_string()), //3
|
||||
("C", AlacModifiers::CtrlShift) => Some("\x03".to_string()), //3
|
||||
("d", AlacModifiers::Ctrl) => Some("\x04".to_string()), //4
|
||||
("D", AlacModifiers::CtrlShift) => Some("\x04".to_string()), //4
|
||||
("e", AlacModifiers::Ctrl) => Some("\x05".to_string()), //5
|
||||
("E", AlacModifiers::CtrlShift) => Some("\x05".to_string()), //5
|
||||
("f", AlacModifiers::Ctrl) => Some("\x06".to_string()), //6
|
||||
("F", AlacModifiers::CtrlShift) => Some("\x06".to_string()), //6
|
||||
("g", AlacModifiers::Ctrl) => Some("\x07".to_string()), //7
|
||||
("G", AlacModifiers::CtrlShift) => Some("\x07".to_string()), //7
|
||||
("h", AlacModifiers::Ctrl) => Some("\x08".to_string()), //8
|
||||
("H", AlacModifiers::CtrlShift) => Some("\x08".to_string()), //8
|
||||
("i", AlacModifiers::Ctrl) => Some("\x09".to_string()), //9
|
||||
("I", AlacModifiers::CtrlShift) => Some("\x09".to_string()), //9
|
||||
("j", AlacModifiers::Ctrl) => Some("\x0a".to_string()), //10
|
||||
("J", AlacModifiers::CtrlShift) => Some("\x0a".to_string()), //10
|
||||
("k", AlacModifiers::Ctrl) => Some("\x0b".to_string()), //11
|
||||
("K", AlacModifiers::CtrlShift) => Some("\x0b".to_string()), //11
|
||||
("l", AlacModifiers::Ctrl) => Some("\x0c".to_string()), //12
|
||||
("L", AlacModifiers::CtrlShift) => Some("\x0c".to_string()), //12
|
||||
("m", AlacModifiers::Ctrl) => Some("\x0d".to_string()), //13
|
||||
("M", AlacModifiers::CtrlShift) => Some("\x0d".to_string()), //13
|
||||
("n", AlacModifiers::Ctrl) => Some("\x0e".to_string()), //14
|
||||
("N", AlacModifiers::CtrlShift) => Some("\x0e".to_string()), //14
|
||||
("o", AlacModifiers::Ctrl) => Some("\x0f".to_string()), //15
|
||||
("O", AlacModifiers::CtrlShift) => Some("\x0f".to_string()), //15
|
||||
("p", AlacModifiers::Ctrl) => Some("\x10".to_string()), //16
|
||||
("P", AlacModifiers::CtrlShift) => Some("\x10".to_string()), //16
|
||||
("q", AlacModifiers::Ctrl) => Some("\x11".to_string()), //17
|
||||
("Q", AlacModifiers::CtrlShift) => Some("\x11".to_string()), //17
|
||||
("r", AlacModifiers::Ctrl) => Some("\x12".to_string()), //18
|
||||
("R", AlacModifiers::CtrlShift) => Some("\x12".to_string()), //18
|
||||
("s", AlacModifiers::Ctrl) => Some("\x13".to_string()), //19
|
||||
("S", AlacModifiers::CtrlShift) => Some("\x13".to_string()), //19
|
||||
("t", AlacModifiers::Ctrl) => Some("\x14".to_string()), //20
|
||||
("T", AlacModifiers::CtrlShift) => Some("\x14".to_string()), //20
|
||||
("u", AlacModifiers::Ctrl) => Some("\x15".to_string()), //21
|
||||
("U", AlacModifiers::CtrlShift) => Some("\x15".to_string()), //21
|
||||
("v", AlacModifiers::Ctrl) => Some("\x16".to_string()), //22
|
||||
("V", AlacModifiers::CtrlShift) => Some("\x16".to_string()), //22
|
||||
("w", AlacModifiers::Ctrl) => Some("\x17".to_string()), //23
|
||||
("W", AlacModifiers::CtrlShift) => Some("\x17".to_string()), //23
|
||||
("x", AlacModifiers::Ctrl) => Some("\x18".to_string()), //24
|
||||
("X", AlacModifiers::CtrlShift) => Some("\x18".to_string()), //24
|
||||
("y", AlacModifiers::Ctrl) => Some("\x19".to_string()), //25
|
||||
("Y", AlacModifiers::CtrlShift) => Some("\x19".to_string()), //25
|
||||
("z", AlacModifiers::Ctrl) => Some("\x1a".to_string()), //26
|
||||
("Z", AlacModifiers::CtrlShift) => Some("\x1a".to_string()), //26
|
||||
("@", AlacModifiers::Ctrl) => Some("\x00".to_string()), //0
|
||||
("[", AlacModifiers::Ctrl) => Some("\x1b".to_string()), //27
|
||||
("\\", AlacModifiers::Ctrl) => Some("\x1c".to_string()), //28
|
||||
("]", AlacModifiers::Ctrl) => Some("\x1d".to_string()), //29
|
||||
("^", AlacModifiers::Ctrl) => Some("\x1e".to_string()), //30
|
||||
("_", AlacModifiers::Ctrl) => Some("\x1f".to_string()), //31
|
||||
("?", AlacModifiers::Ctrl) => Some("\x7f".to_string()), //127
|
||||
("a", AlacModifiers::Ctrl) => Some("\x01"), //1
|
||||
("A", AlacModifiers::CtrlShift) => Some("\x01"), //1
|
||||
("b", AlacModifiers::Ctrl) => Some("\x02"), //2
|
||||
("B", AlacModifiers::CtrlShift) => Some("\x02"), //2
|
||||
("c", AlacModifiers::Ctrl) => Some("\x03"), //3
|
||||
("C", AlacModifiers::CtrlShift) => Some("\x03"), //3
|
||||
("d", AlacModifiers::Ctrl) => Some("\x04"), //4
|
||||
("D", AlacModifiers::CtrlShift) => Some("\x04"), //4
|
||||
("e", AlacModifiers::Ctrl) => Some("\x05"), //5
|
||||
("E", AlacModifiers::CtrlShift) => Some("\x05"), //5
|
||||
("f", AlacModifiers::Ctrl) => Some("\x06"), //6
|
||||
("F", AlacModifiers::CtrlShift) => Some("\x06"), //6
|
||||
("g", AlacModifiers::Ctrl) => Some("\x07"), //7
|
||||
("G", AlacModifiers::CtrlShift) => Some("\x07"), //7
|
||||
("h", AlacModifiers::Ctrl) => Some("\x08"), //8
|
||||
("H", AlacModifiers::CtrlShift) => Some("\x08"), //8
|
||||
("i", AlacModifiers::Ctrl) => Some("\x09"), //9
|
||||
("I", AlacModifiers::CtrlShift) => Some("\x09"), //9
|
||||
("j", AlacModifiers::Ctrl) => Some("\x0a"), //10
|
||||
("J", AlacModifiers::CtrlShift) => Some("\x0a"), //10
|
||||
("k", AlacModifiers::Ctrl) => Some("\x0b"), //11
|
||||
("K", AlacModifiers::CtrlShift) => Some("\x0b"), //11
|
||||
("l", AlacModifiers::Ctrl) => Some("\x0c"), //12
|
||||
("L", AlacModifiers::CtrlShift) => Some("\x0c"), //12
|
||||
("m", AlacModifiers::Ctrl) => Some("\x0d"), //13
|
||||
("M", AlacModifiers::CtrlShift) => Some("\x0d"), //13
|
||||
("n", AlacModifiers::Ctrl) => Some("\x0e"), //14
|
||||
("N", AlacModifiers::CtrlShift) => Some("\x0e"), //14
|
||||
("o", AlacModifiers::Ctrl) => Some("\x0f"), //15
|
||||
("O", AlacModifiers::CtrlShift) => Some("\x0f"), //15
|
||||
("p", AlacModifiers::Ctrl) => Some("\x10"), //16
|
||||
("P", AlacModifiers::CtrlShift) => Some("\x10"), //16
|
||||
("q", AlacModifiers::Ctrl) => Some("\x11"), //17
|
||||
("Q", AlacModifiers::CtrlShift) => Some("\x11"), //17
|
||||
("r", AlacModifiers::Ctrl) => Some("\x12"), //18
|
||||
("R", AlacModifiers::CtrlShift) => Some("\x12"), //18
|
||||
("s", AlacModifiers::Ctrl) => Some("\x13"), //19
|
||||
("S", AlacModifiers::CtrlShift) => Some("\x13"), //19
|
||||
("t", AlacModifiers::Ctrl) => Some("\x14"), //20
|
||||
("T", AlacModifiers::CtrlShift) => Some("\x14"), //20
|
||||
("u", AlacModifiers::Ctrl) => Some("\x15"), //21
|
||||
("U", AlacModifiers::CtrlShift) => Some("\x15"), //21
|
||||
("v", AlacModifiers::Ctrl) => Some("\x16"), //22
|
||||
("V", AlacModifiers::CtrlShift) => Some("\x16"), //22
|
||||
("w", AlacModifiers::Ctrl) => Some("\x17"), //23
|
||||
("W", AlacModifiers::CtrlShift) => Some("\x17"), //23
|
||||
("x", AlacModifiers::Ctrl) => Some("\x18"), //24
|
||||
("X", AlacModifiers::CtrlShift) => Some("\x18"), //24
|
||||
("y", AlacModifiers::Ctrl) => Some("\x19"), //25
|
||||
("Y", AlacModifiers::CtrlShift) => Some("\x19"), //25
|
||||
("z", AlacModifiers::Ctrl) => Some("\x1a"), //26
|
||||
("Z", AlacModifiers::CtrlShift) => Some("\x1a"), //26
|
||||
("@", AlacModifiers::Ctrl) => Some("\x00"), //0
|
||||
("[", AlacModifiers::Ctrl) => Some("\x1b"), //27
|
||||
("\\", AlacModifiers::Ctrl) => Some("\x1c"), //28
|
||||
("]", AlacModifiers::Ctrl) => Some("\x1d"), //29
|
||||
("^", AlacModifiers::Ctrl) => Some("\x1e"), //30
|
||||
("_", AlacModifiers::Ctrl) => Some("\x1f"), //31
|
||||
("?", AlacModifiers::Ctrl) => Some("\x7f"), //127
|
||||
        _ => None,
    };
    if manual_esc_str.is_some() {
        return manual_esc_str;
    if let Some(esc_str) = manual_esc_str {
        return Some(Cow::Borrowed(esc_str));
    }
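Returning `Cow<'static, str>` means the large table of fixed escape sequences can hand out borrowed `&'static str` values with no per-keypress allocation, while the modifier-dependent sequences still build an owned `String`. A toy stand-in for `to_esc_str` illustrating that split (the `esc_for` name and its tiny match are illustrative only):

```rust
use std::borrow::Cow;

fn esc_for(key: &str, modifier_code: Option<u32>) -> Option<Cow<'static, str>> {
    match (key, modifier_code) {
        // Fixed sequence: borrowed for free.
        ("up", None) => Some(Cow::Borrowed("\x1b[A")),
        // Modifier-dependent sequence: still allocated.
        ("up", Some(code)) => Some(Cow::Owned(format!("\x1b[1;{}A", code))),
        _ => None,
    }
}

fn main() {
    // The common, unmodified case no longer calls `.to_string()` per keypress.
    assert!(matches!(esc_for("up", None), Some(Cow::Borrowed(_))));
    assert_eq!(esc_for("up", Some(5)).unwrap(), "\x1b[1;5A");
}
```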
|
||||
|
||||
// Automated bindings applying modifiers
|
||||
@@ -235,8 +213,8 @@ pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) ->
|
||||
"home" => Some(format!("\x1b[1;{}H", modifier_code)),
|
||||
_ => None,
|
||||
};
|
||||
if modified_esc_str.is_some() {
|
||||
return modified_esc_str;
|
||||
if let Some(esc_str) = modified_esc_str {
|
||||
return Some(Cow::Owned(esc_str));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -250,7 +228,7 @@ pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) ->
|
||||
} else {
|
||||
&keystroke.key
|
||||
};
|
||||
return Some(format!("\x1b{}", key));
|
||||
return Some(Cow::Owned(format!("\x1b{}", key)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -306,33 +284,27 @@ mod test {
|
||||
let alt_screen = TermMode::ALT_SCREEN;
|
||||
assert_eq!(
|
||||
to_esc_str(&shift_pageup, &alt_screen, false),
|
||||
Some("\x1b[5;2~".to_string())
|
||||
Some("\x1b[5;2~".into())
|
||||
);
|
||||
assert_eq!(
|
||||
to_esc_str(&shift_pagedown, &alt_screen, false),
|
||||
Some("\x1b[6;2~".to_string())
|
||||
Some("\x1b[6;2~".into())
|
||||
);
|
||||
assert_eq!(
|
||||
to_esc_str(&shift_home, &alt_screen, false),
|
||||
Some("\x1b[1;2H".to_string())
|
||||
Some("\x1b[1;2H".into())
|
||||
);
|
||||
assert_eq!(
|
||||
to_esc_str(&shift_end, &alt_screen, false),
|
||||
Some("\x1b[1;2F".to_string())
|
||||
Some("\x1b[1;2F".into())
|
||||
);
|
||||
|
||||
let pageup = Keystroke::parse("pageup").unwrap();
|
||||
let pagedown = Keystroke::parse("pagedown").unwrap();
|
||||
let any = TermMode::ANY;
|
||||
|
||||
assert_eq!(
|
||||
to_esc_str(&pageup, &any, false),
|
||||
Some("\x1b[5~".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
to_esc_str(&pagedown, &any, false),
|
||||
Some("\x1b[6~".to_string())
|
||||
);
|
||||
assert_eq!(to_esc_str(&pageup, &any, false), Some("\x1b[5~".into()));
|
||||
assert_eq!(to_esc_str(&pagedown, &any, false), Some("\x1b[6~".into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -361,27 +333,18 @@ mod test {
|
||||
let left = Keystroke::parse("left").unwrap();
|
||||
let right = Keystroke::parse("right").unwrap();
|
||||
|
||||
assert_eq!(to_esc_str(&up, &none, false), Some("\x1b[A".to_string()));
|
||||
assert_eq!(to_esc_str(&down, &none, false), Some("\x1b[B".to_string()));
|
||||
assert_eq!(to_esc_str(&right, &none, false), Some("\x1b[C".to_string()));
|
||||
assert_eq!(to_esc_str(&left, &none, false), Some("\x1b[D".to_string()));
|
||||
assert_eq!(to_esc_str(&up, &none, false), Some("\x1b[A".into()));
|
||||
assert_eq!(to_esc_str(&down, &none, false), Some("\x1b[B".into()));
|
||||
assert_eq!(to_esc_str(&right, &none, false), Some("\x1b[C".into()));
|
||||
assert_eq!(to_esc_str(&left, &none, false), Some("\x1b[D".into()));
|
||||
|
||||
assert_eq!(
|
||||
to_esc_str(&up, &app_cursor, false),
|
||||
Some("\x1bOA".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
to_esc_str(&down, &app_cursor, false),
|
||||
Some("\x1bOB".to_string())
|
||||
);
|
||||
assert_eq!(to_esc_str(&up, &app_cursor, false), Some("\x1bOA".into()));
|
||||
assert_eq!(to_esc_str(&down, &app_cursor, false), Some("\x1bOB".into()));
|
||||
assert_eq!(
|
||||
to_esc_str(&right, &app_cursor, false),
|
||||
Some("\x1bOC".to_string())
|
||||
);
|
||||
assert_eq!(
|
||||
to_esc_str(&left, &app_cursor, false),
|
||||
Some("\x1bOD".to_string())
|
||||
Some("\x1bOC".into())
|
||||
);
|
||||
assert_eq!(to_esc_str(&left, &app_cursor, false), Some("\x1bOD".into()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -724,12 +724,13 @@ impl Terminal {
|
||||
// The terminal only supports pasting strings, not images.
|
||||
Some(text) => format(text),
|
||||
_ => format(""),
|
||||
},
|
||||
}
|
||||
.into_bytes(),
|
||||
)
|
||||
}
|
||||
AlacTermEvent::PtyWrite(out) => self.write_to_pty(out),
|
||||
AlacTermEvent::PtyWrite(out) => self.write_to_pty(out.into_bytes()),
|
||||
AlacTermEvent::TextAreaSizeRequest(format) => {
|
||||
self.write_to_pty(format(self.last_content.terminal_bounds.into()))
|
||||
self.write_to_pty(format(self.last_content.terminal_bounds.into()).into_bytes())
|
||||
}
|
||||
AlacTermEvent::CursorBlinkingChange => {
|
||||
let terminal = self.term.lock();
|
||||
@@ -761,7 +762,7 @@ impl Terminal {
|
||||
// followed by a color request sequence.
|
||||
let color = self.term.lock().colors()[index]
|
||||
.unwrap_or_else(|| to_alac_rgb(get_color_at_index(index, cx.theme().as_ref())));
|
||||
self.write_to_pty(format(color));
|
||||
self.write_to_pty(format(color).into_bytes());
|
||||
}
|
||||
AlacTermEvent::ChildExit(error_code) => {
|
||||
self.register_task_finished(Some(error_code), cx);
|
||||
@@ -1227,11 +1228,11 @@ impl Terminal {
|
||||
}
|
||||
|
||||
///Write the Input payload to the tty.
|
||||
fn write_to_pty(&self, input: impl Into<Vec<u8>>) {
|
||||
fn write_to_pty(&self, input: impl Into<Cow<'static, [u8]>>) {
|
||||
self.pty_tx.notify(input.into());
|
||||
}
|
||||
|
||||
pub fn input(&mut self, input: impl Into<Vec<u8>>) {
|
||||
pub fn input(&mut self, input: impl Into<Cow<'static, [u8]>>) {
|
||||
self.events
|
||||
.push_back(InternalEvent::Scroll(AlacScroll::Bottom));
|
||||
self.events.push_back(InternalEvent::SetSelection(None));
|
||||
@@ -1345,7 +1346,10 @@ impl Terminal {
|
||||
                // Keep default terminal behavior
                let esc = to_esc_str(keystroke, &self.last_content.mode, alt_is_meta);
                if let Some(esc) = esc {
                    self.input(esc);
                    match esc {
                        Cow::Borrowed(string) => self.input(string.as_bytes()),
                        Cow::Owned(string) => self.input(string.into_bytes()),
                    };
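On the write side, `input`/`write_to_pty` now take `impl Into<Cow<'static, [u8]>>`, so static byte sequences (focus events, fixed escape codes) are forwarded without copying while dynamic data (pastes, IME text) is passed through owned. A minimal sketch of that signature; `write_to_pty` here is a toy function that just returns what it would forward, not the real pty writer:

```rust
use std::borrow::Cow;

fn write_to_pty(input: impl Into<Cow<'static, [u8]>>) -> Cow<'static, [u8]> {
    input.into()
}

fn main() {
    // Static escape sequences stay borrowed...
    assert!(matches!(write_to_pty("\x1b[I".as_bytes()), Cow::Borrowed(_)));
    // ...while dynamic input such as a paste is passed through owned.
    assert!(matches!(write_to_pty(String::from("ls\r").into_bytes()), Cow::Owned(_)));
}
```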
|
||||
true
|
||||
} else {
|
||||
false
|
||||
@@ -1378,7 +1382,7 @@ impl Terminal {
|
||||
text.replace("\r\n", "\r").replace('\n', "\r")
|
||||
};
|
||||
|
||||
self.input(paste_text);
|
||||
self.input(paste_text.into_bytes());
|
||||
}
|
||||
|
||||
pub fn sync(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -1487,13 +1491,13 @@ impl Terminal {
|
||||
|
||||
pub fn focus_in(&self) {
|
||||
if self.last_content.mode.contains(TermMode::FOCUS_IN_OUT) {
|
||||
self.write_to_pty("\x1b[I".to_string());
|
||||
self.write_to_pty("\x1b[I".as_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn focus_out(&mut self) {
|
||||
if self.last_content.mode.contains(TermMode::FOCUS_IN_OUT) {
|
||||
self.write_to_pty("\x1b[O".to_string());
|
||||
self.write_to_pty("\x1b[O".as_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1660,7 +1664,7 @@ impl Terminal {
|
||||
MouseButton::Middle => {
|
||||
if let Some(item) = _cx.read_from_primary() {
|
||||
let text = item.text().unwrap_or_default().to_string();
|
||||
self.input(text);
|
||||
self.input(text.into_bytes());
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -1832,7 +1836,7 @@ impl Terminal {
|
||||
.map(|name| name.to_string_lossy().to_string())
|
||||
.unwrap_or_default();
|
||||
|
||||
let argv = fpi.argv.clone();
|
||||
let argv = fpi.argv.as_slice();
|
||||
let process_name = format!(
|
||||
"{}{}",
|
||||
fpi.name,
|
||||
|
||||
@@ -74,10 +74,12 @@ fn serialize_pane(pane: &Entity<Pane>, active: bool, cx: &mut App) -> Serialized
|
||||
.map(|item| item.item_id().as_u64())
|
||||
.filter(|active_id| items_to_serialize.contains(active_id));
|
||||
|
||||
let pinned_count = pane.pinned_count();
|
||||
SerializedPane {
|
||||
active,
|
||||
children,
|
||||
active_item,
|
||||
pinned_count,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -229,10 +231,11 @@ async fn deserialize_pane_group(
|
||||
})
|
||||
.log_err()?;
|
||||
let active_item = serialized_pane.active_item;
|
||||
|
||||
let pinned_count = serialized_pane.pinned_count;
|
||||
let terminal = pane
|
||||
.update_in(cx, |pane, window, cx| {
|
||||
populate_pane_items(pane, new_items, active_item, window, cx);
|
||||
pane.set_pinned_count(pinned_count);
|
||||
// Avoid blank panes in splits
|
||||
if pane.items_len() == 0 {
|
||||
let working_directory = workspace
|
||||
@@ -339,6 +342,8 @@ pub(crate) struct SerializedPane {
    pub active: bool,
    pub children: Vec<u64>,
    pub active_item: Option<u64>,
    #[serde(default)]
    pub pinned_count: usize,
}
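`#[serde(default)]` is what keeps previously saved pane records loadable: workspaces serialized before `pinned_count` existed simply deserialize with a count of zero. A pared-down example, assuming `serde` with the derive feature and `serde_json` (both already in the workspace dependencies):

```rust
use serde::Deserialize;

// Reduced `SerializedPane`: only the fields needed to show the default.
#[derive(Deserialize, Debug)]
struct SerializedPane {
    active: bool,
    children: Vec<u64>,
    #[serde(default)]
    pinned_count: usize,
}

fn main() {
    // A record written by an older build that knew nothing about pinning.
    let old_record = r#"{ "active": true, "children": [1, 2] }"#;
    let pane: SerializedPane =
        serde_json::from_str(old_record).expect("old records still parse");
    assert_eq!(pane.pinned_count, 0);
}
```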
|
||||
|
||||
#[derive(Debug)]
|
||||
|
||||
@@ -325,7 +325,6 @@ impl TerminalPanel {
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(terminal_panel)
|
||||
}
|
||||
|
||||
@@ -393,6 +392,9 @@ impl TerminalPanel {
|
||||
pane::Event::Focus => {
|
||||
self.active_pane = pane.clone();
|
||||
}
|
||||
pane::Event::ItemPinned | pane::Event::ItemUnpinned => {
|
||||
self.serialize(cx);
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -266,7 +266,7 @@ impl TerminalView {
|
||||
pub(crate) fn commit_text(&mut self, text: &str, cx: &mut Context<Self>) {
|
||||
if !text.is_empty() {
|
||||
self.terminal.update(cx, |term, _| {
|
||||
term.input(text.to_string());
|
||||
term.input(text.to_string().into_bytes());
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -643,7 +643,7 @@ impl TerminalView {
|
||||
fn send_text(&mut self, text: &SendText, _: &mut Window, cx: &mut Context<Self>) {
|
||||
self.clear_bell(cx);
|
||||
self.terminal.update(cx, |term, _| {
|
||||
term.input(text.0.to_string());
|
||||
term.input(text.0.to_string().into_bytes());
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -158,7 +158,7 @@ impl ActiveToolchain {
let project = workspace
.read_with(cx, |this, _| this.project().clone())
.ok()?;
let toolchains = cx
let (toolchains, relative_path) = cx
.update(|_, cx| {
project.read(cx).available_toolchains(
ProjectPath {

@@ -10,7 +10,7 @@ use gpui::{
use language::{LanguageName, Toolchain, ToolchainList};
use picker::{Picker, PickerDelegate};
use project::{Project, ProjectPath, WorktreeId};
use std::{path::Path, sync::Arc};
use std::{borrow::Cow, path::Path, sync::Arc};
use ui::{HighlightedLabel, ListItem, ListItemSpacing, prelude::*};
use util::ResultExt;
use workspace::{ModalView, Workspace};
@@ -172,18 +172,8 @@ impl ToolchainSelectorDelegate {
let relative_path = this
.read_with(cx, |this, _| this.delegate.relative_path.clone())
.ok()?;
let placeholder_text = format!(
"Select a {} for `{}`…",
term.to_lowercase(),
relative_path.to_string_lossy()
)
.into();
let _ = this.update_in(cx, move |this, window, cx| {
this.delegate.placeholder_text = placeholder_text;
this.refresh_placeholder(window, cx);
});

let available_toolchains = project
let (available_toolchains, relative_path) = project
.update(cx, |this, cx| {
this.available_toolchains(
ProjectPath {
@@ -196,6 +186,21 @@ impl ToolchainSelectorDelegate {
})
.ok()?
.await?;
let pretty_path = {
let path = relative_path.to_string_lossy();
if path.is_empty() {
Cow::Borrowed("worktree root")
} else {
Cow::Owned(format!("`{}`", path))
}
};
let placeholder_text =
format!("Select a {} for {pretty_path}…", term.to_lowercase(),).into();
let _ = this.update_in(cx, move |this, window, cx| {
this.delegate.relative_path = relative_path;
this.delegate.placeholder_text = placeholder_text;
this.refresh_placeholder(window, cx);
});

let _ = this.update_in(cx, move |this, window, cx| {
this.delegate.candidates = available_toolchains;
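The toolchain selector hunk above renders an empty relative path as "worktree root" and backtick-quotes anything else, borrowing where possible via Cow. A standalone sketch of that pretty_path logic; the helper name and signature are illustrative, not Zed's API:

```rust
use std::borrow::Cow;
use std::path::Path;

// Standalone version of the `pretty_path` logic above: an empty relative path
// means the worktree root, anything else is shown backtick-quoted.
fn pretty_path(relative_path: &Path) -> Cow<'static, str> {
    let path = relative_path.to_string_lossy();
    if path.is_empty() {
        Cow::Borrowed("worktree root")
    } else {
        Cow::Owned(format!("`{}`", path))
    }
}

fn main() {
    assert_eq!(pretty_path(Path::new("")), "worktree root");
    assert_eq!(pretty_path(Path::new("crates/vim")), "`crates/vim`");
    println!("Select a toolchain for {}…", pretty_path(Path::new("")));
}
```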
@@ -1,5 +1,5 @@
use anyhow::Result;
use collections::HashMap;
use collections::{HashMap, HashSet};
use command_palette_hooks::CommandInterceptResult;
use editor::{
Bias, Editor, ToPoint,
@@ -166,7 +166,21 @@ struct VimSave {
pub filename: String,
}

actions!(vim, [VisualCommand, CountCommand, ShellCommand]);
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
enum DeleteMarks {
Marks(String),
AllLocal,
}

actions!(
vim,
[VisualCommand, CountCommand, ShellCommand, ArgumentRequired]
);
#[derive(Clone, Deserialize, JsonSchema, PartialEq)]
struct VimEdit {
pub filename: String,
}

impl_internal_actions!(
vim,
[
@@ -178,6 +192,8 @@ impl_internal_actions!(
ShellExec,
VimSet,
VimSave,
DeleteMarks,
VimEdit,
]
);

@@ -239,6 +255,25 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
})
});

Vim::action(editor, cx, |_, _: &ArgumentRequired, window, cx| {
let _ = window.prompt(
gpui::PromptLevel::Critical,
"Argument required",
None,
&["Cancel"],
cx,
);
});

Vim::action(editor, cx, |vim, _: &ShellCommand, window, cx| {
let Some(workspace) = vim.workspace(window) else {
return;
};
workspace.update(cx, |workspace, cx| {
command_palette::CommandPalette::toggle(workspace, "'<,'>!", window, cx);
})
});

Vim::action(editor, cx, |vim, action: &VimSave, window, cx| {
vim.update_editor(window, cx, |_, editor, window, cx| {
let Some(project) = editor.project.clone() else {
@@ -280,6 +315,96 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
});
});

Vim::action(editor, cx, |vim, action: &DeleteMarks, window, cx| {
fn err(s: String, window: &mut Window, cx: &mut Context<Editor>) {
let _ = window.prompt(
gpui::PromptLevel::Critical,
&format!("Invalid argument: {}", s),
None,
&["Cancel"],
cx,
);
}
vim.update_editor(window, cx, |vim, editor, window, cx| match action {
DeleteMarks::Marks(s) => {
if s.starts_with('-') || s.ends_with('-') || s.contains(['\'', '`']) {
err(s.clone(), window, cx);
return;
}

let to_delete = if s.len() < 3 {
Some(s.clone())
} else {
s.chars()
.tuple_windows::<(_, _, _)>()
.map(|(a, b, c)| {
if b == '-' {
if match a {
'a'..='z' => a <= c && c <= 'z',
'A'..='Z' => a <= c && c <= 'Z',
'0'..='9' => a <= c && c <= '9',
_ => false,
} {
Some((a..=c).collect_vec())
} else {
None
}
} else if a == '-' {
if c == '-' { None } else { Some(vec![c]) }
} else if c == '-' {
if a == '-' { None } else { Some(vec![a]) }
} else {
Some(vec![a, b, c])
}
})
.fold_options(HashSet::<char>::default(), |mut set, chars| {
set.extend(chars.iter().copied());
set
})
.map(|set| set.iter().collect::<String>())
};

let Some(to_delete) = to_delete else {
err(s.clone(), window, cx);
return;
};

for c in to_delete.chars().filter(|c| !c.is_whitespace()) {
vim.delete_mark(c.to_string(), editor, window, cx);
}
}
DeleteMarks::AllLocal => {
for s in 'a'..='z' {
vim.delete_mark(s.to_string(), editor, window, cx);
}
}
});
});

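The DeleteMarks action above accepts single marks or ranges like a-d and rejects arguments that start or end with - or contain quote characters. A simplified, whitespace-separated sketch of that expansion; Zed's version walks the string with tuple_windows and additionally checks that both ends of a range fall in the same character class:

```rust
use std::collections::BTreeSet;

// Simplified sketch of the `:delmarks` argument handling above: expand tokens
// like "a b f-k" into the individual marks to delete, rejecting obviously bad
// specs. This splits on whitespace for clarity instead of scanning windows.
fn expand_marks(spec: &str) -> Option<BTreeSet<char>> {
    if spec.starts_with('-') || spec.ends_with('-') || spec.contains(['\'', '`']) {
        return None;
    }
    let mut marks = BTreeSet::new();
    for token in spec.split_whitespace() {
        let chars: Vec<char> = token.chars().collect();
        match chars.as_slice() {
            [single] => {
                marks.insert(*single);
            }
            [start, '-', end] if start <= end => {
                marks.extend(*start..=*end);
            }
            _ => return None,
        }
    }
    Some(marks)
}

fn main() {
    let marks = expand_marks("a c-e").unwrap();
    assert_eq!(marks.into_iter().collect::<String>(), "acde");
    assert!(expand_marks("a-").is_none());
}
```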
Vim::action(editor, cx, |vim, action: &VimEdit, window, cx| {
vim.update_editor(window, cx, |vim, editor, window, cx| {
let Some(workspace) = vim.workspace(window) else {
return;
};
let Some(project) = editor.project.clone() else {
return;
};
let Some(worktree) = project.read(cx).visible_worktrees(cx).next() else {
return;
};
let project_path = ProjectPath {
worktree_id: worktree.read(cx).id(),
path: Arc::from(Path::new(&action.filename)),
};

let _ = workspace.update(cx, |workspace, cx| {
workspace
.open_path(project_path, None, true, window, cx)
.detach_and_log_err(cx);
});
});
});

Vim::action(editor, cx, |vim, _: &CountCommand, window, cx| {
let Some(workspace) = vim.workspace(window) else {
return;
@@ -952,6 +1077,9 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
}),
VimCommand::new(("reg", "isters"), ToggleRegistersView).bang(ToggleRegistersView),
VimCommand::new(("marks", ""), ToggleMarksView).bang(ToggleMarksView),
VimCommand::new(("delm", "arks"), ArgumentRequired)
.bang(DeleteMarks::AllLocal)
.args(|_, args| Some(DeleteMarks::Marks(args).boxed_clone())),
VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(select_range),
VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(select_range),
VimCommand::str(("E", "xplore"), "project_panel::ToggleFocus"),
@@ -971,7 +1099,8 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
VimCommand::new(("%", ""), EndOfDocument),
VimCommand::new(("0", ""), StartOfDocument),
VimCommand::new(("e", "dit"), editor::actions::ReloadFile)
.bang(editor::actions::ReloadFile),
.bang(editor::actions::ReloadFile)
.args(|_, args| Some(VimEdit { filename: args }.boxed_clone())),
VimCommand::new(("ex", ""), editor::actions::ReloadFile).bang(editor::actions::ReloadFile),
VimCommand::new(("cpp", "link"), editor::actions::CopyPermalinkToLine).range(act_on_range),
VimCommand::str(("opt", "ions"), "zed::OpenDefaultSettings"),
@@ -1701,6 +1830,7 @@ mod test {
use std::path::Path;

use crate::{
VimAddon,
state::Mode,
test::{NeovimBackedTestContext, VimTestContext},
};
@@ -2053,4 +2183,35 @@ mod test {
a
ˇa"});
}

#[gpui::test]
async fn test_del_marks(cx: &mut TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;

cx.set_shared_state(indoc! {"
ˇa
b
a
b
a
"})
.await;

cx.simulate_shared_keystrokes("m a").await;

let mark = cx.update_editor(|editor, window, cx| {
let vim = editor.addon::<VimAddon>().unwrap().entity.clone();
vim.update(cx, |vim, cx| vim.get_mark("a", editor, window, cx))
});
assert!(mark.is_some());

cx.simulate_shared_keystrokes(": d e l m space a").await;
cx.simulate_shared_keystrokes("enter").await;

let mark = cx.update_editor(|editor, window, cx| {
let vim = editor.addon::<VimAddon>().unwrap().entity.clone();
vim.update(cx, |vim, cx| vim.get_mark("a", editor, window, cx))
});
assert!(mark.is_none())
}
}
@@ -235,6 +235,60 @@ impl Vim {
found
})
}
Motion::FindForward { .. } => {
self.update_editor(window, cx, |_, editor, window, cx| {
let text_layout_details = editor.text_layout_details(window);
editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.move_with(|map, selection| {
let goal = selection.goal;
let cursor = if selection.is_empty() || selection.reversed {
selection.head()
} else {
movement::left(map, selection.head())
};

let (point, goal) = motion
.move_point(
map,
cursor,
selection.goal,
times,
&text_layout_details,
)
.unwrap_or((cursor, goal));
selection.set_tail(selection.head(), goal);
selection.set_head(movement::right(map, point), goal);
})
});
});
}
Motion::FindBackward { .. } => {
self.update_editor(window, cx, |_, editor, window, cx| {
let text_layout_details = editor.text_layout_details(window);
editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.move_with(|map, selection| {
let goal = selection.goal;
let cursor = if selection.is_empty() || selection.reversed {
selection.head()
} else {
movement::left(map, selection.head())
};

let (point, goal) = motion
.move_point(
map,
cursor,
selection.goal,
times,
&text_layout_details,
)
.unwrap_or((cursor, goal));
selection.set_tail(selection.head(), goal);
selection.set_head(point, goal);
})
});
});
}
_ => self.helix_move_and_collapse(motion, times, window, cx),
}
}

@@ -548,6 +548,8 @@ impl Vim {
cx: &mut Context<Self>,
) {
self.record_current_action(cx);
let count = Vim::take_count(cx).unwrap_or(1);
Vim::take_forced_motion(cx);
self.update_editor(window, cx, |_, editor, window, cx| {
editor.transact(window, cx, |editor, _, cx| {
let selections = editor.selections.all::<Point>(cx);
@@ -560,7 +562,7 @@ impl Vim {
.into_iter()
.map(|row| {
let start_of_line = Point::new(row, 0);
(start_of_line..start_of_line, "\n".to_string())
(start_of_line..start_of_line, "\n".repeat(count))
})
.collect::<Vec<_>>();
editor.edit(edits, cx);
@@ -575,10 +577,17 @@ impl Vim {
cx: &mut Context<Self>,
) {
self.record_current_action(cx);
let count = Vim::take_count(cx).unwrap_or(1);
Vim::take_forced_motion(cx);
self.update_editor(window, cx, |_, editor, window, cx| {
editor.transact(window, cx, |editor, _, cx| {
editor.transact(window, cx, |editor, window, cx| {
let selections = editor.selections.all::<Point>(cx);
let snapshot = editor.buffer().read(cx).snapshot(cx);
let (_map, display_selections) = editor.selections.all_display(cx);
let original_positions = display_selections
.iter()
.map(|s| (s.id, s.head()))
.collect::<HashMap<_, _>>();

let selection_end_rows: BTreeSet<u32> = selections
.into_iter()
@@ -588,10 +597,18 @@ impl Vim {
.into_iter()
.map(|row| {
let end_of_line = Point::new(row, snapshot.line_len(MultiBufferRow(row)));
(end_of_line..end_of_line, "\n".to_string())
(end_of_line..end_of_line, "\n".repeat(count))
})
.collect::<Vec<_>>();
editor.edit(edits, cx);

editor.change_selections(None, window, cx, |s| {
s.move_with(|_, selection| {
if let Some(position) = original_positions.get(&selection.id) {
selection.collapse_to(*position, SelectionGoal::None);
}
});
});
});
});
}
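With the change above, [ space and ] space honor a count by building the edit text as "\n".repeat(count) and then restoring the original cursor positions. A toy illustration of the same idea on a plain Vec of lines, not Zed's MultiBuffer editing API:

```rust
// Standalone sketch of the count-aware `[ space` edit above: insert `count`
// empty lines above the cursor's row, leaving the cursor on the same text line.
fn insert_empty_lines_above(lines: &mut Vec<String>, row: usize, count: usize) {
    for _ in 0..count {
        lines.insert(row, String::new());
    }
}

fn main() {
    let mut buf: Vec<String> = vec!["The quick".into(), "brown fox".into()];
    // `3 [ space` with the cursor on "brown fox".
    insert_empty_lines_above(&mut buf, 1, 3);
    assert_eq!(buf, vec!["The quick", "", "", "", "brown fox"]);
}
```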
@@ -1331,10 +1348,19 @@ mod test {
}

#[gpui::test]
async fn test_insert_empty_line_above(cx: &mut gpui::TestAppContext) {
async fn test_insert_empty_line(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;
cx.simulate("[ space", "ˇ").await.assert_matches();
cx.simulate("[ space", "The ˇquick").await.assert_matches();
cx.simulate_at_each_offset(
"3 [ space",
indoc! {"
The qˇuick
brown ˇfox
jumps ˇover"},
)
.await
.assert_matches();
cx.simulate_at_each_offset(
"[ space",
indoc! {"
@@ -1353,6 +1379,36 @@ mod test {
)
.await
.assert_matches();

cx.simulate("] space", "ˇ").await.assert_matches();
cx.simulate("] space", "The ˇquick").await.assert_matches();
cx.simulate_at_each_offset(
"3 ] space",
indoc! {"
The qˇuick
brown ˇfox
jumps ˇover"},
)
.await
.assert_matches();
cx.simulate_at_each_offset(
"] space",
indoc! {"
The qˇuick
brown ˇfox
jumps ˇover"},
)
.await
.assert_matches();
cx.simulate(
"] space",
indoc! {"
The quick
ˇ
brown fox"},
)
.await
.assert_matches();
}

#[gpui::test]
@@ -279,6 +279,10 @@ impl Vim {
if name == "`" {
name = "'".to_string();
}
if matches!(&name[..], "-" | " ") {
// Not allowed marks
return;
}
let entity_id = workspace.entity_id();
Vim::update_globals(cx, |vim_globals, cx| {
let Some(marks_state) = vim_globals.marks.get(&entity_id) else {
@@ -326,6 +330,30 @@ impl Vim {
.update(cx, |ms, cx| ms.get_mark(name, editor.buffer(), cx))
})
}

pub fn delete_mark(
&self,
name: String,
editor: &mut Editor,
window: &mut Window,
cx: &mut App,
) {
let Some(workspace) = self.workspace(window) else {
return;
};
if name == "`" || name == "'" {
return;
}
let entity_id = workspace.entity_id();
Vim::update_globals(cx, |vim_globals, cx| {
let Some(marks_state) = vim_globals.marks.get(&entity_id) else {
return;
};
marks_state.update(cx, |ms, cx| {
ms.delete_mark(name.clone(), editor.buffer(), cx);
});
});
}
}

pub fn jump_motion(
@@ -124,7 +124,20 @@ impl Vim {
}

let display_range = if !selection.is_empty() {
selection.start..selection.end
// If vim is in VISUAL LINE mode and the column for the
// selection's end point is 0, that means that the
// cursor is at the newline character (\n) at the end of
// the line. In this situation we'll want to move one
// position to the left, ensuring we don't join the last
// line of the selection with the line directly below.
let end_point =
if vim.mode == Mode::VisualLine && selection.end.column() == 0 {
movement::left(&display_map, selection.end)
} else {
selection.end
};

selection.start..end_point
} else if line_mode {
let point = if before {
movement::line_beginning(&display_map, selection.start, false)
@@ -553,6 +566,17 @@ mod test {
ˇfox jumps over
the lazy dog"});
cx.shared_clipboard().await.assert_eq("The quick brown\n");

// Copy line and paste in visual mode, with cursor on newline character.
cx.set_shared_state(indoc! {"
ˇThe quick brown
fox jumps over
the lazy dog"})
.await;
cx.simulate_shared_keystrokes("y y shift-v j $ p").await;
cx.shared_state().await.assert_eq(indoc! {"
ˇThe quick brown
the lazy dog"});
}

#[gpui::test]
@@ -37,6 +37,7 @@ pub enum Object {
SquareBrackets,
CurlyBrackets,
AngleBrackets,
SyntaxNode,
Argument,
IndentObj { include_below: bool },
Tag,
@@ -276,7 +277,10 @@ actions!(
Method,
Class,
Comment,
EntireFile
/// Selects the entire file.
EntireFile,
/// Selects a syntax node.
SyntaxNode,
]
);

@@ -368,6 +372,9 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
vim.object(Object::IndentObj { include_below }, window, cx)
},
);
Vim::action(editor, cx, |vim, _: &SyntaxNode, window, cx| {
vim.object(Object::SyntaxNode, window, cx);
});
}

impl Vim {
@@ -409,7 +416,8 @@ impl Object {
| Object::Class
| Object::EntireFile
| Object::Comment
| Object::IndentObj { .. } => true,
| Object::IndentObj { .. }
| Object::SyntaxNode => true,
}
}

@@ -437,7 +445,8 @@ impl Object {
| Object::Comment
| Object::EntireFile
| Object::CurlyBrackets
| Object::AngleBrackets => true,
| Object::AngleBrackets
| Object::SyntaxNode => true,
}
}

@@ -467,7 +476,8 @@ impl Object {
| Object::Tag
| Object::Comment
| Object::Argument
| Object::IndentObj { .. } => Mode::Visual,
| Object::IndentObj { .. }
| Object::SyntaxNode => Mode::Visual,
Object::Method | Object::Class => {
if around {
Mode::VisualLine
@@ -683,6 +693,13 @@ impl Object {
Object::Argument => argument(map, relative_to, around),
Object::IndentObj { include_below } => indent(map, relative_to, around, include_below),
Object::EntireFile => entire_file(map),
Object::SyntaxNode => {
if around {
larger_syntax_node(map, selection, times.unwrap_or(1))
} else {
smaller_syntax_node(map, selection, times.unwrap_or(1))
}
}
}
}
@@ -1654,6 +1671,61 @@ fn surrounding_markers(
)
}

fn smaller_syntax_node(
map: &DisplaySnapshot,
selection: Selection<DisplayPoint>,
unwrap_or: usize,
) -> Option<Range<DisplayPoint>> {
todo!("take in editor and use ")
}

fn larger_syntax_node(
map: &DisplaySnapshot,
selection: Selection<DisplayPoint>,
count: usize,
) -> Option<Range<DisplayPoint>> {
let selection = selection.map(|p| {
map.display_point_to_anchor(p, Bias::Left)
.to_offset(&map.buffer_snapshot)
});
let old_range = selection.start..selection.end;
let buffer = &map.buffer_snapshot;

if let Some((node, _)) = buffer.syntax_ancestor(old_range.clone()) {
// manually select word at selection
if ["string_content", "inline"].contains(&node.kind()) {
let (word_range, _) = buffer.surrounding_word(old_range.start, None);
// ignore if word is already selected
if !word_range.is_empty() && old_range != word_range {
let (last_word_range, _) = buffer.surrounding_word(old_range.end, None);
// only select word if start and end point belongs to same word
if word_range == last_word_range {
return Some(
word_range.start.to_display_point(map)
..word_range.end.to_display_point(map),
);
}
}
}
}

let mut new_range = old_range.clone();
while let Some((node, containing_range)) = buffer.syntax_ancestor(new_range.clone()) {
new_range = match containing_range {
multi_buffer::MultiOrSingleBufferOffsetRange::Single(_) => break,
multi_buffer::MultiOrSingleBufferOffsetRange::Multi(range) => range,
};
if !node.is_named() {
continue;
}
if !map.intersects_fold(new_range.start) && !map.intersects_fold(new_range.end) {
break;
}
}

return Some(new_range.start.to_display_point(map)..new_range.end.to_display_point(map));
}

#[cfg(test)]
mod test {
use gpui::KeyBinding;
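larger_syntax_node above repeatedly asks the buffer snapshot for the enclosing syntax node until the selection actually grows, skipping unnamed nodes and folded regions; smaller_syntax_node is still a todo! on this branch. An editor-agnostic sketch of the expansion idea, assuming the ancestor ranges have already been collected from innermost to outermost (Zed gets them from syntax_ancestor):

```rust
use std::ops::Range;

// Given enclosing syntax-node ranges ordered innermost to outermost, pick the
// first one that contains the current selection and is strictly different,
// falling back to the selection itself when nothing larger exists.
fn expand_selection(current: Range<usize>, ancestors: &[Range<usize>]) -> Range<usize> {
    ancestors
        .iter()
        .find(|node| node.start <= current.start && current.end <= node.end && **node != current)
        .cloned()
        .unwrap_or(current)
}

fn main() {
    // Selection on an identifier inside a call inside a function body.
    let ancestors = [4..11, 4..13, 0..20];
    assert_eq!(expand_selection(4..11, &ancestors), 4..13);
    assert_eq!(expand_selection(6..9, &ancestors), 4..11);
}
```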
@@ -557,7 +557,9 @@ impl MarksState {
}
return;
};
let buffer = buffer.unwrap();
let Some(buffer) = buffer else {
return;
};

let buffer_id = buffer.read(cx).remote_id();
self.buffer_marks.entry(buffer_id).or_default().insert(
@@ -588,7 +590,7 @@ impl MarksState {
}

let singleton = multi_buffer.read(cx).as_singleton()?;
let excerpt_id = *multi_buffer.read(cx).excerpt_ids().first().unwrap();
let excerpt_id = *multi_buffer.read(cx).excerpt_ids().first()?;
let buffer_id = singleton.read(cx).remote_id();
if let Some(anchors) = self.buffer_marks.get(&buffer_id) {
let text_anchors = anchors.get(name)?;
@@ -611,6 +613,60 @@ impl MarksState {
}
}
}
pub fn delete_mark(
&mut self,
mark_name: String,
multi_buffer: &Entity<MultiBuffer>,
cx: &mut Context<Self>,
) {
let path = if let Some(target) = self.global_marks.get(&mark_name.clone()) {
let name = mark_name.clone();
if let Some(workspace_id) = self.workspace_id(cx) {
cx.background_spawn(async move {
DB.delete_global_marks_path(workspace_id, name).await
})
.detach_and_log_err(cx);
}
self.buffer_marks.iter_mut().for_each(|(_, m)| {
m.remove(&mark_name.clone());
});

match target {
MarkLocation::Buffer(entity_id) => {
self.multibuffer_marks
.get_mut(&entity_id)
.map(|m| m.remove(&mark_name.clone()));
return;
}
MarkLocation::Path(path) => path.clone(),
}
} else {
self.multibuffer_marks
.get_mut(&multi_buffer.entity_id())
.map(|m| m.remove(&mark_name.clone()));

if let Some(singleton) = multi_buffer.read(cx).as_singleton() {
let buffer_id = singleton.read(cx).remote_id();
self.buffer_marks
.get_mut(&buffer_id)
.map(|m| m.remove(&mark_name.clone()));
let Some(path) = self.path_for_buffer(&singleton, cx) else {
return;
};
path
} else {
return;
}
};
self.global_marks.remove(&mark_name.clone());
self.serialized_marks
.get_mut(&path.clone())
.map(|m| m.remove(&mark_name.clone()));
if let Some(workspace_id) = self.workspace_id(cx) {
cx.background_spawn(async move { DB.delete_mark(workspace_id, path, mark_name).await })
.detach_and_log_err(cx);
}
}
}

impl Global for VimGlobals {}
@@ -1689,6 +1745,21 @@ impl VimDb {
.collect())
}

pub(crate) async fn delete_mark(
&self,
workspace_id: WorkspaceId,
path: Arc<Path>,
mark_name: String,
) -> Result<()> {
self.write(move |conn| {
conn.exec_bound(sql!(
DELETE FROM vim_marks
WHERE workspace_id = ? AND mark_name = ? AND path = ?
))?((workspace_id, mark_name, path))
})
.await
}

pub(crate) async fn set_global_mark_path(
&self,
workspace_id: WorkspaceId,
@@ -1716,4 +1787,18 @@ impl VimDb {
WHERE workspace_id = ?
))?(workspace_id)
}

pub(crate) async fn delete_global_marks_path(
&self,
workspace_id: WorkspaceId,
mark_name: String,
) -> Result<()> {
self.write(move |conn| {
conn.exec_bound(sql!(
DELETE FROM vim_global_marks_paths
WHERE workspace_id = ? AND mark_name = ?
))?((workspace_id, mark_name))
})
.await
}
}
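The two VimDb methods above remove saved marks with plain DELETE statements: per-path marks from vim_marks and global mark paths from vim_global_marks_paths. A rough equivalent using rusqlite instead of Zed's sqlez/sql! wrapper; the table shapes below are simplified stand-ins for illustration only:

```rust
use rusqlite::{params, Connection, Result};

// Delete one saved mark for a specific workspace/path pair.
fn delete_mark(conn: &Connection, workspace_id: i64, path: &str, mark: &str) -> Result<usize> {
    conn.execute(
        "DELETE FROM vim_marks WHERE workspace_id = ?1 AND mark_name = ?2 AND path = ?3",
        params![workspace_id, mark, path],
    )
}

// Delete the path association of a global (uppercase) mark.
fn delete_global_mark_path(conn: &Connection, workspace_id: i64, mark: &str) -> Result<usize> {
    conn.execute(
        "DELETE FROM vim_global_marks_paths WHERE workspace_id = ?1 AND mark_name = ?2",
        params![workspace_id, mark],
    )
}

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE vim_marks (workspace_id INTEGER, mark_name TEXT, path TEXT);
         CREATE TABLE vim_global_marks_paths (workspace_id INTEGER, mark_name TEXT, path TEXT);",
    )?;
    conn.execute("INSERT INTO vim_marks VALUES (1, 'A', 'src/main.rs')", [])?;
    assert_eq!(delete_mark(&conn, 1, "src/main.rs", "A")?, 1);
    assert_eq!(delete_global_mark_path(&conn, 1, "A")?, 0);
    Ok(())
}
```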
@@ -1,11 +1,13 @@
use std::ops::{Deref, DerefMut};

use editor::test::editor_lsp_test_context::EditorLspTestContext;
use gpui::{Context, Entity, SemanticVersion, UpdateGlobal};
use gpui::{Context, Entity, SemanticVersion, UpdateGlobal, actions};
use search::{BufferSearchBar, project_search::ProjectSearchBar};

use crate::{state::Operator, *};

actions!(agent, [Chat]);

pub struct VimTestContext {
cx: EditorLspTestContext,
}

@@ -433,6 +433,12 @@ impl Vim {
fn activate(editor: &mut Editor, window: &mut Window, cx: &mut Context<Editor>) {
let vim = Vim::new(window, cx);

if !editor.mode().is_full() {
vim.update(cx, |vim, _| {
vim.mode = Mode::Insert;
});
}

editor.register_addon(VimAddon {
entity: vim.clone(),
});
11
crates/vim/test_data/test_del_marks.json
Normal file
@@ -0,0 +1,11 @@
{"Put":{"state":"ˇa\nb\na\nb\na\n"}}
{"Key":"m"}
{"Key":"a"}
{"Key":":"}
{"Key":"d"}
{"Key":"e"}
{"Key":"l"}
{"Key":"m"}
{"Key":"space"}
{"Key":"a"}
{"Key":"enter"}
78
crates/vim/test_data/test_insert_empty_line.json
Normal file
@@ -0,0 +1,78 @@
{"Put":{"state":"ˇ"}}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"\nˇ","mode":"Normal"}}
{"Put":{"state":"The ˇquick"}}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"\nThe ˇquick","mode":"Normal"}}
{"Put":{"state":"The qˇuick\nbrown fox\njumps over"}}
{"Key":"3"}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"\n\n\nThe qˇuick\nbrown fox\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown ˇfox\njumps over"}}
{"Key":"3"}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"The quick\n\n\n\nbrown ˇfox\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown fox\njumps ˇover"}}
{"Key":"3"}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"The quick\nbrown fox\n\n\n\njumps ˇover","mode":"Normal"}}
{"Put":{"state":"The qˇuick\nbrown fox\njumps over"}}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"\nThe qˇuick\nbrown fox\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown ˇfox\njumps over"}}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"The quick\n\nbrown ˇfox\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown fox\njumps ˇover"}}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"The quick\nbrown fox\n\njumps ˇover","mode":"Normal"}}
{"Put":{"state":"The quick\nˇ\nbrown fox"}}
{"Key":"["}
{"Key":"space"}
{"Get":{"state":"The quick\n\nˇ\nbrown fox","mode":"Normal"}}
{"Put":{"state":"ˇ"}}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"ˇ\n","mode":"Normal"}}
{"Put":{"state":"The ˇquick"}}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The ˇquick\n","mode":"Normal"}}
{"Put":{"state":"The qˇuick\nbrown fox\njumps over"}}
{"Key":"3"}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The qˇuick\n\n\n\nbrown fox\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown ˇfox\njumps over"}}
{"Key":"3"}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The quick\nbrown ˇfox\n\n\n\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown fox\njumps ˇover"}}
{"Key":"3"}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The quick\nbrown fox\njumps ˇover\n\n\n","mode":"Normal"}}
{"Put":{"state":"The qˇuick\nbrown fox\njumps over"}}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The qˇuick\n\nbrown fox\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown ˇfox\njumps over"}}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The quick\nbrown ˇfox\n\njumps over","mode":"Normal"}}
{"Put":{"state":"The quick\nbrown fox\njumps ˇover"}}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The quick\nbrown fox\njumps ˇover\n","mode":"Normal"}}
{"Put":{"state":"The quick\nˇ\nbrown fox"}}
{"Key":"]"}
{"Key":"space"}
{"Get":{"state":"The quick\nˇ\n\nbrown fox","mode":"Normal"}}
Some files were not shown because too many files have changed in this diff.