Compare commits

...

11 Commits

Author SHA1 Message Date
Richard Feldman
e19533e8c4 make it way more complicated but still broken
Co-authored-by: Agus Zubiaga <hi@aguz.me>
2025-03-27 15:49:35 -04:00
Richard Feldman
3148583f79 wip
Co-authored-by: Agus Zubiaga <hi@aguz.me>
2025-03-27 15:29:15 -04:00
Marshall Bowers
ab5ba66b94 feature_flags: Remove predict-edits feature flag (#27605)
This PR removes the `predict-edits` feature flag.

The feature is shipped, and we aren't referencing the flag anywhere
anymore.

Release Notes:

- N/A
2025-03-27 17:01:27 +00:00
Marshall Bowers
a20a534ecf assistant2: Allow dismissing the tool list with the keyboard (#27603)
This PR adds the ability to dismiss the tool list in the profile
configuration modal using the keyboard.

Release Notes:

- N/A
2025-03-27 16:53:59 +00:00
Danilo Leal
5bb979820b docs: Fix link to the subtle mode heading (#27606)
Release Notes:

- N/A
2025-03-27 13:51:32 -03:00
Marshall Bowers
2dee03ebca assistant2: Allow customizing tools for default profiles (#27594)
This PR adds support for customizing the tools for the default profiles.

Release Notes:

- N/A
2025-03-27 15:13:00 +00:00
Danilo Leal
1c7cf1a5c1 docs: Clarify how to turn edit predictions off (#27592)
Closes https://github.com/zed-industries/zed/issues/27590

Release Notes:

- N/A
2025-03-27 11:25:52 -03:00
Agus Zubiaga
f15a241d3e assistant2: Serialize token usage (#27586)
We'll need this for detecting old long threads

Release Notes:

- N/A
2025-03-27 13:38:08 +00:00
Richard Feldman
76d3a9a0f0 Retry on 5xx errors from cloud language model providers (#27584)
Release Notes:

- N/A
2025-03-27 09:35:16 -04:00
Peter Tripp
e6c473a488 html: Update HTML Extension to v0.2.0 (#27548)
Includes:
- https://github.com/zed-industries/zed/pull/27524

Release Notes:

- N/A
2025-03-27 09:00:30 -04:00
张小白
06960670bd windows: Enable collab tests (#27587)
Release Notes:

- N/A
2025-03-27 20:42:22 +08:00
23 changed files with 645 additions and 355 deletions

2
Cargo.lock generated
View File

@@ -17430,7 +17430,7 @@ dependencies = [
[[package]]
name = "zed_html"
version = "0.1.6"
version = "0.2.0"
dependencies = [
"zed_extension_api 0.1.0",
]

View File

@@ -3,7 +3,7 @@ use std::sync::Arc;
use assistant_settings::AssistantSettings;
use assistant_tool::ToolWorkingSet;
use fs::Fs;
use gpui::{prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable};
use gpui::{prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use settings::Settings as _;
use ui::{prelude::*, ListItem, ListItemSpacing, Navigable, NavigableEntry};
use workspace::{ModalView, Workspace};
@@ -15,7 +15,10 @@ use crate::{AssistantPanel, ManageProfiles};
enum Mode {
ChooseProfile(Entity<ProfilePicker>),
ViewProfile(ViewProfileMode),
ConfigureTools(Entity<ToolPicker>),
ConfigureTools {
tool_picker: Entity<ToolPicker>,
_subscription: Subscription,
},
}
#[derive(Clone)]
@@ -98,16 +101,27 @@ impl ManageProfilesModal {
return;
};
self.mode = Mode::ConfigureTools(cx.new(|cx| {
let tool_picker = cx.new(|cx| {
let delegate = ToolPickerDelegate::new(
self.fs.clone(),
self.tools.clone(),
profile_id,
profile_id.clone(),
profile,
cx,
);
ToolPicker::new(delegate, window, cx)
}));
});
let dismiss_subscription = cx.subscribe_in(&tool_picker, window, {
let profile_id = profile_id.clone();
move |this, _tool_picker, _: &DismissEvent, window, cx| {
this.view_profile(profile_id.clone(), window, cx);
}
});
self.mode = Mode::ConfigureTools {
tool_picker,
_subscription: dismiss_subscription,
};
self.focus_handle(cx).focus(window);
}
@@ -122,7 +136,7 @@ impl Focusable for ManageProfilesModal {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match &self.mode {
Mode::ChooseProfile(profile_picker) => profile_picker.focus_handle(cx),
Mode::ConfigureTools(tool_picker) => tool_picker.focus_handle(cx),
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
Mode::ViewProfile(_) => self.focus_handle.clone(),
}
}
@@ -195,7 +209,7 @@ impl Render for ManageProfilesModal {
Mode::ViewProfile(mode) => self
.render_view_profile(mode.clone(), window, cx)
.into_any_element(),
Mode::ConfigureTools(tool_picker) => tool_picker.clone().into_any_element(),
Mode::ConfigureTools { tool_picker, .. } => tool_picker.clone().into_any_element(),
})
}
}

View File

@@ -1,7 +1,8 @@
use std::sync::Arc;
use assistant_settings::{
AgentProfile, AssistantSettings, AssistantSettingsContent, VersionedAssistantSettingsContent,
AgentProfile, AgentProfileContent, AssistantSettings, AssistantSettingsContent,
ContextServerPresetContent, VersionedAssistantSettingsContent,
};
use assistant_tool::{ToolSource, ToolWorkingSet};
use fs::Fs;
@@ -184,26 +185,43 @@ impl PickerDelegate for ToolPickerDelegate {
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, {
let profile_id = self.profile_id.clone();
let default_profile = self.profile.clone();
let tool = tool.clone();
move |settings, _cx| match settings {
AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
settings,
)) => {
if let Some(profiles) = &mut settings.profiles {
if let Some(profile) = profiles.get_mut(&profile_id) {
match tool.source {
ToolSource::Native => {
*profile.tools.entry(tool.name).or_default() = is_enabled;
}
ToolSource::ContextServer { id } => {
let preset = profile
.context_servers
.entry(id.clone().into())
.or_default();
*preset.tools.entry(tool.name.clone()).or_default() =
is_enabled;
}
}
let profiles = settings.profiles.get_or_insert_default();
let profile =
profiles
.entry(profile_id)
.or_insert_with(|| AgentProfileContent {
name: default_profile.name.into(),
tools: default_profile.tools,
context_servers: default_profile
.context_servers
.into_iter()
.map(|(server_id, preset)| {
(
server_id,
ContextServerPresetContent {
tools: preset.tools,
},
)
})
.collect(),
});
match tool.source {
ToolSource::Native => {
*profile.tools.entry(tool.name).or_default() = is_enabled;
}
ToolSource::ContextServer { id } => {
let preset = profile
.context_servers
.entry(id.clone().into())
.or_default();
*preset.tools.entry(tool.name.clone()).or_default() = is_enabled;
}
}
}

View File

@@ -259,6 +259,7 @@ impl ContextPicker {
&path_prefix,
false,
context_store.clone(),
None,
cx,
)
.into_any()
@@ -400,6 +401,7 @@ impl ContextPicker {
RecentEntry::Thread(ThreadContextEntry {
id: thread.id,
summary: thread.summary,
highlight_positions: None,
})
}),
)
@@ -517,6 +519,7 @@ fn recent_context_picker_entries(
RecentEntry::Thread(ThreadContextEntry {
id: thread.id,
summary: thread.summary,
highlight_positions: None,
})
}),
);

View File

@@ -9,7 +9,7 @@ use gpui::{
};
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
use ui::{prelude::*, ListItem, Tooltip};
use ui::{prelude::*, HighlightedLabel, ListItem, Tooltip};
use util::ResultExt as _;
use workspace::{notifications::NotifyResultExt, Workspace};
@@ -193,6 +193,7 @@ impl PickerDelegate for FileContextPickerDelegate {
&path_match.path_prefix,
path_match.is_dir,
self.context_store.clone(),
Some(&path_match.positions),
cx,
)),
)
@@ -279,6 +280,7 @@ pub fn render_file_context_entry(
path_prefix: &Arc<str>,
is_directory: bool,
context_store: WeakEntity<ContextStore>,
highlight_positions: Option<&[usize]>,
cx: &App,
) -> Stateful<Div> {
let (file_name, directory) = if path == Path::new("") {
@@ -325,6 +327,11 @@ pub fn render_file_context_entry(
.map(Icon::from_path)
.unwrap_or_else(|| Icon::new(IconName::File));
let label = match highlight_positions {
Some(positions) => HighlightedLabel::new(file_name, positions.to_vec()).into_any_element(),
None => Label::new(file_name).into_any_element(),
};
h_flex()
.id(id)
.gap_1p5()
@@ -333,7 +340,7 @@ pub fn render_file_context_entry(
.child(
h_flex()
.gap_1()
.child(Label::new(file_name))
.child(label)
.children(directory.map(|directory| {
Label::new(directory)
.size(LabelSize::Small)

View File

@@ -3,7 +3,7 @@ use std::sync::Arc;
use fuzzy::StringMatchCandidate;
use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity};
use picker::{Picker, PickerDelegate};
use ui::{prelude::*, ListItem};
use ui::{prelude::*, HighlightedLabel, ListItem};
use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::{self, ContextStore};
@@ -51,6 +51,7 @@ impl Render for ThreadContextPicker {
pub struct ThreadContextEntry {
pub id: ThreadId,
pub summary: SharedString,
pub highlight_positions: Option<Vec<usize>>,
}
pub struct ThreadContextPickerDelegate {
@@ -173,8 +174,18 @@ impl PickerDelegate for ThreadContextPickerDelegate {
) -> Option<Self::ListItem> {
let thread = &self.matches[ix];
let highlights = thread
.highlight_positions
.as_ref()
.map(|vec| vec.as_slice());
Some(ListItem::new(ix).inset(true).toggle_state(selected).child(
render_thread_context_entry(thread, self.context_store.clone(), cx),
render_thread_context_entry_with_highlights(
thread,
self.context_store.clone(),
highlights.as_deref(),
cx,
),
))
}
}
@@ -182,12 +193,31 @@ impl PickerDelegate for ThreadContextPickerDelegate {
pub fn render_thread_context_entry(
thread: &ThreadContextEntry,
context_store: WeakEntity<ContextStore>,
cx: &mut App,
cx: &App,
) -> Div {
render_thread_context_entry_with_highlights(thread, context_store, None, cx)
}
pub fn render_thread_context_entry_with_highlights(
thread: &ThreadContextEntry,
context_store: WeakEntity<ContextStore>,
highlight_positions: Option<&[usize]>,
cx: &App,
) -> Div {
let added = context_store.upgrade().map_or(false, |ctx_store| {
ctx_store.read(cx).includes_thread(&thread.id).is_some()
});
// Choose between regular label or highlighted label based on position data
let summary_element = match highlight_positions {
Some(positions) => HighlightedLabel::new(thread.summary.clone(), positions.to_vec())
.truncate()
.into_any_element(),
None => Label::new(thread.summary.clone())
.truncate()
.into_any_element(),
};
h_flex()
.gap_1p5()
.w_full()
@@ -201,7 +231,7 @@ pub fn render_thread_context_entry(
.size(IconSize::XSmall)
.color(Color::Muted),
)
.child(Label::new(thread.summary.clone()).truncate()),
.child(summary_element),
)
.when(added, |el| {
el.child(
@@ -222,40 +252,60 @@ pub(crate) fn search_threads(
thread_store: Entity<ThreadStore>,
cx: &mut App,
) -> Task<Vec<ThreadContextEntry>> {
let threads = thread_store.update(cx, |this, _cx| {
this.threads()
.into_iter()
.map(|thread| ThreadContextEntry {
id: thread.id,
summary: thread.summary,
})
.collect::<Vec<_>>()
});
// Get threads from the thread store
let threads = thread_store
.read(cx)
.threads()
.into_iter()
.map(|thread| ThreadContextEntry {
id: thread.id,
summary: thread.summary,
highlight_positions: None, // Initialize with no highlights
})
.collect::<Vec<_>>();
// Return early for empty queries or if there are no threads
if threads.is_empty() || query.is_empty() {
return Task::ready(threads);
}
// Create candidates list for fuzzy matching
let candidates: Vec<_> = threads
.iter()
.enumerate()
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
.collect();
let executor = cx.background_executor().clone();
cx.background_spawn(async move {
if query.is_empty() {
threads
} else {
let candidates = threads
.iter()
.enumerate()
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
.collect::<Vec<_>>();
let matches = fuzzy::match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
executor,
)
.await;
let threads_clone = threads.clone();
matches
.into_iter()
.map(|mat| threads[mat.candidate_id].clone())
.collect()
}
// Use background executor for the matching
cx.background_executor().spawn(async move {
// Perform fuzzy matching in background
let matches = fuzzy::match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
executor,
)
.await;
// Create result entries with highlight positions included
let result = matches
.into_iter()
.filter_map(|mat| {
let thread = threads_clone.get(mat.candidate_id)?;
// Create a new entry with the highlight positions
Some(ThreadContextEntry {
id: thread.id.clone(),
summary: thread.summary.clone(),
highlight_positions: Some(mat.positions),
})
})
.collect::<Vec<ThreadContextEntry>>();
result
})
}

View File

@@ -286,8 +286,7 @@ impl Thread {
tool_use,
action_log: cx.new(|_| ActionLog::new()),
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
// TODO: persist token usage?
cumulative_token_usage: TokenUsage::default(),
cumulative_token_usage: serialized.cumulative_token_usage,
feedback: None,
}
}
@@ -648,6 +647,7 @@ impl Thread {
})
.collect(),
initial_project_snapshot,
cumulative_token_usage: this.cumulative_token_usage.clone(),
})
})
}

View File

@@ -16,7 +16,7 @@ use gpui::{
};
use heed::types::SerdeBincode;
use heed::Database;
use language_model::{LanguageModelToolUseId, Role};
use language_model::{LanguageModelToolUseId, Role, TokenUsage};
use project::Project;
use prompt_store::PromptBuilder;
use serde::{Deserialize, Serialize};
@@ -308,6 +308,8 @@ pub struct SerializedThread {
pub messages: Vec<SerializedMessage>,
#[serde(default)]
pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
#[serde(default)]
pub cumulative_token_usage: TokenUsage,
}
impl SerializedThread {
@@ -390,6 +392,7 @@ impl LegacySerializedThread {
updated_at: self.updated_at,
messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(),
initial_project_snapshot: self.initial_project_snapshot,
cumulative_token_usage: TokenUsage::default(),
}
}
}

View File

@@ -1,6 +1,3 @@
// todo(windows): Actually run the tests
#![cfg(not(target_os = "windows"))]
use std::sync::Arc;
use call::Room;

View File

@@ -36,6 +36,7 @@ use std::{
},
};
use text::Point;
use util::{path, uri};
use workspace::{CloseIntent, Workspace};
#[gpui::test(iterations = 10)]
@@ -190,9 +191,9 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor(
client_a
.fs()
.insert_tree("/dir", json!({ "a.txt": "Some text\n" }))
.insert_tree(path!("/dir"), json!({ "a.txt": "Some text\n" }))
.await;
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -306,14 +307,14 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"main.rs": "fn main() { a }",
"other.rs": "",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -351,7 +352,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -468,7 +469,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -562,14 +563,14 @@ async fn test_collaborating_with_code_actions(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"main.rs": "mod other;\nfn main() { let foo = other::foo(); }",
"other.rs": "pub fn foo() -> usize { 4 }",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -592,7 +593,7 @@ async fn test_collaborating_with_code_actions(
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(params.range.start, lsp::Position::new(0, 0));
assert_eq!(params.range.end, lsp::Position::new(0, 0));
@@ -614,7 +615,7 @@ async fn test_collaborating_with_code_actions(
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(params.range.start, lsp::Position::new(1, 31));
assert_eq!(params.range.end, lsp::Position::new(1, 31));
@@ -626,7 +627,7 @@ async fn test_collaborating_with_code_actions(
changes: Some(
[
(
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(1, 22),
@@ -636,7 +637,7 @@ async fn test_collaborating_with_code_actions(
)],
),
(
lsp::Url::from_file_path("/a/other.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(0, 0),
@@ -697,7 +698,7 @@ async fn test_collaborating_with_code_actions(
changes: Some(
[
(
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(1, 22),
@@ -707,7 +708,7 @@ async fn test_collaborating_with_code_actions(
)],
),
(
lsp::Url::from_file_path("/a/other.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(
lsp::Position::new(0, 0),
@@ -780,14 +781,14 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
client_a
.fs()
.insert_tree(
"/dir",
path!("/dir"),
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;"
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -813,7 +814,10 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
fake_language_server
.set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
assert_eq!(
params.text_document.uri.as_str(),
uri!("file:///dir/one.rs")
);
assert_eq!(params.position, lsp::Position::new(0, 7));
Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
lsp::Position::new(0, 6),
@@ -856,7 +860,10 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
fake_language_server
.set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
assert_eq!(
params.text_document.uri.as_str(),
uri!("file:///dir/one.rs")
);
assert_eq!(params.position, lsp::Position::new(0, 8));
Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
lsp::Position::new(0, 6),
@@ -894,7 +901,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
.set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri.as_str(),
"file:///dir/one.rs"
uri!("file:///dir/one.rs")
);
assert_eq!(
params.text_document_position.position,
@@ -905,14 +912,14 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
changes: Some(
[
(
lsp::Url::from_file_path("/dir/one.rs").unwrap(),
lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
vec![lsp::TextEdit::new(
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
"THREE".to_string(),
)],
),
(
lsp::Url::from_file_path("/dir/two.rs").unwrap(),
lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
vec![
lsp::TextEdit::new(
lsp::Range::new(
@@ -999,17 +1006,17 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
client_a
.fs()
.insert_tree(
"/dir",
path!("/dir"),
json!({
"main.rs": "const ONE: usize = 1;",
}),
)
.await;
let (project_a, _) = client_a.build_local_project("/dir", cx_a).await;
let (project_a, _) = client_a.build_local_project(path!("/dir"), cx_a).await;
let _buffer_a = project_a
.update(cx_a, |p, cx| {
p.open_local_buffer_with_lsp("/dir/main.rs", cx)
p.open_local_buffer_with_lsp(path!("/dir/main.rs"), cx)
})
.await
.unwrap();
@@ -1106,7 +1113,7 @@ async fn test_share_project(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
".gitignore": "ignored-dir",
"a.txt": "a-contents",
@@ -1120,7 +1127,7 @@ async fn test_share_project(
.await;
// Invite client B to collaborate on a project
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
active_call_a
.update(cx_a, |call, cx| {
call.invite(client_b.user_id().unwrap(), Some(project_a.clone()), cx)
@@ -1292,14 +1299,14 @@ async fn test_on_input_format_from_host_to_guest(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"main.rs": "fn main() { a }",
"other.rs": "// Test file",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -1325,7 +1332,7 @@ async fn test_on_input_format_from_host_to_guest(
|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -1414,14 +1421,14 @@ async fn test_on_input_format_from_guest_to_host(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"main.rs": "fn main() { a }",
"other.rs": "// Test file",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -1455,7 +1462,7 @@ async fn test_on_input_format_from_guest_to_host(
.set_request_handler::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move {
assert_eq!(
params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.text_document_position.position,
@@ -1575,14 +1582,14 @@ async fn test_mutual_editor_inlay_hint_cache_update(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"main.rs": "fn main() { a } // and some long comment to ensure inlay hints are not trimmed out",
"other.rs": "// Test file",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
@@ -1605,7 +1612,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
// The host opens a rust file.
let _buffer_a = project_a
.update(cx_a, |project, cx| {
project.open_local_buffer("/a/main.rs", cx)
project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
@@ -1629,7 +1636,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
let edits_made = task_edits_made.load(atomic::Ordering::Acquire);
Ok(Some(vec![lsp::InlayHint {
@@ -1809,14 +1816,14 @@ async fn test_inlay_hint_refresh_is_forwarded(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"main.rs": "fn main() { a } // and some long comment to ensure inlay hints are not trimmed out",
"other.rs": "// Test file",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
@@ -1864,7 +1871,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
let other_hints = task_other_hints.load(atomic::Ordering::Acquire);
let character = if other_hints { 0 } else { 2 };
@@ -1967,7 +1974,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
client_a
.fs()
.insert_tree(
"/my-repo",
path!("/my-repo"),
json!({
".git": {},
"file.txt": "line1\nline2\nline3\nline\n",
@@ -1993,11 +2000,12 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.collect(),
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
};
client_a
.fs()
.set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]);
client_a.fs().set_blame_for_repo(
Path::new(path!("/my-repo/.git")),
vec![("file.txt".into(), blame)],
);
let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/my-repo"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -2177,7 +2185,7 @@ async fn test_collaborating_with_editorconfig(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"src": {
"main.rs": "mod other;\nfn main() { let foo = other::foo(); }",
@@ -2190,7 +2198,7 @@ async fn test_collaborating_with_editorconfig(
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -2309,7 +2317,7 @@ fn main() { let foo = other::foo(); }"};
client_a
.fs()
.atomic_write(
PathBuf::from("/a/src/.editorconfig"),
PathBuf::from(path!("/a/src/.editorconfig")),
"[*]\ntab_width = 3\n".to_owned(),
)
.await

View File

@@ -16,6 +16,7 @@ use project::WorktreeSettings;
use rpc::proto::PeerId;
use serde_json::json;
use settings::SettingsStore;
use util::path;
use workspace::{item::ItemHandle as _, SplitDirection, Workspace};
use super::TestClient;
@@ -50,7 +51,7 @@ async fn test_basic_following(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"1.txt": "one\none\none",
"2.txt": "two\ntwo\ntwo",
@@ -58,7 +59,7 @@ async fn test_basic_following(
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
@@ -436,7 +437,9 @@ async fn test_basic_following(
);
// TODO: Re-enable this test once we can replace our swift Livekit SDK with the rust SDK
#[cfg(not(target_os = "macos"))]
// todo(windows)
// Fix this on Windows
#[cfg(all(not(target_os = "macos"), not(target_os = "windows")))]
{
use crate::rpc::RECONNECT_TIMEOUT;
use gpui::TestScreenCaptureSource;
@@ -1220,7 +1223,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"1.txt": "one",
"2.txt": "two",
@@ -1228,7 +1231,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
@@ -1435,7 +1438,7 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"w.rs": "",
"x.rs": "",
@@ -1446,7 +1449,7 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut
client_b
.fs()
.insert_tree(
"/b",
path!("/b"),
json!({
"y.rs": "",
"z.rs": "",
@@ -1460,8 +1463,8 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);
let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
let (project_a, worktree_id_a) = client_a.build_local_project(path!("/a"), cx_a).await;
let (project_b, worktree_id_b) = client_b.build_local_project(path!("/b"), cx_b).await;
let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
@@ -1718,7 +1721,7 @@ async fn test_following_into_excluded_file(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
".git": {
"COMMIT_EDITMSG": "write your commit message here",
@@ -1729,7 +1732,7 @@ async fn test_following_into_excluded_file(
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await

View File

@@ -9,6 +9,7 @@ use git_ui::project_diff::ProjectDiff;
use gpui::{TestAppContext, VisualTestContext};
use project::ProjectPath;
use serde_json::json;
use util::path;
use workspace::Workspace;
//
@@ -29,7 +30,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
".git": {},
"changed.txt": "after\n",
@@ -41,7 +42,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
.await;
client_a.fs().set_git_content_for_repo(
Path::new("/a/.git"),
Path::new(path!("/a/.git")),
&[
("changed.txt".into(), "before\n".to_string(), None),
("unchanged.txt".into(), "unchanged\n".to_string(), None),
@@ -49,7 +50,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
("secret.pem".into(), "shh\n".to_string(), None),
],
);
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let active_call_a = cx_a.read(ActiveCall::global);
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
@@ -93,7 +94,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
".git": {},
"changed.txt": "before\n",

View File

@@ -50,6 +50,7 @@ use std::{
time::Duration,
};
use unindent::Unindent as _;
use util::{path, separator, uri};
use workspace::Pane;
#[ctor::ctor]
@@ -1459,7 +1460,7 @@ async fn test_project_reconnect(
client_a
.fs()
.insert_tree(
"/root-1",
path!("/root-1"),
json!({
"dir1": {
"a.txt": "a",
@@ -1487,7 +1488,7 @@ async fn test_project_reconnect(
client_a
.fs()
.insert_tree(
"/root-2",
path!("/root-2"),
json!({
"2.txt": "2",
}),
@@ -1496,7 +1497,7 @@ async fn test_project_reconnect(
client_a
.fs()
.insert_tree(
"/root-3",
path!("/root-3"),
json!({
"3.txt": "3",
}),
@@ -1504,9 +1505,11 @@ async fn test_project_reconnect(
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await;
let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await;
let (project_a3, _) = client_a.build_local_project("/root-3", cx_a).await;
let (project_a1, _) = client_a
.build_local_project(path!("/root-1/dir1"), cx_a)
.await;
let (project_a2, _) = client_a.build_local_project(path!("/root-2"), cx_a).await;
let (project_a3, _) = client_a.build_local_project(path!("/root-3"), cx_a).await;
let worktree_a1 =
project_a1.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
let project1_id = active_call_a
@@ -1533,7 +1536,7 @@ async fn test_project_reconnect(
});
let (worktree_a2, _) = project_a1
.update(cx_a, |p, cx| {
p.find_or_create_worktree("/root-1/dir2", true, cx)
p.find_or_create_worktree(path!("/root-1/dir2"), true, cx)
})
.await
.unwrap();
@@ -1579,7 +1582,7 @@ async fn test_project_reconnect(
client_a
.fs()
.insert_tree(
"/root-1/dir1/subdir2",
path!("/root-1/dir1/subdir2"),
json!({
"f.txt": "f-contents",
"g.txt": "g-contents",
@@ -1591,7 +1594,7 @@ async fn test_project_reconnect(
client_a
.fs()
.remove_dir(
"/root-1/dir1/subdir1".as_ref(),
path!("/root-1/dir1/subdir1").as_ref(),
RemoveOptions {
recursive: true,
..Default::default()
@@ -1606,7 +1609,7 @@ async fn test_project_reconnect(
});
let (worktree_a3, _) = project_a1
.update(cx_a, |p, cx| {
p.find_or_create_worktree("/root-1/dir3", true, cx)
p.find_or_create_worktree(path!("/root-1/dir3"), true, cx)
})
.await
.unwrap();
@@ -1647,13 +1650,13 @@ async fn test_project_reconnect(
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
vec![
"a.txt",
"b.txt",
"subdir2",
"subdir2/f.txt",
"subdir2/g.txt",
"subdir2/h.txt",
"subdir2/i.txt"
separator!("a.txt"),
separator!("b.txt"),
separator!("subdir2"),
separator!("subdir2/f.txt"),
separator!("subdir2/g.txt"),
separator!("subdir2/h.txt"),
separator!("subdir2/i.txt")
]
);
assert!(worktree_a3.read(cx).has_update_observer());
@@ -1680,13 +1683,13 @@ async fn test_project_reconnect(
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
vec![
"a.txt",
"b.txt",
"subdir2",
"subdir2/f.txt",
"subdir2/g.txt",
"subdir2/h.txt",
"subdir2/i.txt"
separator!("a.txt"),
separator!("b.txt"),
separator!("subdir2"),
separator!("subdir2/f.txt"),
separator!("subdir2/g.txt"),
separator!("subdir2/h.txt"),
separator!("subdir2/i.txt")
]
);
assert!(project.worktree_for_id(worktree2_id, cx).is_none());
@@ -1719,18 +1722,21 @@ async fn test_project_reconnect(
// While client B is disconnected, add and remove files from client A's project
client_a
.fs()
.insert_file("/root-1/dir1/subdir2/j.txt", "j-contents".into())
.insert_file(path!("/root-1/dir1/subdir2/j.txt"), "j-contents".into())
.await;
client_a
.fs()
.remove_file("/root-1/dir1/subdir2/i.txt".as_ref(), Default::default())
.remove_file(
path!("/root-1/dir1/subdir2/i.txt").as_ref(),
Default::default(),
)
.await
.unwrap();
// While client B is disconnected, add and remove worktrees from client A's project.
let (worktree_a4, _) = project_a1
.update(cx_a, |p, cx| {
p.find_or_create_worktree("/root-1/dir4", true, cx)
p.find_or_create_worktree(path!("/root-1/dir4"), true, cx)
})
.await
.unwrap();
@@ -1773,13 +1779,13 @@ async fn test_project_reconnect(
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
vec![
"a.txt",
"b.txt",
"subdir2",
"subdir2/f.txt",
"subdir2/g.txt",
"subdir2/h.txt",
"subdir2/j.txt"
separator!("a.txt"),
separator!("b.txt"),
separator!("subdir2"),
separator!("subdir2/f.txt"),
separator!("subdir2/g.txt"),
separator!("subdir2/h.txt"),
separator!("subdir2/j.txt")
]
);
assert!(project.worktree_for_id(worktree2_id, cx).is_none());
@@ -2316,14 +2322,14 @@ async fn test_propagate_saves_and_fs_changes(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"file1.rs": "",
"file2": ""
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let worktree_a = project_a.read_with(cx_a, |p, cx| p.worktrees(cx).next().unwrap());
let project_id = active_call_a
@@ -2409,18 +2415,25 @@ async fn test_propagate_saves_and_fs_changes(
client_a
.fs()
.rename(
"/a/file1.rs".as_ref(),
"/a/file1.js".as_ref(),
path!("/a/file1.rs").as_ref(),
path!("/a/file1.js").as_ref(),
Default::default(),
)
.await
.unwrap();
client_a
.fs()
.rename("/a/file2".as_ref(), "/a/file3".as_ref(), Default::default())
.rename(
path!("/a/file2").as_ref(),
path!("/a/file3").as_ref(),
Default::default(),
)
.await
.unwrap();
client_a.fs().insert_file("/a/file4", "4".into()).await;
client_a
.fs()
.insert_file(path!("/a/file4"), "4".into())
.await;
executor.run_until_parked();
worktree_a.read_with(cx_a, |tree, _| {
@@ -2959,7 +2972,7 @@ async fn test_git_status_sync(
client_a
.fs()
.insert_tree(
"/dir",
path!("/dir"),
json!({
".git": {},
"a.txt": "a",
@@ -2972,11 +2985,11 @@ async fn test_git_status_sync(
// Initially, a.txt is uncommitted, but present in the index,
// and b.txt is unmerged.
client_a.fs().set_head_for_repo(
"/dir/.git".as_ref(),
path!("/dir/.git").as_ref(),
&[("b.txt".into(), "B".into()), ("c.txt".into(), "c".into())],
);
client_a.fs().set_index_for_repo(
"/dir/.git".as_ref(),
path!("/dir/.git").as_ref(),
&[
("a.txt".into(), "".into()),
("b.txt".into(), "B".into()),
@@ -2984,7 +2997,7 @@ async fn test_git_status_sync(
],
);
client_a.fs().set_unmerged_paths_for_repo(
"/dir/.git".as_ref(),
path!("/dir/.git").as_ref(),
&[(
"b.txt".into(),
UnmergedStatus {
@@ -3003,7 +3016,7 @@ async fn test_git_status_sync(
second_head: UnmergedStatusCode::Deleted,
});
let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let (project_local, _worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| {
call.share_project(project_local.clone(), cx)
@@ -3068,15 +3081,15 @@ async fn test_git_status_sync(
// Delete b.txt from the index, mark conflict as resolved,
// and modify c.txt in the working copy.
client_a.fs().set_index_for_repo(
"/dir/.git".as_ref(),
path!("/dir/.git").as_ref(),
&[("a.txt".into(), "a".into()), ("c.txt".into(), "c".into())],
);
client_a
.fs()
.set_unmerged_paths_for_repo("/dir/.git".as_ref(), &[]);
.set_unmerged_paths_for_repo(path!("/dir/.git").as_ref(), &[]);
client_a
.fs()
.atomic_write("/dir/c.txt".into(), "CC".into())
.atomic_write(path!("/dir/c.txt").into(), "CC".into())
.await
.unwrap();
@@ -3109,7 +3122,7 @@ async fn test_git_status_sync(
// Now remove the original git repository and check that collaborators are notified.
client_a
.fs()
.remove_dir("/dir/.git".as_ref(), RemoveOptions::default())
.remove_dir(path!("/dir/.git").as_ref(), RemoveOptions::default())
.await
.unwrap();
@@ -3145,14 +3158,14 @@ async fn test_fs_operations(
client_a
.fs()
.insert_tree(
"/dir",
path!("/dir"),
json!({
"a.txt": "a-contents",
"b.txt": "b-contents",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -3283,13 +3296,13 @@ async fn test_fs_operations(
.map(|p| p.to_string_lossy())
.collect::<Vec<_>>(),
[
"DIR",
"DIR/SUBDIR",
"DIR/SUBDIR/f.txt",
"DIR/e.txt",
"a.txt",
"b.txt",
"d.txt"
separator!("DIR"),
separator!("DIR/SUBDIR"),
separator!("DIR/SUBDIR/f.txt"),
separator!("DIR/e.txt"),
separator!("a.txt"),
separator!("b.txt"),
separator!("d.txt")
]
);
});
@@ -3301,13 +3314,13 @@ async fn test_fs_operations(
.map(|p| p.to_string_lossy())
.collect::<Vec<_>>(),
[
"DIR",
"DIR/SUBDIR",
"DIR/SUBDIR/f.txt",
"DIR/e.txt",
"a.txt",
"b.txt",
"d.txt"
separator!("DIR"),
separator!("DIR/SUBDIR"),
separator!("DIR/SUBDIR/f.txt"),
separator!("DIR/e.txt"),
separator!("a.txt"),
separator!("b.txt"),
separator!("d.txt")
]
);
});
@@ -3327,14 +3340,14 @@ async fn test_fs_operations(
.map(|p| p.to_string_lossy())
.collect::<Vec<_>>(),
[
"DIR",
"DIR/SUBDIR",
"DIR/SUBDIR/f.txt",
"DIR/e.txt",
"a.txt",
"b.txt",
"d.txt",
"f.txt"
separator!("DIR"),
separator!("DIR/SUBDIR"),
separator!("DIR/SUBDIR/f.txt"),
separator!("DIR/e.txt"),
separator!("a.txt"),
separator!("b.txt"),
separator!("d.txt"),
separator!("f.txt")
]
);
});
@@ -3346,14 +3359,14 @@ async fn test_fs_operations(
.map(|p| p.to_string_lossy())
.collect::<Vec<_>>(),
[
"DIR",
"DIR/SUBDIR",
"DIR/SUBDIR/f.txt",
"DIR/e.txt",
"a.txt",
"b.txt",
"d.txt",
"f.txt"
separator!("DIR"),
separator!("DIR/SUBDIR"),
separator!("DIR/SUBDIR/f.txt"),
separator!("DIR/e.txt"),
separator!("a.txt"),
separator!("b.txt"),
separator!("d.txt"),
separator!("f.txt")
]
);
});
@@ -3570,13 +3583,13 @@ async fn test_buffer_conflict_after_save(
client_a
.fs()
.insert_tree(
"/dir",
path!("/dir"),
json!({
"a.txt": "a-contents",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -3634,13 +3647,13 @@ async fn test_buffer_reloading(
client_a
.fs()
.insert_tree(
"/dir",
path!("/dir"),
json!({
"a.txt": "a\nb\nc",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -3662,7 +3675,11 @@ async fn test_buffer_reloading(
let new_contents = Rope::from("d\ne\nf");
client_a
.fs()
.save("/dir/a.txt".as_ref(), &new_contents, LineEnding::Windows)
.save(
path!("/dir/a.txt").as_ref(),
&new_contents,
LineEnding::Windows,
)
.await
.unwrap();
@@ -3692,9 +3709,9 @@ async fn test_editing_while_guest_opens_buffer(
client_a
.fs()
.insert_tree("/dir", json!({ "a.txt": "a-contents" }))
.insert_tree(path!("/dir"), json!({ "a.txt": "a-contents" }))
.await;
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -4007,19 +4024,19 @@ async fn test_collaborating_with_diagnostics(
client_a
.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"a.rs": "let one = two",
"other.rs": "",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
// Cause the language server to start.
let _buffer = project_a
.update(cx_a, |project, cx| {
project.open_local_buffer_with_lsp("/a/other.rs", cx)
project.open_local_buffer_with_lsp(path!("/a/other.rs"), cx)
})
.await
.unwrap();
@@ -4031,7 +4048,7 @@ async fn test_collaborating_with_diagnostics(
.await;
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -4051,7 +4068,7 @@ async fn test_collaborating_with_diagnostics(
.unwrap();
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
severity: Some(lsp::DiagnosticSeverity::ERROR),
@@ -4125,7 +4142,7 @@ async fn test_collaborating_with_diagnostics(
// Simulate a language server reporting more errors for a file.
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
@@ -4219,7 +4236,7 @@ async fn test_collaborating_with_diagnostics(
// Simulate a language server reporting no errors for a file.
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![],
},
@@ -4275,7 +4292,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
client_a
.fs()
.insert_tree(
"/test",
path!("/test"),
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = 2;",
@@ -4286,7 +4303,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/test", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/test"), cx_a).await;
// Share a project as client A
let active_call_a = cx_a.read(ActiveCall::global);
@@ -4325,7 +4342,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
for file_name in file_names {
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
&lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path(Path::new("/test").join(file_name)).unwrap(),
uri: lsp::Url::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -4392,9 +4409,9 @@ async fn test_reloading_buffer_manually(
client_a
.fs()
.insert_tree("/a", json!({ "a.rs": "let one = 1;" }))
.insert_tree(path!("/a"), json!({ "a.rs": "let one = 1;" }))
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
let buffer_a = project_a
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
.await
@@ -4422,7 +4439,7 @@ async fn test_reloading_buffer_manually(
client_a
.fs()
.save(
"/a/a.rs".as_ref(),
path!("/a/a.rs").as_ref(),
&Rope::from("let seven = 7;"),
LineEnding::Unix,
)
@@ -4544,39 +4561,45 @@ async fn test_formatting_buffer(
"let honey = \"two\"\n"
);
// Ensure buffer can be formatted using an external command. Notice how the
// host's configuration is honored as opposed to using the guest's settings.
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
vec![Formatter::External {
command: "awk".into(),
arguments: Some(vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into()),
}]
.into(),
)));
// There is no `awk` command on Windows.
#[cfg(not(target_os = "windows"))]
{
// Ensure buffer can be formatted using an external command. Notice how the
// host's configuration is honored as opposed to using the guest's settings.
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
vec![Formatter::External {
command: "awk".into(),
arguments: Some(
vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
),
}]
.into(),
)));
});
});
});
});
executor.allow_parking();
project_b
.update(cx_b, |project, cx| {
project.format(
HashSet::from_iter([buffer_b.clone()]),
LspFormatTarget::Buffers,
true,
FormatTrigger::Save,
cx,
)
})
.await
.unwrap();
assert_eq!(
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
format!("let honey = \"{}/a.rs\"\n", directory.to_str().unwrap())
);
executor.allow_parking();
project_b
.update(cx_b, |project, cx| {
project.format(
HashSet::from_iter([buffer_b.clone()]),
LspFormatTarget::Buffers,
true,
FormatTrigger::Save,
cx,
)
})
.await
.unwrap();
assert_eq!(
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
format!("let honey = \"{}/a.rs\"\n", directory.to_str().unwrap())
);
}
}
#[gpui::test(iterations = 10)]
@@ -4734,7 +4757,7 @@ async fn test_definition(
client_a
.fs()
.insert_tree(
"/root",
path!("/root"),
json!({
"dir-1": {
"a.rs": "const ONE: usize = b::TWO + b::THREE;",
@@ -4746,7 +4769,9 @@ async fn test_definition(
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", cx_a).await;
let (project_a, worktree_id) = client_a
.build_local_project(path!("/root/dir-1"), cx_a)
.await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -4767,7 +4792,7 @@ async fn test_definition(
|_, _| async move {
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
),
)))
@@ -4798,7 +4823,7 @@ async fn test_definition(
|_, _| async move {
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)),
),
)))
@@ -4835,7 +4860,7 @@ async fn test_definition(
);
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
lsp::Url::from_file_path("/root/dir-2/c.rs").unwrap(),
lsp::Url::from_file_path(path!("/root/dir-2/c.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 5), lsp::Position::new(0, 7)),
),
)))
@@ -4887,7 +4912,7 @@ async fn test_references(
client_a
.fs()
.insert_tree(
"/root",
path!("/root"),
json!({
"dir-1": {
"one.rs": "const ONE: usize = 1;",
@@ -4899,7 +4924,9 @@ async fn test_references(
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", cx_a).await;
let (project_a, worktree_id) = client_a
.build_local_project(path!("/root/dir-1"), cx_a)
.await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -4922,7 +4949,7 @@ async fn test_references(
move |params, _| {
assert_eq!(
params.text_document_position.text_document.uri.as_str(),
"file:///root/dir-1/one.rs"
uri!("file:///root/dir-1/one.rs")
);
let rx = rx.clone();
async move {
@@ -4951,15 +4978,15 @@ async fn test_references(
lsp_response_tx
.unbounded_send(Ok(Some(vec![
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
},
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)),
},
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir-2/three.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/root/dir-2/three.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
},
])))
@@ -4984,7 +5011,7 @@ async fn test_references(
assert_eq!(references[1].buffer, references[0].buffer);
assert_eq!(
three_buffer.file().unwrap().full_path(cx),
Path::new("/root/dir-2/three.rs")
Path::new(path!("/root/dir-2/three.rs"))
);
assert_eq!(references[0].range.to_offset(two_buffer), 24..27);
@@ -5138,7 +5165,7 @@ async fn test_document_highlights(
client_a
.fs()
.insert_tree(
"/root-1",
path!("/root-1"),
json!({
"main.rs": "fn double(number: i32) -> i32 { number + number }",
}),
@@ -5150,7 +5177,7 @@ async fn test_document_highlights(
.register_fake_lsp("Rust", Default::default());
client_a.language_registry().add(rust_lang());
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/root-1"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -5175,7 +5202,7 @@ async fn test_document_highlights(
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
uri!("file:///root-1/main.rs")
);
assert_eq!(
params.text_document_position_params.position,
@@ -5238,7 +5265,7 @@ async fn test_lsp_hover(
client_a
.fs()
.insert_tree(
"/root-1",
path!("/root-1"),
json!({
"main.rs": "use std::collections::HashMap;",
}),
@@ -5272,7 +5299,7 @@ async fn test_lsp_hover(
),
];
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/root-1"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -5312,7 +5339,7 @@ async fn test_lsp_hover(
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
uri!("file:///root-1/main.rs")
);
let name = new_server_name.clone();
async move {
@@ -5338,7 +5365,7 @@ async fn test_lsp_hover(
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
uri!("file:///root-1/main.rs")
);
assert_eq!(
params.text_document_position_params.position,
@@ -5445,7 +5472,7 @@ async fn test_project_symbols(
client_a
.fs()
.insert_tree(
"/code",
path!("/code"),
json!({
"crate-1": {
"one.rs": "const ONE: usize = 1;",
@@ -5459,7 +5486,9 @@ async fn test_project_symbols(
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/code/crate-1", cx_a).await;
let (project_a, worktree_id) = client_a
.build_local_project(path!("/code/crate-1"), cx_a)
.await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -5482,7 +5511,7 @@ async fn test_project_symbols(
lsp::SymbolInformation {
name: "TWO".into(),
location: lsp::Location {
uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
uri: lsp::Url::from_file_path(path!("/code/crate-2/two.rs")).unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
},
kind: lsp::SymbolKind::CONSTANT,
@@ -5513,13 +5542,13 @@ async fn test_project_symbols(
buffer_b_2.read_with(cx_b, |buffer, cx| {
assert_eq!(
buffer.file().unwrap().full_path(cx),
Path::new("/code/crate-2/two.rs")
Path::new(path!("/code/crate-2/two.rs"))
);
});
// Attempt to craft a symbol and violate host's privacy by opening an arbitrary file.
let mut fake_symbol = symbols[0].clone();
fake_symbol.path.path = Path::new("/code/secrets").into();
fake_symbol.path.path = Path::new(path!("/code/secrets")).into();
let error = project_b
.update(cx_b, |project, cx| {
project.open_buffer_for_symbol(&fake_symbol, cx)
@@ -5552,14 +5581,14 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
client_a
.fs()
.insert_tree(
"/root",
path!("/root"),
json!({
"a.rs": "const ONE: usize = b::TWO;",
"b.rs": "const TWO: usize = 2",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/root", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/root"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -5578,7 +5607,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
|_, _| async move {
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
lsp::Location::new(
lsp::Url::from_file_path("/root/b.rs").unwrap(),
lsp::Url::from_file_path(path!("/root/b.rs")).unwrap(),
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
),
)))

View File

@@ -27,7 +27,7 @@ use std::{
rc::Rc,
sync::Arc,
};
use util::ResultExt;
use util::{path, ResultExt};
#[gpui::test(
iterations = 100,
@@ -280,7 +280,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let mut paths = client.fs().paths(false);
paths.remove(0);
let new_root_path = if paths.is_empty() || rng.gen() {
Path::new("/").join(plan.next_root_dir_name())
Path::new(path!("/")).join(plan.next_root_dir_name())
} else {
paths.choose(rng).unwrap().clone()
};
@@ -547,7 +547,7 @@ impl RandomizedTest for ProjectCollaborationTest {
first_root_name
);
let root_path = Path::new("/").join(&first_root_name);
let root_path = Path::new(path!("/")).join(&first_root_name);
client.fs().create_dir(&root_path).await.unwrap();
client
.fs()

View File

@@ -26,6 +26,7 @@ use remote_server::{HeadlessAppState, HeadlessProject};
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc};
use util::{path, separator};
#[gpui::test(iterations = 10)]
async fn test_sharing_an_ssh_remote_project(
@@ -52,7 +53,7 @@ async fn test_sharing_an_ssh_remote_project(
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
"/code",
path!("/code"),
json!({
"project1": {
".zed": {
@@ -92,7 +93,7 @@ async fn test_sharing_an_ssh_remote_project(
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
let (project_a, worktree_id) = client_a
.build_ssh_project("/code/project1", client_ssh, cx_a)
.build_ssh_project(path!("/code/project1"), client_ssh, cx_a)
.await;
// While the SSH worktree is being scanned, user A shares the remote project.
@@ -178,7 +179,7 @@ async fn test_sharing_an_ssh_remote_project(
.unwrap();
assert_eq!(
remote_fs
.load("/code/project1/src/renamed.rs".as_ref())
.load(path!("/code/project1/src/renamed.rs").as_ref())
.await
.unwrap(),
"fn one() -> usize { 100 }"
@@ -193,7 +194,7 @@ async fn test_sharing_an_ssh_remote_project(
.path()
.to_string_lossy()
.to_string(),
"src/renamed.rs".to_string()
separator!("src/renamed.rs").to_string()
);
});
}
@@ -408,7 +409,10 @@ async fn test_ssh_collaboration_formatting_with_prettier(
let buffer_text = "let one = \"two\"";
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
remote_fs
.insert_tree("/project", serde_json::json!({ "a.ts": buffer_text }))
.insert_tree(
path!("/project"),
serde_json::json!({ "a.ts": buffer_text }),
)
.await;
let test_plugin = "test_plugin";
@@ -455,7 +459,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
let (project_a, worktree_id) = client_a
.build_ssh_project("/project", client_ssh, cx_a)
.build_ssh_project(path!("/project"), client_ssh, cx_a)
.await;
// While the SSH worktree is being scanned, user A shares the remote project.

View File

@@ -43,6 +43,7 @@ use std::{
Arc,
},
};
use util::path;
use workspace::{Workspace, WorkspaceStore};
#[cfg(not(target_os = "macos"))]
@@ -741,7 +742,7 @@ impl TestClient {
pub async fn build_test_project(&self, cx: &mut TestAppContext) -> Entity<Project> {
self.fs()
.insert_tree(
"/a",
path!("/a"),
json!({
"1.txt": "one\none\none",
"2.js": "function two() { return 2; }",
@@ -749,7 +750,7 @@ impl TestClient {
}),
)
.await;
self.build_local_project("/a", cx).await.0
self.build_local_project(path!("/a"), cx).await.0
}
pub async fn host_workspace(

View File

@@ -59,11 +59,6 @@ impl FeatureFlag for Assistant2FeatureFlag {
const NAME: &'static str = "assistant2";
}
pub struct PredictEditsFeatureFlag;
impl FeatureFlag for PredictEditsFeatureFlag {
const NAME: &'static str = "predict-edits";
}
pub struct PredictEditsRateCompletionsFeatureFlag;
impl FeatureFlag for PredictEditsRateCompletionsFeatureFlag {
const NAME: &'static str = "predict-edits-rate-completions";

View File

@@ -953,28 +953,38 @@ impl FileFinderDelegate {
let path = &path_match.path;
let path_string = path.to_string_lossy();
let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
let mut path_positions = path_match.positions.clone();
let positions = path_match.positions.clone();
let file_name = path.file_name().map_or_else(
|| path_match.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
// Calculate where the filename starts in the full path
let file_name_start = path_match.path_prefix.len() + path_string.len() - file_name.len();
let file_name_positions = path_positions
.iter()
.filter_map(|pos| {
if pos >= &file_name_start {
Some(pos - file_name_start)
} else {
None
}
})
.collect();
// Create a copy of the full path without the filename (this is the parent directory)
let parent_path = full_path[..full_path.len() - file_name.len()].to_string();
// Process each highlight position
let mut file_name_positions = Vec::new();
let mut parent_path_positions = Vec::new();
for &pos in &positions {
// For the filename part
if pos >= file_name_start && pos < full_path.len() {
// This position is in the filename part
file_name_positions.push(pos - file_name_start);
}
// For the parent path part
if pos < parent_path.len() {
// This position is in the parent path part
parent_path_positions.push(pos);
}
}
let full_path = full_path.trim_end_matches(&file_name).to_string();
path_positions.retain(|idx| *idx < full_path.len());
(file_name, file_name_positions, full_path, path_positions)
(file_name, file_name_positions, parent_path, parent_path_positions)
}
fn lookup_absolute_path(
@@ -1339,7 +1349,119 @@ impl PickerDelegate for FileFinderDelegate {
.size(IconSize::Small.rems())
.into_any_element(),
};
let (file_name_label, full_path_label) = self.labels_for_match(path_match, window, cx, ix);
// Get the path information
let path_info = match &path_match {
Match::History {
path: entry_path,
panel_match,
} => {
let worktree_id = entry_path.project.worktree_id;
let project_relative_path = &entry_path.project.path;
let has_worktree = self
.project
.read(cx)
.worktree_for_id(worktree_id, cx)
.is_some();
// Use window to avoid unused variable warning
let _ = window;
if let Some(absolute_path) =
entry_path.absolute.as_ref().filter(|_| !has_worktree)
{
(
absolute_path
.file_name()
.map_or_else(
|| project_relative_path.to_string_lossy(),
|file_name| file_name.to_string_lossy(),
)
.to_string(),
absolute_path.to_string_lossy().to_string(),
Vec::new(),
)
} else {
let mut path = Arc::clone(project_relative_path);
if project_relative_path.as_ref() == Path::new("") {
if let Some(absolute_path) = &entry_path.absolute {
path = Arc::from(absolute_path.as_path());
}
}
let mut path_match = PathMatch {
score: ix as f64,
positions: Vec::new(),
worktree_id: worktree_id.to_usize(),
path,
is_dir: false, // File finder doesn't support directories
path_prefix: "".into(),
distance_to_relative_ancestor: usize::MAX,
};
if let Some(found_path_match) = &panel_match {
path_match
.positions
.extend(found_path_match.0.positions.iter())
}
let path_string = path_match.path.to_string_lossy();
let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
let positions = path_match.positions.clone();
let file_name = path_match.path.file_name().map_or_else(
|| path_match.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
(file_name, full_path, positions)
}
}
Match::Search(path_match) => {
let path_string = path_match.0.path.to_string_lossy();
let full_path = [path_match.0.path_prefix.as_ref(), path_string.as_ref()].join("");
let positions = path_match.0.positions.clone();
let file_name = path_match.0.path.file_name().map_or_else(
|| path_match.0.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
(file_name, full_path, positions)
}
};
let (file_name, full_path, positions) = path_info;
// Calculate where the filename starts in the full path
let file_name_start = full_path.len() - file_name.len();
// Create a parent path
let parent_path = full_path[..file_name_start].to_string();
// Create parent path label with highlighting
let parent_highlight_positions: Vec<usize> = positions
.iter()
.filter(|&&pos| pos < parent_path.len())
.copied()
.collect();
let parent_path_label = HighlightedLabel::new(parent_path, parent_highlight_positions)
.size(LabelSize::Small)
.color(Color::Muted);
// Create filename label with highlighting
let file_highlight_positions: Vec<usize> = positions
.iter()
.filter_map(|&pos| {
if pos >= file_name_start {
Some(pos - file_name_start)
} else {
None
}
})
.collect();
let file_name_label = HighlightedLabel::new(file_name.clone(), file_highlight_positions);
let file_icon = maybe!({
if !settings.file_icons {
@@ -1362,7 +1484,7 @@ impl PickerDelegate for FileFinderDelegate {
.gap_2()
.py_px()
.child(file_name_label)
.child(full_path_label),
.child(parent_path_label),
),
)
}

View File

@@ -27,9 +27,11 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::value::RawValue;
use settings::{Settings, SettingsStore};
use smol::io::{AsyncReadExt, BufReader};
use smol::Timer;
use std::{
future,
sync::{Arc, LazyLock},
time::Duration,
};
use strum::IntoEnumIterator;
use ui::{prelude::*, TintColor};
@@ -456,6 +458,8 @@ pub struct CloudLanguageModel {
}
impl CloudLanguageModel {
const MAX_RETRIES: usize = 3;
async fn perform_llm_completion(
client: Arc<Client>,
llm_api_token: LlmApiToken,
@@ -464,9 +468,10 @@ impl CloudLanguageModel {
let http_client = &client.http_client();
let mut token = llm_api_token.acquire(&client).await?;
let mut did_retry = false;
let mut retries_remaining = Self::MAX_RETRIES;
let mut retry_delay = Duration::from_secs(1);
let response = loop {
loop {
let request_builder = http_client::Request::builder();
let request = request_builder
.method(Method::POST)
@@ -475,36 +480,53 @@ impl CloudLanguageModel {
.header("Authorization", format!("Bearer {token}"))
.body(serde_json::to_string(&body)?.into())?;
let mut response = http_client.send(request).await?;
if response.status().is_success() {
break response;
} else if !did_retry
&& response
.headers()
.get(EXPIRED_LLM_TOKEN_HEADER_NAME)
.is_some()
let status = response.status();
if status.is_success() {
return Ok(response);
} else if response
.headers()
.get(EXPIRED_LLM_TOKEN_HEADER_NAME)
.is_some()
{
did_retry = true;
retries_remaining -= 1;
token = llm_api_token.refresh(&client).await?;
} else if response.status() == StatusCode::FORBIDDEN
} else if status == StatusCode::FORBIDDEN
&& response
.headers()
.get(MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME)
.is_some()
{
break Err(anyhow!(MaxMonthlySpendReachedError))?;
} else if response.status() == StatusCode::PAYMENT_REQUIRED {
break Err(anyhow!(PaymentRequiredError))?;
return Err(anyhow!(MaxMonthlySpendReachedError));
} else if status.as_u16() >= 500 && status.as_u16() < 600 {
// If we encounter an error in the 500 range, retry after a delay.
// We've seen at least these in the wild from API providers:
// * 500 Internal Server Error
// * 502 Bad Gateway
// * 529 Service Overloaded
if retries_remaining == 0 {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
return Err(anyhow!(
"cloud language model completion failed after {} retries with status {status}: {body}",
Self::MAX_RETRIES
));
}
Timer::after(retry_delay).await;
retries_remaining -= 1;
retry_delay *= 2; // If it fails again, wait longer.
} else if status == StatusCode::PAYMENT_REQUIRED {
return Err(anyhow!(PaymentRequiredError));
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
break Err(anyhow!(
"cloud language model completion failed with status {}: {body}",
response.status()
))?;
return Err(anyhow!(
"cloud language model completion failed with status {status}: {body}",
));
}
};
Ok(response)
}
}
}

View File

@@ -15,10 +15,17 @@ impl HighlightedLabel {
/// Constructs a label with the given characters highlighted.
/// Characters are identified by UTF-8 byte position.
pub fn new(label: impl Into<SharedString>, highlight_indices: Vec<usize>) -> Self {
    let label_str = label.into();
    // Drop indices that fall past the end of the label text so rendering
    // never attempts to highlight out-of-bounds byte positions.
    // NOTE(review): this bounds check is byte-based; presumably callers
    // supply indices on UTF-8 char boundaries — confirm upstream.
    let valid_indices = highlight_indices
        .into_iter()
        .filter(|&idx| idx < label_str.len())
        .collect();
    Self {
        base: LabelLike::new(),
        label: label_str,
        highlight_indices: valid_indices,
    }
}
}

View File

@@ -32,15 +32,9 @@ Clicking on it would take you to a modal with a button ("Enable Edit Prediction"
![Onboarding banner and modal](https://zed.dev/img/edit-prediction/docs.webp)
But, if you haven't come across the banner, start using Zed's Edit Prediction by adding this to your settings:
But if you haven't come across the banner, Zed's Edit Prediction is the default edit prediction provider, and you should see it in your status bar right away.
```json
"features": {
"edit_prediction_provider": "zed"
},
```
### Switching modes
### Switching modes {#switching-modes}
Zed's Edit Prediction comes with two different display modes:
@@ -224,7 +218,7 @@ If you would like to use the default keybinding, you can free it up by either mo
## Disabling Automatic Edit Prediction
To disable predictions that appear automatically as you type, set this within `settings.json`:
To prevent predictions from appearing automatically as you type, set this within `settings.json`:
```json
{
@@ -246,6 +240,18 @@ You can also add this as a language-specific setting in your `settings.json` to
}
```
Alternatively, if you're using Zed's Edit Prediction, you can [use Subtle Mode](#switching-modes).
### Turning Off Completely
To turn off edit prediction completely, across all providers, explicitly set the `edit_prediction_provider` setting to `none`, like so:
```json
"features": {
"edit_prediction_provider": "none"
},
```
## Configuring GitHub Copilot {#github-copilot}
To use GitHub Copilot, set this within `settings.json`:

View File

@@ -1,6 +1,6 @@
[package]
name = "zed_html"
version = "0.1.6"
version = "0.2.0"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"

View File

@@ -1,7 +1,7 @@
id = "html"
name = "HTML"
description = "HTML support."
version = "0.1.6"
version = "0.2.0"
schema_version = 1
authors = ["Isaac Clayton <slightknack@gmail.com>"]
repository = "https://github.com/zed-industries/zed"