Compare commits

..

1 Commits

Author SHA1 Message Date
smit
6a9fa435f0 initial idea 2025-03-17 15:29:58 +05:30
94 changed files with 2885 additions and 3776 deletions

19
Cargo.lock generated
View File

@@ -692,7 +692,6 @@ name = "assistant_tool"
version = "0.1.0"
dependencies = [
"anyhow",
"clock",
"collections",
"derive_more",
"gpui",
@@ -717,7 +716,6 @@ dependencies = [
"gpui",
"language",
"language_model",
"pretty_assertions",
"project",
"rand 0.8.5",
"release_channel",
@@ -727,10 +725,8 @@ dependencies = [
"settings",
"theme",
"ui",
"unindent",
"util",
"workspace",
"worktree",
]
[[package]]
@@ -2743,7 +2739,6 @@ dependencies = [
"futures 0.3.31",
"gpui",
"http_client",
"http_client_tls",
"log",
"parking_lot",
"paths",
@@ -5509,10 +5504,8 @@ dependencies = [
"indoc",
"pretty_assertions",
"regex",
"schemars",
"serde",
"serde_json",
"settings",
"url",
"util",
]
@@ -6208,19 +6201,13 @@ dependencies = [
"futures 0.3.31",
"http 1.2.0",
"log",
"rustls 0.23.23",
"rustls-platform-verifier",
"serde",
"serde_json",
"url",
]
[[package]]
name = "http_client_tls"
version = "0.1.0"
dependencies = [
"rustls 0.23.23",
"rustls-platform-verifier",
]
[[package]]
name = "httparse"
version = "1.9.5"
@@ -11259,7 +11246,6 @@ dependencies = [
"smol",
"tempfile",
"thiserror 1.0.69",
"urlencoding",
"util",
]
@@ -11474,7 +11460,6 @@ dependencies = [
"futures 0.3.31",
"gpui",
"http_client",
"http_client_tls",
"log",
"regex",
"reqwest 0.12.8",

View File

@@ -65,7 +65,6 @@ members = [
"crates/gpui_tokio",
"crates/html_to_markdown",
"crates/http_client",
"crates/http_client_tls",
"crates/image_viewer",
"crates/indexed_docs",
"crates/inline_completion",
@@ -263,7 +262,6 @@ gpui_macros = { path = "crates/gpui_macros" }
gpui_tokio = { path = "crates/gpui_tokio" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
http_client_tls = { path = "crates/http_client_tls" }
image_viewer = { path = "crates/image_viewer" }
indexed_docs = { path = "crates/indexed_docs" }
inline_completion = { path = "crates/inline_completion" }
@@ -565,7 +563,6 @@ unindent = "0.2.0"
unicode-segmentation = "1.10"
unicode-script = "0.5.7"
url = "2.2"
urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
wasmparser = "0.221"
wasm-encoder = "0.221"

View File

@@ -850,24 +850,8 @@
//
// The minimum column number to show the inline blame information at
// "min_column": 0
},
// How git hunks are displayed visually in the editor.
// This setting can take two values:
//
// 1. Show unstaged hunks filled and staged hunks hollow:
// "hunk_style": "staged_hollow"
// 2. Show unstaged hunks hollow and staged hunks filled:
// "hunk_style": "unstaged_hollow"
"hunk_style": "staged_hollow"
}
},
// The list of custom Git hosting providers.
"git_hosting_providers": [
// {
// "provider": "github",
// "name": "BigCorp GitHub",
// "base_url": "https://code.big-corp.com"
// }
],
// Configuration for how direnv configuration should be loaded. May take 2 values:
// 1. Load direnv configuration using `direnv export json` directly.
// "load_direnv": "direct"

View File

@@ -3569,7 +3569,6 @@ impl CodeActionProvider for AssistantCodeActionProvider {
title: "Fix with Assistant".into(),
..Default::default()
})),
resolved: true,
}]))
} else {
Task::ready(Ok(Vec::new()))

View File

@@ -116,7 +116,7 @@ impl ActiveThread {
pub fn cancel_last_completion(&mut self, cx: &mut App) -> bool {
self.last_error.take();
self.thread
.update(cx, |thread, cx| thread.cancel_last_completion(cx))
.update(cx, |thread, _cx| thread.cancel_last_completion())
}
pub fn last_error(&self) -> Option<ThreadError> {
@@ -343,11 +343,8 @@ impl ActiveThread {
});
}
ThreadEvent::ToolFinished {
pending_tool_use,
canceled,
..
pending_tool_use, ..
} => {
let canceled = *canceled;
if let Some(tool_use) = pending_tool_use {
self.render_scripting_tool_use_markdown(
tool_use.id.clone(),
@@ -361,7 +358,7 @@ impl ActiveThread {
if self.thread.read(cx).all_tools_finished() {
let pending_refresh_buffers = self.thread.update(cx, |thread, cx| {
thread.action_log().update(cx, |action_log, _cx| {
action_log.take_stale_buffers_in_context()
action_log.take_pending_refresh_buffers()
})
});
@@ -399,10 +396,7 @@ impl ActiveThread {
this.update(&mut cx, |this, cx| {
this.thread.update(cx, |thread, cx| {
thread.attach_tool_results(updated_context, cx);
if !canceled {
thread.send_to_model(model, RequestKind::Chat, cx);
}
thread.send_tool_results_to_model(model, updated_context, cx);
});
})
})

View File

@@ -1729,7 +1729,6 @@ impl CodeActionProvider for AssistantCodeActionProvider {
title: "Fix with Assistant".into(),
..Default::default()
})),
resolved: true,
}]))
} else {
Task::ready(Ok(Vec::new()))

View File

@@ -158,7 +158,7 @@ impl MessageEditor {
return;
}
if self.thread.read(cx).is_generating() {
if self.thread.read(cx).is_streaming() {
return;
}
@@ -328,7 +328,7 @@ impl Render for MessageEditor {
let focus_handle = self.editor.focus_handle(cx);
let inline_context_picker = self.inline_context_picker.clone();
let bg_color = cx.theme().colors().editor_background;
let is_generating = self.thread.read(cx).is_generating();
let is_streaming_completion = self.thread.read(cx).is_streaming();
let is_model_selected = self.is_model_selected(cx);
let is_editor_empty = self.is_editor_empty(cx);
let submit_label_color = if is_editor_empty {
@@ -352,7 +352,7 @@ impl Render for MessageEditor {
v_flex()
.size_full()
.when(is_generating, |parent| {
.when(is_streaming_completion, |parent| {
let focus_handle = self.editor.focus_handle(cx).clone();
parent.child(
h_flex().py_3().w_full().justify_center().child(
@@ -625,7 +625,7 @@ impl Render for MessageEditor {
.disabled(
is_editor_empty
|| !is_model_selected
|| is_generating,
|| is_streaming_completion,
)
.child(
h_flex()
@@ -660,7 +660,7 @@ impl Render for MessageEditor {
"Type a message to submit",
))
})
.when(is_generating, |button| {
.when(is_streaming_completion, |button| {
button.tooltip(Tooltip::text(
"Cancel to submit a new message",
))

View File

@@ -1,4 +1,3 @@
use std::fmt::Write as _;
use std::io::Write;
use std::sync::Arc;
@@ -241,7 +240,7 @@ impl Thread {
self.messages.iter()
}
pub fn is_generating(&self) -> bool {
pub fn is_streaming(&self) -> bool {
!self.pending_completions.is_empty() || !self.all_tools_finished()
}
@@ -268,8 +267,8 @@ impl Thread {
.into_iter()
.chain(self.scripting_tool_use.pending_tool_uses());
// If the only pending tool uses left are the ones with errors, then
// that means that we've finished running all of the pending tools.
// If the only pending tool uses left are the ones with errors, then that means that we've finished running all
// of the pending tools.
all_pending_tool_uses.all(|tool_use| tool_use.status.is_error())
}
@@ -561,39 +560,9 @@ impl Thread {
request.messages.push(context_message);
}
self.attach_stale_files(&mut request.messages, cx);
request
}
fn attach_stale_files(&self, messages: &mut Vec<LanguageModelRequestMessage>, cx: &App) {
const STALE_FILES_HEADER: &str = "These files changed since last read:";
let mut stale_message = String::new();
for stale_file in self.action_log.read(cx).stale_buffers(cx) {
let Some(file) = stale_file.read(cx).file() else {
continue;
};
if stale_message.is_empty() {
write!(&mut stale_message, "{}", STALE_FILES_HEADER).ok();
}
writeln!(&mut stale_message, "- {}", file.path().display()).ok();
}
if !stale_message.is_empty() {
let context_message = LanguageModelRequestMessage {
role: Role::User,
content: vec![stale_message.into()],
cache: false,
};
messages.push(context_message);
}
}
pub fn stream_completion(
&mut self,
request: LanguageModelRequest,
@@ -714,7 +683,7 @@ impl Thread {
)));
}
thread.cancel_last_completion(cx);
thread.cancel_last_completion();
}
}
cx.emit(ThreadEvent::DoneStreaming);
@@ -864,7 +833,6 @@ impl Thread {
cx.emit(ThreadEvent::ToolFinished {
tool_use_id,
pending_tool_use,
canceled: false,
});
})
.ok();
@@ -894,7 +862,6 @@ impl Thread {
cx.emit(ThreadEvent::ToolFinished {
tool_use_id,
pending_tool_use,
canceled: false,
});
})
.ok();
@@ -905,8 +872,9 @@ impl Thread {
.run_pending_tool(tool_use_id, insert_output_task);
}
pub fn attach_tool_results(
pub fn send_tool_results_to_model(
&mut self,
model: Arc<dyn LanguageModel>,
updated_context: Vec<ContextSnapshot>,
cx: &mut Context<Self>,
) {
@@ -925,25 +893,17 @@ impl Thread {
Vec::new(),
cx,
);
self.send_to_model(model, RequestKind::Chat, cx);
}
/// Cancels the last pending completion, if there are any pending.
///
/// Returns whether a completion was canceled.
pub fn cancel_last_completion(&mut self, cx: &mut Context<Self>) -> bool {
if self.pending_completions.pop().is_some() {
pub fn cancel_last_completion(&mut self) -> bool {
if let Some(_last_completion) = self.pending_completions.pop() {
true
} else {
let mut canceled = false;
for pending_tool_use in self.tool_use.cancel_pending() {
canceled = true;
cx.emit(ThreadEvent::ToolFinished {
tool_use_id: pending_tool_use.id.clone(),
pending_tool_use: Some(pending_tool_use),
canceled: true,
});
}
canceled
false
}
}
@@ -1154,8 +1114,6 @@ pub enum ThreadEvent {
tool_use_id: LanguageModelToolUseId,
/// The pending tool use that corresponds to this tool.
pending_tool_use: Option<PendingToolUse>,
/// Whether the tool was canceled by the user.
canceled: bool,
},
}

View File

@@ -118,22 +118,6 @@ impl ToolUseState {
this
}
pub fn cancel_pending(&mut self) -> Vec<PendingToolUse> {
let mut pending_tools = Vec::new();
for (tool_use_id, tool_use) in self.pending_tool_uses_by_id.drain() {
self.tool_results.insert(
tool_use_id.clone(),
LanguageModelToolResult {
tool_use_id,
content: "Tool canceled by user".into(),
is_error: true,
},
);
pending_tools.push(tool_use.clone());
}
pending_tools
}
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
self.pending_tool_uses_by_id.values().collect()
}

View File

@@ -1,5 +1,5 @@
use anyhow::anyhow;
use assistant2::{RequestKind, Thread, ThreadEvent, ThreadStore};
use assistant2::{Thread, ThreadEvent, ThreadStore};
use assistant_tool::ToolWorkingSet;
use client::{Client, UserStore};
use collections::HashMap;
@@ -103,7 +103,6 @@ impl HeadlessAssistant {
ThreadEvent::ToolFinished {
tool_use_id,
pending_tool_use,
..
} => {
if let Some(pending_tool_use) = pending_tool_use {
println!(
@@ -122,8 +121,9 @@ impl HeadlessAssistant {
let model_registry = LanguageModelRegistry::read_global(cx);
if let Some(model) = model_registry.active_model() {
thread.update(cx, |thread, cx| {
thread.attach_tool_results(vec![], cx);
thread.send_to_model(model, RequestKind::Chat, cx);
// Currently evals do not support specifying context.
let updated_context = vec![];
thread.send_tool_results_to_model(model, updated_context, cx);
});
}
}

View File

@@ -48,12 +48,7 @@ fn main() {
let crate_dir = PathBuf::from("../zed-agent-bench");
let evaluation_data_dir = crate_dir.join("evaluation_data").canonicalize().unwrap();
let repos_dir = crate_dir.join("repos");
if !repos_dir.exists() {
std::fs::create_dir_all(&repos_dir).unwrap();
}
let repos_dir = repos_dir.canonicalize().unwrap();
let repos_dir = crate_dir.join("repos").canonicalize().unwrap();
let all_evals = std::fs::read_dir(&evaluation_data_dir)
.unwrap()

View File

@@ -14,7 +14,6 @@ path = "src/assistant_tool.rs"
[dependencies]
anyhow.workspace = true
collections.workspace = true
clock.workspace = true
derive_more.workspace = true
gpui.workspace = true
language.workspace = true

View File

@@ -4,7 +4,7 @@ mod tool_working_set;
use std::sync::Arc;
use anyhow::Result;
use collections::{HashMap, HashSet};
use collections::HashSet;
use gpui::Context;
use gpui::{App, Entity, SharedString, Task};
use language::Buffer;
@@ -58,53 +58,31 @@ pub trait Tool: 'static + Send + Sync {
/// Tracks actions performed by tools in a thread
#[derive(Debug)]
pub struct ActionLog {
/// Buffers that user manually added to the context, and whose content has
/// changed since the model last saw them.
stale_buffers_in_context: HashSet<Entity<Buffer>>,
/// Buffers that we want to notify the model about when they change.
tracked_buffers: HashMap<Entity<Buffer>, TrackedBuffer>,
}
#[derive(Debug, Default)]
struct TrackedBuffer {
version: clock::Global,
changed_buffers: HashSet<Entity<Buffer>>,
pending_refresh: HashSet<Entity<Buffer>>,
}
impl ActionLog {
/// Creates a new, empty action log.
pub fn new() -> Self {
Self {
stale_buffers_in_context: HashSet::default(),
tracked_buffers: HashMap::default(),
changed_buffers: HashSet::default(),
pending_refresh: HashSet::default(),
}
}
/// Track a buffer as read, so we can notify the model about user edits.
pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
let tracked_buffer = self.tracked_buffers.entry(buffer.clone()).or_default();
tracked_buffer.version = buffer.read(cx).version();
}
/// Mark a buffer as edited, so we can refresh it in the context
pub fn buffer_edited(&mut self, buffers: HashSet<Entity<Buffer>>, cx: &mut Context<Self>) {
for buffer in &buffers {
let tracked_buffer = self.tracked_buffers.entry(buffer.clone()).or_default();
tracked_buffer.version = buffer.read(cx).version();
}
self.stale_buffers_in_context.extend(buffers);
}
/// Iterate over buffers changed since last read or edited by the model
pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
self.tracked_buffers
.iter()
.filter(|(buffer, tracked)| tracked.version != buffer.read(cx).version)
.map(|(buffer, _)| buffer)
/// Registers buffers that have changed and need refreshing.
pub fn notify_buffers_changed(
&mut self,
buffers: HashSet<Entity<Buffer>>,
_cx: &mut Context<Self>,
) {
self.changed_buffers.extend(buffers.clone());
self.pending_refresh.extend(buffers);
}
/// Takes and returns the set of buffers pending refresh, clearing internal state.
pub fn take_stale_buffers_in_context(&mut self) -> HashSet<Entity<Buffer>> {
std::mem::take(&mut self.stale_buffers_in_context)
pub fn take_pending_refresh_buffers(&mut self) -> HashSet<Entity<Buffer>> {
std::mem::take(&mut self.pending_refresh)
}
}

View File

@@ -30,7 +30,6 @@ theme.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
worktree.workspace = true
settings.workspace = true
[dev-dependencies]
@@ -38,7 +37,4 @@ rand.workspace = true
collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
unindent.workspace = true
workspace = { workspace = true, features = ["test-support"] }

View File

@@ -1,15 +1,16 @@
use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, AppContext, Entity, Task};
use gpui::{App, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use std::{fs, path::PathBuf, sync::Arc};
use util::paths::PathMatcher;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct DeletePathToolInput {
/// The path of the file or directory to delete.
/// The glob to match files in the project to delete.
///
/// <example>
/// If the project has the following files:
@@ -18,9 +19,9 @@ pub struct DeletePathToolInput {
/// - directory2/a/things.txt
/// - directory3/a/other.txt
///
/// You can delete the first file by providing a path of "directory1/a/something.txt"
/// You can delete the first two files by providing a glob of "*thing*.txt"
/// </example>
pub path: String,
pub glob: String,
}
pub struct DeletePathTool;
@@ -47,26 +48,119 @@ impl Tool for DeletePathTool {
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let path_str = match serde_json::from_value::<DeletePathToolInput>(input) {
Ok(input) => input.path,
let glob = match serde_json::from_value::<DeletePathToolInput>(input) {
Ok(input) => input.glob,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
let path_matcher = match PathMatcher::new(&[glob.clone()]) {
Ok(matcher) => matcher,
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {}", err))),
};
match project
.read(cx)
.find_project_path(&path_str, cx)
.and_then(|path| project.update(cx, |project, cx| project.delete_file(path, false, cx)))
{
Some(deletion_task) => cx.background_spawn(async move {
match deletion_task.await {
Ok(()) => Ok(format!("Deleted {}", &path_str)),
Err(err) => Err(anyhow!("Failed to delete {}: {}", &path_str, err)),
struct Match {
display_path: String,
path: PathBuf,
}
let mut matches = Vec::new();
let mut deleted_paths = Vec::new();
let mut errors = Vec::new();
for worktree_handle in project.read(cx).worktrees(cx) {
let worktree = worktree_handle.read(cx);
let worktree_root = worktree.abs_path().to_path_buf();
// Don't consider ignored entries.
for entry in worktree.entries(false, 0) {
if path_matcher.is_match(&entry.path) {
matches.push(Match {
path: worktree_root.join(&entry.path),
display_path: entry.path.display().to_string(),
});
}
}),
None => Task::ready(Err(anyhow!(
"Couldn't delete {} because that path isn't in this project.",
path_str
))),
}
}
if matches.is_empty() {
return Task::ready(Ok(format!("No paths in the project matched {glob:?}")));
}
let paths_matched = matches.len();
// Delete the files
for Match { path, display_path } in matches {
match fs::remove_file(&path) {
Ok(()) => {
deleted_paths.push(display_path);
}
Err(file_err) => {
// Try to remove directory if it's not a file. Retrying as a directory
// on error saves a syscall compared to checking whether it's
// a directory up front for every single file.
if let Err(dir_err) = fs::remove_dir_all(&path) {
let error = if path.is_dir() {
format!("Failed to delete directory {}: {dir_err}", display_path)
} else {
format!("Failed to delete file {}: {file_err}", display_path)
};
errors.push(error);
} else {
deleted_paths.push(display_path);
}
}
}
}
if errors.is_empty() {
// 0 deleted paths should never happen if there were no errors;
// we already returned if matches was empty.
let answer = if deleted_paths.len() == 1 {
format!(
"Deleted {}",
deleted_paths.first().unwrap_or(&String::new())
)
} else {
// Sort to group entries in the same directory together
deleted_paths.sort();
let mut buf = format!("Deleted these {} paths:\n", deleted_paths.len());
for path in deleted_paths.iter() {
buf.push('\n');
buf.push_str(path);
}
buf
};
Task::ready(Ok(answer))
} else {
if deleted_paths.is_empty() {
Task::ready(Err(anyhow!(
"{glob:?} matched {} deleted because of {}:\n{}",
if paths_matched == 1 {
"1 path, but it was not".to_string()
} else {
format!("{} paths, but none were", paths_matched)
},
if errors.len() == 1 {
"this error".to_string()
} else {
format!("{} errors", errors.len())
},
errors.join("\n")
)))
} else {
// Sort to group entries in the same directory together
deleted_paths.sort();
Task::ready(Ok(format!(
"Deleted {} paths matching glob {glob:?}:\n{}\n\nErrors:\n{}",
deleted_paths.len(),
deleted_paths.join("\n"),
errors.join("\n")
)))
}
}
}
}

View File

@@ -1 +1 @@
Deletes the file or directory (and the directory's contents, recursively) at the specified path in the project, and returns confirmation of the deletion.
Deletes all files and directories in the project which match the given glob, and returns a list of the paths that were deleted.

View File

@@ -1,6 +1,5 @@
mod edit_action;
pub mod log;
mod resolve_search_block;
use anyhow::{anyhow, Context, Result};
use assistant_tool::{ActionLog, Tool};
@@ -8,17 +7,16 @@ use collections::HashSet;
use edit_action::{EditAction, EditActionParser};
use futures::StreamExt;
use gpui::{App, AsyncApp, Entity, Task};
use language::OffsetRangeExt;
use language_model::{
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
};
use log::{EditToolLog, EditToolRequestId};
use project::Project;
use resolve_search_block::resolve_search_block;
use project::{search::SearchQuery, Project};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::fmt::Write;
use std::sync::Arc;
use util::paths::PathMatcher;
use util::ResultExt;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -129,13 +127,25 @@ impl Tool for EditFilesTool {
struct EditToolRequest {
parser: EditActionParser,
output: String,
changed_buffers: HashSet<Entity<language::Buffer>>,
bad_searches: Vec<BadSearch>,
project: Entity<Project>,
action_log: Entity<ActionLog>,
tool_log: Option<(Entity<EditToolLog>, EditToolRequestId)>,
}
#[derive(Debug)]
enum DiffResult {
BadSearch(BadSearch),
Diff(language::Diff),
}
#[derive(Debug)]
struct BadSearch {
file_path: String,
search: String,
}
impl EditToolRequest {
fn new(
input: EditFilesToolInput,
@@ -190,9 +200,8 @@ impl EditToolRequest {
let mut request = Self {
parser: EditActionParser::new(),
// we start with the success header so we don't need to shift the output in the common case
output: Self::SUCCESS_OUTPUT_HEADER.to_string(),
changed_buffers: HashSet::default(),
bad_searches: Vec::new(),
action_log,
project,
tool_log,
@@ -223,11 +232,7 @@ impl EditToolRequest {
Ok(())
}
async fn apply_action(
&mut self,
(action, source): (EditAction, String),
cx: &mut AsyncApp,
) -> Result<()> {
async fn apply_action(&mut self, action: EditAction, cx: &mut AsyncApp) -> Result<()> {
let project_path = self.project.read_with(cx, |project, cx| {
project
.find_project_path(action.file_path(), cx)
@@ -239,30 +244,35 @@ impl EditToolRequest {
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
.await?;
let diff = match action {
let result = match action {
EditAction::Replace {
old,
new,
file_path: _,
file_path,
} => {
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
let diff = cx
.background_executor()
.spawn(Self::replace_diff(old, new, snapshot))
.await;
anyhow::Ok(diff)
cx.background_executor()
.spawn(Self::replace_diff(old, new, file_path, snapshot))
.await
}
EditAction::Write { content, .. } => Ok(buffer
.read_with(cx, |buffer, cx| buffer.diff(content, cx))?
.await),
EditAction::Write { content, .. } => Ok(DiffResult::Diff(
buffer
.read_with(cx, |buffer, cx| buffer.diff(content, cx))?
.await,
)),
}?;
let _clock = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
match result {
DiffResult::BadSearch(invalid_replace) => {
self.bad_searches.push(invalid_replace);
}
DiffResult::Diff(diff) => {
let _clock = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
write!(&mut self.output, "\n\n{}", source)?;
self.changed_buffers.insert(buffer);
self.changed_buffers.insert(buffer);
}
}
Ok(())
}
@@ -270,9 +280,29 @@ impl EditToolRequest {
async fn replace_diff(
old: String,
new: String,
file_path: std::path::PathBuf,
snapshot: language::BufferSnapshot,
) -> language::Diff {
let edit_range = resolve_search_block(&snapshot, &old).to_offset(&snapshot);
) -> Result<DiffResult> {
let query = SearchQuery::text(
old.clone(),
false,
true,
true,
PathMatcher::new(&[])?,
PathMatcher::new(&[])?,
None,
)?;
let matches = query.search(&snapshot, None).await;
if matches.is_empty() {
return Ok(DiffResult::BadSearch(BadSearch {
search: new.clone(),
file_path: file_path.display().to_string(),
}));
}
let edit_range = matches[0].clone();
let diff = language::text_diff(&old, &new);
let edits = diff
@@ -290,71 +320,84 @@ impl EditToolRequest {
edits,
};
diff
anyhow::Ok(DiffResult::Diff(diff))
}
const SUCCESS_OUTPUT_HEADER: &str = "Successfully applied. Here's a list of changes:";
const ERROR_OUTPUT_HEADER_NO_EDITS: &str = "I couldn't apply any edits!";
const ERROR_OUTPUT_HEADER_WITH_EDITS: &str =
"Errors occurred. First, here's a list of the edits we managed to apply:";
async fn finalize(self, cx: &mut AsyncApp) -> Result<String> {
let changed_buffer_count = self.changed_buffers.len();
let mut answer = match self.changed_buffers.len() {
0 => "No files were edited.".to_string(),
1 => "Successfully edited ".to_string(),
_ => "Successfully edited these files:\n\n".to_string(),
};
// Save each buffer once at the end
for buffer in &self.changed_buffers {
self.project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
.await?;
let (path, save_task) = self.project.update(cx, |project, cx| {
let path = buffer
.read(cx)
.file()
.map(|file| file.path().display().to_string());
let task = project.save_buffer(buffer.clone(), cx);
(path, task)
})?;
save_task.await?;
if let Some(path) = path {
writeln!(&mut answer, "{}", path)?;
}
}
self.action_log
.update(cx, |log, cx| log.buffer_edited(self.changed_buffers, cx))
.update(cx, |log, cx| {
log.notify_buffers_changed(self.changed_buffers, cx)
})
.log_err();
let errors = self.parser.errors();
if errors.is_empty() {
if changed_buffer_count == 0 {
return Err(anyhow!(
"The instructions didn't lead to any changes. You might need to consult the file contents first."
));
}
Ok(self.output)
if errors.is_empty() && self.bad_searches.is_empty() {
let answer = answer.trim_end().to_string();
Ok(answer)
} else {
let mut output = self.output;
if !self.bad_searches.is_empty() {
writeln!(
&mut answer,
"\nThese searches failed because they didn't match any strings:"
)?;
if output.is_empty() {
output.replace_range(
0..Self::SUCCESS_OUTPUT_HEADER.len(),
Self::ERROR_OUTPUT_HEADER_NO_EDITS,
);
} else {
output.replace_range(
0..Self::SUCCESS_OUTPUT_HEADER.len(),
Self::ERROR_OUTPUT_HEADER_WITH_EDITS,
);
for replace in self.bad_searches {
writeln!(
&mut answer,
"- '{}' does not appear in `{}`",
replace.search.replace("\r", "\\r").replace("\n", "\\n"),
replace.file_path
)?;
}
writeln!(&mut answer, "Make sure to use exact searches.")?;
}
if !errors.is_empty() {
writeln!(
&mut output,
"\n\nThese SEARCH/REPLACE blocks failed to parse:"
&mut answer,
"\nThese SEARCH/REPLACE blocks failed to parse:"
)?;
for error in errors {
writeln!(&mut output, "- {}", error)?;
writeln!(&mut answer, "- {}", error)?;
}
}
writeln!(
&mut output,
"\nYou can fix errors by running the tool again. You can include instructions, \
&mut answer,
"\nYou can fix errors by running the tool again. You can include instructions,\
but errors are part of the conversation so you don't need to repeat them."
)?;
Err(anyhow!(output))
Err(anyhow!(answer.trim_end().to_string()))
}
}
}

View File

@@ -1,8 +1,4 @@
use std::{
mem::take,
ops::Range,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use util::ResultExt;
/// Represents an edit action to be performed on a file.
@@ -32,14 +28,12 @@ impl EditAction {
#[derive(Debug)]
pub struct EditActionParser {
state: State,
pre_fence_line: Vec<u8>,
marker_ix: usize,
line: usize,
column: usize,
marker_ix: usize,
action_source: Vec<u8>,
fence_start_offset: usize,
block_range: Range<usize>,
old_range: Range<usize>,
new_range: Range<usize>,
old_bytes: Vec<u8>,
new_bytes: Vec<u8>,
errors: Vec<ParseError>,
}
@@ -64,14 +58,12 @@ impl EditActionParser {
pub fn new() -> Self {
Self {
state: State::Default,
pre_fence_line: Vec::new(),
marker_ix: 0,
line: 1,
column: 0,
action_source: Vec::new(),
fence_start_offset: 0,
marker_ix: 0,
block_range: Range::default(),
old_range: Range::default(),
new_range: Range::default(),
old_bytes: Vec::new(),
new_bytes: Vec::new(),
errors: Vec::new(),
}
}
@@ -84,7 +76,7 @@ impl EditActionParser {
///
/// If a block fails to parse, it will simply be skipped and an error will be recorded.
/// All errors can be accessed through the `EditActionsParser::errors` method.
pub fn parse_chunk(&mut self, input: &str) -> Vec<(EditAction, String)> {
pub fn parse_chunk(&mut self, input: &str) -> Vec<EditAction> {
use State::*;
const FENCE: &[u8] = b"```";
@@ -105,21 +97,20 @@ impl EditActionParser {
self.column += 1;
}
let action_offset = self.action_source.len();
match &self.state {
Default => match self.match_marker(byte, FENCE, false) {
Default => match match_marker(byte, FENCE, false, &mut self.marker_ix) {
MarkerMatch::Complete => {
self.fence_start_offset = action_offset + 1 - FENCE.len();
self.to_state(OpenFence);
}
MarkerMatch::Partial => {}
MarkerMatch::None => {
if self.marker_ix > 0 {
self.marker_ix = 0;
} else if self.action_source.ends_with(b"\n") {
self.action_source.clear();
} else if self.pre_fence_line.ends_with(b"\n") {
self.pre_fence_line.clear();
}
self.pre_fence_line.push(byte);
}
},
OpenFence => {
@@ -134,34 +125,39 @@ impl EditActionParser {
}
}
SearchBlock => {
if self.extend_block_range(byte, DIVIDER, NL_DIVIDER) {
self.old_range = take(&mut self.block_range);
if collect_until_marker(
byte,
DIVIDER,
NL_DIVIDER,
true,
&mut self.marker_ix,
&mut self.old_bytes,
) {
self.to_state(ReplaceBlock);
}
}
ReplaceBlock => {
if self.extend_block_range(byte, REPLACE_MARKER, NL_REPLACE_MARKER) {
self.new_range = take(&mut self.block_range);
if collect_until_marker(
byte,
REPLACE_MARKER,
NL_REPLACE_MARKER,
true,
&mut self.marker_ix,
&mut self.new_bytes,
) {
self.to_state(CloseFence);
}
}
CloseFence => {
if self.expect_marker(byte, FENCE, false) {
self.action_source.push(byte);
if let Some(action) = self.action() {
actions.push(action);
}
self.errors();
self.reset();
continue;
}
}
};
self.action_source.push(byte);
}
actions
@@ -172,76 +168,37 @@ impl EditActionParser {
&self.errors
}
fn action(&mut self) -> Option<(EditAction, String)> {
let old_range = take(&mut self.old_range);
let new_range = take(&mut self.new_range);
let action_source = take(&mut self.action_source);
let action_source = String::from_utf8(action_source).log_err()?;
let mut file_path_bytes = action_source[..self.fence_start_offset].to_owned();
if file_path_bytes.ends_with("\n") {
file_path_bytes.pop();
if file_path_bytes.ends_with("\r") {
file_path_bytes.pop();
}
}
let file_path = PathBuf::from(file_path_bytes);
if old_range.is_empty() && new_range.is_empty() {
fn action(&mut self) -> Option<EditAction> {
if self.old_bytes.is_empty() && self.new_bytes.is_empty() {
self.push_error(ParseErrorKind::NoOp);
return None;
}
if old_range.is_empty() {
return Some((
EditAction::Write {
file_path,
content: action_source[new_range].to_owned(),
},
action_source,
));
let mut pre_fence_line = std::mem::take(&mut self.pre_fence_line);
if pre_fence_line.ends_with(b"\n") {
pre_fence_line.pop();
pop_carriage_return(&mut pre_fence_line);
}
let old = action_source[old_range].to_owned();
let new = action_source[new_range].to_owned();
let file_path = PathBuf::from(String::from_utf8(pre_fence_line).log_err()?);
let content = String::from_utf8(std::mem::take(&mut self.new_bytes)).log_err()?;
let action = EditAction::Replace {
file_path,
old,
new,
};
if self.old_bytes.is_empty() {
Some(EditAction::Write { file_path, content })
} else {
let old = String::from_utf8(std::mem::take(&mut self.old_bytes)).log_err()?;
Some((action, action_source))
}
fn to_state(&mut self, state: State) {
self.state = state;
self.marker_ix = 0;
}
fn reset(&mut self) {
self.action_source.clear();
self.block_range = Range::default();
self.old_range = Range::default();
self.new_range = Range::default();
self.fence_start_offset = 0;
self.marker_ix = 0;
self.to_state(State::Default);
}
fn push_error(&mut self, kind: ParseErrorKind) {
self.errors.push(ParseError {
line: self.line,
column: self.column,
kind,
});
Some(EditAction::Replace {
file_path,
old,
new: content,
})
}
}
fn expect_marker(&mut self, byte: u8, marker: &'static [u8], trailing_newline: bool) -> bool {
match self.match_marker(byte, marker, trailing_newline) {
match match_marker(byte, marker, trailing_newline, &mut self.marker_ix) {
MarkerMatch::Complete => true,
MarkerMatch::Partial => false,
MarkerMatch::None => {
@@ -255,68 +212,24 @@ impl EditActionParser {
}
}
fn extend_block_range(&mut self, byte: u8, marker: &[u8], nl_marker: &[u8]) -> bool {
let marker = if self.block_range.is_empty() {
// do not require another newline if block is empty
marker
} else {
nl_marker
};
let offset = self.action_source.len();
match self.match_marker(byte, marker, true) {
MarkerMatch::Complete => {
if self.action_source[self.block_range.clone()].ends_with(b"\r") {
self.block_range.end -= 1;
}
true
}
MarkerMatch::Partial => false,
MarkerMatch::None => {
if self.marker_ix > 0 {
self.marker_ix = 0;
self.block_range.end = offset;
// The beginning of marker might match current byte
match self.match_marker(byte, marker, true) {
MarkerMatch::Complete => return true,
MarkerMatch::Partial => return false,
MarkerMatch::None => { /* no match, keep collecting */ }
}
}
if self.block_range.is_empty() {
self.block_range.start = offset;
}
self.block_range.end = offset + 1;
false
}
}
fn to_state(&mut self, state: State) {
self.state = state;
self.marker_ix = 0;
}
fn match_marker(&mut self, byte: u8, marker: &[u8], trailing_newline: bool) -> MarkerMatch {
if trailing_newline && self.marker_ix >= marker.len() {
if byte == b'\n' {
MarkerMatch::Complete
} else if byte == b'\r' {
MarkerMatch::Partial
} else {
MarkerMatch::None
}
} else if byte == marker[self.marker_ix] {
self.marker_ix += 1;
fn reset(&mut self) {
self.pre_fence_line.clear();
self.old_bytes.clear();
self.new_bytes.clear();
self.to_state(State::Default);
}
if self.marker_ix < marker.len() || trailing_newline {
MarkerMatch::Partial
} else {
MarkerMatch::Complete
}
} else {
MarkerMatch::None
}
fn push_error(&mut self, kind: ParseErrorKind) {
self.errors.push(ParseError {
line: self.line,
column: self.column,
kind,
});
}
}
@@ -327,6 +240,80 @@ enum MarkerMatch {
Complete,
}
fn match_marker(
byte: u8,
marker: &[u8],
trailing_newline: bool,
marker_ix: &mut usize,
) -> MarkerMatch {
if trailing_newline && *marker_ix >= marker.len() {
if byte == b'\n' {
MarkerMatch::Complete
} else if byte == b'\r' {
MarkerMatch::Partial
} else {
MarkerMatch::None
}
} else if byte == marker[*marker_ix] {
*marker_ix += 1;
if *marker_ix < marker.len() || trailing_newline {
MarkerMatch::Partial
} else {
MarkerMatch::Complete
}
} else {
MarkerMatch::None
}
}
fn collect_until_marker(
byte: u8,
marker: &[u8],
nl_marker: &[u8],
trailing_newline: bool,
marker_ix: &mut usize,
buf: &mut Vec<u8>,
) -> bool {
let marker = if buf.is_empty() {
// do not require another newline if block is empty
marker
} else {
nl_marker
};
match match_marker(byte, marker, trailing_newline, marker_ix) {
MarkerMatch::Complete => {
pop_carriage_return(buf);
true
}
MarkerMatch::Partial => false,
MarkerMatch::None => {
if *marker_ix > 0 {
buf.extend_from_slice(&marker[..*marker_ix]);
*marker_ix = 0;
// The beginning of marker might match current byte
match match_marker(byte, marker, trailing_newline, marker_ix) {
MarkerMatch::Complete => return true,
MarkerMatch::Partial => return false,
MarkerMatch::None => { /* no match, keep collecting */ }
}
}
buf.push(byte);
false
}
}
}
fn pop_carriage_return(buf: &mut Vec<u8>) {
if buf.ends_with(b"\r") {
buf.pop();
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct ParseError {
line: usize,
@@ -385,16 +372,16 @@ fn replacement() {}
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1);
assert_eq!(
actions[0].0,
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -412,16 +399,16 @@ fn replacement() {}
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1);
assert_eq!(
actions[0].0,
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -443,16 +430,16 @@ This change makes the function better.
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1);
assert_eq!(
actions[0].0,
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -481,27 +468,24 @@ fn new_util() -> bool { true }
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 2);
let (action, _) = &actions[0];
assert_eq!(
action,
&EditAction::Replace {
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(),
}
);
let (action2, _) = &actions[1];
assert_eq!(
action2,
&EditAction::Replace {
actions[1],
EditAction::Replace {
file_path: PathBuf::from("src/utils.rs"),
old: "fn old_util() -> bool { false }".to_string(),
new: "fn new_util() -> bool { true }".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -533,18 +517,16 @@ fn replacement() {
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1);
let (action, _) = &actions[0];
assert_eq!(
action,
&EditAction::Replace {
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old: "fn original() {\n println!(\"This is the original function\");\n let x = 42;\n if x > 0 {\n println!(\"Positive number\");\n }\n}".to_string(),
new: "fn replacement() {\n println!(\"This is the replacement function\");\n let x = 100;\n if x > 50 {\n println!(\"Large number\");\n } else {\n println!(\"Small number\");\n }\n}".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -565,16 +547,16 @@ fn new_function() {
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1);
assert_eq!(
actions[0].0,
actions[0],
EditAction::Write {
file_path: PathBuf::from("src/main.rs"),
content: "fn new_function() {\n println!(\"This function is being added\");\n}"
.to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -592,11 +574,9 @@ fn this_will_be_deleted() {
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(&input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1);
assert_eq!(
actions[0].0,
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old: "fn this_will_be_deleted() {\n println!(\"Deleting this function\");\n}"
@@ -604,13 +584,12 @@ fn this_will_be_deleted() {
new: "".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(&input.replace("\n", "\r\n"));
assert_no_errors(&parser);
assert_eq!(actions.len(), 1);
assert_eq!(
actions[0].0,
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old:
@@ -619,6 +598,7 @@ fn this_will_be_deleted() {
new: "".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -663,27 +643,26 @@ fn replacement() {}"#;
let mut parser = EditActionParser::new();
let actions1 = parser.parse_chunk(input_part1);
assert_no_errors(&parser);
assert_eq!(actions1.len(), 0);
assert_eq!(parser.errors().len(), 0);
let actions2 = parser.parse_chunk(input_part2);
// No actions should be complete yet
assert_no_errors(&parser);
assert_eq!(actions2.len(), 0);
assert_eq!(parser.errors().len(), 0);
let actions3 = parser.parse_chunk(input_part3);
// The third chunk should complete the action
assert_no_errors(&parser);
assert_eq!(actions3.len(), 1);
let (action, _) = &actions3[0];
assert_eq!(
action,
&EditAction::Replace {
actions3[0],
EditAction::Replace {
file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(),
}
);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -692,35 +671,28 @@ fn replacement() {}"#;
let actions1 = parser.parse_chunk("src/main.rs\n```rust\n<<<<<<< SEARCH\n");
// Check parser is in the correct state
assert_no_errors(&parser);
assert_eq!(parser.state, State::SearchBlock);
assert_eq!(
parser.action_source,
b"src/main.rs\n```rust\n<<<<<<< SEARCH\n"
);
assert_eq!(parser.pre_fence_line, b"src/main.rs\n");
assert_eq!(parser.errors().len(), 0);
// Continue parsing
let actions2 = parser.parse_chunk("original code\n=======\n");
assert_no_errors(&parser);
assert_eq!(parser.state, State::ReplaceBlock);
assert_eq!(
&parser.action_source[parser.old_range.clone()],
b"original code"
);
assert_eq!(parser.old_bytes, b"original code");
assert_eq!(parser.errors().len(), 0);
let actions3 = parser.parse_chunk("replacement code\n>>>>>>> REPLACE\n```\n");
// After complete parsing, state should reset
assert_no_errors(&parser);
assert_eq!(parser.state, State::Default);
assert_eq!(parser.action_source, b"\n");
assert!(parser.old_range.is_empty());
assert!(parser.new_range.is_empty());
assert_eq!(parser.pre_fence_line, b"\n");
assert!(parser.old_bytes.is_empty());
assert!(parser.new_bytes.is_empty());
assert_eq!(actions1.len(), 0);
assert_eq!(actions2.len(), 0);
assert_eq!(actions3.len(), 1);
assert_eq!(parser.errors().len(), 0);
}
#[test]
@@ -774,10 +746,9 @@ fn new_utils_func() {}
// Only the second block should be parsed
assert_eq!(actions.len(), 1);
let (action, _) = &actions[0];
assert_eq!(
action,
&EditAction::Replace {
actions[0],
EditAction::Replace {
file_path: PathBuf::from("src/utils.rs"),
old: "fn utils_func() {}".to_string(),
new: "fn new_utils_func() {}".to_string(),
@@ -813,19 +784,18 @@ fn new_utils_func() {}
let (chunk, rest) = remaining.split_at(chunk_size);
let chunk_actions = parser.parse_chunk(chunk);
actions.extend(chunk_actions);
actions.extend(parser.parse_chunk(chunk));
remaining = rest;
}
assert_examples_in_system_prompt(&actions, parser.errors());
}
fn assert_examples_in_system_prompt(actions: &[(EditAction, String)], errors: &[ParseError]) {
fn assert_examples_in_system_prompt(actions: &[EditAction], errors: &[ParseError]) {
assert_eq!(actions.len(), 5);
assert_eq!(
actions[0].0,
actions[0],
EditAction::Replace {
file_path: PathBuf::from("mathweb/flask/app.py"),
old: "from flask import Flask".to_string(),
@@ -834,7 +804,7 @@ fn new_utils_func() {}
);
assert_eq!(
actions[1].0,
actions[1],
EditAction::Replace {
file_path: PathBuf::from("mathweb/flask/app.py"),
old: line_endings!("def factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n").to_string(),
@@ -843,7 +813,7 @@ fn new_utils_func() {}
);
assert_eq!(
actions[2].0,
actions[2],
EditAction::Replace {
file_path: PathBuf::from("mathweb/flask/app.py"),
old: " return str(factorial(n))".to_string(),
@@ -852,7 +822,7 @@ fn new_utils_func() {}
);
assert_eq!(
actions[3].0,
actions[3],
EditAction::Write {
file_path: PathBuf::from("hello.py"),
content: line_endings!(
@@ -863,7 +833,7 @@ fn new_utils_func() {}
);
assert_eq!(
actions[4].0,
actions[4],
EditAction::Replace {
file_path: PathBuf::from("main.py"),
old: line_endings!(
@@ -912,20 +882,4 @@ fn replacement() {}
assert_eq!(format!("{}", error), expected_error);
}
// helpers
fn assert_no_errors(parser: &EditActionParser) {
let errors = parser.errors();
assert!(
errors.is_empty(),
"Expected no errors, but found:\n\n{}",
errors
.iter()
.map(|e| e.to_string())
.collect::<Vec<String>>()
.join("\n")
);
}
}

View File

@@ -80,7 +80,7 @@ impl EditToolLog {
&mut self,
id: EditToolRequestId,
chunk: &str,
new_actions: &[(EditAction, String)],
new_actions: &[EditAction],
cx: &mut Context<Self>,
) {
if let Some(request) = self.requests.get_mut(id.0 as usize) {
@@ -92,9 +92,7 @@ impl EditToolLog {
response.push_str(chunk);
}
}
request
.parsed_edits
.extend(new_actions.iter().cloned().map(|(action, _)| action));
request.parsed_edits.extend(new_actions.iter().cloned());
cx.emit(EditToolLogEvent::Updated);
}

View File

@@ -1,226 +0,0 @@
use language::{Anchor, Bias, BufferSnapshot};
use std::ops::Range;
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum SearchDirection {
Up,
Left,
Diagonal,
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct SearchState {
cost: u32,
direction: SearchDirection,
}
impl SearchState {
fn new(cost: u32, direction: SearchDirection) -> Self {
Self { cost, direction }
}
}
struct SearchMatrix {
cols: usize,
data: Vec<SearchState>,
}
impl SearchMatrix {
fn new(rows: usize, cols: usize) -> Self {
SearchMatrix {
cols,
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
}
}
fn get(&self, row: usize, col: usize) -> SearchState {
self.data[row * self.cols + col]
}
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
self.data[row * self.cols + col] = cost;
}
}
pub fn resolve_search_block(buffer: &BufferSnapshot, search_query: &str) -> Range<Anchor> {
const INSERTION_COST: u32 = 3;
const DELETION_COST: u32 = 10;
const WHITESPACE_INSERTION_COST: u32 = 1;
const WHITESPACE_DELETION_COST: u32 = 1;
let buffer_len = buffer.len();
let query_len = search_query.len();
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
let mut leading_deletion_cost = 0_u32;
for (row, query_byte) in search_query.bytes().enumerate() {
let deletion_cost = if query_byte.is_ascii_whitespace() {
WHITESPACE_DELETION_COST
} else {
DELETION_COST
};
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
matrix.set(
row + 1,
0,
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
);
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
WHITESPACE_INSERTION_COST
} else {
INSERTION_COST
};
let up = SearchState::new(
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
SearchDirection::Up,
);
let left = SearchState::new(
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
SearchDirection::Left,
);
let diagonal = SearchState::new(
if query_byte == *buffer_byte {
matrix.get(row, col).cost
} else {
matrix
.get(row, col)
.cost
.saturating_add(deletion_cost + insertion_cost)
},
SearchDirection::Diagonal,
);
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
}
}
// Traceback to find the best match
let mut best_buffer_end = buffer_len;
let mut best_cost = u32::MAX;
for col in 1..=buffer_len {
let cost = matrix.get(query_len, col).cost;
if cost < best_cost {
best_cost = cost;
best_buffer_end = col;
}
}
let mut query_ix = query_len;
let mut buffer_ix = best_buffer_end;
while query_ix > 0 && buffer_ix > 0 {
let current = matrix.get(query_ix, buffer_ix);
match current.direction {
SearchDirection::Diagonal => {
query_ix -= 1;
buffer_ix -= 1;
}
SearchDirection::Up => {
query_ix -= 1;
}
SearchDirection::Left => {
buffer_ix -= 1;
}
}
}
let mut start = buffer.offset_to_point(buffer.clip_offset(buffer_ix, Bias::Left));
start.column = 0;
let mut end = buffer.offset_to_point(buffer.clip_offset(best_buffer_end, Bias::Right));
if end.column > 0 {
end.column = buffer.line_len(end.row);
}
buffer.anchor_after(start)..buffer.anchor_before(end)
}
#[cfg(test)]
mod tests {
use crate::edit_files_tool::resolve_search_block::resolve_search_block;
use gpui::{prelude::*, App};
use language::{Buffer, OffsetRangeExt as _};
use unindent::Unindent as _;
use util::test::{generate_marked_text, marked_text_ranges};
#[gpui::test]
fn test_resolve_search_block(cx: &mut App) {
assert_resolved(
concat!(
" Lorem\n",
"« ipsum\n",
" dolor sit amet»\n",
" consecteur",
),
"ipsum\ndolor",
cx,
);
assert_resolved(
&"
«fn foo1(a: usize) -> usize {
40
fn foo2(b: usize) -> usize {
42
}
"
.unindent(),
"fn foo1(b: usize) {\n40\n}",
cx,
);
assert_resolved(
&"
fn main() {
« Foo
.bar()
.baz()
.qux()»
}
fn foo2(b: usize) -> usize {
42
}
"
.unindent(),
"Foo.bar.baz.qux()",
cx,
);
assert_resolved(
&"
class Something {
one() { return 1; }
« two() { return 2222; }
three() { return 333; }
four() { return 4444; }
five() { return 5555; }
six() { return 6666; }
» seven() { return 7; }
eight() { return 8; }
}
"
.unindent(),
&"
two() { return 2222; }
four() { return 4444; }
five() { return 5555; }
six() { return 6666; }
"
.unindent(),
cx,
);
}
#[track_caller]
fn assert_resolved(text_with_expected_range: &str, query: &str, cx: &mut App) {
let (text, _) = marked_text_ranges(text_with_expected_range, false);
let buffer = cx.new(|cx| Buffer::local(text.clone(), cx));
let snapshot = buffer.read(cx).snapshot();
let range = resolve_search_block(&snapshot, query).to_offset(&snapshot);
let text_with_actual_range = generate_marked_text(&text, &[range], false);
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
}
}

View File

@@ -1,13 +1,12 @@
use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, AppContext, Entity, Task};
use gpui::{App, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{path::PathBuf, sync::Arc};
use util::paths::PathMatcher;
use worktree::Snapshot;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct PathSearchToolInput {
@@ -57,38 +56,34 @@ impl Tool for PathSearchTool {
Ok(matcher) => matcher,
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {}", err))),
};
let snapshots: Vec<Snapshot> = project
.read(cx)
.worktrees(cx)
.map(|worktree| worktree.read(cx).snapshot())
.collect();
cx.background_spawn(async move {
let mut matches = Vec::new();
let mut matches = Vec::new();
for worktree in snapshots {
let root_name = worktree.root_name();
for worktree_handle in project.read(cx).worktrees(cx) {
let worktree = worktree_handle.read(cx);
let root_name = worktree.root_name();
// Don't consider ignored entries.
for entry in worktree.entries(false, 0) {
if path_matcher.is_match(&entry.path) {
matches.push(
PathBuf::from(root_name)
.join(&entry.path)
.to_string_lossy()
.to_string(),
);
}
// Don't consider ignored entries.
for entry in worktree.entries(false, 0) {
if path_matcher.is_match(&entry.path) {
matches.push(
PathBuf::from(root_name)
.join(&entry.path)
.to_string_lossy()
.to_string(),
);
}
}
}
if matches.is_empty() {
Ok(format!("No paths in the project matched the glob {glob:?}"))
} else {
// Sort to group entries in the same directory together.
matches.sort();
Ok(matches.join("\n"))
}
})
if matches.is_empty() {
Task::ready(Ok(format!(
"No paths in the project matched the glob {glob:?}"
)))
} else {
// Sort to group entries in the same directory together.
matches.sort();
Task::ready(Ok(matches.join("\n")))
}
}
}

View File

@@ -49,7 +49,7 @@ impl Tool for ReadFileTool {
input: serde_json::Value,
_messages: &[LanguageModelRequestMessage],
project: Entity<Project>,
action_log: Entity<ActionLog>,
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input = match serde_json::from_value::<ReadFileToolInput>(input) {
@@ -60,15 +60,14 @@ impl Tool for ReadFileTool {
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
return Task::ready(Err(anyhow!("Path not found in project")));
};
cx.spawn(|mut cx| async move {
cx.spawn(|cx| async move {
let buffer = cx
.update(|cx| {
project.update(cx, |project, cx| project.open_buffer(project_path, cx))
})?
.await?;
let result = buffer.read_with(&cx, |buffer, _cx| {
buffer.read_with(&cx, |buffer, _cx| {
if buffer
.file()
.map_or(false, |file| file.disk_state().exists())
@@ -77,13 +76,7 @@ impl Tool for ReadFileTool {
} else {
Err(anyhow!("File does not exist"))
}
})??;
action_log.update(&mut cx, |log, cx| {
log.buffer_read(buffer, cx);
})?;
anyhow::Ok(result)
})?
})
}
}

View File

@@ -27,7 +27,6 @@ feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
http_client_tls.workspace = true
log.workspace = true
paths.workspace = true
parking_lot.workspace = true

View File

@@ -1154,7 +1154,7 @@ impl Client {
async_tungstenite::async_tls::client_async_tls_with_connector(
request,
stream,
Some(http_client_tls::tls_config().into()),
Some(http_client::tls_config().into()),
)
.await?;
Ok(Connection::new(

View File

@@ -29,12 +29,6 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);
impl ProjectId {
pub fn to_proto(&self) -> u64 {
self.0
}
}
#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
)]

View File

@@ -43,20 +43,6 @@ pub enum Relation {
Contributor,
}
impl Model {
/// Returns the timestamp of when the user's account was created.
///
/// This will be the earlier of the `created_at` and `github_user_created_at` timestamps.
pub fn account_created_at(&self) -> NaiveDateTime {
let mut account_created_at = self.created_at;
if let Some(github_created_at) = self.github_user_created_at {
account_created_at = account_created_at.min(github_created_at);
}
account_created_at
}
}
impl Related<super::access_token::Entity> for Entity {
fn to() -> RelationDef {
Relation::AccessToken.def()

View File

@@ -5,7 +5,6 @@ mod token;
use crate::api::events::SnowflakeRow;
use crate::api::CloudflareIpCountryHeader;
use crate::build_kinesis_client;
use crate::rpc::MIN_ACCOUNT_AGE_FOR_LLM_USE;
use crate::{db::UserId, executor::Executor, Cents, Config, Error, Result};
use anyhow::{anyhow, Context as _};
use authorization::authorize_access_to_language_model;
@@ -218,13 +217,6 @@ async fn perform_completion(
params.model,
);
let bypass_account_age_check = claims.has_llm_subscription || claims.bypass_account_age_check;
if !bypass_account_age_check {
if Utc::now().naive_utc() - claims.account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
Err(anyhow!("account too young"))?
}
}
authorize_access_to_language_model(
&state.config,
&claims,

View File

@@ -3,7 +3,7 @@ use crate::llm::{DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT};
use crate::Cents;
use crate::{db::billing_preference, Config};
use anyhow::{anyhow, Result};
use chrono::{NaiveDateTime, Utc};
use chrono::Utc;
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use std::time::Duration;
@@ -20,10 +20,9 @@ pub struct LlmTokenClaims {
pub system_id: Option<String>,
pub metrics_id: Uuid,
pub github_user_login: String,
pub account_created_at: NaiveDateTime,
pub is_staff: bool,
pub has_llm_closed_beta_feature_flag: bool,
pub bypass_account_age_check: bool,
#[serde(default)]
pub has_predict_edits_feature_flag: bool,
pub has_llm_subscription: bool,
pub max_monthly_spend_in_cents: u32,
@@ -38,7 +37,8 @@ impl LlmTokenClaims {
user: &user::Model,
is_staff: bool,
billing_preferences: Option<billing_preference::Model>,
feature_flags: &Vec<String>,
has_llm_closed_beta_feature_flag: bool,
has_predict_edits_feature_flag: bool,
has_llm_subscription: bool,
plan: rpc::proto::Plan,
system_id: Option<String>,
@@ -58,17 +58,9 @@ impl LlmTokenClaims {
system_id,
metrics_id: user.metrics_id,
github_user_login: user.github_login.clone(),
account_created_at: user.account_created_at(),
is_staff,
has_llm_closed_beta_feature_flag: feature_flags
.iter()
.any(|flag| flag == "llm-closed-beta"),
bypass_account_age_check: feature_flags
.iter()
.any(|flag| flag == "bypass-account-age-check"),
has_predict_edits_feature_flag: feature_flags
.iter()
.any(|flag| flag == "predict-edits"),
has_llm_closed_beta_feature_flag,
has_predict_edits_feature_flag,
has_llm_subscription,
max_monthly_spend_in_cents: billing_preferences
.map_or(DEFAULT_MAX_MONTHLY_SPEND.0, |preferences| {

View File

@@ -307,7 +307,6 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::SynchronizeBuffers>)
.add_request_handler(forward_read_only_project_request::<proto::InlayHints>)
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_mutating_project_request::<proto::GetCodeLens>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
@@ -348,7 +347,6 @@ impl Server {
.add_message_handler(create_buffer_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshCodeLens>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateBufferFile>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
@@ -4036,7 +4034,7 @@ async fn accept_terms_of_service(
}
/// The minimum account age an account must have in order to use the LLM service.
pub const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30);
const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30);
async fn get_llm_api_token(
_request: proto::GetLlmToken,
@@ -4047,6 +4045,8 @@ async fn get_llm_api_token(
let flags = db.get_user_flags(session.user_id()).await?;
let has_language_models_feature_flag = flags.iter().any(|flag| flag == "language-models");
let has_llm_closed_beta_feature_flag = flags.iter().any(|flag| flag == "llm-closed-beta");
let has_predict_edits_feature_flag = flags.iter().any(|flag| flag == "predict-edits");
if !session.is_staff() && !has_language_models_feature_flag {
Err(anyhow!("permission denied"))?
@@ -4063,13 +4063,27 @@ async fn get_llm_api_token(
}
let has_llm_subscription = session.has_llm_subscription(&db).await?;
let bypass_account_age_check =
has_llm_subscription || flags.iter().any(|flag| flag == "bypass-account-age-check");
if !bypass_account_age_check {
let mut account_created_at = user.created_at;
if let Some(github_created_at) = user.github_user_created_at {
account_created_at = account_created_at.min(github_created_at);
}
if Utc::now().naive_utc() - account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
Err(anyhow!("account too young"))?
}
}
let billing_preferences = db.get_billing_preferences(user.id).await?;
let token = LlmTokenClaims::create(
&user,
session.is_staff(),
billing_preferences,
&flags,
has_llm_closed_beta_feature_flag,
has_predict_edits_feature_flag,
has_llm_subscription,
session.current_plan(&db).await?,
session.system_id.clone(),

View File

@@ -1337,7 +1337,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let host_diff_base = host_project.read_with(host_cx, |project, cx| {
project
.git_store()
.buffer_store()
.read(cx)
.get_unstaged_diff(host_buffer.read(cx).remote_id(), cx)
.unwrap()
@@ -1346,7 +1346,7 @@ impl RandomizedTest for ProjectCollaborationTest {
});
let guest_diff_base = guest_project.read_with(client_cx, |project, cx| {
project
.git_store()
.buffer_store()
.read(cx)
.get_unstaged_diff(guest_buffer.read(cx).remote_id(), cx)
.unwrap()

View File

@@ -271,7 +271,7 @@ impl TestServer {
let git_hosting_provider_registry = cx.update(GitHostingProviderRegistry::default_global);
git_hosting_provider_registry
.register_hosting_provider(Arc::new(git_hosting_providers::Github::public_instance()));
.register_hosting_provider(Arc::new(git_hosting_providers::Github::new()));
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));

View File

@@ -240,7 +240,7 @@ pub struct BlockContext<'a, 'b> {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
pub enum BlockId {
ExcerptBoundary(ExcerptId),
ExcerptBoundary(Option<ExcerptId>),
FoldedBuffer(ExcerptId),
Custom(CustomBlockId),
}
@@ -249,9 +249,10 @@ impl From<BlockId> for ElementId {
fn from(value: BlockId) -> Self {
match value {
BlockId::Custom(CustomBlockId(id)) => ("Block", id).into(),
BlockId::ExcerptBoundary(excerpt_id) => {
("ExcerptBoundary", EntityId::from(excerpt_id)).into()
}
BlockId::ExcerptBoundary(next_excerpt) => match next_excerpt {
Some(id) => ("ExcerptBoundary", EntityId::from(id)).into(),
None => "LastExcerptBoundary".into(),
},
BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id)).into(),
}
}
@@ -279,10 +280,12 @@ pub enum Block {
Custom(Arc<CustomBlock>),
FoldedBuffer {
first_excerpt: ExcerptInfo,
prev_excerpt: Option<ExcerptInfo>,
height: u32,
},
ExcerptBoundary {
excerpt: ExcerptInfo,
prev_excerpt: Option<ExcerptInfo>,
next_excerpt: Option<ExcerptInfo>,
height: u32,
starts_new_buffer: bool,
},
@@ -292,10 +295,9 @@ impl Block {
pub fn id(&self) -> BlockId {
match self {
Block::Custom(block) => BlockId::Custom(block.id),
Block::ExcerptBoundary {
excerpt: next_excerpt,
..
} => BlockId::ExcerptBoundary(next_excerpt.id),
Block::ExcerptBoundary { next_excerpt, .. } => {
BlockId::ExcerptBoundary(next_excerpt.as_ref().map(|info| info.id))
}
Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id),
}
}
@@ -318,7 +320,7 @@ impl Block {
match self {
Block::Custom(block) => matches!(block.placement, BlockPlacement::Above(_)),
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => true,
Block::ExcerptBoundary { next_excerpt, .. } => next_excerpt.is_some(),
}
}
@@ -326,7 +328,7 @@ impl Block {
match self {
Block::Custom(block) => matches!(block.placement, BlockPlacement::Below(_)),
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => false,
Block::ExcerptBoundary { next_excerpt, .. } => next_excerpt.is_none(),
}
}
@@ -345,16 +347,6 @@ impl Block {
Block::ExcerptBoundary { .. } => true,
}
}
pub fn is_buffer_header(&self) -> bool {
match self {
Block::Custom(_) => false,
Block::FoldedBuffer { .. } => true,
Block::ExcerptBoundary {
starts_new_buffer, ..
} => *starts_new_buffer,
}
}
}
impl Debug for Block {
@@ -363,21 +355,24 @@ impl Debug for Block {
Self::Custom(block) => f.debug_struct("Custom").field("block", block).finish(),
Self::FoldedBuffer {
first_excerpt,
prev_excerpt,
height,
} => f
.debug_struct("FoldedBuffer")
.field("first_excerpt", &first_excerpt)
.field("prev_excerpt", prev_excerpt)
.field("height", height)
.finish(),
Self::ExcerptBoundary {
starts_new_buffer,
excerpt,
height,
next_excerpt,
prev_excerpt,
..
} => f
.debug_struct("ExcerptBoundary")
.field("excerpt", excerpt)
.field("prev_excerpt", prev_excerpt)
.field("next_excerpt", next_excerpt)
.field("starts_new_buffer", starts_new_buffer)
.field("height", height)
.finish(),
}
}
@@ -729,13 +724,23 @@ impl BlockMap {
std::iter::from_fn(move || {
let excerpt_boundary = boundaries.next()?;
let wrap_row = wrap_snapshot
.make_wrap_point(Point::new(excerpt_boundary.row.0, 0), Bias::Left)
.row();
let wrap_row = if excerpt_boundary.next.is_some() {
wrap_snapshot.make_wrap_point(Point::new(excerpt_boundary.row.0, 0), Bias::Left)
} else {
wrap_snapshot.make_wrap_point(
Point::new(
excerpt_boundary.row.0,
buffer.line_len(excerpt_boundary.row),
),
Bias::Left,
)
}
.row();
let new_buffer_id = match (&excerpt_boundary.prev, &excerpt_boundary.next) {
(None, next) => Some(next.buffer_id),
(Some(prev), next) => {
(_, None) => None,
(None, Some(next)) => Some(next.buffer_id),
(Some(prev), Some(next)) => {
if prev.buffer_id != next.buffer_id {
Some(next.buffer_id)
} else {
@@ -744,18 +749,24 @@ impl BlockMap {
}
};
let prev_excerpt = excerpt_boundary
.prev
.filter(|prev| !folded_buffers.contains(&prev.buffer_id));
let mut height = 0;
if let Some(new_buffer_id) = new_buffer_id {
let first_excerpt = excerpt_boundary.next.clone();
let first_excerpt = excerpt_boundary.next.clone().unwrap();
if folded_buffers.contains(&new_buffer_id) {
let mut last_excerpt_end_row = first_excerpt.end_row;
while let Some(next_boundary) = boundaries.peek() {
if next_boundary.next.buffer_id == new_buffer_id {
last_excerpt_end_row = next_boundary.next.end_row;
} else {
break;
if let Some(next_excerpt_boundary) = &next_boundary.next {
if next_excerpt_boundary.buffer_id == new_buffer_id {
last_excerpt_end_row = next_excerpt_boundary.end_row;
} else {
break;
}
}
boundaries.next();
@@ -774,6 +785,7 @@ impl BlockMap {
return Some((
BlockPlacement::Replace(WrapRow(wrap_row)..=WrapRow(wrap_end_row)),
Block::FoldedBuffer {
prev_excerpt,
height: height + buffer_header_height,
first_excerpt,
},
@@ -781,16 +793,27 @@ impl BlockMap {
}
}
if new_buffer_id.is_some() {
height += buffer_header_height;
} else {
height += excerpt_header_height;
if excerpt_boundary.next.is_some() {
if new_buffer_id.is_some() {
height += buffer_header_height;
} else {
height += excerpt_header_height;
}
}
if height == 0 {
return None;
}
Some((
BlockPlacement::Above(WrapRow(wrap_row)),
if excerpt_boundary.next.is_some() {
BlockPlacement::Above(WrapRow(wrap_row))
} else {
BlockPlacement::Below(WrapRow(wrap_row))
},
Block::ExcerptBoundary {
excerpt: excerpt_boundary.next,
prev_excerpt,
next_excerpt: excerpt_boundary.next,
height,
starts_new_buffer: new_buffer_id.is_some(),
},
@@ -838,14 +861,31 @@ impl BlockMap {
placement_comparison.then_with(|| match (block_a, block_b) {
(
Block::ExcerptBoundary {
excerpt: excerpt_a, ..
next_excerpt: next_excerpt_a,
..
},
Block::ExcerptBoundary {
excerpt: excerpt_b, ..
next_excerpt: next_excerpt_b,
..
},
) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)),
(Block::ExcerptBoundary { .. }, Block::Custom(_)) => Ordering::Less,
(Block::Custom(_), Block::ExcerptBoundary { .. }) => Ordering::Greater,
) => next_excerpt_a
.as_ref()
.map(|excerpt| excerpt.id)
.cmp(&next_excerpt_b.as_ref().map(|excerpt| excerpt.id)),
(Block::ExcerptBoundary { next_excerpt, .. }, Block::Custom(_)) => {
if next_excerpt.is_some() {
Ordering::Less
} else {
Ordering::Greater
}
}
(Block::Custom(_), Block::ExcerptBoundary { next_excerpt, .. }) => {
if next_excerpt.is_some() {
Ordering::Greater
} else {
Ordering::Less
}
}
(Block::Custom(block_a), Block::Custom(block_b)) => block_a
.priority
.cmp(&block_b.priority)
@@ -1365,19 +1405,51 @@ impl BlockSnapshot {
pub fn sticky_header_excerpt(&self, position: f32) -> Option<StickyHeaderExcerpt<'_>> {
let top_row = position as u32;
let mut cursor = self.transforms.cursor::<BlockRow>(&());
cursor.seek(&BlockRow(top_row), Bias::Right, &());
cursor.seek(&BlockRow(top_row), Bias::Left, &());
while let Some(transform) = cursor.item() {
let start = cursor.start().0;
let end = cursor.end(&()).0;
match &transform.block {
Some(Block::ExcerptBoundary { excerpt, .. }) => {
return Some(StickyHeaderExcerpt { excerpt })
Some(Block::ExcerptBoundary {
prev_excerpt,
next_excerpt,
starts_new_buffer,
..
}) => {
let matches_start = (start as f32) < position;
if matches_start && top_row <= end {
return next_excerpt.as_ref().map(|excerpt| StickyHeaderExcerpt {
next_buffer_row: None,
excerpt,
});
}
let next_buffer_row = if *starts_new_buffer { Some(end) } else { None };
return prev_excerpt.as_ref().map(|excerpt| StickyHeaderExcerpt {
excerpt,
next_buffer_row,
});
}
Some(block) if block.is_buffer_header() => return None,
_ => {
cursor.prev(&());
continue;
Some(Block::FoldedBuffer {
prev_excerpt: Some(excerpt),
..
}) if top_row <= start => {
return Some(StickyHeaderExcerpt {
next_buffer_row: Some(end),
excerpt,
});
}
Some(Block::FoldedBuffer { .. }) | Some(Block::Custom(_)) | None => {}
}
// This is needed to iterate past None / FoldedBuffer / Custom blocks. For FoldedBuffer,
// if scrolled slightly past the header of a folded block, the next block is needed for
// the sticky header.
cursor.next(&());
}
None
@@ -1391,9 +1463,14 @@ impl BlockSnapshot {
return Some(Block::Custom(custom_block.clone()));
}
BlockId::ExcerptBoundary(next_excerpt_id) => {
let excerpt_range = buffer.range_for_excerpt(next_excerpt_id)?;
self.wrap_snapshot
.make_wrap_point(excerpt_range.start, Bias::Left)
if let Some(next_excerpt_id) = next_excerpt_id {
let excerpt_range = buffer.range_for_excerpt(next_excerpt_id)?;
self.wrap_snapshot
.make_wrap_point(excerpt_range.start, Bias::Left)
} else {
self.wrap_snapshot
.make_wrap_point(buffer.max_point(), Bias::Left)
}
}
BlockId::FoldedBuffer(excerpt_id) => self
.wrap_snapshot
@@ -1671,6 +1748,7 @@ impl BlockChunks<'_> {
/// Excerpt whose buffer header should be rendered "sticky" at the top of the
/// editor viewport while its contents are scrolled.
pub struct StickyHeaderExcerpt<'a> {
    /// Info for the excerpt whose buffer header sticks.
    pub excerpt: &'a ExcerptInfo,
    /// Row of an upcoming buffer header, when one follows this excerpt; used
    /// to push the sticky header upward as that next header scrolls close
    /// (see `layout_sticky_buffer_header`). `None` when no header follows.
    pub next_buffer_row: Option<u32>,
}
impl<'a> Iterator for BlockChunks<'a> {
@@ -2176,9 +2254,9 @@ mod tests {
assert_eq!(
blocks,
vec![
(0..1, BlockId::ExcerptBoundary(excerpt_ids[0])), // path, header
(3..4, BlockId::ExcerptBoundary(excerpt_ids[1])), // path, header
(6..7, BlockId::ExcerptBoundary(excerpt_ids[2])), // path, header
(0..1, BlockId::ExcerptBoundary(Some(excerpt_ids[0]))), // path, header
(3..4, BlockId::ExcerptBoundary(Some(excerpt_ids[1]))), // path, header
(6..7, BlockId::ExcerptBoundary(Some(excerpt_ids[2]))), // path, header
]
);
}
@@ -2875,7 +2953,10 @@ mod tests {
.iter()
.filter(|(_, block)| {
match block {
Block::FoldedBuffer { .. } => true,
Block::FoldedBuffer { prev_excerpt, .. } => {
assert!(prev_excerpt.is_none());
true
}
_ => false,
}
})

View File

@@ -38,6 +38,7 @@ mod proposed_changes_editor;
mod rust_analyzer_ext;
pub mod scroll;
mod selections_collection;
mod smooth_cursor_manager;
pub mod tasks;
#[cfg(test)]
@@ -69,7 +70,7 @@ pub use element::{
CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition,
};
use futures::{
future::{self, join, Shared},
future::{self, Shared},
FutureExt,
};
use fuzzy::StringMatchCandidate;
@@ -82,10 +83,10 @@ use code_context_menus::{
use git::blame::GitBlame;
use gpui::{
div, impl_actions, point, prelude::*, pulsating_between, px, relative, size, Action, Animation,
AnimationExt, AnyElement, App, AppContext, AsyncWindowContext, AvailableSpace, Background,
Bounds, ClipboardEntry, ClipboardItem, Context, DispatchPhase, Edges, Entity,
EntityInputHandler, EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight,
Global, HighlightStyle, Hsla, KeyContext, Modifiers, MouseButton, MouseDownEvent, PaintQuad,
AnimationExt, AnyElement, App, AsyncWindowContext, AvailableSpace, Background, Bounds,
ClipboardEntry, ClipboardItem, Context, DispatchPhase, Edges, Entity, EntityInputHandler,
EventEmitter, FocusHandle, FocusOutEvent, Focusable, FontId, FontWeight, Global,
HighlightStyle, Hsla, KeyContext, Modifiers, MouseButton, MouseDownEvent, PaintQuad,
ParentElement, Pixels, Render, SharedString, Size, Stateful, Styled, StyledText, Subscription,
Task, TextStyle, TextStyleRefinement, UTF16Selection, UnderlineStyle, UniformListScrollHandle,
WeakEntity, WeakFocusHandle, Window,
@@ -152,6 +153,7 @@ use selections_collection::{
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsLocation, SettingsStore};
use smallvec::SmallVec;
use smooth_cursor_manager::SmoothCursorManager;
use snippet::Snippet;
use std::{
any::TypeId,
@@ -760,6 +762,7 @@ pub struct Editor {
toggle_fold_multiple_buffers: Task<()>,
_scroll_cursor_center_top_bottom_task: Task<()>,
serialize_selections: Task<()>,
smooth_cursor_manager: SmoothCursorManager,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
@@ -1233,15 +1236,11 @@ impl Editor {
project_subscriptions.push(cx.subscribe_in(
project,
window,
|editor, _, event, window, cx| match event {
project::Event::RefreshCodeLens => {
// we always query lens with actions, without storing them, always refreshing them
}
project::Event::RefreshInlayHints => {
|editor, _, event, window, cx| {
if let project::Event::RefreshInlayHints = event {
editor
.refresh_inlay_hints(InlayHintRefreshReason::RefreshRequested, cx);
}
project::Event::SnippetEdit(id, snippet_edits) => {
} else if let project::Event::SnippetEdit(id, snippet_edits) = event {
if let Some(buffer) = editor.buffer.read(cx).buffer(*id) {
let focus_handle = editor.focus_handle(cx);
if focus_handle.is_focused(window) {
@@ -1261,7 +1260,6 @@ impl Editor {
}
}
}
_ => {}
},
));
if let Some(task_inventory) = project
@@ -1472,6 +1470,7 @@ impl Editor {
serialize_selections: Task::ready(()),
text_style_refinement: None,
load_diff_task: load_uncommitted_diff,
smooth_cursor_manager: SmoothCursorManager::Inactive,
};
this.tasks_update_task = Some(this.refresh_runnables(window, cx));
this._subscriptions.extend(project_subscriptions);
@@ -2035,6 +2034,7 @@ impl Editor {
local: bool,
old_cursor_position: &Anchor,
show_completions: bool,
pre_edit_pixel_points: HashMap<usize, Option<gpui::Point<Pixels>>>,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -2167,6 +2167,23 @@ impl Editor {
hide_hover(self, cx);
let mut post_edit_pixel_points = HashMap::default();
for selection in self.selections.disjoint_anchors().iter() {
let head_point =
self.to_pixel_point(selection.head(), &self.snapshot(window, cx), window);
post_edit_pixel_points.insert(selection.id, head_point);
}
if let Some(pending) = self.selections.pending_anchor() {
let head_point =
self.to_pixel_point(pending.head(), &self.snapshot(window, cx), window);
post_edit_pixel_points.insert(pending.id, head_point);
}
self.smooth_cursor_manager
.update(pre_edit_pixel_points, post_edit_pixel_points);
if old_cursor_position.to_display_point(&display_map).row()
!= new_cursor_position.to_display_point(&display_map).row()
{
@@ -2284,6 +2301,21 @@ impl Editor {
change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R,
) -> R {
let old_cursor_position = self.selections.newest_anchor().head();
let mut pre_edit_pixel_points = HashMap::default();
for selection in self.selections.disjoint_anchors().iter() {
let head_point =
self.to_pixel_point(selection.head(), &self.snapshot(window, cx), window);
pre_edit_pixel_points.insert(selection.id, head_point);
}
if let Some(pending) = self.selections.pending_anchor() {
let head_point =
self.to_pixel_point(pending.head(), &self.snapshot(window, cx), window);
pre_edit_pixel_points.insert(pending.id, head_point);
}
self.push_to_selection_history();
let (changed, result) = self.selections.change_with(cx, change);
@@ -2292,7 +2324,14 @@ impl Editor {
if let Some(autoscroll) = autoscroll {
self.request_autoscroll(autoscroll, cx);
}
self.selections_did_change(true, &old_cursor_position, request_completions, window, cx);
self.selections_did_change(
true,
&old_cursor_position,
request_completions,
pre_edit_pixel_points,
window,
cx,
);
if self.should_open_signature_help_automatically(
&old_cursor_position,
@@ -3105,6 +3144,20 @@ impl Editor {
let initial_buffer_versions =
jsx_tag_auto_close::construct_initial_buffer_versions_map(this, &edits, cx);
let mut pre_edit_pixel_points = HashMap::default();
for selection in this.selections.disjoint_anchors().iter() {
let head_point =
this.to_pixel_point(selection.head(), &this.snapshot(window, cx), window);
pre_edit_pixel_points.insert(selection.id, head_point);
}
if let Some(pending) = this.selections.pending_anchor() {
let head_point =
this.to_pixel_point(pending.head(), &this.snapshot(window, cx), window);
pre_edit_pixel_points.insert(pending.id, head_point);
}
this.buffer.update(cx, |buffer, cx| {
buffer.edit(edits, this.autoindent_mode.clone(), cx);
});
@@ -3206,6 +3259,22 @@ impl Editor {
linked_editing_ranges::refresh_linked_ranges(this, window, cx);
this.refresh_inline_completion(true, false, window, cx);
jsx_tag_auto_close::handle_from(this, initial_buffer_versions, window, cx);
let mut post_edit_pixel_points = HashMap::default();
for selection in this.selections.disjoint_anchors().iter() {
let head_point =
this.to_pixel_point(selection.head(), &this.snapshot(window, cx), window);
post_edit_pixel_points.insert(selection.id, head_point);
}
if let Some(pending) = this.selections.pending_anchor() {
let head_point =
this.to_pixel_point(pending.head(), &this.snapshot(window, cx), window);
post_edit_pixel_points.insert(pending.id, head_point);
}
this.smooth_cursor_manager
.update(pre_edit_pixel_points, post_edit_pixel_points);
});
}
@@ -3258,6 +3327,20 @@ impl Editor {
pub fn newline(&mut self, _: &Newline, window: &mut Window, cx: &mut Context<Self>) {
self.transact(window, cx, |this, window, cx| {
let mut pre_edit_pixel_points = HashMap::default();
for selection in this.selections.disjoint_anchors().iter() {
let head_point =
this.to_pixel_point(selection.head(), &this.snapshot(window, cx), window);
pre_edit_pixel_points.insert(selection.id, head_point);
}
if let Some(pending) = this.selections.pending_anchor() {
let head_point =
this.to_pixel_point(pending.head(), &this.snapshot(window, cx), window);
pre_edit_pixel_points.insert(pending.id, head_point);
}
let (edits, selection_fixup_info): (Vec<_>, Vec<_>) = {
let selections = this.selections.all::<usize>(cx);
let multi_buffer = this.buffer.read(cx);
@@ -3368,6 +3451,23 @@ impl Editor {
s.select(new_selections)
});
this.refresh_inline_completion(true, false, window, cx);
let mut post_edit_pixel_points = HashMap::default();
for selection in this.selections.disjoint_anchors().iter() {
let head_point =
this.to_pixel_point(selection.head(), &this.snapshot(window, cx), window);
post_edit_pixel_points.insert(selection.id, head_point);
}
if let Some(pending) = this.selections.pending_anchor() {
let head_point =
this.to_pixel_point(pending.head(), &this.snapshot(window, cx), window);
post_edit_pixel_points.insert(pending.id, head_point);
}
this.smooth_cursor_manager
.update(pre_edit_pixel_points, post_edit_pixel_points);
});
}
@@ -12448,22 +12548,14 @@ impl Editor {
if split {
workspace.split_item(SplitDirection::Right, item.clone(), window, cx);
} else {
if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation {
let (preview_item_id, preview_item_idx) =
workspace.active_pane().update(cx, |pane, _| {
(pane.preview_item_id(), pane.preview_item_idx())
});
workspace.add_item_to_active_pane(item.clone(), preview_item_idx, true, window, cx);
if let Some(preview_item_id) = preview_item_id {
workspace.active_pane().update(cx, |pane, cx| {
pane.remove_item(preview_item_id, false, false, window, cx);
});
let destination_index = workspace.active_pane().update(cx, |pane, cx| {
if PreviewTabsSettings::get_global(cx).enable_preview_from_code_navigation {
pane.close_current_preview_item(window, cx)
} else {
None
}
} else {
workspace.add_item_to_active_pane(item.clone(), None, true, window, cx);
}
});
workspace.add_item_to_active_pane(item.clone(), destination_index, true, window, cx);
}
workspace.active_pane().update(cx, |pane, cx| {
pane.set_preview_item_id(Some(item_id), cx);
@@ -13198,7 +13290,14 @@ impl Editor {
s.clear_pending();
}
});
self.selections_did_change(false, &old_cursor_position, true, window, cx);
self.selections_did_change(
false,
&old_cursor_position,
true,
HashMap::default(),
window,
cx,
);
}
fn push_to_selection_history(&mut self) {
@@ -14028,6 +14127,8 @@ impl Editor {
self.change_selections(Some(autoscroll), window, cx, |s| {
s.select_ranges([destination..destination]);
});
} else if all_diff_hunks_expanded {
window.dispatch_action(::git::ExpandCommitEditor.boxed_clone(), cx);
}
}
@@ -17038,16 +17139,7 @@ impl CodeActionProvider for Entity<Project> {
cx: &mut App,
) -> Task<Result<Vec<CodeAction>>> {
self.update(cx, |project, cx| {
let code_lens = project.code_lens(buffer, range.clone(), cx);
let code_actions = project.code_actions(buffer, range, None, cx);
cx.background_spawn(async move {
let (code_lens, code_actions) = join(code_lens, code_actions).await;
Ok(code_lens
.context("code lens fetch")?
.into_iter()
.chain(code_actions.context("code action fetch")?)
.collect())
})
project.code_actions(buffer, range, None, cx)
})
}

View File

@@ -17233,187 +17233,6 @@ async fn test_tree_sitter_brackets_newline_insertion(cx: &mut TestAppContext) {
"});
}
/// Verifies that a code-lens action whose resolution yields no edits is
/// applied by executing its LSP command, and that the resulting
/// `workspace/applyEdit` request from the server is captured in the returned
/// project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_lens_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx, |_| {});

    // Set up a fake filesystem with a single one-character TypeScript file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
    let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
    )));
    // Fake language server advertising code-lens support (with resolve) and a
    // single executable command, `_the/command`.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_lens_provider: Some(lsp::CodeLensOptions {
                    resolve_provider: Some(true),
                }),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request actions for an empty range at the start of the buffer.
    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    let anchor = buffer_snapshot.anchor_at(0, text::Bias::Left);
    drop(buffer_snapshot);
    let actions = cx
        .update_window(*workspace, |_, window, cx| {
            project.code_actions(&buffer, anchor..anchor, window, cx)
        })
        .unwrap();
    // Serve three lenses: one valid, one whose command is not in the server's
    // advertised capabilities, and one outside the queried range — only the
    // first should survive filtering.
    fake_server
        .handle_request::<lsp::request::CodeLensRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeLens {
                    range: lsp::Range::default(),
                    command: Some(lsp::Command {
                        title: "Code lens command".to_owned(),
                        command: "_the/command".to_owned(),
                        arguments: None,
                    }),
                    data: None,
                },
                lsp::CodeLens {
                    range: lsp::Range::default(),
                    command: Some(lsp::Command {
                        title: "Command not in capabilities".to_owned(),
                        command: "not in capabilities".to_owned(),
                        arguments: None,
                    }),
                    data: None,
                },
                lsp::CodeLens {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 1,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 1,
                            character: 1,
                        },
                    },
                    command: Some(lsp::Command {
                        title: "Command not in range".to_owned(),
                        command: "_the/command".to_owned(),
                        arguments: None,
                    }),
                    data: None,
                },
            ]))
        })
        .next()
        .await;

    let actions = actions.await.unwrap();
    assert_eq!(
        actions.len(),
        1,
        "Should have only one valid action for the 0..0 range"
    );
    let action = actions[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeLensResolve, _, _>(|mut lens, _| async move {
        let lens_command = lens.command.as_mut().expect("should have a command");
        assert_eq!(lens_command.title, "Code lens command");
        lens_command.arguments = Some(vec![json!("the-argument")]);
        Ok(lens)
    });

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server-initiated edit inserts "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code lens command returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // Undoing the transaction restores the original one-character file.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
mod autoclose_tags {
use super::*;
use language::language_settings::JsxTagAutoCloseSettings;

View File

@@ -21,9 +21,9 @@ use crate::{
EditorSettings, EditorSnapshot, EditorStyle, FocusedBlock, GoToHunk, GoToPreviousHunk,
GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, InlayHintRefreshReason,
InlineCompletion, JumpData, LineDown, LineHighlight, LineUp, OpenExcerpts, PageDown, PageUp,
Point, RowExt, RowRangeExt, SelectPhase, SelectedTextHighlight, Selection, SoftWrap,
StickyHeaderExcerpt, ToPoint, ToggleFold, COLUMNAR_SELECTION_MODIFIERS, CURSORS_VISIBLE_FOR,
FILE_HEADER_HEIGHT, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN,
Point, RowExt, RowRangeExt, SelectPhase, SelectedTextHighlight, Selection, SmoothCursorManager,
SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, COLUMNAR_SELECTION_MODIFIERS,
CURSORS_VISIBLE_FOR, FILE_HEADER_HEIGHT, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN,
MULTI_BUFFER_EXCERPT_HEADER_HEIGHT,
};
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
@@ -55,7 +55,7 @@ use multi_buffer::{
Anchor, ExcerptId, ExcerptInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint,
MultiBufferRow, RowInfo,
};
use project::project_settings::{self, GitGutterSetting, GitHunkStyleSetting, ProjectSettings};
use project::project_settings::{self, GitGutterSetting, ProjectSettings};
use settings::Settings;
use smallvec::{smallvec, SmallVec};
use std::{
@@ -83,6 +83,7 @@ const INLINE_BLAME_PADDING_EM_WIDTHS: f32 = 7.;
const MIN_SCROLL_THUMB_SIZE: f32 = 25.;
struct SelectionLayout {
id: usize,
head: DisplayPoint,
cursor_shape: CursorShape,
is_newest: bool,
@@ -140,6 +141,7 @@ impl SelectionLayout {
}
Self {
id: selection.id,
head,
cursor_shape,
is_newest,
@@ -1151,12 +1153,29 @@ impl EditorElement {
let cursor_layouts = self.editor.update(cx, |editor, cx| {
let mut cursors = Vec::new();
let is_animating =
!matches!(editor.smooth_cursor_manager, SmoothCursorManager::Inactive);
let animated_selection_ids = if is_animating {
match &editor.smooth_cursor_manager {
SmoothCursorManager::Active { cursors } => {
cursors.keys().copied().collect::<HashSet<_>>()
}
_ => HashSet::default(),
}
} else {
HashSet::default()
};
let show_local_cursors = editor.show_local_cursors(window, cx);
for (player_color, selections) in selections {
for selection in selections {
let cursor_position = selection.head;
if animated_selection_ids.contains(&selection.id) {
continue;
}
let in_range = visible_display_row_range.contains(&cursor_position.row());
if (selection.is_local && !show_local_cursors)
|| !in_range
@@ -1283,6 +1302,19 @@ impl EditorElement {
}
}
if is_animating {
let animated_cursors = self.layout_animated_cursors(
editor,
content_origin,
line_height,
em_advance,
selections,
window,
cx,
);
cursors.extend(animated_cursors);
}
cursors
});
@@ -1293,6 +1325,47 @@ impl EditorElement {
cursor_layouts
}
/// Build `CursorLayout`s for cursors currently mid-flight in the smooth-cursor
/// animation, advancing the animation by one frame.
///
/// Returns one laid-out cursor per animated selection id at its interpolated
/// pixel position. While any cursor is still animating, schedules another
/// animation frame so the animation keeps ticking.
fn layout_animated_cursors(
    &self,
    editor: &mut Editor,
    content_origin: gpui::Point<Pixels>,
    line_height: Pixels,
    em_advance: Pixels,
    selections: &[(PlayerColor, Vec<SelectionLayout>)],
    window: &mut Window,
    cx: &mut App,
) -> Vec<CursorLayout> {
    let new_positions = editor.smooth_cursor_manager.animate();
    if !new_positions.is_empty() {
        // Keep requesting frames until every cursor reaches its target.
        window.request_animation_frame();
    }
    new_positions
        .into_iter()
        .map(|(id, position)| {
            // todo smit: worst way to get cursor shape and player color
            let (cursor_shape, player_color) = selections
                .iter()
                .find_map(|(player_color, sels)| {
                    sels.iter()
                        .find(|sel| sel.id == id)
                        .map(|sel| (sel.cursor_shape, *player_color))
                })
                // `unwrap_or_else` so the fallback player-color lookup only
                // runs when the selection id is no longer present (the eager
                // `unwrap_or` form evaluated it for every animated cursor).
                .unwrap_or_else(|| (CursorShape::Bar, editor.current_user_player_color(cx)));
            let mut cursor = CursorLayout {
                color: player_color.cursor,
                block_width: em_advance,
                origin: position,
                line_height,
                shape: cursor_shape,
                block_text: None,
                cursor_name: None,
            };
            cursor.layout(content_origin, None, window, cx);
            cursor
        })
        .collect()
}
fn layout_scrollbars(
&self,
snapshot: &EditorSnapshot,
@@ -2135,15 +2208,6 @@ impl EditorElement {
let scroll_top = scroll_position.y * line_height;
let max_line_number_length = 1 + self
.editor
.read(cx)
.buffer()
.read(cx)
.snapshot(cx)
.widest_line_number()
.ilog10();
let elements = buffer_rows
.into_iter()
.enumerate()
@@ -2160,10 +2224,17 @@ impl EditorElement {
};
let editor = self.editor.clone();
let is_wide = max_line_number_length > 3
let max_row = self
.editor
.read(cx)
.buffer()
.read(cx)
.snapshot(cx)
.widest_line_number();
let is_wide = max_row > 999
&& row_info
.buffer_row
.is_some_and(|row| (row + 1).ilog10() + 1 == max_line_number_length);
.is_some_and(|row| row.ilog10() == max_row.ilog10());
let toggle = IconButton::new(("expand", ix), icon_name)
.icon_color(Color::Custom(cx.theme().colors().editor_line_number))
@@ -2640,7 +2711,7 @@ impl EditorElement {
}
Block::ExcerptBoundary {
excerpt,
next_excerpt,
height,
starts_new_buffer,
..
@@ -2648,31 +2719,40 @@ impl EditorElement {
let color = cx.theme().colors().clone();
let mut result = v_flex().id(block_id).w_full();
let jump_data = header_jump_data(snapshot, block_row_start, *height, excerpt);
if let Some(next_excerpt) = next_excerpt {
let jump_data =
header_jump_data(snapshot, block_row_start, *height, next_excerpt);
if *starts_new_buffer {
if sticky_header_excerpt_id != Some(excerpt.id) {
let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
if *starts_new_buffer {
if sticky_header_excerpt_id != Some(next_excerpt.id) {
let selected = selected_buffer_ids.contains(&next_excerpt.buffer_id);
result = result.child(self.render_buffer_header(
excerpt, false, selected, false, jump_data, window, cx,
));
result = result.child(self.render_buffer_header(
next_excerpt,
false,
selected,
false,
jump_data,
window,
cx,
));
} else {
result = result
.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height()));
}
} else {
result =
result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height()));
}
} else {
result = result.child(
h_flex().relative().child(
div()
.top(line_height / 2.)
.absolute()
.w_full()
.h_px()
.bg(color.border_variant),
),
);
};
result = result.child(
h_flex().relative().child(
div()
.top(line_height / 2.)
.absolute()
.w_full()
.h_px()
.bg(color.border_variant),
),
);
};
}
result.into_any()
}
@@ -2963,7 +3043,6 @@ impl EditorElement {
element,
available_space: size(AvailableSpace::MinContent, element_size.height.into()),
style: BlockStyle::Fixed,
is_buffer_header: block.is_buffer_header(),
});
}
@@ -3014,7 +3093,6 @@ impl EditorElement {
element,
available_space: size(width.into(), element_size.height.into()),
style,
is_buffer_header: block.is_buffer_header(),
});
}
@@ -3065,7 +3143,6 @@ impl EditorElement {
element,
available_space: size(width, element_size.height.into()),
style,
is_buffer_header: block.is_buffer_header(),
});
}
}
@@ -3127,13 +3204,15 @@ impl EditorElement {
fn layout_sticky_buffer_header(
&self,
StickyHeaderExcerpt { excerpt }: StickyHeaderExcerpt<'_>,
StickyHeaderExcerpt {
excerpt,
next_buffer_row,
}: StickyHeaderExcerpt<'_>,
scroll_position: f32,
line_height: Pixels,
snapshot: &EditorSnapshot,
hitbox: &Hitbox,
selected_buffer_ids: &Vec<BufferId>,
blocks: &[BlockLayout],
window: &mut Window,
cx: &mut App,
) -> AnyElement {
@@ -3169,23 +3248,17 @@ impl EditorElement {
.into_any_element();
let mut origin = hitbox.origin;
// Move floating header up to avoid colliding with the next buffer header.
for block in blocks.iter() {
if !block.is_buffer_header {
continue;
}
let Some(display_row) = block.row.filter(|row| row.0 > scroll_position as u32) else {
continue;
};
if let Some(next_buffer_row) = next_buffer_row {
// Push up the sticky header when the excerpt is getting close to the top of the viewport
let max_row = next_buffer_row - FILE_HEADER_HEIGHT * 2;
let max_row = display_row.0.saturating_sub(FILE_HEADER_HEIGHT);
let offset = scroll_position - max_row as f32;
if offset > 0.0 {
origin.y -= Pixels(offset) * line_height;
}
break;
}
let size = size(
@@ -4377,6 +4450,8 @@ impl EditorElement {
};
if let Some((hunk_bounds, background_color, corner_radii, status)) = hunk_to_paint {
let unstaged = status.has_secondary_hunk();
// Flatten the background color with the editor color to prevent
// elements below transparent hunks from showing through
let flattened_background_color = cx
@@ -4385,7 +4460,7 @@ impl EditorElement {
.editor_background
.blend(background_color);
if !Self::diff_hunk_hollow(status, cx) {
if unstaged {
window.paint_quad(quad(
hunk_bounds,
corner_radii,
@@ -5594,8 +5669,8 @@ impl EditorElement {
window: &mut Window,
cx: &mut App,
) -> Pixels {
let digit_count = snapshot.widest_line_number().ilog10() + 1;
self.column_pixels(digit_count as usize, window, cx)
let digit_count = (snapshot.widest_line_number() as f32).log10().floor() as usize + 1;
self.column_pixels(digit_count, window, cx)
}
fn shape_line_number(
@@ -5618,18 +5693,6 @@ impl EditorElement {
&[run],
)
}
fn diff_hunk_hollow(status: DiffHunkStatus, cx: &mut App) -> bool {
let unstaged = status.has_secondary_hunk();
let unstaged_hollow = ProjectSettings::get_global(cx)
.git
.hunk_style
.map_or(false, |style| {
matches!(style, GitHunkStyleSetting::UnstagedHollow)
});
unstaged == unstaged_hollow
}
}
fn header_jump_data(
@@ -6781,9 +6844,10 @@ impl Element for EditorElement {
}
};
let unstaged = diff_status.has_secondary_hunk();
let hunk_opacity = if is_light { 0.16 } else { 0.12 };
let hollow_highlight = LineHighlight {
let staged_highlight = LineHighlight {
background: (background_color.opacity(if is_light {
0.08
} else {
@@ -6797,13 +6861,13 @@ impl Element for EditorElement {
}),
};
let filled_highlight =
let unstaged_highlight =
solid_background(background_color.opacity(hunk_opacity)).into();
let background = if Self::diff_hunk_hollow(diff_status, cx) {
hollow_highlight
let background = if unstaged {
unstaged_highlight
} else {
filled_highlight
staged_highlight
};
highlighted_rows
@@ -7041,7 +7105,6 @@ impl Element for EditorElement {
&snapshot,
&hitbox,
&selected_buffer_ids,
&blocks,
window,
cx,
)
@@ -7925,7 +7988,6 @@ struct BlockLayout {
element: AnyElement,
available_space: Size<AvailableSpace>,
style: BlockStyle,
is_buffer_header: bool,
}
pub fn layout_line(

View File

@@ -1109,14 +1109,14 @@ mod tests {
px(14.0),
None,
0,
1,
2,
FoldPlaceholder::test(),
cx,
)
});
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
assert_eq!(snapshot.text(), "abc\ndefg\n\nhijkl\nmn");
assert_eq!(snapshot.text(), "abc\ndefg\nhijkl\nmn");
let col_2_x = snapshot
.x_for_display_point(DisplayPoint::new(DisplayRow(0), 2), &text_layout_details);
@@ -1181,13 +1181,13 @@ mod tests {
);
let col_5_x = snapshot
.x_for_display_point(DisplayPoint::new(DisplayRow(3), 5), &text_layout_details);
.x_for_display_point(DisplayPoint::new(DisplayRow(2), 5), &text_layout_details);
// Move up and down across second excerpt's header
assert_eq!(
up(
&snapshot,
DisplayPoint::new(DisplayRow(3), 5),
DisplayPoint::new(DisplayRow(2), 5),
SelectionGoal::HorizontalPosition(col_5_x.0),
false,
&text_layout_details
@@ -1206,38 +1206,38 @@ mod tests {
&text_layout_details
),
(
DisplayPoint::new(DisplayRow(3), 5),
DisplayPoint::new(DisplayRow(2), 5),
SelectionGoal::HorizontalPosition(col_5_x.0)
),
);
let max_point_x = snapshot
.x_for_display_point(DisplayPoint::new(DisplayRow(4), 2), &text_layout_details);
.x_for_display_point(DisplayPoint::new(DisplayRow(3), 2), &text_layout_details);
// Can't move down off the end, and attempting to do so leaves the selection goal unchanged
assert_eq!(
down(
&snapshot,
DisplayPoint::new(DisplayRow(4), 0),
DisplayPoint::new(DisplayRow(3), 0),
SelectionGoal::HorizontalPosition(0.0),
false,
&text_layout_details
),
(
DisplayPoint::new(DisplayRow(4), 2),
DisplayPoint::new(DisplayRow(3), 2),
SelectionGoal::HorizontalPosition(0.0)
),
);
assert_eq!(
down(
&snapshot,
DisplayPoint::new(DisplayRow(4), 2),
DisplayPoint::new(DisplayRow(3), 2),
SelectionGoal::HorizontalPosition(max_point_x.0),
false,
&text_layout_details
),
(
DisplayPoint::new(DisplayRow(4), 2),
DisplayPoint::new(DisplayRow(3), 2),
SelectionGoal::HorizontalPosition(max_point_x.0)
),
);

View File

@@ -0,0 +1,117 @@
use collections::HashMap;
use gpui::Pixels;
// Fraction of the remaining distance a cursor travels each animation frame.
// Smaller values make the glide slower and smoother.
const DELTA_PERCENT_PER_FRAME: f32 = 0.01;

/// A single animated cursor: the position it is currently drawn at and the
/// position it is easing toward.
pub struct Cursor {
    current_position: gpui::Point<Pixels>,
    target_position: gpui::Point<Pixels>,
}

/// State machine driving smooth (animated) cursor movement.
pub enum SmoothCursorManager {
    /// No cursors are currently animating.
    Inactive,
    /// One or more cursors are mid-animation, keyed by cursor id.
    Active { cursors: HashMap<usize, Cursor> },
}
impl SmoothCursorManager {
pub fn update(
&mut self,
source_positions: HashMap<usize, Option<gpui::Point<Pixels>>>,
target_positions: HashMap<usize, Option<gpui::Point<Pixels>>>,
) {
if source_positions.len() == 1 && target_positions.len() == 1 {
let old_id = source_positions.keys().next().unwrap();
let new_id = target_positions.keys().next().unwrap();
if old_id != new_id {
if let (Some(Some(old_pos)), Some(Some(new_pos))) = (
source_positions.values().next(),
target_positions.values().next(),
) {
*self = Self::Active {
cursors: HashMap::from_iter([(
*new_id,
Cursor {
current_position: *old_pos,
target_position: *new_pos,
},
)]),
};
return;
}
}
}
match self {
Self::Inactive => {
let mut cursors = HashMap::default();
for (id, target_position) in target_positions.iter() {
let Some(target_position) = target_position else {
continue;
};
let Some(Some(source_position)) = source_positions.get(id) else {
continue;
};
if source_position == target_position {
continue;
}
cursors.insert(
*id,
Cursor {
current_position: *source_position,
target_position: *target_position,
},
);
}
if !cursors.is_empty() {
*self = Self::Active { cursors };
}
}
Self::Active { cursors } => {
for (id, target_position) in target_positions.iter() {
let Some(target_position) = target_position else {
continue;
};
if let Some(cursor) = cursors.get_mut(id) {
cursor.target_position = *target_position;
}
}
}
}
}
pub fn animate(&mut self) -> HashMap<usize, gpui::Point<Pixels>> {
match self {
Self::Inactive => HashMap::default(),
Self::Active { cursors } => {
let mut new_positions = HashMap::default();
let mut completed = Vec::new();
for (id, cursor) in cursors.iter_mut() {
let dx = cursor.target_position.x - cursor.current_position.x;
let dy = cursor.target_position.y - cursor.current_position.y;
let distance = (dx.0.powi(2) + dy.0.powi(2)).sqrt();
if distance < 0.2 {
new_positions.insert(*id, cursor.target_position);
completed.push(*id);
} else {
cursor.current_position.x =
Pixels(cursor.current_position.x.0 + dx.0 * DELTA_PERCENT_PER_FRAME);
cursor.current_position.y =
Pixels(cursor.current_position.y.0 + dy.0 * DELTA_PERCENT_PER_FRAME);
new_positions.insert(*id, cursor.current_position);
}
}
for id in completed {
cursors.remove(&id);
}
if cursors.is_empty() {
*self = Self::Inactive;
}
new_positions
}
}
}
}

View File

@@ -36,7 +36,7 @@ serde_json.workspace = true
smol.workspace = true
sysinfo.workspace = true
ui.workspace = true
urlencoding.workspace = true
urlencoding.workspace = true
util.workspace = true
workspace.workspace = true
zed_actions.workspace = true

View File

@@ -52,7 +52,7 @@ use util::ResultExt;
#[cfg(any(test, feature = "test-support"))]
use collections::{btree_map, BTreeMap};
#[cfg(any(test, feature = "test-support"))]
use git::FakeGitRepositoryState;
use git::repository::FakeGitRepositoryState;
#[cfg(any(test, feature = "test-support"))]
use parking_lot::Mutex;
#[cfg(any(test, feature = "test-support"))]
@@ -885,7 +885,7 @@ enum FakeFsEntry {
mtime: MTime,
len: u64,
entries: BTreeMap<String, Arc<Mutex<FakeFsEntry>>>,
git_repo_state: Option<Arc<Mutex<git::FakeGitRepositoryState>>>,
git_repo_state: Option<Arc<Mutex<git::repository::FakeGitRepositoryState>>>,
},
Symlink {
target: PathBuf,
@@ -2095,7 +2095,7 @@ impl Fs for FakeFs {
)))
})
.clone();
Some(git::FakeGitRepository::open(state))
Some(git::repository::FakeGitRepository::open(state))
} else {
None
}

View File

@@ -42,4 +42,3 @@ pretty_assertions.workspace = true
serde_json.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true
gpui = { workspace = true, features = ["test-support"] }

View File

@@ -1,294 +0,0 @@
use crate::{
blame::Blame,
repository::{
Branch, CommitDetails, DiffType, GitRepository, PushOptions, Remote, RemoteCommandOutput,
RepoPath, ResetMode,
},
status::{FileStatus, GitStatus},
};
use anyhow::{Context, Result};
use askpass::AskPassSession;
use collections::{HashMap, HashSet};
use futures::{future::BoxFuture, FutureExt as _};
use gpui::{AsyncApp, SharedString};
use parking_lot::Mutex;
use rope::Rope;
use std::{path::PathBuf, sync::Arc};
#[derive(Debug, Clone)]
pub struct FakeGitRepository {
state: Arc<Mutex<FakeGitRepositoryState>>,
}
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
pub path: PathBuf,
pub event_emitter: smol::channel::Sender<PathBuf>,
pub head_contents: HashMap<RepoPath, String>,
pub index_contents: HashMap<RepoPath, String>,
pub blames: HashMap<RepoPath, Blame>,
pub statuses: HashMap<RepoPath, FileStatus>,
pub current_branch_name: Option<String>,
pub branches: HashSet<String>,
pub simulated_index_write_error_message: Option<String>,
}
impl FakeGitRepository {
pub fn open(state: Arc<Mutex<FakeGitRepositoryState>>) -> Arc<dyn GitRepository> {
Arc::new(FakeGitRepository { state })
}
}
impl FakeGitRepositoryState {
pub fn new(path: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
FakeGitRepositoryState {
path,
event_emitter,
head_contents: Default::default(),
index_contents: Default::default(),
blames: Default::default(),
statuses: Default::default(),
current_branch_name: Default::default(),
branches: Default::default(),
simulated_index_write_error_message: None,
}
}
}
impl GitRepository for FakeGitRepository {
fn reload_index(&self) {}
fn load_index_text(&self, path: RepoPath, _: AsyncApp) -> BoxFuture<Option<String>> {
let state = self.state.lock();
let content = state.index_contents.get(path.as_ref()).cloned();
async { content }.boxed()
}
fn load_committed_text(&self, path: RepoPath, _: AsyncApp) -> BoxFuture<Option<String>> {
let state = self.state.lock();
let content = state.head_contents.get(path.as_ref()).cloned();
async { content }.boxed()
}
fn set_index_text(
&self,
path: RepoPath,
content: Option<String>,
_env: HashMap<String, String>,
cx: AsyncApp,
) -> BoxFuture<anyhow::Result<()>> {
let state = self.state.clone();
let executor = cx.background_executor().clone();
async move {
executor.simulate_random_delay().await;
let mut state = state.lock();
if let Some(message) = state.simulated_index_write_error_message.clone() {
return Err(anyhow::anyhow!(message));
}
if let Some(content) = content {
state.index_contents.insert(path.clone(), content);
} else {
state.index_contents.remove(&path);
}
state
.event_emitter
.try_send(state.path.clone())
.expect("Dropped repo change event");
Ok(())
}
.boxed()
}
fn remote_url(&self, _name: &str) -> Option<String> {
None
}
fn head_sha(&self) -> Option<String> {
None
}
fn merge_head_shas(&self) -> Vec<String> {
vec![]
}
fn show(&self, _: String, _: AsyncApp) -> BoxFuture<Result<CommitDetails>> {
unimplemented!()
}
fn reset(&self, _: String, _: ResetMode, _: HashMap<String, String>) -> BoxFuture<Result<()>> {
unimplemented!()
}
fn checkout_files(
&self,
_: String,
_: Vec<RepoPath>,
_: HashMap<String, String>,
) -> BoxFuture<Result<()>> {
unimplemented!()
}
fn path(&self) -> PathBuf {
let state = self.state.lock();
state.path.clone()
}
fn main_repository_path(&self) -> PathBuf {
self.path()
}
fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
let state = self.state.lock();
let mut entries = state
.statuses
.iter()
.filter_map(|(repo_path, status)| {
if path_prefixes
.iter()
.any(|path_prefix| repo_path.0.starts_with(path_prefix))
{
Some((repo_path.to_owned(), *status))
} else {
None
}
})
.collect::<Vec<_>>();
entries.sort_unstable_by(|(a, _), (b, _)| a.cmp(&b));
Ok(GitStatus {
entries: entries.into(),
})
}
fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
let state = self.state.lock();
let current_branch = &state.current_branch_name;
let result = Ok(state
.branches
.iter()
.map(|branch_name| Branch {
is_head: Some(branch_name) == current_branch.as_ref(),
name: branch_name.into(),
most_recent_commit: None,
upstream: None,
})
.collect());
async { result }.boxed()
}
fn change_branch(&self, name: String, _: AsyncApp) -> BoxFuture<Result<()>> {
let mut state = self.state.lock();
state.current_branch_name = Some(name.to_owned());
state
.event_emitter
.try_send(state.path.clone())
.expect("Dropped repo change event");
async { Ok(()) }.boxed()
}
fn create_branch(&self, name: String, _: AsyncApp) -> BoxFuture<Result<()>> {
let mut state = self.state.lock();
state.branches.insert(name.to_owned());
state
.event_emitter
.try_send(state.path.clone())
.expect("Dropped repo change event");
async { Ok(()) }.boxed()
}
fn blame(
&self,
path: RepoPath,
_content: Rope,
_cx: AsyncApp,
) -> BoxFuture<Result<crate::blame::Blame>> {
let state = self.state.lock();
let result = state
.blames
.get(&path)
.with_context(|| format!("failed to get blame for {:?}", path.0))
.cloned();
async { result }.boxed()
}
fn stage_paths(
&self,
_paths: Vec<RepoPath>,
_env: HashMap<String, String>,
_cx: AsyncApp,
) -> BoxFuture<Result<()>> {
unimplemented!()
}
fn unstage_paths(
&self,
_paths: Vec<RepoPath>,
_env: HashMap<String, String>,
_cx: AsyncApp,
) -> BoxFuture<Result<()>> {
unimplemented!()
}
fn commit(
&self,
_message: SharedString,
_name_and_email: Option<(SharedString, SharedString)>,
_env: HashMap<String, String>,
_: AsyncApp,
) -> BoxFuture<Result<()>> {
unimplemented!()
}
fn push(
&self,
_branch: String,
_remote: String,
_options: Option<PushOptions>,
_ask_pass: AskPassSession,
_env: HashMap<String, String>,
_cx: AsyncApp,
) -> BoxFuture<Result<RemoteCommandOutput>> {
unimplemented!()
}
fn pull(
&self,
_branch: String,
_remote: String,
_ask_pass: AskPassSession,
_env: HashMap<String, String>,
_cx: AsyncApp,
) -> BoxFuture<Result<RemoteCommandOutput>> {
unimplemented!()
}
fn fetch(
&self,
_ask_pass: AskPassSession,
_env: HashMap<String, String>,
_cx: AsyncApp,
) -> BoxFuture<Result<RemoteCommandOutput>> {
unimplemented!()
}
fn get_remotes(
&self,
_branch: Option<String>,
_cx: AsyncApp,
) -> BoxFuture<Result<Vec<Remote>>> {
unimplemented!()
}
fn check_for_pushed_commit(&self, _cx: AsyncApp) -> BoxFuture<Result<Vec<SharedString>>> {
unimplemented!()
}
fn diff(&self, _diff: DiffType, _cx: AsyncApp) -> BoxFuture<Result<String>> {
unimplemented!()
}
}

View File

@@ -5,25 +5,20 @@ mod remote;
pub mod repository;
pub mod status;
#[cfg(any(test, feature = "test-support"))]
mod fake_repository;
#[cfg(any(test, feature = "test-support"))]
pub use fake_repository::*;
pub use crate::hosting_provider::*;
pub use crate::remote::*;
use anyhow::{anyhow, Context as _, Result};
pub use git2 as libgit;
use gpui::action_with_deprecated_aliases;
use gpui::actions;
pub use repository::WORK_DIRECTORY_REPO_PATH;
use serde::{Deserialize, Serialize};
use std::ffi::OsStr;
use std::fmt;
use std::str::FromStr;
use std::sync::LazyLock;
pub use crate::hosting_provider::*;
pub use crate::remote::*;
pub use git2 as libgit;
pub use repository::WORK_DIRECTORY_REPO_PATH;
pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git"));
pub static GITIGNORE: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".gitignore"));
pub static FSMONITOR_DAEMON: LazyLock<&'static OsStr> =

View File

@@ -1,8 +1,9 @@
use crate::status::GitStatus;
use crate::status::FileStatus;
use crate::SHORT_SHA_LENGTH;
use anyhow::{anyhow, Context as _, Result};
use crate::{blame::Blame, status::GitStatus};
use anyhow::{anyhow, Context, Result};
use askpass::{AskPassResult, AskPassSession};
use collections::HashMap;
use collections::{HashMap, HashSet};
use futures::future::BoxFuture;
use futures::{select_biased, AsyncWriteExt, FutureExt as _};
use git2::BranchType;
@@ -12,12 +13,11 @@ use rope::Rope;
use schemars::JsonSchema;
use serde::Deserialize;
use std::borrow::Borrow;
use std::path::Component;
use std::process::Stdio;
use std::sync::LazyLock;
use std::{
cmp::Ordering,
path::{Path, PathBuf},
path::{Component, Path, PathBuf},
sync::Arc,
};
use sum_tree::MapSeekTarget;
@@ -1056,6 +1056,304 @@ async fn run_remote_command(
}
}
/// In-memory fake of a git repository, for tests. All state lives behind a
/// shared mutex so clones observe the same repository.
#[derive(Debug, Clone)]
pub struct FakeGitRepository {
    state: Arc<Mutex<FakeGitRepositoryState>>,
}

/// Mutable backing state for [`FakeGitRepository`].
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Filesystem path of the fake repository.
    pub path: PathBuf,
    // Channel used to notify listeners that the repository changed; the repo
    // path is sent as the event payload.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    // File contents at HEAD, keyed by repo-relative path.
    pub head_contents: HashMap<RepoPath, String>,
    // File contents in the index (staging area).
    pub index_contents: HashMap<RepoPath, String>,
    // Canned blame results returned by `blame`.
    pub blames: HashMap<RepoPath, Blame>,
    // Per-path git statuses returned by `status`.
    pub statuses: HashMap<RepoPath, FileStatus>,
    pub current_branch_name: Option<String>,
    pub branches: HashSet<String>,
    // When set, `set_index_text` fails with this message (for error-path tests).
    pub simulated_index_write_error_message: Option<String>,
}

impl FakeGitRepository {
    /// Wraps shared state as a `dyn GitRepository` handle.
    pub fn open(state: Arc<Mutex<FakeGitRepositoryState>>) -> Arc<dyn GitRepository> {
        Arc::new(FakeGitRepository { state })
    }
}

impl FakeGitRepositoryState {
    /// Creates empty repository state rooted at `path`, emitting change
    /// events on `event_emitter`.
    pub fn new(path: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
        FakeGitRepositoryState {
            path,
            event_emitter,
            head_contents: Default::default(),
            index_contents: Default::default(),
            blames: Default::default(),
            statuses: Default::default(),
            current_branch_name: Default::default(),
            branches: Default::default(),
            simulated_index_write_error_message: None,
        }
    }
}
impl GitRepository for FakeGitRepository {
    fn reload_index(&self) {}

    /// Returns the staged contents of `path`, if any.
    fn load_index_text(&self, path: RepoPath, _: AsyncApp) -> BoxFuture<Option<String>> {
        let state = self.state.lock();
        let content = state.index_contents.get(path.as_ref()).cloned();
        async { content }.boxed()
    }

    /// Returns the contents of `path` at HEAD, if any.
    fn load_committed_text(&self, path: RepoPath, _: AsyncApp) -> BoxFuture<Option<String>> {
        let state = self.state.lock();
        let content = state.head_contents.get(path.as_ref()).cloned();
        async { content }.boxed()
    }

    /// Writes (or removes, when `content` is `None`) the index entry for
    /// `path`, then emits a repository-change event.
    ///
    /// All work — including the simulated failure check — is deferred until
    /// the returned future is polled, and a random delay is injected first,
    /// so tests exercise async interleavings. (The previous inlined version
    /// mutated state eagerly at call time and dropped the delay, which
    /// changed observable semantics for tests that drop the future.)
    fn set_index_text(
        &self,
        path: RepoPath,
        content: Option<String>,
        _env: HashMap<String, String>,
        cx: AsyncApp,
    ) -> BoxFuture<anyhow::Result<()>> {
        let state = self.state.clone();
        let executor = cx.background_executor().clone();
        async move {
            executor.simulate_random_delay().await;
            let mut state = state.lock();
            if let Some(message) = state.simulated_index_write_error_message.clone() {
                return Err(anyhow::anyhow!(message));
            }
            if let Some(content) = content {
                state.index_contents.insert(path.clone(), content);
            } else {
                state.index_contents.remove(&path);
            }
            state
                .event_emitter
                .try_send(state.path.clone())
                .expect("Dropped repo change event");
            Ok(())
        }
        .boxed()
    }

    fn remote_url(&self, _name: &str) -> Option<String> {
        None
    }

    fn head_sha(&self) -> Option<String> {
        None
    }

    fn merge_head_shas(&self) -> Vec<String> {
        vec![]
    }

    fn show(&self, _: String, _: AsyncApp) -> BoxFuture<Result<CommitDetails>> {
        unimplemented!()
    }

    fn reset(&self, _: String, _: ResetMode, _: HashMap<String, String>) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    fn checkout_files(
        &self,
        _: String,
        _: Vec<RepoPath>,
        _: HashMap<String, String>,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    fn path(&self) -> PathBuf {
        let state = self.state.lock();
        state.path.clone()
    }

    fn main_repository_path(&self) -> PathBuf {
        self.path()
    }

    /// Returns the statuses whose repo paths fall under any of
    /// `path_prefixes`, sorted by path.
    fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
        let state = self.state.lock();
        let mut entries = state
            .statuses
            .iter()
            .filter_map(|(repo_path, status)| {
                if path_prefixes
                    .iter()
                    .any(|path_prefix| repo_path.0.starts_with(path_prefix))
                {
                    Some((repo_path.to_owned(), *status))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>();
        entries.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
        Ok(GitStatus {
            entries: entries.into(),
        })
    }

    fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
        let state = self.state.lock();
        let current_branch = &state.current_branch_name;
        let result = Ok(state
            .branches
            .iter()
            .map(|branch_name| Branch {
                is_head: Some(branch_name) == current_branch.as_ref(),
                name: branch_name.into(),
                most_recent_commit: None,
                upstream: None,
            })
            .collect());
        async { result }.boxed()
    }

    /// Switches the current branch and emits a repository-change event.
    fn change_branch(&self, name: String, _: AsyncApp) -> BoxFuture<Result<()>> {
        let mut state = self.state.lock();
        state.current_branch_name = Some(name.to_owned());
        state
            .event_emitter
            .try_send(state.path.clone())
            .expect("Dropped repo change event");
        async { Ok(()) }.boxed()
    }

    /// Records a new branch name and emits a repository-change event.
    fn create_branch(&self, name: String, _: AsyncApp) -> BoxFuture<Result<()>> {
        let mut state = self.state.lock();
        state.branches.insert(name.to_owned());
        state
            .event_emitter
            .try_send(state.path.clone())
            .expect("Dropped repo change event");
        async { Ok(()) }.boxed()
    }

    /// Returns the canned blame for `path`, erroring if none was registered.
    fn blame(
        &self,
        path: RepoPath,
        _content: Rope,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<crate::blame::Blame>> {
        let state = self.state.lock();
        let result = state
            .blames
            .get(&path)
            .with_context(|| format!("failed to get blame for {:?}", path.0))
            .cloned();
        async { result }.boxed()
    }

    fn stage_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    fn unstage_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    fn commit(
        &self,
        _message: SharedString,
        _name_and_email: Option<(SharedString, SharedString)>,
        _env: HashMap<String, String>,
        _: AsyncApp,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    fn push(
        &self,
        _branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _ask_pass: AskPassSession,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<RemoteCommandOutput>> {
        unimplemented!()
    }

    fn pull(
        &self,
        _branch: String,
        _remote: String,
        _ask_pass: AskPassSession,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<RemoteCommandOutput>> {
        unimplemented!()
    }

    fn fetch(
        &self,
        _ask_pass: AskPassSession,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<RemoteCommandOutput>> {
        unimplemented!()
    }

    fn get_remotes(
        &self,
        _branch: Option<String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<Vec<Remote>>> {
        unimplemented!()
    }

    fn check_for_pushed_commit(&self, _cx: AsyncApp) -> BoxFuture<Result<Vec<SharedString>>> {
        unimplemented!()
    }

    fn diff(&self, _diff: DiffType, _cx: AsyncApp) -> BoxFuture<Result<String>> {
        unimplemented!()
    }
}
/// Validates that `relative_file_path` is a usable repo-relative path,
/// rejecting empty, absolute, prefixed, `.`- and `..`-leading paths.
fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
    let shown = relative_file_path.to_string_lossy();
    match relative_file_path.components().next() {
        None => anyhow::bail!("repo path should not be empty"),
        Some(Component::Prefix(_)) => {
            anyhow::bail!(
                "repo path `{}` should be relative, not a windows prefix",
                shown
            )
        }
        Some(Component::RootDir) => {
            anyhow::bail!("repo path `{}` should be relative", shown)
        }
        Some(Component::CurDir) => {
            anyhow::bail!("repo path `{}` should not start with `.`", shown)
        }
        Some(Component::ParentDir) => {
            anyhow::bail!("repo path `{}` should not start with `..`", shown)
        }
        _ => Ok(()),
    }
}
/// The [`RepoPath`] denoting the repository's work directory itself
/// (the empty relative path).
pub static WORK_DIRECTORY_REPO_PATH: LazyLock<RepoPath> =
    LazyLock::new(|| RepoPath(Path::new("").into()));
@@ -1228,35 +1526,6 @@ fn parse_upstream_track(upstream_track: &str) -> Result<UpstreamTracking> {
}))
}
fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
match relative_file_path.components().next() {
None => anyhow::bail!("repo path should not be empty"),
Some(Component::Prefix(_)) => anyhow::bail!(
"repo path `{}` should be relative, not a windows prefix",
relative_file_path.to_string_lossy()
),
Some(Component::RootDir) => {
anyhow::bail!(
"repo path `{}` should be relative",
relative_file_path.to_string_lossy()
)
}
Some(Component::CurDir) => {
anyhow::bail!(
"repo path `{}` should not start with `.`",
relative_file_path.to_string_lossy()
)
}
Some(Component::ParentDir) => {
anyhow::bail!(
"repo path `{}` should not start with `..`",
relative_file_path.to_string_lossy()
)
}
_ => Ok(()),
}
}
#[test]
fn test_branches_parsing() {
// suppress "help: octal escapes are not supported, `\0` is always null"

View File

@@ -19,10 +19,8 @@ git.workspace = true
gpui.workspace = true
http_client.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
url.workspace = true
util.workspace = true

View File

@@ -1,5 +1,4 @@
mod providers;
mod settings;
use std::sync::Arc;
@@ -11,19 +10,16 @@ use url::Url;
use util::maybe;
pub use crate::providers::*;
pub use crate::settings::*;
/// Initializes the Git hosting providers.
pub fn init(cx: &mut App) {
crate::settings::init(cx);
pub fn init(cx: &App) {
let provider_registry = GitHostingProviderRegistry::global(cx);
provider_registry.register_hosting_provider(Arc::new(Bitbucket::public_instance()));
provider_registry.register_hosting_provider(Arc::new(Bitbucket));
provider_registry.register_hosting_provider(Arc::new(Chromium));
provider_registry.register_hosting_provider(Arc::new(Codeberg));
provider_registry.register_hosting_provider(Arc::new(Gitee));
provider_registry.register_hosting_provider(Arc::new(Github::public_instance()));
provider_registry.register_hosting_provider(Arc::new(Gitlab::public_instance()));
provider_registry.register_hosting_provider(Arc::new(Github::new()));
provider_registry.register_hosting_provider(Arc::new(Gitlab::new()));
provider_registry.register_hosting_provider(Arc::new(Sourcehut));
}

View File

@@ -7,31 +7,15 @@ use git::{
RemoteUrl,
};
pub struct Bitbucket {
name: String,
base_url: Url,
}
impl Bitbucket {
pub fn new(name: impl Into<String>, base_url: Url) -> Self {
Self {
name: name.into(),
base_url,
}
}
pub fn public_instance() -> Self {
Self::new("Bitbucket", Url::parse("https://bitbucket.org").unwrap())
}
}
pub struct Bitbucket;
impl GitHostingProvider for Bitbucket {
fn name(&self) -> String {
self.name.clone()
"Bitbucket".to_string()
}
fn base_url(&self) -> Url {
self.base_url.clone()
Url::parse("https://bitbucket.org").unwrap()
}
fn supports_avatars(&self) -> bool {
@@ -106,7 +90,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_ssh_url() {
let parsed_remote = Bitbucket::public_instance()
let parsed_remote = Bitbucket
.parse_remote_url("git@bitbucket.org:zed-industries/zed.git")
.unwrap();
@@ -121,7 +105,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_https_url() {
let parsed_remote = Bitbucket::public_instance()
let parsed_remote = Bitbucket
.parse_remote_url("https://bitbucket.org/zed-industries/zed.git")
.unwrap();
@@ -136,7 +120,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_https_url_with_username() {
let parsed_remote = Bitbucket::public_instance()
let parsed_remote = Bitbucket
.parse_remote_url("https://thorstenballzed@bitbucket.org/zed-industries/zed.git")
.unwrap();
@@ -151,7 +135,7 @@ mod tests {
#[test]
fn test_build_bitbucket_permalink() {
let permalink = Bitbucket::public_instance().build_permalink(
let permalink = Bitbucket.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
@@ -169,7 +153,7 @@ mod tests {
#[test]
fn test_build_bitbucket_permalink_with_single_line_selection() {
let permalink = Bitbucket::public_instance().build_permalink(
let permalink = Bitbucket.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
@@ -187,7 +171,7 @@ mod tests {
#[test]
fn test_build_bitbucket_permalink_with_multi_line_selection() {
let permalink = Bitbucket::public_instance().build_permalink(
let permalink = Bitbucket.build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),

View File

@@ -45,24 +45,19 @@ struct User {
pub avatar_url: String,
}
#[derive(Debug)]
pub struct Github {
name: String,
base_url: Url,
}
impl Github {
pub fn new(name: impl Into<String>, base_url: Url) -> Self {
pub fn new() -> Self {
Self {
name: name.into(),
base_url,
name: "GitHub".to_string(),
base_url: Url::parse("https://github.com").unwrap(),
}
}
pub fn public_instance() -> Self {
Self::new("GitHub", Url::parse("https://github.com").unwrap())
}
pub fn from_remote_url(remote_url: &str) -> Result<Self> {
let host = get_host_from_git_remote_url(remote_url)?;
if host == "github.com" {
@@ -76,10 +71,10 @@ impl Github {
bail!("not a GitHub URL");
}
Ok(Self::new(
"GitHub Self-Hosted",
Url::parse(&format!("https://{}", host))?,
))
Ok(Self {
name: "GitHub Self-Hosted".to_string(),
base_url: Url::parse(&format!("https://{}", host))?,
})
}
async fn fetch_github_commit_author(
@@ -313,7 +308,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_ssh_url() {
let parsed_remote = Github::public_instance()
let parsed_remote = Github::new()
.parse_remote_url("git@github.com:zed-industries/zed.git")
.unwrap();
@@ -328,7 +323,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_https_url() {
let parsed_remote = Github::public_instance()
let parsed_remote = Github::new()
.parse_remote_url("https://github.com/zed-industries/zed.git")
.unwrap();
@@ -343,7 +338,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_https_url_with_username() {
let parsed_remote = Github::public_instance()
let parsed_remote = Github::new()
.parse_remote_url("https://jlannister@github.com/some-org/some-repo.git")
.unwrap();
@@ -362,7 +357,7 @@ mod tests {
owner: "zed-industries".into(),
repo: "zed".into(),
};
let permalink = Github::public_instance().build_permalink(
let permalink = Github::new().build_permalink(
remote,
BuildPermalinkParams {
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
@@ -377,7 +372,7 @@ mod tests {
#[test]
fn test_build_github_permalink() {
let permalink = Github::public_instance().build_permalink(
let permalink = Github::new().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
@@ -395,7 +390,7 @@ mod tests {
#[test]
fn test_build_github_permalink_with_single_line_selection() {
let permalink = Github::public_instance().build_permalink(
let permalink = Github::new().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
@@ -413,7 +408,7 @@ mod tests {
#[test]
fn test_build_github_permalink_with_multi_line_selection() {
let permalink = Github::public_instance().build_permalink(
let permalink = Github::new().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
@@ -436,7 +431,7 @@ mod tests {
repo: "zed".into(),
};
let github = Github::public_instance();
let github = Github::new();
let message = "This does not contain a pull request";
assert!(github.extract_pull_request(&remote, message).is_none());

View File

@@ -17,17 +17,13 @@ pub struct Gitlab {
}
impl Gitlab {
pub fn new(name: impl Into<String>, base_url: Url) -> Self {
pub fn new() -> Self {
Self {
name: name.into(),
base_url,
name: "GitLab".to_string(),
base_url: Url::parse("https://gitlab.com").unwrap(),
}
}
pub fn public_instance() -> Self {
Self::new("GitLab", Url::parse("https://gitlab.com").unwrap())
}
pub fn from_remote_url(remote_url: &str) -> Result<Self> {
let host = get_host_from_git_remote_url(remote_url)?;
if host == "gitlab.com" {
@@ -41,10 +37,10 @@ impl Gitlab {
bail!("not a GitLab URL");
}
Ok(Self::new(
"GitLab Self-Hosted",
Url::parse(&format!("https://{}", host))?,
))
Ok(Self {
name: "GitLab Self-Hosted".to_string(),
base_url: Url::parse(&format!("https://{}", host))?,
})
}
}
@@ -139,7 +135,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_ssh_url() {
let parsed_remote = Gitlab::public_instance()
let parsed_remote = Gitlab::new()
.parse_remote_url("git@gitlab.com:zed-industries/zed.git")
.unwrap();
@@ -154,7 +150,7 @@ mod tests {
#[test]
fn test_parse_remote_url_given_https_url() {
let parsed_remote = Gitlab::public_instance()
let parsed_remote = Gitlab::new()
.parse_remote_url("https://gitlab.com/zed-industries/zed.git")
.unwrap();
@@ -204,7 +200,7 @@ mod tests {
#[test]
fn test_build_gitlab_permalink() {
let permalink = Gitlab::public_instance().build_permalink(
let permalink = Gitlab::new().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
@@ -222,7 +218,7 @@ mod tests {
#[test]
fn test_build_gitlab_permalink_with_single_line_selection() {
let permalink = Gitlab::public_instance().build_permalink(
let permalink = Gitlab::new().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),
@@ -240,7 +236,7 @@ mod tests {
#[test]
fn test_build_gitlab_permalink_with_multi_line_selection() {
let permalink = Gitlab::public_instance().build_permalink(
let permalink = Gitlab::new().build_permalink(
ParsedGitRemote {
owner: "zed-industries".into(),
repo: "zed".into(),

View File

@@ -11,7 +11,7 @@ pub struct Sourcehut;
impl GitHostingProvider for Sourcehut {
fn name(&self) -> String {
"SourceHut".to_string()
"SourceHut".to_string()
}
fn base_url(&self) -> Url {

View File

@@ -1,84 +0,0 @@
use std::sync::Arc;
use anyhow::Result;
use git::GitHostingProviderRegistry;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use url::Url;
use util::ResultExt as _;
use crate::{Bitbucket, Github, Gitlab};
pub(crate) fn init(cx: &mut App) {
GitHostingProviderSettings::register(cx);
init_git_hosting_provider_settings(cx);
}
fn init_git_hosting_provider_settings(cx: &mut App) {
update_git_hosting_providers_from_settings(cx);
cx.observe_global::<SettingsStore>(update_git_hosting_providers_from_settings)
.detach();
}
fn update_git_hosting_providers_from_settings(cx: &mut App) {
let settings = GitHostingProviderSettings::get_global(cx);
let provider_registry = GitHostingProviderRegistry::global(cx);
for provider in settings.git_hosting_providers.iter() {
let Some(url) = Url::parse(&provider.base_url).log_err() else {
continue;
};
let provider = match provider.provider {
GitHostingProviderKind::Bitbucket => Arc::new(Bitbucket::new(&provider.name, url)) as _,
GitHostingProviderKind::Github => Arc::new(Github::new(&provider.name, url)) as _,
GitHostingProviderKind::Gitlab => Arc::new(Gitlab::new(&provider.name, url)) as _,
};
provider_registry.register_hosting_provider(provider);
}
}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum GitHostingProviderKind {
Github,
Gitlab,
Bitbucket,
}
/// A custom Git hosting provider.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct GitHostingProviderConfig {
/// The type of the provider.
///
/// Must be one of `github`, `gitlab`, or `bitbucket`.
pub provider: GitHostingProviderKind,
/// The base URL for the provider (e.g., "https://code.corp.big.com").
pub base_url: String,
/// The display name for the provider (e.g., "BigCorp GitHub").
pub name: String,
}
#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
pub struct GitHostingProviderSettings {
/// The list of custom Git hosting providers.
#[serde(default)]
pub git_hosting_providers: Vec<GitHostingProviderConfig>,
}
impl Settings for GitHostingProviderSettings {
const KEY: Option<&'static str> = None;
type FileContent = Self;
fn load(sources: settings::SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
sources.json_merge()
}
}

View File

@@ -7,7 +7,7 @@ use gpui::{
InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render,
SharedString, Styled, Subscription, Task, Window,
};
use picker::{Picker, PickerDelegate, PickerEditorPosition};
use picker::{Picker, PickerDelegate};
use project::git::Repository;
use std::sync::Arc;
use time::OffsetDateTime;
@@ -17,10 +17,13 @@ use util::ResultExt;
use workspace::notifications::DetachAndPromptErr;
use workspace::{ModalView, Workspace};
pub fn register(workspace: &mut Workspace) {
workspace.register_action(open);
workspace.register_action(switch);
workspace.register_action(checkout_branch);
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
workspace.register_action(open);
workspace.register_action(switch);
workspace.register_action(checkout_branch);
})
.detach();
}
pub fn checkout_branch(
@@ -222,13 +225,6 @@ impl PickerDelegate for BranchListDelegate {
"Select branch...".into()
}
fn editor_position(&self) -> PickerEditorPosition {
match self.style {
BranchListStyle::Modal => PickerEditorPosition::Start,
BranchListStyle::Popover => PickerEditorPosition::End,
}
}
fn match_count(&self) -> usize {
self.matches.len()
}

View File

@@ -54,6 +54,16 @@ impl ModalContainerProperties {
}
}
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, window, cx| {
let Some(window) = window else {
return;
};
CommitModal::register(workspace, window, cx)
})
.detach();
}
pub struct CommitModal {
git_panel: Entity<GitPanel>,
commit_editor: Entity<Editor>,
@@ -98,7 +108,7 @@ struct RestoreDock {
}
impl CommitModal {
pub fn register(workspace: &mut Workspace) {
pub fn register(workspace: &mut Workspace, _: &mut Window, _cx: &mut Context<Workspace>) {
workspace.register_action(|workspace, _: &Commit, window, cx| {
CommitModal::toggle(workspace, window, cx);
});

View File

@@ -127,13 +127,18 @@ const GIT_PANEL_KEY: &str = "GitPanel";
const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50);
pub fn register(workspace: &mut Workspace) {
workspace.register_action(|workspace, _: &ToggleFocus, window, cx| {
workspace.toggle_panel_focus::<GitPanel>(window, cx);
});
workspace.register_action(|workspace, _: &ExpandCommitEditor, window, cx| {
CommitModal::toggle(workspace, window, cx)
});
pub fn init(cx: &mut App) {
cx.observe_new(
|workspace: &mut Workspace, _window, _: &mut Context<Workspace>| {
workspace.register_action(|workspace, _: &ToggleFocus, window, cx| {
workspace.toggle_panel_focus::<GitPanel>(window, cx);
});
workspace.register_action(|workspace, _: &ExpandCommitEditor, window, cx| {
CommitModal::toggle(workspace, window, cx)
});
},
)
.detach();
}
#[derive(Debug, Clone)]
@@ -875,20 +880,7 @@ impl GitPanel {
}
};
let should_open_single_path = entry.worktree_path.starts_with("..")
|| self
.workspace
.update(cx, |workspace, cx| {
workspace
.item_of_type::<ProjectDiff>(cx)
.map_or(false, |project_diff| {
project_diff
.read(cx)
.has_excerpt_for_path(&entry.repo_path.0, cx)
})
})
.unwrap_or(false);
if should_open_single_path {
if entry.worktree_path.starts_with("..") {
self.workspace
.update(cx, |workspace, cx| {
workspace
@@ -2014,7 +2006,7 @@ impl GitPanel {
}
fn can_push_and_pull(&self, cx: &App) -> bool {
!self.project.read(cx).is_via_collab()
crate::can_push_and_pull(&self.project, cx)
}
fn get_current_remote(
@@ -2250,14 +2242,7 @@ impl GitPanel {
fn update_visible_entries(&mut self, cx: &mut Context<Self>) {
self.entries.clear();
self.single_staged_entry.take();
self.conflicted_count = 0;
self.conflicted_staged_count = 0;
self.new_count = 0;
self.tracked_count = 0;
self.new_staged_count = 0;
self.tracked_staged_count = 0;
self.entry_count = 0;
self.single_staged_entry.take();
let mut changed_entries = Vec::new();
let mut new_entries = Vec::new();
let mut conflict_entries = Vec::new();
@@ -2837,36 +2822,6 @@ impl GitPanel {
)
}
pub(crate) fn render_remote_button(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
let branch = self
.active_repository
.as_ref()?
.read(cx)
.current_branch()
.cloned();
if !self.can_push_and_pull(cx) {
return None;
}
let spinner = self.render_spinner();
Some(
h_flex()
.gap_1()
.flex_shrink_0()
.children(spinner)
.when_some(branch, |this, branch| {
let focus_handle = Some(self.focus_handle(cx));
this.children(render_remote_button(
"remote-button",
&branch,
focus_handle,
true,
))
})
.into_any_element(),
)
}
pub fn render_footer(
&self,
window: &mut Window,
@@ -2906,7 +2861,12 @@ impl GitPanel {
});
let footer = v_flex()
.child(PanelRepoFooter::new(display_name, branch, Some(git_panel)))
.child(PanelRepoFooter::new(
"footer-button",
display_name,
branch,
Some(git_panel),
))
.child(
panel_editor_container(window, cx)
.id("commit-editor-container")
@@ -3844,7 +3804,7 @@ impl Render for GitPanel {
deferred(
anchored()
.position(*position)
.anchor(Corner::TopLeft)
.anchor(gpui::Corner::TopLeft)
.child(menu.clone()),
)
.with_priority(1)
@@ -3999,6 +3959,7 @@ impl Render for GitPanelMessageTooltip {
#[derive(IntoElement, IntoComponent)]
#[component(scope = "Version Control")]
pub struct PanelRepoFooter {
id: SharedString,
active_repository: SharedString,
branch: Option<Branch>,
// Getting a GitPanel in previews will be difficult.
@@ -4009,19 +3970,26 @@ pub struct PanelRepoFooter {
impl PanelRepoFooter {
pub fn new(
id: impl Into<SharedString>,
active_repository: SharedString,
branch: Option<Branch>,
git_panel: Option<Entity<GitPanel>>,
) -> Self {
Self {
id: id.into(),
active_repository,
branch,
git_panel,
}
}
pub fn new_preview(active_repository: SharedString, branch: Option<Branch>) -> Self {
pub fn new_preview(
id: impl Into<SharedString>,
active_repository: SharedString,
branch: Option<Branch>,
) -> Self {
Self {
id: id.into(),
active_repository,
branch,
git_panel: None,
@@ -4102,14 +4070,14 @@ impl RenderOnce for PanelRepoFooter {
let project = project.clone();
move |window, cx| {
let project = project.clone()?;
Some(cx.new(|cx| RepositorySelector::new(project, rems(16.), window, cx)))
Some(cx.new(|cx| RepositorySelector::new(project, window, cx)))
}
})
.trigger_with_tooltip(
repo_selector_trigger.disabled(single_repo).truncate(true),
Tooltip::text("Switch active repository"),
)
.anchor(Corner::BottomLeft)
.attach(gpui::Corner::BottomLeft)
.into_any_element();
let branch_selector_button = Button::new("branch-selector", truncated_branch_name)
@@ -4131,12 +4099,17 @@ impl RenderOnce for PanelRepoFooter {
branch_selector_button,
Tooltip::for_action_title("Switch Branch", &zed_actions::git::Branch),
)
.anchor(Corner::BottomLeft)
.anchor(Corner::TopLeft)
.offset(gpui::Point {
x: px(0.0),
y: px(-2.0),
});
let spinner = self
.git_panel
.as_ref()
.and_then(|git_panel| git_panel.read(cx).render_spinner());
h_flex()
.w_full()
.px_2()
@@ -4171,11 +4144,28 @@ impl RenderOnce for PanelRepoFooter {
})
.child(branch_selector),
)
.children(if let Some(git_panel) = self.git_panel {
git_panel.update(cx, |git_panel, cx| git_panel.render_remote_button(cx))
} else {
None
})
.child(
h_flex()
.gap_1()
.flex_shrink_0()
.children(spinner)
.when_some(branch, |this, branch| {
let mut focus_handle = None;
if let Some(git_panel) = self.git_panel.as_ref() {
if !git_panel.read(cx).can_push_and_pull(cx) {
return this;
}
focus_handle = Some(git_panel.focus_handle(cx));
}
this.children(render_remote_button(
self.id.clone(),
&branch,
focus_handle,
true,
))
}),
)
}
}
@@ -4266,6 +4256,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"no-branch",
active_repository(1).clone(),
None,
))
@@ -4278,6 +4269,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"unknown-upstream",
active_repository(2).clone(),
Some(branch(unknown_upstream)),
))
@@ -4290,6 +4282,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"no-remote-upstream",
active_repository(3).clone(),
Some(branch(no_remote_upstream)),
))
@@ -4302,6 +4295,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"not-ahead-or-behind",
active_repository(4).clone(),
Some(branch(not_ahead_or_behind_upstream)),
))
@@ -4314,6 +4308,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"behind-remote",
active_repository(5).clone(),
Some(branch(behind_upstream)),
))
@@ -4326,6 +4321,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"ahead-of-remote",
active_repository(6).clone(),
Some(branch(ahead_of_upstream)),
))
@@ -4338,6 +4334,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"ahead-and-behind",
active_repository(7).clone(),
Some(branch(ahead_and_behind_upstream)),
))
@@ -4357,6 +4354,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"short-branch",
SharedString::from("zed"),
Some(custom("main", behind_upstream)),
))
@@ -4369,6 +4367,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"long-branch",
SharedString::from("zed"),
Some(custom(
"redesign-and-update-git-ui-list-entry-style",
@@ -4384,6 +4383,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"long-repo",
SharedString::from("zed-industries-community-examples"),
Some(custom("gpui", ahead_of_upstream)),
))
@@ -4396,6 +4396,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"long-repo-and-branch",
SharedString::from("zed-industries-community-examples"),
Some(custom(
"redesign-and-update-git-ui-list-entry-style",
@@ -4411,6 +4412,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"uppercase-repo",
SharedString::from("LICENSES"),
Some(custom("main", ahead_of_upstream)),
))
@@ -4423,6 +4425,7 @@ impl ComponentPreview for PanelRepoFooter {
.w(example_width)
.overflow_hidden()
.child(PanelRepoFooter::new_preview(
"uppercase-branch",
SharedString::from("zed"),
Some(custom("update-README", behind_upstream)),
))

View File

@@ -2,14 +2,14 @@ use std::any::Any;
use ::settings::Settings;
use command_palette_hooks::CommandPaletteFilter;
use commit_modal::CommitModal;
use git::{
repository::{Branch, Upstream, UpstreamTracking, UpstreamTrackingStatus},
status::{FileStatus, StatusCode, UnmergedStatus, UnmergedStatusCode},
};
use git_panel_settings::GitPanelSettings;
use gpui::{actions, App, FocusHandle};
use gpui::{actions, App, Entity, FocusHandle};
use onboarding::{clear_dismissed, GitOnboardingModal};
use project::Project;
use project_diff::ProjectDiff;
use ui::prelude::*;
use workspace::Workspace;
@@ -29,14 +29,12 @@ actions!(git, [ResetOnboarding]);
pub fn init(cx: &mut App) {
GitPanelSettings::register(cx);
branch_picker::init(cx);
cx.observe_new(ProjectDiff::register).detach();
commit_modal::init(cx);
git_panel::init(cx);
cx.observe_new(|workspace: &mut Workspace, _, cx| {
ProjectDiff::register(workspace, cx);
CommitModal::register(workspace);
git_panel::register(workspace);
repository_selector::register(workspace);
branch_picker::register(workspace);
let project = workspace.project().read(cx);
if project.is_read_only(cx) {
return;
@@ -122,6 +120,10 @@ pub fn git_status_icon(status: FileStatus) -> impl IntoElement {
GitStatusIcon::new(status)
}
fn can_push_and_pull(project: &Entity<Project>, cx: &App) -> bool {
!project.read(cx).is_via_collab()
}
fn render_remote_button(
id: impl Into<SharedString>,
branch: &Branch,

View File

@@ -26,11 +26,7 @@ use project::{
git::{GitEvent, GitStore},
Project, ProjectPath,
};
use std::{
any::{Any, TypeId},
path::Path,
sync::Arc,
};
use std::any::{Any, TypeId};
use theme::ActiveTheme;
use ui::{prelude::*, vertical_divider, KeyBinding, Tooltip};
use util::ResultExt as _;
@@ -69,15 +65,17 @@ const CONFLICT_NAMESPACE: &'static str = "0";
const TRACKED_NAMESPACE: &'static str = "1";
const NEW_NAMESPACE: &'static str = "2";
const MAX_DIFF_TRACKED_PATHS: usize = 1000;
const MAX_DIFF_UNTRACKED_PATHS: usize = 1000;
impl ProjectDiff {
pub(crate) fn register(workspace: &mut Workspace, cx: &mut Context<Workspace>) {
pub(crate) fn register(
workspace: &mut Workspace,
_window: Option<&mut Window>,
cx: &mut Context<Workspace>,
) {
workspace.register_action(Self::deploy);
workspace.register_action(|workspace, _: &Add, window, cx| {
Self::deploy(workspace, &Diff, window, cx);
});
workspace::register_serializable_item::<ProjectDiff>(cx);
}
@@ -346,7 +344,6 @@ impl ProjectDiff {
let mut result = vec![];
repo.update(cx, |repo, cx| {
let (mut tracked_count, mut untracked_count) = (0, 0);
for entry in repo.status() {
if !entry.status.has_changes() {
continue;
@@ -354,24 +351,11 @@ impl ProjectDiff {
let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path) else {
continue;
};
let namespace = if repo.has_conflict(&entry.repo_path) {
if tracked_count >= MAX_DIFF_TRACKED_PATHS {
continue;
}
tracked_count += 1;
CONFLICT_NAMESPACE
} else if entry.status.is_created() {
if untracked_count >= MAX_DIFF_UNTRACKED_PATHS {
continue;
}
untracked_count += 1;
NEW_NAMESPACE
} else {
if tracked_count >= MAX_DIFF_TRACKED_PATHS {
continue;
}
tracked_count += 1;
TRACKED_NAMESPACE
};
let path_key = PathKey::namespaced(namespace, entry.repo_path.0.clone());
@@ -503,14 +487,6 @@ impl ProjectDiff {
Ok(())
}
pub fn has_excerpt_for_path(&self, path: &Arc<Path>, cx: &App) -> bool {
let multibuffer = self.multibuffer.read(cx);
multibuffer.has_excerpt_for_path(&PathKey::namespaced(CONFLICT_NAMESPACE, path.clone()))
|| multibuffer
.has_excerpt_for_path(&PathKey::namespaced(TRACKED_NAMESPACE, path.clone()))
|| multibuffer.has_excerpt_for_path(&PathKey::namespaced(NEW_NAMESPACE, path.clone()))
}
#[cfg(any(test, feature = "test-support"))]
pub fn excerpt_paths(&self, cx: &App) -> Vec<String> {
self.multibuffer
@@ -697,6 +673,8 @@ impl Render for ProjectDiff {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let is_empty = self.multibuffer.read(cx).is_empty();
let can_push_and_pull = crate::can_push_and_pull(&self.project, cx);
div()
.track_focus(&self.focus_handle)
.key_context(if is_empty { "EmptyPane" } else { "GitDiff" })
@@ -706,16 +684,6 @@ impl Render for ProjectDiff {
.justify_center()
.size_full()
.when(is_empty, |el| {
let remote_button = if let Some(panel) = self
.workspace
.upgrade()
.and_then(|workspace| workspace.read(cx).panel::<GitPanel>(cx))
{
panel.update(cx, |panel, cx| panel.render_remote_button(cx))
} else {
None
};
let keybinding_focus_handle = self.focus_handle(cx).clone();
el.child(
v_flex()
.gap_1()
@@ -724,33 +692,52 @@ impl Render for ProjectDiff {
.justify_around()
.child(Label::new("No uncommitted changes")),
)
.map(|el| match remote_button {
Some(button) => el.child(h_flex().justify_around().child(button)),
None => el.child(
h_flex()
.justify_around()
.child(Label::new("Remote up to date")),
),
.when(can_push_and_pull, |this_div| {
let keybinding_focus_handle = self.focus_handle(cx);
this_div.when_some(self.current_branch.as_ref(), |this_div, branch| {
let remote_button = crate::render_remote_button(
"project-diff-remote-button",
branch,
Some(keybinding_focus_handle.clone()),
false,
);
match remote_button {
Some(button) => {
this_div.child(h_flex().justify_around().child(button))
}
None => this_div.child(
h_flex()
.justify_around()
.child(Label::new("Remote up to date")),
),
}
})
})
.child(
h_flex().justify_around().mt_1().child(
Button::new("project-diff-close-button", "Close")
// .style(ButtonStyle::Transparent)
.key_binding(KeyBinding::for_action_in(
&CloseActiveItem::default(),
&keybinding_focus_handle,
window,
cx,
))
.on_click(move |_, window, cx| {
window.focus(&keybinding_focus_handle);
window.dispatch_action(
Box::new(CloseActiveItem::default()),
.map(|this| {
let keybinding_focus_handle = self.focus_handle(cx).clone();
this.child(
h_flex().justify_around().mt_1().child(
Button::new("project-diff-close-button", "Close")
// .style(ButtonStyle::Transparent)
.key_binding(KeyBinding::for_action_in(
&CloseActiveItem::default(),
&keybinding_focus_handle,
window,
cx,
);
}),
),
),
))
.on_click(move |_, window, cx| {
window.focus(&keybinding_focus_handle);
window.dispatch_action(
Box::new(CloseActiveItem::default()),
cx,
);
}),
),
)
}),
)
})
.when(!is_empty, |el| el.child(self.editor.clone()))
@@ -819,7 +806,6 @@ impl ProjectDiffToolbar {
fn project_diff(&self, _: &App) -> Option<Entity<ProjectDiff>> {
self.project_diff.as_ref()?.upgrade()
}
fn dispatch_action(&self, action: &dyn Action, window: &mut Window, cx: &mut Context<Self>) {
if let Some(project_diff) = self.project_diff(cx) {
project_diff.focus_handle(cx).focus(window);
@@ -933,6 +919,12 @@ impl Render for ProjectDiffToolbar {
&StageAndNext,
&focus_handle,
))
// don't actually disable the button so it's mashable
.color(if button_states.stage {
Color::Default
} else {
Color::Disabled
})
.on_click(cx.listener(|this, _, window, cx| {
this.dispatch_action(&StageAndNext, window, cx)
})),
@@ -944,6 +936,11 @@ impl Render for ProjectDiffToolbar {
&UnstageAndNext,
&focus_handle,
))
.color(if button_states.unstage {
Color::Default
} else {
Color::Disabled
})
.on_click(cx.listener(|this, _, window, cx| {
this.dispatch_action(&UnstageAndNext, window, cx)
})),

View File

@@ -9,33 +9,14 @@ use project::{
};
use std::sync::Arc;
use ui::{prelude::*, ListItem, ListItemSpacing};
use workspace::{ModalView, Workspace};
pub fn register(workspace: &mut Workspace) {
workspace.register_action(open);
}
pub fn open(
workspace: &mut Workspace,
_: &zed_actions::git::SelectRepo,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
let project = workspace.project().clone();
workspace.toggle_modal(window, cx, |window, cx| {
RepositorySelector::new(project, rems(34.), window, cx)
})
}
pub struct RepositorySelector {
width: Rems,
picker: Entity<Picker<RepositorySelectorDelegate>>,
}
impl RepositorySelector {
pub fn new(
project_handle: Entity<Project>,
width: Rems,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -67,7 +48,7 @@ impl RepositorySelector {
.max_height(Some(rems(20.).into()))
});
RepositorySelector { picker, width }
RepositorySelector { picker }
}
}
@@ -110,12 +91,10 @@ impl Focusable for RepositorySelector {
impl Render for RepositorySelector {
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
div().w(self.width).child(self.picker.clone())
self.picker.clone()
}
}
impl ModalView for RepositorySelector {}
pub struct RepositorySelectorDelegate {
project: WeakEntity<Project>,
repository_selector: WeakEntity<RepositorySelector>,

View File

@@ -252,22 +252,6 @@ fn is_printable_key(key: &str) -> bool {
| "f17"
| "f18"
| "f19"
| "f20"
| "f21"
| "f22"
| "f23"
| "f24"
| "f25"
| "f26"
| "f27"
| "f28"
| "f29"
| "f30"
| "f31"
| "f32"
| "f33"
| "f34"
| "f35"
| "backspace"
| "delete"
| "left"

View File

@@ -58,22 +58,6 @@ pub fn key_to_native(key: &str) -> Cow<str> {
"f17" => NSF17FunctionKey,
"f18" => NSF18FunctionKey,
"f19" => NSF19FunctionKey,
"f20" => NSF20FunctionKey,
"f21" => NSF21FunctionKey,
"f22" => NSF22FunctionKey,
"f23" => NSF23FunctionKey,
"f24" => NSF24FunctionKey,
"f25" => NSF25FunctionKey,
"f26" => NSF26FunctionKey,
"f27" => NSF27FunctionKey,
"f28" => NSF28FunctionKey,
"f29" => NSF29FunctionKey,
"f30" => NSF30FunctionKey,
"f31" => NSF31FunctionKey,
"f32" => NSF32FunctionKey,
"f33" => NSF33FunctionKey,
"f34" => NSF34FunctionKey,
"f35" => NSF35FunctionKey,
_ => return Cow::Borrowed(key),
};
Cow::Owned(String::from_utf16(&[code]).unwrap())
@@ -348,22 +332,6 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
Some(NSF17FunctionKey) => "f17".to_string(),
Some(NSF18FunctionKey) => "f18".to_string(),
Some(NSF19FunctionKey) => "f19".to_string(),
Some(NSF20FunctionKey) => "f20".to_string(),
Some(NSF21FunctionKey) => "f21".to_string(),
Some(NSF22FunctionKey) => "f22".to_string(),
Some(NSF23FunctionKey) => "f23".to_string(),
Some(NSF24FunctionKey) => "f24".to_string(),
Some(NSF25FunctionKey) => "f25".to_string(),
Some(NSF26FunctionKey) => "f26".to_string(),
Some(NSF27FunctionKey) => "f27".to_string(),
Some(NSF28FunctionKey) => "f28".to_string(),
Some(NSF29FunctionKey) => "f29".to_string(),
Some(NSF30FunctionKey) => "f30".to_string(),
Some(NSF31FunctionKey) => "f31".to_string(),
Some(NSF32FunctionKey) => "f32".to_string(),
Some(NSF33FunctionKey) => "f33".to_string(),
Some(NSF34FunctionKey) => "f34".to_string(),
Some(NSF35FunctionKey) => "f35".to_string(),
_ => {
// Cases to test when modifying this:
//

View File

@@ -25,3 +25,5 @@ log.workspace = true
serde.workspace = true
serde_json.workspace = true
url.workspace = true
rustls.workspace = true
rustls-platform-verifier.workspace = true

View File

@@ -8,14 +8,33 @@ pub use http::{self, Method, Request, Response, StatusCode, Uri};
use futures::future::BoxFuture;
use http::request::Builder;
use rustls::ClientConfig;
use rustls_platform_verifier::ConfigVerifierExt;
#[cfg(feature = "test-support")]
use std::fmt;
use std::{
any::type_name,
sync::{Arc, Mutex},
sync::{Arc, Mutex, OnceLock},
};
pub use url::Url;
static TLS_CONFIG: OnceLock<rustls::ClientConfig> = OnceLock::new();
pub fn tls_config() -> ClientConfig {
TLS_CONFIG
.get_or_init(|| {
// rustls uses the `aws_lc_rs` provider by default
// This only errors if the default provider has already
// been installed. We can ignore this `Result`.
rustls::crypto::aws_lc_rs::default_provider()
.install_default()
.ok();
ClientConfig::with_platform_verifier()
})
.clone()
}
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub enum RedirectPolicy {
#[default]

View File

@@ -1,20 +0,0 @@
[package]
name = "http_client_tls"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
[lints]
workspace = true
[features]
test-support = []
[lib]
path = "src/http_client_tls.rs"
doctest = true
[dependencies]
rustls.workspace = true
rustls-platform-verifier.workspace = true

View File

@@ -1 +0,0 @@
../../LICENSE-APACHE

View File

@@ -1,21 +0,0 @@
use std::sync::OnceLock;
use rustls::ClientConfig;
use rustls_platform_verifier::ConfigVerifierExt;
static TLS_CONFIG: OnceLock<rustls::ClientConfig> = OnceLock::new();
pub fn tls_config() -> ClientConfig {
TLS_CONFIG
.get_or_init(|| {
// rustls uses the `aws_lc_rs` provider by default
// This only errors if the default provider has already
// been installed. We can ignore this `Result`.
rustls::crypto::aws_lc_rs::default_provider()
.install_default()
.ok();
ClientConfig::with_platform_verifier()
})
.clone()
}

View File

@@ -1,14 +1,17 @@
use anyhow::Context as _;
use gpui::{App, UpdateGlobal};
use json::json_task_context;
pub use language::*;
use node_runtime::NodeRuntime;
use python::{PythonContextProvider, PythonToolchainProvider};
use rust_embed::RustEmbed;
use settings::SettingsStore;
use smol::stream::StreamExt;
use std::{str, sync::Arc};
use typescript::typescript_task_context;
use util::{asset_str, ResultExt};
pub use language::*;
use crate::{bash::bash_task_context, rust::RustContextProvider};
mod bash;
mod c;
@@ -46,7 +49,7 @@ pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock<Arc<Language>> =
))
});
pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mut App) {
#[cfg(feature = "load-grammars")]
languages.register_native_grammars([
("bash", tree_sitter_bash::LANGUAGE),
@@ -71,149 +74,193 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
("gitcommit", tree_sitter_gitcommit::LANGUAGE),
]);
let c_lsp_adapter = Arc::new(c::CLspAdapter);
let css_lsp_adapter = Arc::new(css::CssLspAdapter::new(node.clone()));
let eslint_adapter = Arc::new(typescript::EsLintLspAdapter::new(node.clone()));
let go_context_provider = Arc::new(go::GoContextProvider);
let go_lsp_adapter = Arc::new(go::GoLspAdapter);
let json_context_provider = Arc::new(json_task_context());
let json_lsp_adapter = Arc::new(json::JsonLspAdapter::new(node.clone(), languages.clone()));
let node_version_lsp_adapter = Arc::new(json::NodeVersionAdapter);
let py_lsp_adapter = Arc::new(python::PyLspAdapter::new());
let python_context_provider = Arc::new(python::PythonContextProvider);
let python_lsp_adapter = Arc::new(python::PythonLspAdapter::new(node.clone()));
let python_toolchain_provider = Arc::new(python::PythonToolchainProvider::default());
let rust_context_provider = Arc::new(rust::RustContextProvider);
let rust_lsp_adapter = Arc::new(rust::RustLspAdapter);
let tailwind_adapter = Arc::new(tailwind::TailwindLspAdapter::new(node.clone()));
let typescript_context = Arc::new(typescript::typescript_task_context());
let typescript_lsp_adapter = Arc::new(typescript::TypeScriptLspAdapter::new(node.clone()));
let vtsls_adapter = Arc::new(vtsls::VtslsLspAdapter::new(node.clone()));
let yaml_lsp_adapter = Arc::new(yaml::YamlLspAdapter::new(node.clone()));
// Following are a series of helper macros for registering languages.
// Macros are used instead of a function or for loop in order to avoid
// code duplication and improve readability as the types get quite verbose
// to type out in some cases.
// Additionally, the `provider` fields in LoadedLanguage
// would have be `Copy` if we were to use a function or for-loop to register the languages
// due to the fact that we pass an `Arc<Fn>` to `languages.register_language`
// that loads and initializes the language lazily.
// We avoid this entirely by using a Macro
let built_in_languages = [
LanguageInfo {
name: "bash",
context: Some(Arc::new(bash::bash_task_context())),
..Default::default()
},
LanguageInfo {
name: "c",
adapters: vec![c_lsp_adapter.clone()],
..Default::default()
},
LanguageInfo {
name: "cpp",
adapters: vec![c_lsp_adapter.clone()],
..Default::default()
},
LanguageInfo {
name: "css",
adapters: vec![css_lsp_adapter.clone()],
..Default::default()
},
LanguageInfo {
name: "diff",
adapters: vec![],
..Default::default()
},
LanguageInfo {
name: "go",
adapters: vec![go_lsp_adapter.clone()],
context: Some(go_context_provider.clone()),
..Default::default()
},
LanguageInfo {
name: "gomod",
adapters: vec![go_lsp_adapter.clone()],
context: Some(go_context_provider.clone()),
..Default::default()
},
LanguageInfo {
name: "gowork",
adapters: vec![go_lsp_adapter.clone()],
context: Some(go_context_provider.clone()),
..Default::default()
},
LanguageInfo {
name: "json",
adapters: vec![json_lsp_adapter.clone(), node_version_lsp_adapter.clone()],
context: Some(json_context_provider.clone()),
..Default::default()
},
LanguageInfo {
name: "jsonc",
adapters: vec![json_lsp_adapter.clone()],
context: Some(json_context_provider.clone()),
..Default::default()
},
LanguageInfo {
name: "markdown",
adapters: vec![],
..Default::default()
},
LanguageInfo {
name: "markdown-inline",
adapters: vec![],
..Default::default()
},
LanguageInfo {
name: "python",
adapters: vec![python_lsp_adapter.clone(), py_lsp_adapter.clone()],
context: Some(python_context_provider),
toolchain: Some(python_toolchain_provider),
},
LanguageInfo {
name: "rust",
adapters: vec![rust_lsp_adapter],
context: Some(rust_context_provider),
..Default::default()
},
LanguageInfo {
name: "tsx",
adapters: vec![typescript_lsp_adapter.clone(), vtsls_adapter.clone()],
context: Some(typescript_context.clone()),
..Default::default()
},
LanguageInfo {
name: "typescript",
adapters: vec![typescript_lsp_adapter.clone(), vtsls_adapter.clone()],
context: Some(typescript_context.clone()),
..Default::default()
},
LanguageInfo {
name: "javascript",
adapters: vec![typescript_lsp_adapter.clone(), vtsls_adapter.clone()],
context: Some(typescript_context.clone()),
..Default::default()
},
LanguageInfo {
name: "jsdoc",
adapters: vec![typescript_lsp_adapter.clone(), vtsls_adapter.clone()],
..Default::default()
},
LanguageInfo {
name: "regex",
adapters: vec![],
..Default::default()
},
LanguageInfo {
name: "yaml",
adapters: vec![yaml_lsp_adapter],
..Default::default()
},
];
for registration in built_in_languages {
register_language(
&languages,
registration.name,
registration.adapters,
registration.context,
registration.toolchain,
);
macro_rules! context_provider {
($name:expr) => {
Some(Arc::new($name) as Arc<dyn ContextProvider>)
};
() => {
None
};
}
macro_rules! toolchain_provider {
($name:expr) => {
Some(Arc::new($name) as Arc<dyn ToolchainLister>)
};
() => {
None
};
}
macro_rules! adapters {
($($item:expr),+ $(,)?) => {
vec![
$(Arc::new($item) as Arc<dyn LspAdapter>,)*
]
};
() => {
vec![]
};
}
macro_rules! register_language {
($name:expr, adapters => $adapters:expr, context => $context:expr, toolchain => $toolchain:expr) => {
let config = load_config($name);
for adapter in $adapters {
languages.register_lsp_adapter(config.name.clone(), adapter);
}
languages.register_language(
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
config.hidden,
Arc::new(move || {
Ok(LoadedLanguage {
config: config.clone(),
queries: load_queries($name),
context_provider: $context,
toolchain_provider: $toolchain,
})
}),
);
};
($name:expr) => {
register_language!($name, adapters => adapters![], context => context_provider!(), toolchain => toolchain_provider!())
};
($name:expr, adapters => $adapters:expr, context => $context:expr, toolchain => $toolchain:expr) => {
register_language!($name, adapters => $adapters, context => $context, toolchain => $toolchain)
};
($name:expr, adapters => $adapters:expr, context => $context:expr) => {
register_language!($name, adapters => $adapters, context => $context, toolchain => toolchain_provider!())
};
($name:expr, adapters => $adapters:expr) => {
register_language!($name, adapters => $adapters, context => context_provider!(), toolchain => toolchain_provider!())
};
}
register_language!(
"bash",
adapters => adapters![],
context => context_provider!(bash_task_context()),
toolchain => toolchain_provider!()
);
register_language!(
"c",
adapters => adapters![c::CLspAdapter]
);
register_language!(
"cpp",
adapters => adapters![c::CLspAdapter]
);
register_language!(
"css",
adapters => adapters![css::CssLspAdapter::new(node_runtime.clone())]
);
register_language!("diff");
register_language!(
"go",
adapters => adapters![go::GoLspAdapter],
context => context_provider!(go::GoContextProvider)
);
register_language!(
"gomod",
adapters => adapters![go::GoLspAdapter],
context => context_provider!(go::GoContextProvider)
);
register_language!(
"gowork",
adapters => adapters![go::GoLspAdapter],
context => context_provider!(go::GoContextProvider)
);
register_language!(
"json",
adapters => adapters![
json::JsonLspAdapter::new(node_runtime.clone(), languages.clone(),),
json::NodeVersionAdapter,
],
context => context_provider!(json_task_context())
);
register_language!(
"jsonc",
adapters => adapters![
json::JsonLspAdapter::new(node_runtime.clone(), languages.clone(),),
],
context => context_provider!(json_task_context())
);
register_language!("markdown");
register_language!("markdown-inline");
register_language!(
"python",
adapters => adapters![
python::PythonLspAdapter::new(node_runtime.clone()),
python::PyLspAdapter::new()
],
context => context_provider!(PythonContextProvider),
toolchain => toolchain_provider!(PythonToolchainProvider::default())
);
register_language!(
"rust",
adapters => adapters![rust::RustLspAdapter],
context => context_provider!(RustContextProvider)
);
register_language!(
"tsx",
adapters => adapters![
typescript::TypeScriptLspAdapter::new(node_runtime.clone()),
vtsls::VtslsLspAdapter::new(node_runtime.clone()),
],
context => context_provider!(typescript_task_context()),
toolchain => toolchain_provider!()
);
register_language!(
"typescript",
adapters => adapters![
typescript::TypeScriptLspAdapter::new(node_runtime.clone()),
vtsls::VtslsLspAdapter::new(node_runtime.clone()),
],
context => context_provider!(typescript_task_context())
);
register_language!(
"javascript",
adapters => adapters![
typescript::TypeScriptLspAdapter::new(node_runtime.clone()),
vtsls::VtslsLspAdapter::new(node_runtime.clone()),
],
context => context_provider!(typescript_task_context())
);
register_language!(
"jsdoc",
adapters => adapters![
typescript::TypeScriptLspAdapter::new(node_runtime.clone()),
vtsls::VtslsLspAdapter::new(node_runtime.clone()),
]
);
register_language!("regex");
register_language!("yaml",
adapters => adapters![
yaml::YamlLspAdapter::new(node_runtime.clone()),
]
);
// Register globally available language servers.
//
// This will allow users to add support for a built-in language server (e.g., Tailwind)
@@ -231,23 +278,23 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
languages.register_available_lsp_adapter(
LanguageServerName("tailwindcss-language-server".into()),
{
let adapter = tailwind_adapter.clone();
move || adapter.clone()
let node_runtime = node_runtime.clone();
move || Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone()))
},
);
languages.register_available_lsp_adapter(LanguageServerName("eslint".into()), {
let adapter = eslint_adapter.clone();
move || adapter.clone()
let node_runtime = node_runtime.clone();
move || Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone()))
});
languages.register_available_lsp_adapter(LanguageServerName("vtsls".into()), {
let adapter = vtsls_adapter.clone();
move || adapter.clone()
let node_runtime = node_runtime.clone();
move || Arc::new(vtsls::VtslsLspAdapter::new(node_runtime.clone()))
});
languages.register_available_lsp_adapter(
LanguageServerName("typescript-language-server".into()),
{
let adapter = typescript_lsp_adapter.clone();
move || adapter.clone()
let node_runtime = node_runtime.clone();
move || Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone()))
},
);
@@ -269,12 +316,18 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
];
for language in tailwind_languages {
languages.register_lsp_adapter(language.into(), tailwind_adapter.clone());
languages.register_lsp_adapter(
language.into(),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
);
}
let eslint_languages = ["TSX", "TypeScript", "JavaScript", "Vue.js", "Svelte"];
for language in eslint_languages {
languages.register_lsp_adapter(language.into(), eslint_adapter.clone());
languages.register_lsp_adapter(
language.into(),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
);
}
let mut subscription = languages.subscribe();
@@ -299,41 +352,6 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
.detach();
}
#[derive(Default)]
struct LanguageInfo {
name: &'static str,
adapters: Vec<Arc<dyn LspAdapter>>,
context: Option<Arc<dyn ContextProvider>>,
toolchain: Option<Arc<dyn ToolchainLister>>,
}
fn register_language(
languages: &LanguageRegistry,
name: &'static str,
adapters: Vec<Arc<dyn LspAdapter>>,
context: Option<Arc<dyn ContextProvider>>,
toolchain: Option<Arc<dyn ToolchainLister>>,
) {
let config = load_config(name);
for adapter in adapters {
languages.register_lsp_adapter(config.name.clone(), adapter);
}
languages.register_language(
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
config.hidden,
Arc::new(move || {
Ok(LoadedLanguage {
config: config.clone(),
queries: load_queries(name),
context_provider: context.clone(),
toolchain_provider: toolchain.clone(),
})
}),
);
}
#[cfg(any(test, feature = "test-support"))]
pub fn language(name: &str, grammar: tree_sitter::Language) -> Arc<Language> {
Arc::new(

View File

@@ -7,7 +7,7 @@ use gpui::{App, AsyncApp, Task};
use http_client::github::AssetKind;
use http_client::github::{latest_github_release, GitHubLspBinaryVersion};
pub use language::*;
use lsp::LanguageServerBinary;
use lsp::{LanguageServerBinary, LanguageServerName};
use regex::Regex;
use smol::fs::{self};
use std::fmt::Display;

View File

@@ -632,9 +632,6 @@ impl LanguageServer {
diagnostic: Some(DiagnosticWorkspaceClientCapabilities {
refresh_support: None,
}),
code_lens: Some(CodeLensWorkspaceClientCapabilities {
refresh_support: Some(true),
}),
workspace_edit: Some(WorkspaceEditClientCapabilities {
resource_operations: Some(vec![
ResourceOperationKind::Create,
@@ -766,9 +763,6 @@ impl LanguageServer {
did_save: Some(true),
..TextDocumentSyncClientCapabilities::default()
}),
code_lens: Some(CodeLensClientCapabilities {
dynamic_registration: Some(false),
}),
..TextDocumentClientCapabilities::default()
}),
experimental: Some(json!({

View File

@@ -346,16 +346,17 @@ impl std::fmt::Debug for ExcerptInfo {
#[derive(Debug)]
pub struct ExcerptBoundary {
pub prev: Option<ExcerptInfo>,
pub next: ExcerptInfo,
pub next: Option<ExcerptInfo>,
/// The row in the `MultiBuffer` where the boundary is located
pub row: MultiBufferRow,
}
impl ExcerptBoundary {
pub fn starts_new_buffer(&self) -> bool {
match (self.prev.as_ref(), &self.next) {
match (self.prev.as_ref(), self.next.as_ref()) {
(None, _) => true,
(Some(prev), next) => prev.buffer_id != next.buffer_id,
(Some(_), None) => false,
(Some(prev), Some(next)) => prev.buffer_id != next.buffer_id,
}
}
}
@@ -1596,10 +1597,6 @@ impl MultiBuffer {
self.update_path_excerpts(path, buffer, &buffer_snapshot, new, cx)
}
pub fn has_excerpt_for_path(&self, path: &PathKey) -> bool {
self.excerpts_by_path.contains_key(path)
}
fn update_path_excerpts(
&mut self,
path: PathKey,
@@ -4099,7 +4096,6 @@ impl MultiBufferSnapshot {
}
pub fn widest_line_number(&self) -> u32 {
// widest_line_number is 0-based, so 1 is added to get the displayed line number.
self.excerpts.summary().widest_line_number + 1
}
@@ -5203,19 +5199,27 @@ impl MultiBufferSnapshot {
cursor.next_excerpt();
let mut visited_end = false;
iter::from_fn(move || loop {
if self.singleton {
return None;
}
let next_region = cursor.region()?;
let next_region = cursor.region();
cursor.next_excerpt();
if !bounds.contains(&next_region.range.start.key) {
prev_region = Some(next_region);
continue;
}
let next_region_start = next_region.range.start.value.unwrap();
let next_region_start = if let Some(region) = &next_region {
if !bounds.contains(&region.range.start.key) {
prev_region = next_region;
continue;
}
region.range.start.value.unwrap()
} else {
if !bounds.contains(&self.len()) {
return None;
}
self.max_point()
};
let next_region_end = if let Some(region) = cursor.region() {
region.range.start.value.unwrap()
} else {
@@ -5230,21 +5234,29 @@ impl MultiBufferSnapshot {
end_row: MultiBufferRow(next_region_start.row),
});
let next = ExcerptInfo {
id: next_region.excerpt.id,
buffer: next_region.excerpt.buffer.clone(),
buffer_id: next_region.excerpt.buffer_id,
range: next_region.excerpt.range.clone(),
end_row: if next_region.excerpt.has_trailing_newline {
let next = next_region.as_ref().map(|region| ExcerptInfo {
id: region.excerpt.id,
buffer: region.excerpt.buffer.clone(),
buffer_id: region.excerpt.buffer_id,
range: region.excerpt.range.clone(),
end_row: if region.excerpt.has_trailing_newline {
MultiBufferRow(next_region_end.row - 1)
} else {
MultiBufferRow(next_region_end.row)
},
};
});
if next.is_none() {
if visited_end {
return None;
} else {
visited_end = true;
}
}
let row = MultiBufferRow(next_region_start.row);
prev_region = Some(next_region);
prev_region = next_region;
return Some(ExcerptBoundary { row, prev, next });
})

View File

@@ -341,17 +341,17 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
) -> Vec<(MultiBufferRow, String, bool)> {
snapshot
.excerpt_boundaries_in_range(range)
.map(|boundary| {
.filter_map(|boundary| {
let starts_new_buffer = boundary.starts_new_buffer();
(
boundary.row,
boundary
.next
.buffer
.text_for_range(boundary.next.range.context)
.collect::<String>(),
starts_new_buffer,
)
boundary.next.map(|next| {
(
boundary.row,
next.buffer
.text_for_range(next.range.context)
.collect::<String>(),
starts_new_buffer,
)
})
})
.collect::<Vec<_>>()
}
@@ -2695,7 +2695,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let actual_text = snapshot.text();
let actual_boundary_rows = snapshot
.excerpt_boundaries_in_range(0..)
.map(|b| b.row)
.filter_map(|b| if b.next.is_some() { Some(b.row) } else { None })
.collect::<HashSet<_>>();
let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -234,19 +234,6 @@ pub(crate) struct InlayHints {
pub range: Range<Anchor>,
}
#[derive(Debug, Copy, Clone)]
pub(crate) struct GetCodeLens;
impl GetCodeLens {
pub(crate) fn can_resolve_lens(capabilities: &ServerCapabilities) -> bool {
capabilities
.code_lens_provider
.as_ref()
.and_then(|code_lens_options| code_lens_options.resolve_provider)
.unwrap_or(false)
}
}
#[derive(Debug)]
pub(crate) struct LinkedEditingRange {
pub position: Anchor,
@@ -2242,18 +2229,18 @@ impl LspCommand for GetCodeActions {
.unwrap_or_default()
.into_iter()
.filter_map(|entry| {
let (lsp_action, resolved) = match entry {
let lsp_action = match entry {
lsp::CodeActionOrCommand::CodeAction(lsp_action) => {
if let Some(command) = lsp_action.command.as_ref() {
if !available_commands.contains(&command.command) {
return None;
}
}
(LspAction::Action(Box::new(lsp_action)), false)
LspAction::Action(Box::new(lsp_action))
}
lsp::CodeActionOrCommand::Command(command) => {
if available_commands.contains(&command.command) {
(LspAction::Command(command), true)
LspAction::Command(command)
} else {
return None;
}
@@ -2272,7 +2259,6 @@ impl LspCommand for GetCodeActions {
server_id,
range: self.range.clone(),
lsp_action,
resolved,
})
})
.collect())
@@ -3051,152 +3037,6 @@ impl LspCommand for InlayHints {
}
}
#[async_trait(?Send)]
impl LspCommand for GetCodeLens {
type Response = Vec<CodeAction>;
type LspRequest = lsp::CodeLensRequest;
type ProtoRequest = proto::GetCodeLens;
fn display_name(&self) -> &str {
"Code Lens"
}
fn check_capabilities(&self, capabilities: AdapterServerCapabilities) -> bool {
capabilities
.server_capabilities
.code_lens_provider
.as_ref()
.map_or(false, |code_lens_options| {
code_lens_options.resolve_provider.unwrap_or(false)
})
}
fn to_lsp(
&self,
path: &Path,
_: &Buffer,
_: &Arc<LanguageServer>,
_: &App,
) -> Result<lsp::CodeLensParams> {
Ok(lsp::CodeLensParams {
text_document: lsp::TextDocumentIdentifier {
uri: file_path_to_lsp_url(path)?,
},
work_done_progress_params: lsp::WorkDoneProgressParams::default(),
partial_result_params: lsp::PartialResultParams::default(),
})
}
async fn response_from_lsp(
self,
message: Option<Vec<lsp::CodeLens>>,
lsp_store: Entity<LspStore>,
buffer: Entity<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncApp,
) -> anyhow::Result<Vec<CodeAction>> {
let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
let language_server = cx.update(|cx| {
lsp_store
.read(cx)
.language_server_for_id(server_id)
.with_context(|| {
format!("Missing the language server that just returned a response {server_id}")
})
})??;
let server_capabilities = language_server.capabilities();
let available_commands = server_capabilities
.execute_command_provider
.as_ref()
.map(|options| options.commands.as_slice())
.unwrap_or_default();
Ok(message
.unwrap_or_default()
.into_iter()
.filter(|code_lens| {
code_lens
.command
.as_ref()
.is_none_or(|command| available_commands.contains(&command.command))
})
.map(|code_lens| {
let code_lens_range = range_from_lsp(code_lens.range);
let start = snapshot.clip_point_utf16(code_lens_range.start, Bias::Left);
let end = snapshot.clip_point_utf16(code_lens_range.end, Bias::Right);
let range = snapshot.anchor_before(start)..snapshot.anchor_after(end);
CodeAction {
server_id,
range,
lsp_action: LspAction::CodeLens(code_lens),
resolved: false,
}
})
.collect())
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCodeLens {
proto::GetCodeLens {
project_id,
buffer_id: buffer.remote_id().into(),
version: serialize_version(&buffer.version()),
}
}
async fn from_proto(
message: proto::GetCodeLens,
_: Entity<LspStore>,
buffer: Entity<Buffer>,
mut cx: AsyncApp,
) -> Result<Self> {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})?
.await?;
Ok(Self)
}
fn response_to_proto(
response: Vec<CodeAction>,
_: &mut LspStore,
_: PeerId,
buffer_version: &clock::Global,
_: &mut App,
) -> proto::GetCodeLensResponse {
proto::GetCodeLensResponse {
lens_actions: response
.iter()
.map(LspStore::serialize_code_action)
.collect(),
version: serialize_version(buffer_version),
}
}
async fn response_from_proto(
self,
message: proto::GetCodeLensResponse,
_: Entity<LspStore>,
buffer: Entity<Buffer>,
mut cx: AsyncApp,
) -> anyhow::Result<Vec<CodeAction>> {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})?
.await?;
message
.lens_actions
.into_iter()
.map(LspStore::deserialize_code_action)
.collect::<Result<Vec<_>>>()
.context("deserializing proto code lens response")
}
fn buffer_id_from_proto(message: &proto::GetCodeLens) -> Result<BufferId> {
BufferId::new(message.buffer_id)
}
}
#[async_trait(?Send)]
impl LspCommand for LinkedEditingRange {
type Response = Vec<Range<Anchor>>;

View File

@@ -807,27 +807,6 @@ impl LocalLspStore {
})
.detach();
language_server
.on_request::<lsp::request::CodeLensRefresh, _, _>({
let this = this.clone();
move |(), mut cx| {
let this = this.clone();
async move {
this.update(&mut cx, |this, cx| {
cx.emit(LspStoreEvent::RefreshCodeLens);
this.downstream_client.as_ref().map(|(client, project_id)| {
client.send(proto::RefreshCodeLens {
project_id: *project_id,
})
})
})?
.transpose()?;
Ok(())
}
}
})
.detach();
language_server
.on_request::<lsp::request::ShowMessageRequest, _, _>({
let this = this.clone();
@@ -1649,8 +1628,7 @@ impl LocalLspStore {
) -> anyhow::Result<()> {
match &mut action.lsp_action {
LspAction::Action(lsp_action) => {
if !action.resolved
&& GetCodeActions::can_resolve_actions(&lang_server.capabilities())
if GetCodeActions::can_resolve_actions(&lang_server.capabilities())
&& lsp_action.data.is_some()
&& (lsp_action.command.is_none() || lsp_action.edit.is_none())
{
@@ -1661,17 +1639,8 @@ impl LocalLspStore {
);
}
}
LspAction::CodeLens(lens) => {
if !action.resolved && GetCodeLens::can_resolve_lens(&lang_server.capabilities()) {
*lens = lang_server
.request::<lsp::request::CodeLensResolve>(lens.clone())
.await?;
}
}
LspAction::Command(_) => {}
}
action.resolved = true;
anyhow::Ok(())
}
@@ -2918,7 +2887,6 @@ pub enum LspStoreEvent {
},
Notification(String),
RefreshInlayHints,
RefreshCodeLens,
DiagnosticsUpdated {
language_server_id: LanguageServerId,
path: ProjectPath,
@@ -2974,7 +2942,6 @@ impl LspStore {
client.add_entity_request_handler(Self::handle_resolve_inlay_hint);
client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
client.add_entity_request_handler(Self::handle_refresh_inlay_hints);
client.add_entity_request_handler(Self::handle_refresh_code_lens);
client.add_entity_request_handler(Self::handle_on_type_formatting);
client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
client.add_entity_request_handler(Self::handle_register_buffer_with_language_servers);
@@ -4349,7 +4316,6 @@ impl LspStore {
cx,
)
}
pub fn code_actions(
&mut self,
buffer_handle: &Entity<Buffer>,
@@ -4429,66 +4395,6 @@ impl LspStore {
}
}
pub fn code_lens(
&mut self,
buffer_handle: &Entity<Buffer>,
cx: &mut Context<Self>,
) -> Task<Result<Vec<CodeAction>>> {
if let Some((upstream_client, project_id)) = self.upstream_client() {
let request_task = upstream_client.request(proto::MultiLspQuery {
buffer_id: buffer_handle.read(cx).remote_id().into(),
version: serialize_version(&buffer_handle.read(cx).version()),
project_id,
strategy: Some(proto::multi_lsp_query::Strategy::All(
proto::AllLanguageServers {},
)),
request: Some(proto::multi_lsp_query::Request::GetCodeLens(
GetCodeLens.to_proto(project_id, buffer_handle.read(cx)),
)),
});
let buffer = buffer_handle.clone();
cx.spawn(|weak_project, cx| async move {
let Some(project) = weak_project.upgrade() else {
return Ok(Vec::new());
};
let responses = request_task.await?.responses;
let code_lens = join_all(
responses
.into_iter()
.filter_map(|lsp_response| match lsp_response.response? {
proto::lsp_response::Response::GetCodeLensResponse(response) => {
Some(response)
}
unexpected => {
debug_panic!("Unexpected response: {unexpected:?}");
None
}
})
.map(|code_lens_response| {
GetCodeLens.response_from_proto(
code_lens_response,
project.clone(),
buffer.clone(),
cx.clone(),
)
}),
)
.await;
Ok(code_lens
.into_iter()
.collect::<Result<Vec<Vec<_>>>>()?
.into_iter()
.flatten()
.collect())
})
} else {
let code_lens_task =
self.request_multiple_lsp_locally(buffer_handle, None::<usize>, GetCodeLens, cx);
cx.spawn(|_, _| async move { Ok(code_lens_task.await.into_iter().flatten().collect()) })
}
}
#[inline(never)]
pub fn completions(
&self,
@@ -6402,43 +6308,6 @@ impl LspStore {
.collect(),
})
}
Some(proto::multi_lsp_query::Request::GetCodeLens(get_code_lens)) => {
let get_code_lens = GetCodeLens::from_proto(
get_code_lens,
this.clone(),
buffer.clone(),
cx.clone(),
)
.await?;
let code_lens_actions = this
.update(&mut cx, |project, cx| {
project.request_multiple_lsp_locally(
&buffer,
None::<usize>,
get_code_lens,
cx,
)
})?
.await
.into_iter();
this.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
responses: code_lens_actions
.map(|actions| proto::LspResponse {
response: Some(proto::lsp_response::Response::GetCodeLensResponse(
GetCodeLens::response_to_proto(
actions,
project,
sender_id,
&buffer_version,
cx,
),
)),
})
.collect(),
})
}
None => anyhow::bail!("empty multi lsp query request"),
}
}
@@ -7342,17 +7211,6 @@ impl LspStore {
})
}
async fn handle_refresh_code_lens(
this: Entity<Self>,
_: TypedEnvelope<proto::RefreshCodeLens>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
this.update(&mut cx, |_, cx| {
cx.emit(LspStoreEvent::RefreshCodeLens);
})?;
Ok(proto::Ack {})
}
async fn handle_open_buffer_for_symbol(
this: Entity<Self>,
envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
@@ -8576,10 +8434,6 @@ impl LspStore {
proto::code_action::Kind::Command as i32,
serde_json::to_vec(command).unwrap(),
),
LspAction::CodeLens(code_lens) => (
proto::code_action::Kind::CodeLens as i32,
serde_json::to_vec(code_lens).unwrap(),
),
};
proto::CodeAction {
@@ -8588,7 +8442,6 @@ impl LspStore {
end: Some(serialize_anchor(&action.range.end)),
lsp_action,
kind,
resolved: action.resolved,
}
}
@@ -8596,11 +8449,11 @@ impl LspStore {
let start = action
.start
.and_then(deserialize_anchor)
.context("invalid start")?;
.ok_or_else(|| anyhow!("invalid start"))?;
let end = action
.end
.and_then(deserialize_anchor)
.context("invalid end")?;
.ok_or_else(|| anyhow!("invalid end"))?;
let lsp_action = match proto::code_action::Kind::from_i32(action.kind) {
Some(proto::code_action::Kind::Action) => {
LspAction::Action(serde_json::from_slice(&action.lsp_action)?)
@@ -8608,15 +8461,11 @@ impl LspStore {
Some(proto::code_action::Kind::Command) => {
LspAction::Command(serde_json::from_slice(&action.lsp_action)?)
}
Some(proto::code_action::Kind::CodeLens) => {
LspAction::CodeLens(serde_json::from_slice(&action.lsp_action)?)
}
None => anyhow::bail!("Unknown action kind {}", action.kind),
};
Ok(CodeAction {
server_id: LanguageServerId(action.server_id as usize),
range: start..end,
resolved: action.resolved,
lsp_action,
})
}

View File

@@ -6,14 +6,6 @@ use crate::{LanguageServerPromptRequest, LspStore, LspStoreEvent};
pub const RUST_ANALYZER_NAME: &str = "rust-analyzer";
pub const EXTRA_SUPPORTED_COMMANDS: &[&str] = &[
"rust-analyzer.runSingle",
"rust-analyzer.showReferences",
"rust-analyzer.gotoLocation",
"rust-analyzer.triggerParameterHints",
"rust-analyzer.rename",
];
/// Experimental: Informs the end user about the state of the server
///
/// [Rust Analyzer Specification](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/lsp-extensions.md#server-status)

View File

@@ -280,7 +280,6 @@ pub enum Event {
Reshared,
Rejoined,
RefreshInlayHints,
RefreshCodeLens,
RevealInProjectPanel(ProjectEntryId),
SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
ExpandedAllForEntry(WorktreeId, ProjectEntryId),
@@ -510,8 +509,6 @@ pub struct CodeAction {
/// The raw code action provided by the language server.
/// Can be either an action or a command.
pub lsp_action: LspAction,
/// Whether the action needs to be resolved using the language server.
pub resolved: bool,
}
/// An action sent back by a language server.
@@ -522,8 +519,6 @@ pub enum LspAction {
Action(Box<lsp::CodeAction>),
/// A command data to run as an action.
Command(lsp::Command),
/// A code lens data to run as an action.
CodeLens(lsp::CodeLens),
}
impl LspAction {
@@ -531,11 +526,6 @@ impl LspAction {
match self {
Self::Action(action) => &action.title,
Self::Command(command) => &command.title,
Self::CodeLens(lens) => lens
.command
.as_ref()
.map(|command| command.title.as_str())
.unwrap_or("Unknown command"),
}
}
@@ -543,7 +533,6 @@ impl LspAction {
match self {
Self::Action(action) => action.kind.clone(),
Self::Command(_) => Some(lsp::CodeActionKind::new("command")),
Self::CodeLens(_) => Some(lsp::CodeActionKind::new("code lens")),
}
}
@@ -551,7 +540,6 @@ impl LspAction {
match self {
Self::Action(action) => action.edit.as_ref(),
Self::Command(_) => None,
Self::CodeLens(_) => None,
}
}
@@ -559,7 +547,6 @@ impl LspAction {
match self {
Self::Action(action) => action.command.as_ref(),
Self::Command(command) => Some(command),
Self::CodeLens(lens) => lens.command.as_ref(),
}
}
}
@@ -665,7 +652,6 @@ impl Hover {
enum EntitySubscription {
Project(PendingEntitySubscription<Project>),
BufferStore(PendingEntitySubscription<BufferStore>),
GitStore(PendingEntitySubscription<GitStore>),
WorktreeStore(PendingEntitySubscription<WorktreeStore>),
LspStore(PendingEntitySubscription<LspStore>),
SettingsObserver(PendingEntitySubscription<SettingsObserver>),
@@ -864,6 +850,7 @@ impl Project {
buffer_store.clone(),
environment.clone(),
fs.clone(),
client.clone().into(),
cx,
)
});
@@ -992,6 +979,7 @@ impl Project {
buffer_store.clone(),
environment.clone(),
ssh_proto.clone(),
ProjectId(SSH_PROJECT_ID),
cx,
)
});
@@ -1108,7 +1096,6 @@ impl Project {
let subscriptions = [
EntitySubscription::Project(client.subscribe_to_entity::<Self>(remote_id)?),
EntitySubscription::BufferStore(client.subscribe_to_entity::<BufferStore>(remote_id)?),
EntitySubscription::GitStore(client.subscribe_to_entity::<GitStore>(remote_id)?),
EntitySubscription::WorktreeStore(
client.subscribe_to_entity::<WorktreeStore>(remote_id)?,
),
@@ -1137,7 +1124,7 @@ impl Project {
async fn from_join_project_response(
response: TypedEnvelope<proto::JoinProjectResponse>,
subscriptions: [EntitySubscription; 6],
subscriptions: [EntitySubscription; 5],
client: Arc<Client>,
run_tasks: bool,
user_store: Entity<UserStore>,
@@ -1254,7 +1241,7 @@ impl Project {
remote_id,
replica_id,
},
git_store: git_store.clone(),
git_store,
buffers_needing_diff: Default::default(),
git_diff_debouncer: DebouncedDelay::new(),
terminals: Terminals {
@@ -1284,9 +1271,6 @@ impl Project {
EntitySubscription::WorktreeStore(subscription) => {
subscription.set_entity(&worktree_store, &mut cx)
}
EntitySubscription::GitStore(subscription) => {
subscription.set_entity(&git_store, &mut cx)
}
EntitySubscription::SettingsObserver(subscription) => {
subscription.set_entity(&settings_observer, &mut cx)
}
@@ -1877,9 +1861,6 @@ impl Project {
self.settings_observer.update(cx, |settings_observer, cx| {
settings_observer.shared(project_id, self.client.clone().into(), cx)
});
self.git_store.update(cx, |git_store, cx| {
git_store.shared(project_id, self.client.clone().into(), cx)
});
self.client_state = ProjectClientState::Shared {
remote_id: project_id,
@@ -1961,9 +1942,6 @@ impl Project {
self.settings_observer.update(cx, |settings_observer, cx| {
settings_observer.unshared(cx);
});
self.git_store.update(cx, |git_store, cx| {
git_store.unshared(cx);
});
self.client
.send(proto::UnshareProject {
@@ -2189,8 +2167,10 @@ impl Project {
if self.is_disconnected(cx) {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
self.git_store
.update(cx, |git_store, cx| git_store.open_unstaged_diff(buffer, cx))
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_unstaged_diff(buffer, cx)
})
}
pub fn open_uncommitted_diff(
@@ -2201,8 +2181,9 @@ impl Project {
if self.is_disconnected(cx) {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
self.git_store.update(cx, |git_store, cx| {
git_store.open_uncommitted_diff(buffer, cx)
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_uncommitted_diff(buffer, cx)
})
}
@@ -2502,7 +2483,6 @@ impl Project {
};
}
LspStoreEvent::RefreshInlayHints => cx.emit(Event::RefreshInlayHints),
LspStoreEvent::RefreshCodeLens => cx.emit(Event::RefreshCodeLens),
LspStoreEvent::LanguageServerPrompt(prompt) => {
cx.emit(Event::LanguageServerPrompt(prompt.clone()))
}
@@ -2761,8 +2741,8 @@ impl Project {
if buffers.is_empty() {
None
} else {
Some(this.git_store.update(cx, |git_store, cx| {
git_store.recalculate_buffer_diffs(buffers, cx)
Some(this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.recalculate_buffer_diffs(buffers, cx)
}))
}
})
@@ -3183,34 +3163,6 @@ impl Project {
})
}
pub fn code_lens<T: Clone + ToOffset>(
&mut self,
buffer_handle: &Entity<Buffer>,
range: Range<T>,
cx: &mut Context<Self>,
) -> Task<Result<Vec<CodeAction>>> {
let snapshot = buffer_handle.read(cx).snapshot();
let range = snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end);
let code_lens_actions = self
.lsp_store
.update(cx, |lsp_store, cx| lsp_store.code_lens(buffer_handle, cx));
cx.background_spawn(async move {
let mut code_lens_actions = code_lens_actions.await?;
code_lens_actions.retain(|code_lens_action| {
range
.start
.cmp(&code_lens_action.range.start, &snapshot)
.is_ge()
&& range
.end
.cmp(&code_lens_action.range.end, &snapshot)
.is_le()
});
Ok(code_lens_actions)
})
}
pub fn apply_code_action(
&self,
buffer_handle: Entity<Buffer>,
@@ -3848,8 +3800,7 @@ impl Project {
/// # Returns
///
/// Returns `Some(ProjectPath)` if a matching worktree is found, otherwise `None`.
pub fn find_project_path(&self, path: impl AsRef<Path>, cx: &App) -> Option<ProjectPath> {
let path = path.as_ref();
pub fn find_project_path(&self, path: &Path, cx: &App) -> Option<ProjectPath> {
let worktree_store = self.worktree_store.read(cx);
for worktree in worktree_store.visible_worktrees(cx) {
@@ -4014,9 +3965,6 @@ impl Project {
buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
}
});
this.git_store.update(cx, |git_store, _| {
git_store.forget_shared_diffs_for(&peer_id);
});
cx.emit(Event::CollaboratorLeft(peer_id));
Ok(())

View File

@@ -168,10 +168,6 @@ pub struct GitSettings {
///
/// Default: on
pub inline_blame: Option<InlineBlameSettings>,
/// How hunks are displayed visually in the editor.
///
/// Default: staged_hollow
pub hunk_style: Option<GitHunkStyleSetting>,
}
impl GitSettings {
@@ -207,11 +203,20 @@ impl GitSettings {
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum GitHunkStyleSetting {
/// Show unstaged hunks with a filled background and staged hunks hollow.
/// Show unstaged hunks with a transparent background
#[default]
StagedHollow,
/// Show unstaged hunks hollow and staged hunks with a filled background.
UnstagedHollow,
Transparent,
/// Show unstaged hunks with a pattern background
Pattern,
/// Show unstaged hunks with a border background
Border,
/// Show staged hunks with a pattern background
StagedPattern,
/// Show staged hunks with a pattern background
StagedTransparent,
/// Show staged hunks with a pattern background
StagedBorder,
}
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]

View File

@@ -6414,6 +6414,8 @@ async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
.await
.unwrap();
let range = Anchor::MIN..snapshot.anchor_after(snapshot.max_point());
let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
.step_by(5)
.map(|i| {
@@ -6442,7 +6444,9 @@ async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
// Stage every hunk with a different call
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
let hunks = diff
.hunks_intersecting_range(range.clone(), &snapshot, cx)
.collect::<Vec<_>>();
for hunk in hunks {
diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
}
@@ -6476,7 +6480,9 @@ async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
// Unstage every hunk with a different call
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
let hunks = diff
.hunks_intersecting_range(range, &snapshot, cx)
.collect::<Vec<_>>();
for hunk in hunks {
diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
}

View File

@@ -346,12 +346,7 @@ message Envelope {
GitDiff git_diff = 319;
GitDiffResponse git_diff_response = 320;
GitInit git_init = 321;
CodeLens code_lens = 322;
GetCodeLens get_code_lens = 323;
GetCodeLensResponse get_code_lens_response = 324;
RefreshCodeLens refresh_code_lens = 325; // current max
GitInit git_init = 321; // current max
}
reserved 87 to 88;
@@ -1268,25 +1263,6 @@ message RefreshInlayHints {
uint64 project_id = 1;
}
message CodeLens {
bytes lsp_lens = 1;
}
message GetCodeLens {
uint64 project_id = 1;
uint64 buffer_id = 2;
repeated VectorClockEntry version = 3;
}
message GetCodeLensResponse {
repeated CodeAction lens_actions = 1;
repeated VectorClockEntry version = 2;
}
message RefreshCodeLens {
uint64 project_id = 1;
}
message MarkupContent {
bool is_markdown = 1;
string value = 2;
@@ -1322,11 +1298,9 @@ message CodeAction {
Anchor end = 3;
bytes lsp_action = 4;
Kind kind = 5;
bool resolved = 6;
enum Kind {
Action = 0;
Command = 1;
CodeLens = 2;
}
}
@@ -2372,7 +2346,6 @@ message MultiLspQuery {
GetHover get_hover = 5;
GetCodeActions get_code_actions = 6;
GetSignatureHelp get_signature_help = 7;
GetCodeLens get_code_lens = 8;
}
}
@@ -2392,7 +2365,6 @@ message LspResponse {
GetHoverResponse get_hover_response = 1;
GetCodeActionsResponse get_code_actions_response = 2;
GetSignatureHelpResponse get_signature_help_response = 3;
GetCodeLensResponse get_code_lens_response = 4;
}
}

View File

@@ -340,9 +340,6 @@ messages!(
(ResolveCompletionDocumentationResponse, Background),
(ResolveInlayHint, Background),
(ResolveInlayHintResponse, Background),
(RefreshCodeLens, Background),
(GetCodeLens, Background),
(GetCodeLensResponse, Background),
(RespondToChannelInvite, Foreground),
(RespondToContactRequest, Foreground),
(RoomUpdated, Foreground),
@@ -516,7 +513,6 @@ request_messages!(
(GetUsers, UsersResponse),
(IncomingCall, Ack),
(InlayHints, InlayHintsResponse),
(GetCodeLens, GetCodeLensResponse),
(InviteChannelMember, Ack),
(JoinChannel, JoinRoomResponse),
(JoinChannelBuffer, JoinChannelBufferResponse),
@@ -538,7 +534,6 @@ request_messages!(
(PrepareRename, PrepareRenameResponse),
(CountLanguageModelTokens, CountLanguageModelTokensResponse),
(RefreshInlayHints, Ack),
(RefreshCodeLens, Ack),
(RejoinChannelBuffers, RejoinChannelBuffersResponse),
(RejoinRoom, RejoinRoomResponse),
(ReloadBuffers, ReloadBuffersResponse),
@@ -637,7 +632,6 @@ entity_messages!(
ApplyCodeActionKind,
FormatBuffers,
GetCodeActions,
GetCodeLens,
GetCompletions,
GetDefinition,
GetDeclaration,
@@ -665,7 +659,6 @@ entity_messages!(
PerformRename,
PrepareRename,
RefreshInlayHints,
RefreshCodeLens,
ReloadBuffers,
RemoveProjectCollaborator,
RenameProjectEntry,

View File

@@ -39,7 +39,6 @@ shlex.workspace = true
smol.workspace = true
tempfile.workspace = true
thiserror.workspace = true
urlencoding.workspace = true
util.workspace = true
[dev-dependencies]

View File

@@ -203,8 +203,7 @@ impl SshConnectionOptions {
anyhow::bail!("unsupported argument: {:?}", arg);
}
let mut input = &arg as &str;
// Destination might be: username1@username2@ip2@ip1
if let Some((u, rest)) = input.rsplit_once('@') {
if let Some((u, rest)) = input.split_once('@') {
input = rest;
username = Some(u.to_string());
}
@@ -239,9 +238,7 @@ impl SshConnectionOptions {
pub fn ssh_url(&self) -> String {
let mut result = String::from("ssh://");
if let Some(username) = &self.username {
// Username might be: username1@username2@ip2
let username = urlencoding::encode(username);
result.push_str(&username);
result.push_str(username);
result.push('@');
}
result.push_str(&self.host);

View File

@@ -89,15 +89,14 @@ impl HeadlessProject {
let environment = project::ProjectEnvironment::new(&worktree_store, None, cx);
let git_store = cx.new(|cx| {
let mut store = GitStore::local(
GitStore::local(
&worktree_store,
buffer_store.clone(),
environment.clone(),
fs.clone(),
session.clone().into(),
cx,
);
store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
store
)
});
let prettier_store = cx.new(|cx| {
PrettierStore::new(

View File

@@ -24,7 +24,6 @@ anyhow.workspace = true
bytes.workspace = true
futures.workspace = true
http_client.workspace = true
http_client_tls.workspace = true
serde.workspace = true
smol.workspace = true
log.workspace = true

View File

@@ -56,7 +56,7 @@ impl ReqwestClient {
}
let client = client
.use_preconfigured_tls(http_client_tls::tls_config())
.use_preconfigured_tls(http_client::tls_config())
.build()?;
let mut client: ReqwestClient = client.into();
client.proxy = proxy;

View File

@@ -1523,11 +1523,14 @@ impl Terminal {
// Doesn't make sense to scroll the alt screen
if !self.last_content.mode.contains(TermMode::ALT_SCREEN) {
let scroll_lines = match self.drag_line_delta(e, region) {
let scroll_delta = match self.drag_line_delta(e, region) {
Some(value) => value,
None => return,
};
let scroll_lines =
(scroll_delta / self.last_content.terminal_bounds.line_height) as i32;
self.events
.push_back(InternalEvent::Scroll(AlacScroll::Delta(scroll_lines)));
}
@@ -1536,21 +1539,18 @@ impl Terminal {
}
}
fn drag_line_delta(&self, e: &MouseMoveEvent, region: Bounds<Pixels>) -> Option<i32> {
let top = region.origin.y;
let bottom = region.bottom_left().y;
let scroll_lines = if e.position.y < top {
let scroll_delta = (top - e.position.y).pow(1.1);
(scroll_delta / self.last_content.terminal_bounds.line_height).ceil() as i32
fn drag_line_delta(&self, e: &MouseMoveEvent, region: Bounds<Pixels>) -> Option<Pixels> {
//TODO: Why do these need to be doubled? Probably the same problem that the IME has
let top = region.origin.y + (self.last_content.terminal_bounds.line_height * 2.);
let bottom = region.bottom_left().y - (self.last_content.terminal_bounds.line_height * 2.);
let scroll_delta = if e.position.y < top {
(top - e.position.y).pow(1.1)
} else if e.position.y > bottom {
let scroll_delta = -((e.position.y - bottom).pow(1.1));
(scroll_delta / self.last_content.terminal_bounds.line_height).floor() as i32
-((e.position.y - bottom).pow(1.1))
} else {
return None;
return None; //Nothing to do
};
Some(scroll_lines)
Some(scroll_delta)
}
pub fn mouse_down(&mut self, e: &MouseDownEvent, _cx: &mut Context<Self>) {

View File

@@ -94,7 +94,6 @@ impl BufferId {
self.into()
}
}
impl From<BufferId> for u64 {
fn from(id: BufferId) -> Self {
id.0.get()

View File

@@ -805,7 +805,7 @@ impl Pane {
.cloned()
}
pub fn preview_item_idx(&self) -> Option<usize> {
fn preview_item_idx(&self) -> Option<usize> {
if let Some(preview_item_id) = self.preview_item_id {
self.items
.iter()

View File

@@ -845,9 +845,7 @@ async fn test_update_gitignore(cx: &mut TestAppContext) {
});
}
// TODO fix flaky test
#[allow(dead_code)]
//#[gpui::test]
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
@@ -2432,9 +2430,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
// you can't rename a directory that some program already has open. This is a
// limitation of Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// TODO: re-enable flaky test.
//#[gpui::test]
#[allow(dead_code)]
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut TestAppContext) {
init_test(cx);
@@ -2627,9 +2623,7 @@ async fn test_file_status(cx: &mut TestAppContext) {
});
}
// TODO fix flaky test
#[allow(unused)]
//#[gpui::test]
#[gpui::test]
async fn test_git_repository_status(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
@@ -2743,9 +2737,7 @@ async fn test_git_repository_status(cx: &mut TestAppContext) {
});
}
// TODO fix flaky test
#[allow(unused)]
//#[gpui::test]
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
@@ -3541,8 +3533,6 @@ fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
// TODO remove this once flaky test is fixed
#[allow(dead_code)]
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
use git2::Signature;
@@ -3552,8 +3542,6 @@ fn git_stash(repo: &mut git2::Repository) {
.expect("Failed to stash");
}
// TODO remove this once flaky test is fixed
#[allow(dead_code)]
#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
let head = repo.head().expect("Couldn't get repo head");
@@ -3586,7 +3574,6 @@ fn git_checkout(name: &str, repo: &git2::Repository) {
repo.checkout_head(None).expect("Failed to check out head");
}
// TODO remove this once flaky test is fixed
#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {

View File

@@ -124,7 +124,7 @@ time.workspace = true
toolchain_selector.workspace = true
ui.workspace = true
url.workspace = true
urlencoding.workspace = true
urlencoding = "2.1.2"
util.workspace = true
uuid.workspace = true
vim.workspace = true

View File

@@ -116,7 +116,7 @@ pub mod workspace {
pub mod git {
use gpui::{action_with_deprecated_aliases, actions};
actions!(git, [CheckoutBranch, Switch, SelectRepo]);
actions!(git, [CheckoutBranch, Switch]);
action_with_deprecated_aliases!(git, Branch, ["branches::OpenRecent"]);
}

View File

@@ -75,7 +75,6 @@
- [CSS](./languages/css.md)
- [Dart](./languages/dart.md)
- [Deno](./languages/deno.md)
- [Diff](./languages/diff.md)
- [Docker](./languages/docker.md)
- [Elixir](./languages/elixir.md)
- [Elm](./languages/elm.md)

View File

@@ -1348,8 +1348,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files
"git_gutter": "tracked_files",
"inline_blame": {
"enabled": true
},
"hunk_style": "staged_hollow"
}
}
}
```
@@ -1382,26 +1381,6 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files
}
```
### Gutter Debounce
- Description: Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter.
- Setting: `gutter_debounce`
- Default: `null`
**Options**
`integer` values representing milliseconds
Example:
```json
{
"git": {
"gutter_debounce": 100
}
}
```
### Inline Git Blame
- Description: Whether or not to show git blame information inline, on the currently focused line.
@@ -1418,42 +1397,6 @@ Example:
}
```
### Hunk Style
- Description: What styling we should use for the diff hunks.
- Setting: `hunk_style`
- Default:
```json
{
"git": {
"hunk_style": "staged_hollow"
}
}
```
**Options**
1. Show the staged hunks faded out and with a border:
```json
{
"git": {
"hunk_style": "staged_hollow"
}
}
```
2. Show unstaged hunks faded out and with a border:
```json
{
"git": {
"hunk_style": "unstaged_hollow"
}
}
```
**Options**
1. Disable inline git blame:
@@ -2633,6 +2576,7 @@ Run the `theme selector: toggle` action in the command palette to see a current
"folder_icons": true,
"git_status": true,
"indent_size": 20,
"indent_guides": true,
"auto_reveal_entries": true,
"auto_fold_dirs": true,
"scrollbar": {

View File

@@ -1,9 +1,6 @@
# Language Support in Zed
Zed supports hundreds of programming languages and text formats.
Some work out-of-the box and others rely on 3rd party extensions.
> The ones included out-of-the-box, natively built into Zed, are marked with \*.
Zed supports hundreds of programming languages and text formats. Some work out-of-the-box and others rely on 3rd party extensions.
## Languages with Documentation
@@ -12,14 +9,13 @@ Some work out-of-the box and others rely on 3rd party extensions.
- [Astro](./languages/astro.md)
- [Bash](./languages/bash.md)
- [Biome](./languages/biome.md)
- [C](./languages/c.md) \*
- [C++](./languages/cpp.md) \*
- [C](./languages/c.md)
- [C++](./languages/cpp.md)
- [C#](./languages/csharp.md)
- [Clojure](./languages/clojure.md)
- [CSS](./languages/css.md) \*
- [CSS](./languages/css.md)
- [Dart](./languages/dart.md)
- [Deno](./languages/deno.md)
- [Diff](./languages/diff.md) \*
- [Docker](./languages/docker.md)
- [Elixir](./languages/elixir.md)
- [Elm](./languages/elm.md)
@@ -29,48 +25,48 @@ Some work out-of-the box and others rely on 3rd party extensions.
- [GDScript](./languages/gdscript.md)
- [Gleam](./languages/gleam.md)
- [GLSL](./languages/glsl.md)
- [Go](./languages/go.md) \*
- [Go](./languages/go.md)
- [Groovy](./languages/groovy.md)
- [Haskell](./languages/haskell.md)
- [Helm](./languages/helm.md)
- [HTML](./languages/html.md)
- [Java](./languages/java.md)
- [JavaScript](./languages/javascript.md) \*
- [JavaScript](./languages/javascript.md)
- [Julia](./languages/julia.md)
- [JSON](./languages/json.md) \*
- [JSON](./languages/json.md)
- [Jsonnet](./languages/jsonnet.md)
- [Kotlin](./languages/kotlin.md)
- [Lua](./languages/lua.md)
- [Luau](./languages/luau.md)
- [Makefile](./languages/makefile.md)
- [Markdown](./languages/markdown.md) \*
- [Markdown](./languages/markdown.md)
- [Nim](./languages/nim.md)
- [OCaml](./languages/ocaml.md)
- [PHP](./languages/php.md)
- [Prisma](./languages/prisma.md)
- [Proto](./languages/proto.md)
- [PureScript](./languages/purescript.md)
- [Python](./languages/python.md) \*
- [Python](./languages/python.md)
- [R](./languages/r.md)
- [Rego](./languages/rego.md)
- [ReStructuredText](./languages/rst.md)
- [Racket](./languages/racket.md)
- [Roc](./languages/roc.md)
- [Ruby](./languages/ruby.md)
- [Rust](./languages/rust.md) \* (Zed's written in Rust)
- [Rust](./languages/rust.md)
- [Scala](./languages/scala.md)
- [Scheme](./languages/scheme.md)
- [Shell Script](./languages/sh.md)
- [Svelte](./languages/svelte.md)
- [Swift](./languages/swift.md)
- [Tailwind CSS](./languages/tailwindcss.md) \*
- [Tailwind CSS](./languages/tailwindcss.md)
- [Terraform](./languages/terraform.md)
- [TOML](./languages/toml.md)
- [TypeScript](./languages/typescript.md) \*
- [TypeScript](./languages/typescript.md)
- [Uiua](./languages/uiua.md)
- [Vue](./languages/vue.md)
- [XML](./languages/xml.md)
- [YAML](./languages/yaml.md) \*
- [YAML](./languages/yaml.md)
- [Yarn](./languages/yarn.md)
- [Zig](./languages/zig.md)

View File

@@ -105,8 +105,8 @@ Treesitter is a powerful tool that Zed uses to understand the structure of your
| A comment | `g c` |
| An argument, or list item, etc. | `i a` |
| An argument, or list item, etc. (including trailing comma) | `a a` |
| Around an HTML-like tag | `a t` |
| Inside an HTML-like tag | `i t` |
| Around an HTML-like tag | `i a` |
| Inside an HTML-like tag | `i a` |
| The current indent level, and one line before and after | `a I` |
| The current indent level, and one line before | `a i` |
| The current indent level | `i i` |

View File

@@ -46,10 +46,8 @@ mkShell' {
passthru = { inherit (attrs) env; };
})).env; # exfil `env`; it's not in drvAttrs
in
(removeAttrs baseEnvs [
"LK_CUSTOM_WEBRTC" # download the staticlib during the build as usual
"ZED_UPDATE_EXPLANATION" # allow auto-updates
])
# unsetting this var so we download the staticlib during the build
(removeAttrs baseEnvs [ "LK_CUSTOM_WEBRTC" ])
// {
# note: different than `$FONTCONFIG_FILE` in `build.nix` this refers to relative paths
# outside the nix store instead of to `$src`