Compare commits

..

3 Commits

Author SHA1 Message Date
Richard Feldman
44501581ee Start on streaming JSON 2025-03-07 10:14:16 -05:00
Richard Feldman
ae95142cc8 Got basic chunk streaming working 2025-03-07 00:23:37 -05:00
Richard Feldman
b1b8d596b9 Use full_moon for lexing 2025-03-06 21:53:18 -05:00
87 changed files with 3351 additions and 4673 deletions

View File

@@ -26,6 +26,3 @@ rustflags = [
"-C",
"target-feature=+crt-static", # This fixes the linking issue when compiling livekit on Windows
]
[env]
MACOSX_DEPLOYMENT_TARGET = "10.15.7"

1411
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -171,6 +171,7 @@ members = [
"extensions/emmet",
"extensions/glsl",
"extensions/haskell",
"extensions/html",
"extensions/perplexity",
"extensions/proto",
@@ -451,7 +452,7 @@ livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "
], default-features = false }
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
markup5ever_rcdom = "0.3.0"
mlua = { version = "0.10", features = ["lua54", "vendored", "async", "send"] }
mlua = { version = "0.10", features = ["lua54", "vendored"] }
nanoid = "0.4"
nbformat = { version = "0.10.0" }
nix = "0.29"

View File

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.36197 1.67985C5.3748 1.41534 4.36011 2.00117 4.0956 2.98834L2.17985 10.138C1.91534 11.1252 2.50117 12.1399 3.48833 12.4044L10.638 14.3202C11.6252 14.5847 12.6399 13.9988 12.9044 13.0117L14.8202 5.86197C15.0847 4.8748 14.4988 3.86012 13.5117 3.59561L6.36197 1.67985ZM10.0457 4.58266C9.77896 4.51119 9.50479 4.66948 9.43332 4.93621L8.76235 7.44028C8.69088 7.70701 8.84917 7.98118 9.11591 8.05265L11.62 8.72362C11.8867 8.79509 12.1609 8.6368 12.2324 8.37006L12.9033 5.86599C12.9748 5.59926 12.8165 5.32509 12.5498 5.25362L10.0457 4.58266Z" fill="black"/>
</svg>

Before

Width:  |  Height:  |  Size: 707 B

View File

@@ -475,7 +475,9 @@
"ctrl-alt-delete": "editor::DeleteToNextSubwordEnd",
"ctrl-alt-d": "editor::DeleteToNextSubwordEnd",
"ctrl-alt-left": "editor::MoveToPreviousSubwordStart",
// "ctrl-alt-b": "editor::MoveToPreviousSubwordStart",
"ctrl-alt-right": "editor::MoveToNextSubwordEnd",
"ctrl-alt-f": "editor::MoveToNextSubwordEnd",
"ctrl-alt-shift-left": "editor::SelectToPreviousSubwordStart",
"ctrl-alt-shift-b": "editor::SelectToPreviousSubwordStart",
"ctrl-alt-shift-right": "editor::SelectToNextSubwordEnd",

View File

@@ -108,8 +108,8 @@
"cmd-right": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": true }],
"ctrl-e": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": false }],
"end": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": true }],
"cmd-up": "editor::MoveToBeginning",
"cmd-down": "editor::MoveToEnd",
"cmd-up": "editor::MoveToStartOfExcerpt",
"cmd-down": "editor::MoveToEndOfExcerpt",
"cmd-home": "editor::MoveToBeginning", // Typed via `cmd-fn-left`
"cmd-end": "editor::MoveToEnd", // Typed via `cmd-fn-right`
"shift-up": "editor::SelectUp",
@@ -124,8 +124,8 @@
"alt-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
"ctrl-shift-up": "editor::SelectToStartOfParagraph",
"ctrl-shift-down": "editor::SelectToEndOfParagraph",
"cmd-shift-up": "editor::SelectToBeginning",
"cmd-shift-down": "editor::SelectToEnd",
"cmd-shift-up": "editor::SelectToStartOfExcerpt",
"cmd-shift-down": "editor::SelectToEndOfExcerpt",
"cmd-a": "editor::SelectAll",
"cmd-l": "editor::SelectLine",
"cmd-shift-i": "editor::Format",
@@ -172,16 +172,6 @@
"alt-enter": "editor::OpenSelectionsInMultibuffer"
}
},
{
"context": "Editor && multibuffer",
"use_key_equivalents": true,
"bindings": {
"cmd-up": "editor::MoveToStartOfExcerpt",
"cmd-down": "editor::MoveToStartOfNextExcerpt",
"cmd-shift-up": "editor::SelectToStartOfExcerpt",
"cmd-shift-down": "editor::SelectToStartOfNextExcerpt"
}
},
{
"context": "Editor && mode == full && edit_prediction",
"use_key_equivalents": true,

View File

@@ -845,7 +845,7 @@
// "hunk_style": "transparent"
// 2. Show unstaged hunks with a pattern background:
// "hunk_style": "pattern"
"hunk_style": "staged_border"
"hunk_style": "transparent"
},
// Configuration for how direnv configuration should be loaded. May take 2 values:
// 1. Load direnv configuration using `direnv export json` directly.
@@ -1055,6 +1055,7 @@
// }
//
"file_types": {
"Plain Text": ["txt"],
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json"],
"Shell Script": [".env.*"]
},

View File

@@ -9,10 +9,7 @@ use gpui::{
};
use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId};
use lsp::LanguageServerName;
use project::{
EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
ProjectEnvironmentEvent, WorktreeId,
};
use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId};
use smallvec::SmallVec;
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
@@ -76,22 +73,7 @@ impl ActivityIndicator {
})
.detach();
cx.subscribe(
&project.read(cx).lsp_store(),
|_, _, event, cx| match event {
LspStoreEvent::LanguageServerUpdate { .. } => cx.notify(),
_ => {}
},
)
.detach();
cx.subscribe(
&project.read(cx).environment().clone(),
|_, _, event, cx| match event {
ProjectEnvironmentEvent::ErrorsUpdated => cx.notify(),
},
)
.detach();
cx.observe(&project, |_, _, cx| cx.notify()).detach();
if let Some(auto_updater) = auto_updater.as_ref() {
cx.observe(auto_updater, |_, _, cx| cx.notify()).detach();
@@ -222,7 +204,7 @@ impl ActivityIndicator {
message: error.0.clone(),
on_click: Some(Arc::new(move |this, window, cx| {
this.project.update(cx, |project, cx| {
project.remove_environment_error(worktree_id, cx);
project.remove_environment_error(cx, worktree_id);
});
window.dispatch_action(Box::new(workspace::OpenLog), cx);
})),

View File

@@ -38,7 +38,7 @@ use language_model::{
use language_model_selector::{LanguageModelSelector, LanguageModelSelectorPopoverMenu};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use project::{CodeAction, LspAction, ProjectTransaction};
use project::{ActionVariant, CodeAction, ProjectTransaction};
use prompt_store::PromptBuilder;
use rope::Rope;
use settings::{update_settings_file, Settings, SettingsStore};
@@ -3569,7 +3569,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
Task::ready(Ok(vec![CodeAction {
server_id: language::LanguageServerId(0),
range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end),
lsp_action: LspAction::Action(Box::new(lsp::CodeAction {
lsp_action: ActionVariant::Action(Box::new(lsp::CodeAction {
title: "Fix with Assistant".into(),
..Default::default()
})),

View File

@@ -1,11 +1,12 @@
use std::sync::Arc;
use assistant_tool::ToolWorkingSet;
use collections::HashMap;
use editor::{Editor, MultiBuffer};
use gpui::{
list, AbsoluteLength, AnyElement, App, ClickEvent, DefiniteLength, EdgesRefinement, Empty,
Entity, Focusable, Length, ListAlignment, ListOffset, ListState, StyleRefinement, Subscription,
Task, TextStyleRefinement, UnderlineStyle,
Task, TextStyleRefinement, UnderlineStyle, WeakEntity,
};
use language::{Buffer, LanguageRegistry};
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
@@ -14,6 +15,7 @@ use settings::Settings as _;
use theme::ThemeSettings;
use ui::{prelude::*, Disclosure, KeyBinding};
use util::ResultExt as _;
use workspace::Workspace;
use crate::thread::{MessageId, RequestKind, Thread, ThreadError, ThreadEvent};
use crate::thread_store::ThreadStore;
@@ -21,7 +23,9 @@ use crate::tool_use::{ToolUse, ToolUseStatus};
use crate::ui::ContextPill;
pub struct ActiveThread {
workspace: WeakEntity<Workspace>,
language_registry: Arc<LanguageRegistry>,
tools: Arc<ToolWorkingSet>,
thread_store: Entity<ThreadStore>,
thread: Entity<Thread>,
save_thread_task: Option<Task<()>>,
@@ -42,7 +46,9 @@ impl ActiveThread {
pub fn new(
thread: Entity<Thread>,
thread_store: Entity<ThreadStore>,
workspace: WeakEntity<Workspace>,
language_registry: Arc<LanguageRegistry>,
tools: Arc<ToolWorkingSet>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -52,7 +58,9 @@ impl ActiveThread {
];
let mut this = Self {
workspace,
language_registry,
tools,
thread_store,
thread: thread.clone(),
save_thread_task: None,
@@ -165,8 +173,6 @@ impl ActiveThread {
text_style.refine(&TextStyleRefinement {
font_family: Some(theme_settings.ui_font.family.clone()),
font_fallbacks: theme_settings.ui_font.fallbacks.clone(),
font_features: Some(theme_settings.ui_font.features.clone()),
font_size: Some(ui_font_size.into()),
color: Some(cx.theme().colors().text),
..Default::default()
@@ -201,8 +207,6 @@ impl ActiveThread {
},
text: Some(TextStyleRefinement {
font_family: Some(theme_settings.buffer_font.family.clone()),
font_fallbacks: theme_settings.buffer_font.fallbacks.clone(),
font_features: Some(theme_settings.buffer_font.features.clone()),
font_size: Some(buffer_font_size.into()),
..Default::default()
}),
@@ -210,8 +214,6 @@ impl ActiveThread {
},
inline_code: TextStyleRefinement {
font_family: Some(theme_settings.buffer_font.family.clone()),
font_fallbacks: theme_settings.buffer_font.fallbacks.clone(),
font_features: Some(theme_settings.buffer_font.features.clone()),
font_size: Some(buffer_font_size.into()),
background_color: Some(colors.editor_foreground.opacity(0.1)),
..Default::default()
@@ -292,16 +294,46 @@ impl ActiveThread {
cx.notify();
}
ThreadEvent::UsePendingTools => {
self.thread.update(cx, |thread, cx| {
thread.use_pending_tools(cx);
});
let pending_tool_uses = self
.thread
.read(cx)
.pending_tool_uses()
.into_iter()
.filter(|tool_use| tool_use.status.is_idle())
.cloned()
.collect::<Vec<_>>();
for tool_use in pending_tool_uses {
if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
let task = tool.run(tool_use.input, self.workspace.clone(), window, cx);
self.thread.update(cx, |thread, cx| {
thread.insert_tool_output(tool_use.id.clone(), task, cx);
});
}
}
}
ThreadEvent::ToolFinished { .. } => {
if self.thread.read(cx).all_tools_finished() {
let all_tools_finished = self
.thread
.read(cx)
.pending_tool_uses()
.into_iter()
.all(|tool_use| tool_use.status.is_error());
if all_tools_finished {
let model_registry = LanguageModelRegistry::read_global(cx);
if let Some(model) = model_registry.active_model() {
self.thread.update(cx, |thread, cx| {
thread.send_tool_results_to_model(model, cx);
// Insert a user message to contain the tool results.
thread.insert_user_message(
// TODO: Sending up a user message without any content results in the model sending back
// responses that also don't have any content. We currently don't handle this case well,
// so for now we provide some text to keep the model on track.
"Here are the tool results.",
Vec::new(),
cx,
);
thread.send_to_model(model, RequestKind::Chat, true, cx);
});
}
}

View File

@@ -92,6 +92,7 @@ pub struct AssistantPanel {
context_editor: Option<Entity<ContextEditor>>,
configuration: Option<Entity<AssistantConfiguration>>,
configuration_subscription: Option<Subscription>,
tools: Arc<ToolWorkingSet>,
local_timezone: UtcOffset,
active_view: ActiveView,
history_store: Entity<HistoryStore>,
@@ -132,7 +133,7 @@ impl AssistantPanel {
log::info!("[assistant2-debug] finished initializing ContextStore");
workspace.update_in(&mut cx, |workspace, window, cx| {
cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx))
cx.new(|cx| Self::new(workspace, thread_store, context_store, tools, window, cx))
})
})
}
@@ -141,6 +142,7 @@ impl AssistantPanel {
workspace: &Workspace,
thread_store: Entity<ThreadStore>,
context_store: Entity<assistant_context_editor::ContextStore>,
tools: Arc<ToolWorkingSet>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -168,8 +170,8 @@ impl AssistantPanel {
Self {
active_view: ActiveView::Thread,
workspace,
project: project.clone(),
workspace: workspace.clone(),
project,
fs: fs.clone(),
language_registry: language_registry.clone(),
thread_store: thread_store.clone(),
@@ -177,7 +179,9 @@ impl AssistantPanel {
ActiveThread::new(
thread.clone(),
thread_store.clone(),
workspace,
language_registry,
tools.clone(),
window,
cx,
)
@@ -187,6 +191,7 @@ impl AssistantPanel {
context_editor: None,
configuration: None,
configuration_subscription: None,
tools,
local_timezone: UtcOffset::from_whole_seconds(
chrono::Local::now().offset().local_minus_utc(),
)
@@ -241,7 +246,9 @@ impl AssistantPanel {
ActiveThread::new(
thread.clone(),
self.thread_store.clone(),
self.workspace.clone(),
self.language_registry.clone(),
self.tools.clone(),
window,
cx,
)
@@ -374,7 +381,9 @@ impl AssistantPanel {
ActiveThread::new(
thread.clone(),
this.thread_store.clone(),
this.workspace.clone(),
this.language_registry.clone(),
this.tools.clone(),
window,
cx,
)

View File

@@ -27,7 +27,7 @@ use language::{Buffer, Point, Selection, TransactionId};
use language_model::{report_assistant_event, LanguageModelRegistry};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use project::LspAction;
use project::ActionVariant;
use project::{CodeAction, ProjectTransaction};
use prompt_store::PromptBuilder;
use settings::{Settings, SettingsStore};
@@ -1728,7 +1728,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
Task::ready(Ok(vec![CodeAction {
server_id: language::LanguageServerId(0),
range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end),
lsp_action: LspAction::Action(Box::new(lsp::CodeAction {
lsp_action: ActionVariant::Action(Box::new(lsp::CodeAction {
title: "Fix with Assistant".into(),
..Default::default()
})),

View File

@@ -389,7 +389,6 @@ impl Render for MessageEditor {
let text_style = TextStyle {
color: cx.theme().colors().text,
font_family: settings.ui_font.family.clone(),
font_fallbacks: settings.ui_font.fallbacks.clone(),
font_features: settings.ui_font.features.clone(),
font_size: font_size.into(),
font_weight: settings.ui_font.weight,

View File

@@ -5,14 +5,13 @@ use assistant_tool::ToolWorkingSet;
use chrono::{DateTime, Utc};
use collections::{BTreeMap, HashMap, HashSet};
use futures::StreamExt as _;
use gpui::{App, Context, Entity, EventEmitter, SharedString, Task};
use gpui::{App, Context, EventEmitter, SharedString, Task};
use language_model::{
LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
Role, StopReason,
};
use project::Project;
use serde::{Deserialize, Serialize};
use util::{post_inc, TryFutureExt as _};
use uuid::Uuid;
@@ -72,17 +71,12 @@ pub struct Thread {
context_by_message: HashMap<MessageId, Vec<ContextId>>,
completion_count: usize,
pending_completions: Vec<PendingCompletion>,
project: Entity<Project>,
tools: Arc<ToolWorkingSet>,
tool_use: ToolUseState,
}
impl Thread {
pub fn new(
project: Entity<Project>,
tools: Arc<ToolWorkingSet>,
_cx: &mut Context<Self>,
) -> Self {
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
Self {
id: ThreadId::new(),
updated_at: Utc::now(),
@@ -94,7 +88,6 @@ impl Thread {
context_by_message: HashMap::default(),
completion_count: 0,
pending_completions: Vec::new(),
project,
tools,
tool_use: ToolUseState::new(),
}
@@ -103,7 +96,6 @@ impl Thread {
pub fn from_saved(
id: ThreadId,
saved: SavedThread,
project: Entity<Project>,
tools: Arc<ToolWorkingSet>,
_cx: &mut Context<Self>,
) -> Self {
@@ -135,7 +127,6 @@ impl Thread {
context_by_message: HashMap::default(),
completion_count: 0,
pending_completions: Vec::new(),
project,
tools,
tool_use,
}
@@ -202,15 +193,6 @@ impl Thread {
self.tool_use.pending_tool_uses()
}
/// Returns whether all of the tool uses have finished running.
pub fn all_tools_finished(&self) -> bool {
// If the only pending tool uses left are the ones with errors, then that means that we've finished running all
// of the pending tools.
self.pending_tool_uses()
.into_iter()
.all(|tool_use| tool_use.status.is_error())
}
pub fn tool_uses_for_message(&self, id: MessageId) -> Vec<ToolUse> {
self.tool_use.tool_uses_for_message(id)
}
@@ -568,23 +550,6 @@ impl Thread {
});
}
pub fn use_pending_tools(&mut self, cx: &mut Context<Self>) {
let pending_tool_uses = self
.pending_tool_uses()
.into_iter()
.filter(|tool_use| tool_use.status.is_idle())
.cloned()
.collect::<Vec<_>>();
for tool_use in pending_tool_uses {
if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
let task = tool.run(tool_use.input, self.project.clone(), cx);
self.insert_tool_output(tool_use.id.clone(), task, cx);
}
}
}
pub fn insert_tool_output(
&mut self,
tool_use_id: LanguageModelToolUseId,
@@ -611,23 +576,6 @@ impl Thread {
.run_pending_tool(tool_use_id, insert_output_task);
}
pub fn send_tool_results_to_model(
&mut self,
model: Arc<dyn LanguageModel>,
cx: &mut Context<Self>,
) {
// Insert a user message to contain the tool results.
self.insert_user_message(
// TODO: Sending up a user message without any content results in the model sending back
// responses that also don't have any content. We currently don't handle this case well,
// so for now we provide some text to keep the model on track.
"Here are the tool results.",
Vec::new(),
cx,
);
self.send_to_model(model, RequestKind::Chat, true, cx);
}
/// Cancels the last pending completion, if there are any pending.
///
/// Returns whether a completion was canceled.

View File

@@ -26,6 +26,7 @@ pub fn init(cx: &mut App) {
}
pub struct ThreadStore {
#[allow(unused)]
project: Entity<Project>,
tools: Arc<ToolWorkingSet>,
context_server_manager: Entity<ContextServerManager>,
@@ -77,7 +78,7 @@ impl ThreadStore {
}
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
cx.new(|cx| Thread::new(self.project.clone(), self.tools.clone(), cx))
cx.new(|cx| Thread::new(self.tools.clone(), cx))
}
pub fn open_thread(
@@ -95,15 +96,7 @@ impl ThreadStore {
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
this.update(&mut cx, |this, cx| {
cx.new(|cx| {
Thread::from_saved(
id.clone(),
thread,
this.project.clone(),
this.tools.clone(),
cx,
)
})
cx.new(|cx| Thread::from_saved(id.clone(), thread, this.tools.clone(), cx))
})
})
}

View File

@@ -104,53 +104,49 @@ impl ContextStore {
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
let this =
cx.new(|cx: &mut Context<Self>| {
let context_server_factory_registry =
ContextServerFactoryRegistry::default_global(cx);
let context_server_manager = cx.new(|cx| {
ContextServerManager::new(
context_server_factory_registry,
project.clone(),
cx,
)
});
let mut this = Self {
contexts: Vec::new(),
contexts_metadata: Vec::new(),
context_server_manager,
context_server_slash_command_ids: HashMap::default(),
host_contexts: Vec::new(),
fs,
languages,
slash_commands,
telemetry,
_watch_updates: cx.spawn(|this, mut cx| {
async move {
while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
}
anyhow::Ok(())
let this = cx.new(|cx: &mut Context<Self>| {
let context_server_factory_registry =
ContextServerFactoryRegistry::default_global(cx);
let context_server_manager = cx.new(|cx| {
ContextServerManager::new(context_server_factory_registry, project.clone(), cx)
});
let mut this = Self {
contexts: Vec::new(),
contexts_metadata: Vec::new(),
context_server_manager,
context_server_slash_command_ids: HashMap::default(),
host_contexts: Vec::new(),
fs,
languages,
slash_commands,
telemetry,
_watch_updates: cx.spawn(|this, mut cx| {
async move {
while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
}
.log_err()
}),
client_subscription: None,
_project_subscriptions: vec![
cx.subscribe(&project, Self::handle_project_event)
],
project_is_shared: false,
client: project.read(cx).client(),
project: project.clone(),
prompt_builder,
};
this.handle_project_shared(project.clone(), cx);
this.synchronize_contexts(cx);
this.register_context_server_handlers(cx);
this.reload(cx).detach_and_log_err(cx);
this
})?;
anyhow::Ok(())
}
.log_err()
}),
client_subscription: None,
_project_subscriptions: vec![
cx.observe(&project, Self::handle_project_changed),
cx.subscribe(&project, Self::handle_project_event),
],
project_is_shared: false,
client: project.read(cx).client(),
project: project.clone(),
prompt_builder,
};
this.handle_project_changed(project.clone(), cx);
this.synchronize_contexts(cx);
this.register_context_server_handlers(cx);
this.reload(cx).detach_and_log_err(cx);
this
})?;
Ok(this)
})
@@ -292,7 +288,7 @@ impl ContextStore {
})?
}
fn handle_project_shared(&mut self, _: Entity<Project>, cx: &mut Context<Self>) {
fn handle_project_changed(&mut self, _: Entity<Project>, cx: &mut Context<Self>) {
let is_shared = self.project.read(cx).is_shared();
let was_shared = mem::replace(&mut self.project_is_shared, is_shared);
if is_shared == was_shared {
@@ -322,14 +318,11 @@ impl ContextStore {
fn handle_project_event(
&mut self,
project: Entity<Project>,
_: Entity<Project>,
event: &project::Event,
cx: &mut Context<Self>,
) {
match event {
project::Event::RemoteIdChanged(_) => {
self.handle_project_shared(project, cx);
}
project::Event::Reshared => {
self.advertise_contexts(cx);
}

View File

@@ -5,9 +5,9 @@ use assistant_slash_command::{AfterCompletion, SlashCommandLine, SlashCommandWor
use editor::{CompletionProvider, Editor};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity, Window};
use language::{Anchor, Buffer, ToPoint};
use language::{Anchor, Buffer, LanguageServerId, ToPoint};
use parking_lot::Mutex;
use project::{lsp_store::CompletionDocumentation, CompletionIntent, CompletionSource};
use project::{lsp_store::CompletionDocumentation, CompletionIntent};
use rope::Point;
use std::{
cell::RefCell,
@@ -125,8 +125,10 @@ impl SlashCommandCompletionProvider {
)),
new_text,
label: command.label(cx),
server_id: LanguageServerId(0),
lsp_completion: Default::default(),
confirm,
source: CompletionSource::Custom,
resolved: true,
})
})
.collect()
@@ -223,8 +225,10 @@ impl SlashCommandCompletionProvider {
label: new_argument.label,
new_text,
documentation: None,
server_id: LanguageServerId(0),
lsp_completion: Default::default(),
confirm,
source: CompletionSource::Custom,
resolved: true,
}
})
.collect())

View File

@@ -17,6 +17,6 @@ collections.workspace = true
derive_more.workspace = true
gpui.workspace = true
parking_lot.workspace = true
project.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true

View File

@@ -4,8 +4,8 @@ mod tool_working_set;
use std::sync::Arc;
use anyhow::Result;
use gpui::{App, Entity, Task};
use project::Project;
use gpui::{App, Task, WeakEntity, Window};
use workspace::Workspace;
pub use crate::tool_registry::*;
pub use crate::tool_working_set::*;
@@ -31,7 +31,8 @@ pub trait Tool: 'static + Send + Sync {
fn run(
self: Arc<Self>,
input: serde_json::Value,
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
window: &mut Window,
cx: &mut App,
) -> Task<Result<String>>;
}

View File

@@ -20,3 +20,4 @@ project.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true

View File

@@ -1,11 +1,11 @@
use std::sync::Arc;
use anyhow::Result;
use anyhow::{anyhow, Result};
use assistant_tool::Tool;
use gpui::{App, Entity, Task};
use project::Project;
use gpui::{App, Task, WeakEntity, Window};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use workspace::Workspace;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ListWorktreesToolInput {}
@@ -34,9 +34,16 @@ impl Tool for ListWorktreesTool {
fn run(
self: Arc<Self>,
_input: serde_json::Value,
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
_window: &mut Window,
cx: &mut App,
) -> Task<Result<String>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace dropped")));
};
let project = workspace.read(cx).project().clone();
cx.spawn(|cx| async move {
cx.update(|cx| {
#[derive(Debug, Serialize)]

View File

@@ -3,8 +3,7 @@ use std::sync::Arc;
use anyhow::{anyhow, Result};
use assistant_tool::Tool;
use chrono::{Local, Utc};
use gpui::{App, Entity, Task};
use project::Project;
use gpui::{App, Task, WeakEntity, Window};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -42,7 +41,8 @@ impl Tool for NowTool {
fn run(
self: Arc<Self>,
input: serde_json::Value,
_project: Entity<Project>,
_workspace: WeakEntity<workspace::Workspace>,
_window: &mut Window,
_cx: &mut App,
) -> Task<Result<String>> {
let input: NowToolInput = match serde_json::from_value(input) {

View File

@@ -3,10 +3,11 @@ use std::sync::Arc;
use anyhow::{anyhow, Result};
use assistant_tool::Tool;
use gpui::{App, Entity, Task};
use project::{Project, ProjectPath, WorktreeId};
use gpui::{App, Task, WeakEntity, Window};
use project::{ProjectPath, WorktreeId};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use workspace::Workspace;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ReadFileToolInput {
@@ -37,14 +38,20 @@ impl Tool for ReadFileTool {
fn run(
self: Arc<Self>,
input: serde_json::Value,
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
_window: &mut Window,
cx: &mut App,
) -> Task<Result<String>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace dropped")));
};
let input = match serde_json::from_value::<ReadFileToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
let project = workspace.read(cx).project().clone();
let project_path = ProjectPath {
worktree_id: WorktreeId::from_usize(input.worktree_id),
path: input.path,

View File

@@ -10,9 +10,9 @@ use gpui::{
};
use language::{
language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry,
ToOffset,
LanguageServerId, ToOffset,
};
use project::{search::SearchQuery, Completion, CompletionSource};
use project::{search::SearchQuery, Completion};
use settings::Settings;
use std::{
cell::RefCell,
@@ -309,9 +309,11 @@ impl MessageEditor {
old_range: range.clone(),
new_text,
label,
confirm: None,
documentation: None,
source: CompletionSource::Custom,
server_id: LanguageServerId(0), // TODO: Make this optional or something?
lsp_completion: Default::default(), // TODO: Make this optional or something?
confirm: None,
resolved: true,
}
})
.collect()

View File

@@ -78,7 +78,6 @@ pub struct ComponentId(pub &'static str);
#[derive(Clone)]
pub struct ComponentMetadata {
id: ComponentId,
name: SharedString,
scope: Option<ComponentScope>,
description: Option<SharedString>,
@@ -86,10 +85,6 @@ pub struct ComponentMetadata {
}
impl ComponentMetadata {
pub fn id(&self) -> ComponentId {
self.id.clone()
}
pub fn name(&self) -> SharedString {
self.name.clone()
}
@@ -161,11 +156,9 @@ pub fn components() -> AllComponents {
for (ref scope, name, description) in &data.components {
let preview = data.previews.get(name).cloned();
let component_name = SharedString::new_static(name);
let id = ComponentId(name);
all_components.insert(
id.clone(),
ComponentId(name),
ComponentMetadata {
id,
name: component_name,
scope: scope.clone(),
description: description.map(Into::into),

View File

@@ -23,4 +23,3 @@ project.workspace = true
ui.workspace = true
workspace.workspace = true
notifications.workspace = true
collections.workspace = true

View File

@@ -6,14 +6,12 @@ use std::iter::Iterator;
use std::sync::Arc;
use client::UserStore;
use component::{components, ComponentId, ComponentMetadata};
use component::{components, ComponentMetadata};
use gpui::{
list, prelude::*, uniform_list, App, Entity, EventEmitter, FocusHandle, Focusable, Task,
WeakEntity, Window,
};
use collections::HashMap;
use gpui::{ListState, ScrollHandle, UniformListScrollHandle};
use languages::LanguageRegistry;
use notifications::status_toast::{StatusToast, ToastIcon};
@@ -61,8 +59,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
}
enum PreviewEntry {
AllComponents,
Separator,
Component(ComponentMetadata),
SectionHeader(SharedString),
}
@@ -79,22 +75,13 @@ impl From<SharedString> for PreviewEntry {
}
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
enum PreviewPage {
#[default]
AllComponents,
Component(ComponentId),
}
struct ComponentPreview {
focus_handle: FocusHandle,
_view_scroll_handle: ScrollHandle,
nav_scroll_handle: UniformListScrollHandle,
component_map: HashMap<ComponentId, ComponentMetadata>,
active_page: PreviewPage,
components: Vec<ComponentMetadata>,
component_list: ListState,
cursor_index: usize,
selected_index: usize,
language_registry: Arc<LanguageRegistry>,
workspace: WeakEntity<Workspace>,
user_store: Entity<UserStore>,
@@ -108,25 +95,22 @@ impl ComponentPreview {
selected_index: impl Into<Option<usize>>,
cx: &mut Context<Self>,
) -> Self {
let sorted_components = components().all_sorted();
let components = components().all_sorted();
let initial_length = components.len();
let selected_index = selected_index.into().unwrap_or(0);
let component_list = ListState::new(
sorted_components.len(),
gpui::ListAlignment::Top,
px(1500.0),
{
let component_list =
ListState::new(initial_length, gpui::ListAlignment::Top, px(1500.0), {
let this = cx.entity().downgrade();
move |ix, window: &mut Window, cx: &mut App| {
this.update(cx, |this, cx| {
let component = this.get_component(ix);
this.render_preview(&component, window, cx)
this.render_preview(ix, &component, window, cx)
.into_any_element()
})
.unwrap()
}
},
);
});
let mut component_preview = Self {
focus_handle: cx.focus_handle(),
@@ -135,15 +119,13 @@ impl ComponentPreview {
language_registry,
user_store,
workspace,
active_page: PreviewPage::AllComponents,
component_map: components().0,
components: sorted_components,
components,
component_list,
cursor_index: selected_index,
selected_index,
};
if component_preview.cursor_index > 0 {
component_preview.scroll_to_preview(component_preview.cursor_index, cx);
if component_preview.selected_index > 0 {
component_preview.scroll_to_preview(component_preview.selected_index, cx);
}
component_preview.update_component_list(cx);
@@ -153,12 +135,7 @@ impl ComponentPreview {
fn scroll_to_preview(&mut self, ix: usize, cx: &mut Context<Self>) {
self.component_list.scroll_to_reveal_item(ix);
self.cursor_index = ix;
cx.notify();
}
fn set_active_page(&mut self, page: PreviewPage, cx: &mut Context<Self>) {
self.active_page = page;
self.selected_index = ix;
cx.notify();
}
@@ -169,6 +146,7 @@ impl ComponentPreview {
fn scope_ordered_entries(&self) -> Vec<PreviewEntry> {
use std::collections::HashMap;
// Group components by scope
let mut scope_groups: HashMap<Option<ComponentScope>, Vec<ComponentMetadata>> =
HashMap::default();
@@ -179,12 +157,15 @@ impl ComponentPreview {
.push(component.clone());
}
// Sort components within each scope by name
for components in scope_groups.values_mut() {
components.sort_by_key(|c| c.name().to_lowercase());
}
// Build entries with scopes in a defined order
let mut entries = Vec::new();
// Define scope order (we want Unknown at the end)
let known_scopes = [
ComponentScope::Layout,
ComponentScope::Input,
@@ -194,16 +175,15 @@ impl ComponentPreview {
ComponentScope::VersionControl,
];
// Always show all components first
entries.push(PreviewEntry::AllComponents);
entries.push(PreviewEntry::Separator);
// First add components with known scopes
for scope in known_scopes.iter() {
let scope_key = Some(scope.clone());
if let Some(components) = scope_groups.remove(&scope_key) {
if !components.is_empty() {
// Add section header
entries.push(PreviewEntry::SectionHeader(scope.to_string().into()));
// Add all components under this scope
for component in components {
entries.push(PreviewEntry::Component(component));
}
@@ -211,13 +191,16 @@ impl ComponentPreview {
}
}
// Handle components with Unknown scope
for (scope, components) in &scope_groups {
if let Some(ComponentScope::Unknown(_)) = scope {
if !components.is_empty() {
// Add the unknown scope header
if let Some(scope_value) = scope {
entries.push(PreviewEntry::SectionHeader(scope_value.to_string().into()));
}
// Add all components under this unknown scope
for component in components {
entries.push(PreviewEntry::Component(component.clone()));
}
@@ -225,9 +208,9 @@ impl ComponentPreview {
}
}
// Handle components with no scope
if let Some(components) = scope_groups.get(&None) {
if !components.is_empty() {
entries.push(PreviewEntry::Separator);
entries.push(PreviewEntry::SectionHeader("Uncategorized".into()));
for component in components {
@@ -243,42 +226,22 @@ impl ComponentPreview {
&self,
ix: usize,
entry: &PreviewEntry,
selected: bool,
cx: &Context<Self>,
) -> impl IntoElement {
match entry {
PreviewEntry::Component(component_metadata) => {
let id = component_metadata.id();
let selected = self.active_page == PreviewPage::Component(id.clone());
ListItem::new(ix)
.child(Label::new(component_metadata.name().clone()).color(Color::Default))
.selectable(true)
.toggle_state(selected)
.inset(true)
.on_click(cx.listener(move |this, _, _, cx| {
let id = id.clone();
this.set_active_page(PreviewPage::Component(id), cx);
}))
.into_any_element()
}
PreviewEntry::Component(component_metadata) => ListItem::new(ix)
.child(Label::new(component_metadata.name().clone()).color(Color::Default))
.selectable(true)
.toggle_state(selected)
.inset(true)
.on_click(cx.listener(move |this, _, _, cx| {
this.scroll_to_preview(ix, cx);
}))
.into_any_element(),
PreviewEntry::SectionHeader(shared_string) => ListSubHeader::new(shared_string)
.inset(true)
.into_any_element(),
PreviewEntry::AllComponents => {
let selected = self.active_page == PreviewPage::AllComponents;
ListItem::new(ix)
.child(Label::new("All Components").color(Color::Default))
.selectable(true)
.toggle_state(selected)
.inset(true)
.on_click(cx.listener(move |this, _, _, cx| {
this.set_active_page(PreviewPage::AllComponents, cx);
}))
.into_any_element()
}
PreviewEntry::Separator => ListItem::new(ix)
.child(h_flex().pt_3().child(Divider::horizontal_dashed()))
.into_any_element(),
}
}
@@ -297,13 +260,11 @@ impl ComponentPreview {
weak_entity
.update(cx, |this, cx| match entry {
PreviewEntry::Component(component) => this
.render_preview(component, window, cx)
.render_preview(ix, component, window, cx)
.into_any_element(),
PreviewEntry::SectionHeader(shared_string) => this
.render_scope_header(ix, shared_string.clone(), window, cx)
.into_any_element(),
PreviewEntry::AllComponents => div().w_full().h_0().into_any_element(),
PreviewEntry::Separator => div().w_full().h_0().into_any_element(),
})
.unwrap()
},
@@ -329,6 +290,7 @@ impl ComponentPreview {
fn render_preview(
&self,
_ix: usize,
component: &ComponentMetadata,
window: &mut Window,
cx: &mut App,
@@ -379,44 +341,6 @@ impl ComponentPreview {
.into_any_element()
}
fn render_all_components(&self) -> impl IntoElement {
v_flex()
.id("component-list")
.px_8()
.pt_4()
.size_full()
.child(
list(self.component_list.clone())
.flex_grow()
.with_sizing_behavior(gpui::ListSizingBehavior::Auto),
)
}
fn render_component_page(
&mut self,
component_id: &ComponentId,
window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
let component = self.component_map.get(&component_id);
if let Some(component) = component {
v_flex()
.w_full()
.flex_initial()
.min_h_full()
.child(self.render_preview(component, window, cx))
.into_any_element()
} else {
v_flex()
.size_full()
.items_center()
.justify_center()
.child("Component not found")
.into_any_element()
}
}
fn test_status_toast(&self, window: &mut Window, cx: &mut Context<Self>) {
if let Some(workspace) = self.workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
@@ -439,9 +363,8 @@ impl ComponentPreview {
}
impl Render for ComponentPreview {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
fn render(&mut self, _window: &mut Window, cx: &mut Context<'_, Self>) -> impl IntoElement {
let sidebar_entries = self.scope_ordered_entries();
let active_page = self.active_page.clone();
h_flex()
.id("component-preview")
@@ -463,7 +386,12 @@ impl Render for ComponentPreview {
move |this, range, _window, cx| {
range
.map(|ix| {
this.render_sidebar_entry(ix, &sidebar_entries[ix], cx)
this.render_sidebar_entry(
ix,
&sidebar_entries[ix],
ix == this.selected_index,
cx,
)
})
.collect()
},
@@ -487,12 +415,18 @@ impl Render for ComponentPreview {
),
),
)
.child(match active_page {
PreviewPage::AllComponents => self.render_all_components().into_any_element(),
PreviewPage::Component(id) => self
.render_component_page(&id, window, cx)
.into_any_element(),
})
.child(
v_flex()
.id("component-list")
.px_8()
.pt_4()
.size_full()
.child(
list(self.component_list.clone())
.flex_grow()
.with_sizing_behavior(gpui::ListSizingBehavior::Auto),
),
)
}
}
@@ -531,7 +465,7 @@ impl Item for ComponentPreview {
let language_registry = self.language_registry.clone();
let user_store = self.user_store.clone();
let weak_workspace = self.workspace.clone();
let selected_index = self.cursor_index;
let selected_index = self.selected_index;
Some(cx.new(|cx| {
Self::new(

View File

@@ -31,3 +31,4 @@ settings.workspace = true
smol.workspace = true
url = { workspace = true, features = ["serde"] }
util.workspace = true
workspace.workspace = true

View File

@@ -1,9 +1,8 @@
use std::sync::Arc;
use anyhow::{anyhow, bail, Result};
use anyhow::{anyhow, bail};
use assistant_tool::Tool;
use gpui::{App, Entity, Task};
use project::Project;
use gpui::{App, Entity, Task, Window};
use crate::manager::ContextServerManager;
use crate::types;
@@ -50,11 +49,12 @@ impl Tool for ContextServerTool {
}
fn run(
self: Arc<Self>,
self: std::sync::Arc<Self>,
input: serde_json::Value,
_project: Entity<Project>,
_workspace: gpui::WeakEntity<workspace::Workspace>,
_: &mut Window,
cx: &mut App,
) -> Task<Result<String>> {
) -> gpui::Task<gpui::Result<String>> {
if let Some(server) = self.server_manager.read(cx).get_server(&self.server_id) {
cx.foreground_executor().spawn({
let tool_name = self.tool.name.clone();

View File

@@ -340,9 +340,7 @@ gpui::actions!(
MoveToPreviousWordStart,
MoveToStartOfParagraph,
MoveToStartOfExcerpt,
MoveToStartOfNextExcerpt,
MoveToEndOfExcerpt,
MoveToEndOfPreviousExcerpt,
MoveUp,
Newline,
NewlineAbove,
@@ -380,9 +378,7 @@ gpui::actions!(
SelectAll,
SelectAllMatches,
SelectToStartOfExcerpt,
SelectToStartOfNextExcerpt,
SelectToEndOfExcerpt,
SelectToEndOfPreviousExcerpt,
SelectDown,
SelectEnclosingSymbol,
SelectLargerSyntaxNode,

View File

@@ -6,11 +6,11 @@ use gpui::{
};
use language::Buffer;
use language::CodeLabel;
use lsp::LanguageServerId;
use markdown::Markdown;
use multi_buffer::{Anchor, ExcerptId};
use ordered_float::OrderedFloat;
use project::lsp_store::CompletionDocumentation;
use project::CompletionSource;
use project::{CodeAction, Completion, TaskSourceKind};
use std::{
@@ -233,9 +233,11 @@ impl CompletionsMenu {
runs: Default::default(),
filter_range: Default::default(),
},
server_id: LanguageServerId(usize::MAX),
documentation: None,
lsp_completion: Default::default(),
confirm: None,
source: CompletionSource::Custom,
resolved: true,
})
.collect();
@@ -498,12 +500,7 @@ impl CompletionsMenu {
// Ignore font weight for syntax highlighting, as we'll use it
// for fuzzy matches.
highlight.font_weight = None;
if completion
.source
.lsp_completion()
.and_then(|lsp_completion| lsp_completion.deprecated)
.unwrap_or(false)
{
if completion.lsp_completion.deprecated.unwrap_or(false) {
highlight.strikethrough = Some(StrikethroughStyle {
thickness: 1.0.into(),
..Default::default()
@@ -711,10 +708,7 @@ impl CompletionsMenu {
let completion = &completions[mat.candidate_id];
let sort_key = completion.sort_key();
let sort_text = completion
.source
.lsp_completion()
.and_then(|lsp_completion| lsp_completion.sort_text.as_deref());
let sort_text = completion.lsp_completion.sort_text.as_deref();
let score = Reverse(OrderedFloat(mat.score));
if mat.score >= 0.2 {

View File

@@ -138,9 +138,8 @@ use multi_buffer::{
use project::{
lsp_store::{CompletionDocumentation, FormatTrigger, LspFormatTarget, OpenLspBufferHandle},
project_settings::{GitGutterSetting, ProjectSettings},
CodeAction, Completion, CompletionIntent, CompletionSource, DocumentHighlight, InlayHint,
Location, LocationLink, PrepareRenameResponse, Project, ProjectItem, ProjectTransaction,
TaskSourceKind,
CodeAction, Completion, CompletionIntent, DocumentHighlight, InlayHint, Location, LocationLink,
PrepareRenameResponse, Project, ProjectItem, ProjectTransaction, TaskSourceKind,
};
use rand::prelude::*;
use rpc::{proto::*, ErrorExt};
@@ -1251,6 +1250,11 @@ impl Editor {
let mut project_subscriptions = Vec::new();
if mode == EditorMode::Full {
if let Some(project) = project.as_ref() {
if buffer.read(cx).is_singleton() {
project_subscriptions.push(cx.observe_in(project, window, |_, _, _, cx| {
cx.emit(EditorEvent::TitleChanged);
}));
}
project_subscriptions.push(cx.subscribe_in(
project,
window,
@@ -1573,16 +1577,13 @@ impl Editor {
}
}
if let Some(singleton_buffer) = self.buffer.read(cx).as_singleton() {
if let Some(extension) = singleton_buffer
.read(cx)
.file()
.and_then(|file| file.path().extension()?.to_str())
{
key_context.set("extension", extension.to_string());
}
} else {
key_context.add("multibuffer");
if let Some(extension) = self
.buffer
.read(cx)
.as_singleton()
.and_then(|buffer| buffer.read(cx).file()?.path().extension()?.to_str())
{
key_context.set("extension", extension.to_string());
}
if has_active_edit_prediction {
@@ -9848,31 +9849,6 @@ impl Editor {
})
}
pub fn move_to_start_of_next_excerpt(
&mut self,
_: &MoveToStartOfNextExcerpt,
window: &mut Window,
cx: &mut Context<Self>,
) {
if matches!(self.mode, EditorMode::SingleLine { .. }) {
cx.propagate();
return;
}
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.move_with(|map, selection| {
selection.collapse_to(
movement::start_of_excerpt(
map,
selection.head(),
workspace::searchable::Direction::Next,
),
SelectionGoal::None,
)
});
})
}
pub fn move_to_end_of_excerpt(
&mut self,
_: &MoveToEndOfExcerpt,
@@ -9898,31 +9874,6 @@ impl Editor {
})
}
pub fn move_to_end_of_previous_excerpt(
&mut self,
_: &MoveToEndOfPreviousExcerpt,
window: &mut Window,
cx: &mut Context<Self>,
) {
if matches!(self.mode, EditorMode::SingleLine { .. }) {
cx.propagate();
return;
}
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.move_with(|map, selection| {
selection.collapse_to(
movement::end_of_excerpt(
map,
selection.head(),
workspace::searchable::Direction::Prev,
),
SelectionGoal::None,
)
});
})
}
pub fn select_to_start_of_excerpt(
&mut self,
_: &SelectToStartOfExcerpt,
@@ -9944,27 +9895,6 @@ impl Editor {
})
}
pub fn select_to_start_of_next_excerpt(
&mut self,
_: &SelectToStartOfNextExcerpt,
window: &mut Window,
cx: &mut Context<Self>,
) {
if matches!(self.mode, EditorMode::SingleLine { .. }) {
cx.propagate();
return;
}
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.move_heads_with(|map, head, _| {
(
movement::start_of_excerpt(map, head, workspace::searchable::Direction::Next),
SelectionGoal::None,
)
});
})
}
pub fn select_to_end_of_excerpt(
&mut self,
_: &SelectToEndOfExcerpt,
@@ -9986,27 +9916,6 @@ impl Editor {
})
}
pub fn select_to_end_of_previous_excerpt(
&mut self,
_: &SelectToEndOfPreviousExcerpt,
window: &mut Window,
cx: &mut Context<Self>,
) {
if matches!(self.mode, EditorMode::SingleLine { .. }) {
cx.propagate();
return;
}
self.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.move_heads_with(|map, head, _| {
(
movement::end_of_excerpt(map, head, workspace::searchable::Direction::Prev),
SelectionGoal::None,
)
});
})
}
pub fn move_to_beginning(
&mut self,
_: &MoveToBeginning,
@@ -14978,14 +14887,14 @@ impl Editor {
&self,
window: &mut Window,
cx: &mut App,
) -> BTreeMap<DisplayRow, LineHighlight> {
) -> BTreeMap<DisplayRow, Background> {
let snapshot = self.snapshot(window, cx);
let mut used_highlight_orders = HashMap::default();
self.highlighted_rows
.iter()
.flat_map(|(_, highlighted_rows)| highlighted_rows.iter())
.fold(
BTreeMap::<DisplayRow, LineHighlight>::new(),
BTreeMap::<DisplayRow, Background>::new(),
|mut unique_rows, highlight| {
let start = highlight.range.start.to_display_point(&snapshot);
let end = highlight.range.end.to_display_point(&snapshot);
@@ -15533,9 +15442,14 @@ impl Editor {
}
multi_buffer::Event::DirtyChanged => cx.emit(EditorEvent::DirtyChanged),
multi_buffer::Event::Saved => cx.emit(EditorEvent::Saved),
multi_buffer::Event::FileHandleChanged
| multi_buffer::Event::Reloaded
| multi_buffer::Event::BufferDiffChanged => cx.emit(EditorEvent::TitleChanged),
multi_buffer::Event::FileHandleChanged | multi_buffer::Event::Reloaded => {
cx.emit(EditorEvent::TitleChanged)
}
// multi_buffer::Event::DiffBaseChanged => {
// self.scrollbar_marker_state.dirty = true;
// cx.emit(EditorEvent::DiffBaseChanged);
// cx.notify();
// }
multi_buffer::Event::Closed => cx.emit(EditorEvent::Closed),
multi_buffer::Event::DiagnosticsUpdated => {
self.refresh_active_diagnostics(cx);
@@ -16993,40 +16907,38 @@ fn snippet_completions(
Some(Completion {
old_range: range,
new_text: snippet.body.clone(),
source: CompletionSource::Lsp {
server_id: LanguageServerId(usize::MAX),
resolved: true,
lsp_completion: Box::new(lsp::CompletionItem {
label: snippet.prefix.first().unwrap().clone(),
kind: Some(CompletionItemKind::SNIPPET),
label_details: snippet.description.as_ref().map(|description| {
lsp::CompletionItemLabelDetails {
detail: Some(description.clone()),
description: None,
}
}),
insert_text_format: Some(InsertTextFormat::SNIPPET),
text_edit: Some(lsp::CompletionTextEdit::InsertAndReplace(
lsp::InsertReplaceEdit {
new_text: snippet.body.clone(),
insert: lsp_range,
replace: lsp_range,
},
)),
filter_text: Some(snippet.body.clone()),
sort_text: Some(char::MAX.to_string()),
..lsp::CompletionItem::default()
}),
},
resolved: false,
label: CodeLabel {
text: matching_prefix.clone(),
runs: Vec::new(),
runs: vec![],
filter_range: 0..matching_prefix.len(),
},
server_id: LanguageServerId(usize::MAX),
documentation: snippet
.description
.clone()
.map(|description| CompletionDocumentation::SingleLine(description.into())),
lsp_completion: lsp::CompletionItem {
label: snippet.prefix.first().unwrap().clone(),
kind: Some(CompletionItemKind::SNIPPET),
label_details: snippet.description.as_ref().map(|description| {
lsp::CompletionItemLabelDetails {
detail: Some(description.clone()),
description: None,
}
}),
insert_text_format: Some(InsertTextFormat::SNIPPET),
text_edit: Some(lsp::CompletionTextEdit::InsertAndReplace(
lsp::InsertReplaceEdit {
new_text: snippet.body.clone(),
insert: lsp_range,
replace: lsp_range,
},
)),
filter_text: Some(snippet.body.clone()),
sort_text: Some(char::MAX.to_string()),
..Default::default()
},
confirm: None,
})
})
@@ -18524,27 +18436,3 @@ impl Render for MissingEditPredictionKeybindingTooltip {
})
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct LineHighlight {
pub background: Background,
pub border: Option<gpui::Hsla>,
}
impl From<Hsla> for LineHighlight {
fn from(hsla: Hsla) -> Self {
Self {
background: hsla.into(),
border: None,
}
}
}
impl From<Background> for LineHighlight {
fn from(background: Background) -> Self {
Self {
background,
border: None,
}
}
}

View File

@@ -16413,199 +16413,6 @@ async fn test_folding_buffer_when_multibuffer_has_only_one_excerpt(cx: &mut Test
);
}
#[gpui::test]
async fn test_multi_buffer_navigation_with_folded_buffers(cx: &mut TestAppContext) {
init_test(cx, |_| {});
cx.update(|cx| {
let default_key_bindings = settings::KeymapFile::load_asset_allow_partial_failure(
"keymaps/default-linux.json",
cx,
)
.unwrap();
cx.bind_keys(default_key_bindings);
});
let (editor, cx) = cx.add_window_view(|window, cx| {
let multi_buffer = MultiBuffer::build_multi(
[
("a0\nb0\nc0\nd0\ne0\n", vec![Point::row_range(0..2)]),
("a1\nb1\nc1\nd1\ne1\n", vec![Point::row_range(0..2)]),
("a2\nb2\nc2\nd2\ne2\n", vec![Point::row_range(0..2)]),
("a3\nb3\nc3\nd3\ne3\n", vec![Point::row_range(0..2)]),
],
cx,
);
let mut editor = Editor::new(
EditorMode::Full,
multi_buffer.clone(),
None,
true,
window,
cx,
);
let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids();
// fold all but the second buffer, so that we test navigating between two
// adjacent folded buffers, as well as folded buffers at the start and
// end the multibuffer
editor.fold_buffer(buffer_ids[0], cx);
editor.fold_buffer(buffer_ids[2], cx);
editor.fold_buffer(buffer_ids[3], cx);
editor
});
cx.simulate_resize(size(px(1000.), px(1000.)));
let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await;
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
ˇ[FOLDED]
[EXCERPT]
a1
b1
[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
cx.simulate_keystroke("down");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
ˇa1
b1
[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
cx.simulate_keystroke("down");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
a1
ˇb1
[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
cx.simulate_keystroke("down");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
a1
b1
ˇ[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
cx.simulate_keystroke("down");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
a1
b1
[EXCERPT]
ˇ[FOLDED]
[EXCERPT]
[FOLDED]
"
});
for _ in 0..5 {
cx.simulate_keystroke("down");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
a1
b1
[EXCERPT]
[FOLDED]
[EXCERPT]
ˇ[FOLDED]
"
});
}
cx.simulate_keystroke("up");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
a1
b1
[EXCERPT]
ˇ[FOLDED]
[EXCERPT]
[FOLDED]
"
});
cx.simulate_keystroke("up");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
a1
b1
ˇ[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
cx.simulate_keystroke("up");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
a1
ˇb1
[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
cx.simulate_keystroke("up");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
ˇa1
b1
[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
for _ in 0..5 {
cx.simulate_keystroke("up");
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
ˇ[FOLDED]
[EXCERPT]
a1
b1
[EXCERPT]
[FOLDED]
[EXCERPT]
[FOLDED]
"
});
}
}
#[gpui::test]
async fn test_inline_completion_text(cx: &mut TestAppContext) {
init_test(cx, |_| {});

View File

@@ -20,10 +20,10 @@ use crate::{
DisplayRow, DocumentHighlightRead, DocumentHighlightWrite, EditDisplayMode, Editor, EditorMode,
EditorSettings, EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GoToHunk,
GoToPreviousHunk, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor,
InlayHintRefreshReason, InlineCompletion, JumpData, LineDown, LineHighlight, LineUp,
OpenExcerpts, PageDown, PageUp, Point, RowExt, RowRangeExt, SelectPhase, SelectedTextHighlight,
Selection, SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, COLUMNAR_SELECTION_MODIFIERS,
CURSORS_VISIBLE_FOR, FILE_HEADER_HEIGHT, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN,
InlayHintRefreshReason, InlineCompletion, JumpData, LineDown, LineUp, OpenExcerpts, PageDown,
PageUp, Point, RowExt, RowRangeExt, SelectPhase, SelectedTextHighlight, Selection, SoftWrap,
StickyHeaderExcerpt, ToPoint, ToggleFold, COLUMNAR_SELECTION_MODIFIERS, CURSORS_VISIBLE_FOR,
FILE_HEADER_HEIGHT, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN,
MULTI_BUFFER_EXCERPT_HEADER_HEIGHT,
};
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
@@ -282,9 +282,7 @@ impl EditorElement {
register_action(editor, window, Editor::move_to_beginning);
register_action(editor, window, Editor::move_to_end);
register_action(editor, window, Editor::move_to_start_of_excerpt);
register_action(editor, window, Editor::move_to_start_of_next_excerpt);
register_action(editor, window, Editor::move_to_end_of_excerpt);
register_action(editor, window, Editor::move_to_end_of_previous_excerpt);
register_action(editor, window, Editor::select_up);
register_action(editor, window, Editor::select_down);
register_action(editor, window, Editor::select_left);
@@ -298,9 +296,7 @@ impl EditorElement {
register_action(editor, window, Editor::select_to_start_of_paragraph);
register_action(editor, window, Editor::select_to_end_of_paragraph);
register_action(editor, window, Editor::select_to_start_of_excerpt);
register_action(editor, window, Editor::select_to_start_of_next_excerpt);
register_action(editor, window, Editor::select_to_end_of_excerpt);
register_action(editor, window, Editor::select_to_end_of_previous_excerpt);
register_action(editor, window, Editor::select_to_beginning);
register_action(editor, window, Editor::select_to_end);
register_action(editor, window, Editor::select_all);
@@ -1695,7 +1691,7 @@ impl EditorElement {
let pos_y = content_origin.y
+ line_height * (row.0 as f32 - scroll_pixel_position.y / line_height);
let window_ix = row.0.saturating_sub(start_row.0) as usize;
let window_ix = row.minus(start_row) as usize;
let pos_x = {
let crease_trailer_layout = &crease_trailers[window_ix];
let line_layout = &line_layouts[window_ix];
@@ -4136,74 +4132,46 @@ impl EditorElement {
}
}
let mut paint_highlight = |highlight_row_start: DisplayRow,
highlight_row_end: DisplayRow,
highlight: crate::LineHighlight,
edges| {
let origin = point(
layout.hitbox.origin.x,
layout.hitbox.origin.y
+ (highlight_row_start.as_f32() - scroll_top)
* layout.position_map.line_height,
);
let size = size(
layout.hitbox.size.width,
layout.position_map.line_height
* highlight_row_end.next_row().minus(highlight_row_start) as f32,
);
let mut quad = fill(Bounds { origin, size }, highlight.background);
if let Some(border_color) = highlight.border {
quad.border_color = border_color;
quad.border_widths = edges
}
window.paint_quad(quad);
};
let mut paint_highlight =
|highlight_row_start: DisplayRow, highlight_row_end: DisplayRow, color| {
let origin = point(
layout.hitbox.origin.x,
layout.hitbox.origin.y
+ (highlight_row_start.as_f32() - scroll_top)
* layout.position_map.line_height,
);
let size = size(
layout.hitbox.size.width,
layout.position_map.line_height
* highlight_row_end.next_row().minus(highlight_row_start) as f32,
);
window.paint_quad(fill(Bounds { origin, size }, color));
};
let mut current_paint: Option<(LineHighlight, Range<DisplayRow>, Edges<Pixels>)> =
None;
let mut current_paint: Option<(gpui::Background, Range<DisplayRow>)> = None;
for (&new_row, &new_background) in &layout.highlighted_rows {
match &mut current_paint {
Some((current_background, current_range, mut edges)) => {
Some((current_background, current_range)) => {
let current_background = *current_background;
let new_range_started = current_background != new_background
|| current_range.end.next_row() != new_row;
if new_range_started {
if current_range.end.next_row() == new_row {
edges.bottom = px(0.);
};
paint_highlight(
current_range.start,
current_range.end,
current_background,
edges,
);
let edges = Edges {
top: if current_range.end.next_row() != new_row {
px(1.)
} else {
px(0.)
},
bottom: px(1.),
..Default::default()
};
current_paint = Some((new_background, new_row..new_row, edges));
current_paint = Some((new_background, new_row..new_row));
continue;
} else {
current_range.end = current_range.end.next_row();
}
}
None => {
let edges = Edges {
top: px(1.),
bottom: px(1.),
..Default::default()
};
current_paint = Some((new_background, new_row..new_row, edges))
}
None => current_paint = Some((new_background, new_row..new_row)),
};
}
if let Some((color, range, edges)) = current_paint {
paint_highlight(range.start, range.end, color, edges);
if let Some((color, range)) = current_paint {
paint_highlight(range.start, range.end, color);
}
let scroll_left =
@@ -4381,11 +4349,6 @@ impl EditorElement {
fn paint_gutter_diff_hunks(layout: &mut EditorLayout, window: &mut Window, cx: &mut App) {
let is_light = cx.theme().appearance().is_light();
let hunk_style = ProjectSettings::get_global(cx)
.git
.hunk_style
.unwrap_or_default();
if layout.display_hunks.is_empty() {
return;
}
@@ -4449,23 +4412,9 @@ impl EditorElement {
if let Some((hunk_bounds, mut background_color, corner_radii, secondary_status)) =
hunk_to_paint
{
match hunk_style {
GitHunkStyleSetting::Transparent | GitHunkStyleSetting::Pattern => {
if secondary_status.has_secondary_hunk() {
background_color =
background_color.opacity(if is_light { 0.2 } else { 0.32 });
}
}
GitHunkStyleSetting::StagedPattern
| GitHunkStyleSetting::StagedTransparent => {
if !secondary_status.has_secondary_hunk() {
background_color =
background_color.opacity(if is_light { 0.2 } else { 0.32 });
}
}
GitHunkStyleSetting::StagedBorder | GitHunkStyleSetting::Border => {
// Don't change the background color
}
if secondary_status.has_secondary_hunk() {
background_color =
background_color.opacity(if is_light { 0.2 } else { 0.32 });
}
// Flatten the background color with the editor color to prevent
@@ -6785,10 +6734,10 @@ impl Element for EditorElement {
.update(cx, |editor, cx| editor.highlighted_display_rows(window, cx));
let is_light = cx.theme().appearance().is_light();
let hunk_style = ProjectSettings::get_global(cx)
let use_pattern = ProjectSettings::get_global(cx)
.git
.hunk_style
.unwrap_or_default();
.map_or(false, |style| matches!(style, GitHunkStyleSetting::Pattern));
for (ix, row_info) in row_infos.iter().enumerate() {
let Some(diff_status) = row_info.diff_status else {
@@ -6808,74 +6757,26 @@ impl Element for EditorElement {
let unstaged = diff_status.has_secondary_hunk();
let hunk_opacity = if is_light { 0.16 } else { 0.12 };
let slash_width = line_height.0 / 1.5; // ~16 by default
let staged_highlight: LineHighlight = match hunk_style {
GitHunkStyleSetting::Transparent
| GitHunkStyleSetting::Pattern
| GitHunkStyleSetting::Border => {
solid_background(background_color.opacity(hunk_opacity)).into()
}
GitHunkStyleSetting::StagedPattern => {
pattern_slash(background_color.opacity(hunk_opacity), slash_width)
.into()
}
GitHunkStyleSetting::StagedTransparent => {
solid_background(background_color.opacity(if is_light {
0.08
} else {
0.04
}))
.into()
}
GitHunkStyleSetting::StagedBorder => LineHighlight {
background: (background_color.opacity(if is_light {
0.08
} else {
0.06
}))
.into(),
border: Some(if is_light {
background_color.opacity(0.48)
} else {
background_color.opacity(0.36)
}),
},
};
let unstaged_highlight = match hunk_style {
GitHunkStyleSetting::Transparent => {
solid_background(background_color.opacity(if is_light {
0.08
} else {
0.04
}))
.into()
}
GitHunkStyleSetting::Pattern => {
pattern_slash(background_color.opacity(hunk_opacity), slash_width)
.into()
}
GitHunkStyleSetting::Border => LineHighlight {
background: (background_color.opacity(if is_light {
0.08
} else {
0.02
}))
.into(),
border: Some(background_color.opacity(0.5)),
},
GitHunkStyleSetting::StagedPattern
| GitHunkStyleSetting::StagedTransparent
| GitHunkStyleSetting::StagedBorder => {
solid_background(background_color.opacity(hunk_opacity)).into()
}
let staged_background =
solid_background(background_color.opacity(hunk_opacity));
let unstaged_background = if use_pattern {
pattern_slash(
background_color.opacity(hunk_opacity),
window.rem_size().0 * 1.125, // ~18 by default
)
} else {
solid_background(background_color.opacity(if is_light {
0.08
} else {
0.04
}))
};
let background = if unstaged {
unstaged_highlight
unstaged_background
} else {
staged_highlight
staged_background
};
highlighted_rows
@@ -7724,7 +7625,7 @@ pub struct EditorLayout {
indent_guides: Option<Vec<IndentGuideLayout>>,
visible_display_row_range: Range<DisplayRow>,
active_rows: BTreeMap<DisplayRow, bool>,
highlighted_rows: BTreeMap<DisplayRow, LineHighlight>,
highlighted_rows: BTreeMap<DisplayRow, gpui::Background>,
line_elements: SmallVec<[AnyElement; 1]>,
line_numbers: Arc<HashMap<MultiBufferRow, LineNumberLayout>>,
display_hunks: Vec<(DisplayDiffHunk, Option<Hitbox>)>,
@@ -8896,16 +8797,14 @@ fn diff_hunk_controls(
.h(line_height)
.mr_1()
.gap_1()
.px_0p5()
.px_1()
.pb_1()
.border_x_1()
.border_b_1()
.border_color(cx.theme().colors().border_variant)
.rounded_b_lg()
.bg(cx.theme().colors().editor_background)
.gap_1()
.occlude()
.shadow_md()
.child(if status.has_secondary_hunk() {
Button::new(("stage", row as u64), "Stage")
.alpha(if status.is_pending() { 0.66 } else { 1.0 })
@@ -8962,7 +8861,7 @@ fn diff_hunk_controls(
})
})
.child(
Button::new("restore", "Restore")
Button::new("discard", "Restore")
.tooltip({
let focus_handle = editor.focus_handle(cx);
move |window, cx| {

View File

@@ -448,9 +448,7 @@ pub fn end_of_excerpt(
if start.row() > DisplayRow(0) {
*start.row_mut() -= 1;
}
start = map.clip_point(start, Bias::Left);
*start.column_mut() = 0;
start
map.clip_point(start, Bias::Left)
}
Direction::Next => {
let mut end = excerpt.end_anchor().to_display_point(&map);

View File

@@ -429,14 +429,12 @@ impl EditorTestContext {
if expected_selections.len() > 0 {
assert!(
is_selected,
"excerpt {ix} should be selected. got {:?}",
self.editor_state(),
"excerpt {} should be selected. Got {:?}",
ix,
self.editor_state()
);
} else {
assert!(
!is_selected,
"excerpt {ix} should not be selected, got: {selections:?}",
);
assert!(!is_selected, "excerpt {} should not be selected", ix);
}
continue;
}

View File

@@ -17,7 +17,6 @@ async-compression.workspace = true
async-tar.workspace = true
async-trait.workspace = true
collections.workspace = true
convert_case.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true

View File

@@ -4,7 +4,6 @@ use crate::{
use anyhow::{anyhow, bail, Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use convert_case::{Case, Casing as _};
use futures::io::BufReader;
use futures::AsyncReadExt;
use http_client::{self, AsyncBody, HttpClient};
@@ -98,11 +97,6 @@ impl ExtensionBuilder {
}
for (grammar_name, grammar_metadata) in &extension_manifest.grammars {
let snake_cased_grammar_name = grammar_name.to_case(Case::Snake);
if grammar_name.as_ref() != snake_cased_grammar_name.as_str() {
bail!("grammar name '{grammar_name}' must be written in snake_case: {snake_cased_grammar_name}");
}
log::info!(
"compiling grammar {grammar_name} for extension {}",
extension_dir.display()

View File

@@ -692,9 +692,7 @@ impl GitRepository for RealGitRepository {
PushOptions::Force => "--force-with-lease",
}))
.arg(remote_name)
.arg(format!("{}:{}", branch_name, branch_name))
.stdout(smol::process::Stdio::piped())
.stderr(smol::process::Stdio::piped());
.arg(format!("{}:{}", branch_name, branch_name));
let git_process = command.spawn()?;
run_remote_command(ask_pass, git_process)
@@ -716,9 +714,7 @@ impl GitRepository for RealGitRepository {
.current_dir(&working_directory)
.args(["pull"])
.arg(remote_name)
.arg(branch_name)
.stdout(smol::process::Stdio::piped())
.stderr(smol::process::Stdio::piped());
.arg(branch_name);
let git_process = command.spawn()?;
run_remote_command(ask_pass, git_process)
@@ -733,9 +729,7 @@ impl GitRepository for RealGitRepository {
.env("SSH_ASKPASS", ask_pass.script_path())
.env("SSH_ASKPASS_REQUIRE", "force")
.current_dir(&working_directory)
.args(["fetch", "--all"])
.stdout(smol::process::Stdio::piped())
.stderr(smol::process::Stdio::piped());
.args(["fetch", "--all"]);
let git_process = command.spawn()?;
run_remote_command(ask_pass, git_process)

View File

@@ -54,39 +54,6 @@ impl From<TrackedStatus> for FileStatus {
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum StageStatus {
Staged,
Unstaged,
PartiallyStaged,
}
impl StageStatus {
pub fn is_fully_staged(&self) -> bool {
matches!(self, StageStatus::Staged)
}
pub fn is_fully_unstaged(&self) -> bool {
matches!(self, StageStatus::Unstaged)
}
pub fn has_staged(&self) -> bool {
matches!(self, StageStatus::Staged | StageStatus::PartiallyStaged)
}
pub fn has_unstaged(&self) -> bool {
matches!(self, StageStatus::Unstaged | StageStatus::PartiallyStaged)
}
pub fn as_bool(self) -> Option<bool> {
match self {
StageStatus::Staged => Some(true),
StageStatus::Unstaged => Some(false),
StageStatus::PartiallyStaged => None,
}
}
}
impl FileStatus {
pub const fn worktree(worktree_status: StatusCode) -> Self {
FileStatus::Tracked(TrackedStatus {
@@ -139,15 +106,15 @@ impl FileStatus {
Ok(status)
}
pub fn staging(self) -> StageStatus {
pub fn is_staged(self) -> Option<bool> {
match self {
FileStatus::Untracked | FileStatus::Ignored | FileStatus::Unmerged { .. } => {
StageStatus::Unstaged
Some(false)
}
FileStatus::Tracked(tracked) => match (tracked.index_status, tracked.worktree_status) {
(StatusCode::Unmodified, _) => StageStatus::Unstaged,
(_, StatusCode::Unmodified) => StageStatus::Staged,
_ => StageStatus::PartiallyStaged,
(StatusCode::Unmodified, _) => Some(false),
(_, StatusCode::Unmodified) => Some(true),
_ => None,
},
}
}

View File

@@ -18,30 +18,10 @@ use workspace::{ModalView, Workspace};
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
workspace.register_action(open);
workspace.register_action(switch);
workspace.register_action(checkout_branch);
})
.detach();
}
pub fn checkout_branch(
workspace: &mut Workspace,
_: &zed_actions::git::CheckoutBranch,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
open(workspace, &zed_actions::git::Branch, window, cx);
}
pub fn switch(
workspace: &mut Workspace,
_: &zed_actions::git::Switch,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
open(workspace, &zed_actions::git::Branch, window, cx);
}
pub fn open(
workspace: &mut Workspace,
_: &zed_actions::git::Branch,

View File

@@ -4,7 +4,7 @@ If you can accurately express the change in just the subject line, don't include
Don't repeat information from the subject line in the message body.
Only return the commit message in your response. Do not include any additional meta-commentary about the task. Do not include the raw diff output in the commit message.
Only return the commit message in your response. Do not include any additional meta-commentary about the task.
Follow good Git style:

View File

@@ -2,7 +2,7 @@
use crate::branch_picker::{self, BranchList};
use crate::git_panel::{commit_message_editor, GitPanel};
use git::{Commit, GenerateCommitMessage};
use git::Commit;
use panel::{panel_button, panel_editor_style, panel_filled_button};
use ui::{prelude::*, KeybindingHint, PopoverMenu, Tooltip};
@@ -372,24 +372,11 @@ impl Render for CommitModal {
.key_context("GitCommit")
.on_action(cx.listener(Self::dismiss))
.on_action(cx.listener(Self::commit))
.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| {
this.git_panel.update(cx, |panel, cx| {
panel.generate_commit_message(cx);
})
}))
.on_action(
cx.listener(|this, _: &zed_actions::git::Branch, window, cx| {
toggle_branch_picker(this, window, cx);
}),
)
.on_action(
cx.listener(|this, _: &zed_actions::git::CheckoutBranch, window, cx| {
toggle_branch_picker(this, window, cx);
}),
)
.on_action(
cx.listener(|this, _: &zed_actions::git::Switch, window, cx| {
toggle_branch_picker(this, window, cx);
this.branch_list.update(cx, |branch_list, cx| {
branch_list.popover_handle.toggle(window, cx);
})
}),
)
.elevation_3(cx)
@@ -428,13 +415,3 @@ impl Render for CommitModal {
)
}
}
fn toggle_branch_picker(
this: &mut CommitModal,
window: &mut Window,
cx: &mut Context<'_, CommitModal>,
) {
this.branch_list.update(cx, |branch_list, cx| {
branch_list.popover_handle.toggle(window, cx);
})
}

View File

@@ -1,9 +1,9 @@
use crate::askpass_modal::AskPassModal;
use crate::branch_picker;
use crate::commit_modal::CommitModal;
use crate::git_panel_settings::StatusStyle;
use crate::remote_output_toast::{RemoteAction, RemoteOutputToast};
use crate::repository_selector::filtered_repository_entries;
use crate::{branch_picker, render_remote_button};
use crate::{
git_panel_settings::GitPanelSettings, git_status_icon, repository_selector::RepositorySelector,
};
@@ -22,15 +22,15 @@ use git::repository::{
Branch, CommitDetails, CommitSummary, DiffType, PushOptions, Remote, RemoteCommandOutput,
ResetMode, Upstream, UpstreamTracking, UpstreamTrackingStatus,
};
use git::status::StageStatus;
use git::{repository::RepoPath, status::FileStatus, Commit, ToggleStaged};
use git::{ExpandCommitEditor, RestoreTrackedFiles, StageAll, TrashUntrackedFiles, UnstageAll};
use gpui::{
actions, anchored, deferred, percentage, uniform_list, Action, Animation, AnimationExt as _,
ClickEvent, Corner, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, KeyContext,
ListHorizontalSizingBehavior, ListSizingBehavior, Modifiers, ModifiersChangedEvent,
MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy, Stateful, Subscription, Task,
Transformation, UniformListScrollHandle, WeakEntity,
actions, anchored, deferred, hsla, percentage, point, uniform_list, Action, Animation,
AnimationExt as _, AnyView, BoxShadow, ClickEvent, Corner, DismissEvent, Entity, EventEmitter,
FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior,
Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, Point, PromptLevel,
ScrollStrategy, Stateful, Subscription, Task, Transformation, UniformListScrollHandle,
WeakEntity,
};
use itertools::Itertools;
use language::{Buffer, File};
@@ -48,6 +48,7 @@ use project::{
};
use serde::{Deserialize, Serialize};
use settings::Settings as _;
use smallvec::smallvec;
use std::cell::RefCell;
use std::future::Future;
use std::path::{Path, PathBuf};
@@ -56,8 +57,8 @@ use std::{collections::HashSet, sync::Arc, time::Duration, usize};
use strum::{IntoEnumIterator, VariantNames};
use time::OffsetDateTime;
use ui::{
prelude::*, Checkbox, ContextMenu, ElevationIndex, PopoverMenu, Scrollbar, ScrollbarState,
Tooltip,
prelude::*, ButtonLike, Checkbox, ContextMenu, ElevationIndex, PopoverMenu, Scrollbar,
ScrollbarState, Tooltip,
};
use util::{maybe, post_inc, ResultExt, TryFutureExt};
use workspace::{AppState, OpenOptions, OpenVisible};
@@ -194,7 +195,7 @@ pub struct GitStatusEntry {
pub(crate) worktree_path: Arc<Path>,
pub(crate) abs_path: PathBuf,
pub(crate) status: FileStatus,
pub(crate) staging: StageStatus,
pub(crate) is_staged: Option<bool>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -208,7 +209,7 @@ enum TargetStatus {
struct PendingOperation {
finished: bool,
target_status: TargetStatus,
entries: Vec<GitStatusEntry>,
repo_paths: HashSet<RepoPath>,
op_id: usize,
}
@@ -225,8 +226,6 @@ pub struct GitPanel {
add_coauthors: bool,
generate_commit_message_task: Option<Task<Option<()>>>,
entries: Vec<GitListEntry>,
single_staged_entry: Option<GitStatusEntry>,
single_tracked_entry: Option<GitStatusEntry>,
focus_handle: FocusHandle,
fs: Arc<dyn Fs>,
hide_scrollbar_task: Option<Task<()>>,
@@ -366,8 +365,6 @@ impl GitPanel {
pending: Vec::new(),
pending_commit: None,
pending_serialization: Task::ready(None),
single_staged_entry: None,
single_tracked_entry: None,
project,
scroll_handle,
scrollbar_state,
@@ -377,7 +374,7 @@ impl GitPanel {
tracked_count: 0,
tracked_staged_count: 0,
update_visible_entries_task: Task::ready(()),
width: None,
width: Some(px(360.)),
context_menu: None,
workspace,
modal_open: false,
@@ -831,13 +828,13 @@ impl GitPanel {
.repo_path_to_project_path(&entry.repo_path)?;
let workspace = self.workspace.clone();
if entry.status.staging().has_staged() {
self.change_file_stage(false, vec![entry.clone()], cx);
if entry.status.is_staged() != Some(false) {
self.perform_stage(false, vec![entry.repo_path.clone()], cx);
}
let filename = path.path.file_name()?.to_string_lossy();
if !entry.status.is_created() {
self.perform_checkout(vec![entry.clone()], cx);
self.perform_checkout(vec![entry.repo_path.clone()], cx);
} else {
let prompt = prompt(&format!("Trash {}?", filename), None, window, cx);
cx.spawn_in(window, |_, mut cx| async move {
@@ -866,7 +863,7 @@ impl GitPanel {
});
}
fn perform_checkout(&mut self, entries: Vec<GitStatusEntry>, cx: &mut Context<Self>) {
fn perform_checkout(&mut self, repo_paths: Vec<RepoPath>, cx: &mut Context<Self>) {
let workspace = self.workspace.clone();
let Some(active_repository) = self.active_repository.clone() else {
return;
@@ -876,19 +873,19 @@ impl GitPanel {
self.pending.push(PendingOperation {
op_id,
target_status: TargetStatus::Reverted,
entries: entries.clone(),
repo_paths: repo_paths.iter().cloned().collect(),
finished: false,
});
self.update_visible_entries(cx);
let task = cx.spawn(|_, mut cx| async move {
let tasks: Vec<_> = workspace.update(&mut cx, |workspace, cx| {
workspace.project().update(cx, |project, cx| {
entries
repo_paths
.iter()
.filter_map(|entry| {
.filter_map(|repo_path| {
let path = active_repository
.read(cx)
.repo_path_to_project_path(&entry.repo_path)?;
.repo_path_to_project_path(&repo_path)?;
Some(project.open_buffer(path, cx))
})
.collect()
@@ -898,15 +895,7 @@ impl GitPanel {
let buffers = futures::future::join_all(tasks).await;
active_repository
.update(&mut cx, |repo, _| {
repo.checkout_files(
"HEAD",
entries
.iter()
.map(|entries| entries.repo_path.clone())
.collect(),
)
})?
.update(&mut cx, |repo, _| repo.checkout_files("HEAD", repo_paths))?
.await??;
let tasks: Vec<_> = cx.update(|cx| {
@@ -994,7 +983,8 @@ impl GitPanel {
match prompt.await {
Ok(RestoreCancel::RestoreTrackedFiles) => {
this.update(&mut cx, |this, cx| {
this.perform_checkout(entries, cx);
let repo_paths = entries.into_iter().map(|entry| entry.repo_path).collect();
this.perform_checkout(repo_paths, cx);
})
.ok();
}
@@ -1063,10 +1053,16 @@ impl GitPanel {
})?;
let to_unstage = to_delete
.into_iter()
.filter(|entry| !entry.status.staging().is_fully_unstaged())
.filter_map(|entry| {
if entry.status.is_staged() != Some(false) {
Some(entry.repo_path.clone())
} else {
None
}
})
.collect();
this.update(&mut cx, |this, cx| {
this.change_file_stage(false, to_unstage, cx)
this.perform_stage(false, to_unstage, cx)
})?;
for task in tasks {
task.await?;
@@ -1079,25 +1075,25 @@ impl GitPanel {
}
fn stage_all(&mut self, _: &StageAll, _window: &mut Window, cx: &mut Context<Self>) {
let entries = self
let repo_paths = self
.entries
.iter()
.filter_map(|entry| entry.status_entry())
.filter(|status_entry| status_entry.staging.has_unstaged())
.cloned()
.filter(|status_entry| status_entry.is_staged != Some(true))
.map(|status_entry| status_entry.repo_path.clone())
.collect::<Vec<_>>();
self.change_file_stage(true, entries, cx);
self.perform_stage(true, repo_paths, cx);
}
fn unstage_all(&mut self, _: &UnstageAll, _window: &mut Window, cx: &mut Context<Self>) {
let entries = self
let repo_paths = self
.entries
.iter()
.filter_map(|entry| entry.status_entry())
.filter(|status_entry| status_entry.staging.has_staged())
.cloned()
.filter(|status_entry| status_entry.is_staged != Some(false))
.map(|status_entry| status_entry.repo_path.clone())
.collect::<Vec<_>>();
self.change_file_stage(false, entries, cx);
self.perform_stage(false, repo_paths, cx);
}
fn toggle_staged_for_entry(
@@ -1111,10 +1107,10 @@ impl GitPanel {
};
let (stage, repo_paths) = match entry {
GitListEntry::GitStatusEntry(status_entry) => {
if status_entry.status.staging().is_fully_staged() {
(false, vec![status_entry.clone()])
if status_entry.status.is_staged().unwrap_or(false) {
(false, vec![status_entry.repo_path.clone()])
} else {
(true, vec![status_entry.clone()])
(true, vec![status_entry.repo_path.clone()])
}
}
GitListEntry::Header(section) => {
@@ -1126,23 +1122,18 @@ impl GitPanel {
.filter_map(|entry| entry.status_entry())
.filter(|status_entry| {
section.contains(&status_entry, repository)
&& status_entry.staging.as_bool() != Some(goal_staged_state)
&& status_entry.is_staged != Some(goal_staged_state)
})
.map(|status_entry| status_entry.clone())
.map(|status_entry| status_entry.repo_path.clone())
.collect::<Vec<_>>();
(goal_staged_state, entries)
}
};
self.change_file_stage(stage, repo_paths, cx);
self.perform_stage(stage, repo_paths, cx);
}
fn change_file_stage(
&mut self,
stage: bool,
entries: Vec<GitStatusEntry>,
cx: &mut Context<Self>,
) {
fn perform_stage(&mut self, stage: bool, repo_paths: Vec<RepoPath>, cx: &mut Context<Self>) {
let Some(active_repository) = self.active_repository.clone() else {
return;
};
@@ -1154,9 +1145,10 @@ impl GitPanel {
} else {
TargetStatus::Unstaged
},
entries: entries.clone(),
repo_paths: repo_paths.iter().cloned().collect(),
finished: false,
});
let repo_paths = repo_paths.clone();
let repository = active_repository.read(cx);
self.update_counts(repository);
cx.notify();
@@ -1166,21 +1158,11 @@ impl GitPanel {
let result = cx
.update(|cx| {
if stage {
active_repository.update(cx, |repo, cx| {
let repo_paths = entries
.iter()
.map(|entry| entry.repo_path.clone())
.collect();
repo.stage_entries(repo_paths, cx)
})
active_repository
.update(cx, |repo, cx| repo.stage_entries(repo_paths.clone(), cx))
} else {
active_repository.update(cx, |repo, cx| {
let repo_paths = entries
.iter()
.map(|entry| entry.repo_path.clone())
.collect();
repo.unstage_entries(repo_paths, cx)
})
active_repository
.update(cx, |repo, cx| repo.unstage_entries(repo_paths.clone(), cx))
}
})?
.await;
@@ -1415,13 +1397,21 @@ impl GitPanel {
/// Suggests a commit message based on the changed files and their statuses
pub fn suggest_commit_message(&self) -> Option<String> {
let git_status_entry = if let Some(staged_entry) = &self.single_staged_entry {
Some(staged_entry)
} else if let Some(single_tracked_entry) = &self.single_tracked_entry {
Some(single_tracked_entry)
} else {
None
}?;
if self.total_staged_count() != 1 {
return None;
}
let entry = self
.entries
.iter()
.find(|entry| match entry.status_entry() {
Some(entry) => entry.is_staged.unwrap_or(false),
_ => false,
})?;
let GitListEntry::GitStatusEntry(git_status_entry) = entry.clone() else {
return None;
};
let action_text = if git_status_entry.status.is_deleted() {
Some("Delete")
@@ -1581,7 +1571,6 @@ impl GitPanel {
this.show_remote_output(RemoteAction::Fetch, remote_message, cx);
}
Err(e) => {
log::error!("Error while fetching {:?}", e);
this.show_err_toast(e, cx);
}
}
@@ -1640,10 +1629,7 @@ impl GitPanel {
Ok(remote_message) => {
this.show_remote_output(RemoteAction::Pull, remote_message, cx)
}
Err(err) => {
log::error!("Error while pull {:?}", err);
this.show_err_toast(err, cx)
}
Err(err) => this.show_err_toast(err, cx),
})
.ok();
@@ -1711,7 +1697,6 @@ impl GitPanel {
this.show_remote_output(RemoteAction::Push(remote), remote_message, cx);
}
Err(e) => {
log::error!("Error while pushing {:?}", e);
this.show_err_toast(e, cx);
}
})?;
@@ -1746,7 +1731,7 @@ impl GitPanel {
}
fn can_push_and_pull(&self, cx: &App) -> bool {
crate::can_push_and_pull(&self.project, cx)
!self.project.read(cx).is_via_collab()
}
fn get_current_remote(
@@ -1980,13 +1965,9 @@ impl GitPanel {
fn update_visible_entries(&mut self, cx: &mut Context<Self>) {
self.entries.clear();
self.single_staged_entry.take();
self.single_staged_entry.take();
let mut changed_entries = Vec::new();
let mut new_entries = Vec::new();
let mut conflict_entries = Vec::new();
let mut last_staged = None;
let mut staged_count = 0;
let Some(repo) = self.active_repository.as_ref() else {
// Just clear entries if no repository is active.
@@ -1999,15 +1980,12 @@ impl GitPanel {
for entry in repo.status() {
let is_conflict = repo.has_conflict(&entry.repo_path);
let is_new = entry.status.is_created();
let staging = entry.status.staging();
let is_staged = entry.status.is_staged();
if self.pending.iter().any(|pending| {
pending.target_status == TargetStatus::Reverted
&& !pending.finished
&& pending
.entries
.iter()
.any(|pending| pending.repo_path == entry.repo_path)
&& pending.repo_paths.contains(&entry.repo_path)
}) {
continue;
}
@@ -2024,14 +2002,9 @@ impl GitPanel {
worktree_path,
abs_path,
status: entry.status,
staging,
is_staged,
};
if staging.has_staged() {
staged_count += 1;
last_staged = Some(entry.clone());
}
if is_conflict {
conflict_entries.push(entry);
} else if is_new {
@@ -2041,40 +2014,6 @@ impl GitPanel {
}
}
let mut pending_staged_count = 0;
let mut last_pending_staged = None;
let mut pending_status_for_last_staged = None;
for pending in self.pending.iter() {
if pending.target_status == TargetStatus::Staged {
pending_staged_count += pending.entries.len();
last_pending_staged = pending.entries.iter().next().cloned();
}
if let Some(last_staged) = &last_staged {
if pending
.entries
.iter()
.any(|entry| entry.repo_path == last_staged.repo_path)
{
pending_status_for_last_staged = Some(pending.target_status);
}
}
}
if conflict_entries.len() == 0 && staged_count == 1 && pending_staged_count == 0 {
match pending_status_for_last_staged {
Some(TargetStatus::Staged) | None => {
self.single_staged_entry = last_staged;
}
_ => {}
}
} else if conflict_entries.len() == 0 && pending_staged_count == 1 {
self.single_staged_entry = last_pending_staged;
}
if conflict_entries.len() == 0 && changed_entries.len() == 1 {
self.single_tracked_entry = changed_entries.first().cloned();
}
if conflict_entries.len() > 0 {
self.entries.push(GitListEntry::Header(GitHeaderEntry {
header: Section::Conflict,
@@ -2139,39 +2078,35 @@ impl GitPanel {
};
if repo.has_conflict(&status_entry.repo_path) {
self.conflicted_count += 1;
if self.entry_staging(status_entry).has_staged() {
if self.entry_is_staged(status_entry) != Some(false) {
self.conflicted_staged_count += 1;
}
} else if status_entry.status.is_created() {
self.new_count += 1;
if self.entry_staging(status_entry).has_staged() {
if self.entry_is_staged(status_entry) != Some(false) {
self.new_staged_count += 1;
}
} else {
self.tracked_count += 1;
if self.entry_staging(status_entry).has_staged() {
if self.entry_is_staged(status_entry) != Some(false) {
self.tracked_staged_count += 1;
}
}
}
}
fn entry_staging(&self, entry: &GitStatusEntry) -> StageStatus {
fn entry_is_staged(&self, entry: &GitStatusEntry) -> Option<bool> {
for pending in self.pending.iter().rev() {
if pending
.entries
.iter()
.any(|pending_entry| pending_entry.repo_path == entry.repo_path)
{
if pending.repo_paths.contains(&entry.repo_path) {
match pending.target_status {
TargetStatus::Staged => return StageStatus::Staged,
TargetStatus::Unstaged => return StageStatus::Unstaged,
TargetStatus::Staged => return Some(true),
TargetStatus::Unstaged => return Some(false),
TargetStatus::Reverted => continue,
TargetStatus::Unchanged => continue,
}
}
}
entry.staging
entry.is_staged
}
pub(crate) fn has_staged_changes(&self) -> bool {
@@ -2664,9 +2599,9 @@ impl GitPanel {
let ix = self.entry_by_path(&repo_path)?;
let entry = self.entries.get(ix)?;
let entry_staging = self.entry_staging(entry.status_entry()?);
let is_staged = self.entry_is_staged(entry.status_entry()?);
let checkbox = Checkbox::new("stage-file", entry_staging.as_bool().into())
let checkbox = Checkbox::new("stage-file", is_staged.into())
.disabled(!self.has_write_access(cx))
.fill()
.elevation(ElevationIndex::Surface)
@@ -2812,7 +2747,7 @@ impl GitPanel {
let Some(entry) = self.entries.get(ix).and_then(|e| e.status_entry()) else {
return;
};
let stage_title = if entry.status.staging().is_fully_staged() {
let stage_title = if entry.status.is_staged() == Some(true) {
"Unstage File"
} else {
"Stage File"
@@ -2918,8 +2853,8 @@ impl GitPanel {
let checkbox_id: ElementId =
ElementId::Name(format!("entry_{}_{}_checkbox", display_name, ix).into());
let entry_staging = self.entry_staging(entry);
let mut is_staged: ToggleState = self.entry_staging(entry).as_bool().into();
let is_entry_staged = self.entry_is_staged(entry);
let mut is_staged: ToggleState = self.entry_is_staged(entry).into();
if !self.has_staged_changes() && !self.has_conflicts() && !entry.status.is_created() {
is_staged = ToggleState::Selected;
@@ -3038,7 +2973,7 @@ impl GitPanel {
})
})
.tooltip(move |window, cx| {
let tooltip_name = if entry_staging.is_fully_staged() {
let tooltip_name = if is_entry_staged.unwrap_or(false) {
"Unstage"
} else {
"Stage"
@@ -3311,6 +3246,159 @@ impl Render for GitPanelMessageTooltip {
}
}
fn git_action_tooltip(
label: impl Into<SharedString>,
action: &dyn Action,
command: impl Into<SharedString>,
focus_handle: Option<FocusHandle>,
window: &mut Window,
cx: &mut App,
) -> AnyView {
let label = label.into();
let command = command.into();
if let Some(handle) = focus_handle {
Tooltip::with_meta_in(
label.clone(),
Some(action),
command.clone(),
&handle,
window,
cx,
)
} else {
Tooltip::with_meta(label.clone(), Some(action), command.clone(), window, cx)
}
}
#[derive(IntoElement)]
struct SplitButton {
pub left: ButtonLike,
pub right: AnyElement,
}
impl SplitButton {
fn new(
id: impl Into<SharedString>,
left_label: impl Into<SharedString>,
ahead_count: usize,
behind_count: usize,
left_icon: Option<IconName>,
left_on_click: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static,
tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static,
) -> Self {
let id = id.into();
fn count(count: usize) -> impl IntoElement {
h_flex()
.ml_neg_px()
.h(rems(0.875))
.items_center()
.overflow_hidden()
.px_0p5()
.child(
Label::new(count.to_string())
.size(LabelSize::XSmall)
.line_height_style(LineHeightStyle::UiLabel),
)
}
let should_render_counts = left_icon.is_none() && (ahead_count > 0 || behind_count > 0);
let left = ui::ButtonLike::new_rounded_left(ElementId::Name(
format!("split-button-left-{}", id).into(),
))
.layer(ui::ElevationIndex::ModalSurface)
.size(ui::ButtonSize::Compact)
.when(should_render_counts, |this| {
this.child(
h_flex()
.ml_neg_0p5()
.mr_1()
.when(behind_count > 0, |this| {
this.child(Icon::new(IconName::ArrowDown).size(IconSize::XSmall))
.child(count(behind_count))
})
.when(ahead_count > 0, |this| {
this.child(Icon::new(IconName::ArrowUp).size(IconSize::XSmall))
.child(count(ahead_count))
}),
)
})
.when_some(left_icon, |this, left_icon| {
this.child(
h_flex()
.ml_neg_0p5()
.mr_1()
.child(Icon::new(left_icon).size(IconSize::XSmall)),
)
})
.child(
div()
.child(Label::new(left_label).size(LabelSize::Small))
.mr_0p5(),
)
.on_click(left_on_click)
.tooltip(tooltip);
let right =
render_git_action_menu(ElementId::Name(format!("split-button-right-{}", id).into()))
.into_any_element();
// .on_click(right_on_click);
Self { left, right }
}
}
impl RenderOnce for SplitButton {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
h_flex()
.rounded_sm()
.border_1()
.border_color(cx.theme().colors().text_muted.alpha(0.12))
.child(self.left)
.child(
div()
.h_full()
.w_px()
.bg(cx.theme().colors().text_muted.alpha(0.16)),
)
.child(self.right)
.bg(ElevationIndex::Surface.on_elevation_bg(cx))
.shadow(smallvec![BoxShadow {
color: hsla(0.0, 0.0, 0.0, 0.16),
offset: point(px(0.), px(1.)),
blur_radius: px(0.),
spread_radius: px(0.),
}])
}
}
fn render_git_action_menu(id: impl Into<ElementId>) -> impl IntoElement {
PopoverMenu::new(id.into())
.trigger(
ui::ButtonLike::new_rounded_right("split-button-right")
.layer(ui::ElevationIndex::ModalSurface)
.size(ui::ButtonSize::None)
.child(
div()
.px_1()
.child(Icon::new(IconName::ChevronDownSmall).size(IconSize::XSmall)),
),
)
.menu(move |window, cx| {
Some(ContextMenu::build(window, cx, |context_menu, _, _| {
context_menu
.action("Fetch", git::Fetch.boxed_clone())
.action("Pull", git::Pull.boxed_clone())
.separator()
.action("Push", git::Push.boxed_clone())
.action("Force Push", git::ForcePush.boxed_clone())
}))
})
.anchor(Corner::TopRight)
}
#[derive(IntoElement, IntoComponent)]
#[component(scope = "Version Control")]
pub struct PanelRepoFooter {
@@ -3361,6 +3449,200 @@ impl PanelRepoFooter {
.menu(move |window, cx| Some(git_panel_context_menu(window, cx)))
.anchor(Corner::TopRight)
}
fn panel_focus_handle(&self, cx: &App) -> Option<FocusHandle> {
if let Some(git_panel) = self.git_panel.clone() {
Some(git_panel.focus_handle(cx))
} else {
None
}
}
fn render_push_button(&self, id: SharedString, ahead: u32, cx: &mut App) -> SplitButton {
let panel = self.git_panel.clone();
let panel_focus_handle = self.panel_focus_handle(cx);
SplitButton::new(
id,
"Push",
ahead as usize,
0,
None,
move |_, window, cx| {
if let Some(panel) = panel.as_ref() {
panel.update(cx, |panel, cx| {
panel.push(false, window, cx);
});
}
},
move |window, cx| {
git_action_tooltip(
"Push committed changes to remote",
&git::Push,
"git push",
panel_focus_handle.clone(),
window,
cx,
)
},
)
}
fn render_pull_button(
&self,
id: SharedString,
ahead: u32,
behind: u32,
cx: &mut App,
) -> SplitButton {
let panel = self.git_panel.clone();
let panel_focus_handle = self.panel_focus_handle(cx);
SplitButton::new(
id,
"Pull",
ahead as usize,
behind as usize,
None,
move |_, window, cx| {
if let Some(panel) = panel.as_ref() {
panel.update(cx, |panel, cx| {
panel.pull(window, cx);
});
}
},
move |window, cx| {
git_action_tooltip(
"Pull",
&git::Pull,
"git pull",
panel_focus_handle.clone(),
window,
cx,
)
},
)
}
fn render_fetch_button(&self, id: SharedString, cx: &mut App) -> SplitButton {
let panel = self.git_panel.clone();
let panel_focus_handle = self.panel_focus_handle(cx);
SplitButton::new(
id,
"Fetch",
0,
0,
Some(IconName::ArrowCircle),
move |_, window, cx| {
if let Some(panel) = panel.as_ref() {
panel.update(cx, |panel, cx| {
panel.fetch(window, cx);
});
}
},
move |window, cx| {
git_action_tooltip(
"Fetch updates from remote",
&git::Fetch,
"git fetch",
panel_focus_handle.clone(),
window,
cx,
)
},
)
}
fn render_publish_button(&self, id: SharedString, cx: &mut App) -> SplitButton {
let panel = self.git_panel.clone();
let panel_focus_handle = self.panel_focus_handle(cx);
SplitButton::new(
id,
"Publish",
0,
0,
Some(IconName::ArrowUpFromLine),
move |_, window, cx| {
if let Some(panel) = panel.as_ref() {
panel.update(cx, |panel, cx| {
panel.push(false, window, cx);
});
}
},
move |window, cx| {
git_action_tooltip(
"Publish branch to remote",
&git::Push,
"git push --set-upstream",
panel_focus_handle.clone(),
window,
cx,
)
},
)
}
fn render_republish_button(&self, id: SharedString, cx: &mut App) -> SplitButton {
let panel = self.git_panel.clone();
let panel_focus_handle = self.panel_focus_handle(cx);
SplitButton::new(
id,
"Republish",
0,
0,
Some(IconName::ArrowUpFromLine),
move |_, window, cx| {
if let Some(panel) = panel.as_ref() {
panel.update(cx, |panel, cx| {
panel.push(false, window, cx);
});
}
},
move |window, cx| {
git_action_tooltip(
"Re-publish branch to remote",
&git::Push,
"git push --set-upstream",
panel_focus_handle.clone(),
window,
cx,
)
},
)
}
fn render_relevant_button(
&self,
id: impl Into<SharedString>,
branch: &Branch,
cx: &mut App,
) -> Option<impl IntoElement> {
if let Some(git_panel) = self.git_panel.as_ref() {
if !git_panel.read(cx).can_push_and_pull(cx) {
return None;
}
}
let id = id.into();
let upstream = branch.upstream.as_ref();
Some(match upstream {
Some(Upstream {
tracking: UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead, behind }),
..
}) => match (*ahead, *behind) {
(0, 0) => self.render_fetch_button(id, cx),
(ahead, 0) => self.render_push_button(id, ahead, cx),
(ahead, behind) => self.render_pull_button(id, ahead, behind, cx),
},
Some(Upstream {
tracking: UpstreamTracking::Gone,
..
}) => self.render_republish_button(id, cx),
None => self.render_publish_button(id, cx),
})
}
}
impl RenderOnce for PanelRepoFooter {
@@ -3476,20 +3758,8 @@ impl RenderOnce for PanelRepoFooter {
.children(spinner)
.child(self.render_overflow_menu(overflow_menu_id))
.when_some(branch, |this, branch| {
let mut focus_handle = None;
if let Some(git_panel) = self.git_panel.as_ref() {
if !git_panel.read(cx).can_push_and_pull(cx) {
return this;
}
focus_handle = Some(git_panel.focus_handle(cx));
}
this.children(render_remote_button(
self.id.clone(),
&branch,
focus_handle,
true,
))
let button = self.render_relevant_button(self.id.clone(), &branch, cx);
this.children(button)
}),
)
}
@@ -3875,14 +4145,14 @@ mod tests {
repo_path: "crates/gpui/gpui.rs".into(),
worktree_path: Path::new("gpui.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
is_staged: Some(false),
}),
GitListEntry::GitStatusEntry(GitStatusEntry {
abs_path: path!("/root/zed/crates/util/util.rs").into(),
repo_path: "crates/util/util.rs".into(),
worktree_path: Path::new("../util/util.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
is_staged: Some(false),
},),
],
);
@@ -3949,14 +4219,14 @@ mod tests {
repo_path: "crates/gpui/gpui.rs".into(),
worktree_path: Path::new("../../gpui/gpui.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
is_staged: Some(false),
}),
GitListEntry::GitStatusEntry(GitStatusEntry {
abs_path: path!("/root/zed/crates/util/util.rs").into(),
repo_path: "crates/util/util.rs".into(),
worktree_path: Path::new("util.rs").into(),
status: StatusCode::Modified.worktree(),
staging: StageStatus::Unstaged,
is_staged: Some(false),
},),
],
);

View File

@@ -1,13 +1,9 @@
use ::settings::Settings;
use git::{
repository::{Branch, Upstream, UpstreamTracking, UpstreamTrackingStatus},
status::FileStatus,
};
use git::status::FileStatus;
use git_panel_settings::GitPanelSettings;
use gpui::{App, Entity, FocusHandle};
use project::Project;
use gpui::App;
use project_diff::ProjectDiff;
use ui::{ActiveTheme, Color, Icon, IconName, IntoElement, SharedString};
use ui::{ActiveTheme, Color, Icon, IconName, IntoElement};
use workspace::Workspace;
mod askpass_modal;
@@ -93,343 +89,3 @@ pub fn git_status_icon(status: FileStatus, cx: &App) -> impl IntoElement {
};
Icon::new(icon_name).color(Color::Custom(color))
}
fn can_push_and_pull(project: &Entity<Project>, cx: &App) -> bool {
!project.read(cx).is_via_collab()
}
fn render_remote_button(
id: impl Into<SharedString>,
branch: &Branch,
keybinding_target: Option<FocusHandle>,
show_fetch_button: bool,
) -> Option<impl IntoElement> {
let id = id.into();
let upstream = branch.upstream.as_ref();
match upstream {
Some(Upstream {
tracking: UpstreamTracking::Tracked(UpstreamTrackingStatus { ahead, behind }),
..
}) => match (*ahead, *behind) {
(0, 0) if show_fetch_button => {
Some(remote_button::render_fetch_button(keybinding_target, id))
}
(0, 0) => None,
(ahead, 0) => Some(remote_button::render_push_button(
keybinding_target.clone(),
id,
ahead,
)),
(ahead, behind) => Some(remote_button::render_pull_button(
keybinding_target.clone(),
id,
ahead,
behind,
)),
},
Some(Upstream {
tracking: UpstreamTracking::Gone,
..
}) => Some(remote_button::render_republish_button(
keybinding_target,
id,
)),
None => Some(remote_button::render_publish_button(keybinding_target, id)),
}
}
mod remote_button {
use gpui::{hsla, point, Action, AnyView, BoxShadow, ClickEvent, Corner, FocusHandle};
use ui::{
div, h_flex, px, rems, ActiveTheme, AnyElement, App, ButtonCommon, ButtonLike, Clickable,
ContextMenu, ElementId, ElevationIndex, FluentBuilder, Icon, IconName, IconSize,
IntoElement, Label, LabelCommon, LabelSize, LineHeightStyle, ParentElement, PopoverMenu,
RenderOnce, SharedString, Styled, Tooltip, Window,
};
pub fn render_fetch_button(
keybinding_target: Option<FocusHandle>,
id: SharedString,
) -> SplitButton {
SplitButton::new(
id,
"Fetch",
0,
0,
Some(IconName::ArrowCircle),
move |_, window, cx| {
window.dispatch_action(Box::new(git::Fetch), cx);
},
move |window, cx| {
git_action_tooltip(
"Fetch updates from remote",
&git::Fetch,
"git fetch",
keybinding_target.clone(),
window,
cx,
)
},
)
}
pub fn render_push_button(
keybinding_target: Option<FocusHandle>,
id: SharedString,
ahead: u32,
) -> SplitButton {
SplitButton::new(
id,
"Push",
ahead as usize,
0,
None,
move |_, window, cx| {
window.dispatch_action(Box::new(git::Push), cx);
},
move |window, cx| {
git_action_tooltip(
"Push committed changes to remote",
&git::Push,
"git push",
keybinding_target.clone(),
window,
cx,
)
},
)
}
pub fn render_pull_button(
keybinding_target: Option<FocusHandle>,
id: SharedString,
ahead: u32,
behind: u32,
) -> SplitButton {
SplitButton::new(
id,
"Pull",
ahead as usize,
behind as usize,
None,
move |_, window, cx| {
window.dispatch_action(Box::new(git::Pull), cx);
},
move |window, cx| {
git_action_tooltip(
"Pull",
&git::Pull,
"git pull",
keybinding_target.clone(),
window,
cx,
)
},
)
}
pub fn render_publish_button(
keybinding_target: Option<FocusHandle>,
id: SharedString,
) -> SplitButton {
SplitButton::new(
id,
"Publish",
0,
0,
Some(IconName::ArrowUpFromLine),
move |_, window, cx| {
window.dispatch_action(Box::new(git::Push), cx);
},
move |window, cx| {
git_action_tooltip(
"Publish branch to remote",
&git::Push,
"git push --set-upstream",
keybinding_target.clone(),
window,
cx,
)
},
)
}
pub fn render_republish_button(
keybinding_target: Option<FocusHandle>,
id: SharedString,
) -> SplitButton {
SplitButton::new(
id,
"Republish",
0,
0,
Some(IconName::ArrowUpFromLine),
move |_, window, cx| {
window.dispatch_action(Box::new(git::Push), cx);
},
move |window, cx| {
git_action_tooltip(
"Re-publish branch to remote",
&git::Push,
"git push --set-upstream",
keybinding_target.clone(),
window,
cx,
)
},
)
}
fn git_action_tooltip(
label: impl Into<SharedString>,
action: &dyn Action,
command: impl Into<SharedString>,
focus_handle: Option<FocusHandle>,
window: &mut Window,
cx: &mut App,
) -> AnyView {
let label = label.into();
let command = command.into();
if let Some(handle) = focus_handle {
Tooltip::with_meta_in(
label.clone(),
Some(action),
command.clone(),
&handle,
window,
cx,
)
} else {
Tooltip::with_meta(label.clone(), Some(action), command.clone(), window, cx)
}
}
fn render_git_action_menu(id: impl Into<ElementId>) -> impl IntoElement {
PopoverMenu::new(id.into())
.trigger(
ui::ButtonLike::new_rounded_right("split-button-right")
.layer(ui::ElevationIndex::ModalSurface)
.size(ui::ButtonSize::None)
.child(
div()
.px_1()
.child(Icon::new(IconName::ChevronDownSmall).size(IconSize::XSmall)),
),
)
.menu(move |window, cx| {
Some(ContextMenu::build(window, cx, |context_menu, _, _| {
context_menu
.action("Fetch", git::Fetch.boxed_clone())
.action("Pull", git::Pull.boxed_clone())
.separator()
.action("Push", git::Push.boxed_clone())
.action("Force Push", git::ForcePush.boxed_clone())
}))
})
.anchor(Corner::TopRight)
}
#[derive(IntoElement)]
pub struct SplitButton {
pub left: ButtonLike,
pub right: AnyElement,
}
impl SplitButton {
fn new(
id: impl Into<SharedString>,
left_label: impl Into<SharedString>,
ahead_count: usize,
behind_count: usize,
left_icon: Option<IconName>,
left_on_click: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static,
tooltip: impl Fn(&mut Window, &mut App) -> AnyView + 'static,
) -> Self {
let id = id.into();
fn count(count: usize) -> impl IntoElement {
h_flex()
.ml_neg_px()
.h(rems(0.875))
.items_center()
.overflow_hidden()
.px_0p5()
.child(
Label::new(count.to_string())
.size(LabelSize::XSmall)
.line_height_style(LineHeightStyle::UiLabel),
)
}
let should_render_counts = left_icon.is_none() && (ahead_count > 0 || behind_count > 0);
let left = ui::ButtonLike::new_rounded_left(ElementId::Name(
format!("split-button-left-{}", id).into(),
))
.layer(ui::ElevationIndex::ModalSurface)
.size(ui::ButtonSize::Compact)
.when(should_render_counts, |this| {
this.child(
h_flex()
.ml_neg_0p5()
.mr_1()
.when(behind_count > 0, |this| {
this.child(Icon::new(IconName::ArrowDown).size(IconSize::XSmall))
.child(count(behind_count))
})
.when(ahead_count > 0, |this| {
this.child(Icon::new(IconName::ArrowUp).size(IconSize::XSmall))
.child(count(ahead_count))
}),
)
})
.when_some(left_icon, |this, left_icon| {
this.child(
h_flex()
.ml_neg_0p5()
.mr_1()
.child(Icon::new(left_icon).size(IconSize::XSmall)),
)
})
.child(
div()
.child(Label::new(left_label).size(LabelSize::Small))
.mr_0p5(),
)
.on_click(left_on_click)
.tooltip(tooltip);
let right = render_git_action_menu(ElementId::Name(
format!("split-button-right-{}", id).into(),
))
.into_any_element();
Self { left, right }
}
}
impl RenderOnce for SplitButton {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
h_flex()
.rounded_sm()
.border_1()
.border_color(cx.theme().colors().text_muted.alpha(0.12))
.child(div().flex_grow().child(self.left))
.child(
div()
.h_full()
.w_px()
.bg(cx.theme().colors().text_muted.alpha(0.16)),
)
.child(self.right)
.bg(ElevationIndex::Surface.on_elevation_bg(cx))
.shadow(smallvec::smallvec![BoxShadow {
color: hsla(0.0, 0.0, 0.0, 0.16),
offset: point(px(0.), px(1.)),
blur_radius: px(0.),
spread_radius: px(0.),
}])
}
}
}

View File

@@ -10,8 +10,7 @@ use editor::{
use feature_flags::FeatureFlagViewExt;
use futures::StreamExt;
use git::{
repository::Branch, status::FileStatus, Commit, StageAll, StageAndNext, ToggleStaged,
UnstageAll, UnstageAndNext,
status::FileStatus, Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
};
use gpui::{
actions, Action, AnyElement, AnyView, App, AppContext as _, AsyncWindowContext, Entity,
@@ -25,27 +24,27 @@ use project::{
};
use std::any::{Any, TypeId};
use theme::ActiveTheme;
use ui::{prelude::*, vertical_divider, KeyBinding, Tooltip};
use ui::{prelude::*, vertical_divider, Tooltip};
use util::ResultExt as _;
use workspace::{
item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams},
searchable::SearchableItemHandle,
CloseActiveItem, ItemNavHistory, SerializableItem, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView, Workspace,
ItemNavHistory, SerializableItem, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
Workspace,
};
actions!(git, [Diff, Add]);
actions!(git, [Diff]);
pub struct ProjectDiff {
project: Entity<Project>,
multibuffer: Entity<MultiBuffer>,
editor: Entity<Editor>,
project: Entity<Project>,
git_store: Entity<GitStore>,
workspace: WeakEntity<Workspace>,
focus_handle: FocusHandle,
update_needed: postage::watch::Sender<()>,
pending_scroll: Option<PathKey>,
current_branch: Option<Branch>,
_task: Task<Result<()>>,
_subscription: Subscription,
}
@@ -71,9 +70,6 @@ impl ProjectDiff {
let Some(window) = window else { return };
cx.when_flag_enabled::<feature_flags::GitUiFeatureFlag>(window, |workspace, _, _cx| {
workspace.register_action(Self::deploy);
workspace.register_action(|workspace, _: &Add, window, cx| {
Self::deploy(workspace, &Diff, window, cx);
});
});
workspace::register_serializable_item::<ProjectDiff>(cx);
@@ -142,7 +138,6 @@ impl ProjectDiff {
window,
cx,
);
diff_display_editor.disable_inline_diagnostics();
diff_display_editor.set_expand_all_diff_hunks(cx);
diff_display_editor.register_addon(GitPanelAddon {
workspace: workspace.downgrade(),
@@ -183,7 +178,6 @@ impl ProjectDiff {
multibuffer,
pending_scroll: None,
update_needed: send,
current_branch: None,
_task: worker,
_subscription: git_store_subscription,
}
@@ -449,20 +443,6 @@ impl ProjectDiff {
mut cx: AsyncWindowContext,
) -> Result<()> {
while let Some(_) = recv.next().await {
this.update(&mut cx, |this, cx| {
let new_branch =
this.git_store
.read(cx)
.active_repository()
.and_then(|active_repository| {
active_repository.read(cx).current_branch().cloned()
});
if new_branch != this.current_branch {
this.current_branch = new_branch;
cx.notify();
}
})?;
let buffers_to_load = this.update(&mut cx, |this, cx| this.load_buffers(cx))?;
for buffer_to_load in buffers_to_load {
if let Some(buffer) = buffer_to_load.await.log_err() {
@@ -661,11 +641,9 @@ impl Item for ProjectDiff {
}
impl Render for ProjectDiff {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let is_empty = self.multibuffer.read(cx).is_empty();
let can_push_and_pull = crate::can_push_and_pull(&self.project, cx);
div()
.track_focus(&self.focus_handle)
.key_context(if is_empty { "EmptyPane" } else { "GitDiff" })
@@ -675,61 +653,7 @@ impl Render for ProjectDiff {
.justify_center()
.size_full()
.when(is_empty, |el| {
el.child(
v_flex()
.gap_1()
.child(
h_flex()
.justify_around()
.child(Label::new("No uncommitted changes")),
)
.when(can_push_and_pull, |this_div| {
let keybinding_focus_handle = self.focus_handle(cx);
this_div.when_some(self.current_branch.as_ref(), |this_div, branch| {
let remote_button = crate::render_remote_button(
"project-diff-remote-button",
branch,
Some(keybinding_focus_handle.clone()),
false,
);
match remote_button {
Some(button) => {
this_div.child(h_flex().justify_around().child(button))
}
None => this_div.child(
h_flex()
.justify_around()
.child(Label::new("Remote up to date")),
),
}
})
})
.map(|this| {
let keybinding_focus_handle = self.focus_handle(cx).clone();
this.child(
h_flex().justify_around().mt_1().child(
Button::new("project-diff-close-button", "Close")
// .style(ButtonStyle::Transparent)
.key_binding(KeyBinding::for_action_in(
&CloseActiveItem::default(),
&keybinding_focus_handle,
window,
cx,
))
.on_click(move |_, window, cx| {
window.focus(&keybinding_focus_handle);
window.dispatch_action(
Box::new(CloseActiveItem::default()),
cx,
);
}),
),
)
}),
)
el.child(Label::new("No uncommitted changes"))
})
.when(!is_empty, |el| el.child(self.editor.clone()))
}

View File

@@ -634,7 +634,7 @@ impl Display for ColorSpace {
}
/// A background color, which can be either a solid color or a linear gradient.
#[derive(Clone, Copy, PartialEq)]
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(C)]
pub struct Background {
pub(crate) tag: BackgroundTag,
@@ -646,28 +646,6 @@ pub struct Background {
pad: u32,
}
impl std::fmt::Debug for Background {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self.tag {
BackgroundTag::Solid => write!(f, "Solid({:?})", self.solid),
BackgroundTag::LinearGradient => {
write!(
f,
"LinearGradient({}, {:?}, {:?})",
self.gradient_angle_or_pattern_height, self.colors[0], self.colors[1]
)
}
BackgroundTag::PatternSlash => {
write!(
f,
"PatternSlash({:?}, {})",
self.solid, self.gradient_angle_or_pattern_height
)
}
}
}
}
impl Eq for Background {}
impl Default for Background {
fn default() -> Self {

View File

@@ -49,7 +49,7 @@ use std::{
num::NonZeroU32,
ops::{Deref, DerefMut, Range},
path::{Path, PathBuf},
rc, str,
str,
sync::{Arc, LazyLock},
time::{Duration, Instant},
vec,
@@ -125,7 +125,6 @@ pub struct Buffer {
/// Memoize calls to has_changes_since(saved_version).
/// The contents of a cell are (self.version, has_changes) at the time of a last call.
has_unsaved_edits: Cell<(clock::Global, bool)>,
change_bits: Vec<rc::Weak<Cell<bool>>>,
_subscriptions: Vec<gpui::Subscription>,
}
@@ -979,7 +978,6 @@ impl Buffer {
completion_triggers_timestamp: Default::default(),
deferred_ops: OperationQueue::new(),
has_conflict: false,
change_bits: Default::default(),
_subscriptions: Vec::new(),
}
}
@@ -1254,7 +1252,6 @@ impl Buffer {
self.non_text_state_update_count += 1;
self.syntax_map.lock().clear(&self.text);
self.language = language;
self.was_changed();
self.reparse(cx);
cx.emit(BufferEvent::LanguageChanged);
}
@@ -1289,7 +1286,6 @@ impl Buffer {
.set((self.saved_version().clone(), false));
self.has_conflict = false;
self.saved_mtime = mtime;
self.was_changed();
cx.emit(BufferEvent::Saved);
cx.notify();
}
@@ -1385,7 +1381,6 @@ impl Buffer {
self.file = Some(new_file);
if file_changed {
self.was_changed();
self.non_text_state_update_count += 1;
if was_dirty != self.is_dirty() {
cx.emit(BufferEvent::DirtyChanged);
@@ -1963,23 +1958,6 @@ impl Buffer {
self.text.subscribe()
}
/// Adds a bit to the list of bits that are set when the buffer's text changes.
///
/// This allows downstream code to check if the buffer's text has changed without
/// waiting for an effect cycle, which would be required if using eents.
pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
self.change_bits.push(bit);
}
fn was_changed(&mut self) {
self.change_bits.retain(|change_bit| {
change_bit.upgrade().map_or(false, |bit| {
bit.replace(true);
true
})
});
}
/// Starts a transaction, if one is not already in-progress. When undoing or
/// redoing edits, all of the edits performed within a transaction are undone
/// or redone together.
@@ -2390,7 +2368,6 @@ impl Buffer {
}
self.text.apply_ops(buffer_ops);
self.deferred_ops.insert(deferred_ops);
self.was_changed();
self.flush_deferred_ops(cx);
self.did_edit(&old_version, was_dirty, cx);
// Notify independently of whether the buffer was edited as the operations could include a
@@ -2525,8 +2502,7 @@ impl Buffer {
}
}
fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
self.was_changed();
fn send_operation(&self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
cx.emit(BufferEvent::Operation {
operation,
is_local,

View File

@@ -736,7 +736,7 @@ impl Element for MarkdownElement {
markdown_end,
);
}
_ => log::debug!("unsupported markdown tag {:?}", tag),
_ => log::error!("unsupported markdown tag {:?}", tag),
}
}
MarkdownEvent::End(tag) => match tag {
@@ -853,7 +853,7 @@ impl Element for MarkdownElement {
MarkdownTagEnd::TableCell => {
builder.pop_div();
}
_ => log::debug!("unsupported markdown tag end: {:?}", tag),
_ => log::error!("unsupported markdown tag end: {:?}", tag),
},
MarkdownEvent::Text(parsed) => {
builder.push_text(parsed, range.start);

View File

@@ -31,7 +31,7 @@ use smol::future::yield_now;
use std::{
any::type_name,
borrow::Cow,
cell::{Cell, Ref, RefCell},
cell::{Ref, RefCell},
cmp, fmt,
future::Future,
io,
@@ -39,7 +39,6 @@ use std::{
mem,
ops::{Range, RangeBounds, Sub},
path::Path,
rc::Rc,
str,
sync::Arc,
time::{Duration, Instant},
@@ -77,7 +76,6 @@ pub struct MultiBuffer {
history: History,
title: Option<String>,
capability: Capability,
buffer_changed_since_sync: Rc<Cell<bool>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -123,7 +121,6 @@ pub enum Event {
Discarded,
DirtyChanged,
DiagnosticsUpdated,
BufferDiffChanged,
}
/// A diff hunk, representing a range of consequent lines in a multibuffer.
@@ -256,7 +253,6 @@ impl DiffState {
if let Some(changed_range) = changed_range.clone() {
this.buffer_diff_changed(diff, changed_range, cx)
}
cx.emit(Event::BufferDiffChanged);
}
BufferDiffEvent::LanguageChanged => this.buffer_diff_language_changed(diff, cx),
_ => {}
@@ -570,7 +566,6 @@ impl MultiBuffer {
capability,
title: None,
buffers_by_path: Default::default(),
buffer_changed_since_sync: Default::default(),
history: History {
next_transaction_id: clock::Lamport::default(),
undo_stack: Vec::new(),
@@ -590,7 +585,6 @@ impl MultiBuffer {
subscriptions: Default::default(),
singleton: false,
capability,
buffer_changed_since_sync: Default::default(),
history: History {
next_transaction_id: Default::default(),
undo_stack: Default::default(),
@@ -604,11 +598,7 @@ impl MultiBuffer {
pub fn clone(&self, new_cx: &mut Context<Self>) -> Self {
let mut buffers = HashMap::default();
let buffer_changed_since_sync = Rc::new(Cell::new(false));
for (buffer_id, buffer_state) in self.buffers.borrow().iter() {
buffer_state.buffer.update(new_cx, |buffer, _| {
buffer.record_changes(Rc::downgrade(&buffer_changed_since_sync));
});
buffers.insert(
*buffer_id,
BufferState {
@@ -637,7 +627,6 @@ impl MultiBuffer {
capability: self.capability,
history: self.history.clone(),
title: self.title.clone(),
buffer_changed_since_sync,
}
}
@@ -1737,25 +1726,19 @@ impl MultiBuffer {
self.sync(cx);
let buffer_id = buffer.read(cx).remote_id();
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer_id = buffer_snapshot.remote_id();
let mut buffers = self.buffers.borrow_mut();
let buffer_state = buffers.entry(buffer_id).or_insert_with(|| {
self.buffer_changed_since_sync.replace(true);
buffer.update(cx, |buffer, _| {
buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync));
});
BufferState {
last_version: buffer_snapshot.version().clone(),
last_non_text_state_update_count: buffer_snapshot.non_text_state_update_count(),
excerpts: Default::default(),
_subscriptions: [
cx.observe(&buffer, |_, _, cx| cx.notify()),
cx.subscribe(&buffer, Self::on_buffer_event),
],
buffer: buffer.clone(),
}
let buffer_state = buffers.entry(buffer_id).or_insert_with(|| BufferState {
last_version: buffer_snapshot.version().clone(),
last_non_text_state_update_count: buffer_snapshot.non_text_state_update_count(),
excerpts: Default::default(),
_subscriptions: [
cx.observe(&buffer, |_, _, cx| cx.notify()),
cx.subscribe(&buffer, Self::on_buffer_event),
],
buffer: buffer.clone(),
});
let mut snapshot = self.snapshot.borrow_mut();
@@ -2251,7 +2234,6 @@ impl MultiBuffer {
cx: &mut Context<Self>,
) {
self.sync(cx);
self.buffer_changed_since_sync.replace(true);
let diff = diff.read(cx);
let buffer_id = diff.buffer_id;
@@ -2730,11 +2712,6 @@ impl MultiBuffer {
}
fn sync(&self, cx: &App) {
let changed = self.buffer_changed_since_sync.replace(false);
if !changed {
return;
}
let mut snapshot = self.snapshot.borrow_mut();
let mut excerpts_to_edit = Vec::new();
let mut non_text_state_updated = false;
@@ -3546,7 +3523,10 @@ impl MultiBufferSnapshot {
) -> impl Iterator<Item = MultiBufferDiffHunk> + '_ {
let query_range = range.start.to_point(self)..range.end.to_point(self);
self.lift_buffer_metadata(query_range.clone(), move |buffer, buffer_range| {
let diff = self.diffs.get(&buffer.remote_id())?;
let Some(diff) = self.diffs.get(&buffer.remote_id()) else {
log::debug!("no diff found for {:?}", buffer.remote_id());
return None;
};
let buffer_start = buffer.anchor_before(buffer_range.start);
let buffer_end = buffer.anchor_after(buffer_range.end);
Some(

View File

@@ -3,7 +3,7 @@ use std::{path::Path, sync::Arc};
use util::ResultExt;
use collections::HashMap;
use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task};
use gpui::{App, AppContext as _, Context, Entity, Task};
use settings::Settings as _;
use worktree::WorktreeId;
@@ -19,12 +19,6 @@ pub struct ProjectEnvironment {
environment_error_messages: HashMap<WorktreeId, EnvironmentErrorMessage>,
}
pub enum ProjectEnvironmentEvent {
ErrorsUpdated,
}
impl EventEmitter<ProjectEnvironmentEvent> for ProjectEnvironment {}
impl ProjectEnvironment {
pub fn new(
worktree_store: &Entity<WorktreeStore>,
@@ -71,13 +65,8 @@ impl ProjectEnvironment {
self.environment_error_messages.iter()
}
pub(crate) fn remove_environment_error(
&mut self,
worktree_id: WorktreeId,
cx: &mut Context<Self>,
) {
pub(crate) fn remove_environment_error(&mut self, worktree_id: WorktreeId) {
self.environment_error_messages.remove(&worktree_id);
cx.emit(ProjectEnvironmentEvent::ErrorsUpdated);
}
/// Returns the project environment, if possible.
@@ -169,9 +158,8 @@ impl ProjectEnvironment {
}
if let Some(error) = error_message {
this.update(&mut cx, |this, cx| {
this.update(&mut cx, |this, _| {
this.environment_error_messages.insert(worktree_id, error);
cx.emit(ProjectEnvironmentEvent::ErrorsUpdated)
})
.log_err();
}

View File

@@ -1353,7 +1353,7 @@ impl Repository {
let to_stage = self
.repository_entry
.status()
.filter(|entry| !entry.status.staging().is_fully_staged())
.filter(|entry| !entry.status.is_staged().unwrap_or(false))
.map(|entry| entry.repo_path.clone())
.collect();
self.stage_entries(to_stage, cx)
@@ -1363,7 +1363,7 @@ impl Repository {
let to_unstage = self
.repository_entry
.status()
.filter(|entry| entry.status.staging().has_staged())
.filter(|entry| entry.status.is_staged().unwrap_or(true))
.map(|entry| entry.repo_path.clone())
.collect();
self.unstage_entries(to_unstage, cx)

View File

@@ -2,9 +2,9 @@ mod signature_help;
use crate::{
lsp_store::{LocalLspStore, LspStore},
CodeAction, CompletionSource, CoreCompletion, DocumentHighlight, Hover, HoverBlock,
ActionVariant, CodeAction, CoreCompletion, DocumentHighlight, Hover, HoverBlock,
HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintLabelPartTooltip,
InlayHintTooltip, Location, LocationLink, LspAction, MarkupContent, PrepareRenameResponse,
InlayHintTooltip, Location, LocationLink, MarkupContent, PrepareRenameResponse,
ProjectTransaction, ResolveState,
};
use anyhow::{anyhow, Context as _, Result};
@@ -2011,11 +2011,9 @@ impl LspCommand for GetCompletions {
CoreCompletion {
old_range,
new_text,
source: CompletionSource::Lsp {
server_id,
lsp_completion: Box::new(lsp_completion),
resolved: false,
},
server_id,
lsp_completion,
resolved: false,
}
})
.collect())
@@ -2258,11 +2256,11 @@ impl LspCommand for GetCodeActions {
return None;
}
}
LspAction::Action(Box::new(lsp_action))
ActionVariant::Action(Box::new(lsp_action))
}
lsp::CodeActionOrCommand::Command(command) => {
if available_commands.contains(&command.command) {
LspAction::Command(command)
ActionVariant::Command(command)
} else {
return None;
}

View File

@@ -14,8 +14,8 @@ use crate::{
toolchain_store::{EmptyToolchainStore, ToolchainStoreEvent},
worktree_store::{WorktreeStore, WorktreeStoreEvent},
yarn::YarnPathStore,
CodeAction, Completion, CompletionSource, CoreCompletion, Hover, InlayHint, LspAction,
ProjectItem as _, ProjectPath, ProjectTransaction, ResolveState, Symbol, ToolchainStore,
ActionVariant, CodeAction, Completion, CoreCompletion, Hover, InlayHint, ProjectItem as _,
ProjectPath, ProjectTransaction, ResolveState, Symbol, ToolchainStore,
};
use anyhow::{anyhow, Context as _, Result};
use async_trait::async_trait;
@@ -1629,7 +1629,7 @@ impl LocalLspStore {
action: &mut CodeAction,
) -> anyhow::Result<()> {
match &mut action.lsp_action {
LspAction::Action(lsp_action) => {
ActionVariant::Action(lsp_action) => {
if GetCodeActions::can_resolve_actions(&lang_server.capabilities())
&& lsp_action.data.is_some()
&& (lsp_action.command.is_none() || lsp_action.edit.is_none())
@@ -1641,7 +1641,7 @@ impl LocalLspStore {
);
}
}
LspAction::Command(_) => {}
ActionVariant::Command(_) => {}
}
anyhow::Ok(())
}
@@ -4401,33 +4401,26 @@ impl LspStore {
let mut did_resolve = false;
if let Some((client, project_id)) = client {
for completion_index in completion_indices {
let server_id = {
let completion = &completions.borrow()[completion_index];
completion.source.server_id()
};
if let Some(server_id) = server_id {
if Self::resolve_completion_remote(
project_id,
server_id,
buffer_id,
completions.clone(),
completion_index,
client.clone(),
)
.await
.log_err()
.is_some()
{
did_resolve = true;
}
let server_id = completions.borrow()[completion_index].server_id;
if Self::resolve_completion_remote(
project_id,
server_id,
buffer_id,
completions.clone(),
completion_index,
client.clone(),
)
.await
.log_err()
.is_some()
{
did_resolve = true;
}
}
} else {
for completion_index in completion_indices {
let Some(server_id) = completions.borrow()[completion_index].source.server_id()
else {
continue;
};
let server_id = completions.borrow()[completion_index].server_id;
let server_and_adapter = this
.read_with(&cx, |lsp_store, _| {
@@ -4487,19 +4480,10 @@ impl LspStore {
let request = {
let completion = &completions.borrow()[completion_index];
match &completion.source {
CompletionSource::Lsp {
lsp_completion,
resolved,
..
} => {
if *resolved {
return Ok(());
}
server.request::<lsp::request::ResolveCompletionItem>(*lsp_completion.clone())
}
CompletionSource::Custom => return Ok(()),
if completion.resolved {
return Ok(());
}
server.request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion.clone())
};
let completion_item = request.await?;
@@ -4524,20 +4508,15 @@ impl LspStore {
// vtsls might change the type of completion after resolution.
let mut completions = completions.borrow_mut();
let completion = &mut completions[completion_index];
if let Some(lsp_completion) = completion.source.lsp_completion_mut() {
if completion_item.insert_text_format != lsp_completion.insert_text_format {
lsp_completion.insert_text_format = completion_item.insert_text_format;
}
if completion_item.insert_text_format != completion.lsp_completion.insert_text_format {
completion.lsp_completion.insert_text_format = completion_item.insert_text_format;
}
}
let mut completions = completions.borrow_mut();
let completion = &mut completions[completion_index];
completion.source = CompletionSource::Lsp {
lsp_completion: Box::new(completion_item),
resolved: true,
server_id: server.server_id(),
};
completion.lsp_completion = completion_item;
completion.resolved = true;
Ok(())
}
@@ -4548,13 +4527,9 @@ impl LspStore {
completion_index: usize,
) -> Result<()> {
let completion_item = completions.borrow()[completion_index]
.source
.lsp_completion()
.cloned();
if let Some(lsp_documentation) = completion_item
.as_ref()
.and_then(|completion_item| completion_item.documentation.clone())
{
.lsp_completion
.clone();
if let Some(lsp_documentation) = completion_item.documentation.clone() {
let mut completions = completions.borrow_mut();
let completion = &mut completions[completion_index];
completion.documentation = Some(lsp_documentation.into());
@@ -4564,33 +4539,25 @@ impl LspStore {
completion.documentation = Some(CompletionDocumentation::Undocumented);
}
let mut new_label = match completion_item {
Some(completion_item) => {
// NB: Zed does not have `details` inside the completion resolve capabilities, but certain language servers violate the spec and do not return `details` immediately, e.g. https://github.com/yioneko/vtsls/issues/213
// So we have to update the label here anyway...
let language = snapshot.language();
match language {
Some(language) => {
adapter
.labels_for_completions(&[completion_item.clone()], language)
.await?
}
None => Vec::new(),
}
.pop()
.flatten()
.unwrap_or_else(|| {
CodeLabel::fallback_for_completion(
&completion_item,
language.map(|language| language.as_ref()),
)
})
// NB: Zed does not have `details` inside the completion resolve capabilities, but certain language servers violate the spec and do not return `details` immediately, e.g. https://github.com/yioneko/vtsls/issues/213
// So we have to update the label here anyway...
let language = snapshot.language();
let mut new_label = match language {
Some(language) => {
adapter
.labels_for_completions(&[completion_item.clone()], language)
.await?
}
None => CodeLabel::plain(
completions.borrow()[completion_index].new_text.clone(),
None,
),
};
None => Vec::new(),
}
.pop()
.flatten()
.unwrap_or_else(|| {
CodeLabel::fallback_for_completion(
&completion_item,
language.map(|language| language.as_ref()),
)
});
ensure_uniform_list_compatible_label(&mut new_label);
let mut completions = completions.borrow_mut();
@@ -4622,19 +4589,12 @@ impl LspStore {
) -> Result<()> {
let lsp_completion = {
let completion = &completions.borrow()[completion_index];
match &completion.source {
CompletionSource::Lsp {
lsp_completion,
resolved,
..
} => {
if *resolved {
return Ok(());
}
serde_json::to_string(lsp_completion).unwrap().into_bytes()
}
CompletionSource::Custom => return Ok(()),
if completion.resolved {
return Ok(());
}
serde_json::to_string(&completion.lsp_completion)
.unwrap()
.into_bytes()
};
let request = proto::ResolveCompletionDocumentation {
project_id,
@@ -4662,11 +4622,8 @@ impl LspStore {
let mut completions = completions.borrow_mut();
let completion = &mut completions[completion_index];
completion.documentation = Some(documentation);
completion.source = CompletionSource::Lsp {
server_id,
lsp_completion,
resolved: true,
};
completion.lsp_completion = lsp_completion;
completion.resolved = true;
let old_range = response
.old_start
@@ -4702,12 +4659,17 @@ impl LspStore {
completion: Some(Self::serialize_completion(&CoreCompletion {
old_range: completion.old_range,
new_text: completion.new_text,
source: completion.source,
server_id: completion.server_id,
lsp_completion: completion.lsp_completion,
resolved: completion.resolved,
})),
}
};
if let Some(transaction) = client.request(request).await?.transaction {
let response = client.request(request).await?;
completions.borrow_mut()[completion_index].resolved = true;
if let Some(transaction) = response.transaction {
let transaction = language::proto::deserialize_transaction(transaction)?;
buffer_handle
.update(&mut cx, |buffer, _| {
@@ -4725,9 +4687,8 @@ impl LspStore {
}
})
} else {
let server_id = completions.borrow()[completion_index].server_id;
let Some(server) = buffer_handle.update(cx, |buffer, cx| {
let completion = &completions.borrow()[completion_index];
let server_id = completion.source.server_id()?;
Some(
self.language_server_for_local_buffer(buffer, server_id, cx)?
.1
@@ -4748,11 +4709,7 @@ impl LspStore {
.await
.context("resolving completion")?;
let completion = completions.borrow()[completion_index].clone();
let additional_text_edits = completion
.source
.lsp_completion()
.as_ref()
.and_then(|lsp_completion| lsp_completion.additional_text_edits.clone());
let additional_text_edits = completion.lsp_completion.additional_text_edits;
if let Some(edits) = additional_text_edits {
let edits = this
.update(&mut cx, |this, cx| {
@@ -6710,19 +6667,33 @@ impl LspStore {
cx,
);
}
lsp::WorkDoneProgress::Report(report) => self.on_lsp_work_progress(
language_server_id,
token,
LanguageServerProgress {
title: None,
is_disk_based_diagnostics_progress,
is_cancellable: report.cancellable.unwrap_or(false),
message: report.message,
percentage: report.percentage.map(|p| p as usize),
last_update_at: cx.background_executor().now(),
},
cx,
),
lsp::WorkDoneProgress::Report(report) => {
if self.on_lsp_work_progress(
language_server_id,
token.clone(),
LanguageServerProgress {
title: None,
is_disk_based_diagnostics_progress,
is_cancellable: report.cancellable.unwrap_or(false),
message: report.message.clone(),
percentage: report.percentage.map(|p| p as usize),
last_update_at: cx.background_executor().now(),
},
cx,
) {
cx.emit(LspStoreEvent::LanguageServerUpdate {
language_server_id,
message: proto::update_language_server::Variant::WorkProgress(
proto::LspWorkProgress {
token,
message: report.message,
percentage: report.percentage,
is_cancellable: report.cancellable,
},
),
})
}
}
lsp::WorkDoneProgress::End(_) => {
language_server_status.progress_tokens.remove(&token);
self.on_lsp_work_end(language_server_id, token.clone(), cx);
@@ -6762,13 +6733,13 @@ impl LspStore {
token: String,
progress: LanguageServerProgress,
cx: &mut Context<Self>,
) {
let mut did_update = false;
) -> bool {
if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
match status.pending_work.entry(token.clone()) {
match status.pending_work.entry(token) {
btree_map::Entry::Vacant(entry) => {
entry.insert(progress.clone());
did_update = true;
entry.insert(progress);
cx.notify();
return true;
}
btree_map::Entry::Occupied(mut entry) => {
let entry = entry.get_mut();
@@ -6777,7 +6748,7 @@ impl LspStore {
{
entry.last_update_at = progress.last_update_at;
if progress.message.is_some() {
entry.message = progress.message.clone();
entry.message = progress.message;
}
if progress.percentage.is_some() {
entry.percentage = progress.percentage;
@@ -6785,25 +6756,14 @@ impl LspStore {
if progress.is_cancellable != entry.is_cancellable {
entry.is_cancellable = progress.is_cancellable;
}
did_update = true;
cx.notify();
return true;
}
}
}
}
if did_update {
cx.emit(LspStoreEvent::LanguageServerUpdate {
language_server_id,
message: proto::update_language_server::Variant::WorkProgress(
proto::LspWorkProgress {
token,
message: progress.message,
percentage: progress.percentage.map(|p| p as u32),
is_cancellable: Some(progress.is_cancellable),
},
),
})
}
false
}
fn on_lsp_work_end(
@@ -7182,7 +7142,8 @@ impl LspStore {
Rc::new(RefCell::new(Box::new([Completion {
old_range: completion.old_range,
new_text: completion.new_text,
source: completion.source,
lsp_completion: completion.lsp_completion,
server_id: completion.server_id,
documentation: None,
label: CodeLabel {
text: Default::default(),
@@ -7190,6 +7151,7 @@ impl LspStore {
filter_range: Default::default(),
},
confirm: None,
resolved: completion.resolved,
}]))),
0,
false,
@@ -8153,33 +8115,13 @@ impl LspStore {
}
pub(crate) fn serialize_completion(completion: &CoreCompletion) -> proto::Completion {
let (source, server_id, lsp_completion, resolved) = match &completion.source {
CompletionSource::Lsp {
server_id,
lsp_completion,
resolved,
} => (
proto::completion::Source::Lsp as i32,
server_id.0 as u64,
serde_json::to_vec(lsp_completion).unwrap(),
*resolved,
),
CompletionSource::Custom => (
proto::completion::Source::Custom as i32,
0,
Vec::new(),
true,
),
};
proto::Completion {
old_start: Some(serialize_anchor(&completion.old_range.start)),
old_end: Some(serialize_anchor(&completion.old_range.end)),
new_text: completion.new_text.clone(),
server_id,
lsp_completion,
resolved,
source,
server_id: completion.server_id.0 as u64,
lsp_completion: serde_json::to_vec(&completion.lsp_completion).unwrap(),
resolved: completion.resolved,
}
}
@@ -8192,28 +8134,24 @@ impl LspStore {
.old_end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid old end"))?;
let lsp_completion = serde_json::from_slice(&completion.lsp_completion)?;
Ok(CoreCompletion {
old_range: old_start..old_end,
new_text: completion.new_text,
source: match proto::completion::Source::from_i32(completion.source) {
Some(proto::completion::Source::Custom) => CompletionSource::Custom,
Some(proto::completion::Source::Lsp) => CompletionSource::Lsp {
server_id: LanguageServerId::from_proto(completion.server_id),
lsp_completion: serde_json::from_slice(&completion.lsp_completion)?,
resolved: completion.resolved,
},
_ => anyhow::bail!("Unexpected completion source {}", completion.source),
},
server_id: LanguageServerId(completion.server_id as usize),
lsp_completion,
resolved: completion.resolved,
})
}
pub(crate) fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
let (kind, lsp_action) = match &action.lsp_action {
LspAction::Action(code_action) => (
ActionVariant::Action(code_action) => (
proto::code_action::Kind::Action as i32,
serde_json::to_vec(code_action).unwrap(),
),
LspAction::Command(command) => (
ActionVariant::Command(command) => (
proto::code_action::Kind::Command as i32,
serde_json::to_vec(command).unwrap(),
),
@@ -8239,10 +8177,10 @@ impl LspStore {
.ok_or_else(|| anyhow!("invalid end"))?;
let lsp_action = match proto::code_action::Kind::from_i32(action.kind) {
Some(proto::code_action::Kind::Action) => {
LspAction::Action(serde_json::from_slice(&action.lsp_action)?)
ActionVariant::Action(serde_json::from_slice(&action.lsp_action)?)
}
Some(proto::code_action::Kind::Command) => {
LspAction::Command(serde_json::from_slice(&action.lsp_action)?)
ActionVariant::Command(serde_json::from_slice(&action.lsp_action)?)
}
None => anyhow::bail!("Unknown action kind {}", action.kind),
};
@@ -8280,23 +8218,17 @@ fn remove_empty_hover_blocks(mut hover: Hover) -> Option<Hover> {
}
async fn populate_labels_for_completions(
new_completions: Vec<CoreCompletion>,
mut new_completions: Vec<CoreCompletion>,
language: Option<Arc<Language>>,
lsp_adapter: Option<Arc<CachedLspAdapter>>,
completions: &mut Vec<Completion>,
) {
let lsp_completions = new_completions
.iter()
.filter_map(|new_completion| {
if let CompletionSource::Lsp { lsp_completion, .. } = &new_completion.source {
Some(*lsp_completion.clone())
} else {
None
}
})
.iter_mut()
.map(|completion| mem::take(&mut completion.lsp_completion))
.collect::<Vec<_>>();
let mut labels = if let Some((language, lsp_adapter)) = language.as_ref().zip(lsp_adapter) {
let labels = if let Some((language, lsp_adapter)) = language.as_ref().zip(lsp_adapter) {
lsp_adapter
.labels_for_completions(&lsp_completions, language)
.await
@@ -8304,45 +8236,34 @@ async fn populate_labels_for_completions(
.unwrap_or_default()
} else {
Vec::new()
}
.into_iter()
.fuse();
};
for completion in new_completions {
match &completion.source {
CompletionSource::Lsp { lsp_completion, .. } => {
let documentation = if let Some(docs) = lsp_completion.documentation.clone() {
Some(docs.into())
} else {
None
};
for ((completion, lsp_completion), label) in new_completions
.into_iter()
.zip(lsp_completions)
.zip(labels.into_iter().chain(iter::repeat(None)))
{
let documentation = if let Some(docs) = lsp_completion.documentation.clone() {
Some(docs.into())
} else {
None
};
let mut label = labels.next().flatten().unwrap_or_else(|| {
CodeLabel::fallback_for_completion(&lsp_completion, language.as_deref())
});
ensure_uniform_list_compatible_label(&mut label);
completions.push(Completion {
label,
documentation,
old_range: completion.old_range,
new_text: completion.new_text,
source: completion.source,
confirm: None,
})
}
CompletionSource::Custom => {
let mut label = CodeLabel::plain(completion.new_text.clone(), None);
ensure_uniform_list_compatible_label(&mut label);
completions.push(Completion {
label,
documentation: None,
old_range: completion.old_range,
new_text: completion.new_text,
source: completion.source,
confirm: None,
})
}
}
let mut label = label.unwrap_or_else(|| {
CodeLabel::fallback_for_completion(&lsp_completion, language.as_deref())
});
ensure_uniform_list_compatible_label(&mut label);
completions.push(Completion {
old_range: completion.old_range,
new_text: completion.new_text,
label,
server_id: completion.server_id,
documentation,
lsp_completion,
confirm: None,
resolved: false,
})
}
}

View File

@@ -22,7 +22,7 @@ mod project_tests;
mod direnv;
mod environment;
use buffer_diff::BufferDiff;
pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent};
pub use environment::EnvironmentErrorMessage;
use git::Repository;
pub mod search_history;
mod yarn;
@@ -364,10 +364,14 @@ pub struct Completion {
pub new_text: String,
/// A label for this completion that is shown in the menu.
pub label: CodeLabel,
/// The id of the language server that produced this completion.
pub server_id: LanguageServerId,
/// The documentation for this completion.
pub documentation: Option<CompletionDocumentation>,
/// Completion data source which it was constructed from.
pub source: CompletionSource,
/// The raw completion provided by the language server.
pub lsp_completion: lsp::CompletionItem,
/// Whether this completion has been resolved, to ensure it happens once per completion.
pub resolved: bool,
/// An optional callback to invoke when this completion is confirmed.
/// Returns, whether new completions should be retriggered after the current one.
/// If `true` is returned, the editor will show a new completion menu after this completion is confirmed.
@@ -375,53 +379,15 @@ pub struct Completion {
pub confirm: Option<Arc<dyn Send + Sync + Fn(CompletionIntent, &mut Window, &mut App) -> bool>>,
}
#[derive(Debug, Clone)]
pub enum CompletionSource {
Lsp {
/// The id of the language server that produced this completion.
server_id: LanguageServerId,
/// The raw completion provided by the language server.
lsp_completion: Box<lsp::CompletionItem>,
/// Whether this completion has been resolved, to ensure it happens once per completion.
resolved: bool,
},
Custom,
}
impl CompletionSource {
pub fn server_id(&self) -> Option<LanguageServerId> {
if let CompletionSource::Lsp { server_id, .. } = self {
Some(*server_id)
} else {
None
}
}
pub fn lsp_completion(&self) -> Option<&lsp::CompletionItem> {
if let Self::Lsp { lsp_completion, .. } = self {
Some(lsp_completion)
} else {
None
}
}
fn lsp_completion_mut(&mut self) -> Option<&mut lsp::CompletionItem> {
if let Self::Lsp { lsp_completion, .. } = self {
Some(lsp_completion)
} else {
None
}
}
}
impl std::fmt::Debug for Completion {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Completion")
.field("old_range", &self.old_range)
.field("new_text", &self.new_text)
.field("label", &self.label)
.field("server_id", &self.server_id)
.field("documentation", &self.documentation)
.field("source", &self.source)
.field("lsp_completion", &self.lsp_completion)
.finish()
}
}
@@ -431,7 +397,9 @@ impl std::fmt::Debug for Completion {
pub(crate) struct CoreCompletion {
old_range: Range<Anchor>,
new_text: String,
source: CompletionSource,
server_id: LanguageServerId,
lsp_completion: lsp::CompletionItem,
resolved: bool,
}
/// A code action provided by a language server.
@@ -443,12 +411,12 @@ pub struct CodeAction {
pub range: Range<Anchor>,
/// The raw code action provided by the language server.
/// Can be either an action or a command.
pub lsp_action: LspAction,
pub lsp_action: ActionVariant,
}
/// An action sent back by a language server.
#[derive(Clone, Debug)]
pub enum LspAction {
pub enum ActionVariant {
/// An action with the full data, may have a command or may not.
/// May require resolving.
Action(Box<lsp::CodeAction>),
@@ -456,7 +424,7 @@ pub enum LspAction {
Command(lsp::Command),
}
impl LspAction {
impl ActionVariant {
pub fn title(&self) -> &str {
match self {
Self::Action(action) => &action.title,
@@ -918,6 +886,7 @@ impl Project {
});
cx.subscribe(&ssh, Self::on_ssh_event).detach();
cx.observe(&ssh, |_, _, cx| cx.notify()).detach();
let this = Self {
buffer_ordered_messages_tx: tx,
@@ -1402,9 +1371,9 @@ impl Project {
self.environment.read(cx).environment_errors()
}
pub fn remove_environment_error(&mut self, worktree_id: WorktreeId, cx: &mut Context<Self>) {
self.environment.update(cx, |environment, cx| {
environment.remove_environment_error(worktree_id, cx);
pub fn remove_environment_error(&mut self, cx: &mut Context<Self>, worktree_id: WorktreeId) {
self.environment.update(cx, |environment, _| {
environment.remove_environment_error(worktree_id);
});
}
@@ -1795,6 +1764,7 @@ impl Project {
};
cx.emit(Event::RemoteIdChanged(Some(project_id)));
cx.notify();
Ok(())
}
@@ -1810,6 +1780,7 @@ impl Project {
self.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.send_project_updates(cx);
});
cx.notify();
cx.emit(Event::Reshared);
Ok(())
}
@@ -1839,12 +1810,13 @@ impl Project {
self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
.unwrap();
cx.emit(Event::Rejoined);
cx.notify();
Ok(())
}
pub fn unshare(&mut self, cx: &mut Context<Self>) -> Result<()> {
self.unshare_internal(cx)?;
cx.emit(Event::RemoteIdChanged(None));
cx.notify();
Ok(())
}
@@ -1888,6 +1860,7 @@ impl Project {
}
self.disconnected_from_host_internal(cx);
cx.emit(Event::DisconnectedFromHost);
cx.notify();
}
pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut Context<Self>) {
@@ -2536,11 +2509,15 @@ impl Project {
}
}
fn on_worktree_added(&mut self, worktree: &Entity<Worktree>, _: &mut Context<Self>) {
let mut remotely_created_models = self.remotely_created_models.lock();
if remotely_created_models.retain_count > 0 {
remotely_created_models.worktrees.push(worktree.clone())
fn on_worktree_added(&mut self, worktree: &Entity<Worktree>, cx: &mut Context<Self>) {
{
let mut remotely_created_models = self.remotely_created_models.lock();
if remotely_created_models.retain_count > 0 {
remotely_created_models.worktrees.push(worktree.clone())
}
}
cx.observe(worktree, |_, _, cx| cx.notify()).detach();
cx.notify();
}
fn on_worktree_released(&mut self, id_to_remove: WorktreeId, cx: &mut Context<Self>) {
@@ -2552,6 +2529,8 @@ impl Project {
})
.log_err();
}
cx.notify();
}
fn on_buffer_event(
@@ -3825,6 +3804,7 @@ impl Project {
cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
this.collaborators
.insert(collaborator.peer_id, collaborator);
cx.notify();
})?;
Ok(())
@@ -3868,6 +3848,7 @@ impl Project {
old_peer_id,
new_peer_id,
});
cx.notify();
Ok(())
})?
}
@@ -3895,6 +3876,7 @@ impl Project {
});
cx.emit(Event::CollaboratorLeft(peer_id));
cx.notify();
Ok(())
})?
}
@@ -4310,6 +4292,7 @@ impl Project {
worktrees: Vec<proto::WorktreeMetadata>,
cx: &mut Context<Project>,
) -> Result<()> {
cx.notify();
self.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.set_worktrees_from_proto(worktrees, self.replica_id(), cx)
})
@@ -4637,38 +4620,27 @@ impl Completion {
/// A key that can be used to sort completions when displaying
/// them to the user.
pub fn sort_key(&self) -> (usize, &str) {
const DEFAULT_KIND_KEY: usize = 2;
let kind_key = self
.source
.lsp_completion()
.and_then(|lsp_completion| lsp_completion.kind)
.and_then(|lsp_completion_kind| match lsp_completion_kind {
lsp::CompletionItemKind::KEYWORD => Some(0),
lsp::CompletionItemKind::VARIABLE => Some(1),
_ => None,
})
.unwrap_or(DEFAULT_KIND_KEY);
let kind_key = match self.lsp_completion.kind {
Some(lsp::CompletionItemKind::KEYWORD) => 0,
Some(lsp::CompletionItemKind::VARIABLE) => 1,
_ => 2,
};
(kind_key, &self.label.text[self.label.filter_range.clone()])
}
/// Whether this completion is a snippet.
pub fn is_snippet(&self) -> bool {
self.source
.lsp_completion()
.map_or(false, |lsp_completion| {
lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
})
self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
}
/// Returns the corresponding color for this completion.
///
/// Will return `None` if this completion's kind is not [`CompletionItemKind::COLOR`].
pub fn color(&self) -> Option<Hsla> {
let lsp_completion = self.source.lsp_completion()?;
if lsp_completion.kind? == CompletionItemKind::COLOR {
return color_extractor::extract_color(lsp_completion);
match self.lsp_completion.kind {
Some(CompletionItemKind::COLOR) => color_extractor::extract_color(&self.lsp_completion),
_ => None,
}
None
}
}

View File

@@ -212,15 +212,6 @@ pub enum GitHunkStyleSetting {
Transparent,
/// Show unstaged hunks with a pattern background
Pattern,
/// Show unstaged hunks with a border background
Border,
/// Show staged hunks with a pattern background
StagedPattern,
/// Show staged hunks with a pattern background
StagedTransparent,
/// Show staged hunks with a pattern background
StagedBorder,
}
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]

View File

@@ -1,6 +1,6 @@
syntax = "proto3";
package zed.messages;
import "google/protobuf/wrappers.proto";
// Looking for a number? Search "// current max"
@@ -999,12 +999,6 @@ message Completion {
uint64 server_id = 4;
bytes lsp_completion = 5;
bool resolved = 6;
Source source = 7;
enum Source {
Custom = 0;
Lsp = 1;
}
}
message GetCodeActions {

View File

@@ -15,22 +15,13 @@ doctest = false
[dependencies]
anyhow.workspace = true
assistant_tool.workspace = true
collections.workspace = true
shlex.workspace = true
futures.workspace = true
gpui.workspace = true
mlua.workspace = true
parking_lot.workspace = true
project.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
util.workspace = true
workspace.workspace = true
regex.workspace = true
[dev-dependencies]
collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }

View File

@@ -1,7 +1,9 @@
---@diagnostic disable: undefined-global
-- Create a sandbox environment
local sandbox = {}
-- Allow access to standard libraries (safe subset)
sandbox.string = string
sandbox.table = table
sandbox.math = math
@@ -13,19 +15,24 @@ sandbox.pairs = pairs
sandbox.ipairs = ipairs
sandbox.search = search
-- Create a sandboxed version of LuaFileIO
local io = {}
-- File functions
io.open = sb_io_open
io.popen = sb_io_popen
-- Add the sandboxed io library to the sandbox environment
sandbox.io = io
-- Load the script with the sandbox environment
local user_script_fn, err = load(user_script, nil, "t", sandbox)
if not user_script_fn then
error("Failed to load user script: " .. tostring(err))
end
-- Execute the user script within the sandbox
local success, result = pcall(user_script_fn)
if not success then

View File

@@ -1,713 +0,0 @@
/// Models will commonly generate POSIX shell one-liner commands which
/// they run via io.popen() in Lua. Instead of giving those shell command
/// strings to the operating system - which is a security risk, and
/// which can easily fail on Windows, since Windows doesn't do POSIX - we
/// parse the shell command ourselves and translate it into a sequence of
/// commands in our normal sandbox. Essentially, this is an extremely
/// minimalistic shell which Lua popen() commands can execute in.
///
/// Our shell supports:
/// - Basic commands and args
/// - The operators `|`, `&&`, `;`, `>`, `1>`, `2>`, `&>`, `>&`
///
/// The operators currently have to have whitespace around them because the
/// `shlex` crate we use to tokenize the strings does not treat operators
/// as word boundaries, even though shells do. Fortunately, LLMs consistently
/// generate spaces around these operators anyway.
use mlua::{Error, Result};
#[derive(Debug, Clone, PartialEq, Default)]
/// A single parsed shell command: the program name, its arguments, and any
/// file redirections attached to it. Produced by the sandboxed-shell parser.
pub struct ShellCmd {
    /// The program to run (first word of the command).
    pub command: String,
    /// Arguments passed to the program, in order; quoting has already been
    /// resolved by the lexer, so each element is one argument.
    pub args: Vec<String>,
    /// Target file for stdout when `>` / `1>` (or `&>` / `>&`) was given.
    pub stdout_redirect: Option<String>,
    /// Target file for stderr when `2>` (or `&>` / `>&`) was given.
    pub stderr_redirect: Option<String>,
}
#[derive(Debug, Clone, PartialEq)]
/// Binary operators recognized between commands, listed from highest to
/// lowest binding strength: `|`, then `&&`, then `;`.
pub enum Operator {
    /// The `|` shell operator (highest precedence)
    Pipe,
    /// The `&&` shell operator (medium precedence)
    And,
    /// The `;` shell operator (lowest precedence)
    Semicolon,
}

impl Operator {
    /// Binding strength used by the precedence-climbing parser; a larger
    /// number binds tighter. `|` (3) > `&&` (2) > `;` (1).
    fn precedence(&self) -> u8 {
        match self {
            Self::Semicolon => 1,
            Self::And => 2,
            Self::Pipe => 3,
        }
    }
}
#[derive(Debug, Clone, PartialEq)]
/// Abstract syntax tree for a parsed shell line: either a single command or
/// a binary operation combining two subtrees. Operator precedence is encoded
/// by the tree shape (tighter-binding operators appear deeper in the tree).
pub enum ShellAst {
    /// A leaf: one command with its arguments and redirections.
    Command(ShellCmd),
    /// An interior node: `left <operator> right`.
    Operation {
        operator: Operator,
        left: Box<ShellAst>,
        right: Box<ShellAst>,
    },
}
impl ShellAst {
    /// Parse a shell string and build an abstract syntax tree.
    ///
    /// Input containing characters that imply unsupported shell features
    /// (variables, subshells, command substitution, brace expansion) is
    /// rejected up front with a runtime error.
    pub fn parse(string: impl AsRef<str>) -> Result<Self> {
        let input = string.as_ref();
        // Any one of these characters signals a shell feature this
        // minimal sandbox deliberately refuses to emulate.
        let has_unsupported = input
            .chars()
            .any(|c| matches!(c, '$' | '`' | '(' | ')' | '{' | '}'));
        if has_unsupported {
            return Err(Error::RuntimeError(
                "Complex shell features (subshells, variables, backgrounding, etc.) are not available in this shell."
                    .to_string(),
            ));
        }
        ShellParser::new(input).parse_expression(0)
    }
}
/// Which output stream(s) a redirection operator applies to:
/// `>`/`1>` -> Stdout, `2>` -> Stderr, `&>`/`>&` -> Both.
enum Redirect {
    Stdout,
    Stderr,
    Both,
}
/// Recursive-descent parser over a `shlex` token stream, with one token of
/// lookahead held in `current_token`.
struct ShellParser<'a> {
    /// The underlying lexer; yields whitespace-separated, quote-resolved words.
    lexer: shlex::Shlex<'a>,
    /// One-token lookahead buffer; `None` once input is exhausted. Also used
    /// to push a synthesized `";"` back when a word carries a trailing semicolon.
    current_token: Option<String>,
}
impl<'a> ShellParser<'a> {
    /// Create a parser over `input`, priming the one-token lookahead buffer.
    fn new(input: &'a str) -> Self {
        let mut lexer = shlex::Shlex::new(input);
        let current_token = lexer.next();
        Self {
            lexer,
            current_token,
        }
    }

    /// Consume the current token and pull the next one from the lexer.
    fn advance(&mut self) {
        self.current_token = self.lexer.next();
    }

    /// Look at the current token without consuming it.
    fn peek(&self) -> Option<&str> {
        self.current_token.as_deref()
    }

    /// Precedence-climbing expression parser. Parses a command, then folds in
    /// any following operators whose precedence is at least `min_precedence`,
    /// recursing with `precedence + 1` for the right side so operators are
    /// left-associative.
    fn parse_expression(&mut self, min_precedence: u8) -> Result<ShellAst> {
        // Parse the first command or atom
        let mut left = ShellAst::Command(self.parse_command()?);
        // While we have operators with sufficient precedence, keep building the tree
        loop {
            let op = match self.parse_operator() {
                Some(op) if op.precedence() >= min_precedence => op,
                _ => break,
            };
            // Consume the operator token
            self.advance();
            // Special case for trailing semicolons - if we have no more tokens,
            // we don't need to parse another command
            if op == Operator::Semicolon && self.peek().is_none() {
                break;
            }
            // Parse the right side with higher precedence
            // For left-associative operators, we use op.precedence() + 1
            let right = self.parse_expression(op.precedence() + 1)?;
            // Build the operation node
            left = ShellAst::Operation {
                operator: op,
                left: Box::new(left),
                right: Box::new(right),
            };
        }
        Ok(left)
    }

    /// Map the current token to an operator, if it is one. Does not consume.
    fn parse_operator(&self) -> Option<Operator> {
        match self.peek()? {
            "|" => Some(Operator::Pipe),
            "&&" => Some(Operator::And),
            ";" => Some(Operator::Semicolon),
            _ => None,
        }
    }

    /// Consume a redirection operator and its target word, recording the
    /// target path on `cmd` for the stream(s) named by `redirect`.
    ///
    /// Errors if the input ends before a target word is found.
    fn handle_redirection(&mut self, cmd: &mut ShellCmd, redirect: Redirect) -> Result<()> {
        self.advance(); // consume the redirection operator
        let target = self.peek().ok_or_else(|| {
            Error::RuntimeError("Missing redirection target in shell".to_string())
        })?;
        match redirect {
            Redirect::Stdout => {
                cmd.stdout_redirect = Some(target.to_string());
            }
            Redirect::Stderr => {
                cmd.stderr_redirect = Some(target.to_string());
            }
            Redirect::Both => {
                // `&>` / `>&` send both streams to the same file.
                cmd.stdout_redirect = Some(target.to_string());
                cmd.stderr_redirect = Some(target.to_string());
            }
        }
        self.advance(); // consume the target
        Ok(())
    }

    /// Parse one command: a command word, its arguments, and any redirections,
    /// stopping at an operator token or end of input.
    ///
    /// A word with a trailing `;` (e.g. `spaces";`) is split: the semicolon is
    /// pushed back into `current_token` so the caller parses it as an operator.
    /// Errors on a bare `&` (backgrounding) or if no command word was found.
    fn parse_command(&mut self) -> Result<ShellCmd> {
        let mut cmd = ShellCmd::default();
        // Process tokens until we hit an operator or end of input
        loop {
            let redirect;
            match self.peek() {
                Some(token) => {
                    match token {
                        "|" | "&&" | ";" => break, // These are operators, not part of the command
                        ">" | "1>" => {
                            redirect = Some(Redirect::Stdout);
                        }
                        "2>" => {
                            redirect = Some(Redirect::Stderr);
                        }
                        "&>" | ">&" => {
                            redirect = Some(Redirect::Both);
                        }
                        "&" => {
                            // Reject ampersand as it's used for backgrounding processes
                            return Err(Error::RuntimeError(
                                "Background processes (using &) are not available in this shell."
                                    .to_string(),
                            ));
                        }
                        _ => {
                            redirect = None;
                        }
                    }
                }
                None => {
                    break; // We ran out of tokens; exit the loop.
                }
            }
            // We do this separate conditional after the borrow from the peek()
            // has expired, to avoid a borrow checker error.
            match redirect {
                Some(redirect) => {
                    self.handle_redirection(&mut cmd, redirect)?;
                }
                None => {
                    // It's either the command name or an argument
                    let mut token = self.current_token.take().unwrap();
                    self.advance();
                    // Handle trailing semicolons
                    let original_token_len = token.len();
                    while token.ends_with(';') {
                        token.pop();
                    }
                    let had_semicolon = token.len() != original_token_len;
                    if cmd.command.is_empty() {
                        cmd.command = token;
                    } else {
                        cmd.args.push(token);
                    }
                    if had_semicolon {
                        // Put the semicolon back as the next token, so after we break we parse it.
                        self.current_token = Some(";".to_string());
                        break;
                    }
                }
            }
        }
        if cmd.command.is_empty() {
            return Err(Error::RuntimeError(
                "Missing command to run in shell".to_string(),
            ));
        }
        Ok(cmd)
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_simple_command() {
// Basic command with no args or operators
let cmd = "ls";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Command(shell_cmd) = ast {
assert_eq!(shell_cmd.command, "ls");
assert!(shell_cmd.args.is_empty());
assert_eq!(shell_cmd.stdout_redirect, None);
assert_eq!(shell_cmd.stderr_redirect, None);
} else {
panic!("Expected Command node");
}
}
#[test]
fn test_command_with_args() {
// Command with arguments
let cmd = "ls -la /home";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Command(shell_cmd) = ast {
assert_eq!(shell_cmd.command, "ls");
assert_eq!(shell_cmd.args, vec!["-la".to_string(), "/home".to_string()]);
assert_eq!(shell_cmd.stdout_redirect, None);
assert_eq!(shell_cmd.stderr_redirect, None);
} else {
panic!("Expected Command node");
}
}
#[test]
fn test_simple_pipe() {
// Test pipe operator
let cmd = "ls -l | grep txt";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Operation {
operator,
left,
right,
} = ast
{
assert_eq!(operator, Operator::Pipe);
if let ShellAst::Command(left_cmd) = *left {
assert_eq!(left_cmd.command, "ls");
assert_eq!(left_cmd.args, vec!["-l".to_string()]);
} else {
panic!("Expected Command node for left side");
}
if let ShellAst::Command(right_cmd) = *right {
assert_eq!(right_cmd.command, "grep");
assert_eq!(right_cmd.args, vec!["txt".to_string()]);
} else {
panic!("Expected Command node for right side");
}
} else {
panic!("Expected Operation node");
}
}
#[test]
fn test_simple_and() {
// Test && operator
let cmd = "mkdir test && cd test";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Operation {
operator,
left,
right,
} = ast
{
assert_eq!(operator, Operator::And);
if let ShellAst::Command(left_cmd) = *left {
assert_eq!(left_cmd.command, "mkdir");
assert_eq!(left_cmd.args, vec!["test".to_string()]);
} else {
panic!("Expected Command node for left side");
}
if let ShellAst::Command(right_cmd) = *right {
assert_eq!(right_cmd.command, "cd");
assert_eq!(right_cmd.args, vec!["test".to_string()]);
} else {
panic!("Expected Command node for right side");
}
} else {
panic!("Expected Operation node");
}
}
#[test]
fn test_complex_chain_with_precedence() {
// Test a more complex chain with different precedence levels
let cmd = "echo hello | grep e && ls -l ; echo done";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
// The tree should be structured with precedence:
// - Pipe has highest precedence
// - Then And
// - Then Semicolon (lowest)
if let ShellAst::Operation {
operator,
left,
right,
} = &ast
{
assert_eq!(*operator, Operator::Semicolon);
if let ShellAst::Operation {
operator,
left: inner_left,
right: inner_right,
} = &**left
{
assert_eq!(*operator, Operator::And);
if let ShellAst::Operation {
operator,
left: pipe_left,
right: pipe_right,
} = &**inner_left
{
assert_eq!(*operator, Operator::Pipe);
if let ShellAst::Command(cmd) = &**pipe_left {
assert_eq!(cmd.command, "echo");
assert_eq!(cmd.args, vec!["hello".to_string()]);
} else {
panic!("Expected Command node for pipe left branch");
}
if let ShellAst::Command(cmd) = &**pipe_right {
assert_eq!(cmd.command, "grep");
assert_eq!(cmd.args, vec!["e".to_string()]);
} else {
panic!("Expected Command node for pipe right branch");
}
} else {
panic!("Expected Pipe operation node");
}
if let ShellAst::Command(cmd) = &**inner_right {
assert_eq!(cmd.command, "ls");
assert_eq!(cmd.args, vec!["-l".to_string()]);
} else {
panic!("Expected Command node for and right branch");
}
} else {
panic!("Expected And operation node");
}
if let ShellAst::Command(cmd) = &**right {
assert_eq!(cmd.command, "echo");
assert_eq!(cmd.args, vec!["done".to_string()]);
} else {
panic!("Expected Command node for semicolon right branch");
}
} else {
panic!("Expected Semicolon operation node");
}
}
#[test]
fn test_stdout_redirection() {
// Test stdout redirection
let cmd = "echo hello > output.txt";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Command(shell_cmd) = ast {
assert_eq!(shell_cmd.command, "echo");
assert_eq!(shell_cmd.args, vec!["hello".to_string()]);
assert_eq!(shell_cmd.stdout_redirect, Some("output.txt".to_string()));
assert_eq!(shell_cmd.stderr_redirect, None);
} else {
panic!("Expected Command node");
}
}
#[test]
fn test_stderr_redirection() {
// Test stderr redirection
let cmd = "find / -name test 2> errors.log";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Command(shell_cmd) = ast {
assert_eq!(shell_cmd.command, "find");
assert_eq!(
shell_cmd.args,
vec!["/".to_string(), "-name".to_string(), "test".to_string()]
);
assert_eq!(shell_cmd.stdout_redirect, None);
assert_eq!(shell_cmd.stderr_redirect, Some("errors.log".to_string()));
} else {
panic!("Expected Command node");
}
}
#[test]
fn test_both_redirections() {
// Test both stdout and stderr redirection
let cmd = "make &> build.log";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Command(shell_cmd) = ast {
assert_eq!(shell_cmd.command, "make");
assert!(shell_cmd.args.is_empty());
assert_eq!(shell_cmd.stdout_redirect, Some("build.log".to_string()));
assert_eq!(shell_cmd.stderr_redirect, Some("build.log".to_string()));
} else {
panic!("Expected Command node");
}
// Test alternative syntax
let cmd = "make >& build.log";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Command(shell_cmd) = ast {
assert_eq!(shell_cmd.command, "make");
assert!(shell_cmd.args.is_empty());
assert_eq!(shell_cmd.stdout_redirect, Some("build.log".to_string()));
assert_eq!(shell_cmd.stderr_redirect, Some("build.log".to_string()));
} else {
panic!("Expected Command node");
}
}
#[test]
fn test_multiple_operators() {
// Test multiple operators in a single command
let cmd =
"find . -name \"*.rs\" | grep impl && echo \"Found implementations\" ; echo \"Done\"";
// Verify the AST structure
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Operation {
operator: semicolon_op,
left: semicolon_left,
right: semicolon_right,
} = ast
{
assert_eq!(semicolon_op, Operator::Semicolon);
if let ShellAst::Operation {
operator: and_op,
left: and_left,
right: and_right,
} = *semicolon_left
{
assert_eq!(and_op, Operator::And);
if let ShellAst::Operation {
operator: pipe_op,
left: pipe_left,
right: pipe_right,
} = *and_left
{
assert_eq!(pipe_op, Operator::Pipe);
if let ShellAst::Command(cmd) = *pipe_left {
assert_eq!(cmd.command, "find");
assert_eq!(
cmd.args,
vec![".".to_string(), "-name".to_string(), "*.rs".to_string()]
);
} else {
panic!("Expected Command node for pipe left");
}
if let ShellAst::Command(cmd) = *pipe_right {
assert_eq!(cmd.command, "grep");
assert_eq!(cmd.args, vec!["impl".to_string()]);
} else {
panic!("Expected Command node for pipe right");
}
} else {
panic!("Expected Pipe operation");
}
if let ShellAst::Command(cmd) = *and_right {
assert_eq!(cmd.command, "echo");
assert_eq!(cmd.args, vec!["Found implementations".to_string()]);
} else {
panic!("Expected Command node for and right");
}
} else {
panic!("Expected And operation");
}
if let ShellAst::Command(cmd) = *semicolon_right {
assert_eq!(cmd.command, "echo");
assert_eq!(cmd.args, vec!["Done".to_string()]);
} else {
panic!("Expected Command node for semicolon right");
}
} else {
panic!("Expected Semicolon operation at root");
}
}
#[test]
fn test_pipe_with_redirections() {
// Test pipe with redirections
let cmd = "cat file.txt | grep error > results.txt 2> errors.log";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Operation {
operator,
left,
right,
} = ast
{
assert_eq!(operator, Operator::Pipe);
if let ShellAst::Command(left_cmd) = *left {
assert_eq!(left_cmd.command, "cat");
assert_eq!(left_cmd.args, vec!["file.txt".to_string()]);
assert_eq!(left_cmd.stdout_redirect, None);
assert_eq!(left_cmd.stderr_redirect, None);
} else {
panic!("Expected Command node for left side");
}
if let ShellAst::Command(right_cmd) = *right {
assert_eq!(right_cmd.command, "grep");
assert_eq!(right_cmd.args, vec!["error".to_string()]);
assert_eq!(right_cmd.stdout_redirect, Some("results.txt".to_string()));
assert_eq!(right_cmd.stderr_redirect, Some("errors.log".to_string()));
} else {
panic!("Expected Command node for right side");
}
} else {
panic!("Expected Operation node");
}
}
#[test]
fn test_quoted_arguments() {
// Test quoted arguments
let cmd = "echo \"hello world\" | grep \"o w\"";
let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
if let ShellAst::Operation {
operator,
left,
right,
} = ast
{
assert_eq!(operator, Operator::Pipe);
if let ShellAst::Command(left_cmd) = *left {
assert_eq!(left_cmd.command, "echo");
assert_eq!(left_cmd.args, vec!["hello world".to_string()]);
} else {
panic!("Expected Command node for left side");
}
if let ShellAst::Command(right_cmd) = *right {
assert_eq!(right_cmd.command, "grep");
assert_eq!(right_cmd.args, vec!["o w".to_string()]);
} else {
panic!("Expected Command node for right side");
}
} else {
panic!("Expected Operation node");
}
}
#[test]
fn test_unsupported_features() {
    // Shell features outside the supported subset must all be rejected.
    for cmd in [
        "echo $HOME",                        // variable expansion
        "echo `date`",                       // backtick substitution
        "echo $(date)",                      // command substitution
        "for i in {1..5}; do echo $i; done", // control flow
    ] {
        assert!(ShellAst::parse(cmd).is_err());
    }
}
#[test]
fn test_complex_command() {
    // A long single command with flags and quoted values parses into one
    // Command node with every token (quotes stripped) in order.
    let cmd = "find /path/to/dir -type f -name \"*.txt\" -exec grep \"pattern with spaces\";";
    let ast = ShellAst::parse(cmd).expect("parsing failed for {cmd:?}");
    let ShellAst::Command(shell_cmd) = ast else {
        panic!("Expected Command node");
    };
    assert_eq!(shell_cmd.command, "find");
    let expected: Vec<String> = [
        "/path/to/dir",
        "-type",
        "f",
        "-name",
        "*.txt",
        "-exec",
        "grep",
        "pattern with spaces",
    ]
    .iter()
    .map(|s| s.to_string())
    .collect();
    assert_eq!(shell_cmd.args, expected);
    assert_eq!(shell_cmd.stdout_redirect, None);
    assert_eq!(shell_cmd.stderr_redirect, None);
}
#[test]
fn test_empty_command() {
    // An empty string is not a valid command.
    assert!(ShellAst::parse("").is_err());
}
#[test]
fn test_missing_redirection_target() {
    // A redirection operator with no target file is a parse error.
    for cmd in ["echo hello >", "ls 2>"] {
        assert!(ShellAst::parse(cmd).is_err());
    }
}
#[test]
fn test_ampersand_as_argument() {
    // `&` (background execution) is not supported; parsing must fail with an
    // error that mentions background processes. Parse once and inspect the
    // error directly instead of parsing the same input twice as before.
    match ShellAst::parse("grep & file.txt") {
        Err(Error::RuntimeError(msg)) => assert!(msg.contains("Background processes")),
        Err(other) => panic!("Expected RuntimeError about background processes, got {other:?}"),
        Ok(_) => panic!("Expected parsing to fail for a background-process command"),
    }
}
}

View File

@@ -1,14 +1,20 @@
mod sandboxed_shell;
mod session;
use project::Project;
pub(crate) use session::*;
mod streaming_json;
mod streaming_lua;
use anyhow::anyhow;
use assistant_tool::{Tool, ToolRegistry};
use gpui::{App, AppContext as _, Entity, Task};
use gpui::{App, AppContext as _, Task, WeakEntity, Window};
use mlua::{Function, Lua, MultiValue, Result, UserData, UserDataMethods};
use schemars::JsonSchema;
use serde::Deserialize;
use std::sync::Arc;
use std::{
cell::RefCell,
collections::HashMap,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
};
use workspace::Workspace;
pub fn init(cx: &App) {
let registry = ToolRegistry::global(cx);
@@ -28,7 +34,20 @@ impl Tool for ScriptingTool {
}
    fn description(&self) -> String {
        // NOTE(review): this span is diff residue — it shows both the removed
        // body (the `include_str!` line) and the added body (the raw string
        // literal) on consecutive lines; as plain Rust it would not compile.
        // The raw string below is the prompt text shown to the LLM describing
        // the sandboxed Lua tool.
        include_str!("scripting_tool_description.txt").into()
        r#"You can write a Lua script and I'll run it on my code base and tell you what its output was,
including both stdout as well as the git diff of changes it made to the filesystem. That way,
you can get more information about the code base, or make changes to the code base directly.
The lua script will have access to `io` and it will run with the current working directory being in
the root of the code base, so you can use it to explore, search, make changes, etc. You can also have
the script print things, and I'll tell you what the output was. Note that `io` only has `open`, and
then the file it returns only has the methods read, write, and close - it doesn't have popen or
anything else. Also, I'm going to be putting this Lua script into JSON, so please don't use Lua's
double quote syntax for string literals - use one of Lua's other syntaxes for string literals, so I
don't have to escape the double quotes. There will be a global called `search` which accepts a regex
(it's implemented using Rust's regex crate, so use that regex syntax) and runs that regex on the contents
of every file in the code base (aside from gitignored files), then returns an array of tables with two
fields: "path" (the path to the file that had the matches) and "matches" (an array of strings, with each
string being a match that was found within the file)."#.into()
    }
fn input_schema(&self) -> serde_json::Value {
@@ -39,21 +58,728 @@ impl Tool for ScriptingTool {
    fn run(
        self: Arc<Self>,
        // NOTE(review): diff residue — both the removed `project` parameter
        // and the added `workspace` parameter appear here; as plain Rust this
        // signature (and parts of the body below) would not compile.
        input: serde_json::Value,
        project: Entity<Project>,
        workspace: WeakEntity<Workspace>,
        _window: &mut Window,
        cx: &mut App,
    ) -> Task<anyhow::Result<String>> {
        // Resolve the first visible worktree's absolute root; the Lua sandbox
        // is confined to this directory.
        let root_dir = workspace.update(cx, |workspace, cx| {
            let first_worktree = workspace
                .visible_worktrees(cx)
                .next()
                .ok_or_else(|| anyhow!("no worktrees"))?;
            workspace
                .absolute_path_of_worktree(first_worktree.read(cx).id(), cx)
                .ok_or_else(|| anyhow!("no worktree root"))
        });
        // NOTE(review): the doubled match below may be deliberately unwrapping
        // a nested Result<Result<_>> from WeakEntity::update, or may be diff
        // residue — confirm against the final version of this file.
        let root_dir = match root_dir {
            Ok(root_dir) => root_dir,
            Err(err) => return Task::ready(Err(err)),
        };
        let root_dir = match root_dir {
            Ok(root_dir) => root_dir,
            Err(err) => return Task::ready(Err(err)),
        };
        let input = match serde_json::from_value::<ScriptingToolInput>(input) {
            Err(err) => return Task::ready(Err(err.into())),
            Ok(input) => input,
        };
        // NOTE(review): from here down, the old session-based `cx.spawn` tail
        // and the new `cx.background_spawn` tail are interleaved (diff residue).
        let session = cx.new(|cx| Session::new(project, cx));
        let lua_script = input.lua_script;
        let script = session.update(cx, |session, cx| session.run_script(lua_script, cx));
        cx.spawn(|_cx| async move {
            let output = script.await?.stdout;
            drop(session);
        cx.background_spawn(async move {
            // Run the script off the main thread; collect its printed lines.
            let fs_changes = HashMap::new();
            let output = run_sandboxed_lua(&lua_script, fs_changes, root_dir)
                .map_err(|err| anyhow!(format!("{err}")))?;
            let output = output.printed_lines.join("\n");
            Ok(format!("The script output the following:\n{output}"))
        })
    }
}
// Lua source executed by run_sandboxed_lua after the sandbox globals
// (sb_print, search, sb_io_open, user_script) have been installed.
const SANDBOX_PREAMBLE: &str = include_str!("sandbox_preamble.lua");
/// Byte buffer backing an open "file" in the sandbox. Interior mutability
/// lets the Lua-facing read/write/close closures share one buffer.
struct FileContent(RefCell<Vec<u8>>);
impl UserData for FileContent {
    fn add_methods<M: UserDataMethods<Self>>(_methods: &mut M) {
        // FileContent doesn't have any methods so far.
    }
}
/// Builds the sandboxed replacement for Lua's `print`: instead of writing to
/// stdout, every printed line is appended to `printed_lines`.
fn print(lua: &Lua, printed_lines: Rc<RefCell<Vec<String>>>) -> Result<Function> {
    lua.create_function(move |_, args: MultiValue| {
        // Lua's `print()` joins its arguments with tab characters. If any
        // argument fails to stringify, the whole call fails.
        let rendered = args
            .into_iter()
            .map(|arg| arg.to_string())
            .collect::<Result<Vec<String>>>()?
            .join("\t");
        printed_lines.borrow_mut().push(rendered);
        Ok(())
    })
}
fn search(
lua: &Lua,
_fs_changes: Rc<RefCell<HashMap<PathBuf, Vec<u8>>>>,
root_dir: PathBuf,
) -> Result<Function> {
lua.create_function(move |lua, regex: String| {
use mlua::Table;
use regex::Regex;
use std::fs;
// Function to recursively search directory
let search_regex = match Regex::new(&regex) {
Ok(re) => re,
Err(e) => return Err(mlua::Error::runtime(format!("Invalid regex: {}", e))),
};
let mut search_results: Vec<Result<Table>> = Vec::new();
// Create an explicit stack for directories to process
let mut dir_stack = vec![root_dir.clone()];
while let Some(current_dir) = dir_stack.pop() {
// Process each entry in the current directory
let entries = match fs::read_dir(&current_dir) {
Ok(entries) => entries,
Err(e) => return Err(e.into()),
};
for entry_result in entries {
let entry = match entry_result {
Ok(e) => e,
Err(e) => return Err(e.into()),
};
let path = entry.path();
if path.is_dir() {
// Skip .git directory and other common directories to ignore
let dir_name = path.file_name().unwrap_or_default().to_string_lossy();
if !dir_name.starts_with('.')
&& dir_name != "node_modules"
&& dir_name != "target"
{
// Instead of recursive call, add to stack
dir_stack.push(path);
}
} else if path.is_file() {
// Skip binary files and very large files
if let Ok(metadata) = fs::metadata(&path) {
if metadata.len() > 1_000_000 {
// Skip files larger than 1MB
continue;
}
}
// Attempt to read the file as text
if let Ok(content) = fs::read_to_string(&path) {
let mut matches = Vec::new();
// Find all regex matches in the content
for capture in search_regex.find_iter(&content) {
matches.push(capture.as_str().to_string());
}
// If we found matches, create a result entry
if !matches.is_empty() {
let result_entry = lua.create_table()?;
result_entry.set("path", path.to_string_lossy().to_string())?;
let matches_table = lua.create_table()?;
for (i, m) in matches.iter().enumerate() {
matches_table.set(i + 1, m.clone())?;
}
result_entry.set("matches", matches_table)?;
search_results.push(Ok(result_entry));
}
}
}
}
}
// Create a table to hold our results
let results_table = lua.create_table()?;
for (i, result) in search_results.into_iter().enumerate() {
match result {
Ok(entry) => results_table.set(i + 1, entry)?,
Err(e) => return Err(e),
}
}
Ok(results_table)
})
}
/// Sandboxed io.open() function in Lua.
///
/// Returns a Lua table that mimics a Lua file handle (`read`/`write`/`close`)
/// but never writes to the real filesystem: written bytes are recorded in
/// `fs_changes` keyed by path. Reads prefer the in-memory copy in
/// `fs_changes` and fall back to disk. Paths are confined to `root_dir`.
/// On failure it returns `(nil, message)`, mirroring Lua's io.open contract.
fn io_open(
    lua: &Lua,
    fs_changes: Rc<RefCell<HashMap<PathBuf, Vec<u8>>>>,
    root_dir: PathBuf,
) -> Result<Function> {
    lua.create_function(move |lua, (path_str, mode): (String, Option<String>)| {
        // Lua's io.open defaults to read mode when no mode is given.
        let mode = mode.unwrap_or_else(|| "r".to_string());
        // Parse the mode string to determine read/write permissions
        let read_perm = mode.contains('r');
        let write_perm = mode.contains('w') || mode.contains('a') || mode.contains('+');
        let append = mode.contains('a');
        let truncate = mode.contains('w');
        // This will be the Lua value returned from the `open` function.
        let file = lua.create_table()?;
        // Store file metadata in the file
        file.set("__path", path_str.clone())?;
        file.set("__mode", mode.clone())?;
        file.set("__read_perm", read_perm)?;
        file.set("__write_perm", write_perm)?;
        // Sandbox the path; it must be within root_dir
        let path: PathBuf = {
            let rust_path = Path::new(&path_str);
            // Get absolute path
            if rust_path.is_absolute() {
                // Check if path starts with root_dir prefix without resolving symlinks
                if !rust_path.starts_with(&root_dir) {
                    return Ok((
                        None,
                        format!(
                            "Error: Absolute path {} is outside the current working directory",
                            path_str
                        ),
                    ));
                }
                rust_path.to_path_buf()
            } else {
                // Make relative path absolute relative to cwd
                root_dir.join(rust_path)
            }
        };
        // close method
        let close_fn = {
            let fs_changes = fs_changes.clone();
            lua.create_function(move |_lua, file_userdata: mlua::Table| {
                let write_perm = file_userdata.get::<bool>("__write_perm")?;
                let path = file_userdata.get::<String>("__path")?;
                if write_perm {
                    // When closing a writable file, record the content
                    let content = file_userdata.get::<mlua::AnyUserData>("__content")?;
                    let content_ref = content.borrow::<FileContent>()?;
                    let content_vec = content_ref.0.borrow();
                    // Don't actually write to disk; instead, just update fs_changes.
                    let path_buf = PathBuf::from(&path);
                    fs_changes
                        .borrow_mut()
                        .insert(path_buf.clone(), content_vec.clone());
                }
                Ok(true)
            })?
        };
        file.set("close", close_fn)?;
        // If it's a directory, give it a custom read() and return early.
        if path.is_dir() {
            // TODO handle the case where we changed it in the in-memory fs
            // Create a special directory handle
            file.set("__is_directory", true)?;
            // Store directory entries
            let entries = match std::fs::read_dir(&path) {
                Ok(entries) => {
                    let mut entry_names = Vec::new();
                    for entry in entries.flatten() {
                        entry_names.push(entry.file_name().to_string_lossy().into_owned());
                    }
                    entry_names
                }
                Err(e) => return Ok((None, format!("Error reading directory: {}", e))),
            };
            // Save the list of entries
            file.set("__dir_entries", entries)?;
            file.set("__dir_position", 0usize)?;
            // Create a directory-specific read function: each call returns the
            // next entry name, then nil once the listing is exhausted.
            let read_fn = lua.create_function(|_lua, file_userdata: mlua::Table| {
                let position = file_userdata.get::<usize>("__dir_position")?;
                let entries = file_userdata.get::<Vec<String>>("__dir_entries")?;
                if position >= entries.len() {
                    return Ok(None); // No more entries
                }
                let entry = entries[position].clone();
                file_userdata.set("__dir_position", position + 1)?;
                Ok(Some(entry))
            })?;
            file.set("read", read_fn)?;
            // If we got this far, the directory was opened successfully
            return Ok((Some(file), String::new()));
        }
        // Seed the in-memory buffer: pending sandbox edits win over disk.
        let is_in_changes = fs_changes.borrow().contains_key(&path);
        let file_exists = is_in_changes || path.exists();
        let mut file_content = Vec::new();
        if file_exists && !truncate {
            if is_in_changes {
                file_content = fs_changes.borrow().get(&path).unwrap().clone();
            } else {
                // Try to read existing content if file exists and we're not truncating
                match std::fs::read(&path) {
                    Ok(content) => file_content = content,
                    Err(e) => return Ok((None, format!("Error reading file: {}", e))),
                }
            }
        }
        // If in append mode, position should be at the end
        let position = if append && file_exists {
            file_content.len()
        } else {
            0
        };
        file.set("__position", position)?;
        file.set(
            "__content",
            lua.create_userdata(FileContent(RefCell::new(file_content)))?,
        )?;
        // Create file methods
        // read method
        let read_fn = {
            lua.create_function(
                |_lua, (file_userdata, format): (mlua::Table, Option<mlua::Value>)| {
                    let read_perm = file_userdata.get::<bool>("__read_perm")?;
                    if !read_perm {
                        return Err(mlua::Error::runtime("File not open for reading"));
                    }
                    let content = file_userdata.get::<mlua::AnyUserData>("__content")?;
                    let mut position = file_userdata.get::<usize>("__position")?;
                    let content_ref = content.borrow::<FileContent>()?;
                    let content_vec = content_ref.0.borrow();
                    if position >= content_vec.len() {
                        return Ok(None); // EOF
                    }
                    match format {
                        Some(mlua::Value::String(s)) => {
                            let lossy_string = s.to_string_lossy();
                            let format_str: &str = lossy_string.as_ref();
                            // Only consider the first 2 bytes, since it's common to pass e.g. "*all" instead of "*a"
                            // NOTE(review): this byte-slice panics when the
                            // format string is shorter than 2 bytes (e.g.
                            // io.read("a")) or when byte 2 is not a char
                            // boundary — confirm formats are validated upstream.
                            match &format_str[0..2] {
                                "*a" => {
                                    // Read entire file from current position
                                    let result = String::from_utf8_lossy(&content_vec[position..])
                                        .to_string();
                                    position = content_vec.len();
                                    file_userdata.set("__position", position)?;
                                    Ok(Some(result))
                                }
                                "*l" => {
                                    // Read next line
                                    let mut line = Vec::new();
                                    let mut found_newline = false;
                                    while position < content_vec.len() {
                                        let byte = content_vec[position];
                                        position += 1;
                                        if byte == b'\n' {
                                            found_newline = true;
                                            break;
                                        }
                                        // Skip \r in \r\n sequence but add it if it's alone
                                        if byte == b'\r' {
                                            if position < content_vec.len()
                                                && content_vec[position] == b'\n'
                                            {
                                                position += 1;
                                                found_newline = true;
                                                break;
                                            }
                                        }
                                        line.push(byte);
                                    }
                                    file_userdata.set("__position", position)?;
                                    if !found_newline
                                        && line.is_empty()
                                        && position >= content_vec.len()
                                    {
                                        return Ok(None); // EOF
                                    }
                                    let result = String::from_utf8_lossy(&line).to_string();
                                    Ok(Some(result))
                                }
                                "*n" => {
                                    // Try to parse as a number (number of bytes to read)
                                    // NOTE(review): Lua's "*n" format reads a
                                    // number *from the file*, but this parses
                                    // the format string itself as usize, which
                                    // always fails for "*n…" — so this branch
                                    // can only ever return the error. Likely a bug.
                                    match format_str.parse::<usize>() {
                                        Ok(n) => {
                                            let end =
                                                std::cmp::min(position + n, content_vec.len());
                                            let bytes = &content_vec[position..end];
                                            let result = String::from_utf8_lossy(bytes).to_string();
                                            position = end;
                                            file_userdata.set("__position", position)?;
                                            Ok(Some(result))
                                        }
                                        Err(_) => Err(mlua::Error::runtime(format!(
                                            "Invalid format: {}",
                                            format_str
                                        ))),
                                    }
                                }
                                "*L" => {
                                    // Read next line keeping the end of line
                                    let mut line = Vec::new();
                                    while position < content_vec.len() {
                                        let byte = content_vec[position];
                                        position += 1;
                                        line.push(byte);
                                        if byte == b'\n' {
                                            break;
                                        }
                                        // If we encounter a \r, add it and check if the next is \n
                                        if byte == b'\r'
                                            && position < content_vec.len()
                                            && content_vec[position] == b'\n'
                                        {
                                            line.push(content_vec[position]);
                                            position += 1;
                                            break;
                                        }
                                    }
                                    file_userdata.set("__position", position)?;
                                    if line.is_empty() && position >= content_vec.len() {
                                        return Ok(None); // EOF
                                    }
                                    let result = String::from_utf8_lossy(&line).to_string();
                                    Ok(Some(result))
                                }
                                _ => Err(mlua::Error::runtime(format!(
                                    "Unsupported format: {}",
                                    format_str
                                ))),
                            }
                        }
                        Some(mlua::Value::Number(n)) => {
                            // Read n bytes
                            let n = n as usize;
                            let end = std::cmp::min(position + n, content_vec.len());
                            let bytes = &content_vec[position..end];
                            let result = String::from_utf8_lossy(bytes).to_string();
                            position = end;
                            file_userdata.set("__position", position)?;
                            Ok(Some(result))
                        }
                        Some(_) => Err(mlua::Error::runtime("Invalid format")),
                        None => {
                            // Default is to read a line
                            let mut line = Vec::new();
                            let mut found_newline = false;
                            while position < content_vec.len() {
                                let byte = content_vec[position];
                                position += 1;
                                if byte == b'\n' {
                                    found_newline = true;
                                    break;
                                }
                                // Handle \r\n
                                if byte == b'\r' {
                                    if position < content_vec.len()
                                        && content_vec[position] == b'\n'
                                    {
                                        position += 1;
                                        found_newline = true;
                                        break;
                                    }
                                }
                                line.push(byte);
                            }
                            file_userdata.set("__position", position)?;
                            if !found_newline && line.is_empty() && position >= content_vec.len() {
                                return Ok(None); // EOF
                            }
                            let result = String::from_utf8_lossy(&line).to_string();
                            Ok(Some(result))
                        }
                    }
                },
            )?
        };
        file.set("read", read_fn)?;
        // write method: overwrites bytes at the current position (growing the
        // buffer as needed) and mirrors the result into fs_changes.
        let write_fn = {
            let fs_changes = fs_changes.clone();
            lua.create_function(move |_lua, (file_userdata, text): (mlua::Table, String)| {
                let write_perm = file_userdata.get::<bool>("__write_perm")?;
                if !write_perm {
                    return Err(mlua::Error::runtime("File not open for writing"));
                }
                let content = file_userdata.get::<mlua::AnyUserData>("__content")?;
                let position = file_userdata.get::<usize>("__position")?;
                let content_ref = content.borrow::<FileContent>()?;
                let mut content_vec = content_ref.0.borrow_mut();
                let bytes = text.as_bytes();
                // Ensure the vector has enough capacity
                if position + bytes.len() > content_vec.len() {
                    content_vec.resize(position + bytes.len(), 0);
                }
                // Write the bytes
                for (i, &byte) in bytes.iter().enumerate() {
                    content_vec[position + i] = byte;
                }
                // Update position
                let new_position = position + bytes.len();
                file_userdata.set("__position", new_position)?;
                // Update fs_changes
                let path = file_userdata.get::<String>("__path")?;
                let path_buf = PathBuf::from(path);
                fs_changes
                    .borrow_mut()
                    .insert(path_buf, content_vec.clone());
                Ok(true)
            })?
        };
        file.set("write", write_fn)?;
        // If we got this far, the file was opened successfully
        Ok((Some(file), String::new()))
    })
}
/// Executes `script` inside a sandboxed Lua interpreter and returns everything
/// it printed plus the in-memory filesystem edits it made.
pub fn run_sandboxed_lua(
    script: &str,
    fs_changes: HashMap<PathBuf, Vec<u8>>,
    root_dir: PathBuf,
) -> Result<ScriptOutput> {
    let lua = Lua::new();
    // Cap the interpreter at 2 GB so a runaway script can't exhaust memory.
    lua.set_memory_limit(2 * 1024 * 1024 * 1024)?;
    // Shared state the sandboxed globals write into.
    let printed_lines = Rc::new(RefCell::new(Vec::new()));
    let fs = Rc::new(RefCell::new(fs_changes));
    {
        let globals = lua.globals();
        globals.set("sb_print", print(&lua, Rc::clone(&printed_lines))?)?;
        globals.set("search", search(&lua, Rc::clone(&fs), root_dir.clone())?)?;
        globals.set("sb_io_open", io_open(&lua, Rc::clone(&fs), root_dir)?)?;
        globals.set("user_script", script)?;
    }
    lua.load(SANDBOX_PREAMBLE).exec()?;
    // Dropping the interpreter releases its Rc clones so try_unwrap succeeds.
    drop(lua);
    let printed_lines = Rc::try_unwrap(printed_lines)
        .expect("There are still other references to printed_lines")
        .into_inner();
    let fs_changes = Rc::try_unwrap(fs)
        .expect("There are still other references to fs_changes")
        .into_inner();
    Ok(ScriptOutput {
        printed_lines,
        fs_changes,
    })
}
/// Result of running a sandboxed Lua script (see `run_sandboxed_lua`).
pub struct ScriptOutput {
    // Lines captured from the script's sandboxed print().
    printed_lines: Vec<String>,
    // Full new contents of every file the script (re)wrote, keyed by path.
    // Changes live only in memory — nothing was written to disk.
    #[allow(dead_code)]
    fs_changes: HashMap<PathBuf, Vec<u8>>,
}
#[allow(dead_code)]
impl ScriptOutput {
    /// Builds a git-style unified diff per changed path: the file currently
    /// on disk (or /dev/null for new files) versus the in-memory bytes in
    /// `fs_changes`.
    ///
    /// NOTE(review): the line-matching below is a simplified greedy algorithm
    /// with a 10-line lookahead window; hunk headers are approximate, so the
    /// output is for display to the LLM, not for feeding to `git apply`.
    fn fs_diff(&self) -> HashMap<PathBuf, String> {
        let mut diff_map = HashMap::new();
        for (path, content) in &self.fs_changes {
            let diff = if path.exists() {
                // Read the current file content
                match std::fs::read(path) {
                    Ok(current_content) => {
                        // Convert both to strings for diffing
                        let new_content = String::from_utf8_lossy(content).to_string();
                        let old_content = String::from_utf8_lossy(&current_content).to_string();
                        // Generate a git-style diff
                        let new_lines: Vec<&str> = new_content.lines().collect();
                        let old_lines: Vec<&str> = old_content.lines().collect();
                        let path_str = path.to_string_lossy();
                        let mut diff = format!("diff --git a/{} b/{}\n", path_str, path_str);
                        diff.push_str(&format!("--- a/{}\n", path_str));
                        diff.push_str(&format!("+++ b/{}\n", path_str));
                        // Very basic diff algorithm - this is simplified
                        let mut i = 0;
                        let mut j = 0;
                        while i < old_lines.len() || j < new_lines.len() {
                            // Advance in lockstep over identical lines.
                            if i < old_lines.len()
                                && j < new_lines.len()
                                && old_lines[i] == new_lines[j]
                            {
                                i += 1;
                                j += 1;
                                continue;
                            }
                            // Find next matching line
                            let mut next_i = i;
                            let mut next_j = j;
                            let mut found = false;
                            // Look ahead for matches
                            for look_i in i..std::cmp::min(i + 10, old_lines.len()) {
                                for look_j in j..std::cmp::min(j + 10, new_lines.len()) {
                                    if old_lines[look_i] == new_lines[look_j] {
                                        next_i = look_i;
                                        next_j = look_j;
                                        found = true;
                                        break;
                                    }
                                }
                                if found {
                                    break;
                                }
                            }
                            // Output the hunk header
                            // NOTE(review): when no resync point is found the
                            // header claims the remainder of both files, but
                            // the loops below print nothing (next_i == i) —
                            // the trailing lines are dropped from the output.
                            diff.push_str(&format!(
                                "@@ -{},{} +{},{} @@\n",
                                i + 1,
                                if found {
                                    next_i - i
                                } else {
                                    old_lines.len() - i
                                },
                                j + 1,
                                if found {
                                    next_j - j
                                } else {
                                    new_lines.len() - j
                                }
                            ));
                            // Output removed lines
                            for k in i..next_i {
                                diff.push_str(&format!("-{}\n", old_lines[k]));
                            }
                            // Output added lines
                            for k in j..next_j {
                                diff.push_str(&format!("+{}\n", new_lines[k]));
                            }
                            i = next_i;
                            j = next_j;
                            if found {
                                i += 1;
                                j += 1;
                            } else {
                                break;
                            }
                        }
                        diff
                    }
                    Err(_) => format!("Error reading current file: {}", path.display()),
                }
            } else {
                // New file
                let content_str = String::from_utf8_lossy(content).to_string();
                let path_str = path.to_string_lossy();
                let mut diff = format!("diff --git a/{} b/{}\n", path_str, path_str);
                diff.push_str("new file mode 100644\n");
                diff.push_str("--- /dev/null\n");
                diff.push_str(&format!("+++ b/{}\n", path_str));
                let lines: Vec<&str> = content_str.lines().collect();
                diff.push_str(&format!("@@ -0,0 +1,{} @@\n", lines.len()));
                for line in lines {
                    diff.push_str(&format!("+{}\n", line));
                }
                diff
            };
            diff_map.insert(path.clone(), diff);
        }
        diff_map
    }
    /// Concatenates the per-file diffs (sorted by path for determinism) into
    /// one string; returns a fixed message when nothing changed.
    fn diff_to_string(&self) -> String {
        let mut answer = String::new();
        let diff_map = self.fs_diff();
        if diff_map.is_empty() {
            return "No changes to files".to_string();
        }
        // Sort the paths for consistent output
        let mut paths: Vec<&PathBuf> = diff_map.keys().collect();
        paths.sort();
        for path in paths {
            if !answer.is_empty() {
                answer.push_str("\n");
            }
            answer.push_str(&diff_map[path]);
        }
        answer
    }
}

View File

@@ -1,22 +0,0 @@
You can write a Lua script and I'll run it on my codebase and tell you what its
output was, including both stdout as well as the git diff of changes it made to
the filesystem. That way, you can get more information about the code base, or
make changes to the code base directly.
The Lua script will have access to `io` and it will run with the current working
directory being the root of the code base, so you can use it to explore,
search, make changes, etc. You can also have the script print things, and I'll
tell you what the output was. Note that `io` only has `open`, and then the file
it returns only has the methods read, write, and close - it doesn't have popen
or anything else.
Also, I'm going to be putting this Lua script into JSON, so please don't use
Lua's double quote syntax for string literals - use one of Lua's other syntaxes
for string literals, so I don't have to escape the double quotes.
There will be a global called `search` which accepts a regex (it's implemented
using Rust's regex crate, so use that regex syntax) and runs that regex on the
contents of every file in the code base (aside from gitignored files), then
returns an array of tables with two fields: "path" (the path to the file that
had the matches) and "matches" (an array of strings, with each string being a
match that was found within the file).

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,152 @@
/// This module works with streaming_lua to allow us to run fragments of
/// Lua scripts that come back from LLM JSON tool calls immediately as they arrive,
/// even when the full script (and the full JSON) has not been received yet.
pub fn from_json(json_str: &str) {
    // The JSON structure we're looking for is very simple:
    // 1. Open curly bracket
    // 2. Optional whitespace
    // 3. Quoted key - either "lua_script" or "description" (if description, just parse it)
    // 4. Colon
    // 5. Optional whitespace
    // 6. Open quote
    // 7. Now we start streaming until we see a closed quote
    //
    // TODO all of this needs to be stored in state in a struct instead of in variables,
    // and that includes the iterator part.
    let mut chars = json_str.trim_start().chars().peekable();
    // Skip the opening curly brace
    if chars.next() != Some('{') {
        return;
    }
    let key = parse_key(&mut chars);
    // `as_deref` borrows the Option's contents as &str; the previous
    // `key.map(|k| k.as_str())` moved `key` and borrowed from the moved
    // value, which did not compile.
    match key.as_deref() {
        Some("description") => {
            // TODO parse the description here
            parse_comma_then_quote(&mut chars);
            if parse_key(&mut chars).as_deref() != Some("lua_script") {
                return; // This was the only remaining valid option.
            }
            // TODO parse the script here, remembering to s/backslash//g to unescape everything.
        }
        Some("lua_script") => {
            // TODO parse the script here, remembering to s/backslash//g to unescape everything.
            parse_comma_then_quote(&mut chars);
            if parse_key(&mut chars).as_deref() != Some("description") {
                return; // This was the only remaining valid option.
            }
            // TODO parse the description here
        }
        _ => {
            // The key wasn't one of the two valid options.
            return;
        }
    }
    // Parse the quoted value, honoring JSON backslash escapes.
    let mut value = String::new();
    let mut escape_next = false;
    for c in chars.by_ref() {
        if escape_next {
            value.push(match c {
                'n' => '\n',
                't' => '\t',
                'r' => '\r',
                '\\' => '\\',
                '"' => '"',
                _ => c,
            });
            escape_next = false;
        } else if c == '\\' {
            escape_next = true;
        } else if c == '"' {
            break; // End of value
        } else {
            value.push(c);
        }
    }
    // Process the parsed key-value pair
    match key.as_deref() {
        Some("lua_script") => {
            // Handle the lua script
            println!("Found lua script: {}", value);
        }
        Some("description") => {
            // Handle the description
            println!("Found description: {}", value);
        }
        _ => {} // Should not reach here due to earlier check
    }
}
/// Reads a quoted JSON object key from the stream, then consumes the colon
/// and the opening quote of its value, returning the key text. Returns None
/// on any malformed input; returns the partial key if the stream runs dry.
fn parse_key(chars: &mut impl Iterator<Item = char>) -> Option<String> {
    // Consume leading whitespace up to the key's opening quote. Any other
    // character means the input is malformed.
    loop {
        match chars.next() {
            Some(c) if c.is_whitespace() => continue,
            Some('"') | None => break,
            Some(_) => return None,
        }
    }
    // Accumulate the key itself. The keys we expect never contain
    // backslashes or embedded quotes, so no escape handling is needed.
    let mut key = String::new();
    for c in chars.by_ref() {
        if c == '"' {
            break;
        }
        key.push(c);
    }
    // After the key: optional whitespace, a colon, more optional whitespace,
    // then the opening quote of the value.
    let mut seen_colon = false;
    for c in chars.by_ref() {
        match c {
            ':' => seen_colon = true,
            '"' if seen_colon => break, // the value's opening quote
            c if c.is_whitespace() => {}
            _ => return None, // unexpected character
        }
    }
    Some(key)
}
/// Consumes `<whitespace> , <whitespace> "` from the stream, returning `true`
/// when exactly that sequence was present (leaving the iterator just past the
/// opening quote) and `false` on any deviation.
///
/// The parameter must be `Peekable`: `peek()` is not part of the plain
/// `Iterator` trait (the previous `&mut impl Iterator` signature did not
/// compile), and the caller already hands us a peekable char iterator.
fn parse_comma_then_quote(
    chars: &mut std::iter::Peekable<impl Iterator<Item = char>>,
) -> bool {
    // Skip any whitespace before the comma.
    while chars.peek().is_some_and(|c| c.is_whitespace()) {
        chars.next();
    }
    // Check for comma
    if chars.next() != Some(',') {
        return false;
    }
    // Skip any whitespace after the comma.
    while chars.peek().is_some_and(|c| c.is_whitespace()) {
        chars.next();
    }
    // Check for opening quote
    chars.next() == Some('"')
}

View File

@@ -0,0 +1,268 @@
/// This module accepts fragments of Lua code from LLM responses, and executes
/// them as they come in (to the extent possible) rather than having to wait
/// for the entire script to arrive to execute it. (Since these are tool calls,
/// they will presumably come back in JSON; it's up to the caller to deal with
/// parsing the JSON, escaping `\\` and `\"` in the JSON-quoted Lua, etc.)
///
/// By design, Lua does not preserve top-level locals across chunks ("chunk" is a
/// Lua term for a chunk of Lua code that can be executed), and chunks are the
/// smallest unit of execution you can run in Lua. To make sure that top-level
/// locals the LLM writes are preserved across multiple chunks, this module
/// silently translates locals to globals. This should be harmless for our use
/// case, because we only
/// have a single "file" and not multiple files where the distinction could matter.
///
/// Since fragments will invariably arrive that don't happen to correspond to valid
/// Lua chunks (e.g. maybe they have an opening quote for a string literal and the
/// close quote will be coming in the next fragment), we use a simple heuristic to
/// split them up: we take each fragment and split it into lines, and then whenever
/// we have a complete line, we send it to Lua to process as a chunk. If it comes back
/// with a syntax error due to it being incomplete (which mlua tells us), then we
/// know to keep waiting for more lines and try again.
///
/// Eventually we'll either succeed, or else the response will end and we'll know it
/// had an actual syntax error. (Again, it's the caller's responsibility to deal
/// with detecting when the response ends due to the JSON quote having finally closed.)
///
/// This heuristic relies on the assumption that the LLM is generating normal-looking
/// Lua code where statements are split using newlines rather than semicolons.
/// In practice, this is a safe assumption.
#[derive(Default)]
struct ChunkBuffer {
    // Lua source received so far but not yet successfully executed.
    buffer: String,
    // True while we are inside a `[[ ... ]]` multiline string literal, so we
    // can skip re-parsing until the closing `]]` arrives.
    incomplete_multiline_string: bool,
    // Newline scan position within `buffer`.
    // NOTE(review): receive_chunk mixes slice-relative and absolute offsets
    // when reading and updating this field — verify before relying on it.
    last_newline_index: usize,
}
impl ChunkBuffer {
    /// Feeds one streamed fragment of Lua source into the buffer, then tries
    /// to execute every newly completed prefix of lines as a chunk. Prefixes
    /// that fail with an *incomplete input* syntax error are retried once
    /// more lines arrive; any other error is returned immediately.
    ///
    /// Fixes to the original line scan:
    /// * `find('\n')` returns an index relative to the searched slice, but it
    ///   was used as an absolute buffer index (panicking or re-finding the
    ///   same newline once a parse came back incomplete);
    /// * the `local ` strip used `last_newline_index..LOCAL_PREFIX.len()` as
    ///   its range, which is wrong (and panics) whenever the line does not
    ///   start at offset 0;
    /// * the scan position now advances *past* each newline it has examined.
    pub fn receive_chunk(
        &mut self,
        src_chunk: &str,
        exec_chunk: &mut impl FnMut(&str) -> mlua::Result<()>,
    ) -> mlua::Result<()> {
        self.buffer.push_str(src_chunk);
        // `self.last_newline_index` is the start of the first line we have
        // not finished scanning yet; process each newly completed line.
        while let Some(rel) = self.buffer[self.last_newline_index..].find('\n') {
            // Convert the slice-relative match position to an absolute index.
            let mut newline_index = self.last_newline_index + rel;
            let line_start = self.last_newline_index;
            {
                let line = &self.buffer[line_start..newline_index];
                const LOCAL_PREFIX: &str = "local ";
                // LLMs can produce incredibly long multiline strings. We don't
                // want to keep attempting to re-parse those every time a new
                // line of the string comes in - that would be extremely
                // wasteful! Instead, just keep waiting until the string ends.
                //
                // It's safe to assume we'll never see a line which includes
                // both "]]" and "[[" other than single-line assignments which
                // are just using them to escape quotes. If that assumption
                // turns out not to hold, we can always make this more robust.
                if line.contains("[[") && !line.contains("]]") {
                    self.incomplete_multiline_string = true;
                }
                // In practice, LLMs produce multiline strings that always end
                // with the ]] at the start of the line.
                if line.starts_with("]]") {
                    self.incomplete_multiline_string = false;
                } else if line.starts_with(LOCAL_PREFIX) {
                    // Top-level locals don't survive across chunk executions,
                    // so rewrite them as globals. Since the whole script is a
                    // single "file", the two are equivalent for our purposes.
                    self.buffer
                        .replace_range(line_start..line_start + LOCAL_PREFIX.len(), "");
                    newline_index -= LOCAL_PREFIX.len();
                }
            }
            // Resume the next scan just past this newline so we never
            // re-find the same one.
            self.last_newline_index = newline_index + 1;
            if self.incomplete_multiline_string {
                continue;
            }
            // Try to execute everything up to (but excluding) this newline.
            match exec_chunk(&self.buffer[..newline_index]) {
                Ok(()) => {
                    // Executed successfully: drop the consumed source (and
                    // the newline itself) and restart the scan at the front.
                    self.buffer = self.buffer[newline_index + 1..].to_string();
                    self.last_newline_index = 0;
                }
                Err(mlua::Error::SyntaxError {
                    incomplete_input: true,
                    message: _,
                }) => {
                    // Incomplete input is expected mid-stream: keep
                    // accumulating lines and retry with a longer prefix.
                }
                Err(other) => {
                    return Err(other);
                }
            }
        }
        Ok(())
    }
    /// Executes whatever is still buffered once the stream has ended. Any
    /// syntax error at this point is a real error, not an artifact of
    /// streaming.
    pub fn finish(
        &mut self,
        exec_chunk: &mut impl FnMut(&str) -> mlua::Result<()>,
    ) -> mlua::Result<()> {
        if !self.buffer.is_empty() {
            exec_chunk(&self.buffer)?;
            // Everything ran; reset to a clean state.
            self.buffer.clear();
            self.last_newline_index = 0;
            self.incomplete_multiline_string = false;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use mlua::Lua;
    use std::cell::RefCell;
    use std::rc::Rc;

    /// Creates a Lua instance whose global `print` appends each message
    /// (plus a trailing newline) to the returned shared output buffer.
    ///
    /// Shared setup for the tests that reuse a single Lua instance across
    /// chunks; previously this block was duplicated in each of those tests.
    fn lua_with_captured_output() -> (Lua, Rc<RefCell<String>>) {
        let output = Rc::new(RefCell::new(String::new()));
        let lua = Lua::new();
        let output_ref = output.clone();
        lua.globals()
            .set(
                "print",
                lua.create_function(move |_, msg: String| {
                    let mut output = output_ref.borrow_mut();
                    output.push_str(&msg);
                    output.push('\n');
                    Ok(())
                })
                .unwrap(),
            )
            .unwrap();
        (lua, output)
    }

    #[test]
    fn test_lua_runtime_receive_chunk() {
        let mut chunk_buffer = ChunkBuffer::default();
        let output = Rc::new(RefCell::new(String::new()));
        // This test deliberately creates a fresh Lua instance per chunk; all
        // instances share the same `output` accumulator.
        let mut exec_chunk = |chunk: &str| -> mlua::Result<()> {
            let lua = Lua::new();
            // Clone the Rc to share ownership of the same RefCell
            let output_ref = output.clone();
            lua.globals().set(
                "print",
                lua.create_function(move |_, msg: String| {
                    let mut output = output_ref.borrow_mut();
                    output.push_str(&msg);
                    output.push('\n');
                    Ok(())
                })?,
            )?;
            lua.load(chunk).exec()
        };
        chunk_buffer
            .receive_chunk("print('Hello, World!')", &mut exec_chunk)
            .unwrap();
        // The chunk has no trailing newline, so it is still buffered;
        // `finish` flushes and executes it. (Previously the test called
        // `exec_chunk` directly, which satisfied the assertion without
        // ever executing anything through the ChunkBuffer.)
        chunk_buffer.finish(&mut exec_chunk).unwrap();
        assert_eq!(*output.borrow(), "Hello, World!\n");
    }

    #[test]
    fn test_lua_runtime_receive_chunk_shared_lua() {
        let mut chunk_buffer = ChunkBuffer::default();
        let (lua, output) = lua_with_captured_output();
        let mut exec_chunk = |chunk: &str| -> mlua::Result<()> { lua.load(chunk).exec() };
        // Send first incomplete chunk
        chunk_buffer
            .receive_chunk("local message = 'Hello, '\n", &mut exec_chunk)
            .unwrap();
        // Send second chunk that completes the code
        chunk_buffer
            .receive_chunk(
                "message = message .. 'World!'\nprint(message)",
                &mut exec_chunk,
            )
            .unwrap();
        chunk_buffer.finish(&mut exec_chunk).unwrap();
        assert_eq!(*output.borrow(), "Hello, World!\n");
    }

    #[test]
    fn test_multiline_string_across_chunks() {
        let mut chunk_buffer = ChunkBuffer::default();
        let (lua, output) = lua_with_captured_output();
        let mut exec_chunk = |chunk: &str| -> mlua::Result<()> { lua.load(chunk).exec() };
        // Send first chunk with the beginning of a multiline string
        chunk_buffer
            .receive_chunk("local multiline = [[This is the start\n", &mut exec_chunk)
            .unwrap();
        // Send second chunk with more lines
        chunk_buffer
            .receive_chunk("of a very long\nmultiline string\n", &mut exec_chunk)
            .unwrap();
        // Send third chunk with more content
        chunk_buffer
            .receive_chunk("that spans across\n", &mut exec_chunk)
            .unwrap();
        // Send final chunk that completes the multiline string
        chunk_buffer
            .receive_chunk("multiple chunks]]\nprint(multiline)", &mut exec_chunk)
            .unwrap();
        chunk_buffer.finish(&mut exec_chunk).unwrap();
        let expected = "This is the start\nof a very long\nmultiline string\nthat spans across\nmultiple chunks\n";
        assert_eq!(*output.borrow(), expected);
    }
}

View File

@@ -311,7 +311,7 @@ const FILE_ICONS: &[(&str, &str)] = &[
("lock", "icons/file_icons/lock.svg"),
("log", "icons/file_icons/info.svg"),
("lua", "icons/file_icons/lua.svg"),
("luau", "icons/file_icons/luau.svg"),
("luau", "icons/file_icons/file.svg"),
("markdown", "icons/file_icons/book.svg"),
("metal", "icons/file_icons/metal.svg"),
("nim", "icons/file_icons/nim.svg"),

View File

@@ -307,7 +307,7 @@ impl TitleBar {
cx.notify()
}),
);
subscriptions.push(cx.subscribe(&project, |_, _, _, cx| cx.notify()));
subscriptions.push(cx.observe(&project, |_, _, cx| cx.notify()));
subscriptions.push(cx.observe(&active_call, |this, _, cx| this.active_call_changed(cx)));
subscriptions.push(cx.observe_window_activation(window, Self::window_activation_changed));
subscriptions.push(cx.observe(&user_store, |_, _, cx| cx.notify()));

View File

@@ -852,7 +852,6 @@ impl<T: Item> ItemHandle for Entity<T> {
.detach();
let item_id = self.item_id();
workspace.update_item_dirty_state(self, window, cx);
cx.observe_release_in(self, window, move |workspace, _, _, _| {
workspace.panes_by_item.remove(&item_id);
event_subscription.take();

View File

@@ -2424,10 +2424,14 @@ impl Pane {
.child(label),
);
let single_entry_to_resolve = self.items[ix]
.is_singleton(cx)
.then(|| self.items[ix].project_entry_ids(cx).get(0).copied())
.flatten();
let single_entry_to_resolve = {
let item_entries = self.items[ix].project_entry_ids(cx);
if item_entries.len() == 1 {
Some(item_entries[0])
} else {
None
}
};
let total_items = self.items.len();
let has_items_to_left = ix > 0;

View File

@@ -879,6 +879,8 @@ impl Workspace {
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
cx.observe_in(&project, window, |_, _, _, cx| cx.notify())
.detach();
cx.subscribe_in(&project, window, move |this, _, event, window, cx| {
match event {
project::Event::RemoteIdChanged(_) => {

View File

@@ -1570,6 +1570,7 @@ impl LocalWorktree {
this.update_abs_path_and_refresh(new_path, cx);
}
}
cx.notify();
})
.ok();
}
@@ -3424,7 +3425,6 @@ impl BackgroundScannerState {
}
fn remove_path(&mut self, path: &Path) {
log::info!("background scanner removing path {path:?}");
let mut new_entries;
let removed_entries;
{
@@ -3480,14 +3480,7 @@ impl BackgroundScannerState {
.git_repositories
.retain(|id, _| removed_ids.binary_search(id).is_err());
self.snapshot.repositories.retain(&(), |repository| {
let retain = !repository.work_directory.path_key().0.starts_with(path);
if !retain {
log::info!(
"dropping repository entry for {:?}",
repository.work_directory
);
}
retain
!repository.work_directory.path_key().0.starts_with(path)
});
#[cfg(test)]
@@ -3542,14 +3535,12 @@ impl BackgroundScannerState {
fs: &dyn Fs,
watcher: &dyn Watcher,
) -> Option<LocalRepositoryEntry> {
log::info!("insert git reposiutory for {dot_git_path:?}");
let work_dir_id = self
.snapshot
.entry_for_path(work_directory.path_key().0)
.map(|entry| entry.id)?;
if self.snapshot.git_repositories.get(&work_dir_id).is_some() {
log::info!("existing git repository for {work_directory:?}");
return None;
}
@@ -3557,7 +3548,6 @@ impl BackgroundScannerState {
let t0 = Instant::now();
let repository = fs.open_repo(&dot_git_abs_path)?;
log::info!("opened git repo for {dot_git_abs_path:?}");
let repository_path = repository.path();
watcher.add(&repository_path).log_err()?;
@@ -3616,7 +3606,6 @@ impl BackgroundScannerState {
.git_repositories
.insert(work_dir_id, local_repository.clone());
log::info!("inserting new local git repository");
Some(local_repository)
}
}
@@ -3960,6 +3949,10 @@ pub struct StatusEntry {
}
impl StatusEntry {
pub fn is_staged(&self) -> Option<bool> {
self.status.is_staged()
}
fn to_proto(&self) -> proto::StatusEntry {
let simple_status = match self.status {
FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
@@ -4360,7 +4353,7 @@ impl BackgroundScanner {
}
let ancestor_dot_git = ancestor.join(*DOT_GIT);
log::info!("considering ancestor: {ancestor_dot_git:?}");
log::debug!("considering ancestor: {ancestor_dot_git:?}");
// Check whether the directory or file called `.git` exists (in the
// case of worktrees it's a file.)
if self
@@ -4369,6 +4362,7 @@ impl BackgroundScanner {
.await
.is_ok_and(|metadata| metadata.is_some())
{
log::debug!(".git path exists");
if index != 0 {
// We canonicalize, since the FS events use the canonicalized path.
if let Some(ancestor_dot_git) =
@@ -4379,7 +4373,7 @@ impl BackgroundScanner {
.strip_prefix(ancestor)
.unwrap()
.into();
log::info!(
log::debug!(
"inserting parent git repo for this worktree: {location_in_repo:?}"
);
// We associate the external git repo with our root folder and
@@ -4402,10 +4396,12 @@ impl BackgroundScanner {
// Reached root of git repository.
break;
} else {
log::debug!(".git path doesn't exist");
}
}
log::info!("containing git repository: {containing_git_repository:?}");
log::debug!("containing git repository: {containing_git_repository:?}");
let (scan_job_tx, scan_job_rx) = channel::unbounded();
{
@@ -4830,7 +4826,7 @@ impl BackgroundScanner {
log::error!("skipping excluded directory {:?}", job.path);
return Ok(());
}
log::info!("scanning directory {:?}", job.path);
log::debug!("scanning directory {:?}", job.path);
root_abs_path = snapshot.abs_path().clone();
root_char_bag = snapshot.root_char_bag;
}
@@ -5412,7 +5408,7 @@ impl BackgroundScanner {
}
fn update_git_repositories(&self, dot_git_paths: Vec<PathBuf>) -> Task<()> {
log::info!("reloading repositories: {dot_git_paths:?}");
log::debug!("reloading repositories: {dot_git_paths:?}");
let mut status_updates = Vec::new();
{
@@ -5893,21 +5889,14 @@ impl WorktreeModelHandle for Entity<Worktree> {
.await
.unwrap();
let mut events = cx.events(&tree);
while events.next().await.is_some() {
if tree.update(cx, |tree, _| tree.entry_for_path(file_name).is_some()) {
break;
}
}
cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_some())
.await;
fs.remove_file(&root_path.join(file_name), Default::default())
.await
.unwrap();
while events.next().await.is_some() {
if tree.update(cx, |tree, _| tree.entry_for_path(file_name).is_none()) {
break;
}
}
cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_none())
.await;
cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
@@ -5961,22 +5950,19 @@ impl WorktreeModelHandle for Entity<Worktree> {
.await
.unwrap();
let mut events = cx.events(&tree);
while events.next().await.is_some() {
if tree.update(cx, |tree, _| scan_id_increased(tree, &mut git_dir_scan_id)) {
break;
}
}
cx.condition(&tree, |tree, _| {
scan_id_increased(tree, &mut git_dir_scan_id)
})
.await;
fs.remove_file(&root_path.join(file_name), Default::default())
.await
.unwrap();
while events.next().await.is_some() {
if tree.update(cx, |tree, _| scan_id_increased(tree, &mut git_dir_scan_id)) {
break;
}
}
cx.condition(&tree, |tree, _| {
scan_id_increased(tree, &mut git_dir_scan_id)
})
.await;
cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;

View File

@@ -12,7 +12,6 @@ use git::{
},
GITIGNORE,
};
use git2::RepositoryInitOptions;
use gpui::{AppContext as _, BorrowAppContext, Context, Task, TestAppContext};
use parking_lot::Mutex;
use postage::stream::Stream;
@@ -856,7 +855,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
"ignored-dir": {}
}));
let worktree = Worktree::local(
let tree = Worktree::local(
dir.path(),
true,
Arc::new(RealFs::default()),
@@ -869,34 +868,32 @@ async fn test_write_file(cx: &mut TestAppContext) {
#[cfg(not(target_os = "macos"))]
fs::fs_watcher::global(|_| {}).unwrap();
cx.read(|cx| worktree.read(cx).as_local().unwrap().scan_complete())
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
worktree.flush_fs_events(cx).await;
tree.flush_fs_events(cx).await;
worktree
.update(cx, |tree, cx| {
tree.write_file(
Path::new("tracked-dir/file.txt"),
"hello".into(),
Default::default(),
cx,
)
})
.await
.unwrap();
worktree
.update(cx, |tree, cx| {
tree.write_file(
Path::new("ignored-dir/file.txt"),
"world".into(),
Default::default(),
cx,
)
})
.await
.unwrap();
tree.update(cx, |tree, cx| {
tree.write_file(
Path::new("tracked-dir/file.txt"),
"hello".into(),
Default::default(),
cx,
)
})
.await
.unwrap();
tree.update(cx, |tree, cx| {
tree.write_file(
Path::new("ignored-dir/file.txt"),
"world".into(),
Default::default(),
cx,
)
})
.await
.unwrap();
worktree.read_with(cx, |tree, _| {
tree.read_with(cx, |tree, _| {
let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
assert!(!tracked.is_ignored);
@@ -3352,7 +3349,7 @@ async fn test_conflicted_cherry_pick(cx: &mut TestAppContext) {
.expect("Failed to get HEAD")
.peel_to_commit()
.expect("HEAD is not a commit");
git_checkout("refs/heads/main", &repo);
git_checkout("refs/heads/master", &repo);
std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
git_add("a.txt", &repo);
git_commit("improve letter", &repo);
@@ -3482,9 +3479,7 @@ const MODIFIED: GitSummary = GitSummary {
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
let mut init_opts = RepositoryInitOptions::new();
init_opts.initial_head("main");
git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
git2::Repository::init(path).expect("Failed to initialize git repository")
}
#[track_caller]

View File

@@ -326,7 +326,6 @@ fn main() {
.or_else(read_proxy_from_env);
let http = {
let _guard = Tokio::handle(cx).enter();
ReqwestClient::proxy_and_user_agent(proxy_url, &user_agent)
.expect("could not start HTTP client")
};

View File

@@ -114,9 +114,8 @@ pub mod workspace {
}
pub mod git {
use gpui::{action_with_deprecated_aliases, actions};
use gpui::action_with_deprecated_aliases;
actions!(git, [CheckoutBranch, Switch]);
action_with_deprecated_aliases!(git, Branch, ["branches::OpenRecent"]);
}

View File

@@ -1,6 +1,6 @@
# Haskell
Haskell support is available through the [Haskell extension](https://github.com/zed-extensions/haskell).
Haskell support is available through the [Haskell extension](https://github.com/zed-industries/zed/tree/main/extensions/haskell).
- Tree-sitter: [tree-sitter-haskell](https://github.com/tree-sitter/tree-sitter-haskell)
- Language Server: [haskell-language-server](https://github.com/haskell/haskell-language-server)

View File

@@ -89,12 +89,14 @@ Linux works on a large variety of systems configured in many different ways. We
If you see an error like "/lib64/libc.so.6: version 'GLIBC_2.29' not found" it means that your distribution's version of glibc is too old. You can either upgrade your system, or [install Zed from source](./development/linux.md).
### Graphics issues
### Zed fails to open windows
### Zed is very slow
Zed requires a GPU to run effectively. Under the hood, we use [Vulkan](https://www.vulkan.org/) to communicate with your GPU. If you are seeing problems with performance, or Zed fails to load, it is possible that Vulkan is the culprit.
If you're using an AMD GPU, you might get a 'Broken Pipe' error. Try using the RADV or Mesa drivers. (See the following GitHub issue for more details: [#13880](https://github.com/zed-industries/zed/issues/13880)).
If you see a notification saying `Zed failed to open a window: NoSupportedDeviceFound` this means that Vulkan cannot find a compatible GPU. You can begin troubleshooting Vulkan by installing the `vulkan-tools` package and running:
```sh
@@ -103,23 +105,21 @@ vkcube
This should output a line describing your current graphics setup and show a rotating cube. If this does not work, you should be able to fix it by installing Vulkan compatible GPU drivers, however in some cases (for example running Linux on an Arm-based MacBook) there is no Vulkan support yet.
You can find out which graphics card Zed is using by looking in the Zed log (`~/.local/share/zed/logs/Zed.log`) for `Using GPU: ...`.
If you see errors like `ERROR_INITIALIZATION_FAILED` or `GPU Crashed` or `ERROR_SURFACE_LOST_KHR` then you may be able to work around this by installing different drivers for your GPU, or by selecting a different GPU to run on. (See the following GitHub issue for more details: [#14225](https://github.com/zed-industries/zed/issues/14225))
If you see errors like `ERROR_INITIALIZATION_FAILED` or `GPU Crashed` or `ERROR_SURFACE_LOST_KHR` then you may be able to work around this by installing different drivers for your GPU, or by selecting a different GPU to run on. (See [#14225](https://github.com/zed-industries/zed/issues/14225))
As of Zed v0.146.x we log the selected GPU driver and you should see `Using GPU: ...` in the Zed log (`~/.local/share/zed/logs/Zed.log`).
On some systems the file `/etc/prime-discrete` can be used to enforce the use of a discrete GPU using [PRIME](https://wiki.archlinux.org/title/PRIME). Depending on the details of your setup, you may need to change the contents of this file to "on" (to force discrete graphics) or "off" (to force integrated graphics).
On others, you may be able to set the environment variable `DRI_PRIME=1` when running Zed to force the use of the discrete GPU.
If you're using an AMD GPU, you might get a 'Broken Pipe' error. Try using the RADV or Mesa drivers. (See [#13880](https://github.com/zed-industries/zed/issues/13880))
If Zed is selecting your integrated GPU instead of your discrete GPU, you can fix this by exporting the environment variable `DRI_PRIME=1` before running Zed.
If you are using Mesa, and want more control over which GPU is selected you can run `MESA_VK_DEVICE_SELECT=list zed --foreground` to get a list of available GPUs and then export `MESA_VK_DEVICE_SELECT=xxxx:yyyy` to choose a specific device.
If you are using `amdvlk` you may find that zed only opens when run with `sudo $(which zed)`. To fix this, remove the `amdvlk` and `lib32-amdvlk` packages and install mesa/vulkan instead. ([#14141](https://github.com/zed-industries/zed/issues/14141)).
If you are using `amdvlk` you may find that zed only opens when run with `sudo $(which zed)`. To fix this, remove the `amdvlk` and `lib32-amdvlk` packages and install mesa/vulkan instead. ([#14141](https://github.com/zed-industries/zed/issues/14141)).
If you have a discrete GPU and you are using [PRIME](https://wiki.archlinux.org/title/PRIME) (e.g. Pop_OS 24.04, ArchLinux, etc) you may be able to configure Zed to work by switching `/etc/prime-discrete` from 'off' to 'on' (or the reverse).
For more information, the [Arch guide to Vulkan](https://wiki.archlinux.org/title/Vulkan) has some good steps that translate well to most distributions.
If Vulkan is configured correctly, and Zed is still not working for you, please [file an issue](https://github.com/zed-industries/zed) with as much information as possible.
If Vulkan is configured correctly, and Zed is still slow for you, please [file an issue](https://github.com/zed-industries/zed) with as much information as possible.
### I can't open any files
@@ -153,3 +153,20 @@ If you are seeing "too many open files" then first try `sysctl fs.inotify`.
- You should see that `max_user_watches` is 8000 or higher (you can change the limit with `sudo sysctl fs.inotify.max_user_watches=64000`). Zed needs one watch per directory in all your open projects + one per git repository + a handful more for settings, themes, keymaps, extensions.
It is also possible that you are running out of file descriptors. You can check the limits with `ulimit` and update them by editing `/etc/security/limits.conf`.
### FIPS Mode OpenSSL internal error {#fips}
If your machine is running in FIPS mode (`cat /proc/sys/crypto/fips_enabled` is set to `1`) Zed may fail to start and output the following when launched with `zed --foreground`:
```
crypto/fips/fips.c:154: OpenSSL internal error: FATAL FIPS SELFTEST FAILURE
```
As a workaround, remove the bundled `libssl` and `libcrypto` libraries from the `zed.app/lib` directory:
```
rm ~/.local/zed.app/lib/libssl.so.1.1
rm ~/.local/zed.app/lib/libcrypto.so.1.1
```
This will force zed to fallback to the system `libssl` and `libcrypto` libraries.

View File

@@ -0,0 +1,16 @@
[package]
name = "zed_haskell"
version = "0.1.3"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
[lints]
workspace = true
[lib]
path = "src/haskell.rs"
crate-type = ["cdylib"]
[dependencies]
zed_extension_api = "0.1.0"

View File

@@ -0,0 +1 @@
../../LICENSE-APACHE

View File

@@ -0,0 +1,18 @@
id = "haskell"
name = "Haskell"
description = "Haskell support."
version = "0.1.3"
schema_version = 1
authors = [
"Pocæus <github@pocaeus.com>",
"Lei <45155667+leifu1128@users.noreply.github.com>"
]
repository = "https://github.com/zed-industries/zed"
[language_servers.hls]
name = "Haskell Language Server"
language = "Haskell"
[grammars.haskell]
repository = "https://github.com/tree-sitter/tree-sitter-haskell"
commit = "8a99848fc734f9c4ea523b3f2a07df133cbbcec2"

View File

@@ -0,0 +1,3 @@
("(" @open ")" @close)
("[" @open "]" @close)
("{" @open "}" @close)

View File

@@ -0,0 +1,14 @@
name = "Haskell"
grammar = "haskell"
path_suffixes = ["hs"]
autoclose_before = ",=)}]"
line_comments = ["-- "]
block_comment = ["{- ", " -}"]
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "\"", end = "\"", close = true, newline = false },
{ start = "'", end = "'", close = true, newline = false },
{ start = "`", end = "`", close = true, newline = false },
]

View File

@@ -0,0 +1,156 @@
;; Copyright 2022 nvim-treesitter
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;; ----------------------------------------------------------------------------
;; Literals and comments
(integer) @number
(exp_negation) @number
(exp_literal (float)) @float
(char) @string
(string) @string
(con_unit) @symbol ; unit, as in ()
(comment) @comment
;; ----------------------------------------------------------------------------
;; Punctuation
[
"("
")"
"{"
"}"
"["
"]"
] @punctuation.bracket
[
(comma)
";"
] @punctuation.delimiter
;; ----------------------------------------------------------------------------
;; Keywords, operators, includes
[
"forall"
"∀"
] @keyword
(pragma) @constant
[
"if"
"then"
"else"
"case"
"of"
] @keyword
(exp_lambda_cases "\\" ("cases" @variant))
[
"import"
"qualified"
"module"
] @keyword
[
(operator)
(constructor_operator)
(type_operator)
(tycon_arrow)
(qualified_module) ; grabs the `.` (dot), ex: import System.IO
(all_names)
(wildcard)
"="
"|"
"::"
"=>"
"->"
"<-"
"\\"
"`"
"@"
] @operator
(module) @title
[
(where)
"let"
"in"
"class"
"instance"
"data"
"newtype"
"family"
"type"
"as"
"hiding"
"deriving"
"via"
"stock"
"anyclass"
"do"
"mdo"
"rec"
"infix"
"infixl"
"infixr"
] @keyword
;; ----------------------------------------------------------------------------
;; Functions and variables
(variable) @variable
(pat_wildcard) @variable
(signature name: (variable) @type)
(function
name: (variable) @function
patterns: (patterns))
((signature (fun)) . (function (variable) @function))
((signature (context (fun))) . (function (variable) @function))
((signature (forall (context (fun)))) . (function (variable) @function))
(exp_infix (variable) @operator) ; consider infix functions as operators
(exp_infix (exp_name) @function (#set! "priority" 101))
(exp_apply . (exp_name (variable) @function))
(exp_apply . (exp_name (qualified_variable (variable) @function)))
;; ----------------------------------------------------------------------------
;; Types
(type) @type
(type_variable) @type
(constructor) @constructor
; True or False
((constructor) @_bool (#match? @_bool "(True|False)")) @boolean
;; ----------------------------------------------------------------------------
;; Quasi-quotes
(quoter) @function
; Highlighting of quasiquote_body is handled by injections.scm

View File

@@ -0,0 +1,3 @@
(_ "[" "]" @end) @indent
(_ "{" "}" @end) @indent
(_ "(" ")" @end) @indent

View File

@@ -0,0 +1,26 @@
(adt
"data" @context
name: (type) @name) @item
(type_alias
"type" @context
name: (type) @name) @item
(newtype
"newtype" @context
name: (type) @name) @item
(signature
name: (variable) @name) @item
(class
"class" @context
(class_head) @name) @item
(instance
"instance" @context
(instance_head) @name) @item
(foreign_import
"foreign" @context
(impent) @name) @item

View File

@@ -0,0 +1,12 @@
(comment)+ @comment.around
[
(adt)
(type_alias)
(newtype)
] @class.around
(record_fields "{" (_)* @class.inside "}")
((signature)? (function)+) @function.around
(function rhs:(_) @function.inside)

View File

@@ -0,0 +1,67 @@
use zed::lsp::{Symbol, SymbolKind};
use zed::{CodeLabel, CodeLabelSpan};
use zed_extension_api::{self as zed, Result};
struct HaskellExtension;
impl zed::Extension for HaskellExtension {
fn new() -> Self {
Self
}
fn language_server_command(
&mut self,
_language_server_id: &zed::LanguageServerId,
worktree: &zed::Worktree,
) -> Result<zed::Command> {
let path = worktree
.which("haskell-language-server-wrapper")
.ok_or_else(|| "hls must be installed via ghcup".to_string())?;
Ok(zed::Command {
command: path,
args: vec!["lsp".to_string()],
env: worktree.shell_env(),
})
}
fn label_for_symbol(
&self,
_language_server_id: &zed::LanguageServerId,
symbol: Symbol,
) -> Option<CodeLabel> {
let name = &symbol.name;
let (code, display_range, filter_range) = match symbol.kind {
SymbolKind::Struct => {
let data_decl = "data ";
let code = format!("{data_decl}{name} = A");
let display_range = 0..data_decl.len() + name.len();
let filter_range = data_decl.len()..display_range.end;
(code, display_range, filter_range)
}
SymbolKind::Constructor => {
let data_decl = "data A = ";
let code = format!("{data_decl}{name}");
let display_range = data_decl.len()..data_decl.len() + name.len();
let filter_range = 0..name.len();
(code, display_range, filter_range)
}
SymbolKind::Variable => {
let code = format!("{name} :: T");
let display_range = 0..name.len();
let filter_range = 0..name.len();
(code, display_range, filter_range)
}
_ => return None,
};
Some(CodeLabel {
spans: vec![CodeLabelSpan::code_range(display_range)],
filter_range: filter_range.into(),
code,
})
}
}
zed::register_extension!(HaskellExtension);

View File

@@ -70,6 +70,7 @@ export ZED_RELEASE_CHANNEL="${channel}"
popd
export ZED_BUNDLE=true
export MACOSX_DEPLOYMENT_TARGET=10.15.7
cargo_bundle_version=$(cargo -q bundle --help 2>&1 | head -n 1 || echo "")
if [ "$cargo_bundle_version" != "cargo-bundle v0.6.0-zed" ]; then