Compare commits

...

5 Commits

Author SHA1 Message Date
Richard Feldman
de95efb7bb wip 2025-03-28 10:34:57 -04:00
Richard Feldman
60a7455f12 assistant2: Don't scroll down if user has scrolled up (#27614)
This caused undesirable and unnecessary scrolling: if you scrolled up,
then as new messages streamed into the panel, the scroll bar would jump
back down.

@agu-z and I paired on this and we think this is unnecessary now that we
don't see the Edit button in the UI, but @bennetbo we still see code for
the button in there, so...maybe we do still want this? (If so, we can
revert the second commit and go back to a more conditional way of
scrolling. 😄)
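
As a rough illustration of what that "more conditional way of scrolling" could look like, here is a minimal standalone sketch with hypothetical names (not gpui's actual ListState API): only auto-scroll when the user is already at the bottom of the list.

// Hypothetical sketch: auto-scroll on new messages only if already at the bottom.
struct MessageList {
    item_count: usize,
    scroll_top: usize,     // index of the topmost visible item
    visible_items: usize,
}

impl MessageList {
    fn is_scrolled_to_bottom(&self) -> bool {
        self.scroll_top + self.visible_items >= self.item_count
    }

    // Append a streamed-in message, scrolling only when the user
    // hasn't scrolled up to read earlier messages.
    fn push_message(&mut self) {
        let was_at_bottom = self.is_scrolled_to_bottom();
        self.item_count += 1;
        if was_at_bottom {
            self.scroll_top = self.item_count.saturating_sub(self.visible_items);
        }
    }
}

fn main() {
    let mut list = MessageList { item_count: 10, scroll_top: 2, visible_items: 5 };
    list.push_message();
    assert_eq!(list.scroll_top, 2); // user had scrolled up: position is preserved
}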

Release Notes:

- N/A

---------

Co-authored-by: Agus <agus@zed.dev>
2025-03-28 10:32:31 -04:00
Marshall Bowers
4315b2fc8a assistant2: Reload profile when making changes to the active profile's tools (#27664)
This PR reloads the profile whenever changes are made to the active
profile's tools.
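
For illustration, a minimal standalone sketch of that behavior, using hypothetical types in place of Zed's actual ThreadStore and tool picker: the reload happens only when the edited profile is the currently active one.

// Hypothetical sketch: reload the profile only if it is the active one.
use std::collections::BTreeMap;

#[derive(Clone, Default)]
struct AgentProfile {
    enabled_tools: BTreeMap<String, bool>,
}

struct ThreadStore {
    active_profile_id: String,
    loaded_profile: AgentProfile,
}

impl ThreadStore {
    fn load_profile(&mut self, profile: &AgentProfile) {
        self.loaded_profile = profile.clone();
    }
}

fn toggle_tool(store: &mut ThreadStore, profile_id: &str, profile: &mut AgentProfile, tool: &str) {
    let enabled = profile.enabled_tools.entry(tool.to_string()).or_insert(false);
    *enabled = !*enabled;
    // Edits to inactive profiles are only persisted; the active one takes effect now.
    if store.active_profile_id == profile_id {
        store.load_profile(profile);
    }
}

fn main() {
    let mut profile = AgentProfile::default();
    let mut store = ThreadStore {
        active_profile_id: "write".into(),
        loaded_profile: AgentProfile::default(),
    };
    toggle_tool(&mut store, "write", &mut profile, "bash");
    assert!(store.loaded_profile.enabled_tools["bash"]);
}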

Release Notes:

- N/A
2025-03-28 14:08:42 +00:00
Marshall Bowers
68d453da52 assistant: Remove /search (#27661)
This PR removes the `/search` command.

This was feature-flagged and was never released to the general public.

Release Notes:

- N/A
2025-03-28 13:40:31 +00:00
Marshall Bowers
20eab9038f assistant: Remove /project (#27660)
This PR removes the `/project` command.

This was feature-flagged and was never released to the general public.

Release Notes:

- N/A
2025-03-28 13:08:16 +00:00
11 changed files with 72 additions and 464 deletions

Cargo.lock generated
View File

@@ -413,7 +413,6 @@ dependencies = [
"menu",
"multi_buffer",
"parking_lot",
"paths",
"pretty_assertions",
"project",
"prompt_library",
@@ -423,7 +422,6 @@ dependencies = [
"rope",
"schemars",
"search",
"semantic_index",
"serde",
"serde_json_lenient",
"settings",
@@ -671,13 +669,10 @@ dependencies = [
"http_client",
"indexed_docs",
"language",
"language_model",
"pretty_assertions",
"project",
"prompt_store",
"rope",
"schemars",
"semantic_index",
"serde",
"serde_json",
"settings",

View File

@@ -48,7 +48,6 @@ lsp.workspace = true
menu.workspace = true
multi_buffer.workspace = true
parking_lot.workspace = true
paths.workspace = true
project.workspace = true
prompt_library.workspace = true
prompt_store.workspace = true
@@ -56,7 +55,6 @@ proto.workspace = true
rope.workspace = true
schemars.workspace = true
search.workspace = true
semantic_index.workspace = true
serde.workspace = true
settings.workspace = true
smol.workspace = true

View File

@@ -10,7 +10,6 @@ use std::sync::Arc;
use assistant_settings::AssistantSettings;
use assistant_slash_command::SlashCommandRegistry;
use assistant_slash_commands::{ProjectSlashCommandFeatureFlag, SearchSlashCommandFeatureFlag};
use client::Client;
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::FeatureFlagAppExt;
@@ -20,7 +19,6 @@ use language_model::{
LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage,
};
use prompt_store::PromptBuilder;
use semantic_index::{CloudEmbeddingProvider, SemanticDb};
use serde::Deserialize;
use settings::{Settings, SettingsStore};
@@ -102,33 +100,6 @@ pub fn init(
AssistantSettings::register(cx);
SlashCommandSettings::register(cx);
cx.spawn({
let client = client.clone();
async move |cx| {
let is_search_slash_command_enabled = cx
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
.await;
let is_project_slash_command_enabled = cx
.update(|cx| cx.wait_for_flag::<ProjectSlashCommandFeatureFlag>())?
.await;
if !is_search_slash_command_enabled && !is_project_slash_command_enabled {
return Ok(());
}
let embedding_provider = CloudEmbeddingProvider::new(client.clone());
let semantic_index = SemanticDb::new(
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
Arc::new(embedding_provider),
cx,
)
.await?;
cx.update(|cx| cx.set_global(semantic_index))
}
})
.detach();
assistant_context_editor::init(client.clone(), cx);
prompt_library::init(cx);
init_language_model_settings(cx);
@@ -137,7 +108,7 @@ pub fn init(
assistant_panel::init(cx);
context_server::init(cx);
register_slash_commands(Some(prompt_builder.clone()), cx);
register_slash_commands(cx);
inline_assistant::init(
fs.clone(),
prompt_builder.clone(),
@@ -213,7 +184,7 @@ fn update_active_language_model_from_settings(cx: &mut App) {
});
}
fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut App) {
fn register_slash_commands(cx: &mut App) {
let slash_command_registry = SlashCommandRegistry::global(cx);
slash_command_registry.register_command(assistant_slash_commands::FileSlashCommand, true);
@@ -231,21 +202,6 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
.register_command(assistant_slash_commands::DiagnosticsSlashCommand, true);
slash_command_registry.register_command(assistant_slash_commands::FetchSlashCommand, true);
if let Some(prompt_builder) = prompt_builder {
cx.observe_flag::<assistant_slash_commands::ProjectSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
slash_command_registry.register_command(
assistant_slash_commands::ProjectSlashCommand::new(prompt_builder.clone()),
true,
);
}
}
})
.detach();
}
cx.observe_flag::<assistant_slash_commands::StreamingExampleSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
@@ -262,17 +218,6 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
update_slash_commands_from_settings(cx);
cx.observe_global::<SettingsStore>(update_slash_commands_from_settings)
.detach();
cx.observe_flag::<assistant_slash_commands::SearchSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
slash_command_registry
.register_command(assistant_slash_commands::SearchSlashCommand, true);
}
}
})
.detach();
}
fn update_slash_commands_from_settings(cx: &mut App) {

View File

@@ -12,9 +12,9 @@ use editor::{Editor, MultiBuffer};
use gpui::{
linear_color_stop, linear_gradient, list, percentage, pulsating_between, AbsoluteLength,
Animation, AnimationExt, AnyElement, App, ClickEvent, DefiniteLength, EdgesRefinement, Empty,
Entity, Focusable, Hsla, Length, ListAlignment, ListOffset, ListState, MouseButton,
ScrollHandle, Stateful, StyleRefinement, Subscription, Task, TextStyleRefinement,
Transformation, UnderlineStyle, WeakEntity, WindowHandle,
Entity, Focusable, Hsla, Length, ListAlignment, ListState, MouseButton, ScrollHandle, Stateful,
StyleRefinement, Subscription, Task, TextStyleRefinement, Transformation, UnderlineStyle,
WeakEntity, WindowHandle,
};
use language::{Buffer, LanguageRegistry};
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
@@ -317,10 +317,6 @@ impl ActiveThread {
let rendered_message =
RenderedMessage::from_segments(segments, self.language_registry.clone(), window, cx);
self.rendered_messages_by_id.insert(*id, rendered_message);
self.list_state.scroll_to(ListOffset {
item_ix: old_len,
offset_in_item: Pixels(0.0),
});
}
fn edited_message(

View File

@@ -10,7 +10,10 @@ use assistant_tool::ToolWorkingSet;
use convert_case::{Case, Casing as _};
use editor::Editor;
use fs::Fs;
use gpui::{prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use gpui::{
prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription,
WeakEntity,
};
use settings::{update_settings_file, Settings as _};
use ui::{prelude::*, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry};
use workspace::{ModalView, Workspace};
@@ -18,7 +21,7 @@ use workspace::{ModalView, Workspace};
use crate::assistant_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
use crate::assistant_configuration::profile_picker::{ProfilePicker, ProfilePickerDelegate};
use crate::assistant_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
use crate::{AssistantPanel, ManageProfiles};
use crate::{AssistantPanel, ManageProfiles, ThreadStore};
enum Mode {
ChooseProfile {
@@ -80,6 +83,7 @@ pub struct NewProfileMode {
pub struct ManageProfilesModal {
fs: Arc<dyn Fs>,
tools: Arc<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
focus_handle: FocusHandle,
mode: Mode,
}
@@ -93,9 +97,12 @@ impl ManageProfilesModal {
workspace.register_action(|workspace, _: &ManageProfiles, window, cx| {
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
let fs = workspace.app_state().fs.clone();
let thread_store = panel.read(cx).thread_store().read(cx);
let tools = thread_store.tools();
workspace.toggle_modal(window, cx, |window, cx| Self::new(fs, tools, window, cx))
let thread_store = panel.read(cx).thread_store();
let tools = thread_store.read(cx).tools();
let thread_store = thread_store.downgrade();
workspace.toggle_modal(window, cx, |window, cx| {
Self::new(fs, tools, thread_store, window, cx)
})
}
});
}
@@ -103,6 +110,7 @@ impl ManageProfilesModal {
pub fn new(
fs: Arc<dyn Fs>,
tools: Arc<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -111,6 +119,7 @@ impl ManageProfilesModal {
Self {
fs,
tools,
thread_store,
focus_handle,
mode: Mode::choose_profile(window, cx),
}
@@ -168,6 +177,7 @@ impl ManageProfilesModal {
let delegate = ToolPickerDelegate::new(
self.fs.clone(),
self.tools.clone(),
self.thread_store.clone(),
profile_id.clone(),
profile,
cx,

View File

@@ -9,10 +9,12 @@ use fs::Fs;
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::{App, Context, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, Window};
use picker::{Picker, PickerDelegate};
use settings::update_settings_file;
use settings::{update_settings_file, Settings as _};
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
use util::ResultExt as _;
use crate::ThreadStore;
pub struct ToolPicker {
picker: Entity<Picker<ToolPickerDelegate>>,
}
@@ -46,6 +48,7 @@ pub struct ToolEntry {
pub struct ToolPickerDelegate {
tool_picker: WeakEntity<ToolPicker>,
thread_store: WeakEntity<ThreadStore>,
fs: Arc<dyn Fs>,
tools: Vec<ToolEntry>,
profile_id: Arc<str>,
@@ -58,6 +61,7 @@ impl ToolPickerDelegate {
pub fn new(
fs: Arc<dyn Fs>,
tool_set: Arc<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
profile_id: Arc<str>,
profile: AgentProfile,
cx: &mut Context<ToolPicker>,
@@ -73,6 +77,7 @@ impl ToolPickerDelegate {
Self {
tool_picker: cx.entity().downgrade(),
thread_store,
fs,
tools: tool_entries,
profile_id,
@@ -183,6 +188,15 @@ impl PickerDelegate for ToolPickerDelegate {
}
};
let active_profile_id = &AssistantSettings::get_global(cx).default_profile;
if active_profile_id == &self.profile_id {
self.thread_store
.update(cx, |this, _cx| {
this.load_profile(&self.profile);
})
.log_err();
}
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, {
let profile_id = self.profile_id.clone();
let default_profile = self.profile.clone();

View File

@@ -29,12 +29,9 @@ html_to_markdown.workspace = true
http_client.workspace = true
indexed_docs.workspace = true
language.workspace = true
language_model.workspace = true
project.workspace = true
prompt_store.workspace = true
rope.workspace = true
schemars.workspace = true
semantic_index.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true

View File

@@ -7,9 +7,7 @@ mod docs_command;
mod fetch_command;
mod file_command;
mod now_command;
mod project_command;
mod prompt_command;
mod search_command;
mod selection_command;
mod streaming_example_command;
mod symbols_command;
@@ -29,9 +27,7 @@ pub use crate::docs_command::*;
pub use crate::fetch_command::*;
pub use crate::file_command::*;
pub use crate::now_command::*;
pub use crate::project_command::*;
pub use crate::prompt_command::*;
pub use crate::search_command::*;
pub use crate::selection_command::*;
pub use crate::streaming_example_command::*;
pub use crate::symbols_command::*;

View File

@@ -1,197 +0,0 @@
use std::{
fmt::Write as _,
ops::DerefMut,
sync::{atomic::AtomicBool, Arc},
};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use feature_flags::FeatureFlag;
use gpui::{App, Task, WeakEntity};
use language::{Anchor, CodeLabel, LspAdapterDelegate};
use language_model::{LanguageModelRegistry, LanguageModelTool};
use prompt_store::PromptBuilder;
use schemars::JsonSchema;
use semantic_index::SemanticDb;
use serde::Deserialize;
use ui::prelude::*;
use workspace::Workspace;
use super::{create_label_for_command, search_command::add_search_result_section};
pub struct ProjectSlashCommandFeatureFlag;
impl FeatureFlag for ProjectSlashCommandFeatureFlag {
const NAME: &'static str = "project-slash-command";
}
pub struct ProjectSlashCommand {
prompt_builder: Arc<PromptBuilder>,
}
impl ProjectSlashCommand {
pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
Self { prompt_builder }
}
}
impl SlashCommand for ProjectSlashCommand {
fn name(&self) -> String {
"project".into()
}
fn label(&self, cx: &App) -> CodeLabel {
create_label_for_command("project", &[], cx)
}
fn description(&self) -> String {
"Generate a semantic search based on context".into()
}
fn icon(&self) -> IconName {
IconName::Folder
}
fn menu_text(&self) -> String {
self.description()
}
fn requires_argument(&self) -> bool {
false
}
fn complete_argument(
self: Arc<Self>,
_arguments: &[String],
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakEntity<Workspace>>,
_window: &mut Window,
_cx: &mut App,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Ok(Vec::new()))
}
fn run(
self: Arc<Self>,
_arguments: &[String],
_context_slash_command_output_sections: &[SlashCommandOutputSection<Anchor>],
context_buffer: language::BufferSnapshot,
workspace: WeakEntity<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
window: &mut Window,
cx: &mut App,
) -> Task<SlashCommandResult> {
let model_registry = LanguageModelRegistry::read_global(cx);
let current_model = model_registry.active_model();
let prompt_builder = self.prompt_builder.clone();
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
let project = workspace.read(cx).project().clone();
let fs = project.read(cx).fs().clone();
let Some(project_index) =
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
else {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, async move |cx| {
let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
let prompt =
prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?;
let search_queries = current_model
.use_tool::<SearchQueries>(
language_model::LanguageModelRequest {
messages: vec![language_model::LanguageModelRequestMessage {
role: language_model::Role::User,
content: vec![language_model::MessageContent::Text(prompt)],
cache: false,
}],
tools: vec![],
stop: vec![],
temperature: None,
},
cx.deref_mut(),
)
.await?
.search_queries;
let results = project_index
.read_with(cx, |project_index, cx| {
project_index.search(search_queries.clone(), 25, cx)
})?
.await?;
let results = SemanticDb::load_results(results, &fs, &cx).await?;
cx.background_spawn(async move {
let mut output = "Project context:\n".to_string();
let mut sections = Vec::new();
for (ix, query) in search_queries.into_iter().enumerate() {
let start_ix = output.len();
writeln!(&mut output, "Results for {query}:").unwrap();
let mut has_results = false;
for result in &results {
if result.query_index == ix {
add_search_result_section(result, &mut output, &mut sections);
has_results = true;
}
}
if has_results {
sections.push(SlashCommandOutputSection {
range: start_ix..output.len(),
icon: IconName::MagnifyingGlass,
label: query.into(),
metadata: None,
});
output.push('\n');
} else {
output.truncate(start_ix);
}
}
sections.push(SlashCommandOutputSection {
range: 0..output.len(),
icon: IconName::Book,
label: "Project context".into(),
metadata: None,
});
Ok(SlashCommandOutput {
text: output,
sections,
run_commands_in_text: true,
}
.to_event_stream())
})
.await
})
}
}
#[derive(JsonSchema, Deserialize)]
struct SearchQueries {
/// An array of semantic search queries.
///
/// These queries will be used to search the user's codebase.
/// The function can only accept 4 queries, otherwise it will error.
/// As such, it's important that you limit the length of the search_queries array to 5 queries or less.
search_queries: Vec<String>,
}
impl LanguageModelTool for SearchQueries {
fn name() -> String {
"search_queries".to_string()
}
fn description() -> String {
"Generate semantic search queries based on context".to_string()
}
}

View File

@@ -1,181 +0,0 @@
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use feature_flags::FeatureFlag;
use gpui::{App, Task, WeakEntity};
use language::{CodeLabel, LspAdapterDelegate};
use semantic_index::{LoadedSearchResult, SemanticDb};
use std::{
fmt::Write,
sync::{atomic::AtomicBool, Arc},
};
use ui::{prelude::*, IconName};
use workspace::Workspace;
use crate::create_label_for_command;
use crate::file_command::{build_entry_output_section, codeblock_fence_for_path};
pub struct SearchSlashCommandFeatureFlag;
impl FeatureFlag for SearchSlashCommandFeatureFlag {
const NAME: &'static str = "search-slash-command";
fn enabled_for_staff() -> bool {
false
}
}
pub struct SearchSlashCommand;
impl SlashCommand for SearchSlashCommand {
fn name(&self) -> String {
"search".into()
}
fn label(&self, cx: &App) -> CodeLabel {
create_label_for_command("search", &["--n"], cx)
}
fn description(&self) -> String {
"Search your project semantically".into()
}
fn icon(&self) -> IconName {
IconName::SearchCode
}
fn menu_text(&self) -> String {
self.description()
}
fn requires_argument(&self) -> bool {
true
}
fn complete_argument(
self: Arc<Self>,
_arguments: &[String],
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakEntity<Workspace>>,
_window: &mut Window,
_cx: &mut App,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Ok(Vec::new()))
}
fn run(
self: Arc<Self>,
arguments: &[String],
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
_context_buffer: language::BufferSnapshot,
workspace: WeakEntity<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
window: &mut Window,
cx: &mut App,
) -> Task<SlashCommandResult> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
if arguments.is_empty() {
return Task::ready(Err(anyhow::anyhow!("missing search query")));
};
let mut limit = None;
let mut query = String::new();
for part in arguments {
if let Some(parameter) = part.strip_prefix("--") {
if let Ok(count) = parameter.parse::<usize>() {
limit = Some(count);
continue;
}
}
query.push_str(part);
query.push(' ');
}
query.pop();
if query.is_empty() {
return Task::ready(Err(anyhow::anyhow!("missing search query")));
}
let project = workspace.read(cx).project().clone();
let fs = project.read(cx).fs().clone();
let Some(project_index) =
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
else {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, async move |cx| {
let results = project_index
.read_with(cx, |project_index, cx| {
project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
})?
.await?;
let loaded_results = SemanticDb::load_results(results, &fs, &cx).await?;
let output = cx
.background_spawn(async move {
let mut text = format!("Search results for {query}:\n");
let mut sections = Vec::new();
for loaded_result in &loaded_results {
add_search_result_section(loaded_result, &mut text, &mut sections);
}
let query = SharedString::from(query);
sections.push(SlashCommandOutputSection {
range: 0..text.len(),
icon: IconName::MagnifyingGlass,
label: query,
metadata: None,
});
SlashCommandOutput {
text,
sections,
run_commands_in_text: false,
}
.to_event_stream()
})
.await;
Ok(output)
})
}
}
pub fn add_search_result_section(
loaded_result: &LoadedSearchResult,
text: &mut String,
sections: &mut Vec<SlashCommandOutputSection<usize>>,
) {
let LoadedSearchResult {
path,
full_path,
excerpt_content,
row_range,
..
} = loaded_result;
let section_start_ix = text.len();
text.push_str(&codeblock_fence_for_path(
Some(&path),
Some(row_range.clone()),
));
text.push_str(&excerpt_content);
if !text.ends_with('\n') {
text.push('\n');
}
writeln!(text, "```\n").unwrap();
let section_end_ix = text.len() - 1;
sections.push(build_entry_output_section(
section_start_ix..section_end_ix,
Some(&full_path),
false,
Some(row_range.start() + 1..row_range.end() + 1),
));
}

View File

@@ -19,6 +19,41 @@ pub struct BashToolInput {
cd: String,
}
/// Custom deserialization function for BashToolInput that handles missing "cd" field
/// Returns a BashToolInput with the project's first worktree root as default "cd" if missing
fn from_value_with_default_cd(value: serde_json::Value, project: Option<&Project>, cx: Option<&App>) -> Result<BashToolInput> {
// Try standard deserialization first
if let Ok(input) = serde_json::from_value::<BashToolInput>(value.clone()) {
return Ok(input);
}
// If that fails, check if it's because "cd" is missing
let mut obj = match value {
serde_json::Value::Object(obj) => obj,
_ => return Err(anyhow!("Expected object for BashToolInput")),
};
if !obj.contains_key("cd") {
// Find first worktree root to use as default if project context is available
if let (Some(project), Some(cx)) = (project, cx) {
if let Some(worktree) = project.worktrees(cx).next() {
let root_name = worktree.read(cx).root_name().to_string();
obj.insert("cd".to_string(), serde_json::Value::String(root_name));
} else {
// No worktrees available, use "." as fallback
obj.insert("cd".to_string(), serde_json::Value::String(".".to_string()));
}
} else {
// No project context, use "." as fallback
obj.insert("cd".to_string(), serde_json::Value::String(".".to_string()));
}
}
// Try to deserialize with the modified object
serde_json::from_value::<BashToolInput>(serde_json::Value::Object(obj))
.map_err(|e| anyhow!("Failed to deserialize BashToolInput: {}", e))
}
pub struct BashTool;
impl Tool for BashTool {
@@ -44,7 +79,7 @@ impl Tool for BashTool {
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<BashToolInput>(input.clone()) {
match from_value_with_default_cd(input.clone(), None, None) {
Ok(input) => {
if input.command.contains('\n') {
MarkdownString::code_block("bash", &input.command).0
@@ -64,7 +99,7 @@ impl Tool for BashTool {
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input: BashToolInput = match serde_json::from_value(input) {
let input: BashToolInput = match from_value_with_default_cd(input, Some(&project.read(cx)), Some(cx)) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
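
For reference, a minimal standalone sketch of the missing-"cd" fallback above, assuming only serde (with the derive feature) and serde_json as dependencies; the worktree lookup is replaced by a caller-supplied default, since Project and App aren't available outside Zed.

// Hypothetical sketch: fill in "cd" only when it is absent from the input.
use serde::Deserialize;
use serde_json::{json, Value};

#[derive(Debug, Deserialize)]
struct BashToolInput {
    command: String,
    cd: String,
}

fn from_value_with_default_cd(value: Value, default_cd: &str) -> Result<BashToolInput, serde_json::Error> {
    // Try strict deserialization first.
    match serde_json::from_value::<BashToolInput>(value.clone()) {
        Ok(input) => Ok(input),
        Err(_) => {
            // Insert the default "cd" and retry.
            let mut value = value;
            if let Value::Object(obj) = &mut value {
                obj.entry("cd")
                    .or_insert_with(|| Value::String(default_cd.to_string()));
            }
            serde_json::from_value(value)
        }
    }
}

fn main() {
    let input = from_value_with_default_cd(json!({ "command": "ls" }), ".").unwrap();
    assert_eq!(input.cd, ".");
    println!("cd defaults to {:?}", input.cd);
}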