Compare commits
miner...split-quer
33 commits
| SHA1 |
|---|
| 3a9e53d561 |
| 7ee2511b89 |
| 1eb90e0400 |
| 72e73544bb |
| 586cf40d63 |
| 18f2fc9aa3 |
| b1beed4ca9 |
| 8d37c1c6ea |
| 0680f6469b |
| 57a543ebe8 |
| d69fb469bd |
| eb413ba404 |
| 9182bd51f2 |
| 481a554923 |
| c129e3b668 |
| 12e3c3b502 |
| 54afa6f69f |
| 55511d1591 |
| 6c0cb9eaa3 |
| 24e7b69f8f |
| a4cdca5141 |
| 86cd87e993 |
| 88000eb7e2 |
| ab5a462e0c |
| 79430fc7d2 |
| f96e4ba84f |
| 7be1ffb9ec |
| 93a5d0ca29 |
| 328d98dddc |
| 76ab9e4d66 |
| c477c12956 |
| 1ffd87b87e |
| df11b646da |
.gitignore (vendored): 1 change

@@ -28,4 +28,3 @@ DerivedData/
.vscode
.wrangler
.flatpak-builder
.aider*

Cargo.lock (generated): 1060 changes
File diff suppressed because it is too large.
Cargo.toml

@@ -58,7 +58,6 @@ members = [
 "crates/markdown_preview",
 "crates/media",
 "crates/menu",
-"crates/miner",
 "crates/multi_buffer",
 "crates/node_runtime",
 "crates/notifications",

@@ -241,7 +240,6 @@ task = { path = "crates/task" }
 tasks_ui = { path = "crates/tasks_ui" }
 search = { path = "crates/search" }
 semantic_index = { path = "crates/semantic_index" }
-miner = { path = "crates/miner" }
 semantic_version = { path = "crates/semantic_version" }
 settings = { path = "crates/settings" }
 snippet = { path = "crates/snippet" }

@@ -310,7 +308,7 @@ heed = { version = "0.20.1", features = ["read-txn-no-tls"] }
 hex = "0.4.3"
 html5ever = "0.27.0"
 ignore = "0.4.22"
-image = "0.23"
+image = "0.25.1"
 indexmap = { version = "1.6.2", features = ["serde"] }
 indoc = "1"
 # We explicitly disable http2 support in isahc.
@@ -152,7 +152,8 @@
 // "focus": false
 // }
 // ],
-"ctrl->": "assistant::QuoteSelection"
+"ctrl->": "assistant::QuoteSelection",
+"ctrl-alt-e": "editor::SelectEnclosingSymbol"
 }
 },
 {

@@ -188,7 +188,8 @@
 "focus": false
 }
 ],
-"cmd->": "assistant::QuoteSelection"
+"cmd->": "assistant::QuoteSelection",
+"cmd-alt-e": "editor::SelectEnclosingSymbol"
 }
 },
 {
@@ -397,7 +397,13 @@
 // 3. "gpt-4-turbo-preview"
 // 4. "gpt-4o"
 "default_model": "gpt-4o"
-}
+},
+// Whether to enable the /auto command in the assistant panel, which infers context.
+// Enabling this also enables indexing all the files in the project to generate
+// metadata used in context inference. The first time a project is indexed, indexing
+// can take a long time and use a lot of system resources. Later indexing is incremental
+// and much faster.
+"infer_context": false
 },
 // Whether the screen sharing icon is shown in the os status bar.
 "show_call_status_icon": true,
@@ -10,14 +10,14 @@ mod search;
|
||||
mod slash_command;
|
||||
mod streaming_diff;
|
||||
|
||||
pub use assistant_panel::AssistantPanel;
|
||||
|
||||
pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
|
||||
use assistant_settings::{AnthropicModel, AssistantSettings, CloudModel, OllamaModel, OpenAiModel};
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use client::{proto, Client};
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
pub(crate) use completion_provider::*;
|
||||
pub(crate) use context_store::*;
|
||||
use fs::Fs;
|
||||
use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal};
|
||||
pub(crate) use inline_assistant::*;
|
||||
pub(crate) use model_selector::*;
|
||||
@@ -26,8 +26,9 @@ use semantic_index::{CloudEmbeddingProvider, SemanticIndex};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use slash_command::{
|
||||
active_command, default_command, diagnostics_command, fetch_command, file_command, now_command,
|
||||
project_command, prompt_command, rustdoc_command, search_command, tabs_command, term_command,
|
||||
active_command, auto_command, default_command, diagnostics_command, fetch_command,
|
||||
file_command, now_command, project_command, prompt_command, rustdoc_command, search_command,
|
||||
tabs_command, term_command,
|
||||
};
|
||||
use std::{
|
||||
fmt::{self, Display},
|
||||
@@ -138,7 +139,7 @@ impl LanguageModel {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
pub struct LanguageModelRequestMessage {
|
||||
pub role: Role,
|
||||
pub content: String,
|
||||
@@ -159,7 +160,7 @@ impl LanguageModelRequestMessage {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Serialize)]
|
||||
#[derive(Clone, Debug, Default, Serialize)]
|
||||
pub struct LanguageModelRequest {
|
||||
pub model: LanguageModel,
|
||||
pub messages: Vec<LanguageModelRequestMessage>,
|
||||
@@ -264,7 +265,7 @@ impl Assistant {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
|
||||
pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
|
||||
cx.set_global(Assistant::default());
|
||||
AssistantSettings::register(cx);
|
||||
|
||||
@@ -288,7 +289,7 @@ pub fn init(client: Arc<Client>, cx: &mut AppContext) {
|
||||
assistant_slash_command::init(cx);
|
||||
register_slash_commands(cx);
|
||||
assistant_panel::init(cx);
|
||||
inline_assistant::init(client.telemetry().clone(), cx);
|
||||
inline_assistant::init(fs.clone(), client.telemetry().clone(), cx);
|
||||
RustdocStore::init_global(cx);
|
||||
|
||||
CommandPaletteFilter::update_global(cx, |filter, _cx| {
|
||||
@@ -310,6 +311,7 @@ pub fn init(client: Arc<Client>, cx: &mut AppContext) {
|
||||
|
||||
fn register_slash_commands(cx: &mut AppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
slash_command_registry.register_command(auto_command::AutoCommand, true);
|
||||
slash_command_registry.register_command(file_command::FileSlashCommand, true);
|
||||
slash_command_registry.register_command(active_command::ActiveSlashCommand, true);
|
||||
slash_command_registry.register_command(tabs_command::TabsSlashCommand, true);
|
||||
@@ -324,6 +326,24 @@ fn register_slash_commands(cx: &mut AppContext) {
|
||||
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
|
||||
}
|
||||
|
||||
pub fn humanize_token_count(count: usize) -> String {
|
||||
match count {
|
||||
0..=999 => count.to_string(),
|
||||
1000..=9999 => {
|
||||
let thousands = count / 1000;
|
||||
let hundreds = (count % 1000 + 50) / 100;
|
||||
if hundreds == 0 {
|
||||
format!("{}k", thousands)
|
||||
} else if hundreds == 10 {
|
||||
format!("{}k", thousands + 1)
|
||||
} else {
|
||||
format!("{}.{}k", thousands, hundreds)
|
||||
}
|
||||
}
|
||||
_ => format!("{}k", (count + 500) / 1000),
|
||||
}
|
||||
}
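The rounding behavior of `humanize_token_count` above is easiest to see with a few concrete values. A minimal test sketch (illustrative only, assuming the function is in scope):

```rust
#[cfg(test)]
mod humanize_token_count_examples {
    use super::humanize_token_count;

    #[test]
    fn examples() {
        assert_eq!(humanize_token_count(999), "999"); // below 1k: printed as-is
        assert_eq!(humanize_token_count(1000), "1k"); // whole thousands drop the decimal
        assert_eq!(humanize_token_count(1450), "1.5k"); // hundreds rounded to one digit
        assert_eq!(humanize_token_count(1960), "2k"); // rounding carries into the next thousand
        assert_eq!(humanize_token_count(10_500), "11k"); // 10k and above: nearest whole thousand
    }
}
```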
|
||||
|
||||
#[cfg(test)]
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::{
|
||||
assistant_settings::{AssistantDockPosition, AssistantSettings},
|
||||
humanize_token_count,
|
||||
prompt_library::open_prompt_library,
|
||||
search::*,
|
||||
slash_command::{
|
||||
@@ -89,6 +90,10 @@ pub fn init(cx: &mut AppContext) {
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub enum AssistantPanelEvent {
|
||||
ContextEdited,
|
||||
}
|
||||
|
||||
pub struct AssistantPanel {
|
||||
workspace: WeakView<Workspace>,
|
||||
width: Option<Pixels>,
|
||||
@@ -360,11 +365,11 @@ impl AssistantPanel {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(assistant) = workspace.panel::<AssistantPanel>(cx) else {
|
||||
let Some(assistant_panel) = workspace.panel::<AssistantPanel>(cx) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let context_editor = assistant
|
||||
let context_editor = assistant_panel
|
||||
.read(cx)
|
||||
.active_context_editor()
|
||||
.and_then(|editor| {
|
||||
@@ -391,25 +396,37 @@ impl AssistantPanel {
|
||||
return;
|
||||
};
|
||||
|
||||
if assistant.update(cx, |assistant, cx| assistant.is_authenticated(cx)) {
|
||||
if assistant_panel.update(cx, |panel, cx| panel.is_authenticated(cx)) {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assistant.assist(
|
||||
&active_editor,
|
||||
Some(cx.view().downgrade()),
|
||||
include_context,
|
||||
include_context.then_some(&assistant_panel),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
let assistant = assistant.downgrade();
|
||||
let assistant_panel = assistant_panel.downgrade();
|
||||
cx.spawn(|workspace, mut cx| async move {
|
||||
assistant
|
||||
assistant_panel
|
||||
.update(&mut cx, |assistant, cx| assistant.authenticate(cx))?
|
||||
.await?;
|
||||
if assistant.update(&mut cx, |assistant, cx| assistant.is_authenticated(cx))? {
|
||||
if assistant_panel
|
||||
.update(&mut cx, |assistant, cx| assistant.is_authenticated(cx))?
|
||||
{
|
||||
cx.update(|cx| {
|
||||
let assistant_panel = if include_context {
|
||||
assistant_panel.upgrade()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assistant.assist(&active_editor, Some(workspace), include_context, cx)
|
||||
assistant.assist(
|
||||
&active_editor,
|
||||
Some(workspace),
|
||||
assistant_panel.as_ref(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})?
|
||||
} else {
|
||||
@@ -460,7 +477,7 @@ impl AssistantPanel {
|
||||
_subscriptions: subscriptions,
|
||||
});
|
||||
self.show_saved_contexts = false;
|
||||
|
||||
cx.emit(AssistantPanelEvent::ContextEdited);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -472,6 +489,7 @@ impl AssistantPanel {
|
||||
) {
|
||||
match event {
|
||||
ContextEditorEvent::TabContentChanged => cx.notify(),
|
||||
ContextEditorEvent::Edited => cx.emit(AssistantPanelEvent::ContextEdited),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -863,18 +881,33 @@ impl AssistantPanel {
|
||||
context: &Model<Context>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<impl IntoElement> {
|
||||
let remaining_tokens = context.read(cx).remaining_tokens(cx)?;
|
||||
let remaining_tokens_color = if remaining_tokens <= 0 {
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
let token_count = context.read(cx).token_count()?;
|
||||
let max_token_count = model.max_token_count();
|
||||
|
||||
let remaining_tokens = max_token_count as isize - token_count as isize;
|
||||
let token_count_color = if remaining_tokens <= 0 {
|
||||
Color::Error
|
||||
} else if remaining_tokens <= 500 {
|
||||
} else if token_count as f32 / max_token_count as f32 >= 0.8 {
|
||||
Color::Warning
|
||||
} else {
|
||||
Color::Muted
|
||||
};
|
||||
|
||||
Some(
|
||||
Label::new(remaining_tokens.to_string())
|
||||
.size(LabelSize::Small)
|
||||
.color(remaining_tokens_color),
|
||||
h_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
Label::new(humanize_token_count(token_count))
|
||||
.size(LabelSize::Small)
|
||||
.color(token_count_color),
|
||||
)
|
||||
.child(Label::new("/").size(LabelSize::Small).color(Color::Muted))
|
||||
.child(
|
||||
Label::new(humanize_token_count(max_token_count))
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
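The token-count coloring introduced here (and reused later by the inline assistant's `render_token_count`) reduces to a small rule: error once the count reaches the model's limit, warning at 80% of the limit, muted otherwise. A standalone sketch of that rule, with a hypothetical `TokenColor` enum standing in for the real `Color` type:

```rust
#[derive(Debug, PartialEq)]
enum TokenColor {
    Error,   // the count has reached or exceeded the model's limit
    Warning, // 80% or more of the limit is used
    Muted,   // plenty of headroom left
}

fn token_count_color(token_count: usize, max_token_count: usize) -> TokenColor {
    let remaining = max_token_count as isize - token_count as isize;
    if remaining <= 0 {
        TokenColor::Error
    } else if token_count as f32 / max_token_count as f32 >= 0.8 {
        TokenColor::Warning
    } else {
        TokenColor::Muted
    }
}

fn main() {
    // With an 8k-token model: at the limit, at 80% usage, and well under it.
    assert_eq!(token_count_color(8_000, 8_000), TokenColor::Error);
    assert_eq!(token_count_color(6_400, 8_000), TokenColor::Warning);
    assert_eq!(token_count_color(1_000, 8_000), TokenColor::Muted);
}
```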
|
||||
@@ -978,6 +1011,7 @@ impl Panel for AssistantPanel {
|
||||
}
|
||||
|
||||
impl EventEmitter<PanelEvent> for AssistantPanel {}
|
||||
impl EventEmitter<AssistantPanelEvent> for AssistantPanel {}
|
||||
|
||||
impl FocusableView for AssistantPanel {
|
||||
fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
|
||||
@@ -1538,11 +1572,6 @@ impl Context {
|
||||
}
|
||||
}
|
||||
|
||||
fn remaining_tokens(&self, cx: &AppContext) -> Option<isize> {
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
Some(model.max_token_count() as isize - self.token_count? as isize)
|
||||
}
|
||||
|
||||
fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
|
||||
self.count_remaining_tokens(cx);
|
||||
}
|
||||
@@ -2183,6 +2212,7 @@ struct PendingCompletion {
|
||||
}
|
||||
|
||||
enum ContextEditorEvent {
|
||||
Edited,
|
||||
TabContentChanged,
|
||||
}
|
||||
|
||||
@@ -2775,6 +2805,7 @@ impl ContextEditor {
|
||||
EditorEvent::SelectionsChanged { .. } => {
|
||||
self.scroll_position = self.cursor_scroll_position(cx);
|
||||
}
|
||||
EditorEvent::BufferEdited => cx.emit(ContextEditorEvent::Edited),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -225,6 +225,7 @@ pub struct AssistantSettings {
|
||||
pub default_width: Pixels,
|
||||
pub default_height: Pixels,
|
||||
pub provider: AssistantProvider,
|
||||
pub infer_context: bool,
|
||||
}
|
||||
|
||||
/// Assistant panel settings
|
||||
@@ -282,6 +283,7 @@ impl AssistantSettingsContent {
|
||||
}
|
||||
})
|
||||
},
|
||||
infer_context: None,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -381,6 +383,7 @@ impl Default for VersionedAssistantSettingsContent {
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
provider: None,
|
||||
infer_context: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -412,6 +415,14 @@ pub struct AssistantSettingsContentV1 {
|
||||
/// This can either be the internal `zed.dev` service or an external `openai` service,
|
||||
/// each with their respective default models and configurations.
|
||||
provider: Option<AssistantProviderContent>,
|
||||
/// When using the assistant panel, enable the /auto command to automatically
|
||||
/// infer context. Enabling this will enable background indexing of the project,
|
||||
/// to generate the metadata /auto needs to infer context. The first time a project
|
||||
/// is indexed, the indexing process can take a long time and use a lot of system
|
||||
/// resources. After the first time, later indexing is incremental and much faster.
|
||||
///
|
||||
/// Default: false
|
||||
infer_context: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
@@ -466,6 +477,7 @@ impl Settings for AssistantSettings {
|
||||
&mut settings.default_height,
|
||||
value.default_height.map(Into::into),
|
||||
);
|
||||
merge(&mut settings.infer_context, value.infer_context);
|
||||
if let Some(provider) = value.provider.clone() {
|
||||
match (&mut settings.provider, provider) {
|
||||
(
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use crate::{
|
||||
prompts::generate_content_prompt, AssistantPanel, CompletionProvider, Hunk,
|
||||
LanguageModelRequest, LanguageModelRequestMessage, Role, StreamingDiff,
|
||||
assistant_settings::AssistantSettings, humanize_token_count, prompts::generate_content_prompt,
|
||||
AssistantPanel, AssistantPanelEvent, CompletionProvider, Hunk, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, Role, StreamingDiff,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::{hash_map, HashMap, HashSet, VecDeque};
|
||||
use editor::{
|
||||
@@ -14,6 +15,7 @@ use editor::{
|
||||
Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle,
|
||||
ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
|
||||
};
|
||||
use fs::Fs;
|
||||
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
|
||||
use gpui::{
|
||||
point, AppContext, EventEmitter, FocusHandle, FocusableView, FontStyle, FontWeight, Global,
|
||||
@@ -24,7 +26,7 @@ use language::{Buffer, Point, Selection, TransactionId};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use rope::Rope;
|
||||
use settings::Settings;
|
||||
use settings::{update_settings_file, Settings};
|
||||
use similar::TextDiff;
|
||||
use std::{
|
||||
cmp, mem,
|
||||
@@ -32,15 +34,15 @@ use std::{
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
task::{self, Poll},
|
||||
time::Instant,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, Tooltip};
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
|
||||
use util::RangeExt;
|
||||
use workspace::{notifications::NotificationId, Toast, Workspace};
|
||||
|
||||
pub fn init(telemetry: Arc<Telemetry>, cx: &mut AppContext) {
|
||||
cx.set_global(InlineAssistant::new(telemetry));
|
||||
pub fn init(fs: Arc<dyn Fs>, telemetry: Arc<Telemetry>, cx: &mut AppContext) {
|
||||
cx.set_global(InlineAssistant::new(fs, telemetry));
|
||||
}
|
||||
|
||||
const PROMPT_HISTORY_MAX_LEN: usize = 20;
|
||||
@@ -53,12 +55,13 @@ pub struct InlineAssistant {
|
||||
assist_groups: HashMap<InlineAssistGroupId, InlineAssistGroup>,
|
||||
prompt_history: VecDeque<String>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
impl Global for InlineAssistant {}
|
||||
|
||||
impl InlineAssistant {
|
||||
pub fn new(telemetry: Arc<Telemetry>) -> Self {
|
||||
pub fn new(fs: Arc<dyn Fs>, telemetry: Arc<Telemetry>) -> Self {
|
||||
Self {
|
||||
next_assist_id: InlineAssistId::default(),
|
||||
next_assist_group_id: InlineAssistGroupId::default(),
|
||||
@@ -67,6 +70,7 @@ impl InlineAssistant {
|
||||
assist_groups: HashMap::default(),
|
||||
prompt_history: VecDeque::default(),
|
||||
telemetry: Some(telemetry),
|
||||
fs,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -74,7 +78,7 @@ impl InlineAssistant {
|
||||
&mut self,
|
||||
editor: &View<Editor>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
include_context: bool,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
@@ -151,7 +155,10 @@ impl InlineAssistant {
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
editor,
|
||||
assistant_panel,
|
||||
workspace.clone(),
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -208,7 +215,7 @@ impl InlineAssistant {
|
||||
InlineAssist::new(
|
||||
assist_id,
|
||||
assist_group_id,
|
||||
include_context,
|
||||
assistant_panel.is_some(),
|
||||
editor,
|
||||
&prompt_editor,
|
||||
block_ids[0],
|
||||
@@ -706,8 +713,6 @@ impl InlineAssistant {
|
||||
return;
|
||||
}
|
||||
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
|
||||
|
||||
let Some(user_prompt) = assist
|
||||
.decorations
|
||||
.as_ref()
|
||||
@@ -716,115 +721,138 @@ impl InlineAssistant {
|
||||
return;
|
||||
};
|
||||
|
||||
let context = if assist.include_context {
|
||||
assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?.read(cx);
|
||||
let assistant_panel = workspace.panel::<AssistantPanel>(cx)?;
|
||||
assistant_panel.read(cx).active_context(cx)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let editor = if let Some(editor) = assist.editor.upgrade() {
|
||||
editor
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
let project_name = assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?;
|
||||
Some(
|
||||
workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_root_names(cx)
|
||||
.collect::<Vec<&str>>()
|
||||
.join("/"),
|
||||
)
|
||||
});
|
||||
|
||||
self.prompt_history.retain(|prompt| *prompt != user_prompt);
|
||||
self.prompt_history.push_back(user_prompt.clone());
|
||||
if self.prompt_history.len() > PROMPT_HISTORY_MAX_LEN {
|
||||
self.prompt_history.pop_front();
|
||||
}
|
||||
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
|
||||
let codegen = assist.codegen.clone();
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
let range = codegen.read(cx).range.clone();
|
||||
let start = snapshot.point_to_buffer_offset(range.start);
|
||||
let end = snapshot.point_to_buffer_offset(range.end);
|
||||
let (buffer, range) = if let Some((start, end)) = start.zip(end) {
|
||||
let (start_buffer, start_buffer_offset) = start;
|
||||
let (end_buffer, end_buffer_offset) = end;
|
||||
if start_buffer.remote_id() == end_buffer.remote_id() {
|
||||
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
|
||||
} else {
|
||||
self.finish_assist(assist_id, false, cx);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
self.finish_assist(assist_id, false, cx);
|
||||
return;
|
||||
};
|
||||
|
||||
let language = buffer.language_at(range.start);
|
||||
let language_name = if let Some(language) = language.as_ref() {
|
||||
if Arc::ptr_eq(language, &language::PLAIN_TEXT) {
|
||||
None
|
||||
} else {
|
||||
Some(language.name())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Higher Temperature increases the randomness of model outputs.
|
||||
// If Markdown or No Language is Known, increase the randomness for more creative output
|
||||
// If Code, decrease temperature to get more deterministic outputs
|
||||
let temperature = if let Some(language) = language_name.clone() {
|
||||
if language.as_ref() == "Markdown" {
|
||||
1.0
|
||||
} else {
|
||||
0.5
|
||||
}
|
||||
} else {
|
||||
1.0
|
||||
};
|
||||
|
||||
let prompt = cx.background_executor().spawn(async move {
|
||||
let language_name = language_name.as_deref();
|
||||
generate_content_prompt(user_prompt, language_name, buffer, range, project_name)
|
||||
});
|
||||
|
||||
let mut messages = Vec::new();
|
||||
if let Some(context) = context {
|
||||
let request = context.read(cx).to_completion_request(cx);
|
||||
messages = request.messages;
|
||||
}
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
let request = self.request_for_inline_assist(assist_id, cx);
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let prompt = prompt.await?;
|
||||
let request = request.await?;
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn request_for_inline_assist(
|
||||
&self,
|
||||
assist_id: InlineAssistId,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<LanguageModelRequest>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
let (user_prompt, context_request, project_name, buffer, range, model) = cx
|
||||
.read_global(|this: &InlineAssistant, cx: &WindowContext| {
|
||||
let assist = this.assists.get(&assist_id).context("invalid assist")?;
|
||||
let decorations = assist.decorations.as_ref().context("invalid assist")?;
|
||||
let editor = assist.editor.upgrade().context("invalid assist")?;
|
||||
let user_prompt = decorations.prompt_editor.read(cx).prompt(cx);
|
||||
let context_request = if assist.include_context {
|
||||
assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?.read(cx);
|
||||
let assistant_panel = workspace.panel::<AssistantPanel>(cx)?;
|
||||
Some(
|
||||
assistant_panel
|
||||
.read(cx)
|
||||
.active_context(cx)?
|
||||
.read(cx)
|
||||
.to_completion_request(cx),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let project_name = assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?;
|
||||
Some(
|
||||
workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_root_names(cx)
|
||||
.collect::<Vec<&str>>()
|
||||
.join("/"),
|
||||
)
|
||||
});
|
||||
let buffer = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
let range = assist.codegen.read(cx).range.clone();
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
anyhow::Ok((
|
||||
user_prompt,
|
||||
context_request,
|
||||
project_name,
|
||||
buffer,
|
||||
range,
|
||||
model,
|
||||
))
|
||||
})??;
|
||||
|
||||
let language = buffer.language_at(range.start);
|
||||
let language_name = if let Some(language) = language.as_ref() {
|
||||
if Arc::ptr_eq(language, &language::PLAIN_TEXT) {
|
||||
None
|
||||
} else {
|
||||
Some(language.name())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Higher Temperature increases the randomness of model outputs.
|
||||
// If Markdown or No Language is Known, increase the randomness for more creative output
|
||||
// If Code, decrease temperature to get more deterministic outputs
|
||||
let temperature = if let Some(language) = language_name.clone() {
|
||||
if language.as_ref() == "Markdown" {
|
||||
1.0
|
||||
} else {
|
||||
0.5
|
||||
}
|
||||
} else {
|
||||
1.0
|
||||
};
|
||||
|
||||
let prompt = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
let language_name = language_name.as_deref();
|
||||
let start = buffer.point_to_buffer_offset(range.start);
|
||||
let end = buffer.point_to_buffer_offset(range.end);
|
||||
let (buffer, range) = if let Some((start, end)) = start.zip(end) {
|
||||
let (start_buffer, start_buffer_offset) = start;
|
||||
let (end_buffer, end_buffer_offset) = end;
|
||||
if start_buffer.remote_id() == end_buffer.remote_id() {
|
||||
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
|
||||
} else {
|
||||
return Err(anyhow!("invalid transformation range"));
|
||||
}
|
||||
} else {
|
||||
return Err(anyhow!("invalid transformation range"));
|
||||
};
|
||||
generate_content_prompt(user_prompt, language_name, buffer, range, project_name)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
if let Some(context_request) = context_request {
|
||||
messages = context_request.messages;
|
||||
}
|
||||
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: prompt,
|
||||
});
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
Ok(LanguageModelRequest {
|
||||
model,
|
||||
messages,
|
||||
stop: vec!["|END|>".to_string()],
|
||||
temperature,
|
||||
};
|
||||
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
@@ -1142,6 +1170,7 @@ enum PromptEditorEvent {
|
||||
|
||||
struct PromptEditor {
|
||||
id: InlineAssistId,
|
||||
fs: Arc<dyn Fs>,
|
||||
height_in_lines: u8,
|
||||
editor: View<Editor>,
|
||||
edited_since_done: bool,
|
||||
@@ -1150,9 +1179,12 @@ struct PromptEditor {
|
||||
prompt_history_ix: Option<usize>,
|
||||
pending_prompt: String,
|
||||
codegen: Model<Codegen>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
_codegen_subscription: Subscription,
|
||||
editor_subscriptions: Vec<Subscription>,
|
||||
pending_token_count: Task<Result<()>>,
|
||||
token_count: Option<usize>,
|
||||
_token_count_subscriptions: Vec<Subscription>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
}
|
||||
|
||||
impl EventEmitter<PromptEditorEvent> for PromptEditor {}
|
||||
@@ -1160,6 +1192,7 @@ impl EventEmitter<PromptEditorEvent> for PromptEditor {}
|
||||
impl Render for PromptEditor {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let gutter_dimensions = *self.gutter_dimensions.lock();
|
||||
let fs = self.fs.clone();
|
||||
|
||||
let buttons = match &self.codegen.read(cx).status {
|
||||
CodegenStatus::Idle => {
|
||||
@@ -1245,85 +1278,100 @@ impl Render for PromptEditor {
|
||||
}
|
||||
};
|
||||
|
||||
v_flex().h_full().w_full().justify_end().child(
|
||||
h_flex()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.py_1p5()
|
||||
.w_full()
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.on_action(cx.listener(Self::move_up))
|
||||
.on_action(cx.listener(Self::move_down))
|
||||
.child(
|
||||
h_flex()
|
||||
.w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0))
|
||||
// .pr(gutter_dimensions.fold_area_width())
|
||||
.justify_center()
|
||||
.gap_2()
|
||||
.children(self.workspace.clone().map(|workspace| {
|
||||
IconButton::new("context", IconName::Context)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.on_click({
|
||||
let workspace = workspace.clone();
|
||||
cx.listener(move |_, _, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
h_flex()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.py_1p5()
|
||||
.h_full()
|
||||
.w_full()
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.on_action(cx.listener(Self::move_up))
|
||||
.on_action(cx.listener(Self::move_down))
|
||||
.child(
|
||||
h_flex()
|
||||
.w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0))
|
||||
.justify_center()
|
||||
.gap_2()
|
||||
.child(
|
||||
PopoverMenu::new("model-switcher")
|
||||
.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for model in CompletionProvider::global(cx).available_models() {
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let model = model.clone();
|
||||
move |_| {
|
||||
Label::new(model.display_name())
|
||||
.into_any_element()
|
||||
}
|
||||
},
|
||||
{
|
||||
let fs = fs.clone();
|
||||
let model = model.clone();
|
||||
move |cx| {
|
||||
let model = model.clone();
|
||||
update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
cx,
|
||||
move |settings| settings.set_model(model),
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
menu
|
||||
})
|
||||
.tooltip(move |cx| {
|
||||
let token_count = workspace.upgrade().and_then(|workspace| {
|
||||
let panel =
|
||||
workspace.read(cx).panel::<AssistantPanel>(cx)?;
|
||||
let context = panel.read(cx).active_context(cx)?;
|
||||
context.read(cx).token_count()
|
||||
});
|
||||
if let Some(token_count) = token_count {
|
||||
.into()
|
||||
})
|
||||
.trigger(
|
||||
IconButton::new("context", IconName::Settings)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"{} Additional Context Tokens from Assistant",
|
||||
token_count
|
||||
"Using {}",
|
||||
CompletionProvider::global(cx)
|
||||
.model()
|
||||
.display_name()
|
||||
),
|
||||
Some(&crate::ToggleFocus),
|
||||
"Click to open…",
|
||||
None,
|
||||
"Click to Change Model",
|
||||
cx,
|
||||
)
|
||||
} else {
|
||||
Tooltip::for_action(
|
||||
"Toggle Assistant Panel",
|
||||
&crate::ToggleFocus,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
}))
|
||||
.children(
|
||||
if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {
|
||||
let error_message = SharedString::from(error.to_string());
|
||||
Some(
|
||||
div()
|
||||
.id("error")
|
||||
.tooltip(move |cx| Tooltip::text(error_message.clone(), cx))
|
||||
.child(
|
||||
Icon::new(IconName::XCircle)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Error),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(div().flex_1().child(self.render_prompt_editor(cx)))
|
||||
.child(h_flex().gap_2().pr_4().children(buttons)),
|
||||
)
|
||||
}),
|
||||
)
|
||||
.anchor(gpui::AnchorCorner::BottomRight),
|
||||
)
|
||||
.children(
|
||||
if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {
|
||||
let error_message = SharedString::from(error.to_string());
|
||||
Some(
|
||||
div()
|
||||
.id("error")
|
||||
.tooltip(move |cx| Tooltip::text(error_message.clone(), cx))
|
||||
.child(
|
||||
Icon::new(IconName::XCircle)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Error),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(div().flex_1().child(self.render_prompt_editor(cx)))
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.pr_4()
|
||||
.children(self.render_token_count(cx))
|
||||
.children(buttons),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1336,13 +1384,17 @@ impl FocusableView for PromptEditor {
|
||||
impl PromptEditor {
|
||||
const MAX_LINES: u8 = 8;
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
id: InlineAssistId,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
prompt_history: VecDeque<String>,
|
||||
prompt_buffer: Model<MultiBuffer>,
|
||||
codegen: Model<Codegen>,
|
||||
parent_editor: &View<Editor>,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let prompt_editor = cx.new_view(|cx| {
|
||||
@@ -1363,6 +1415,15 @@ impl PromptEditor {
|
||||
editor.set_placeholder_text("Add a prompt…", cx);
|
||||
editor
|
||||
});
|
||||
|
||||
let mut token_count_subscriptions = Vec::new();
|
||||
token_count_subscriptions
|
||||
.push(cx.subscribe(parent_editor, Self::handle_parent_editor_event));
|
||||
if let Some(assistant_panel) = assistant_panel {
|
||||
token_count_subscriptions
|
||||
.push(cx.subscribe(assistant_panel, Self::handle_assistant_panel_event));
|
||||
}
|
||||
|
||||
let mut this = Self {
|
||||
id,
|
||||
height_in_lines: 1,
|
||||
@@ -1375,9 +1436,14 @@ impl PromptEditor {
|
||||
_codegen_subscription: cx.observe(&codegen, Self::handle_codegen_changed),
|
||||
editor_subscriptions: Vec::new(),
|
||||
codegen,
|
||||
fs,
|
||||
pending_token_count: Task::ready(Ok(())),
|
||||
token_count: None,
|
||||
_token_count_subscriptions: token_count_subscriptions,
|
||||
workspace,
|
||||
};
|
||||
this.count_lines(cx);
|
||||
this.count_tokens(cx);
|
||||
this.subscribe_to_editor(cx);
|
||||
this
|
||||
}
|
||||
@@ -1436,6 +1502,47 @@ impl PromptEditor {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_parent_editor_event(
|
||||
&mut self,
|
||||
_: View<Editor>,
|
||||
event: &EditorEvent,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let EditorEvent::BufferEdited { .. } = event {
|
||||
self.count_tokens(cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_assistant_panel_event(
|
||||
&mut self,
|
||||
_: View<AssistantPanel>,
|
||||
event: &AssistantPanelEvent,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let AssistantPanelEvent::ContextEdited { .. } = event;
|
||||
self.count_tokens(cx);
|
||||
}
|
||||
|
||||
fn count_tokens(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let assist_id = self.id;
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| async move {
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
let request = cx
|
||||
.update_global(|inline_assistant: &mut InlineAssistant, cx| {
|
||||
inline_assistant.request_for_inline_assist(assist_id, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let token_count = cx
|
||||
.update(|cx| CompletionProvider::global(cx).count_tokens(request, cx))?
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_prompt_editor_changed(&mut self, _: View<Editor>, cx: &mut ViewContext<Self>) {
|
||||
self.count_lines(cx);
|
||||
}
|
||||
@@ -1460,6 +1567,9 @@ impl PromptEditor {
|
||||
self.edited_since_done = true;
|
||||
cx.notify();
|
||||
}
|
||||
EditorEvent::BufferEdited => {
|
||||
self.count_tokens(cx);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -1551,6 +1661,63 @@ impl PromptEditor {
|
||||
}
|
||||
}
|
||||
|
||||
fn render_token_count(&self, cx: &mut ViewContext<Self>) -> Option<impl IntoElement> {
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
let token_count = self.token_count?;
|
||||
let max_token_count = model.max_token_count();
|
||||
|
||||
let remaining_tokens = max_token_count as isize - token_count as isize;
|
||||
let token_count_color = if remaining_tokens <= 0 {
|
||||
Color::Error
|
||||
} else if token_count as f32 / max_token_count as f32 >= 0.8 {
|
||||
Color::Warning
|
||||
} else {
|
||||
Color::Muted
|
||||
};
|
||||
|
||||
let mut token_count = h_flex()
|
||||
.id("token_count")
|
||||
.gap_0p5()
|
||||
.child(
|
||||
Label::new(humanize_token_count(token_count))
|
||||
.size(LabelSize::Small)
|
||||
.color(token_count_color),
|
||||
)
|
||||
.child(Label::new("/").size(LabelSize::Small).color(Color::Muted))
|
||||
.child(
|
||||
Label::new(humanize_token_count(max_token_count))
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
);
|
||||
if let Some(workspace) = self.workspace.clone() {
|
||||
token_count = token_count
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Tokens Used by Inline Assistant",
|
||||
None,
|
||||
"Click to Open Assistant Panel",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.cursor_pointer()
|
||||
.on_mouse_down(gpui::MouseButton::Left, |_, cx| cx.stop_propagation())
|
||||
.on_click(move |_, cx| {
|
||||
cx.stop_propagation();
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(cx)
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
} else {
|
||||
token_count = token_count
|
||||
.cursor_default()
|
||||
.tooltip(|cx| Tooltip::text("Tokens Used by Inline Assistant", cx));
|
||||
}
|
||||
|
||||
Some(token_count)
|
||||
}
|
||||
|
||||
fn render_prompt_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
let text_style = TextStyle {
|
||||
@@ -1893,6 +2060,11 @@ impl Codegen {
|
||||
|
||||
if lines.peek().is_some() {
|
||||
hunks_tx.send(diff.push_new("\n")).await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
|
||||
@@ -569,7 +569,7 @@ impl PromptLibrary {
|
||||
let provider = CompletionProvider::global(cx);
|
||||
if provider.is_authenticated() {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assistant.assist(&prompt_editor, None, false, cx)
|
||||
assistant.assist(&prompt_editor, None, None, cx)
|
||||
})
|
||||
} else {
|
||||
for window in cx.windows() {
|
||||
|
||||
@@ -18,6 +18,7 @@ use ui::ActiveTheme;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub mod active_command;
|
||||
pub mod auto_command;
|
||||
pub mod default_command;
|
||||
pub mod diagnostics_command;
|
||||
pub mod fetch_command;
|
||||
|
||||
crates/assistant/src/slash_command/auto_command.rs (new file, 242 lines)
@@ -0,0 +1,242 @@
|
||||
use super::create_label_for_command;
|
||||
use super::{SlashCommand, SlashCommandOutput};
|
||||
use crate::{CompletionProvider, LanguageModelRequest, LanguageModelRequestMessage, Role};
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::StreamExt;
|
||||
use gpui::{AppContext, AsyncAppContext, Task, WeakView};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use ui::WindowContext;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct AutoCommand;
|
||||
|
||||
impl SlashCommand for AutoCommand {
|
||||
fn name(&self) -> String {
|
||||
"auto".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Automatically infer what context to add, based on your prompt".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
"Automatically Infer Context".into()
|
||||
}
|
||||
|
||||
fn label(&self, cx: &AppContext) -> CodeLabel {
|
||||
create_label_for_command("auto", &["--prompt"], cx)
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_cancellation_flag: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
) -> Task<Result<Vec<String>>> {
|
||||
// There's no autocomplete for a prompt, since it's arbitrary text.
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(argument) = argument else {
|
||||
return Task::ready(Err(anyhow!("missing prompt")));
|
||||
};
|
||||
|
||||
// to_string() is needed so it can live long enough to be used in cx.spawn
|
||||
let original_prompt = argument.to_string();
|
||||
let task = cx.spawn(|cx: gpui::AsyncWindowContext| async move {
|
||||
let summaries: Vec<FileSummary> = serde_json::from_str(SUMMARY).unwrap_or_else(|_| {
|
||||
// Since we generate the JSON ourselves, this parsing should never fail. If it does, that's a bug.
|
||||
log::error!("JSON parsing of project file summaries failed");
|
||||
|
||||
// Handle this gracefully by not including any summaries. Assistant results
|
||||
// will be worse than if we actually had summaries, but we won't block the user.
|
||||
Vec::new()
|
||||
});
|
||||
|
||||
commands_for_summaries(&summaries, &original_prompt, &cx).await
|
||||
});
|
||||
|
||||
// As a convenience, append /auto's argument to the end of the prompt
|
||||
// so you don't have to write it again.
|
||||
let original_prompt = argument.to_string();
|
||||
|
||||
cx.background_executor().spawn(async move {
|
||||
let commands = task.await?;
|
||||
let mut prompt = String::new();
|
||||
|
||||
log::info!(
|
||||
"Translating this response into slash-commands: {:?}",
|
||||
commands
|
||||
);
|
||||
|
||||
for command in commands {
|
||||
prompt.push('/');
|
||||
prompt.push_str(&command.name);
|
||||
prompt.push(' ');
|
||||
prompt.push_str(&command.arg);
|
||||
prompt.push('\n');
|
||||
}
|
||||
|
||||
prompt.push('\n');
|
||||
prompt.push_str(&original_prompt);
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text: prompt,
|
||||
sections: Vec::new(),
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const PROMPT_INSTRUCTIONS_BEFORE_SUMMARY: &str = include_str!("prompt_before_summary.txt");
|
||||
const PROMPT_INSTRUCTIONS_AFTER_SUMMARY: &str = include_str!("prompt_after_summary.txt");
|
||||
const SUMMARY: &str = include_str!("/Users/rtfeldman/code/summarize-dir/combined_summaries.json");
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct FileSummary {
|
||||
filename: String,
|
||||
summary: String,
|
||||
}
|
||||
|
||||
fn summaries_prompt(summaries: &[FileSummary], original_prompt: &str) -> String {
|
||||
let json_summaries = serde_json::to_string(summaries).unwrap();
|
||||
|
||||
format!("{PROMPT_INSTRUCTIONS_BEFORE_SUMMARY}\n{json_summaries}\n{PROMPT_INSTRUCTIONS_AFTER_SUMMARY}\n{original_prompt}")
|
||||
}
|
||||
|
||||
/// The slash commands that the model is told about, and which we look for in the inference response.
|
||||
const SUPPORTED_SLASH_COMMANDS: &[&str] = &["search", "file"];
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct CommandToRun {
|
||||
name: String,
|
||||
arg: String,
|
||||
}
|
||||
|
||||
/// Given the pre-indexed file summaries for this project, as well as the original prompt
|
||||
/// string passed to `/auto`, get a list of slash commands to run, along with their arguments.
|
||||
///
|
||||
/// The prompt's output does not include the slashes (to reduce the chance that it makes a mistake),
|
||||
/// so taking one of these returned Strings and turning it into a real slash-command-with-argument
|
||||
/// involves prepending a slash to it.
|
||||
///
|
||||
/// This function will validate that each of the returned lines begins with one of SUPPORTED_SLASH_COMMANDS.
|
||||
/// Any other lines it encounters will be discarded, with a warning logged.
|
||||
async fn commands_for_summaries(
|
||||
summaries: &[FileSummary],
|
||||
original_prompt: &str,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<Vec<CommandToRun>> {
|
||||
if summaries.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let model = cx.update(|cx| CompletionProvider::global(cx).model())?;
|
||||
let max_token_count = model.max_token_count();
|
||||
|
||||
// Rather than recursing (which would require this async function use a pinned box),
|
||||
// we use an explicit stack of arguments and answers for when we need to "recurse."
|
||||
let mut stack = vec![(summaries, String::new())];
|
||||
let mut final_response = Vec::new();
|
||||
|
||||
while let Some((current_summaries, mut accumulated_response)) = stack.pop() {
|
||||
// The split can result in one slice being empty and the other having one element.
|
||||
// Whenever that happens, skip the empty one.
|
||||
if current_summaries.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"Inferring prompt context using {} file summaries",
|
||||
current_summaries.len()
|
||||
);
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
model: model.clone(),
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: summaries_prompt(&current_summaries, original_prompt),
|
||||
}],
|
||||
stop: Vec::new(),
|
||||
temperature: 1.0,
|
||||
};
|
||||
|
||||
let token_count = cx
|
||||
.update(|cx| CompletionProvider::global(cx).count_tokens(request.clone(), cx))?
|
||||
.await?;
|
||||
|
||||
if token_count < max_token_count {
|
||||
let mut response_chunks = cx
|
||||
.update(|cx| CompletionProvider::global(cx).complete(request))?
|
||||
.await?;
|
||||
|
||||
while let Some(chunk) = response_chunks.next().await {
|
||||
accumulated_response.push_str(&chunk?);
|
||||
}
|
||||
|
||||
for line in accumulated_response.split('\n') {
|
||||
if let Some(first_space) = line.find(' ') {
|
||||
let command = &line[..first_space].trim();
|
||||
let arg = &line[first_space..].trim();
|
||||
|
||||
// Don't return empty or duplicate commands
|
||||
if !command.is_empty()
|
||||
&& !final_response
|
||||
.iter()
|
||||
.any(|cmd: &CommandToRun| cmd.name == *command && cmd.arg == *arg)
|
||||
{
|
||||
if SUPPORTED_SLASH_COMMANDS
|
||||
.iter()
|
||||
.any(|supported| command == supported)
|
||||
{
|
||||
final_response.push(CommandToRun {
|
||||
name: command.to_string(),
|
||||
arg: arg.to_string(),
|
||||
});
|
||||
} else {
|
||||
log::warn!(
|
||||
"Context inference returned an unrecognized slash-commend line: {:?}",
|
||||
line
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if !line.trim().is_empty() {
|
||||
// All slash-commands currently supported in context inference need a space for the argument.
|
||||
log::warn!(
|
||||
"Context inference returned a non-blank line that contained no spaces (meaning no argument for the slash-command): {:?}",
|
||||
line
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if current_summaries.len() == 1 {
|
||||
log::warn!("Inferring context for a single file's summary failed because the prompt's token length exceeded the model's token limit.");
|
||||
} else {
|
||||
log::info!(
|
||||
"Context inference using file summaries resulted in a prompt containing {token_count} tokens, which exceeded the model's max of {max_token_count}. Retrying as two separate prompts, each including half the number of summaries.",
|
||||
);
|
||||
let (left, right) = current_summaries.split_at(current_summaries.len() / 2);
|
||||
stack.push((right, accumulated_response.clone()));
|
||||
stack.push((left, accumulated_response));
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the commands by name (reversed just so that /search appears before /file)
|
||||
final_response.sort_by(|cmd1, cmd2| cmd1.name.cmp(&cmd2.name).reverse());
|
||||
|
||||
Ok(final_response)
|
||||
}
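The doc comment on `commands_for_summaries` above describes the per-line format it validates: an action name with no leading slash, a space, then the argument, with every other line discarded and a warning logged. A minimal standalone sketch of that validation (hypothetical helper, not part of the actual change):

```rust
/// Actions accepted from the inference response, mirroring SUPPORTED_SLASH_COMMANDS.
const SUPPORTED: &[&str] = &["search", "file"];

/// Parse one response line into (command, argument), or None if it should be discarded.
fn parse_action_line(line: &str) -> Option<(&str, &str)> {
    let first_space = line.find(' ')?;
    let command = line[..first_space].trim();
    let arg = line[first_space..].trim();
    if command.is_empty() || arg.is_empty() {
        return None;
    }
    SUPPORTED.contains(&command).then_some((command, arg))
}

fn main() {
    assert_eq!(
        parse_action_line("file crates/assistant/src/assistant.rs"),
        Some(("file", "crates/assistant/src/assistant.rs"))
    );
    assert_eq!(parse_action_line("web how to sort a Vec"), None); // unsupported action
    assert_eq!(parse_action_line("file"), None); // no argument
}
```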
|
||||
crates/assistant/src/slash_command/prompt_after_summary.txt (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
Actions have a cost, so only include actions that you think
|
||||
will be helpful to you in doing a great job answering the
|
||||
prompt in the future.
|
||||
|
||||
You must respond ONLY with a list of actions you would like to
|
||||
perform. Each action should be on its own line, and followed by a space and then its parameter.
|
||||
|
||||
Actions can be performed more than once with different parameters.
|
||||
Here is an example valid response:
|
||||
|
||||
```
|
||||
file path/to/my/file.txt
|
||||
file path/to/another/file.txt
|
||||
search something to search for
|
||||
search something else to search for
|
||||
```
|
||||
|
||||
Once again, do not forget: you must respond ONLY in the format of
|
||||
one action per line, and the action name should be followed by
|
||||
its parameter. Your response must not include anything other
|
||||
than a list of actions, with one action per line, in this format.
|
||||
It is extremely important that you do not deviate from this format even slightly!
|
||||
|
||||
This is the end of my instructions for how to respond. The rest is the prompt:
|
||||
crates/assistant/src/slash_command/prompt_before_summary.txt (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
I'm going to give you a prompt. I don't want you to respond
|
||||
to the prompt itself. I want you to figure out which of the following
|
||||
actions on my project, if any, would help you answer the prompt.
|
||||
|
||||
Here are the actions:
|
||||
|
||||
## file
|
||||
|
||||
This action's parameter is a file path to one of the files
|
||||
in the project. If you ask for this action, I will tell you
|
||||
the full contents of the file, so you can learn all the
|
||||
details of the file.
|
||||
|
||||
## search
|
||||
|
||||
This action's parameter is a string to do a semantic search for
|
||||
across the files in the project. (You will have a JSON summary
|
||||
of all the files in the project.) It will tell you which files this string
|
||||
(or similar strings; it is a semantic search) appear in,
|
||||
as well as some context of the lines surrounding each result.
|
||||
It's very important that you only use this action when you think
|
||||
that searching across the specific files in this project for the query
|
||||
in question will be useful. For example, don't use this command to search
|
||||
for queries you might put into a general Web search engine, because those
|
||||
will be too general to give useful results in this project-specific search.
|
||||
|
||||
---
|
||||
|
||||
That was the end of the list of actions.
|
||||
|
||||
Here is a JSON summary of each of the files in my project:
|
||||
@@ -2583,14 +2583,13 @@ async fn rejoin_dev_server_projects(
|
||||
)
|
||||
.await?
|
||||
};
|
||||
notify_rejoined_projects(&mut rejoined_projects, &session)?;
|
||||
|
||||
response.send(proto::RejoinRemoteProjectsResponse {
|
||||
rejoined_projects: rejoined_projects
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|project| project.to_proto())
|
||||
.collect(),
|
||||
})
|
||||
})?;
|
||||
notify_rejoined_projects(&mut rejoined_projects, &session)
|
||||
}
|
||||
|
||||
async fn reconnect_dev_server(
|
||||
|
||||
@@ -73,6 +73,7 @@ impl ConnectionPool {
|
||||
pub fn reset(&mut self) {
|
||||
self.connections.clear();
|
||||
self.connected_users.clear();
|
||||
self.connected_dev_servers.clear();
|
||||
self.channels.clear();
|
||||
}
|
||||
|
||||
|
||||
@@ -504,6 +504,29 @@ async fn test_dev_server_reconnect(
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_restart(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
|
||||
let (server, client1) = TestServer::start1(cx1).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
|
||||
let cx = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
|
||||
|
||||
server.reset().await;
|
||||
cx.run_until_parked();
|
||||
|
||||
cx.simulate_keystrokes("cmd-p 1 enter");
|
||||
remote_workspace
|
||||
.update(cx, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx)
|
||||
.unwrap()
|
||||
.update(cx, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
|
||||
})
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_create_dev_server_project_path_validation(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
|
||||
@@ -124,5 +124,6 @@ fn notification_window_options(
|
||||
display_id: Some(screen.id()),
|
||||
window_background: WindowBackgroundAppearance::default(),
|
||||
app_id: Some(app_id.to_owned()),
|
||||
window_min_size: Size::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -268,6 +268,7 @@ gpui::actions!(
|
||||
SelectAllMatches,
|
||||
SelectDown,
|
||||
SelectLargerSyntaxNode,
|
||||
SelectEnclosingSymbol,
|
||||
SelectLeft,
|
||||
SelectLine,
|
||||
SelectRight,
|
||||
|
||||
@@ -8226,6 +8226,58 @@ impl Editor {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn select_enclosing_symbol(
|
||||
&mut self,
|
||||
_: &SelectEnclosingSymbol,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
let old_selections = self.selections.all::<usize>(cx).into_boxed_slice();
|
||||
|
||||
fn update_selection(
|
||||
selection: &Selection<usize>,
|
||||
buffer_snap: &MultiBufferSnapshot,
|
||||
) -> Option<Selection<usize>> {
|
||||
let cursor = selection.head();
|
||||
let (_buffer_id, symbols) = buffer_snap.symbols_containing(cursor, None)?;
|
||||
for symbol in symbols.iter().rev() {
|
||||
let start = symbol.range.start.to_offset(&buffer_snap);
|
||||
let end = symbol.range.end.to_offset(&buffer_snap);
|
||||
let new_range = start..end;
|
||||
if start < selection.start || end > selection.end {
|
||||
return Some(Selection {
|
||||
id: selection.id,
|
||||
start: new_range.start,
|
||||
end: new_range.end,
|
||||
goal: SelectionGoal::None,
|
||||
reversed: selection.reversed,
|
||||
});
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
let mut selected_larger_symbol = false;
|
||||
let new_selections = old_selections
|
||||
.iter()
|
||||
.map(|selection| match update_selection(selection, &buffer) {
|
||||
Some(new_selection) => {
|
||||
if new_selection.range() != selection.range() {
|
||||
selected_larger_symbol = true;
|
||||
}
|
||||
new_selection
|
||||
}
|
||||
None => selection.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if selected_larger_symbol {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(new_selections);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn select_larger_syntax_node(
|
||||
&mut self,
|
||||
_: &SelectLargerSyntaxNode,
|
||||
|
||||
@@ -276,6 +276,7 @@ impl EditorElement {
|
||||
register_action(view, cx, Editor::toggle_comments);
|
||||
register_action(view, cx, Editor::select_larger_syntax_node);
|
||||
register_action(view, cx, Editor::select_smaller_syntax_node);
|
||||
register_action(view, cx, Editor::select_enclosing_symbol);
|
||||
register_action(view, cx, Editor::move_to_enclosing_bracket);
|
||||
register_action(view, cx, Editor::undo_selection);
|
||||
register_action(view, cx, Editor::redo_selection);
|
||||
|
||||
@@ -165,10 +165,16 @@ pub fn indent_guides_in_range(
|
||||
.indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx)
|
||||
.into_iter()
|
||||
.filter(|indent_guide| {
|
||||
let start =
|
||||
MultiBufferRow(indent_guide.multibuffer_row_range.start.0.saturating_sub(1));
|
||||
// Filter out indent guides that are inside a fold
|
||||
!snapshot.is_line_folded(MultiBufferRow(
|
||||
indent_guide.multibuffer_row_range.start.0.saturating_sub(1),
|
||||
))
|
||||
let is_folded = snapshot.is_line_folded(start);
|
||||
let line_indent = snapshot.line_indent_for_buffer_row(start);
|
||||
|
||||
let contained_in_fold =
|
||||
line_indent.len(indent_guide.tab_size) <= indent_guide.indent_level();
|
||||
|
||||
!(is_folded && contained_in_fold)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ futures.workspace = true
font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "5a5c4d4" }
gpui_macros.workspace = true
http.workspace = true
image = "0.23"
image = "0.25.1"
itertools.workspace = true
lazy_static.workspace = true
linkme = "0.3"
@@ -81,6 +81,9 @@ collections = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
http = { workspace = true, features = ["test-support"] }

[build-dependencies]
embed-resource = "2.4"

[target.'cfg(target_os = "macos")'.build-dependencies]
bindgen = "0.65.1"
cbindgen = "0.26.0"
@@ -143,9 +146,6 @@ windows.workspace = true
windows-core = "0.57"
clipboard-win = "3.1.1"

[target.'cfg(windows)'.build-dependencies]
embed-resource = "2.4"

[[example]]
name = "hello_world"
path = "examples/hello_world.rs"

@@ -3,18 +3,25 @@
//TODO: consider generating shader code for WGSL
//TODO: deprecate "runtime-shaders" and "macos-blade"

fn main() {
    #[cfg(target_os = "macos")]
    macos::build();
use std::env;

    #[cfg(target_os = "windows")]
    {
        let manifest = std::path::Path::new("resources/windows/gpui.manifest.xml");
        let rc_file = std::path::Path::new("resources/windows/gpui.rc");
        println!("cargo:rerun-if-changed={}", manifest.display());
        println!("cargo:rerun-if-changed={}", rc_file.display());
        embed_resource::compile(rc_file, embed_resource::NONE);
    }
fn main() {
    let target = env::var("CARGO_CFG_TARGET_OS");

    match target.as_deref() {
        Ok("macos") => {
            #[cfg(target_os = "macos")]
            macos::build();
        }
        Ok("windows") => {
            let manifest = std::path::Path::new("resources/windows/gpui.manifest.xml");
            let rc_file = std::path::Path::new("resources/windows/gpui.rc");
            println!("cargo:rerun-if-changed={}", manifest.display());
            println!("cargo:rerun-if-changed={}", rc_file.display());
            embed_resource::compile(rc_file, embed_resource::NONE);
        }
        _ => (),
    };
}

#[cfg(target_os = "macos")]

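
The rewritten build script dispatches on the compilation target rather than the host by reading `CARGO_CFG_TARGET_OS`, which Cargo sets when it runs a build script. A minimal sketch of the pattern; the directives emitted here are placeholders, not the ones gpui actually emits:

```rust
// build.rs sketch: branch platform-specific steps on the *target* OS.
use std::env;

fn main() {
    // Set by Cargo while running a build script; empty when run outside one.
    let target = env::var("CARGO_CFG_TARGET_OS").unwrap_or_default();
    match target.as_str() {
        "macos" => {
            // e.g. compile Metal shaders here
            println!("cargo:rerun-if-changed=shaders/");
        }
        "windows" => {
            println!("cargo:rerun-if-changed=resources/app.rc");
        }
        _ => {}
    }
}
```
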
@@ -51,6 +51,7 @@ fn main() {
            kind: WindowKind::PopUp,
            is_movable: false,
            app_id: None,
            window_min_size: Size::default(),
        }
    };

@@ -1,6 +1,6 @@
use crate::{size, DevicePixels, Result, SharedString, Size};

use image::{Bgra, ImageBuffer};
use image::RgbaImage;
use std::{
    borrow::Cow,
    fmt,
@@ -40,12 +40,12 @@ pub(crate) struct RenderImageParams {
pub struct ImageData {
    /// The ID associated with this image
    pub id: ImageId,
    data: ImageBuffer<Bgra<u8>, Vec<u8>>,
    data: RgbaImage,
}

impl ImageData {
    /// Create a new image from the given data.
    pub fn new(data: ImageBuffer<Bgra<u8>, Vec<u8>>) -> Self {
    pub fn new(data: RgbaImage) -> Self {
        static NEXT_ID: AtomicUsize = AtomicUsize::new(0);

        Self {

@@ -384,7 +384,13 @@ impl Asset for Image {
        };

        let data = if let Ok(format) = image::guess_format(&bytes) {
            let data = image::load_from_memory_with_format(&bytes, format)?.into_bgra8();
            let mut data = image::load_from_memory_with_format(&bytes, format)?.into_rgba8();

            // Convert from RGBA to BGRA.
            for pixel in data.chunks_exact_mut(4) {
                pixel.swap(0, 2);
            }

            ImageData::new(data)
        } else {
            let pixmap =

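
`image` 0.25 drops the `Bgra` pixel type that 0.23 provided, so the hunks above decode to RGBA and swap channels in place before handing the buffer to the renderer. A self-contained sketch of that swizzle on a raw byte buffer:

```rust
// Swap the R and B channels of tightly packed RGBA pixels, in place.
fn rgba_to_bgra_in_place(pixels: &mut [u8]) {
    for pixel in pixels.chunks_exact_mut(4) {
        pixel.swap(0, 2); // G and A stay where they are
    }
}

fn main() {
    let mut pixels = vec![0x11, 0x22, 0x33, 0xff]; // one RGBA pixel
    rgba_to_bgra_in_place(&mut pixels);
    assert_eq!(pixels, [0x33, 0x22, 0x11, 0xff]); // now BGRA
    println!("{:02x?}", pixels);
}
```
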
@@ -2287,6 +2287,15 @@ impl Pixels {
|
||||
pub fn abs(&self) -> Self {
|
||||
Self(self.0.abs())
|
||||
}
|
||||
|
||||
/// Returns the f64 value of `Pixels`.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// A f64 value of the `Pixels`.
|
||||
pub fn to_f64(self) -> f64 {
|
||||
self.0 as f64
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<Pixels> for Pixels {
|
||||
|
||||
@@ -567,6 +567,9 @@ pub struct WindowOptions {
|
||||
|
||||
/// Application identifier of the window. Can by used by desktop environments to group applications together.
|
||||
pub app_id: Option<String>,
|
||||
|
||||
/// Window minimum size
|
||||
pub window_min_size: Size<Pixels>,
|
||||
}
|
||||
|
||||
/// The variables that can be configured when creating a new window
|
||||
@@ -594,6 +597,9 @@ pub(crate) struct WindowParams {
|
||||
pub display_id: Option<DisplayId>,
|
||||
|
||||
pub window_background: WindowBackgroundAppearance,
|
||||
|
||||
#[cfg_attr(target_os = "linux", allow(dead_code))]
|
||||
pub window_min_size: Size<Pixels>,
|
||||
}
|
||||
|
||||
/// Represents the status of how a window should be opened.
|
||||
@@ -642,6 +648,7 @@ impl Default for WindowOptions {
|
||||
display_id: None,
|
||||
window_background: WindowBackgroundAppearance::default(),
|
||||
app_id: None,
|
||||
window_min_size: Size::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -583,19 +583,11 @@ impl Keystroke {
|
||||
let key_utf8 = state.key_get_utf8(keycode);
|
||||
let key_sym = state.key_get_one_sym(keycode);
|
||||
|
||||
// The logic here tries to replicate the logic in `../mac/events.rs`
|
||||
// "Consumed" modifiers are modifiers that have been used to translate a key, for example
|
||||
// pressing "shift" and "1" on US layout produces the key `!` but "consumes" the shift.
|
||||
// Notes:
|
||||
// - macOS gets the key character directly ("."), xkb gives us the key name ("period")
|
||||
// - macOS logic removes consumed shift modifier for symbols: "{", not "shift-{"
|
||||
// - macOS logic keeps consumed shift modifiers for letters: "shift-a", not "a" or "A"
|
||||
|
||||
let mut handle_consumed_modifiers = true;
|
||||
let key = match key_sym {
|
||||
Keysym::Return => "enter".to_owned(),
|
||||
Keysym::Prior => "pageup".to_owned(),
|
||||
Keysym::Next => "pagedown".to_owned(),
|
||||
Keysym::ISO_Left_Tab => "tab".to_owned(),
|
||||
|
||||
Keysym::comma => ",".to_owned(),
|
||||
Keysym::period => ".".to_owned(),
|
||||
@@ -633,30 +625,22 @@ impl Keystroke {
|
||||
Keysym::equal => "=".to_owned(),
|
||||
Keysym::plus => "+".to_owned(),
|
||||
|
||||
Keysym::ISO_Left_Tab => {
|
||||
handle_consumed_modifiers = false;
|
||||
"tab".to_owned()
|
||||
}
|
||||
|
||||
_ => {
|
||||
handle_consumed_modifiers = false;
|
||||
xkb::keysym_get_name(key_sym).to_lowercase()
|
||||
}
|
||||
_ => xkb::keysym_get_name(key_sym).to_lowercase(),
|
||||
};
|
||||
|
||||
if modifiers.shift {
|
||||
// we only include the shift for upper-case letters by convention,
|
||||
// so don't include for numbers and symbols, but do include for
|
||||
// tab/enter, etc.
|
||||
if key.chars().count() == 1 && key_utf8 == key {
|
||||
modifiers.shift = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Ignore control characters (and DEL) for the purposes of ime_key
|
||||
let ime_key =
|
||||
(key_utf32 >= 32 && key_utf32 != 127 && !key_utf8.is_empty()).then_some(key_utf8);
|
||||
|
||||
if handle_consumed_modifiers {
|
||||
let mod_shift_index = state.get_keymap().mod_get_index(xkb::MOD_NAME_SHIFT);
|
||||
let is_shift_consumed = state.mod_index_is_consumed(keycode, mod_shift_index);
|
||||
|
||||
if modifiers.shift && is_shift_consumed {
|
||||
modifiers.shift = false;
|
||||
}
|
||||
}
|
||||
|
||||
Keystroke {
|
||||
modifiers,
|
||||
key,
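
When shift is held, the Linux keymap code above keeps the modifier only if it is not already baked into the key itself: a single-character key whose name matches the typed text drops it, while named keys such as `tab` or `enter` keep it. A tiny sketch of that rule, assuming shift is held:

```rust
// Decide whether to keep the shift modifier on a keystroke: drop it when the
// key name is a single character that already matches what was typed.
fn keep_shift(key: &str, typed: &str) -> bool {
    !(key.chars().count() == 1 && typed == key)
}

fn main() {
    assert!(!keep_shift("!", "!"));   // shift-1 on a US layout is reported as plain "!"
    assert!(keep_shift("a", "A"));    // letters keep the shift: "shift-a"
    assert!(keep_shift("tab", "\t")); // named keys like tab/enter keep it too
}
```
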
@@ -671,12 +671,12 @@ impl LinuxClient for WaylandClient {
|
||||
return;
|
||||
};
|
||||
if state.mouse_focused_window.is_some() || state.keyboard_focused_window.is_some() {
|
||||
let serial = state.serial_tracker.get(SerialKind::KeyEnter);
|
||||
state.clipboard.set_primary(item.text);
|
||||
let serial = state.serial_tracker.get(SerialKind::KeyPress);
|
||||
let data_source = primary_selection_manager.create_source(&state.globals.qh, ());
|
||||
data_source.offer(state.clipboard.self_mime());
|
||||
data_source.offer(TEXT_MIME_TYPE.to_string());
|
||||
primary_selection.set_selection(Some(&data_source), serial);
|
||||
state.clipboard.set_primary(item.text);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -689,12 +689,12 @@ impl LinuxClient for WaylandClient {
|
||||
return;
|
||||
};
|
||||
if state.mouse_focused_window.is_some() || state.keyboard_focused_window.is_some() {
|
||||
let serial = state.serial_tracker.get(SerialKind::KeyEnter);
|
||||
state.clipboard.set(item.text);
|
||||
let serial = state.serial_tracker.get(SerialKind::KeyPress);
|
||||
let data_source = data_device_manager.create_data_source(&state.globals.qh, ());
|
||||
data_source.offer(state.clipboard.self_mime());
|
||||
data_source.offer(TEXT_MIME_TYPE.to_string());
|
||||
data_device.set_selection(Some(&data_source), serial);
|
||||
state.clipboard.set(item.text);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -344,6 +344,7 @@ struct MacWindowState {
|
||||
// Whether the next left-mouse click is also the focusing click.
|
||||
first_mouse: bool,
|
||||
fullscreen_restore_bounds: Bounds<Pixels>,
|
||||
ime_composing: bool,
|
||||
}
|
||||
|
||||
impl MacWindowState {
|
||||
@@ -504,6 +505,7 @@ impl MacWindow {
|
||||
focus,
|
||||
show,
|
||||
display_id,
|
||||
window_min_size,
|
||||
}: WindowParams,
|
||||
executor: ForegroundExecutor,
|
||||
renderer_context: renderer::Context,
|
||||
@@ -623,6 +625,7 @@ impl MacWindow {
|
||||
external_files_dragged: false,
|
||||
first_mouse: false,
|
||||
fullscreen_restore_bounds: Bounds::default(),
|
||||
ime_composing: false,
|
||||
})));
|
||||
|
||||
(*native_window).set_ivar(
|
||||
@@ -644,6 +647,11 @@ impl MacWindow {
|
||||
|
||||
native_window.setMovable_(is_movable as BOOL);
|
||||
|
||||
native_window.setContentMinSize_(NSSize {
|
||||
width: window_min_size.width.to_f64(),
|
||||
height: window_min_size.height.to_f64(),
|
||||
});
|
||||
|
||||
if titlebar.map_or(true, |titlebar| titlebar.appears_transparent) {
|
||||
native_window.setTitlebarAppearsTransparent_(YES);
|
||||
native_window.setTitleVisibility_(NSWindowTitleVisibility::NSWindowTitleHidden);
|
||||
@@ -1234,6 +1242,7 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent:
|
||||
let mut lock = window_state.lock();
|
||||
let previous_keydown_inserted_text = lock.previous_keydown_inserted_text.take();
|
||||
let mut last_inserts = lock.last_ime_inputs.take().unwrap();
|
||||
let ime_composing = std::mem::take(&mut lock.ime_composing);
|
||||
|
||||
let mut callback = lock.event_callback.take();
|
||||
drop(lock);
|
||||
@@ -1248,7 +1257,8 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent:
|
||||
let is_composing =
|
||||
with_input_handler(this, |input_handler| input_handler.marked_text_range())
|
||||
.flatten()
|
||||
.is_some();
|
||||
.is_some()
|
||||
|| ime_composing;
|
||||
|
||||
if let Some((text, range)) = last_insert {
|
||||
if !is_composing {
|
||||
@@ -1922,6 +1932,7 @@ fn send_to_input_handler(window: &Object, ime: ImeInput) {
|
||||
input_handler.replace_text_in_range(range, &text)
|
||||
}
|
||||
ImeInput::SetMarkedText(text, range, marked_range) => {
|
||||
lock.ime_composing = true;
|
||||
drop(lock);
|
||||
input_handler.replace_and_mark_text_in_range(range, &text, marked_range)
|
||||
}
|
||||
|
||||
@@ -267,14 +267,8 @@ fn handle_syskeydown_msg(
|
||||
) -> Option<isize> {
|
||||
// we need to call `DefWindowProcW`, or we will lose the system-wide `Alt+F4`, `Alt+{other keys}`
|
||||
// shortcuts.
|
||||
let Some(keystroke) = parse_syskeydown_msg_keystroke(wparam) else {
|
||||
return None;
|
||||
};
|
||||
let mut lock = state_ptr.state.borrow_mut();
|
||||
let Some(mut func) = lock.callbacks.input.take() else {
|
||||
return None;
|
||||
};
|
||||
drop(lock);
|
||||
let keystroke = parse_syskeydown_msg_keystroke(wparam)?;
|
||||
let mut func = state_ptr.state.borrow_mut().callbacks.input.take()?;
|
||||
let event = KeyDownEvent {
|
||||
keystroke,
|
||||
is_held: lparam.0 & (0x1 << 30) > 0,
|
||||
@@ -292,14 +286,8 @@ fn handle_syskeydown_msg(
|
||||
fn handle_syskeyup_msg(wparam: WPARAM, state_ptr: Rc<WindowsWindowStatePtr>) -> Option<isize> {
|
||||
// we need to call `DefWindowProcW`, or we will lose the system-wide `Alt+F4`, `Alt+{other keys}`
|
||||
// shortcuts.
|
||||
let Some(keystroke) = parse_syskeydown_msg_keystroke(wparam) else {
|
||||
return None;
|
||||
};
|
||||
let mut lock = state_ptr.state.borrow_mut();
|
||||
let Some(mut func) = lock.callbacks.input.take() else {
|
||||
return None;
|
||||
};
|
||||
drop(lock);
|
||||
let keystroke = parse_syskeydown_msg_keystroke(wparam)?;
|
||||
let mut func = state_ptr.state.borrow_mut().callbacks.input.take()?;
|
||||
let event = KeyUpEvent { keystroke };
|
||||
let result = if func(PlatformInput::KeyUp(event)).default_prevented {
|
||||
Some(0)
|
||||
@@ -614,35 +602,25 @@ fn handle_ime_composition(
|
||||
) -> Option<isize> {
|
||||
let mut ime_input = None;
|
||||
if lparam.0 as u32 & GCS_COMPSTR.0 > 0 {
|
||||
let Some((string, string_len)) = parse_ime_compostion_string(handle) else {
|
||||
return None;
|
||||
};
|
||||
let mut lock = state_ptr.state.borrow_mut();
|
||||
let Some(mut input_handler) = lock.input_handler.take() else {
|
||||
return None;
|
||||
};
|
||||
drop(lock);
|
||||
input_handler.replace_and_mark_text_in_range(None, string.as_str(), Some(0..string_len));
|
||||
let (comp_string, string_len) = parse_ime_compostion_string(handle)?;
|
||||
let mut input_handler = state_ptr.state.borrow_mut().input_handler.take()?;
|
||||
input_handler.replace_and_mark_text_in_range(
|
||||
None,
|
||||
&comp_string,
|
||||
Some(string_len..string_len),
|
||||
);
|
||||
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
|
||||
ime_input = Some(string);
|
||||
ime_input = Some(comp_string);
|
||||
}
|
||||
if lparam.0 as u32 & GCS_CURSORPOS.0 > 0 {
|
||||
let Some(ref comp_string) = ime_input else {
|
||||
return None;
|
||||
};
|
||||
let comp_string = &ime_input?;
|
||||
let caret_pos = retrieve_composition_cursor_position(handle);
|
||||
let mut lock = state_ptr.state.borrow_mut();
|
||||
let Some(mut input_handler) = lock.input_handler.take() else {
|
||||
return None;
|
||||
};
|
||||
drop(lock);
|
||||
input_handler.replace_and_mark_text_in_range(None, comp_string, Some(0..caret_pos));
|
||||
let mut input_handler = state_ptr.state.borrow_mut().input_handler.take()?;
|
||||
input_handler.replace_and_mark_text_in_range(None, comp_string, Some(caret_pos..caret_pos));
|
||||
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
|
||||
}
|
||||
if lparam.0 as u32 & GCS_RESULTSTR.0 > 0 {
|
||||
let Some(comp_result) = parse_ime_compostion_result(handle) else {
|
||||
return None;
|
||||
};
|
||||
let comp_result = parse_ime_compostion_result(handle)?;
|
||||
let mut lock = state_ptr.state.borrow_mut();
|
||||
let Some(mut input_handler) = lock.input_handler.take() else {
|
||||
return Some(1);
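
These Windows handlers replace the `let ... else { return None }` ladders with the `?` operator, which performs the same early return from any function that yields an `Option`. A small standalone illustration of the rewrite:

```rust
fn parse(input: &str) -> Option<u32> {
    input.trim().parse().ok()
}

// Before: explicit early return with let-else.
fn double_verbose(input: &str) -> Option<u32> {
    let Some(n) = parse(input) else {
        return None;
    };
    Some(n * 2)
}

// After: `?` performs the same early return.
fn double(input: &str) -> Option<u32> {
    Some(parse(input)? * 2)
}

fn main() {
    assert_eq!(double_verbose(" 21 "), double(" 21 "));
    assert_eq!(double("oops"), None);
}
```
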
@@ -663,11 +641,7 @@ fn handle_calc_client_size(
|
||||
lparam: LPARAM,
|
||||
state_ptr: Rc<WindowsWindowStatePtr>,
|
||||
) -> Option<isize> {
|
||||
if !state_ptr.hide_title_bar || state_ptr.state.borrow().is_fullscreen() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if wparam.0 == 0 {
|
||||
if !state_ptr.hide_title_bar || state_ptr.state.borrow().is_fullscreen() || wparam.0 == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -1097,13 +1071,14 @@ fn parse_syskeydown_msg_keystroke(wparam: WPARAM) -> Option<Keystroke> {
|
||||
VK_NEXT => "pagedown",
|
||||
VK_ESCAPE => "escape",
|
||||
VK_INSERT => "insert",
|
||||
VK_DELETE => "delete",
|
||||
_ => return basic_vkcode_to_string(vk_code, modifiers),
|
||||
}
|
||||
.to_owned();
|
||||
|
||||
Some(Keystroke {
|
||||
modifiers,
|
||||
key: key,
|
||||
key,
|
||||
ime_key: None,
|
||||
})
|
||||
}
|
||||
@@ -1160,7 +1135,7 @@ fn parse_keydown_msg_keystroke(wparam: WPARAM) -> Option<KeystrokeOrModifier> {
|
||||
|
||||
Some(KeystrokeOrModifier::Keystroke(Keystroke {
|
||||
modifiers,
|
||||
key: key,
|
||||
key,
|
||||
ime_key: None,
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -631,6 +631,7 @@ impl Window {
|
||||
display_id,
|
||||
window_background,
|
||||
app_id,
|
||||
window_min_size,
|
||||
} = options;
|
||||
|
||||
let bounds = window_bounds
|
||||
@@ -647,6 +648,7 @@ impl Window {
|
||||
show,
|
||||
display_id,
|
||||
window_background,
|
||||
window_min_size,
|
||||
},
|
||||
)?;
|
||||
let display_id = platform_window.display().map(|display| display.id());
|
||||
|
||||
@@ -1,25 +1,32 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use async_trait::async_trait;
|
||||
use collections::HashMap;
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use futures::StreamExt;
|
||||
use gpui::{AppContext, AsyncAppContext};
|
||||
use http::github::{latest_github_release, GitHubLspBinaryVersion};
|
||||
use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||
use lsp::LanguageServerBinary;
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::ContextProviderWithTasks;
|
||||
use serde_json::{json, Value};
|
||||
use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore};
|
||||
use smol::fs;
|
||||
use smol::{
|
||||
fs::{self},
|
||||
io::BufReader,
|
||||
};
|
||||
use std::{
|
||||
any::Any,
|
||||
env::consts,
|
||||
ffi::OsString,
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
sync::{Arc, OnceLock},
|
||||
};
|
||||
use task::{TaskTemplate, TaskTemplates, VariableName};
|
||||
use util::{maybe, ResultExt};
|
||||
use util::{fs::remove_matching, maybe, ResultExt};
|
||||
|
||||
const SERVER_PATH: &str =
|
||||
"node_modules/vscode-langservers-extracted/bin/vscode-json-language-server";
|
||||
@@ -251,3 +258,137 @@ fn schema_file_match(path: &Path) -> String {
|
||||
.to_string()
|
||||
.replace('\\', "/")
|
||||
}
|
||||
|
||||
pub(super) struct NodeVersionAdapter;
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl LspAdapter for NodeVersionAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName("package-version-server".into())
|
||||
}
|
||||
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
delegate: &dyn LspAdapterDelegate,
|
||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||
let release = latest_github_release(
|
||||
"zed-industries/package-version-server",
|
||||
true,
|
||||
false,
|
||||
delegate.http_client(),
|
||||
)
|
||||
.await?;
|
||||
let os = match consts::OS {
|
||||
"macos" => "apple-darwin",
|
||||
"linux" => "unknown-linux-gnu",
|
||||
"windows" => "pc-windows-msvc",
|
||||
other => bail!("Running on unsupported os: {other}"),
|
||||
};
|
||||
let suffix = if consts::OS == "windows" {
|
||||
".zip"
|
||||
} else {
|
||||
".tar.gz"
|
||||
};
|
||||
let asset_name = format!("package-version-server-{}-{os}{suffix}", consts::ARCH);
|
||||
let asset = release
|
||||
.assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == asset_name)
|
||||
.with_context(|| format!("no asset found matching `{asset_name:?}`"))?;
|
||||
Ok(Box::new(GitHubLspBinaryVersion {
|
||||
name: release.tag_name,
|
||||
url: asset.browser_download_url.clone(),
|
||||
}))
|
||||
}
|
||||
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
latest_version: Box<dyn 'static + Send + Any>,
|
||||
container_dir: PathBuf,
|
||||
delegate: &dyn LspAdapterDelegate,
|
||||
) -> Result<LanguageServerBinary> {
|
||||
let version = latest_version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||
let destination_path =
|
||||
container_dir.join(format!("package-version-server-{}", version.name));
|
||||
let destination_container_path =
|
||||
container_dir.join(format!("package-version-server-{}-tmp", version.name));
|
||||
if fs::metadata(&destination_path).await.is_err() {
|
||||
let mut response = delegate
|
||||
.http_client()
|
||||
.get(&version.url, Default::default(), true)
|
||||
.await
|
||||
.map_err(|err| anyhow!("error downloading release: {}", err))?;
|
||||
if version.url.ends_with(".zip") {
|
||||
node_runtime::extract_zip(
|
||||
&destination_container_path,
|
||||
BufReader::new(response.body_mut()),
|
||||
)
|
||||
.await?;
|
||||
} else if version.url.ends_with(".tar.gz") {
|
||||
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
|
||||
let archive = Archive::new(decompressed_bytes);
|
||||
archive.unpack(&destination_container_path).await?;
|
||||
}
|
||||
|
||||
fs::copy(
|
||||
destination_container_path.join("package-version-server"),
|
||||
&destination_path,
|
||||
)
|
||||
.await?;
|
||||
// todo("windows")
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
fs::set_permissions(
|
||||
&destination_path,
|
||||
<fs::Permissions as fs::unix::PermissionsExt>::from_mode(0o755),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
remove_matching(&container_dir, |entry| entry != destination_path).await;
|
||||
}
|
||||
|
||||
Ok(LanguageServerBinary {
|
||||
path: destination_path.join("package-version-server"),
|
||||
env: None,
|
||||
arguments: Default::default(),
|
||||
})
|
||||
}
|
||||
|
||||
async fn cached_server_binary(
|
||||
&self,
|
||||
container_dir: PathBuf,
|
||||
_delegate: &dyn LspAdapterDelegate,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_version_server_binary(container_dir).await
|
||||
}
|
||||
|
||||
async fn installation_test_binary(
|
||||
&self,
|
||||
container_dir: PathBuf,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_version_server_binary(container_dir)
|
||||
.await
|
||||
.map(|mut binary| {
|
||||
binary.arguments = vec!["--version".into()];
|
||||
binary
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_cached_version_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
||||
maybe!(async {
|
||||
let mut last = None;
|
||||
let mut entries = fs::read_dir(&container_dir).await?;
|
||||
while let Some(entry) = entries.next().await {
|
||||
last = Some(entry?.path());
|
||||
}
|
||||
|
||||
anyhow::Ok(LanguageServerBinary {
|
||||
path: last.ok_or_else(|| anyhow!("no cached binary"))?,
|
||||
env: None,
|
||||
arguments: Default::default(),
|
||||
})
|
||||
})
|
||||
.await
|
||||
.log_err()
|
||||
}
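
The adapter above selects a GitHub release asset by mapping the running OS to a target-triple fragment plus an archive suffix. A standalone sketch of just that mapping using `std::env::consts`; the binary name mirrors the one in the hunk and the helper itself is illustrative:

```rust
use std::env::consts;

// Derive the expected release-asset file name for the current platform.
fn asset_name(binary: &str) -> Option<String> {
    let os = match consts::OS {
        "macos" => "apple-darwin",
        "linux" => "unknown-linux-gnu",
        "windows" => "pc-windows-msvc",
        _ => return None, // unsupported platform
    };
    let suffix = if consts::OS == "windows" { ".zip" } else { ".tar.gz" };
    Some(format!("{binary}-{}-{os}{suffix}", consts::ARCH))
}

fn main() {
    // e.g. "package-version-server-aarch64-apple-darwin.tar.gz" on Apple Silicon
    println!("{:?}", asset_name("package-version-server"));
}
```
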
@@ -117,10 +117,13 @@ pub fn init(
|
||||
|
||||
language!(
|
||||
"json",
|
||||
vec![Arc::new(json::JsonLspAdapter::new(
|
||||
node_runtime.clone(),
|
||||
languages.clone(),
|
||||
))],
|
||||
vec![
|
||||
Arc::new(json::JsonLspAdapter::new(
|
||||
node_runtime.clone(),
|
||||
languages.clone(),
|
||||
)),
|
||||
Arc::new(json::NodeVersionAdapter)
|
||||
],
|
||||
json_task_context()
|
||||
);
|
||||
language!("markdown");
|
||||
|
||||
@@ -201,11 +201,18 @@ impl LspAdapter for RustLspAdapter {
|
||||
completion: &lsp::CompletionItem,
|
||||
language: &Arc<Language>,
|
||||
) -> Option<CodeLabel> {
|
||||
let detail = completion
|
||||
.detail
|
||||
.as_ref()
|
||||
.or(completion
|
||||
.label_details
|
||||
.as_ref()
|
||||
.and_then(|detail| detail.detail.as_ref()))
|
||||
.map(ToOwned::to_owned);
|
||||
match completion.kind {
|
||||
Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => {
|
||||
let detail = completion.detail.as_ref().unwrap();
|
||||
Some(lsp::CompletionItemKind::FIELD) if detail.is_some() => {
|
||||
let name = &completion.label;
|
||||
let text = format!("{}: {}", name, detail);
|
||||
let text = format!("{}: {}", name, detail.unwrap());
|
||||
let source = Rope::from(format!("struct S {{ {} }}", text).as_str());
|
||||
let runs = language.highlight_text(&source, 11..11 + text.len());
|
||||
return Some(CodeLabel {
|
||||
@@ -215,12 +222,11 @@ impl LspAdapter for RustLspAdapter {
|
||||
});
|
||||
}
|
||||
Some(lsp::CompletionItemKind::CONSTANT | lsp::CompletionItemKind::VARIABLE)
|
||||
if completion.detail.is_some()
|
||||
if detail.is_some()
|
||||
&& completion.insert_text_format != Some(lsp::InsertTextFormat::SNIPPET) =>
|
||||
{
|
||||
let detail = completion.detail.as_ref().unwrap();
|
||||
let name = &completion.label;
|
||||
let text = format!("{}: {}", name, detail);
|
||||
let text = format!("{}: {}", name, detail.unwrap());
|
||||
let source = Rope::from(format!("let {} = ();", text).as_str());
|
||||
let runs = language.highlight_text(&source, 4..4 + text.len());
|
||||
return Some(CodeLabel {
|
||||
@@ -230,12 +236,12 @@ impl LspAdapter for RustLspAdapter {
|
||||
});
|
||||
}
|
||||
Some(lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD)
|
||||
if completion.detail.is_some() =>
|
||||
if detail.is_some() =>
|
||||
{
|
||||
lazy_static! {
|
||||
static ref REGEX: Regex = Regex::new("\\(…?\\)").unwrap();
|
||||
}
|
||||
let detail = completion.detail.as_ref().unwrap();
|
||||
let detail = detail.unwrap();
|
||||
const FUNCTION_PREFIXES: [&'static str; 2] = ["async fn", "fn"];
|
||||
let prefix = FUNCTION_PREFIXES
|
||||
.iter()
|
||||
@@ -269,9 +275,14 @@ impl LspAdapter for RustLspAdapter {
|
||||
_ => None,
|
||||
};
|
||||
let highlight_id = language.grammar()?.highlight_id_for_name(highlight_name?)?;
|
||||
let mut label = CodeLabel::plain(completion.label.clone(), None);
|
||||
let mut label = completion.label.clone();
|
||||
if let Some(detail) = detail.filter(|detail| detail.starts_with(" (")) {
|
||||
use std::fmt::Write;
|
||||
write!(label, "{detail}").ok()?;
|
||||
}
|
||||
let mut label = CodeLabel::plain(label, None);
|
||||
label.runs.push((
|
||||
0..label.text.rfind('(').unwrap_or(label.text.len()),
|
||||
0..label.text.rfind('(').unwrap_or(completion.label.len()),
|
||||
highlight_id,
|
||||
));
|
||||
return Some(label);
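
The completion-label change reads the detail from `detail` first and falls back to `label_details.detail`, then reuses that single value in every match arm. A sketch of the lookup with minimal stand-in types (not the real `lsp` structs):

```rust
// Minimal stand-ins for the LSP completion types referenced above.
struct CompletionItemLabelDetails {
    detail: Option<String>,
}

struct CompletionItem {
    detail: Option<String>,
    label_details: Option<CompletionItemLabelDetails>,
}

fn completion_detail(completion: &CompletionItem) -> Option<String> {
    completion
        .detail
        .as_ref()
        .or(completion
            .label_details
            .as_ref()
            .and_then(|details| details.detail.as_ref()))
        .map(ToOwned::to_owned)
}

fn main() {
    let completion = CompletionItem {
        detail: None,
        label_details: Some(CompletionItemLabelDetails {
            detail: Some("fn(&str) -> usize".into()),
        }),
    };
    assert_eq!(completion_detail(&completion).as_deref(), Some("fn(&str) -> usize"));
}
```
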
@@ -1,24 +0,0 @@
|
||||
[package]
|
||||
name = "miner"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[[bin]]
|
||||
name = "miner"
|
||||
path = "src/miner.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
heed.workspace = true
|
||||
ignore.workspace = true
|
||||
indicatif = "0.17.8"
|
||||
reqwest = { version = "0.12.5", features = ["json", "stream"] }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
tree-sitter.workspace = true
|
||||
tree-sitter-rust.workspace = true
|
||||
tokenizers = { version = "0.19.1", features = ["http"] }
|
||||
tokio.workspace = true
|
||||
@@ -1,794 +0,0 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::StreamExt;
|
||||
use heed::{
|
||||
types::{SerdeJson, Str},
|
||||
Database as HeedDatabase, EnvOpenOptions, RwTxn,
|
||||
};
|
||||
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
collections::{BTreeMap, HashMap, VecDeque},
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::SystemTime,
|
||||
};
|
||||
use tokenizers::tokenizer::Tokenizer;
|
||||
use tokenizers::FromPretrainedParameters;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct Message {
|
||||
role: String,
|
||||
content: String,
|
||||
}
|
||||
|
||||
pub struct OllamaClient {
|
||||
client: Client,
|
||||
base_url: String,
|
||||
}
|
||||
|
||||
impl OllamaClient {
|
||||
pub fn new(base_url: String) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
base_url,
|
||||
}
|
||||
}
|
||||
|
||||
async fn stream_completion(
|
||||
&self,
|
||||
model: String,
|
||||
messages: Vec<Message>,
|
||||
) -> Result<mpsc::Receiver<String>> {
|
||||
let (tx, rx) = mpsc::channel(100);
|
||||
|
||||
let request = serde_json::json!({
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"stream": true,
|
||||
});
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post(format!("{}/api/chat", self.base_url))
|
||||
.json(&request)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(anyhow!(
|
||||
"error streaming completion: {:?}",
|
||||
response.text().await?
|
||||
));
|
||||
}
|
||||
|
||||
tokio::spawn(async move {
|
||||
let mut stream = response.bytes_stream();
|
||||
while let Some(chunk) = stream.next().await {
|
||||
if let Ok(chunk) = chunk {
|
||||
if let Ok(text) = String::from_utf8(chunk.to_vec()) {
|
||||
if let Ok(response) = serde_json::from_str::<serde_json::Value>(&text) {
|
||||
if let Some(content) = response["message"]["content"].as_str() {
|
||||
let _ = tx.send(content.to_string()).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(rx)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct HuggingFaceClient {
|
||||
client: Client,
|
||||
endpoint: String,
|
||||
api_key: String,
|
||||
}
|
||||
|
||||
impl HuggingFaceClient {
|
||||
pub fn new(endpoint: String, api_key: String) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
endpoint,
|
||||
api_key,
|
||||
}
|
||||
}
|
||||
|
||||
async fn stream_completion(
|
||||
&self,
|
||||
model: String,
|
||||
messages: Vec<Message>,
|
||||
) -> Result<mpsc::Receiver<String>> {
|
||||
let (tx, rx) = mpsc::channel(100);
|
||||
|
||||
let request = serde_json::json!({
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"stream": true,
|
||||
"max_tokens": 2048
|
||||
});
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post(&self.endpoint)
|
||||
.header("Authorization", format!("Bearer {}", self.api_key))
|
||||
.header("Content-Type", "application/json")
|
||||
.json(&request)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(anyhow!(
|
||||
"error streaming completion: {:?}",
|
||||
response.text().await?
|
||||
));
|
||||
}
|
||||
|
||||
tokio::spawn(async move {
|
||||
let mut stream = response.bytes_stream();
|
||||
while let Some(chunk) = stream.next().await {
|
||||
if let Ok(chunk) = chunk {
|
||||
if let Ok(text) = String::from_utf8(chunk.to_vec()) {
|
||||
for line in text.lines() {
|
||||
if line.starts_with("data:") {
|
||||
let json_str = line.trim_start_matches("data:");
|
||||
if json_str == "[DONE]" {
|
||||
break;
|
||||
}
|
||||
|
||||
if let Ok(response) =
|
||||
serde_json::from_str::<serde_json::Value>(json_str)
|
||||
{
|
||||
if let Some(content) =
|
||||
response["choices"][0]["delta"]["content"].as_str()
|
||||
{
|
||||
let _ = tx.send(content.to_string()).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(rx)
|
||||
}
|
||||
}
|
||||
|
||||
const CHUNK_SIZE: usize = 5000;
|
||||
const OVERLAP: usize = 2_000;
|
||||
|
||||
#[derive(Debug)]
|
||||
enum Entry {
|
||||
File(PathBuf),
|
||||
Directory(PathBuf),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct CachedSummary {
|
||||
summary: String,
|
||||
mtime: SystemTime,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct Database {
|
||||
tx: mpsc::Sender<Box<dyn FnOnce(&HeedDatabase<Str, SerdeJson<CachedSummary>>, RwTxn) + Send>>,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
async fn new(db_path: &Path, root: &Path) -> Result<Self> {
|
||||
std::fs::create_dir_all(&db_path)?;
|
||||
let env = unsafe {
|
||||
EnvOpenOptions::new()
|
||||
.map_size(1024 * 1024 * 1024)
|
||||
.max_dbs(3000)
|
||||
.open(db_path)?
|
||||
};
|
||||
let mut wtxn = env.write_txn()?;
|
||||
let db_name = format!("summaries_{}", root.to_string_lossy());
|
||||
let db: HeedDatabase<Str, SerdeJson<CachedSummary>> =
|
||||
env.create_database(&mut wtxn, Some(&db_name))?;
|
||||
wtxn.commit()?;
|
||||
|
||||
let (tx, mut rx) = mpsc::channel::<
|
||||
Box<dyn FnOnce(&HeedDatabase<Str, SerdeJson<CachedSummary>>, RwTxn) + Send>,
|
||||
>(100);
|
||||
|
||||
tokio::spawn(async move {
|
||||
while let Some(f) = rx.recv().await {
|
||||
let wtxn = env.write_txn().unwrap();
|
||||
f(&db, wtxn);
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Self { tx })
|
||||
}
|
||||
|
||||
async fn transact<F, T>(&self, f: F) -> Result<T>
|
||||
where
|
||||
F: FnOnce(&HeedDatabase<Str, SerdeJson<CachedSummary>>, RwTxn) -> Result<T>
|
||||
+ Send
|
||||
+ 'static,
|
||||
T: 'static + Send,
|
||||
{
|
||||
let (tx, rx) = tokio::sync::oneshot::channel();
|
||||
self.tx
|
||||
.send(Box::new(move |db, txn| {
|
||||
let result = f(db, txn);
|
||||
let _ = tx.send(result);
|
||||
}))
|
||||
.await
|
||||
.map_err(|_| anyhow!("database closed"))?;
|
||||
Ok(rx.await.map_err(|_| anyhow!("transaction failed"))??)
|
||||
}
|
||||
}
|
||||
|
||||
async fn summarize_project(
|
||||
db_path: &Path,
|
||||
root: &Path,
|
||||
num_workers: usize,
|
||||
) -> Result<BTreeMap<PathBuf, String>> {
|
||||
let database = Database::new(db_path, root).await?;
|
||||
|
||||
let tokenizer = Tokenizer::from_pretrained(
|
||||
"mistralai/Mistral-7B-Instruct-v0.1",
|
||||
Some(FromPretrainedParameters {
|
||||
revision: "main".into(),
|
||||
user_agent: HashMap::default(),
|
||||
auth_token: Some(
|
||||
std::env::var("HUGGINGFACE_API_TOKEN").expect("HUGGINGFACE_API_TOKEN not set"),
|
||||
),
|
||||
}),
|
||||
)
|
||||
.unwrap();
|
||||
let client = Arc::new(HuggingFaceClient::new(
|
||||
"https://c0es55wrh8muqy3g.us-east-1.aws.endpoints.huggingface.cloud/v1/chat/completions"
|
||||
.into(),
|
||||
std::env::var("HUGGINGFACE_API_TOKEN").expect("HUGGINGFACE_API_TOKEN not set"),
|
||||
));
|
||||
let queue = Arc::new(Mutex::new(VecDeque::new()));
|
||||
|
||||
let multi_progress = Arc::new(MultiProgress::new());
|
||||
let overall_progress = multi_progress.add(ProgressBar::new_spinner());
|
||||
overall_progress.set_style(
|
||||
ProgressStyle::default_spinner()
|
||||
.template("{spinner:.green} {msg}")
|
||||
.unwrap(),
|
||||
);
|
||||
overall_progress.set_message("Summarizing project...");
|
||||
|
||||
// Populate the queue with files and directories
|
||||
let mut walker = ignore::WalkBuilder::new(root)
|
||||
.hidden(true)
|
||||
.ignore(true)
|
||||
.build();
|
||||
while let Some(entry) = walker.next() {
|
||||
if let Ok(entry) = entry {
|
||||
let path = entry.path().to_owned();
|
||||
if entry.file_type().map_or(false, |ft| ft.is_dir()) {
|
||||
queue.lock().await.push_back(Entry::Directory(path));
|
||||
} else {
|
||||
queue.lock().await.push_back(Entry::File(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let total_entries = queue.lock().await.len();
|
||||
let progress_bar = multi_progress.add(ProgressBar::new(total_entries as u64));
|
||||
progress_bar.set_style(
|
||||
ProgressStyle::default_bar()
|
||||
.template("[{elapsed_precise}] {bar:40.cyan/blue} {pos}/{len} {msg}")
|
||||
.unwrap()
|
||||
.progress_chars("##-"),
|
||||
);
|
||||
|
||||
let summaries = Arc::new(Mutex::new(BTreeMap::new()));
|
||||
let paths_loaded_from_cache = Arc::new(Mutex::new(BTreeMap::new()));
|
||||
|
||||
let rust_language = tree_sitter_rust::language();
|
||||
|
||||
let workers: Vec<_> = (0..num_workers)
|
||||
.map(|_| {
|
||||
let queue = Arc::clone(&queue);
|
||||
let client = Arc::clone(&client);
|
||||
let summaries = Arc::clone(&summaries);
|
||||
let tokenizer = tokenizer.clone();
|
||||
let progress_bar = progress_bar.clone();
|
||||
let database = database.clone();
|
||||
let paths_loaded_from_cache = Arc::clone(&paths_loaded_from_cache);
|
||||
let mut parser = tree_sitter::Parser::new();
|
||||
parser.set_language(&rust_language).unwrap();
|
||||
let rust_language = rust_language.clone();
|
||||
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
let mut queue_lock = queue.lock().await;
|
||||
let Some(entry) = queue_lock.pop_front() else {
|
||||
break;
|
||||
};
|
||||
|
||||
match entry {
|
||||
Entry::File(path) => {
|
||||
drop(queue_lock);
|
||||
let summary = async {
|
||||
let mtime = tokio::fs::metadata(&path).await?.modified()?;
|
||||
let key = path.to_string_lossy().to_string();
|
||||
|
||||
let cached = database
|
||||
.transact({
|
||||
let key = key.clone();
|
||||
move |db, txn| Ok(db.get(&txn, &key)?)
|
||||
})
|
||||
.await?;
|
||||
if let Some(cached) = cached {
|
||||
if cached.mtime == mtime {
|
||||
paths_loaded_from_cache
|
||||
.lock()
|
||||
.await
|
||||
.insert(path.clone(), true);
|
||||
return Ok(cached.summary);
|
||||
}
|
||||
}
|
||||
|
||||
progress_bar.set_message(format!("Summarizing {}", path.display()));
|
||||
|
||||
let content = tokio::fs::read_to_string(&path)
|
||||
.await
|
||||
.unwrap_or_else(|_| "binary file".into());
|
||||
|
||||
let mut summary = String::new();
|
||||
|
||||
if path.extension().map_or(false, |ext| ext == "rs") {
|
||||
let tree = parser.parse(&content, None).unwrap();
|
||||
let root_node = tree.root_node();
|
||||
|
||||
let export_query = tree_sitter::Query::new(
|
||||
&rust_language,
|
||||
include_str!("./rust_exports.scm"),
|
||||
)
|
||||
.unwrap();
|
||||
let mut export_cursor = tree_sitter::QueryCursor::new();
|
||||
let mut exports = Vec::new();
|
||||
for m in export_cursor.matches(
|
||||
&export_query,
|
||||
root_node,
|
||||
content.as_bytes(),
|
||||
) {
|
||||
let mut current_level = 0;
|
||||
let mut current_export = String::new();
|
||||
for c in m.captures {
|
||||
let export = content[c.node.byte_range()].to_string();
|
||||
let indent = " ".repeat(current_level);
|
||||
if current_level == 0 {
|
||||
current_export = format!("{}{}", indent, export);
|
||||
} else {
|
||||
current_export
|
||||
.push_str(&format!("\n{}{}", indent, export));
|
||||
}
|
||||
current_level += 1;
|
||||
}
|
||||
exports.push(current_export);
|
||||
}
|
||||
|
||||
let import_query = tree_sitter::Query::new(
|
||||
&rust_language,
|
||||
include_str!("./rust_imports.scm"),
|
||||
)
|
||||
.unwrap();
|
||||
let mut import_cursor = tree_sitter::QueryCursor::new();
|
||||
let imports: Vec<_> = import_cursor
|
||||
.matches(&import_query, root_node, content.as_bytes())
|
||||
.flat_map(|m| m.captures)
|
||||
.map(|c| content[c.node.byte_range()].to_string())
|
||||
.collect();
|
||||
|
||||
summary.push_str("Summary: Rust file containing ");
|
||||
if !exports.is_empty() {
|
||||
summary.push_str(&format!("{} exports", exports.len()));
|
||||
if !imports.is_empty() {
|
||||
summary.push_str(" and ");
|
||||
}
|
||||
}
|
||||
if !imports.is_empty() {
|
||||
summary.push_str(&format!("{} imports", imports.len()));
|
||||
}
|
||||
summary.push('.');
|
||||
|
||||
if !exports.is_empty() {
|
||||
summary.push_str("\nExports:\n");
|
||||
summary.push_str(&exports.join("\n"));
|
||||
}
|
||||
if !imports.is_empty() {
|
||||
summary.push_str("\nImports: ");
|
||||
summary.push_str(&imports.join(", "));
|
||||
}
|
||||
|
||||
println!("{}", summary);
|
||||
} else {
|
||||
let chunks = split_into_chunks(
|
||||
&content, &tokenizer, CHUNK_SIZE, OVERLAP,
|
||||
);
|
||||
let chunk_summaries =
|
||||
summarize_chunks(&client, &chunks).await?;
|
||||
summary =
|
||||
combine_summaries(&client, &chunk_summaries, true).await?;
|
||||
}
|
||||
|
||||
let cached_summary = CachedSummary {
|
||||
summary: summary.clone(),
|
||||
mtime,
|
||||
};
|
||||
database
|
||||
.transact(move |db, mut txn| {
|
||||
db.put(&mut txn, &key, &cached_summary)?;
|
||||
txn.commit()?;
|
||||
Ok(())
|
||||
})
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(summary)
|
||||
};
|
||||
|
||||
let summary = summary.await.unwrap_or_else(|error| {
|
||||
format!("path could not be summarized: {error:?}")
|
||||
});
|
||||
summaries.lock().await.insert(path, summary);
|
||||
progress_bar.inc(1);
|
||||
}
|
||||
Entry::Directory(path) => {
|
||||
let mut dir_summaries = Vec::new();
|
||||
let mut all_children_summarized = true;
|
||||
let mut all_children_from_cache = true;
|
||||
let dir_walker = ignore::WalkBuilder::new(&path)
|
||||
.hidden(true)
|
||||
.ignore(true)
|
||||
.max_depth(Some(1))
|
||||
.build();
|
||||
for entry in dir_walker {
|
||||
if let Ok(entry) = entry {
|
||||
if entry.path() != path {
|
||||
if let Some(summary) =
|
||||
summaries.lock().await.get(entry.path())
|
||||
{
|
||||
dir_summaries.push(summary.clone());
|
||||
if !paths_loaded_from_cache
|
||||
.lock()
|
||||
.await
|
||||
.get(entry.path())
|
||||
.unwrap_or(&false)
|
||||
{
|
||||
all_children_from_cache = false;
|
||||
}
|
||||
} else {
|
||||
all_children_summarized = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if all_children_summarized {
|
||||
drop(queue_lock);
|
||||
|
||||
let combined_summary = async {
|
||||
let key = path.to_string_lossy().to_string();
|
||||
let mtime = tokio::fs::metadata(&path).await?.modified()?;
|
||||
|
||||
if all_children_from_cache {
|
||||
if let Some(cached) = database
|
||||
.transact({
|
||||
let key = key.clone();
|
||||
move |db, txn| Ok(db.get(&txn, &key)?)
|
||||
})
|
||||
.await?
|
||||
{
|
||||
paths_loaded_from_cache
|
||||
.lock()
|
||||
.await
|
||||
.insert(path.clone(), true);
|
||||
return Ok(cached.summary);
|
||||
}
|
||||
}
|
||||
|
||||
progress_bar
|
||||
.set_message(format!("Summarizing {}", path.display()));
|
||||
|
||||
let combined_summary =
|
||||
combine_summaries(&client, &dir_summaries, false).await?;
|
||||
let cached_summary = CachedSummary {
|
||||
summary: combined_summary.clone(),
|
||||
mtime,
|
||||
};
|
||||
database
|
||||
.transact(move |db, mut txn| {
|
||||
db.put(&mut txn, &key, &cached_summary)?;
|
||||
txn.commit()?;
|
||||
Ok(())
|
||||
})
|
||||
.await?;
|
||||
anyhow::Ok(combined_summary)
|
||||
};
|
||||
|
||||
let combined_summary = combined_summary
|
||||
.await
|
||||
.unwrap_or_else(|_| "could not combine summaries".into());
|
||||
summaries.lock().await.insert(path, combined_summary);
|
||||
progress_bar.inc(1);
|
||||
} else {
|
||||
queue_lock.push_back(Entry::Directory(path));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok::<_, anyhow::Error>(())
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
for worker in workers {
|
||||
worker.await??;
|
||||
}
|
||||
|
||||
// Remove deleted entries from the database
|
||||
database
|
||||
.transact(|db, mut txn| {
|
||||
let mut paths_to_delete = Vec::new();
|
||||
for item in db.iter(&txn)? {
|
||||
let (path, _) = item?;
|
||||
let path = PathBuf::from(path);
|
||||
if !path.exists() {
|
||||
paths_to_delete.push(path);
|
||||
}
|
||||
}
|
||||
|
||||
for path in paths_to_delete {
|
||||
db.delete(&mut txn, &path.to_string_lossy())?;
|
||||
}
|
||||
txn.commit()?;
|
||||
Ok(())
|
||||
})
|
||||
.await?;
|
||||
|
||||
progress_bar.finish_with_message("Summarization complete");
|
||||
overall_progress.finish_with_message("Project summarization finished");
|
||||
|
||||
Ok(Arc::try_unwrap(summaries).unwrap().into_inner())
|
||||
}
|
||||
|
||||
fn split_into_chunks(
|
||||
content: &str,
|
||||
tokenizer: &Tokenizer,
|
||||
chunk_size: usize,
|
||||
overlap: usize,
|
||||
) -> Vec<String> {
|
||||
let mut chunks = Vec::new();
|
||||
let lines: Vec<&str> = content.lines().collect();
|
||||
let mut current_chunk = String::new();
|
||||
let mut current_tokens = 0;
|
||||
|
||||
for line in lines {
|
||||
let line_tokens = tokenizer.encode(line, false).unwrap().get_ids().len();
|
||||
if current_tokens + line_tokens > chunk_size {
|
||||
chunks.push(current_chunk.clone());
|
||||
current_chunk.clear();
|
||||
current_tokens = 0;
|
||||
}
|
||||
current_chunk.push_str(line);
|
||||
current_chunk.push('\n');
|
||||
current_tokens += line_tokens;
|
||||
}
|
||||
|
||||
if !current_chunk.is_empty() {
|
||||
chunks.push(current_chunk);
|
||||
}
|
||||
|
||||
// Add overlap
|
||||
for i in 1..chunks.len() {
|
||||
let overlap_text = chunks[i - 1]
|
||||
.lines()
|
||||
.rev()
|
||||
.take(overlap)
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.rev()
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
chunks[i] = format!("{}\n{}", overlap_text, chunks[i]);
|
||||
}
|
||||
|
||||
chunks
|
||||
}
|
||||
|
||||
async fn summarize_chunks(client: &HuggingFaceClient, chunks: &[String]) -> Result<Vec<String>> {
|
||||
let mut chunk_summaries = Vec::new();
|
||||
|
||||
for chunk in chunks {
|
||||
let summary = summarize_file(client, chunk).await?;
|
||||
chunk_summaries.push(summary);
|
||||
}
|
||||
|
||||
Ok(chunk_summaries)
|
||||
}
|
||||
|
||||
async fn summarize_file(client: &HuggingFaceClient, content: &str) -> Result<String> {
|
||||
let messages = vec![Message {
|
||||
role: "user".to_string(),
|
||||
content: format!(
|
||||
"You are a code summarization assistant. \
|
||||
Provide a brief summary of the given file, \
|
||||
focusing on its main functionality and purpose. \
|
||||
Be terse and start your response directly with \"Summary: \".\n\
|
||||
File:\n{}",
|
||||
content
|
||||
),
|
||||
}];
|
||||
|
||||
let mut receiver = client
|
||||
.stream_completion("tgi".to_string(), messages)
|
||||
.await?;
|
||||
|
||||
let mut summary = String::new();
|
||||
while let Some(content) = receiver.recv().await {
|
||||
summary.push_str(&content);
|
||||
}
|
||||
|
||||
Ok(summary)
|
||||
}
|
||||
|
||||
async fn combine_summaries(
|
||||
client: &HuggingFaceClient,
|
||||
summaries: &[String],
|
||||
is_chunk: bool,
|
||||
) -> Result<String> {
|
||||
let combined_content = summaries.join("\n## Summary\n");
|
||||
let prompt = if is_chunk {
|
||||
concat!(
|
||||
"You are a code summarization assistant. ",
|
||||
"Combine the given summaries into a single, coherent summary ",
|
||||
"that captures the overall functionality and structure of the code. ",
|
||||
"Ensure that the final summary is comprehensive and reflects ",
|
||||
"the content as if it was summarized from a single, complete file. ",
|
||||
"Be terse and start your response with \"Summary: \""
|
||||
)
|
||||
} else {
|
||||
concat!(
|
||||
"You are a code summarization assistant. ",
|
||||
"Combine the given summaries of different files or directories ",
|
||||
"into a single, coherent summary that captures the overall ",
|
||||
"structure and functionality of the project or directory. ",
|
||||
"Focus on the relationships between different components ",
|
||||
"and the high-level architecture. ",
|
||||
"Be terse and start your response with \"Summary: \""
|
||||
)
|
||||
};
|
||||
|
||||
let messages = vec![Message {
|
||||
role: "user".to_string(),
|
||||
content: format!("{}\n# Summaries\n{}", prompt, combined_content),
|
||||
}];
|
||||
|
||||
let mut receiver = client
|
||||
.stream_completion("tgi".to_string(), messages)
|
||||
.await?;
|
||||
|
||||
let mut combined_summary = String::new();
|
||||
while let Some(content) = receiver.recv().await {
|
||||
combined_summary.push_str(&content);
|
||||
}
|
||||
|
||||
Ok(combined_summary)
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
let args: Vec<String> = std::env::args().collect();
|
||||
|
||||
if args.len() < 2 {
|
||||
eprintln!(
|
||||
"Usage: {} <project_path> [db_path] [num_workers] [--read=path]",
|
||||
args[0]
|
||||
);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let project_path = Path::new(&args[1]);
|
||||
if !project_path.exists() || !project_path.is_dir() {
|
||||
eprintln!("Error: The provided project path does not exist or is not a directory.");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let db_path = if args.len() >= 3 && !args[2].starts_with("--") {
|
||||
PathBuf::from(&args[2])
|
||||
} else {
|
||||
std::env::current_dir()?.join("project_summaries")
|
||||
};
|
||||
|
||||
let num_workers = if args.len() >= 4 && !args[3].starts_with("--") {
|
||||
args[3].parse().unwrap_or(8)
|
||||
} else {
|
||||
8
|
||||
};
|
||||
|
||||
println!("Summarizing project at: {}", project_path.display());
|
||||
println!("Using database at: {}", db_path.display());
|
||||
println!("Number of workers: {}", num_workers);
|
||||
let summaries = summarize_project(&db_path, project_path, num_workers).await?;
|
||||
println!("Finished summarization");
|
||||
|
||||
// Check if --read flag is provided
|
||||
if let Some(read_path) = args.iter().find(|arg| arg.starts_with("--read=")) {
|
||||
let path = Path::new(&read_path[7..]);
|
||||
let full_path = project_path.join(path);
|
||||
for (child_path, summary) in summaries.iter() {
|
||||
if child_path.parent() == Some(&full_path) {
|
||||
println!("<path>{}</path>", child_path.to_string_lossy());
|
||||
println!("<summary>{}</summary>", summary);
|
||||
println!();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
dbg!(summaries);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// #[derive(Debug, Serialize)]
|
||||
// struct ChatCompletionRequest {
|
||||
// model: String,
|
||||
// messages: Vec<Message>,
|
||||
// stream: bool,
|
||||
// }
|
||||
//
|
||||
// #[derive(Debug, Deserialize)]
|
||||
// struct ChatCompletionChunk {
|
||||
// choices: Vec<Choice>,
|
||||
// }
|
||||
|
||||
// #[derive(Debug, Deserialize)]
|
||||
// struct Choice {
|
||||
// delta: Delta,
|
||||
// }
|
||||
|
||||
// #[derive(Debug, Deserialize)]
|
||||
// struct Delta {
|
||||
// content: Option<String>,
|
||||
// }
|
||||
|
||||
// pub struct GroqClient {
|
||||
// client: Client,
|
||||
// api_key: String,
|
||||
// }
|
||||
|
||||
// impl GroqClient {
|
||||
// pub fn new(api_key: String) -> Self {
|
||||
// Self {
|
||||
// client: Client::new(),
|
||||
// api_key,
|
||||
// }
|
||||
// }
|
||||
|
||||
// async fn stream_completion(
|
||||
// &self,
|
||||
// model: String,
|
||||
// messages: Vec<Message>,
|
||||
// ) -> Result<mpsc::Receiver<String>> {
|
||||
// let (tx, rx) = mpsc::channel(100);
|
||||
|
||||
// let request = ChatCompletionRequest {
|
||||
// model,
|
||||
// messages,
|
||||
// stream: true,
|
||||
// };
|
||||
|
||||
// let response = self
|
||||
// .client
|
||||
// .post("https://api.groq.com/openai/v1/chat/completions")
|
||||
// .header("Authorization", format!("Bearer {}", self.api_key))
|
||||
// .json(&request)
|
||||
// .send
|
||||
@@ -1,6 +0,0 @@
|
||||
(mod_item name: (identifier) @export)
|
||||
(struct_item name: (type_identifier) @export)
|
||||
(impl_item type: (type_identifier) @export)
|
||||
(enum_item name: (type_identifier) @export)
|
||||
(function_item name: (identifier) @export)
|
||||
(trait_item name: (type_identifier) @export)
|
||||
@@ -1 +0,0 @@
|
||||
(use_declaration) @import
|
||||
@@ -1,6 +1,7 @@
|
||||
mod archive;
|
||||
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
pub use archive::extract_zip;
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use futures::AsyncReadExt;
|
||||
|
||||
@@ -72,7 +72,7 @@ pub struct ProjectPanel {
|
||||
width: Option<Pixels>,
|
||||
pending_serialization: Task<Option<()>>,
|
||||
show_scrollbar: bool,
|
||||
is_dragging_scrollbar: Rc<Cell<bool>>,
|
||||
scrollbar_drag_thumb_offset: Rc<Cell<Option<f32>>>,
|
||||
hide_scrollbar_task: Option<Task<()>>,
|
||||
}
|
||||
|
||||
@@ -289,7 +289,7 @@ impl ProjectPanel {
|
||||
pending_serialization: Task::ready(None),
|
||||
show_scrollbar: !Self::should_autohide_scrollbar(cx),
|
||||
hide_scrollbar_task: None,
|
||||
is_dragging_scrollbar: Default::default(),
|
||||
scrollbar_drag_thumb_offset: Default::default(),
|
||||
};
|
||||
this.update_visible_entries(None, cx);
|
||||
|
||||
@@ -2231,7 +2231,7 @@ impl ProjectPanel {
|
||||
|
||||
let height = scroll_handle
|
||||
.last_item_height
|
||||
.filter(|_| self.show_scrollbar || self.is_dragging_scrollbar.get())?;
|
||||
.filter(|_| self.show_scrollbar || self.scrollbar_drag_thumb_offset.get().is_some())?;
|
||||
|
||||
let total_list_length = height.0 as f64 * items_count as f64;
|
||||
let current_offset = scroll_handle.base_handle.offset().y.0.min(0.).abs() as f64;
|
||||
@@ -2270,7 +2270,7 @@ impl ProjectPanel {
|
||||
.on_mouse_up(
|
||||
MouseButton::Left,
|
||||
cx.listener(|this, _, cx| {
|
||||
if !this.is_dragging_scrollbar.get()
|
||||
if this.scrollbar_drag_thumb_offset.get().is_none()
|
||||
&& !this.focus_handle.contains_focused(cx)
|
||||
{
|
||||
this.hide_scrollbar(cx);
|
||||
@@ -2293,7 +2293,7 @@ impl ProjectPanel {
|
||||
.child(ProjectPanelScrollbar::new(
|
||||
percentage as f32..end_offset as f32,
|
||||
self.scroll_handle.clone(),
|
||||
self.is_dragging_scrollbar.clone(),
|
||||
self.scrollbar_drag_thumb_offset.clone(),
|
||||
cx.view().clone().into(),
|
||||
items_count,
|
||||
)),
|
||||
|
||||
@@ -9,7 +9,8 @@ use ui::{prelude::*, px, relative, IntoElement};
|
||||
pub(crate) struct ProjectPanelScrollbar {
|
||||
thumb: Range<f32>,
|
||||
scroll: UniformListScrollHandle,
|
||||
is_dragging_scrollbar: Rc<Cell<bool>>,
|
||||
// If Some(), there's an active drag, offset by percentage from the top of thumb.
|
||||
scrollbar_drag_state: Rc<Cell<Option<f32>>>,
|
||||
item_count: usize,
|
||||
view: AnyView,
|
||||
}
|
||||
@@ -18,14 +19,14 @@ impl ProjectPanelScrollbar {
|
||||
pub(crate) fn new(
|
||||
thumb: Range<f32>,
|
||||
scroll: UniformListScrollHandle,
|
||||
is_dragging_scrollbar: Rc<Cell<bool>>,
|
||||
scrollbar_drag_state: Rc<Cell<Option<f32>>>,
|
||||
view: AnyView,
|
||||
item_count: usize,
|
||||
) -> Self {
|
||||
Self {
|
||||
thumb,
|
||||
scroll,
|
||||
is_dragging_scrollbar,
|
||||
scrollbar_drag_state,
|
||||
item_count,
|
||||
view,
|
||||
}
|
||||
@@ -97,7 +98,7 @@ impl gpui::Element for ProjectPanelScrollbar {
|
||||
let item_count = self.item_count;
|
||||
cx.on_mouse_event({
|
||||
let scroll = self.scroll.clone();
|
||||
let is_dragging = self.is_dragging_scrollbar.clone();
|
||||
let is_dragging = self.scrollbar_drag_state.clone();
|
||||
move |event: &MouseDownEvent, phase, _cx| {
|
||||
if phase.bubble() && bounds.contains(&event.position) {
|
||||
if !thumb_bounds.contains(&event.position) {
|
||||
@@ -113,7 +114,9 @@ impl gpui::Element for ProjectPanelScrollbar {
|
||||
.set_offset(point(px(0.), -max_offset * percentage));
|
||||
}
|
||||
} else {
|
||||
is_dragging.set(true);
|
||||
let thumb_top_offset =
|
||||
(event.position.y - thumb_bounds.origin.y) / bounds.size.height;
|
||||
is_dragging.set(Some(thumb_top_offset));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -130,14 +133,15 @@ impl gpui::Element for ProjectPanelScrollbar {
|
||||
}
|
||||
}
|
||||
});
|
||||
let is_dragging = self.is_dragging_scrollbar.clone();
|
||||
let drag_state = self.scrollbar_drag_state.clone();
|
||||
let view_id = self.view.entity_id();
|
||||
cx.on_mouse_event(move |event: &MouseMoveEvent, _, cx| {
|
||||
if event.dragging() && is_dragging.get() {
|
||||
if let Some(drag_state) = drag_state.get().filter(|_| event.dragging()) {
|
||||
let scroll = scroll.0.borrow();
|
||||
if let Some(last_height) = scroll.last_item_height {
|
||||
let max_offset = item_count as f32 * last_height;
|
||||
let percentage = (event.position.y - bounds.origin.y) / bounds.size.height;
|
||||
let percentage =
|
||||
(event.position.y - bounds.origin.y) / bounds.size.height - drag_state;
|
||||
|
||||
let percentage = percentage.min(1. - thumb_percentage_size);
|
||||
scroll
|
||||
@@ -146,13 +150,13 @@ impl gpui::Element for ProjectPanelScrollbar {
|
||||
cx.notify(view_id);
|
||||
}
|
||||
} else {
|
||||
is_dragging.set(false);
|
||||
drag_state.set(None);
|
||||
}
|
||||
});
|
||||
let is_dragging = self.is_dragging_scrollbar.clone();
|
||||
let is_dragging = self.scrollbar_drag_state.clone();
|
||||
cx.on_mouse_event(move |_event: &MouseUpEvent, phase, cx| {
|
||||
if phase.bubble() {
|
||||
is_dragging.set(false);
|
||||
is_dragging.set(None);
|
||||
cx.notify(view_id);
|
||||
}
|
||||
});
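
Storing the drag as an `Option<f32>` offset lets the thumb keep the original grab point under the cursor: each mouse move subtracts the offset within the thumb (expressed as a fraction of the track) before converting to a scroll position. A back-of-the-envelope sketch of that arithmetic; the clamp at zero is added here for safety and is not part of the hunk:

```rust
// Map a mouse position to a scroll fraction, compensating for where inside
// the thumb the drag started.
fn scroll_fraction(
    mouse_y: f32,
    track_top: f32,
    track_height: f32,
    drag_offset: f32,    // grab point within the thumb, as a fraction of the track
    thumb_fraction: f32, // thumb height as a fraction of the track
) -> f32 {
    let raw = (mouse_y - track_top) / track_height - drag_offset;
    raw.clamp(0.0, 1.0 - thumb_fraction)
}

fn main() {
    // Drag started 10% of the track below the thumb's top, on a 400px track at y = 50.
    let fraction = scroll_fraction(250.0, 50.0, 400.0, 0.10, 0.25);
    assert!((fraction - 0.40).abs() < 1e-6);
    println!("scroll to {:.0}% of the list", fraction * 100.0);
}
```
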
@@ -60,7 +60,12 @@ impl ImageView {
|
||||
let bytes = base64::decode(base64_encoded_data)?;
|
||||
|
||||
let format = image::guess_format(&bytes)?;
|
||||
let data = image::load_from_memory_with_format(&bytes, format)?.into_bgra8();
|
||||
let mut data = image::load_from_memory_with_format(&bytes, format)?.into_rgba8();
|
||||
|
||||
// Convert from RGBA to BGRA.
|
||||
for pixel in data.chunks_exact_mut(4) {
|
||||
pixel.swap(0, 2);
|
||||
}
|
||||
|
||||
let height = data.height();
|
||||
let width = data.width();
|
||||
|
||||
@@ -127,14 +127,16 @@ fn search(workspace: &mut Workspace, action: &Search, cx: &mut ViewContext<Works
|
||||
search_bar.set_replacement(None, cx);
|
||||
search_bar.set_search_options(SearchOptions::REGEX, cx);
|
||||
}
|
||||
vim.workspace_state.search = SearchState {
|
||||
direction,
|
||||
count,
|
||||
initial_query: query.clone(),
|
||||
prior_selections,
|
||||
prior_operator: vim.active_operator(),
|
||||
prior_mode: vim.state().mode,
|
||||
};
|
||||
vim.update_state(|state| {
|
||||
state.search = SearchState {
|
||||
direction,
|
||||
count,
|
||||
initial_query: query.clone(),
|
||||
prior_selections,
|
||||
prior_operator: state.operator_stack.last().cloned(),
|
||||
prior_mode: state.mode,
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
@@ -143,7 +145,9 @@ fn search(workspace: &mut Workspace, action: &Search, cx: &mut ViewContext<Works
|
||||
|
||||
// hook into the existing to clear out any vim search state on cmd+f or edit -> find.
|
||||
fn search_deploy(_: &mut Workspace, _: &buffer_search::Deploy, cx: &mut ViewContext<Workspace>) {
|
||||
Vim::update(cx, |vim, _| vim.workspace_state.search = Default::default());
|
||||
Vim::update(cx, |vim, _| {
|
||||
vim.update_state(|state| state.search = Default::default())
|
||||
});
|
||||
cx.propagate();
|
||||
}
|
||||
|
||||
@@ -154,27 +158,32 @@ fn search_submit(workspace: &mut Workspace, _: &SearchSubmit, cx: &mut ViewConte
        pane.update(cx, |pane, cx| {
            if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
                search_bar.update(cx, |search_bar, cx| {
-                    let state = &mut vim.workspace_state.search;
-                    let mut count = state.count;
-                    let direction = state.direction;
+                    let (mut prior_selections, prior_mode, prior_operator) =
+                        vim.update_state(|state| {
+                            let mut count = state.search.count;
+                            let direction = state.search.direction;
+                            // in the case that the query has changed, the search bar
+                            // will have selected the next match already.
+                            if (search_bar.query(cx) != state.search.initial_query)
+                                && state.search.direction == Direction::Next
+                            {
+                                count = count.saturating_sub(1)
+                            }
+                            state.search.count = 1;
+                            search_bar.select_match(direction, count, cx);
+                            search_bar.focus_editor(&Default::default(), cx);
+
+                            let prior_selections: Vec<_> =
+                                state.search.prior_selections.drain(..).collect();
+                            let prior_mode = state.search.prior_mode;
+                            let prior_operator = state.search.prior_operator.take();
+                            (prior_selections, prior_mode, prior_operator)
+                        });

-                    // in the case that the query has changed, the search bar
-                    // will have selected the next match already.
-                    if (search_bar.query(cx) != state.initial_query)
-                        && state.direction == Direction::Next
-                    {
-                        count = count.saturating_sub(1)
-                    }
                    vim.workspace_state
                        .registers
                        .insert('/', search_bar.query(cx).into());
-                    state.count = 1;
-                    search_bar.select_match(direction, count, cx);
-                    search_bar.focus_editor(&Default::default(), cx);

-                    let mut prior_selections: Vec<_> = state.prior_selections.drain(..).collect();
-                    let prior_mode = state.prior_mode;
-                    let prior_operator = state.prior_operator.take();
                    let new_selections = vim.editor_selections(cx);

                    // If the active editor has changed during a search, don't panic.
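These vim hunks move search state off the global `WorkspaceState` and onto per-editor state, with mutations routed through `vim.update_state`, which runs a closure against the active editor's state and hands back whatever the closure returns. A rough sketch of that access pattern, with deliberately simplified types that are not Zed's actual ones:

use std::collections::HashMap;

#[derive(Default)]
struct SearchState {
    count: usize,
    initial_query: String,
}

#[derive(Default)]
struct EditorState {
    search: SearchState,
}

#[derive(Default)]
struct Vim {
    active_editor: usize,
    editor_states: HashMap<usize, EditorState>,
}

impl Vim {
    /// Run `update` against the active editor's state, creating it on first
    /// use, and return whatever the closure produced.
    fn update_state<T>(&mut self, update: impl FnOnce(&mut EditorState) -> T) -> T {
        let state = self.editor_states.entry(self.active_editor).or_default();
        update(state)
    }
}

fn main() {
    let mut vim = Vim::default();
    let count = vim.update_state(|state| {
        state.search.count = 3;
        state.search.initial_query = "foo".to_string();
        state.search.count
    });
    assert_eq!(count, 3);
}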
@@ -93,6 +93,7 @@ pub struct EditorState {
    pub undo_modes: HashMap<TransactionId, Mode>,

    pub selected_register: Option<char>,
+    pub search: SearchState,
}

#[derive(Default, Clone, Debug)]
@@ -152,7 +153,6 @@ impl From<String> for Register {

#[derive(Default, Clone)]
pub struct WorkspaceState {
-    pub search: SearchState,
    pub last_find: Option<Motion>,

    pub recording: bool,
@@ -1,6 +1,7 @@
use ignore::gitignore::Gitignore;
use std::{ffi::OsStr, path::Path, sync::Arc};

+#[derive(Debug)]
pub enum IgnoreStack {
    None,
    Some {
@@ -3825,19 +3825,8 @@ impl BackgroundScanner {
            .collect::<Vec<_>>()
            .await;

-        // Ensure .git and gitignore files are processed first.
-        let mut ixs_to_move_to_front = Vec::new();
-        for (ix, child_abs_path) in child_paths.iter().enumerate() {
-            let filename = child_abs_path.file_name().unwrap();
-            if filename == *DOT_GIT {
-                ixs_to_move_to_front.insert(0, ix);
-            } else if filename == *GITIGNORE {
-                ixs_to_move_to_front.push(ix);
-            }
-        }
-        for (dest_ix, src_ix) in ixs_to_move_to_front.into_iter().enumerate() {
-            child_paths.swap(dest_ix, src_ix);
-        }
+        // Ensure that .git and .gitignore are processed first.
+        child_paths.sort_unstable();

        for child_abs_path in child_paths {
            let child_abs_path: Arc<Path> = child_abs_path.into();
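The swap-to-front bookkeeping above is replaced by a plain lexicographic sort: for typical directory contents, dot-prefixed names such as `.git` and `.gitignore` order before other entries (ASCII `.` precedes digits and letters), and `.git` orders before `.gitignore` because it is a prefix of it. A small demonstration of that ordering (the real code sorts paths, this demo uses string slices for brevity):

fn main() {
    let mut child_paths = vec!["src", "Cargo.toml", ".gitignore", ".git", "README.md"];
    child_paths.sort_unstable();
    // '.' (0x2E) sorts before digits and letters, and ".git" is a prefix of
    // ".gitignore", so the ignore-related entries come first.
    assert_eq!(
        child_paths,
        [".git", ".gitignore", "Cargo.toml", "README.md", "src"]
    );
}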
@@ -4087,6 +4076,7 @@ impl BackgroundScanner {

        let is_dir = fs_entry.is_dir();
        fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);

        fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
        fs_entry.is_private = self.is_path_private(path);

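The `is_external` flag in the hunk above marks entries whose canonical (symlink-resolved) path escapes the worktree root, e.g. a symlink that points outside the project. A minimal illustration of that check (the function name and paths here are hypothetical):

use std::path::Path;

/// An entry is "external" if its canonical path does not live under the
/// canonical worktree root.
fn is_external(canonical_path: &Path, root_canonical_path: &Path) -> bool {
    !canonical_path.starts_with(root_canonical_path)
}

fn main() {
    let root = Path::new("/home/user/project");
    assert!(!is_external(Path::new("/home/user/project/src/main.rs"), root));
    assert!(is_external(Path::new("/usr/lib/libfoo.so"), root)); // e.g. a symlink target outside the root
}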
@@ -4248,6 +4238,7 @@ impl BackgroundScanner {
            let was_ignored = entry.is_ignored;
            let abs_path: Arc<Path> = snapshot.abs_path().join(&entry.path).into();
            entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());

            if entry.is_dir() {
                let child_ignore_stack = if entry.is_ignored {
                    IgnoreStack::all()
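For context on the structure these scanner hunks lean on: an ignore stack layers .gitignore scopes as directories are descended, and a directory that is itself ignored pushes an "ignore everything" frame so its children are skipped without consulting further gitignore files. A simplified sketch of that shape, not Zed's actual types (the toy `Gitignore` below stands in for the `ignore` crate's matcher):

use std::path::{Path, PathBuf};
use std::sync::Arc;

// Toy stand-in for a parsed .gitignore.
struct Gitignore {
    patterns: Vec<String>,
}

impl Gitignore {
    fn matches(&self, abs_path: &Path) -> bool {
        // Treat each pattern as a plain file or directory name.
        self.patterns.iter().any(|p| abs_path.ends_with(p))
    }
}

enum IgnoreStack {
    /// Nothing is ignored at this level.
    None,
    /// Everything below this level is ignored (the parent dir was ignored).
    All,
    /// A .gitignore rooted at `abs_base_path`, layered over the parent stack.
    Some {
        abs_base_path: PathBuf,
        ignore: Arc<Gitignore>,
        parent: Arc<IgnoreStack>,
    },
}

impl IgnoreStack {
    fn is_abs_path_ignored(&self, abs_path: &Path) -> bool {
        match self {
            IgnoreStack::None => false,
            IgnoreStack::All => true,
            IgnoreStack::Some { ignore, parent, .. } => {
                ignore.matches(abs_path) || parent.is_abs_path_ignored(abs_path)
            }
        }
    }
}

In the hunk above, an ignored directory hands its children the `All`-style stack (what `IgnoreStack::all()` returns in the real code), which is why ignore status can be answered for everything beneath it without re-reading any gitignore files.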
@@ -219,7 +219,7 @@ fn init_ui(app_state: Arc<AppState>, cx: &mut AppContext) -> Result<()> {

    inline_completion_registry::init(app_state.client.telemetry().clone(), cx);

-    assistant::init(app_state.client.clone(), cx);
+    assistant::init(app_state.fs.clone(), app_state.client.clone(), cx);

    repl::init(app_state.fs.clone(), cx);

@@ -105,6 +105,10 @@ pub fn build_window_options(display_uuid: Option<Uuid>, cx: &mut AppContext) ->
        display_id: display.map(|display| display.id()),
        window_background: cx.theme().window_background_appearance(),
        app_id: Some(app_id.to_owned()),
+        window_min_size: gpui::Size {
+            width: px(360.0),
+            height: px(240.0),
+        },
    }
}

@@ -3177,7 +3181,7 @@ mod tests {
        project_panel::init((), cx);
        outline_panel::init((), cx);
        terminal_view::init(cx);
-        assistant::init(app_state.client.clone(), cx);
+        assistant::init(app_state.fs.clone(), app_state.client.clone(), cx);
        tasks_ui::init(cx);
        initialize_workspace(app_state.clone(), cx);
        app_state